Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
diff options
context:
space:
mode:
Diffstat (limited to 'app/services')
-rw-r--r--app/services/achievements/update_user_achievement_service.rb43
-rw-r--r--app/services/analytics/cycle_analytics/stages/list_service.rb9
-rw-r--r--app/services/analytics/cycle_analytics/value_streams/list_service.rb40
-rw-r--r--app/services/application_settings/update_service.rb15
-rw-r--r--app/services/audit_event_service.rb2
-rw-r--r--app/services/audit_events/build_service.rb14
-rw-r--r--app/services/auth/container_registry_authentication_service.rb58
-rw-r--r--app/services/auth/dependency_proxy_authentication_service.rb14
-rw-r--r--app/services/auto_merge/base_service.rb17
-rw-r--r--app/services/auto_merge/merge_when_checks_pass_service.rb76
-rw-r--r--app/services/auto_merge/merge_when_pipeline_succeeds_service.rb22
-rw-r--r--app/services/auto_merge_service.rb5
-rw-r--r--app/services/award_emojis/add_service.rb3
-rw-r--r--app/services/base_container_service.rb4
-rw-r--r--app/services/boards/base_item_move_service.rb11
-rw-r--r--app/services/boards/base_items_list_service.rb24
-rw-r--r--app/services/boards/issues/list_service.rb4
-rw-r--r--app/services/branch_rules/base_service.rb47
-rw-r--r--app/services/branch_rules/destroy_service.rb21
-rw-r--r--app/services/branch_rules/update_service.rb126
-rw-r--r--app/services/branches/create_service.rb9
-rw-r--r--app/services/branches/delete_service.rb4
-rw-r--r--app/services/bulk_create_integration_service.rb35
-rw-r--r--app/services/bulk_imports/batched_relation_export_service.rb4
-rw-r--r--app/services/bulk_imports/create_service.rb51
-rw-r--r--app/services/bulk_imports/file_decompression_service.rb2
-rw-r--r--app/services/bulk_imports/file_download_service.rb14
-rw-r--r--app/services/bulk_imports/relation_export_service.rb2
-rw-r--r--app/services/bulk_imports/user_contributions_export_service.rb19
-rw-r--r--app/services/bulk_update_integration_service.rb43
-rw-r--r--app/services/ci/cancel_pipeline_service.rb14
-rw-r--r--app/services/ci/catalog/resources/aggregate_last30_day_usage_service.rb77
-rw-r--r--app/services/ci/catalog/resources/create_service.rb5
-rw-r--r--app/services/ci/catalog/resources/release_service.rb28
-rw-r--r--app/services/ci/catalog/resources/versions/build_components_service.rb108
-rw-r--r--app/services/ci/catalog/resources/versions/create_service.rb80
-rw-r--r--app/services/ci/change_variable_service.rb19
-rw-r--r--app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb192
-rw-r--r--app/services/ci/components/fetch_service.rb21
-rw-r--r--app/services/ci/components/usages/create_service.rb44
-rw-r--r--app/services/ci/create_commit_status_service.rb30
-rw-r--r--app/services/ci/create_downstream_pipeline_service.rb6
-rw-r--r--app/services/ci/create_pipeline_service.rb78
-rw-r--r--app/services/ci/create_web_ide_terminal_service.rb5
-rw-r--r--app/services/ci/daily_build_group_report_result_service.rb3
-rw-r--r--app/services/ci/delete_objects_service.rb2
-rw-r--r--app/services/ci/drop_pipeline_service.rb4
-rw-r--r--app/services/ci/ensure_stage_service.rb56
-rw-r--r--app/services/ci/expire_pipeline_cache_service.rb4
-rw-r--r--app/services/ci/generate_kubeconfig_service.rb4
-rw-r--r--app/services/ci/job_artifacts/create_service.rb2
-rw-r--r--app/services/ci/job_artifacts/expire_project_build_artifacts_service.rb6
-rw-r--r--app/services/ci/job_artifacts/update_unknown_locked_status_service.rb12
-rw-r--r--app/services/ci/job_token_scope/add_group_or_project_service.rb22
-rw-r--r--app/services/ci/job_token_scope/add_group_service.rb33
-rw-r--r--app/services/ci/job_token_scope/remove_group_service.rb28
-rw-r--r--app/services/ci/list_config_variables_service.rb51
-rw-r--r--app/services/ci/parse_dotenv_artifact_service.rb3
-rw-r--r--app/services/ci/partitions/create_service.rb36
-rw-r--r--app/services/ci/partitions/setup_default_service.rb39
-rw-r--r--app/services/ci/partitions/sync_service.rb41
-rw-r--r--app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb15
-rw-r--r--app/services/ci/pipeline_bridge_status_service.rb4
-rw-r--r--app/services/ci/pipeline_creation/cancel_redundant_pipelines_service.rb103
-rw-r--r--app/services/ci/pipeline_creation/start_pipeline_service.rb4
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service.rb37
-rw-r--r--app/services/ci/pipeline_schedules/base_save_service.rb25
-rw-r--r--app/services/ci/pipeline_schedules/calculate_next_run_service.rb8
-rw-r--r--app/services/ci/pipeline_trigger_service.rb3
-rw-r--r--app/services/ci/pipeline_triggers/create_service.rb47
-rw-r--r--app/services/ci/pipeline_triggers/destroy_service.rb35
-rw-r--r--app/services/ci/pipeline_triggers/update_service.rb42
-rw-r--r--app/services/ci/process_build_service.rb2
-rw-r--r--app/services/ci/queue/build_queue_service.rb2
-rw-r--r--app/services/ci/queue/pending_builds_strategy.rb10
-rw-r--r--app/services/ci/register_job_service.rb30
-rw-r--r--app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb20
-rw-r--r--app/services/ci/retry_job_service.rb19
-rw-r--r--app/services/ci/runners/assign_runner_service.rb41
-rw-r--r--app/services/ci/runners/reconcile_existing_runner_versions_service.rb2
-rw-r--r--app/services/ci/runners/register_runner_service.rb4
-rw-r--r--app/services/ci/runners/set_runner_associated_projects_service.rb22
-rw-r--r--app/services/ci/stuck_builds/drop_canceling_service.rb27
-rw-r--r--app/services/ci/trigger_downstream_pipeline_service.rb53
-rw-r--r--app/services/ci/unlock_pipeline_service.rb16
-rw-r--r--app/services/ci/update_build_names_service.rb37
-rw-r--r--app/services/ci/update_build_queue_service.rb15
-rw-r--r--app/services/ci/update_build_state_service.rb3
-rw-r--r--app/services/ci/update_group_pending_build_service.rb33
-rw-r--r--app/services/ci/update_instance_variables_service.rb27
-rw-r--r--app/services/ci/update_pending_build_service.rb2
-rw-r--r--app/services/click_house/sync_strategies/audit_event_sync_strategy.rb97
-rw-r--r--app/services/click_house/sync_strategies/base_sync_strategy.rb2
-rw-r--r--app/services/click_house/sync_strategies/event_sync_strategy.rb62
-rw-r--r--app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb12
-rw-r--r--app/services/clusters/agent_tokens/track_usage_service.rb6
-rw-r--r--app/services/clusters/agents/authorizations/ci_access/filter_service.rb32
-rw-r--r--app/services/clusters/agents/authorizations/ci_access/refresh_service.rb2
-rw-r--r--app/services/clusters/agents/authorizations/user_access/refresh_service.rb2
-rw-r--r--app/services/clusters/agents/create_service.rb14
-rw-r--r--app/services/clusters/agents/create_url_configuration_service.rb77
-rw-r--r--app/services/clusters/agents/delete_service.rb6
-rw-r--r--app/services/clusters/agents/delete_url_configuration_service.rb45
-rw-r--r--app/services/cohorts_service.rb2
-rw-r--r--app/services/commits/change_service.rb25
-rw-r--r--app/services/commits/cherry_pick_service.rb32
-rw-r--r--app/services/commits/commit_patch_service.rb7
-rw-r--r--app/services/commits/create_service.rb8
-rw-r--r--app/services/commits/revert_service.rb12
-rw-r--r--app/services/concerns/base_service_utility.rb4
-rw-r--r--app/services/concerns/ci/job_token_scope/edit_scope_validations.rb25
-rw-r--r--app/services/concerns/deploy_token_methods.rb4
-rw-r--r--app/services/concerns/exclusive_lease_guard.rb4
-rw-r--r--app/services/concerns/group_linkable.rb2
-rw-r--r--app/services/concerns/integrations/bulk_operation_hashes.rb31
-rw-r--r--app/services/concerns/integrations/group_test_data.rb13
-rw-r--r--app/services/concerns/integrations/project_test_data.rb29
-rw-r--r--app/services/concerns/integrations/propagation/bulk_operation_hashes.rb33
-rw-r--r--app/services/concerns/search/filter.rb8
-rw-r--r--app/services/concerns/update_repository_storage_methods.rb20
-rw-r--r--app/services/concerns/users/participable_service.rb4
-rw-r--r--app/services/concerns/validates_classification_label.rb2
-rw-r--r--app/services/concerns/work_items/widgetable_service.rb43
-rw-r--r--app/services/container_registry/protection/create_rule_service.rb6
-rw-r--r--app/services/container_registry/protection/update_rule_service.rb4
-rw-r--r--app/services/database/consistency_check_service.rb2
-rw-r--r--app/services/dependency_proxy/auth_token_service.rb33
-rw-r--r--app/services/deployments/link_merge_requests_service.rb29
-rw-r--r--app/services/design_management/copy_design_collection/copy_service.rb12
-rw-r--r--app/services/design_management/delete_designs_service.rb7
-rw-r--r--app/services/design_management/save_designs_service.rb5
-rw-r--r--app/services/discussions/resolve_service.rb10
-rw-r--r--app/services/draft_notes/create_service.rb11
-rw-r--r--app/services/draft_notes/destroy_service.rb11
-rw-r--r--app/services/draft_notes/publish_service.rb44
-rw-r--r--app/services/emails/destroy_service.rb9
-rw-r--r--app/services/environments/auto_stop_service.rb4
-rw-r--r--app/services/environments/destroy_service.rb2
-rw-r--r--app/services/environments/stop_service.rb4
-rw-r--r--app/services/event_create_service.rb7
-rw-r--r--app/services/export_csv/base_service.rb2
-rw-r--r--app/services/files/base_service.rb5
-rw-r--r--app/services/files/create_service.rb8
-rw-r--r--app/services/files/update_service.rb10
-rw-r--r--app/services/git/branch_hooks_service.rb23
-rw-r--r--app/services/git/process_ref_changes_service.rb42
-rw-r--r--app/services/git/tag_push_service.rb7
-rw-r--r--app/services/google_cloud_platform/artifact_registry/list_docker_images_service.rb46
-rw-r--r--app/services/gpg_keys/create_service.rb2
-rw-r--r--app/services/gpg_keys/validate_integrations_service.rb11
-rw-r--r--app/services/gravatar_service.rb3
-rw-r--r--app/services/group_access_tokens/rotate_service.rb20
-rw-r--r--app/services/groups/agnostic_token_revocation_service.rb158
-rw-r--r--app/services/groups/autocomplete_service.rb22
-rw-r--r--app/services/groups/base_service.rb2
-rw-r--r--app/services/groups/create_service.rb147
-rw-r--r--app/services/groups/deploy_tokens/revoke_service.rb2
-rw-r--r--app/services/groups/destroy_service.rb15
-rw-r--r--app/services/groups/group_links/create_service.rb5
-rw-r--r--app/services/groups/group_links/destroy_service.rb2
-rw-r--r--app/services/groups/group_links/update_service.rb6
-rw-r--r--app/services/groups/import_export/export_service.rb32
-rw-r--r--app/services/groups/import_export/import_service.rb7
-rw-r--r--app/services/groups/nested_create_service.rb3
-rw-r--r--app/services/groups/open_issues_count_service.rb4
-rw-r--r--app/services/groups/participants_service.rb6
-rw-r--r--app/services/groups/transfer_service.rb18
-rw-r--r--app/services/groups/update_service.rb23
-rw-r--r--app/services/groups/update_shared_runners_service.rb6
-rw-r--r--app/services/ide/base_config_service.rb4
-rw-r--r--app/services/import/base_service.rb10
-rw-r--r--app/services/import/bitbucket_server_service.rb5
-rw-r--r--app/services/import/bitbucket_service.rb106
-rw-r--r--app/services/import/fogbugz_service.rb5
-rw-r--r--app/services/import/github_service.rb53
-rw-r--r--app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb7
-rw-r--r--app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb7
-rw-r--r--app/services/import/placeholder_references/base_service.rb42
-rw-r--r--app/services/import/placeholder_references/load_service.rb99
-rw-r--r--app/services/import/placeholder_references/push_service.rb70
-rw-r--r--app/services/import/reassign_placeholder_user_records_service.rb65
-rw-r--r--app/services/import/source_users/accept_reassignment_service.rb31
-rw-r--r--app/services/import/source_users/base_service.rb30
-rw-r--r--app/services/import/source_users/cancel_reassignment_service.rb31
-rw-r--r--app/services/import/source_users/generate_csv_service.rb71
-rw-r--r--app/services/import/source_users/keep_as_placeholder_service.rb31
-rw-r--r--app/services/import/source_users/reassign_service.rb49
-rw-r--r--app/services/import/source_users/reject_reassignment_service.rb41
-rw-r--r--app/services/import/source_users/resend_notification_service.rb21
-rw-r--r--app/services/import/validate_remote_git_endpoint_service.rb31
-rw-r--r--app/services/import_export_clean_up_service.rb2
-rw-r--r--app/services/incident_management/timeline_events/create_service.rb2
-rw-r--r--app/services/integrations/exclusions/base_service.rb50
-rw-r--r--app/services/integrations/exclusions/create_service.rb121
-rw-r--r--app/services/integrations/exclusions/destroy_service.rb40
-rw-r--r--app/services/integrations/propagate_service.rb30
-rw-r--r--app/services/integrations/propagation/bulk_create_service.rb90
-rw-r--r--app/services/integrations/propagation/bulk_update_service.rb73
-rw-r--r--app/services/integrations/slack_installation/base_service.rb117
-rw-r--r--app/services/integrations/slack_installation/group_service.rb33
-rw-r--r--app/services/integrations/slack_installation/instance_service.rb25
-rw-r--r--app/services/integrations/slack_installation/project_service.rb33
-rw-r--r--app/services/integrations/test/group_service.rb25
-rw-r--r--app/services/integrations/test/project_service.rb2
-rw-r--r--app/services/integrations/update_service.rb69
-rw-r--r--app/services/issuable/callbacks/base.rb9
-rw-r--r--app/services/issuable/callbacks/time_tracking.rb58
-rw-r--r--app/services/issuable/clone/base_service.rb6
-rw-r--r--app/services/issuable/common_system_notes_service.rb11
-rw-r--r--app/services/issuable/destroy_service.rb40
-rw-r--r--app/services/issuable_base_service.rb77
-rw-r--r--app/services/issuable_links/create_service.rb4
-rw-r--r--app/services/issue_email_participants/base_service.rb24
-rw-r--r--app/services/issue_email_participants/create_service.rb15
-rw-r--r--app/services/issue_email_participants/destroy_service.rb19
-rw-r--r--app/services/issue_links/create_service.rb4
-rw-r--r--app/services/issues/base_service.rb7
-rw-r--r--app/services/issues/build_service.rb3
-rw-r--r--app/services/issues/clone_service.rb1
-rw-r--r--app/services/issues/close_service.rb58
-rw-r--r--app/services/issues/convert_to_ticket_service.rb97
-rw-r--r--app/services/issues/create_service.rb18
-rw-r--r--app/services/issues/export_csv_service.rb30
-rw-r--r--app/services/issues/move_service.rb54
-rw-r--r--app/services/issues/referenced_merge_requests_service.rb7
-rw-r--r--app/services/issues/relative_position_rebalancing_service.rb2
-rw-r--r--app/services/issues/reopen_service.rb42
-rw-r--r--app/services/issues/set_crm_contacts_service.rb4
-rw-r--r--app/services/issues/update_service.rb4
-rw-r--r--app/services/jira/requests/base.rb8
-rw-r--r--app/services/jira_connect_installations/destroy_service.rb15
-rw-r--r--app/services/jira_connect_subscriptions/create_service.rb38
-rw-r--r--app/services/jira_connect_subscriptions/destroy_service.rb44
-rw-r--r--app/services/labels/create_service.rb2
-rw-r--r--app/services/labels/promote_service.rb2
-rw-r--r--app/services/labels/transfer_service.rb8
-rw-r--r--app/services/lfs/file_transformer.rb11
-rw-r--r--app/services/lfs/finalize_upload_service.rb79
-rw-r--r--app/services/lfs/lock_file_service.rb5
-rw-r--r--app/services/lfs/unlock_file_service.rb4
-rw-r--r--app/services/loose_foreign_keys/batch_cleaner_service.rb55
-rw-r--r--app/services/loose_foreign_keys/cleaner_service.rb11
-rw-r--r--app/services/loose_foreign_keys/partition_cleaner_service.rb42
-rw-r--r--app/services/loose_foreign_keys/process_deleted_records_service.rb13
-rw-r--r--app/services/markdown_content_rewriter_service.rb4
-rw-r--r--app/services/members/activity_service.rb53
-rw-r--r--app/services/members/approve_access_request_service.rb2
-rw-r--r--app/services/members/base_service.rb2
-rw-r--r--app/services/members/create_service.rb22
-rw-r--r--app/services/members/creator_service.rb38
-rw-r--r--app/services/members/destroy_service.rb8
-rw-r--r--app/services/members/import_project_team_service.rb14
-rw-r--r--app/services/members/invite_member_builder.rb10
-rw-r--r--app/services/members/invite_service.rb17
-rw-r--r--app/services/members/unassign_issuables_service.rb64
-rw-r--r--app/services/members/update_service.rb8
-rw-r--r--app/services/merge_requests/after_create_service.rb13
-rw-r--r--app/services/merge_requests/approval_service.rb3
-rw-r--r--app/services/merge_requests/base_service.rb38
-rw-r--r--app/services/merge_requests/build_service.rb6
-rw-r--r--app/services/merge_requests/cleanup_refs_service.rb2
-rw-r--r--app/services/merge_requests/create_pipeline_service.rb2
-rw-r--r--app/services/merge_requests/create_ref_service.rb7
-rw-r--r--app/services/merge_requests/export_csv_service.rb24
-rw-r--r--app/services/merge_requests/handle_assignees_change_service.rb4
-rw-r--r--app/services/merge_requests/merge_service.rb29
-rw-r--r--app/services/merge_requests/merge_strategies/from_source_branch.rb31
-rw-r--r--app/services/merge_requests/mergeability/check_base_service.rb5
-rw-r--r--app/services/merge_requests/mergeability/check_ci_status_service.rb2
-rw-r--r--app/services/merge_requests/mergeability/check_commits_status_service.rb (renamed from app/services/merge_requests/mergeability/check_broken_status_service.rb)9
-rw-r--r--app/services/merge_requests/mergeability/check_conflict_status_service.rb2
-rw-r--r--app/services/merge_requests/mergeability/check_lfs_file_locks_service.rb59
-rw-r--r--app/services/merge_requests/mergeability/detailed_merge_status_service.rb13
-rw-r--r--app/services/merge_requests/mergeability_check_service.rb2
-rw-r--r--app/services/merge_requests/post_merge_service.rb83
-rw-r--r--app/services/merge_requests/push_options_handler_service.rb1
-rw-r--r--app/services/merge_requests/refresh_service.rb42
-rw-r--r--app/services/merge_requests/remove_approval_service.rb18
-rw-r--r--app/services/merge_requests/request_review_service.rb12
-rw-r--r--app/services/merge_requests/retarget_chain_service.rb2
-rw-r--r--app/services/merge_requests/unstick_locked_merge_requests_service.rb153
-rw-r--r--app/services/merge_requests/update_assignees_service.rb2
-rw-r--r--app/services/merge_requests/update_reviewer_state_service.rb24
-rw-r--r--app/services/merge_requests/update_reviewers_service.rb2
-rw-r--r--app/services/merge_requests/update_service.rb55
-rw-r--r--app/services/milestones/destroy_service.rb60
-rw-r--r--app/services/milestones/update_service.rb22
-rw-r--r--app/services/ml/create_candidate_service.rb2
-rw-r--r--app/services/ml/create_experiment_service.rb32
-rw-r--r--app/services/ml/create_model_service.rb36
-rw-r--r--app/services/ml/create_model_version_service.rb36
-rw-r--r--app/services/ml/destroy_model_service.rb28
-rw-r--r--app/services/ml/destroy_model_version_service.rb34
-rw-r--r--app/services/ml/experiment_tracking/experiment_repository.rb6
-rw-r--r--app/services/ml/find_model_service.rb14
-rw-r--r--app/services/ml/find_or_create_experiment_service.rb19
-rw-r--r--app/services/ml/find_or_create_model_service.rb18
-rw-r--r--app/services/ml/find_or_create_model_version_service.rb27
-rw-r--r--app/services/ml/model_versions/delete_service.rb32
-rw-r--r--app/services/ml/update_model_service.rb11
-rw-r--r--app/services/namespace_settings/assign_attributes_service.rb (renamed from app/services/namespace_settings/update_service.rb)51
-rw-r--r--app/services/namespaces/package_settings/update_service.rb28
-rw-r--r--app/services/namespaces/update_denormalized_descendants_service.rb65
-rw-r--r--app/services/notes/abuse_report/build_service.rb39
-rw-r--r--app/services/notes/abuse_report/create_service.rb39
-rw-r--r--app/services/notes/base_service.rb11
-rw-r--r--app/services/notes/build_service.rb40
-rw-r--r--app/services/notes/copy_service.rb6
-rw-r--r--app/services/notes/create_service.rb61
-rw-r--r--app/services/notes/post_process_service.rb2
-rw-r--r--app/services/notes/quick_actions_service.rb13
-rw-r--r--app/services/notes/update_service.rb12
-rw-r--r--app/services/notification_service.rb30
-rw-r--r--app/services/organizations/create_service.rb24
-rw-r--r--app/services/packages/cleanup/execute_policy_service.rb28
-rw-r--r--app/services/packages/conan/search_service.rb9
-rw-r--r--app/services/packages/conan/single_package_search_service.rb50
-rw-r--r--app/services/packages/create_dependency_service.rb10
-rw-r--r--app/services/packages/create_event_service.rb25
-rw-r--r--app/services/packages/debian/extract_changes_metadata_service.rb2
-rw-r--r--app/services/packages/debian/process_package_file_service.rb13
-rw-r--r--app/services/packages/mark_packages_for_destruction_service.rb12
-rw-r--r--app/services/packages/maven/create_package_service.rb7
-rw-r--r--app/services/packages/maven/find_or_create_package_service.rb13
-rw-r--r--app/services/packages/ml_model/create_package_file_service.rb4
-rw-r--r--app/services/packages/ml_model/package_for_candidate_service.rb23
-rw-r--r--app/services/packages/npm/check_manifest_coherence_service.rb44
-rw-r--r--app/services/packages/npm/create_package_service.rb36
-rw-r--r--app/services/packages/npm/generate_metadata_service.rb135
-rw-r--r--app/services/packages/npm/process_package_file_service.rb75
-rw-r--r--app/services/packages/nuget/search_service.rb10
-rw-r--r--app/services/packages/protection/create_rule_service.rb2
-rw-r--r--app/services/packages/protection/update_rule_service.rb2
-rw-r--r--app/services/packages/pypi/create_package_service.rb22
-rw-r--r--app/services/packages/rpm/parse_package_service.rb2
-rw-r--r--app/services/packages/rubygems/create_dependencies_service.rb26
-rw-r--r--app/services/packages/terraform_module/create_package_service.rb14
-rw-r--r--app/services/packages/terraform_module/metadata/create_service.rb42
-rw-r--r--app/services/packages/terraform_module/metadata/extract_files_service.rb152
-rw-r--r--app/services/packages/terraform_module/metadata/parse_hcl_file_service.rb211
-rw-r--r--app/services/packages/terraform_module/metadata/process_file_service.rb73
-rw-r--r--app/services/packages/terraform_module/process_package_file_service.rb63
-rw-r--r--app/services/pages/update_service.rb39
-rw-r--r--app/services/pages_domains/create_acme_order_service.rb5
-rw-r--r--app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb24
-rw-r--r--app/services/personal_access_tokens/create_service.rb14
-rw-r--r--app/services/personal_access_tokens/last_used_service.rb20
-rw-r--r--app/services/personal_access_tokens/revoke_service.rb18
-rw-r--r--app/services/personal_access_tokens/rotate_service.rb64
-rw-r--r--app/services/post_receive_service.rb6
-rw-r--r--app/services/preview_markdown_service.rb20
-rw-r--r--app/services/project_access_tokens/rotate_service.rb44
-rw-r--r--app/services/projects/after_rename_service.rb1
-rw-r--r--app/services/projects/autocomplete_service.rb19
-rw-r--r--app/services/projects/cleanup_service.rb2
-rw-r--r--app/services/projects/container_repository/delete_tags_service.rb15
-rw-r--r--app/services/projects/create_service.rb27
-rw-r--r--app/services/projects/destroy_service.rb4
-rw-r--r--app/services/projects/detect_repository_languages_service.rb2
-rw-r--r--app/services/projects/fetch_statistics_increment_service.rb18
-rw-r--r--app/services/projects/fork_service.rb39
-rw-r--r--app/services/projects/gitlab_projects_import_service.rb4
-rw-r--r--app/services/projects/group_links/create_service.rb4
-rw-r--r--app/services/projects/hashed_storage/migrate_attachments_service.rb2
-rw-r--r--app/services/projects/import_export/export_service.rb2
-rw-r--r--app/services/projects/import_export/parallel_export_service.rb2
-rw-r--r--app/services/projects/import_export/prune_expired_export_jobs_service.rb55
-rw-r--r--app/services/projects/import_export/relation_export_service.rb13
-rw-r--r--app/services/projects/import_export/relation_import_service.rb91
-rw-r--r--app/services/projects/import_service.rb6
-rw-r--r--app/services/projects/lfs_pointers/lfs_download_service.rb2
-rw-r--r--app/services/projects/lfs_pointers/lfs_link_service.rb8
-rw-r--r--app/services/projects/operations/update_service.rb2
-rw-r--r--app/services/projects/overwrite_project_service.rb2
-rw-r--r--app/services/projects/participants_service.rb7
-rw-r--r--app/services/projects/prometheus/metrics/base_service.rb20
-rw-r--r--app/services/projects/prometheus/metrics/destroy_service.rb13
-rw-r--r--app/services/projects/protect_default_branch_service.rb34
-rw-r--r--app/services/projects/slack_application_install_service.rb76
-rw-r--r--app/services/projects/transfer_service.rb14
-rw-r--r--app/services/projects/update_pages_service.rb42
-rw-r--r--app/services/projects/update_remote_mirror_service.rb7
-rw-r--r--app/services/projects/update_repository_storage_service.rb10
-rw-r--r--app/services/projects/update_service.rb48
-rw-r--r--app/services/projects/update_statistics_service.rb12
-rw-r--r--app/services/protected_branches/api_service.rb2
-rw-r--r--app/services/protected_branches/base_service.rb11
-rw-r--r--app/services/protected_branches/cache_service.rb27
-rw-r--r--app/services/protected_branches/create_service.rb5
-rw-r--r--app/services/protected_branches/legacy_api_create_service.rb2
-rw-r--r--app/services/protected_branches/update_service.rb8
-rw-r--r--app/services/quick_actions/interpret_service.rb45
-rw-r--r--app/services/quick_actions/target_service.rb8
-rw-r--r--app/services/releases/create_service.rb10
-rw-r--r--app/services/releases/destroy_service.rb2
-rw-r--r--app/services/remote_mirrors/create_service.rb27
-rw-r--r--app/services/remote_mirrors/destroy_service.rb23
-rw-r--r--app/services/remote_mirrors/sync_service.rb26
-rw-r--r--app/services/remote_mirrors/update_service.rb27
-rw-r--r--app/services/repositories/changelog_service.rb2
-rw-r--r--app/services/repositories/replicate_service.rb4
-rw-r--r--app/services/resource_access_tokens/create_service.rb25
-rw-r--r--app/services/resource_access_tokens/revoke_service.rb8
-rw-r--r--app/services/saved_replies/create_service.rb31
-rw-r--r--app/services/saved_replies/destroy_service.rb21
-rw-r--r--app/services/saved_replies/update_service.rb23
-rw-r--r--app/services/search/global_service.rb12
-rw-r--r--app/services/search_service.rb9
-rw-r--r--app/services/security/ci_configuration/base_create_service.rb18
-rw-r--r--app/services/security/ci_configuration/container_scanning_create_service.rb7
-rw-r--r--app/services/security/ci_configuration/dependency_scanning_create_service.rb7
-rw-r--r--app/services/security/ci_configuration/sast_iac_create_service.rb7
-rw-r--r--app/services/security/ci_configuration/secret_detection_create_service.rb9
-rw-r--r--app/services/service_desk_settings/update_service.rb13
-rw-r--r--app/services/service_ping/submit_service.rb13
-rw-r--r--app/services/service_response.rb4
-rw-r--r--app/services/snippets/create_service.rb24
-rw-r--r--app/services/snippets/update_service.rb4
-rw-r--r--app/services/spam/ham_service.rb4
-rw-r--r--app/services/spam/spam_action_service.rb10
-rw-r--r--app/services/spam/spam_verdict_service.rb3
-rw-r--r--app/services/suggestions/apply_service.rb7
-rw-r--r--app/services/system_note_service.rb23
-rw-r--r--app/services/system_notes/issuables_service.rb27
-rw-r--r--app/services/system_notes/merge_requests_service.rb43
-rw-r--r--app/services/test_hooks/project_service.rb6
-rw-r--r--app/services/timelogs/create_service.rb2
-rw-r--r--app/services/todo_service.rb14
-rw-r--r--app/services/todos/allowed_target_filter_service.rb34
-rw-r--r--app/services/upload_service.rb5
-rw-r--r--app/services/uploads/destroy_service.rb28
-rw-r--r--app/services/users/activate_service.rb10
-rw-r--r--app/services/users/activity_service.rb10
-rw-r--r--app/services/users/approve_service.rb9
-rw-r--r--app/services/users/auto_ban_service.rb8
-rw-r--r--app/services/users/ban_service.rb5
-rw-r--r--app/services/users/banned_user_base_service.rb9
-rw-r--r--app/services/users/build_service.rb14
-rw-r--r--app/services/users/create_service.rb4
-rw-r--r--app/services/users/deactivate_service.rb10
-rw-r--r--app/services/users/destroy_service.rb7
-rw-r--r--app/services/users/dismiss_broadcast_message_service.rb28
-rw-r--r--app/services/users/email_verification/validate_token_service.rb2
-rw-r--r--app/services/users/reject_service.rb9
-rw-r--r--app/services/users/reset_feed_token_service.rb61
-rw-r--r--app/services/users/saved_replies/create_service.rb29
-rw-r--r--app/services/users/saved_replies/destroy_service.rb23
-rw-r--r--app/services/users/saved_replies/update_service.rb25
-rw-r--r--app/services/users/set_namespace_commit_email_service.rb2
-rw-r--r--app/services/users/unfollow_service.rb26
-rw-r--r--app/services/users/update_canonical_email_service.rb19
-rw-r--r--app/services/users/update_service.rb10
-rw-r--r--app/services/users/upsert_credit_card_validation_service.rb36
-rw-r--r--app/services/virtual_registries/packages/maven/handle_file_request_service.rb65
-rw-r--r--app/services/web_hook_service.rb67
-rw-r--r--app/services/web_hooks/create_service.rb34
-rw-r--r--app/services/web_hooks/destroy_service.rb18
-rw-r--r--app/services/web_hooks/events/resend_service.rb43
-rw-r--r--app/services/web_hooks/log_execution_service.rb4
-rw-r--r--app/services/wiki_pages/base_service.rb26
-rw-r--r--app/services/wiki_pages/create_service.rb7
-rw-r--r--app/services/wiki_pages/destroy_service.rb9
-rw-r--r--app/services/wiki_pages/update_service.rb7
-rw-r--r--app/services/work_items/bulk_update_service.rb62
-rw-r--r--app/services/work_items/callbacks/assignees.rb32
-rw-r--r--app/services/work_items/callbacks/crm_contacts.rb71
-rw-r--r--app/services/work_items/callbacks/description.rb11
-rw-r--r--app/services/work_items/callbacks/linked_items.rb26
-rw-r--r--app/services/work_items/callbacks/start_and_due_date.rb50
-rw-r--r--app/services/work_items/closing_merge_requests/create_service.rb72
-rw-r--r--app/services/work_items/create_service.rb32
-rw-r--r--app/services/work_items/delete_service.rb13
-rw-r--r--app/services/work_items/parent_links/base_service.rb2
-rw-r--r--app/services/work_items/parent_links/create_service.rb41
-rw-r--r--app/services/work_items/parent_links/destroy_service.rb14
-rw-r--r--app/services/work_items/parent_links/reorder_service.rb26
-rw-r--r--app/services/work_items/related_work_item_links/create_service.rb14
-rw-r--r--app/services/work_items/related_work_item_links/destroy_service.rb22
-rw-r--r--app/services/work_items/update_service.rb44
-rw-r--r--app/services/work_items/widgets/base_service.rb2
-rw-r--r--app/services/work_items/widgets/hierarchy_service/base_service.rb34
-rw-r--r--app/services/work_items/widgets/labels_service/base_service.rb7
-rw-r--r--app/services/work_items/widgets/labels_service/update_service.rb9
483 files changed, 9608 insertions, 2362 deletions
diff --git a/app/services/achievements/update_user_achievement_service.rb b/app/services/achievements/update_user_achievement_service.rb
new file mode 100644
index 00000000000..3fb9d752eb3
--- /dev/null
+++ b/app/services/achievements/update_user_achievement_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+# rubocop:disable Gitlab/BoundedContexts -- the Achievements module already exists and holds the other services as well
+module Achievements
+ class UpdateUserAchievementService
+ attr_reader :current_user, :user_achievement, :params
+
+ def initialize(current_user, user_achievement, params)
+ @current_user = current_user
+ @user_achievement = user_achievement
+ @params = params
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ if user_achievement.update(params)
+ ServiceResponse.success(payload: user_achievement)
+ else
+ error_updating
+ end
+ end
+
+ private
+
+ def allowed?
+ current_user&.can?(:update_user_achievement, user_achievement)
+ end
+
+ def error_no_permissions
+ error('You have insufficient permission to update this user achievement')
+ end
+
+ def error(message)
+ ServiceResponse.error(payload: user_achievement, message: Array(message))
+ end
+
+ def error_updating
+ error(user_achievement&.errors&.full_messages || 'Failed to update user achievement')
+ end
+ end
+end
+# rubocop:enable Gitlab/BoundedContexts
diff --git a/app/services/analytics/cycle_analytics/stages/list_service.rb b/app/services/analytics/cycle_analytics/stages/list_service.rb
index 1cd7d3f5c6d..3d8e99ab7f7 100644
--- a/app/services/analytics/cycle_analytics/stages/list_service.rb
+++ b/app/services/analytics/cycle_analytics/stages/list_service.rb
@@ -7,7 +7,10 @@ module Analytics
def execute
return forbidden unless allowed?
- success(build_default_stages)
+ stages = build_default_stages
+ # In FOSS, stages are not persisted, we match them by name
+ stages = stages.select { |stage| params[:stage_ids].include?(stage.name) } if filter_by_stage_ids?
+ success(stages)
end
private
@@ -19,6 +22,10 @@ module Analytics
def success(stages)
ServiceResponse.success(payload: { stages: stages })
end
+
+ def filter_by_stage_ids?
+ params[:stage_ids].present?
+ end
end
end
end
diff --git a/app/services/analytics/cycle_analytics/value_streams/list_service.rb b/app/services/analytics/cycle_analytics/value_streams/list_service.rb
new file mode 100644
index 00000000000..17b7728bd34
--- /dev/null
+++ b/app/services/analytics/cycle_analytics/value_streams/list_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Analytics
+ module CycleAnalytics
+ module ValueStreams
+ class ListService
+ include Gitlab::Allowable
+
+ def initialize(parent:, current_user:, params: {})
+ @parent = parent
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ return forbidden unless can?(current_user, :read_cycle_analytics, parent.project)
+
+ value_stream = ::Analytics::CycleAnalytics::ValueStream
+ .build_default_value_stream(parent)
+
+ success([value_stream])
+ end
+
+ private
+
+ attr_reader :parent, :current_user, :params
+
+ def success(value_streams)
+ ServiceResponse.success(payload: { value_streams: value_streams })
+ end
+
+ def forbidden
+ ServiceResponse.error(message: 'Forbidden', payload: {})
+ end
+ end
+ end
+ end
+end
+
+Analytics::CycleAnalytics::ValueStreams::ListService.prepend_mod
diff --git a/app/services/application_settings/update_service.rb b/app/services/application_settings/update_service.rb
index a46ecc3eee6..cfb1e69cf50 100644
--- a/app/services/application_settings/update_service.rb
+++ b/app/services/application_settings/update_service.rb
@@ -25,8 +25,13 @@ module ApplicationSettings
return false
end
+ if params[:enabled_git_access_protocol] == 'all'
+ params[:enabled_git_access_protocol] = ''
+ end
+
update_terms(@params.delete(:terms))
- update_default_branch_protection_defaults(@params[:default_branch_protection])
+ update_default_branch_protection_defaults(@params[:default_branch_protection_defaults])
+ update_legacy_default_branch_protection_defaults(@params[:default_branch_protection])
add_to_outbound_local_requests_whitelist(@params.delete(:add_to_outbound_local_requests_whitelist))
@@ -78,7 +83,7 @@ module ApplicationSettings
@application_setting.reset_memoized_terms
end
- def update_default_branch_protection_defaults(default_branch_protection)
+ def update_legacy_default_branch_protection_defaults(default_branch_protection)
return unless default_branch_protection.present?
# We are migrating default_branch_protection from an integer
@@ -91,6 +96,12 @@ module ApplicationSettings
@application_setting.default_branch_protection_defaults = protection.to_hash
end
+ def update_default_branch_protection_defaults(default_branch_protection_defaults)
+ return unless default_branch_protection_defaults.present?
+
+ @application_setting.default_branch_protection_defaults.merge!(default_branch_protection_defaults)
+ end
+
def process_performance_bar_allowed_group_id
group_full_path = params.delete(:performance_bar_allowed_group_path)
enable_param_on = Gitlab::Utils.to_boolean(params.delete(:performance_bar_enabled))
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index 26244a8bcc5..9918df894e1 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -2,6 +2,7 @@
class AuditEventService
include AuditEventSaveType
+ include ::Gitlab::Audit::Logging
# Instantiates a new service
#
@@ -123,6 +124,7 @@ class AuditEventService
event = build_event
save_or_track event
+ log_to_new_tables([event], event.class.to_s) if should_save_database?(@save_type)
event
end
diff --git a/app/services/audit_events/build_service.rb b/app/services/audit_events/build_service.rb
index 9eab2f836db..ae956ff5c17 100644
--- a/app/services/audit_events/build_service.rb
+++ b/app/services/audit_events/build_service.rb
@@ -51,13 +51,13 @@ module AuditEvents
def base_details_payload
@additional_details.merge({
- author_name: @author.name,
- author_class: @author.class.name,
- target_id: @target.id,
- target_type: @target.type,
- target_details: @target_details || @target.details,
- custom_message: @message
- })
+ author_name: @author.name,
+ author_class: @author.class.name,
+ target_id: @target.id,
+ target_type: @target.type,
+ target_details: @target_details || @target.details,
+ custom_message: @message
+ })
end
def build_author(author)
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index f515fdede29..ce9e6c34165 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -14,10 +14,6 @@ module Auth
:build_destroy_container_image
].freeze
- FORBIDDEN_IMPORTING_SCOPES = %w[push delete *].freeze
-
- ActiveImportError = Class.new(StandardError)
-
def execute(authentication_abilities:)
@authentication_abilities = authentication_abilities
@@ -29,13 +25,11 @@ module Auth
return error('DENIED', status: 403, message: 'access forbidden')
end
+ if repository_path_push_protected?
+ return error('DENIED', status: 403, message: 'Pushing to protected repository path forbidden')
+ end
+
{ token: authorized_token(*scopes).encoded }
- rescue ActiveImportError
- error(
- 'DENIED',
- status: 403,
- message: 'Your repository is currently being migrated to a new platform and writes are temporarily disabled. Go to https://gitlab.com/groups/gitlab-org/-/epics/5523 to learn more.'
- )
end
def self.full_access_token(*names)
@@ -43,10 +37,6 @@ module Auth
access_token(names_and_actions)
end
- def self.import_access_token
- access_token({ 'import' => %w[*] }, 'registry')
- end
-
def self.pull_access_token(*names)
names_and_actions = names.index_with { %w[pull] }
access_token(names_and_actions)
@@ -116,8 +106,11 @@ module Auth
end
end
- # Return the project path (lowercase) as metadata
- { project_path: project&.full_path&.downcase }
+ {
+ project_path: project&.full_path&.downcase,
+ project_id: project&.id,
+ root_namespace_id: project&.root_ancestor&.id
+ }
end
private
@@ -187,8 +180,6 @@ module Auth
def process_repository_access(type, path, actions)
return unless path.valid?
- raise ActiveImportError if actively_importing?(actions, path)
-
requested_project = path.repository_project
return unless requested_project
@@ -213,15 +204,6 @@ module Auth
}
end
- def actively_importing?(actions, path)
- return false if FORBIDDEN_IMPORTING_SCOPES.intersection(actions).empty?
-
- container_repository = ContainerRepository.find_by_path(path)
- return false unless container_repository
-
- container_repository.migration_importing?
- end
-
##
# Because we do not have two way communication with registry yet,
# we create a container repository image resource when push to the
@@ -327,12 +309,34 @@ module Auth
end
end
+ def repository_path_push_protected?
+ return false if Feature.disabled?(:container_registry_protected_containers, project)
+
+ push_scopes = scopes.select { |scope| scope[:actions].include?('push') || scope[:actions].include?('*') }
+
+ push_scopes.any? do |push_scope|
+ push_scope_container_registry_path = ContainerRegistry::Path.new(push_scope[:name])
+
+ next unless push_scope_container_registry_path.valid?
+
+ repository_project = push_scope_container_registry_path.repository_project
+ current_user_project_authorization_access_level = current_user&.max_member_access_for_project(repository_project.id)
+
+ repository_project.container_registry_protection_rules.for_push_exists?(
+ access_level: current_user_project_authorization_access_level,
+ repository_path: push_scope_container_registry_path.to_s
+ )
+ end
+ end
+
# Overridden in EE
def extra_info
{}
end
def deploy_token
+ return unless Gitlab::ExternalAuthorization.allow_deploy_tokens_and_deploy_keys?
+
params[:deploy_token]
end
diff --git a/app/services/auth/dependency_proxy_authentication_service.rb b/app/services/auth/dependency_proxy_authentication_service.rb
index 29f5a50d809..3bc7df1a6c9 100644
--- a/app/services/auth/dependency_proxy_authentication_service.rb
+++ b/app/services/auth/dependency_proxy_authentication_service.rb
@@ -54,7 +54,7 @@ module Auth
end
def group_access_token
- PersonalAccessTokensFinder.new(state: 'active').find_by_token(raw_token)
+ PersonalAccessTokensFinder.new(state: 'active').find_by_token(raw_token.to_s)
end
def valid_deploy_token?
@@ -65,16 +65,28 @@ module Auth
JSONWebToken::HMACToken.new(self.class.secret).tap do |token|
token['user_id'] = current_user.id if current_user
token['deploy_token'] = deploy_token.token if deploy_token
+ token['personal_access_token'] = raw_token if personal_access_token_user?
+ token['group_access_token'] = raw_token if group_access_token_user?
token.expire_time = self.class.token_expire_at
end
end
def deploy_token
+ return unless Gitlab::ExternalAuthorization.allow_deploy_tokens_and_deploy_keys?
+
params[:deploy_token]
end
def raw_token
params[:raw_token]
end
+
+ def group_access_token_user?
+ raw_token && current_user&.project_bot? && current_user.resource_bot_resource.is_a?(Group)
+ end
+
+ def personal_access_token_user?
+ raw_token && current_user && (current_user.human? || current_user.service_account?)
+ end
end
end
diff --git a/app/services/auto_merge/base_service.rb b/app/services/auto_merge/base_service.rb
index 467a4ed2621..176f898019e 100644
--- a/app/services/auto_merge/base_service.rb
+++ b/app/services/auto_merge/base_service.rb
@@ -59,19 +59,18 @@ module AutoMerge
def available_for?(merge_request)
strong_memoize("available_for_#{merge_request.id}") do
merge_request.can_be_merged_by?(current_user) &&
- merge_request.open? &&
- !merge_request.broken? &&
- overrideable_available_for_checks(merge_request) &&
+ merge_request.mergeability_checks_pass?(**skippable_available_for_checks(merge_request)) &&
yield
end
end
private
- def overrideable_available_for_checks(merge_request)
- !merge_request.draft? &&
- merge_request.mergeable_discussions_state? &&
- !merge_request.merge_blocked_by_other_mrs?
+ def skippable_available_for_checks(merge_request)
+ merge_request.skipped_mergeable_checks(
+ auto_merge_requested: true,
+ auto_merge_strategy: strategy
+ )
end
# Overridden in child classes
@@ -113,5 +112,9 @@ module AutoMerge
def track_exception(error, merge_request)
Gitlab::ErrorTracking.track_exception(error, merge_request_id: merge_request&.id)
end
+
+ def logger
+ @logger ||= Gitlab::AppLogger
+ end
end
end
diff --git a/app/services/auto_merge/merge_when_checks_pass_service.rb b/app/services/auto_merge/merge_when_checks_pass_service.rb
new file mode 100644
index 00000000000..ab1b1d51409
--- /dev/null
+++ b/app/services/auto_merge/merge_when_checks_pass_service.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+# rubocop:disable Gitlab/BoundedContexts -- Existing module
+module AutoMerge
+ class MergeWhenChecksPassService < AutoMerge::BaseService
+ extend Gitlab::Utils::Override
+
+ override :execute
+ def execute(merge_request)
+ super do
+ add_system_note(merge_request)
+ end
+ end
+
+ override :process
+ def process(merge_request)
+ logger.info("Processing Automerge - MWCP")
+
+ return if merge_request.has_ci_enabled? && !merge_request.diff_head_pipeline_success?
+
+ logger.info("Pipeline Success - MWCP")
+
+ return unless merge_request.mergeable?
+
+ logger.info("Merge request mergeable - MWCP")
+
+ merge_request.merge_async(merge_request.merge_user_id, merge_request.merge_params)
+ end
+
+ override :cancel
+ def cancel(merge_request)
+ super do
+ SystemNoteService.cancel_auto_merge(merge_request, project, current_user)
+ end
+ end
+
+ override :abort
+ def abort(merge_request, reason)
+ super do
+ SystemNoteService.abort_auto_merge(merge_request, project, current_user, reason)
+ end
+ end
+
+ override :available_for
+ def available_for?(merge_request)
+ super do
+ next false if Feature.disabled?(:merge_when_checks_pass, merge_request.project)
+ next false if merge_request.project.merge_trains_enabled?
+ next false if merge_request.mergeable? && !merge_request.diff_head_pipeline_considered_in_progress?
+
+ next true
+ end
+ end
+
+ private
+
+ def add_system_note(merge_request)
+ return unless merge_request.saved_change_to_auto_merge_enabled?
+
+ SystemNoteService.merge_when_checks_pass(
+ merge_request,
+ project,
+ current_user,
+ merge_request.merge_params.symbolize_keys[:sha]
+ )
+ end
+
+ def notify(merge_request)
+ return unless merge_request.saved_change_to_auto_merge_enabled?
+
+ notification_service.async.merge_when_pipeline_succeeds(merge_request,
+ current_user)
+ end
+ end
+end
+# rubocop:enable Gitlab/BoundedContexts
diff --git a/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb b/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
index cb8e531f0e1..143dfa3f8b8 100644
--- a/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
+++ b/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
@@ -9,13 +9,13 @@ module AutoMerge
end
def process(merge_request)
- logger.info("Processing Automerge")
- return unless merge_request.actual_head_pipeline_success?
+ logger.info("Processing Automerge - MWPS")
+ return unless merge_request.diff_head_pipeline_success?
- logger.info("Pipeline Success")
+ logger.info("Pipeline Success - MWPS")
return unless merge_request.mergeable?
- logger.info("Merge request mergeable")
+ logger.info("Merge request mergeable - MWPS")
merge_request.merge_async(merge_request.merge_user_id, merge_request.merge_params)
end
@@ -34,26 +34,20 @@ module AutoMerge
def available_for?(merge_request)
super do
- check_availability(merge_request)
+ next false if Feature.enabled?(:merge_when_checks_pass, merge_request.project)
+
+ merge_request.diff_head_pipeline_considered_in_progress?
end
end
private
def add_system_note(merge_request)
- SystemNoteService.merge_when_pipeline_succeeds(merge_request, project, current_user, merge_request.actual_head_pipeline.sha) if merge_request.saved_change_to_auto_merge_enabled?
- end
-
- def check_availability(merge_request)
- merge_request.actual_head_pipeline&.active?
+ SystemNoteService.merge_when_pipeline_succeeds(merge_request, project, current_user, merge_request.diff_head_pipeline.sha) if merge_request.saved_change_to_auto_merge_enabled?
end
def notify(merge_request)
notification_service.async.merge_when_pipeline_succeeds(merge_request, current_user) if merge_request.saved_change_to_auto_merge_enabled?
end
-
- def logger
- @logger ||= Gitlab::AppLogger
- end
end
end
diff --git a/app/services/auto_merge_service.rb b/app/services/auto_merge_service.rb
index 912248d3a06..0bf0346ed0b 100644
--- a/app/services/auto_merge_service.rb
+++ b/app/services/auto_merge_service.rb
@@ -4,7 +4,10 @@ class AutoMergeService < BaseService
include Gitlab::Utils::StrongMemoize
STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS = 'merge_when_pipeline_succeeds'
- STRATEGIES = [STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS].freeze
+ STRATEGY_MERGE_WHEN_CHECKS_PASS = 'merge_when_checks_pass'
+ # Only used in EE
+ STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_CHECKS_PASS = 'add_to_merge_train_when_checks_pass'
+ STRATEGIES = [STRATEGY_MERGE_WHEN_CHECKS_PASS, STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS].freeze
class << self
def all_strategies_ordered_by_preference
diff --git a/app/services/award_emojis/add_service.rb b/app/services/award_emojis/add_service.rb
index b5aa8f920ff..4281df35f0d 100644
--- a/app/services/award_emojis/add_service.rb
+++ b/app/services/award_emojis/add_service.rb
@@ -34,6 +34,9 @@ module AwardEmojis
def todoable
strong_memoize(:todoable) do
case awardable
+ when DiscussionNote
+ # Only update todos associated with the discussion if note is part of a thread
+ awardable.to_discussion
when Note
# We don't create todos for personal snippet comments for now
awardable.noteable unless awardable.for_personal_snippet?
diff --git a/app/services/base_container_service.rb b/app/services/base_container_service.rb
index f46e8d5ec42..673c1fdf01f 100644
--- a/app/services/base_container_service.rb
+++ b/app/services/base_container_service.rb
@@ -40,6 +40,10 @@ class BaseContainerService
end
strong_memoize_attr :project_group
+ def root_ancestor
+ project_group&.root_ancestor || group&.root_ancestor
+ end
+
private
def handle_container_type(container)
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
index 7d202da96ce..c9da889c536 100644
--- a/app/services/boards/base_item_move_service.rb
+++ b/app/services/boards/base_item_move_service.rb
@@ -84,11 +84,20 @@ module Boards
end
def remove_label_ids
- label_ids = moving_to_list.movable? ? moving_from_list.label_id : []
+ label_ids =
+ if moving_to_list.movable?
+ moving_from_list.label_id
+ else
+ board_label_ids
+ end
Array(label_ids).compact
end
+ def board_label_ids
+ ::Label.ids_on_board(board.id)
+ end
+
def move_params_from_list_position(position)
if position == LIST_END_POSITION
{ move_before_id: moving_to_list_items_relation.reverse_order.pick(:id), move_after_id: nil }
diff --git a/app/services/boards/base_items_list_service.rb b/app/services/boards/base_items_list_service.rb
index 7c8846d2fe8..0b8e4b95c76 100644
--- a/app/services/boards/base_items_list_service.rb
+++ b/app/services/boards/base_items_list_service.rb
@@ -132,23 +132,33 @@ module Boards
def without_board_labels(items)
return items unless board_label_ids.any?
- items.where.not('EXISTS (?)', label_links(board_label_ids).limit(1))
+ items.where(label_links(items, board_label_ids.compact).arel.exists.not)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def label_links(label_ids)
- LabelLink
- .where(label_links: { target_type: item_model })
- .where(item_model.arel_table[:id].eq(LabelLink.arel_table[:target_id]).to_sql)
- .where(label_id: label_ids)
+ def label_links(items, label_ids)
+ labels_filter.label_link_query(items, label_ids: label_ids)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def with_list_label(items)
- items.where('EXISTS (?)', label_links(list.label_id).limit(1))
+ items.where(label_links(items, [list.label_id]).arel.exists)
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ def labels_filter
+ Issuables::LabelFilter.new(params: {}, project: project, group: group)
+ end
+ strong_memoize_attr :labels_filter
+
+ def group
+ parent if parent.is_a?(Group)
+ end
+
+ def project
+ parent if parent.is_a?(Project)
+ end
end
end
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index fcaa74555ca..b6d55f831c0 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -50,7 +50,9 @@ module Boards
end
def set_issue_types
- params[:issue_types] ||= Issue::TYPES_FOR_BOARD_LIST
+ types = Issue::TYPES_FOR_BOARD_LIST.dup
+ types << 'task' if ::Feature.enabled?(:work_items_beta, parent)
+ params[:issue_types] ||= types
end
def item_model
diff --git a/app/services/branch_rules/base_service.rb b/app/services/branch_rules/base_service.rb
new file mode 100644
index 00000000000..12043d3a3a3
--- /dev/null
+++ b/app/services/branch_rules/base_service.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module BranchRules
+ class BaseService
+ include Gitlab::Allowable
+
+ MISSING_METHOD_ERROR = Class.new(StandardError)
+
+ attr_reader :branch_rule, :current_user, :params
+
+ delegate :project, to: :branch_rule, allow_nil: true
+
+ def initialize(branch_rule, user = nil, params = {})
+ @branch_rule = branch_rule
+ @current_user = user
+ @params = ActionController::Parameters.new(**params).permit(*permitted_params).to_h
+ end
+
+ def execute(skip_authorization: false)
+ raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?
+
+ return execute_on_branch_rule if branch_rule.instance_of?(Projects::BranchRule)
+
+ ServiceResponse.error(message: 'Unknown branch rule type.')
+ end
+
+ private
+
+ def execute_on_branch_rule
+ missing_method_error('execute_on_branch_rule')
+ end
+
+ def authorized?
+ missing_method_error('authorized?')
+ end
+
+ def permitted_params
+ []
+ end
+
+ def missing_method_error(method_name)
+ raise MISSING_METHOD_ERROR, "Please define an `#{method_name}` method in #{self.class.name}"
+ end
+ end
+end
+
+BranchRules::BaseService.prepend_mod
diff --git a/app/services/branch_rules/destroy_service.rb b/app/services/branch_rules/destroy_service.rb
new file mode 100644
index 00000000000..8e639e49c92
--- /dev/null
+++ b/app/services/branch_rules/destroy_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module BranchRules
+ class DestroyService < BaseService
+ private
+
+ def authorized?
+ can?(current_user, :destroy_protected_branch, branch_rule)
+ end
+
+ def execute_on_branch_rule
+ service = ProtectedBranches::DestroyService.new(project, current_user)
+
+ return ServiceResponse.success if service.execute(branch_rule.protected_branch)
+
+ ServiceResponse.error(message: 'Failed to delete branch rule.')
+ end
+ end
+end
+
+BranchRules::DestroyService.prepend_mod
diff --git a/app/services/branch_rules/update_service.rb b/app/services/branch_rules/update_service.rb
new file mode 100644
index 00000000000..4a2857eb3d5
--- /dev/null
+++ b/app/services/branch_rules/update_service.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+module BranchRules
+ class UpdateService < BaseService
+ private
+
+ def authorized?
+ can?(current_user, :update_branch_rule, branch_rule)
+ end
+
+ def execute_on_branch_rule
+ protected_branch = ProtectedBranches::UpdateService
+ .new(project, current_user, update_params)
+ .execute(branch_rule.protected_branch, skip_authorization: true)
+
+ return ServiceResponse.success unless protected_branch.errors.any?
+
+ ServiceResponse.error(message: protected_branch.errors.full_messages)
+ end
+
+ def update_params
+ transformed_params = params.dup
+
+ extract_branch_protection_params!(transformed_params)
+ extract_push_access_levels_params!(transformed_params)
+ extract_merge_access_levels_params!(transformed_params)
+
+ transformed_params
+ end
+
+ def extract_branch_protection_params!(transformed_params)
+ branch_protection_params = transformed_params.delete(:branch_protection)
+ return unless branch_protection_params
+
+ transformed_params.merge!(branch_protection_params)
+ end
+
+ def extract_push_access_levels_params!(transformed_params)
+ push_levels_params = transformed_params.delete(:push_access_levels)
+ return unless push_levels_params
+
+ push_levels = branch_rule.branch_protection.push_access_levels
+ transformed_params[:push_access_levels_attributes] = access_levels_attributes(push_levels, push_levels_params)
+ end
+
+ def extract_merge_access_levels_params!(transformed_params)
+ merge_levels_params = transformed_params.delete(:merge_access_levels)
+ return unless merge_levels_params
+
+ merge_levels = branch_rule.branch_protection.merge_access_levels
+ transformed_params[:merge_access_levels_attributes] = access_levels_attributes(merge_levels, merge_levels_params)
+ end
+
+ # In ProtectedBranch we are using:
+ #
+ # `accepts_nested_attributes_for :{type}_access_levels, allow_destroy: true`
+ #
+ # This branch rule update service acts like we have defined this
+ # `accepts_nested_attributes_for` with `update: true`.
+ #
+ # Unfortunately we are unable to modify the `accepts_nested_attributes_for`
+ # config as we use this logic in other locations. As we are reusing the
+ # ProtectedBranches::UpdateService we also can't custom write the logic to
+ # persist the access levels manually.
+ #
+ # For now the best solution appears to be matching the params against the
+ # existing levels to check which access levels still exist and marking
+ # unmatched access levels for destruction.
+ #
+ # Given the following:
+ # access_levels = [{ id: 1, access_level: 30 }, { id: 2, user_id: 1 }, { id: 3, group_id: 1 }]
+ # access_levels_params = [{ access_level: 30 }, { user_id: 1 }, { deploy_key_id: 1 }]
+ #
+ # The output should be:
+ # [{ id: 3, _destroy: true }, { deploy_key_id: 1 }]
+ #
+ # NOTE: :user_id and :group_id are only available in EE.
+ #
+ def access_levels_attributes(access_levels, access_levels_params)
+ attributes = access_levels.filter_map do |access_level|
+ next if remove_matched_access_level_params!(access_levels_params, access_level)
+
+ # access levels that do not have matching params are marked for deletion
+ { id: access_level.id, _destroy: true }
+ end
+
+ # concat the remaining access_levels_params that don't match any existing
+ # access_levels
+ attributes.concat(access_levels_params)
+ end
+
+ def remove_matched_access_level_params!(access_levels_params, access_level)
+ # <AccessLevel(1) access_level: 0> matches params { access_level: 0 }
+ # <AccessLevel(2) deploy_key_id: 1> matched params { deploy_key_id: 1 }
+ # NOTE: In EE we also match against :user_id and :group_id
+ #
+ # If an access_level exists for a passed param we don't need to update it
+ # so we can safely reject the params.
+ access_levels_params.reject! do |params|
+ if access_level.role?
+ params[:access_level] == access_level.access_level
+ else
+ foreign_key = :"#{access_level.type}_id"
+ params[foreign_key] == access_level.public_send(foreign_key) # rubocop:disable GitlabSecurity/PublicSend -- "#{access_level.type}_id" is used to fetch the correct foreign_key attribute.
+ end
+ end
+ end
+
+ def permitted_params
+ [
+ :name,
+ {
+ branch_protection: [
+ :allow_force_push,
+ {
+ push_access_levels: %i[access_level deploy_key_id],
+ merge_access_levels: %i[access_level]
+ }
+ ]
+ }
+ ]
+ end
+ end
+end
+
+BranchRules::UpdateService.prepend_mod
diff --git a/app/services/branches/create_service.rb b/app/services/branches/create_service.rb
index 5cbd587e546..2fc8a7617f3 100644
--- a/app/services/branches/create_service.rb
+++ b/app/services/branches/create_service.rb
@@ -9,6 +9,9 @@ module Branches
end
def execute(branch_name, ref, create_default_branch_if_empty: true)
+ result = validate_ref(ref)
+ return result if result[:status] == :error
+
create_default_branch if create_default_branch_if_empty && project.empty_repo?
result = branch_validation_service.execute(branch_name)
@@ -87,6 +90,12 @@ module Branches
error(e.message)
end
+ def validate_ref(ref)
+ return error('Ref is missing') if ref.blank?
+
+ success
+ end
+
def create_default_branch
project.repository.create_file(
current_user,
diff --git a/app/services/branches/delete_service.rb b/app/services/branches/delete_service.rb
index e396d784ca6..bfa133a7821 100644
--- a/app/services/branches/delete_service.rb
+++ b/app/services/branches/delete_service.rb
@@ -18,7 +18,9 @@ module Branches
http_status: 404)
end
- if repository.rm_branch(current_user, branch_name)
+ target_sha = branch.dereferenced_target.id
+
+ if repository.rm_branch(current_user, branch_name, target_sha: target_sha)
unlock_artifacts(branch_name)
ServiceResponse.success(message: 'Branch was deleted')
else
diff --git a/app/services/bulk_create_integration_service.rb b/app/services/bulk_create_integration_service.rb
deleted file mode 100644
index 70c77444f13..00000000000
--- a/app/services/bulk_create_integration_service.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-class BulkCreateIntegrationService
- include Integrations::BulkOperationHashes
-
- def initialize(integration, batch, association)
- @integration = integration
- @batch = batch
- @association = association
- end
-
- def execute
- integration_list = Integrations::IntegrationList.new(batch, integration_hash(:create), association).to_array
-
- Integration.transaction do
- results = bulk_insert(*integration_list)
-
- if integration.data_fields_present?
- data_list = DataList.new(results, data_fields_hash(:create), integration.data_fields.class).to_array
-
- bulk_insert(*data_list)
- end
- end
- end
-
- private
-
- attr_reader :integration, :batch, :association
-
- def bulk_insert(klass, columns, values_array)
- items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
-
- klass.insert_all(items_to_insert, returning: [:id])
- end
-end
diff --git a/app/services/bulk_imports/batched_relation_export_service.rb b/app/services/bulk_imports/batched_relation_export_service.rb
index e239a6daa4c..16cff6df2b9 100644
--- a/app/services/bulk_imports/batched_relation_export_service.rb
+++ b/app/services/bulk_imports/batched_relation_export_service.rb
@@ -35,7 +35,9 @@ module BulkImports
attr_reader :user, :portable, :relation, :jid, :config, :resolved_relation
def export
- @export ||= portable.bulk_import_exports.find_or_create_by!(relation: relation) # rubocop:disable CodeReuse/ActiveRecord
+ # rubocop:disable Performance/ActiveRecordSubtransactionMethods -- This is only executed from within a worker
+ @export ||= portable.bulk_import_exports.safe_find_or_create_by!(relation: relation, user: user)
+ # rubocop:enable Performance/ActiveRecordSubtransactionMethods
end
def objects_count
diff --git a/app/services/bulk_imports/create_service.rb b/app/services/bulk_imports/create_service.rb
index d58620eb089..c5da7840c71 100644
--- a/app/services/bulk_imports/create_service.rb
+++ b/app/services/bulk_imports/create_service.rb
@@ -53,6 +53,11 @@ module BulkImports
extra: { source_equals_destination: source_equals_destination? }
)
+ if Feature.enabled?(:importer_user_mapping, current_user) &&
+ Feature.enabled?(:bulk_import_importer_user_mapping, current_user)
+ ::Import::BulkImports::EphemeralData.new(bulk_import.id).enable_importer_user_mapping
+ end
+
BulkImportWorker.perform_async(bulk_import.id)
ServiceResponse.success(payload: bulk_import)
@@ -66,10 +71,13 @@ module BulkImports
private
+ attr_accessor :source_entity_identifier
+
def validate!
client.validate_instance_version!
- validate_setting_enabled!
client.validate_import_scopes!
+ validate_source_full_path!
+ validate_setting_enabled!
end
def create_bulk_import
@@ -102,22 +110,35 @@ module BulkImports
end
end
- def validate_setting_enabled!
- source_full_path, source_type = Array.wrap(params)[0].values_at(:source_full_path, :source_type)
- entity_type = ENTITY_TYPES_MAPPING.fetch(source_type)
- if /^[0-9]+$/.match?(source_full_path)
- query = query_type(entity_type)
- response = graphql_client.execute(
- graphql_client.parse(query.to_s),
- { full_path: source_full_path }
- ).original_hash
-
- source_entity_identifier = ::GlobalID.parse(response.dig(*query.data_path, 'id')).model_id
- else
- source_entity_identifier = ERB::Util.url_encode(source_full_path)
- end
+ def validate_source_full_path!
+ gql_query = query_type(entity_type)
+
+ response = graphql_client.execute(
+ graphql_client.parse(gql_query.to_s),
+ { full_path: source_full_path }
+ ).original_hash
+ self.source_entity_identifier = ::GlobalID.parse(response.dig(*gql_query.data_path, 'id'))&.model_id
+
+ raise BulkImports::Error.source_full_path_validation_failure(source_full_path) if source_entity_identifier.nil?
+ end
+
+ def validate_setting_enabled!
client.get("/#{entity_type}/#{source_entity_identifier}/export_relations/status")
+ rescue BulkImports::NetworkError => e
+ raise BulkImports::Error.not_authorized(source_full_path) if e.message.include?("URL is blocked")
+ raise BulkImports::Error.setting_not_enabled if e.response.code == 404
+ raise BulkImports::Error.not_authorized(source_full_path) if e.response.code == 403
+
+ raise e
+ end
+
+ def entity_type
+ @entity_type ||= ENTITY_TYPES_MAPPING.fetch(Array.wrap(params)[0][:source_type])
+ end
+
+ def source_full_path
+ @source_full_path ||= Array.wrap(params)[0][:source_full_path]
end
def track_access_level(entity_params)
diff --git a/app/services/bulk_imports/file_decompression_service.rb b/app/services/bulk_imports/file_decompression_service.rb
index 77638f10f54..ddbcef0686d 100644
--- a/app/services/bulk_imports/file_decompression_service.rb
+++ b/app/services/bulk_imports/file_decompression_service.rb
@@ -21,7 +21,7 @@ module BulkImports
def execute
validate_tmpdir
validate_filepath
- validate_decompressed_file_size if Feature.enabled?(:validate_import_decompressed_archive_size)
+ validate_decompressed_file_size
validate_symlink(filepath)
decompress_file
diff --git a/app/services/bulk_imports/file_download_service.rb b/app/services/bulk_imports/file_download_service.rb
index 39c27c04b8c..56e81733225 100644
--- a/app/services/bulk_imports/file_download_service.rb
+++ b/app/services/bulk_imports/file_download_service.rb
@@ -49,7 +49,7 @@ module BulkImports
private
attr_reader :configuration, :relative_url, :tmpdir, :file_size_limit, :allowed_content_types,
- :response_headers, :last_chunk_context, :response_code
+ :response_headers, :response_code
def download_file
File.open(filepath, 'wb') do |file|
@@ -67,7 +67,7 @@ module BulkImports
@response_code = chunk.code
@response_headers ||= Gitlab::HTTP::Response::Headers.new(chunk.http_response.to_hash)
- @last_chunk_context = chunk.to_s.truncate(LAST_CHUNK_CONTEXT_CHAR_LIMIT)
+ @last_chunk_context = chunk
unless @remote_content_validated
validate_content_type
@@ -134,12 +134,18 @@ module BulkImports
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w[http https],
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?
- )
+ deny_all_requests_except_allowed: ::Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: ::Gitlab::CurrentSettings.outbound_local_requests_whitelist) # rubocop:disable Naming/InclusiveLanguage -- existing setting
end
def default_file_size_limit
Gitlab::CurrentSettings.current_application_settings.bulk_import_max_download_file_size.megabytes
end
+
+ # Before logging, we truncate the context to a reasonable length and scrub
+ # any non-printable characters.
+ def last_chunk_context
+ @last_chunk_context.to_s.truncate(LAST_CHUNK_CONTEXT_CHAR_LIMIT).force_encoding('utf-8').scrub
+ end
end
end
diff --git a/app/services/bulk_imports/relation_export_service.rb b/app/services/bulk_imports/relation_export_service.rb
index ea3b8adad74..337f245cf27 100644
--- a/app/services/bulk_imports/relation_export_service.rb
+++ b/app/services/bulk_imports/relation_export_service.rb
@@ -33,7 +33,7 @@ module BulkImports
delegate :export_path, to: :config
def find_or_create_export!
- export = portable.bulk_import_exports.safe_find_or_create_by!(relation: relation)
+ export = portable.bulk_import_exports.safe_find_or_create_by!(relation: relation, user: user)
return export if export.finished? && export.updated_at > EXISTING_EXPORT_TTL.ago && !export.batched?
diff --git a/app/services/bulk_imports/user_contributions_export_service.rb b/app/services/bulk_imports/user_contributions_export_service.rb
new file mode 100644
index 00000000000..a184a34e79b
--- /dev/null
+++ b/app/services/bulk_imports/user_contributions_export_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class UserContributionsExportService
+ def initialize(user_id, portable, jid)
+ @user = User.find(user_id)
+ @portable = portable
+ @jid = jid
+ end
+
+ def execute
+ # Set up query to get cached users and set it as user_contributions on the portable model
+ @portable.user_contributions = UserContributionsExportMapper.new(@portable).get_contributing_users
+ relation = BulkImports::FileTransfer::BaseConfig::USER_CONTRIBUTIONS_RELATION
+
+ RelationExportService.new(@user, @portable, relation, @jid).execute
+ end
+ end
+end
diff --git a/app/services/bulk_update_integration_service.rb b/app/services/bulk_update_integration_service.rb
deleted file mode 100644
index 57ceec57962..00000000000
--- a/app/services/bulk_update_integration_service.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-class BulkUpdateIntegrationService
- include Integrations::BulkOperationHashes
-
- def initialize(integration, batch)
- @integration = integration
- @batch = batch
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def execute
- Integration.transaction do
- Integration.where(id: batch_ids).update_all(integration_hash(:update))
-
- if integration.data_fields_present?
- integration.data_fields.class.where(data_fields_foreign_key => batch_ids)
- .update_all(
- data_fields_hash(:update)
- )
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- private
-
- attr_reader :integration, :batch
-
- # service_id or integration_id
- def data_fields_foreign_key
- integration.data_fields.class.reflections['integration'].foreign_key
- end
-
- def batch_ids
- @batch_ids ||=
- if batch.is_a?(ActiveRecord::Relation)
- batch.select(:id)
- else
- batch.map(&:id)
- end
- end
-end
diff --git a/app/services/ci/cancel_pipeline_service.rb b/app/services/ci/cancel_pipeline_service.rb
index 92eead3fdd1..68e893c8a6d 100644
--- a/app/services/ci/cancel_pipeline_service.rb
+++ b/app/services/ci/cancel_pipeline_service.rb
@@ -43,8 +43,7 @@ module Ci
end
log_pipeline_being_canceled
-
- pipeline.update_column(:auto_canceled_by_id, @auto_canceled_by_pipeline.id) if @auto_canceled_by_pipeline
+ update_auto_canceled_pipeline_attributes
if @safe_cancellation
# Only build and bridge (trigger) jobs can be interruptible.
@@ -61,7 +60,7 @@ module Ci
private
- attr_reader :pipeline, :current_user
+ attr_reader :pipeline, :current_user, :auto_canceled_by_pipeline
def log_pipeline_being_canceled
Gitlab::AppJsonLogger.info(
@@ -74,6 +73,15 @@ module Ci
)
end
+ def update_auto_canceled_pipeline_attributes
+ return unless auto_canceled_by_pipeline
+
+ pipeline.update_columns(
+ auto_canceled_by_id: auto_canceled_by_pipeline.id,
+ auto_canceled_by_partition_id: auto_canceled_by_pipeline.partition_id
+ )
+ end
+
def cascade_to_children?
@cascade_to_children
end
diff --git a/app/services/ci/catalog/resources/aggregate_last30_day_usage_service.rb b/app/services/ci/catalog/resources/aggregate_last30_day_usage_service.rb
new file mode 100644
index 00000000000..076140c1a85
--- /dev/null
+++ b/app/services/ci/catalog/resources/aggregate_last30_day_usage_service.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+module Ci
+ module Catalog
+ module Resources
+ # This service aggregates CI component usage data and updates `last_30_day_usage_count` for
+ # each catalog resource daily. It utilizes Gitlab::Ci::Components::Usages::Aggregator which
+ # implements a "continue later" mechanism to process the data in time-boxed jobs.
+ # rubocop: disable CodeReuse/ActiveRecord -- Custom queries required
+ class AggregateLast30DayUsageService
+ include Gitlab::Utils::StrongMemoize
+
+ TARGET_MODEL = Ci::Catalog::Resource
+ GROUP_BY_COLUMN = :catalog_resource_id
+ WINDOW_LENGTH = 30.days
+
+ def execute
+ return ServiceResponse.success(message: "Processing complete for #{today}") if done_processing?
+
+ aggregator = Gitlab::Ci::Components::Usages::Aggregator.new(
+ target_model: TARGET_MODEL,
+ group_by_column: GROUP_BY_COLUMN,
+ usage_start_date: today - WINDOW_LENGTH,
+ usage_end_date: today - 1.day,
+ lease_key: lease_key
+ )
+
+ result = aggregator.each_batch do |usage_counts|
+ save_usage_counts!(usage_counts)
+ end
+
+ if result
+ ServiceResponse.success(message: 'Targets processed', payload: result.to_h)
+ else
+ ServiceResponse.success(message: 'Lease taken', payload: { lease_key: lease_key })
+ end
+ end
+
+ private
+
+ # NOTE: New catalog resources added today are considered already processed
+ # because their `last_30_day_usage_count_updated_at` is defaulted to NOW().
+ def done_processing?
+ min_updated_at = TARGET_MODEL.minimum(:last_30_day_usage_count_updated_at)
+ return true unless min_updated_at
+
+ min_updated_at >= today.to_time
+ end
+
+ def save_usage_counts!(usage_counts)
+ mapping = usage_counts.transform_values { |v| { last_30_day_usage_count: v } }
+ catalog_resource_ids = usage_counts.keys.map(&:id)
+
+ TARGET_MODEL.transaction do
+ Gitlab::Database::BulkUpdate.execute(%i[last_30_day_usage_count], mapping)
+
+ # Gitlab::Database::BulkUpdate does not support column type
+ # `:timestamptz` so we must update the timestamps separately.
+ TARGET_MODEL
+ .where(id: catalog_resource_ids)
+ .update_all(last_30_day_usage_count_updated_at: Time.current)
+ end
+ end
+
+ def today
+ Date.today
+ end
+ strong_memoize_attr :today
+
+ def lease_key
+ self.class.name
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+end
diff --git a/app/services/ci/catalog/resources/create_service.rb b/app/services/ci/catalog/resources/create_service.rb
index 89367c70e82..aedfc37850f 100644
--- a/app/services/ci/catalog/resources/create_service.rb
+++ b/app/services/ci/catalog/resources/create_service.rb
@@ -16,7 +16,10 @@ module Ci
def execute
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :add_catalog_resource, project)
- catalog_resource = Ci::Catalog::Resource.new(project: project)
+ verified_namespace = Ci::Catalog::VerifiedNamespace.for_project(project)
+ catalog_resource = Ci::Catalog::Resource.new(
+ project: project, verification_level: verified_namespace&.verification_level || :unverified
+ )
if catalog_resource.valid?
catalog_resource.save!
diff --git a/app/services/ci/catalog/resources/release_service.rb b/app/services/ci/catalog/resources/release_service.rb
index ad77bff3ef9..022772c283a 100644
--- a/app/services/ci/catalog/resources/release_service.rb
+++ b/app/services/ci/catalog/resources/release_service.rb
@@ -4,15 +4,19 @@ module Ci
module Catalog
module Resources
class ReleaseService
- def initialize(release)
+ def initialize(release, user, components_data)
@release = release
+ @user = user
+ @components_data = components_data
@project = release.project
@errors = []
end
def execute
- validate_catalog_resource
- create_version
+ track_release_duration do
+ validate_catalog_resource
+ create_version
+ end
if errors.empty?
ServiceResponse.success
@@ -23,7 +27,21 @@ module Ci
private
- attr_reader :project, :errors, :release
+ attr_reader :project, :errors, :release, :user, :components_data
+
+ def track_release_duration
+ name = :gitlab_ci_catalog_release_duration_seconds
+ comment = 'CI Catalog Release duration'
+ buckets = [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 20.0, 50.0, 240.0]
+
+ histogram = ::Gitlab::Metrics.histogram(name, comment, {}, buckets)
+ start_time = ::Gitlab::Metrics::System.monotonic_time
+
+ yield
+
+ duration = ::Gitlab::Metrics::System.monotonic_time - start_time
+ histogram.observe({}, duration.seconds)
+ end
def validate_catalog_resource
response = Ci::Catalog::Resources::ValidateService.new(project, release.sha).execute
@@ -35,7 +53,7 @@ module Ci
def create_version
return if errors.present?
- response = Ci::Catalog::Resources::Versions::CreateService.new(release).execute
+ response = Ci::Catalog::Resources::Versions::CreateService.new(release, user, components_data).execute
return if response.success?
errors << response.message
diff --git a/app/services/ci/catalog/resources/versions/build_components_service.rb b/app/services/ci/catalog/resources/versions/build_components_service.rb
new file mode 100644
index 00000000000..ef759274f6b
--- /dev/null
+++ b/app/services/ci/catalog/resources/versions/build_components_service.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+module Ci
+ module Catalog
+ module Resources
+ module Versions
+ # This service is called from the Versions::CreateService and
+ # responsible for building components for a release version.
+ class BuildComponentsService
+ MAX_COMPONENTS = Ci::Catalog::ComponentsProject::COMPONENTS_LIMIT
+
+ def initialize(release, version, components_data)
+ @release = release
+ @version = version
+ @components_data = components_data
+ @project = release.project
+ @components_project = Ci::Catalog::ComponentsProject.new(project)
+ @errors = []
+ end
+
+ def execute
+ components = if components_data
+ build_components_from_passed_data
+ else
+ build_components_from_fetched_data
+ end
+
+ if errors.empty?
+ ServiceResponse.success(payload: components)
+ else
+ ServiceResponse.error(message: errors.flatten.first(10).join(', '))
+ end
+ end
+
+ private
+
+ attr_reader :release, :version, :project, :components_project, :components_data, :errors
+
+ def build_components_from_passed_data
+ check_number_of_components(components_data.size)
+ return if errors.present?
+
+ components_data.map do |component_data|
+ build_catalog_resource_component(component_data)
+ end
+ end
+
+ def build_components_from_fetched_data
+ component_paths = components_project.fetch_component_paths(release.sha, limit: MAX_COMPONENTS + 1)
+
+ check_number_of_components(component_paths.size)
+ return if errors.present?
+
+ build_components_from_paths(component_paths)
+ end
+
+ def build_components_from_paths(component_paths)
+ paths_with_oids = component_paths.map { |path| [release.sha, path] }
+ blobs = project.repository.blobs_at(paths_with_oids)
+
+ blobs.map do |blob|
+ metadata = extract_metadata(blob)
+ build_catalog_resource_component(metadata)
+ end
+ rescue ::Gitlab::Config::Loader::FormatError => e
+ error(e)
+ end
+
+ def extract_metadata(blob)
+ component_name = components_project.extract_component_name(blob.path)
+
+ {
+ name: component_name,
+ spec: components_project.extract_spec(blob.data)
+ }
+ end
+
+ def check_number_of_components(size)
+ return if size <= MAX_COMPONENTS
+
+ error("Release cannot contain more than #{MAX_COMPONENTS} components")
+ end
+
+ def build_catalog_resource_component(metadata)
+ return if errors.present?
+
+ component = Ci::Catalog::Resources::Component.new(
+ name: metadata[:name],
+ project: version.project,
+ spec: metadata[:spec],
+ version: version,
+ catalog_resource: version.catalog_resource,
+ created_at: Time.current
+ )
+
+ return component if component.valid?
+
+ error("Build component error: #{component.errors.full_messages.join(', ')}")
+ end
+
+ def error(message)
+ errors << message
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/ci/catalog/resources/versions/create_service.rb b/app/services/ci/catalog/resources/versions/create_service.rb
index 9547db7bcf1..45ec08cd4d5 100644
--- a/app/services/ci/catalog/resources/versions/create_service.rb
+++ b/app/services/ci/catalog/resources/versions/create_service.rb
@@ -5,18 +5,18 @@ module Ci
module Resources
module Versions
class CreateService
- def initialize(release)
- @project = release.project
+ def initialize(release, user, components_data)
@release = release
+ @user = user
+ @project = release.project
+ @components_data = components_data
@errors = []
- @version = nil
- @components_project = Ci::Catalog::ComponentsProject.new(project)
end
def execute
- build_catalog_resource_version
- fetch_and_build_components if Feature.enabled?(:ci_catalog_create_metadata, project)
- publish_catalog_resource!
+ version = build_catalog_resource_version
+ build_components(version)
+ publish(version)
if errors.empty?
ServiceResponse.success
@@ -27,76 +27,42 @@ module Ci
private
- attr_reader :project, :errors, :release, :components_project
+ attr_reader :project, :errors, :release, :user, :components_data
def build_catalog_resource_version
return error('Project is not a catalog resource') unless project.catalog_resource
- @version = Ci::Catalog::Resources::Version.new(
+ version = Ci::Catalog::Resources::Version.new(
+ published_by: user,
release: release,
catalog_resource: project.catalog_resource,
- project: project
+ project: project,
+ semver: release.tag
)
- end
-
- def fetch_and_build_components
- return if errors.present?
-
- max_components = Ci::Catalog::ComponentsProject::COMPONENTS_LIMIT
- component_paths = components_project.fetch_component_paths(release.sha, limit: max_components + 1)
-
- if component_paths.size > max_components
- return error("Release cannot contain more than #{max_components} components")
- end
-
- build_components(component_paths)
- end
-
- def build_components(component_paths)
- paths_with_oids = component_paths.map { |path| [release.sha, path] }
- blobs = project.repository.blobs_at(paths_with_oids)
-
- blobs.each do |blob|
- metadata = extract_metadata(blob)
- build_catalog_resource_component(metadata)
- end
- rescue ::Gitlab::Config::Loader::FormatError => e
- error(e)
- end
- def extract_metadata(blob)
- component_name = components_project.extract_component_name(blob.path)
+ error(version.errors.full_messages) unless version.valid?
- {
- name: component_name,
- inputs: components_project.extract_inputs(blob.data),
- path: "#{Settings.gitlab.host}/#{project.full_path}/#{component_name}@#{release.tag}"
- }
+ version
end
- def build_catalog_resource_component(metadata)
+ def build_components(version)
return if errors.present?
- component = @version.components.build(
- name: metadata[:name],
- project: @version.project,
- inputs: metadata[:inputs],
- catalog_resource: @version.catalog_resource,
- path: metadata[:path],
- created_at: Time.current
- )
-
- return if component.valid?
+ response = BuildComponentsService.new(release, version, components_data).execute
- error("Build component error: #{component.errors.full_messages.join(', ')}")
+ if response.success?
+ version.components = response.payload
+ else
+ error(response.message)
+ end
end
- def publish_catalog_resource!
+ def publish(version)
return if errors.present?
::Ci::Catalog::Resources::Version.transaction do
BulkInsertableAssociations.with_bulk_insert do
- @version.save!
+ version.save!
end
project.catalog_resource.publish!
diff --git a/app/services/ci/change_variable_service.rb b/app/services/ci/change_variable_service.rb
index 83cd6aae14b..ea9fda14cac 100644
--- a/app/services/ci/change_variable_service.rb
+++ b/app/services/ci/change_variable_service.rb
@@ -5,10 +5,10 @@ module Ci
def execute
case params[:action]
when :create
- container.variables.create(params[:variable_params])
+ container.variables.create(create_variable_params)
when :update
variable.tap do |target_variable|
- target_variable.update(params[:variable_params].except(:key))
+ target_variable.update(update_variable_params.except(:key))
end
when :destroy
variable.tap do |target_variable|
@@ -23,6 +23,21 @@ module Ci
params[:variable] || find_variable
end
+ def create_variable_params
+ params[:variable_params].tap do |variables|
+ if variables[:masked_and_hidden]
+ variables[:hidden] = true
+ variables[:masked] = true
+ end
+
+ variables.delete(:masked_and_hidden)
+ end
+ end
+
+ def update_variable_params
+ params[:variable_params]
+ end
+
def find_variable
identifier = params[:variable_params].slice(:id).presence || params[:variable_params].slice(:key)
container.variables.find_by!(identifier) # rubocop:disable CodeReuse/ActiveRecord
diff --git a/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb b/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb
new file mode 100644
index 00000000000..bb67d95d419
--- /dev/null
+++ b/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb
@@ -0,0 +1,192 @@
+# frozen_string_literal: true
+
+module Ci
+ module ClickHouse
+ module DataIngestion
+ class FinishedPipelinesSyncService
+ include Gitlab::ExclusiveLeaseHelpers
+ include Gitlab::Utils::StrongMemoize
+
+ # the job is scheduled every 3 minutes and we will allow maximum 6 minutes runtime
+ # we must allow a minimum of 2 minutes + 15 seconds PG timeout + 1 minute for the various
+ # CH Gitlab::HTTP timeouts
+ MAX_TTL = 6.minutes.to_i
+ MAX_RUNTIME = 120.seconds
+ PIPELINES_BATCH_SIZE = 500
+ PIPELINES_BATCH_COUNT = 10 # How many batches to process before submitting the CSV to ClickHouse
+ PIPELINE_ID_PARTITIONS = 100
+
+ PIPELINE_FIELD_NAMES = %i[id duration status source ref].freeze
+ PIPELINE_EPOCH_FIELD_NAMES = %i[committed_at created_at started_at finished_at].freeze
+ PIPELINE_COMPUTED_FIELD_NAMES = %i[path].freeze
+
+ CSV_MAPPING = {
+ **PIPELINE_FIELD_NAMES.index_with { |n| n },
+ **PIPELINE_EPOCH_FIELD_NAMES.index_with { |n| :"casted_#{n}" },
+ **PIPELINE_COMPUTED_FIELD_NAMES.index_with { |n| n }
+ }.freeze
+
+ INSERT_FINISHED_PIPELINES_QUERY = <<~SQL.squish
+ INSERT INTO ci_finished_pipelines (#{CSV_MAPPING.keys.join(',')})
+ SETTINGS async_insert=1, wait_for_async_insert=1 FORMAT CSV
+ SQL
+
+ def self.enabled?
+ ::Gitlab::ClickHouse.configured?
+ end
+
+ def initialize(worker_index: 0, total_workers: 1)
+ @runtime_limiter = Gitlab::Metrics::RuntimeLimiter.new(MAX_RUNTIME)
+ @worker_index = worker_index
+ @total_workers = total_workers
+ end
+
+ def execute
+ unless self.class.enabled?
+ return ServiceResponse.error(
+ message: 'Disabled: ClickHouse database is not configured.',
+ reason: :db_not_configured,
+ payload: service_payload
+ )
+ end
+
+ # Prevent parallel jobs
+ in_lock("#{self.class.name.underscore}/worker/#{@worker_index}", ttl: MAX_TTL, retries: 0) do
+ ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
+ report = insert_new_finished_pipelines
+
+ ServiceResponse.success(payload: report.merge(service_payload))
+ end
+ end
+ rescue Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError => e
+ # Skip retrying, just let the next worker to start after a few minutes
+ ServiceResponse.error(message: e.message, reason: :skipped, payload: service_payload)
+ end
+
+ private
+
+ def continue?
+ !@reached_end_of_table && !@runtime_limiter.over_time?
+ end
+
+ def service_payload
+ {
+ worker_index: @worker_index,
+ total_workers: @total_workers
+ }
+ end
+
+ def insert_new_finished_pipelines
+ # Read PIPELINES_BATCH_COUNT batches of PIPELINES_BATCH_SIZE until the timeout in MAX_RUNTIME is reached
+ # We can expect a single worker to process around 2M pipelines/hour with a single worker,
+ # and a bit over 5M pipelines/hour with three workers (measured in prod).
+ @reached_end_of_table = false
+ @processed_record_ids = []
+
+ csv_batches.each do |csv_batch|
+ break unless continue?
+
+ csv_builder = CsvBuilder::Gzip.new(csv_batch, CSV_MAPPING)
+ csv_builder.render do |tempfile|
+ next if csv_builder.rows_written == 0
+
+ File.open(tempfile.path) do |f|
+ ::ClickHouse::Client.insert_csv(INSERT_FINISHED_PIPELINES_QUERY, f, :main)
+ end
+ end
+ end
+
+ {
+ records_inserted:
+ Ci::FinishedPipelineChSyncEvent.primary_key_in(@processed_record_ids).update_all(processed: true),
+ reached_end_of_table: @reached_end_of_table
+ }
+ end
+
+ def csv_batches
+ events_batches_enumerator = Enumerator.new do |small_batches_yielder|
+ # Main loop to page through the events
+ keyset_iterator_scope.each_batch(of: PIPELINES_BATCH_SIZE) { |batch| small_batches_yielder << batch }
+ @reached_end_of_table = true
+ end
+
+ Enumerator.new do |batches_yielder|
+ # Each batches_yielder value represents a CSV file upload
+ while continue?
+ batches_yielder << Enumerator.new do |records_yielder|
+ # records_yielder sends rows to the CSV builder
+ PIPELINES_BATCH_COUNT.times do
+ break unless continue?
+
+ yield_pipelines(events_batches_enumerator.next, records_yielder)
+
+ rescue StopIteration
+ break
+ end
+ end
+ end
+ end
+ end
+
+ def yield_pipelines(events_batch, records_yielder)
+ # NOTE: The `.to_a` call is necessary here to materialize the ActiveRecord relationship, so that the call
+ # to `.last` in `.each_batch` (see https://gitlab.com/gitlab-org/gitlab/-/blob/a38c93c792cc0d2536018ed464862076acb8d3d7/lib/gitlab/pagination/keyset/iterator.rb#L27)
+ # doesn't mess it up and cause duplicates (see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/138066)
+ # rubocop: disable CodeReuse/ActiveRecord -- this is an expression that is specific to this service
+ # rubocop: disable Database/AvoidUsingPluckWithoutLimit -- the batch is already limited by definition
+ events_batch = events_batch.to_a
+ pipeline_ids = events_batch.pluck(:pipeline_id)
+ project_namespace_ids = events_batch.pluck(:pipeline_id, :project_namespace_id).to_h
+ # rubocop: enable Database/AvoidUsingPluckWithoutLimit
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ pipelines = Ci::Pipeline.id_in(pipeline_ids)
+ pipelines
+ .left_outer_joins(project_mirror: :namespace_mirror)
+ .select(:finished_at, *finished_pipeline_projections)
+ .each do |pipeline|
+ records_yielder << pipeline.attributes.symbolize_keys.tap do |record|
+ # add the project namespace ID segment to the path selected in the query
+ record[:path] += "#{project_namespace_ids[record[:id]]}/"
+ end
+ end
+
+ @processed_record_ids += pipeline_ids
+ end
+
+ def finished_pipeline_projections
+ [
+ *PIPELINE_FIELD_NAMES.map { |n| "#{::Ci::Pipeline.table_name}.#{n}" },
+ *PIPELINE_EPOCH_FIELD_NAMES
+ .map { |n| "EXTRACT(epoch FROM #{::Ci::Pipeline.table_name}.#{n}) AS casted_#{n}" },
+ "ARRAY_TO_STRING(#{::Ci::NamespaceMirror.table_name}.traversal_ids, '/') || '/' AS path"
+ ]
+ end
+ strong_memoize_attr :finished_pipeline_projections
+
+ def keyset_iterator_scope
+ lower_bound = (@worker_index * PIPELINE_ID_PARTITIONS / @total_workers).to_i
+ upper_bound = ((@worker_index + 1) * PIPELINE_ID_PARTITIONS / @total_workers).to_i - 1
+
+ table_name = Ci::FinishedPipelineChSyncEvent.quoted_table_name
+ array_scope = Ci::FinishedPipelineChSyncEvent.select(:pipeline_id_partition)
+ .from("generate_series(#{lower_bound}, #{upper_bound}) as #{table_name}(pipeline_id_partition)") # rubocop: disable CodeReuse/ActiveRecord -- this is an expression that is specific to this service
+
+ opts = {
+ in_operator_optimization_options: {
+ array_scope: array_scope,
+ array_mapping_scope: ->(id_expression) do
+ Ci::FinishedPipelineChSyncEvent
+ .where(Arel.sql("(pipeline_id % #{PIPELINE_ID_PARTITIONS})") # rubocop: disable CodeReuse/ActiveRecord -- this is an expression that is specific to this service
+ .eq(id_expression))
+ end
+ }
+ }
+
+ Gitlab::Pagination::Keyset::Iterator.new(
+ scope: Ci::FinishedPipelineChSyncEvent.pending.order_by_pipeline_id, **opts)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/ci/components/fetch_service.rb b/app/services/ci/components/fetch_service.rb
index f83c6e30cbb..5c2dbbdf8e0 100644
--- a/app/services/ci/components/fetch_service.rb
+++ b/app/services/ci/components/fetch_service.rb
@@ -22,6 +22,7 @@ module Ci
end
component_path = component_path_class.new(address: address)
+
result = component_path.fetch_content!(current_user: current_user)
if result&.content
@@ -29,15 +30,27 @@ module Ci
content: result.content,
path: result.path,
project: component_path.project,
- sha: component_path.sha
+ sha: component_path.sha,
+ name: component_path.component_name
})
+ elsif component_path.invalid_usage_for_latest?
+ ServiceResponse.error(
+ message: 'The ~latest version reference is not supported for non-catalog resources. ' \
+ 'Use a tag, branch, or SHA instead',
+ reason: :invalid_usage)
else
ServiceResponse.error(message: "#{error_prefix} content not found", reason: :content_not_found)
end
rescue Gitlab::Access::AccessDeniedError
- ServiceResponse.error(
- message: "#{error_prefix} project does not exist or you don't have sufficient permissions",
- reason: :not_allowed)
+ if current_user.external? && component_path.project.internal?
+ ServiceResponse.error(
+ message: "#{error_prefix} project is `Internal`, it cannot be accessed by an External User",
+ reason: :not_allowed)
+ else
+ ServiceResponse.error(
+ message: "#{error_prefix} project does not exist or you don't have sufficient permissions",
+ reason: :not_allowed)
+ end
end
private
diff --git a/app/services/ci/components/usages/create_service.rb b/app/services/ci/components/usages/create_service.rb
new file mode 100644
index 00000000000..83202a0df11
--- /dev/null
+++ b/app/services/ci/components/usages/create_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Ci
+ module Components
+ module Usages
+ class CreateService
+ ValidationError = Class.new(StandardError)
+
+ def initialize(component, used_by_project:)
+ @component = component
+ @used_by_project = used_by_project
+ end
+
+ def execute
+ component_usage = Ci::Catalog::Resources::Components::Usage.new(
+ component: component,
+ catalog_resource: component.catalog_resource,
+ project: component.project,
+ used_by_project_id: used_by_project.id
+ )
+
+ if component_usage.save
+ ServiceResponse.success(message: 'Usage recorded')
+ else
+ errors = component_usage.errors
+
+ if errors.size == 1 && errors.first.type == :taken # Only unique validation failed
+ ServiceResponse.success(message: 'Usage already recorded for today')
+ else
+ exception = ValidationError.new(errors.full_messages.join(', '))
+
+ Gitlab::ErrorTracking.track_exception(exception)
+ ServiceResponse.error(message: exception.message)
+ end
+ end
+ end
+
+ private
+
+ attr_reader :component, :used_by_project
+ end
+ end
+ end
+end
diff --git a/app/services/ci/create_commit_status_service.rb b/app/services/ci/create_commit_status_service.rb
index de3e7b3f7ff..00c9dc1cd21 100644
--- a/app/services/ci/create_commit_status_service.rb
+++ b/app/services/ci/create_commit_status_service.rb
@@ -20,9 +20,8 @@ module Ci
attr_reader :pipeline, :stage, :commit_status, :optional_commit_status_params
def unsafe_execute
- return not_found('Commit') if commit.blank?
- return bad_request('State is required') if params[:state].blank?
- return not_found('References for commit') if ref.blank?
+ result = validate
+ return result if result&.error?
@pipeline = first_matching_pipeline || create_pipeline
return forbidden unless ::Ability.allowed?(current_user, :update_pipeline, pipeline)
@@ -36,10 +35,19 @@ module Ci
return bad_request(response.message) if response.error?
- update_merge_request_head_pipeline
response
end
+ def validate
+ return not_found('Commit') if commit.blank?
+ return bad_request('State is required') if params[:state].blank?
+ return not_found('References for commit') if ref.blank?
+
+ return unless params[:pipeline_id] && !first_matching_pipeline
+
+ not_found("Pipeline for pipeline_id, sha and ref")
+ end
+
def ref
params[:ref] || first_matching_pipeline&.ref ||
repository.branch_names_contains(sha).first
@@ -52,7 +60,7 @@ module Ci
strong_memoize_attr :commit
def first_matching_pipeline
- pipelines = project.ci_pipelines.newest_first(sha: sha)
+ pipelines = project.ci_pipelines.newest_first(sha: sha, limit: 100)
pipelines = pipelines.for_ref(params[:ref]) if params[:ref]
pipelines = pipelines.id_in(params[:pipeline_id]) if params[:pipeline_id]
pipelines.first
@@ -73,6 +81,10 @@ module Ci
).tap do |new_pipeline|
new_pipeline.ensure_project_iid!
new_pipeline.save!
+
+ Gitlab::EventStore.publish(
+ Ci::PipelineCreatedEvent.new(data: { pipeline_id: new_pipeline.id })
+ )
end
end
@@ -106,14 +118,6 @@ module Ci
end
end
- def update_merge_request_head_pipeline
- return unless pipeline.latest?
-
- ::MergeRequest
- .from_project(project).from_source_branches(ref)
- .update_all(head_pipeline_id: pipeline.id)
- end
-
def apply_job_state!(job)
case params[:state]
when 'pending'
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index e38f5c98814..efce1604276 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -64,8 +64,10 @@ module Ci
end
end
rescue StateMachines::InvalidTransition => e
+ error = Ci::Bridge::InvalidTransitionError.new(e.message)
+ error.set_backtrace(caller)
Gitlab::ErrorTracking.track_exception(
- Ci::Bridge::InvalidTransitionError.new(e.message),
+ error,
bridge_id: bridge.id,
downstream_pipeline_id: pipeline.id)
ServiceResponse.error(payload: pipeline, message: e.message)
@@ -119,7 +121,7 @@ module Ci
def can_create_downstream_pipeline?(target_ref)
can?(current_user, :update_pipeline, project) &&
can?(current_user, :create_pipeline, downstream_project) &&
- can_update_branch?(target_ref)
+ can_update_branch?(target_ref)
end
def can_update_branch?(target_ref)
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 7d3e71b003e..de56c573888 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -8,35 +8,38 @@ module Ci
LOG_MAX_PIPELINE_SIZE = 2_000
LOG_MAX_CREATION_THRESHOLD = 20.seconds
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
- Gitlab::Ci::Pipeline::Chain::Build::Associations,
- Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
- Gitlab::Ci::Pipeline::Chain::Validate::Repository,
- Gitlab::Ci::Pipeline::Chain::Limit::RateLimit,
- Gitlab::Ci::Pipeline::Chain::Validate::SecurityOrchestrationPolicy,
- Gitlab::Ci::Pipeline::Chain::Skip,
- Gitlab::Ci::Pipeline::Chain::Config::Content,
- Gitlab::Ci::Pipeline::Chain::Config::Process,
- Gitlab::Ci::Pipeline::Chain::Validate::AfterConfig,
- Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
- Gitlab::Ci::Pipeline::Chain::SeedBlock,
- Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
- Gitlab::Ci::Pipeline::Chain::AssignPartition,
- Gitlab::Ci::Pipeline::Chain::Seed,
- Gitlab::Ci::Pipeline::Chain::Limit::Size,
- Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs,
- Gitlab::Ci::Pipeline::Chain::Limit::Deployments,
- Gitlab::Ci::Pipeline::Chain::Validate::External,
- Gitlab::Ci::Pipeline::Chain::Populate,
- Gitlab::Ci::Pipeline::Chain::PopulateMetadata,
- Gitlab::Ci::Pipeline::Chain::StopDryRun,
- Gitlab::Ci::Pipeline::Chain::EnsureEnvironments,
- Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
- Gitlab::Ci::Pipeline::Chain::Create,
- Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
- Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
- Gitlab::Ci::Pipeline::Chain::Metrics,
- Gitlab::Ci::Pipeline::Chain::TemplateUsage,
- Gitlab::Ci::Pipeline::Chain::Pipeline::Process].freeze
+ Gitlab::Ci::Pipeline::Chain::Build::Associations,
+ Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
+ Gitlab::Ci::Pipeline::Chain::Validate::Repository,
+ Gitlab::Ci::Pipeline::Chain::Limit::RateLimit,
+ Gitlab::Ci::Pipeline::Chain::Validate::SecurityOrchestrationPolicy,
+ Gitlab::Ci::Pipeline::Chain::AssignPartition,
+ Gitlab::Ci::Pipeline::Chain::PipelineExecutionPolicies::FindConfigs,
+ Gitlab::Ci::Pipeline::Chain::Skip,
+ Gitlab::Ci::Pipeline::Chain::Config::Content,
+ Gitlab::Ci::Pipeline::Chain::Config::Process,
+ Gitlab::Ci::Pipeline::Chain::Validate::AfterConfig,
+ Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
+ Gitlab::Ci::Pipeline::Chain::SeedBlock,
+ Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
+ Gitlab::Ci::Pipeline::Chain::Seed,
+ Gitlab::Ci::Pipeline::Chain::Limit::Size,
+ Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs,
+ Gitlab::Ci::Pipeline::Chain::Limit::Deployments,
+ Gitlab::Ci::Pipeline::Chain::Validate::External,
+ Gitlab::Ci::Pipeline::Chain::Populate,
+ Gitlab::Ci::Pipeline::Chain::PopulateMetadata,
+ Gitlab::Ci::Pipeline::Chain::PipelineExecutionPolicies::MergeJobs,
+ Gitlab::Ci::Pipeline::Chain::StopDryRun,
+ Gitlab::Ci::Pipeline::Chain::EnsureEnvironments,
+ Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
+ Gitlab::Ci::Pipeline::Chain::Create,
+ Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
+ Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
+ Gitlab::Ci::Pipeline::Chain::Metrics,
+ Gitlab::Ci::Pipeline::Chain::TemplateUsage,
+ Gitlab::Ci::Pipeline::Chain::ComponentUsage,
+ Gitlab::Ci::Pipeline::Chain::Pipeline::Process].freeze
# Create a new pipeline in the specified project.
#
@@ -56,11 +59,14 @@ module Ci
# generating a dangling pipeline.
#
# @return [Ci::Pipeline] The created Ci::Pipeline object.
- # rubocop: disable Metrics/ParameterLists
+ # rubocop: disable Metrics/ParameterLists, Metrics/AbcSize
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
@logger = build_logger
+ @command_logger = Gitlab::Ci::Pipeline::CommandLogger.new
@pipeline = Ci::Pipeline.new
+ validate_options!(options)
+
command = Gitlab::Ci::Pipeline::Chain::Command.new(
source: source,
origin_ref: params[:ref],
@@ -83,6 +89,7 @@ module Ci
chat_data: params[:chat_data],
bridge: bridge,
logger: @logger,
+ partition_id: params[:partition_id],
**extra_options(**options))
# Ensure we never persist the pipeline when dry_run: true
@@ -111,11 +118,20 @@ module Ci
ensure
@logger.commit(pipeline: pipeline, caller: self.class.name)
+ @command_logger.commit(pipeline: pipeline, command: command) if command
end
- # rubocop: enable Metrics/ParameterLists
+ # rubocop: enable Metrics/ParameterLists, Metrics/AbcSize
private
+ # rubocop:disable Gitlab/NoCodeCoverageComment
+ # :nocov: Tested in FOSS and fully overridden and tested in EE
+ def validate_options!(_)
+ raise ArgumentError, "Param `partition_id` is not allowed" if params[:partition_id]
+ end
+ # :nocov:
+ # rubocop:enable Gitlab/NoCodeCoverageComment
+
def create_namespace_onboarding_action
Onboarding::PipelineCreatedWorker.perform_async(project.namespace_id)
end
diff --git a/app/services/ci/create_web_ide_terminal_service.rb b/app/services/ci/create_web_ide_terminal_service.rb
index 9cfba0cbee6..9b716dc9844 100644
--- a/app/services/ci/create_web_ide_terminal_service.rb
+++ b/app/services/ci/create_web_ide_terminal_service.rb
@@ -39,7 +39,7 @@ module Ci
.new(pipeline)
.execute
- pipeline_created_counter.increment(source: :webide)
+ pipeline_created_counter.increment(source: :webide, partition_id: pipeline.partition_id)
end
end
@@ -104,8 +104,7 @@ module Ci
end
def pipeline_created_counter
- @pipeline_created_counter ||= Gitlab::Metrics
- .counter(:pipelines_created_total, "Counter of pipelines created")
+ ::Gitlab::Ci::Pipeline::Metrics.pipelines_created_counter
end
def terminal_active?
diff --git a/app/services/ci/daily_build_group_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index 25c6d57d961..9555d627373 100644
--- a/app/services/ci/daily_build_group_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -21,7 +21,8 @@ module Ci
date: pipeline.created_at.to_date,
last_pipeline_id: pipeline.id,
default_branch: pipeline.default_branch?,
- group_id: pipeline.project&.group&.id
+ group_id: pipeline.project&.group&.id,
+ partition_id: pipeline.partition_id
}
aggregate(pipeline.builds.with_coverage).map do |group_name, group|
diff --git a/app/services/ci/delete_objects_service.rb b/app/services/ci/delete_objects_service.rb
index 7a93d0e9665..38c62b4f149 100644
--- a/app/services/ci/delete_objects_service.rb
+++ b/app/services/ci/delete_objects_service.rb
@@ -27,7 +27,7 @@ module Ci
# `find_by_sql` performs a write in this case and we need to wrap it in
# a transaction to stick to the primary database.
Ci::DeletedObject.transaction do
- Ci::DeletedObject.find_by_sql([next_batch_sql, new_pick_up_at: RETRY_IN.from_now])
+ Ci::DeletedObject.find_by_sql([next_batch_sql, { new_pick_up_at: RETRY_IN.from_now }])
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/drop_pipeline_service.rb b/app/services/ci/drop_pipeline_service.rb
index 5772ab8f29c..94b664c5a5b 100644
--- a/app/services/ci/drop_pipeline_service.rb
+++ b/app/services/ci/drop_pipeline_service.rb
@@ -9,8 +9,8 @@ module Ci
pipelines.cancelable.select(:id).find_in_batches do |pipelines_batch|
Ci::DropPipelineWorker.bulk_perform_async_with_contexts(
pipelines_batch,
- arguments_proc: -> (pipeline) { [pipeline.id, failure_reason] },
- context_proc: -> (_) { { user: context_user } }
+ arguments_proc: ->(pipeline) { [pipeline.id, failure_reason] },
+ context_proc: ->(_) { { user: context_user } }
)
end
end
diff --git a/app/services/ci/ensure_stage_service.rb b/app/services/ci/ensure_stage_service.rb
deleted file mode 100644
index 9d5ccecbe33..00000000000
--- a/app/services/ci/ensure_stage_service.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- ##
- # We call this service everytime we persist a CI/CD job.
- #
- # In most cases a job should already have a stage assigned, but in cases it
- # doesn't have we need to either find existing one or create a brand new
- # stage.
- #
- class EnsureStageService < BaseService
- EnsureStageError = Class.new(StandardError)
-
- def execute(build)
- @build = build
-
- return if build.stage_id.present?
- return if build.invalid?
-
- ensure_stage.tap do |stage|
- build.stage_id = stage.id
-
- yield stage if block_given?
- end
- end
-
- private
-
- def ensure_stage(attempts: 2)
- find_stage || create_stage
- rescue ActiveRecord::RecordNotUnique
- retry if (attempts -= 1) > 0
-
- raise EnsureStageError, <<~EOS
- We failed to find or create a unique pipeline stage after 2 retries.
- This should never happen and is most likely the result of a bug in
- the database load balancing code.
- EOS
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def find_stage
- @build.pipeline.stages.find_by(name: @build.stage)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def create_stage
- Ci::Stage.create!(
- name: @build.stage,
- position: @build.stage_idx,
- pipeline: @build.pipeline,
- project: @build.project
- )
- end
- end
-end
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index 15597eb7209..e0c98e17e8f 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -50,6 +50,10 @@ module Ci
yield(pipelines_project_merge_request_path(merge_request))
yield(merge_request_widget_path(merge_request))
end
+
+ pipeline.project.merge_requests.by_merged_or_merge_or_squash_commit_sha(pipeline.sha).each do |merge_request|
+ yield(merge_request_widget_path(merge_request))
+ end
end
def graphql_pipeline_path(pipeline)
diff --git a/app/services/ci/generate_kubeconfig_service.rb b/app/services/ci/generate_kubeconfig_service.rb
index 56e22a64529..967224f2e1a 100644
--- a/app/services/ci/generate_kubeconfig_service.rb
+++ b/app/services/ci/generate_kubeconfig_service.rb
@@ -43,7 +43,9 @@ module Ci
def agent_authorizations
::Clusters::Agents::Authorizations::CiAccess::FilterService.new(
pipeline.cluster_agent_authorizations,
- environment: environment
+ { environment: environment,
+ protected_ref: pipeline.protected_ref? },
+ pipeline.project
).execute
end
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index 0791fff8545..82f2c22adad 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -130,7 +130,7 @@ module Ci
return accessibility if accessibility.present?
- job.artifact_is_public_in_config? ? :public : :private
+ job.artifact_access_setting_in_config
end
def parse_artifact(artifact)
diff --git a/app/services/ci/job_artifacts/expire_project_build_artifacts_service.rb b/app/services/ci/job_artifacts/expire_project_build_artifacts_service.rb
index 836b1d39736..adba9a15e39 100644
--- a/app/services/ci/job_artifacts/expire_project_build_artifacts_service.rb
+++ b/app/services/ci/job_artifacts/expire_project_build_artifacts_service.rb
@@ -12,16 +12,16 @@ module Ci
# rubocop:disable CodeReuse/ActiveRecord
def execute
- scope = Ci::JobArtifact.for_project(project_id).order(:id)
+ scope = Ci::JobArtifact.select(:id).for_project(project_id).order(:id)
file_type_values = Ci::JobArtifact.erasable_file_types.map { |file_type| [Ci::JobArtifact.file_types[file_type]] }
from_sql = Arel::Nodes::Grouping.new(Arel::Nodes::ValuesList.new(file_type_values)).as('file_types (file_type)').to_sql
array_scope = Ci::JobArtifact.from(from_sql).select(:file_type)
- array_mapping_scope = -> (file_type_expression) { Ci::JobArtifact.where(Ci::JobArtifact.arel_table[:file_type].eq(file_type_expression)) }
+ array_mapping_scope = ->(file_type_expression) { Ci::JobArtifact.where(Ci::JobArtifact.arel_table[:file_type].eq(file_type_expression)) }
Gitlab::Pagination::Keyset::Iterator
.new(scope: scope, in_operator_optimization_options: { array_scope: array_scope, array_mapping_scope: array_mapping_scope })
.each_batch(of: BATCH_SIZE) do |batch|
- ids = batch.reselect!(:id).to_a.map(&:id)
+ ids = batch.to_a.map(&:id)
Ci::JobArtifact.unlocked.where(id: ids).update_all(locked: Ci::JobArtifact.lockeds[:unlocked], expire_at: expiry_time)
end
end
diff --git a/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb b/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb
index 0d35a90ed04..80b4efc615e 100644
--- a/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb
+++ b/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb
@@ -17,7 +17,11 @@ module Ci
@removed_count = 0
@locked_count = 0
@start_at = Time.current
- @loop_limit = Feature.enabled?(:ci_job_artifacts_backlog_large_loop_limit) ? LARGE_LOOP_LIMIT : LOOP_LIMIT
+ @loop_limit = if Feature.enabled?(:ci_job_artifacts_backlog_large_loop_limit, type: :ops)
+ LARGE_LOOP_LIMIT
+ else
+ LOOP_LIMIT
+ end
end
def execute
@@ -35,9 +39,9 @@ module Ci
unknown_status_build_ids = safely_ordered_ci_job_artifacts_locked_unknown_relation.pluck_job_id.uniq
locked_pipe_build_ids = ::Ci::Build
- .with_pipeline_locked_artifacts
- .id_in(unknown_status_build_ids)
- .pluck_primary_key
+ .with_pipeline_locked_artifacts
+ .id_in(unknown_status_build_ids)
+ .pluck_primary_key
@locked_count += update_unknown_artifacts(locked_pipe_build_ids, Ci::JobArtifact.lockeds[:artifacts_locked])
diff --git a/app/services/ci/job_token_scope/add_group_or_project_service.rb b/app/services/ci/job_token_scope/add_group_or_project_service.rb
new file mode 100644
index 00000000000..5d4914d82c7
--- /dev/null
+++ b/app/services/ci/job_token_scope/add_group_or_project_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobTokenScope
+ class AddGroupOrProjectService < ::BaseService
+ include EditScopeValidations
+
+ def execute(target)
+ validate_target_exists!(target)
+
+ if target.is_a?(::Group)
+ ::Ci::JobTokenScope::AddGroupService.new(project, current_user).execute(target)
+ else
+ ::Ci::JobTokenScope::AddProjectService.new(project, current_user).execute(target)
+ end
+
+ rescue EditScopeValidations::NotFoundError => e
+ ServiceResponse.error(message: e.message, reason: :not_found)
+ end
+ end
+ end
+end
diff --git a/app/services/ci/job_token_scope/add_group_service.rb b/app/services/ci/job_token_scope/add_group_service.rb
new file mode 100644
index 00000000000..09269c9d69d
--- /dev/null
+++ b/app/services/ci/job_token_scope/add_group_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobTokenScope
+ class AddGroupService < ::BaseService
+ include EditScopeValidations
+
+ def execute(target_group)
+ validate_group_add!(project, target_group, current_user)
+
+ link = allowlist
+ .add_group!(target_group, user: current_user)
+
+ ServiceResponse.success(payload: { group_link: link })
+
+ rescue ActiveRecord::RecordNotUnique
+ ServiceResponse.error(message: 'Target group is already in the job token scope')
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(message: e.message)
+ rescue EditScopeValidations::ValidationError => e
+ ServiceResponse.error(message: e.message, reason: :insufficient_permissions)
+ end
+
+ private
+
+ def allowlist
+ Ci::JobToken::Allowlist.new(project)
+ end
+ end
+ end
+end
+
+Ci::JobTokenScope::AddGroupService.prepend_mod_with('Ci::JobTokenScope::AddGroupService')
diff --git a/app/services/ci/job_token_scope/remove_group_service.rb b/app/services/ci/job_token_scope/remove_group_service.rb
new file mode 100644
index 00000000000..aef9742c7ff
--- /dev/null
+++ b/app/services/ci/job_token_scope/remove_group_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobTokenScope
+ class RemoveGroupService < ::BaseService
+ include EditScopeValidations
+
+ def execute(target_group)
+ validate_group_remove!(project, current_user)
+
+ link = ::Ci::JobToken::GroupScopeLink
+ .for_source_and_target(project, target_group)
+
+ return ServiceResponse.error(message: 'Target group is not in the job token scope') unless link
+
+ if link.destroy
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: link.errors.full_messages.to_sentence, payload: { group_link: link })
+ end
+ rescue EditScopeValidations::ValidationError => e
+ ServiceResponse.error(message: e.message, reason: :insufficient_permissions)
+ end
+ end
+ end
+end
+
+Ci::JobTokenScope::RemoveGroupService.prepend_mod_with('Ci::JobTokenScope::RemoveGroupService')
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
index e028d7252ae..e3465f639ce 100644
--- a/app/services/ci/list_config_variables_service.rb
+++ b/app/services/ci/list_config_variables_service.rb
@@ -18,30 +18,65 @@ module Ci
end
def execute(ref)
+ # "ref" is not a enough for a cache key because the name is static but that branch can be changed any time
sha = project.commit(ref).try(:sha)
- with_reactive_cache(sha) { |result| result }
+ with_reactive_cache(sha, ref) { |result| result }
end
- def calculate_reactive_cache(sha)
+ # Changing parameters in an `calculate_reactive_cache` method is like changing parameters in a Sidekiq worker.
+ # So, we need to follow the same rules: https://docs.gitlab.com/ee/development/sidekiq/compatibility_across_updates.html#add-an-argument
+ # That's why `ref` is an optional parameter for now.
+ def calculate_reactive_cache(sha, ref = nil) # rubocop:disable Lint/UnusedMethodArgument -- explained above
config = ::Gitlab::Ci::ProjectConfig.new(project: project, sha: sha)
return {} unless config.exists?
- result = Gitlab::Ci::YamlProcessor.new(
+ # Because of the same reason as above, we need to check if `ref` is nil or not for backward compatibility.
+ # In the next iteration, we can remove this check and make `ref` a required parameter.
+ result = if ref.nil?
+ legacy_calculation(sha, config)
+ else
+ new_calculation(sha, ref, config)
+ end
+
+ result.valid? ? result.root_variables_with_prefill_data : {}
+ end
+
+ # Required for ReactiveCaching, it is also used in `reactive_cache_worker_finder`
+ def id
+ "#{project.id}-#{current_user.id}"
+ end
+
+ private
+
+ def legacy_calculation(sha, config)
+ ref_name = Gitlab::Ci::RefFinder.new(project).find_by_sha(sha)
+
+ Gitlab::Ci::YamlProcessor.new(
config.content,
project: project,
user: current_user,
sha: sha,
+ ref: ref_name,
verify_project_sha: true
).execute
-
- result.valid? ? result.root_variables_with_prefill_data : {}
end
- # Required for ReactiveCaching, it is also used in `reactive_cache_worker_finder`
- def id
- "#{project.id}-#{current_user.id}"
+ def new_calculation(sha, ref, config)
+ # The `ref` parameter should be branch or tag name. However, the API also accepts a commit SHA and we can't
+ # change it to not introduce breaking changes. Instead, here we're checking if a commit SHA is passed
+ # as `ref`. If so, we should verify the sha whether it belongs to the project in YamlProcessor.
+ sha_passed_as_ref_parameter = !project.repository.branch_or_tag?(ref)
+
+ Gitlab::Ci::YamlProcessor.new(
+ config.content,
+ project: project,
+ user: current_user,
+ sha: sha,
+ ref: ref,
+ verify_project_sha: sha_passed_as_ref_parameter
+ ).execute
end
end
end
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
index 89a3c7d9e03..6f9785bd81b 100644
--- a/app/services/ci/parse_dotenv_artifact_service.rb
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -49,7 +49,8 @@ module Ci
source: :dotenv,
key: key,
value: value,
- raw: false
+ raw: false,
+ project_id: artifact.project_id
)
end
end
diff --git a/app/services/ci/partitions/create_service.rb b/app/services/ci/partitions/create_service.rb
new file mode 100644
index 00000000000..730832775d7
--- /dev/null
+++ b/app/services/ci/partitions/create_service.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Ci
+ module Partitions
+ class CreateService
+ HEADROOM_PARTITIONS = 3
+
+ def initialize(partition)
+ @partition = partition
+ end
+
+ def execute
+ return unless Feature.enabled?(:ci_partitioning_automation, :instance)
+ return unless partition
+
+ Ci::Partition.create_next! if should_create_next?
+ end
+
+ private
+
+ attr_reader :partition
+
+ def should_create_next?
+ above_threshold? && headroom_available?
+ end
+
+ def above_threshold?
+ partition.above_threshold?(Ci::Partition::MAX_PARTITION_SIZE)
+ end
+
+ def headroom_available?
+ Ci::Partition.id_after(partition.id).count < HEADROOM_PARTITIONS
+ end
+ end
+ end
+end
diff --git a/app/services/ci/partitions/setup_default_service.rb b/app/services/ci/partitions/setup_default_service.rb
new file mode 100644
index 00000000000..e620ecf8a75
--- /dev/null
+++ b/app/services/ci/partitions/setup_default_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Ci
+ module Partitions
+ class SetupDefaultService
+ DEFAULT_PARTITION_IDS = [
+ Ci::Pipeline::INITIAL_PARTITION_VALUE,
+ Ci::Pipeline::SECOND_PARTITION_VALUE,
+ Ci::Pipeline::NEXT_PARTITION_VALUE
+ ].freeze
+
+ def execute
+ return if Ci::Partition.current
+
+ setup_default_partitions
+ end
+
+ private
+
+ def setup_default_partitions
+ setup_active_partitions
+ setup_current_partition
+ end
+
+ def setup_active_partitions
+ active_partitions = DEFAULT_PARTITION_IDS
+ .map { |value| { id: value, status: Ci::Partition.statuses[:active] } }
+
+ Ci::Partition.upsert_all(active_partitions, unique_by: :id)
+ end
+
+ def setup_current_partition
+ Ci::Partition
+ .find(Ci::Pipeline.current_partition_value)
+ .update!(status: Ci::Partition.statuses[:current])
+ end
+ end
+ end
+end
diff --git a/app/services/ci/partitions/sync_service.rb b/app/services/ci/partitions/sync_service.rb
new file mode 100644
index 00000000000..a73d96cc5df
--- /dev/null
+++ b/app/services/ci/partitions/sync_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Ci
+ module Partitions
+ class SyncService
+ def initialize(partition)
+ @partition = partition
+ end
+
+ def execute
+ return unless Feature.enabled?(:ci_partitioning_automation, :instance)
+ return unless partition
+
+ sync_available_partitions_statuses!
+
+ next_ci_partition = next_available_partition
+ return unless next_ci_partition.present? && above_threshold?
+
+ next_ci_partition.switch_writes!
+ end
+
+ private
+
+ attr_reader :partition
+
+ def above_threshold?
+ partition.above_threshold?(Ci::Partition::MAX_PARTITION_SIZE)
+ end
+
+ def sync_available_partitions_statuses!
+ Ci::Partition.id_after(partition.id).each do |partition|
+ partition.ready! if partition.all_partitions_exist?
+ end
+ end
+
+ def next_available_partition
+ Ci::Partition.next_available(partition.id)
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
index 8dddf3c3f6c..d388febc8ca 100644
--- a/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
+++ b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
@@ -21,8 +21,6 @@ module Ci
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
destroy_unlocked_pipeline_artifacts
-
- legacy_destroy_pipeline_artifacts
end
@removed_artifacts_count
@@ -40,19 +38,6 @@ module Ci
end
end
- def legacy_destroy_pipeline_artifacts
- loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
- destroy_artifacts_batch
- end
- end
-
- def destroy_artifacts_batch
- artifacts = ::Ci::PipelineArtifact.unlocked.expired.limit(BATCH_SIZE).to_a
- return false if artifacts.empty?
-
- destroy_batch(artifacts)
- end
-
def destroy_batch(artifacts)
artifacts.each(&:destroy!)
increment_stats(artifacts.size)
diff --git a/app/services/ci/pipeline_bridge_status_service.rb b/app/services/ci/pipeline_bridge_status_service.rb
index aeac43588f7..11fade6cc52 100644
--- a/app/services/ci/pipeline_bridge_status_service.rb
+++ b/app/services/ci/pipeline_bridge_status_service.rb
@@ -8,8 +8,10 @@ module Ci
begin
pipeline.source_bridge.inherit_status_from_downstream!(pipeline)
rescue StateMachines::InvalidTransition => e
+ error = Ci::Bridge::InvalidTransitionError.new(e.message)
+ error.set_backtrace(caller)
Gitlab::ErrorTracking.track_exception(
- Ci::Bridge::InvalidTransitionError.new(e.message),
+ error,
bridge_id: pipeline.source_bridge.id,
downstream_pipeline_id: pipeline.id)
end
diff --git a/app/services/ci/pipeline_creation/cancel_redundant_pipelines_service.rb b/app/services/ci/pipeline_creation/cancel_redundant_pipelines_service.rb
index 98469e82af3..6e69f22591f 100644
--- a/app/services/ci/pipeline_creation/cancel_redundant_pipelines_service.rb
+++ b/app/services/ci/pipeline_creation/cancel_redundant_pipelines_service.rb
@@ -7,6 +7,8 @@ module Ci
BATCH_SIZE = 25
PAGE_SIZE = 500
+ MAX_CANCELLATIONS_PER_PIPELINE = 3000
+ ID_BATCH_SIZE = 1000
def initialize(pipeline)
@pipeline = pipeline
@@ -19,6 +21,12 @@ module Ci
return if pipeline.parent_pipeline? # skip if child pipeline
return unless project.auto_cancel_pending_pipelines?
+ if Feature.enabled?(:cancel_redundant_pipelines_without_hierarchy_cte, @project)
+ auto_cancel_all_pipelines_with_cancelable_statuses
+
+ return
+ end
+
paginator.each do |ids|
pipelines = parent_and_child_pipelines(ids)
@@ -67,23 +75,96 @@ module Ci
def parent_and_child_pipelines(ids)
Ci::Pipeline.object_hierarchy(parent_auto_cancelable_pipelines(ids), project_condition: :same)
.base_and_descendants
- .alive_or_scheduled
+ .cancelable
end
- def legacy_auto_cancel_pipelines(pipeline_ids)
- ::Ci::Pipeline
- .id_in(pipeline_ids)
- .conservative_interruptible
- .each do |cancelable_pipeline|
- cancel_pipeline(cancelable_pipeline, safe_cancellation: false)
- end
+ def cancelable_status_pipeline_ids
+ project.all_pipelines
+ .for_ref(pipeline.ref)
+ .id_not_in(pipeline.id)
+ .with_status(Ci::Pipeline::CANCELABLE_STATUSES)
+ .order_id_desc
+ .limit(MAX_CANCELLATIONS_PER_PIPELINE)
+ .pluck(:id)
end
+ strong_memoize_attr :cancelable_status_pipeline_ids
- def auto_cancel_pipelines(pipeline_ids)
- if Feature.disabled?(:ci_workflow_auto_cancel_on_new_commit, project)
- return legacy_auto_cancel_pipelines(pipeline_ids)
+ def ref_head_sha
+ project.commit(pipeline.ref).try(:id)
+ end
+ strong_memoize_attr :ref_head_sha
+
+ # rubocop:disable Metrics/CyclomaticComplexity -- Keep logic tightly bound while this is still experimental
+ def auto_cancel_all_pipelines_with_cancelable_statuses
+ skipped_for_old_age = 0
+ conservatively_cancelled = 0
+ aggressively_cancelled = 0
+ configured_to_not_cancel = 0
+
+ cancelable_status_pipeline_ids.each_slice(ID_BATCH_SIZE) do |ids_batch|
+ Ci::Pipeline.id_in(ids_batch).each do |cancelable|
+ case cancelable.source.to_sym
+ when *Enums::Ci::Pipeline.ci_sources.keys
+ # Newer pipelines are not cancelable
+ next if cancelable.created_at >= pipeline.created_at
+ when :parent_pipeline
+ # Child pipelines are cancelable based on the root parent age
+ next if cancelable.root_ancestor.created_at >= pipeline.created_at
+ else
+ # Skip other pipeline sources
+ next
+ end
+
+ next if cancelable.sha == pipeline.sha
+ next if cancelable.sha == ref_head_sha
+
+ if cancelable.created_at < pipelines_created_after
+ skipped_for_old_age += 1
+
+ next
+ end
+
+ # Cancel method based on configured strategy
+ case cancelable.auto_cancel_on_new_commit
+ when 'none'
+ # no-op
+
+ configured_to_not_cancel += 1
+ when 'conservative'
+ next unless conservative_cancellable_pipeline_ids(ids_batch).include?(cancelable.id)
+
+ conservatively_cancelled += 1
+
+ cancel_pipeline(cancelable, safe_cancellation: false)
+ when 'interruptible'
+
+ aggressively_cancelled += 1
+
+ cancel_pipeline(cancelable, safe_cancellation: true)
+ else
+ raise ArgumentError,
+ "Unknown auto_cancel_on_new_commit value: #{cancelable.auto_cancel_on_new_commit}"
+ end
+ end
end
+ Gitlab::AppLogger.info(
+ class: self.class.name,
+ message: "Canceling redundant pipelines",
+ cancellable_count: cancelable_status_pipeline_ids.count,
+ skipped_for_old_age: skipped_for_old_age,
+ conservatively_cancelled: conservatively_cancelled,
+ aggressively_cancelled: aggressively_cancelled,
+ configured_to_not_cancel: configured_to_not_cancel,
+ canceled_by_pipeline_id: pipeline.id,
+ project_id: pipeline.project_id,
+ ref: pipeline.ref,
+ sha: pipeline.sha
+ )
+ end
+ # rubocop:enable Metrics/CyclomaticComplexity
+
+ def auto_cancel_pipelines(pipeline_ids)
::Ci::Pipeline
.id_in(pipeline_ids)
.each do |cancelable_pipeline|
diff --git a/app/services/ci/pipeline_creation/start_pipeline_service.rb b/app/services/ci/pipeline_creation/start_pipeline_service.rb
index 65a045f32dd..8f685123964 100644
--- a/app/services/ci/pipeline_creation/start_pipeline_service.rb
+++ b/app/services/ci/pipeline_creation/start_pipeline_service.rb
@@ -15,6 +15,10 @@ module Ci
# The pipeline ref is fetched in the jobs and deleted when the pipeline transitions to a finished state.
pipeline.ensure_persistent_ref
+ if Feature.enabled?(:populate_and_use_build_names_table, pipeline.project)
+ Ci::UpdateBuildNamesWorker.perform_async(pipeline.id)
+ end
+
Ci::ProcessPipelineService.new(pipeline).execute
end
end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index 84e5089b0d5..ccdadaa9989 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -55,25 +55,46 @@ module Ci
end
def update_stage!(stage)
- # Update jobs for a given stage in bulk/slices
- @collection
- .created_job_ids_in_stage(stage.position)
- .in_groups_of(BATCH_SIZE, false) { |ids| update_jobs!(ids) }
-
+ sorted_update_stage!(stage)
status = @collection.status_of_stage(stage.position)
stage.set_status(status)
end
- def update_jobs!(ids)
- created_jobs = pipeline
+ def sorted_update_stage!(stage)
+ ordered_jobs(stage).each { |job| update_job!(job) }
+ end
+
+ def ordered_jobs(stage)
+ jobs = load_jobs_in_batches(stage)
+ sorted_job_names = sort_jobs(jobs).each_with_index.to_h
+ jobs.sort_by { |job| sorted_job_names.fetch(job.name) }
+ end
+
+ def load_jobs_in_batches(stage)
+ @collection
+ .created_job_ids_in_stage(stage.position)
+ .in_groups_of(BATCH_SIZE, false)
+ .each_with_object([]) do |ids, jobs|
+ jobs.concat(load_jobs(ids))
+ end
+ end
+
+ def load_jobs(ids)
+ pipeline
.current_processable_jobs
.id_in(ids)
.with_project_preload
.created
.ordered_by_stage
.select_with_aggregated_needs(project)
+ end
- created_jobs.each { |job| update_job!(job) }
+ def sort_jobs(jobs)
+ Gitlab::Ci::YamlProcessor::Dag.order( # rubocop: disable CodeReuse/ActiveRecord -- this is not ActiveRecord
+ jobs.to_h do |job|
+ [job.name, job.aggregated_needs_names.to_a]
+ end
+ )
end
def update_pipeline!
diff --git a/app/services/ci/pipeline_schedules/base_save_service.rb b/app/services/ci/pipeline_schedules/base_save_service.rb
index e6f633498e9..c51b419be50 100644
--- a/app/services/ci/pipeline_schedules/base_save_service.rb
+++ b/app/services/ci/pipeline_schedules/base_save_service.rb
@@ -5,12 +5,27 @@ module Ci
class BaseSaveService
include Gitlab::Utils::StrongMemoize
+ # The only way that ref can be unexpanded after #expand_short_ref runs is if the ref
+ # is ambiguous because both a branch and a tag with the name exist, or it is
+ # ambiguous because neither exists.
+ INVALID_REF_MESSAGE = 'Ref is ambiguous'
+ INVALID_REF_MODEL_MESSAGE = 'is ambiguous'
+
def execute
schedule.assign_attributes(params)
return forbidden_to_save unless allowed_to_save?
return forbidden_to_save_variables unless allowed_to_save_variables?
+ # This validation cannot be added to the model yet due to operation hooks
+ # causing incidents
+ unless valid_ref_format?
+ schedule.expand_short_ref
+
+ # Only return an error if the ref fails to expand
+ return invalid_ref_format unless valid_ref_format?
+ end
+
if schedule.save
ServiceResponse.success(payload: schedule)
else
@@ -22,6 +37,10 @@ module Ci
attr_reader :project, :user, :params, :schedule
+ def valid_ref_format?
+ schedule.ref.present? && Ci::PipelineSchedule::VALID_REF_FORMAT_REGEX.match?(schedule.ref)
+ end
+
def allowed_to_save?
# Disable cache because the same ability may already have been checked
# for the same records with different attributes. For example, we do not
@@ -53,6 +72,12 @@ module Ci
ServiceResponse.error(payload: schedule, message: [message], reason: :forbidden)
end
+
+ def invalid_ref_format
+ schedule.errors.add(:ref, INVALID_REF_MODEL_MESSAGE)
+
+ ServiceResponse.error(payload: schedule, message: [INVALID_REF_MESSAGE])
+ end
end
end
end
diff --git a/app/services/ci/pipeline_schedules/calculate_next_run_service.rb b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
index a1b9ab5f82e..064b004a5b8 100644
--- a/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
+++ b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
@@ -47,7 +47,13 @@ module Ci
every_x_minutes = (1.day.in_minutes / daily_limit).to_i
- Gitlab::Ci::CronParser.parse_natural("every #{every_x_minutes} minutes", Time.zone.name)
+ begin
+ Gitlab::Ci::CronParser.parse_natural("every #{every_x_minutes} minutes", Time.zone.name)
+ rescue ZeroDivisionError
+ # Fugit raises ZeroDivisionError if provided a number
+ # less than 1 in the expression.
+ nil
+ end
end
end
end
diff --git a/app/services/ci/pipeline_trigger_service.rb b/app/services/ci/pipeline_trigger_service.rb
index d7065680053..4645b771f46 100644
--- a/app/services/ci/pipeline_trigger_service.rb
+++ b/app/services/ci/pipeline_trigger_service.rb
@@ -94,7 +94,8 @@ module Ci
def payload_variable
{ key: PAYLOAD_VARIABLE_KEY,
value: Gitlab::Json.dump(params.except(*PAYLOAD_VARIABLE_HIDDEN_PARAMS)),
- variable_type: :file }
+ variable_type: :file,
+ raw: true }
end
def set_application_context_from_trigger(trigger)
diff --git a/app/services/ci/pipeline_triggers/create_service.rb b/app/services/ci/pipeline_triggers/create_service.rb
new file mode 100644
index 00000000000..13c581e2988
--- /dev/null
+++ b/app/services/ci/pipeline_triggers/create_service.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineTriggers
+ class CreateService
+ include Gitlab::Allowable
+
+ attr_reader :project, :current_user, :description
+
+ def initialize(project:, user:, description:)
+ @project = project
+ @current_user = user
+ @description = description
+ end
+
+ def execute
+ unless can?(current_user, :manage_trigger, project)
+ return ServiceResponse.error(
+ message: _('The current user is not authorized to create a pipeline trigger token'),
+ payload: { trigger: nil },
+ reason: :forbidden
+ )
+ end
+
+ trigger = project.triggers.create(**create_params)
+
+ if trigger.present? && trigger.persisted?
+ ServiceResponse.success(payload: { trigger: trigger })
+ elsif trigger.present? && trigger.errors.any?
+ ServiceResponse.error(
+ message: trigger.errors.to_json,
+ payload: { trigger: trigger },
+ reason: :validation_error
+ )
+ else
+ raise "Unexpected Ci::Trigger creation failure. Description: #{@description}"
+ end
+ end
+
+ private
+
+ def create_params
+ { description: description, owner: current_user }
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_triggers/destroy_service.rb b/app/services/ci/pipeline_triggers/destroy_service.rb
new file mode 100644
index 00000000000..36b174a5d58
--- /dev/null
+++ b/app/services/ci/pipeline_triggers/destroy_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineTriggers
+ class DestroyService
+ include Gitlab::Allowable
+
+ attr_reader :project, :current_user, :description, :trigger
+
+ def initialize(user:, trigger:)
+ @current_user = user
+ @trigger = trigger
+ end
+
+ def execute
+ unless can?(current_user, :manage_trigger, trigger)
+ return ServiceResponse.error(
+ message: _('The current user is not authorized to manage the pipeline trigger token'),
+ reason: :forbidden
+ )
+ end
+
+ trigger.destroy
+
+ unless trigger.destroyed?
+ return ServiceResponse.error(
+ message: _('Attempted to destroy the pipeline trigger token but failed')
+ )
+ end
+
+ ServiceResponse.success
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_triggers/update_service.rb b/app/services/ci/pipeline_triggers/update_service.rb
new file mode 100644
index 00000000000..ffcd781ea87
--- /dev/null
+++ b/app/services/ci/pipeline_triggers/update_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineTriggers
+ class UpdateService
+ include Gitlab::Allowable
+
+ attr_reader :current_user, :description, :trigger
+
+ def initialize(user:, trigger:, description:)
+ @current_user = user
+ @description = description
+ @trigger = trigger
+ end
+
+ def execute
+ unless can?(current_user, :admin_trigger, trigger)
+ return ServiceResponse.error(
+ message: _('The current user is not authorized to update the pipeline trigger token'),
+ payload: { trigger: trigger },
+ reason: :forbidden
+ )
+ end
+
+ if trigger.update(**update_params)
+ ServiceResponse.success(payload: { trigger: trigger })
+ else
+ ServiceResponse.error(
+ message: _('Attempted to update the pipeline trigger token but failed'),
+ payload: { trigger: trigger }
+ )
+ end
+ end
+
+ private
+
+ def update_params
+ { description: description }
+ end
+ end
+ end
+end
diff --git a/app/services/ci/process_build_service.rb b/app/services/ci/process_build_service.rb
index afaf18a4de2..47002a7a45d 100644
--- a/app/services/ci/process_build_service.rb
+++ b/app/services/ci/process_build_service.rb
@@ -27,7 +27,7 @@ module Ci
end
def enqueue(processable)
- return processable.drop!(:failed_outdated_deployment_job) if processable.outdated_deployment?
+ return processable.drop!(:failed_outdated_deployment_job) if processable.has_outdated_deployment?
processable.enqueue
end
diff --git a/app/services/ci/queue/build_queue_service.rb b/app/services/ci/queue/build_queue_service.rb
index d6a252df82f..dbd5f73a5a7 100644
--- a/app/services/ci/queue/build_queue_service.rb
+++ b/app/services/ci/queue/build_queue_service.rb
@@ -51,7 +51,7 @@ module Ci
end
def execute(relation)
- strategy.build_ids(relation)
+ strategy.build_and_partition_ids(relation)
end
private
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index b2929390e58..340458c32e3 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -19,11 +19,9 @@ module Ci
def builds_for_group_runner
return new_builds.none if runner.namespace_ids.empty?
- new_builds_relation = new_builds.where('ci_pending_builds.namespace_traversal_ids && ARRAY[?]::int[]', runner.namespace_ids)
+ new_builds_relation = new_builds.where("ci_pending_builds.namespace_traversal_ids && '{?}'", runner.namespace_ids)
- return order(new_builds_relation) if ::Feature.enabled?(:order_builds_for_group_runner)
-
- new_builds_relation
+ order(new_builds_relation)
end
def builds_matching_tag_ids(relation, ids)
@@ -42,8 +40,8 @@ module Ci
::Ci::PendingBuild.all
end
- def build_ids(relation)
- relation.pluck(:build_id)
+ def build_and_partition_ids(relation)
+ relation.pluck(:build_id, :partition_id)
end
private
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 470a1d3951b..54b43f36934 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -129,11 +129,13 @@ module Ci
builds = queue.builds_with_any_tags(builds)
end
- build_ids = retrieve_queue(-> { queue.execute(builds) })
+ build_and_partition_ids = retrieve_queue(-> { queue.execute(builds) })
- @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
+ @metrics.observe_queue_size(-> { build_and_partition_ids.size }, @runner.runner_type)
- build_ids.each { |build_id| yield Ci::Build.find(build_id) }
+ build_and_partition_ids.each do |build_id, partition_id|
+ yield Ci::Build.find_by!(partition_id: partition_id, id: build_id)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -250,6 +252,8 @@ module Ci
@metrics.increment_queue_operation(:runner_pre_assign_checks_success)
build.run!
+ persist_runtime_features(build, params)
+
build.runner_manager = runner_manager if runner_manager
end
@@ -288,14 +292,22 @@ module Ci
)
end
+ def persist_runtime_features(build, params)
+ return unless params.dig(:info, :features, :cancel_gracefully)
+
+ build.set_cancel_gracefully
+
+ build.save
+ end
+
def pre_assign_runner_checks
{
- missing_dependency_failure: -> (build, _) { !build.has_valid_build_dependencies? },
- runner_unsupported: -> (build, params) { !build.supported_runner?(params.dig(:info, :features)) },
- archived_failure: -> (build, _) { build.archived? },
- project_deleted: -> (build, _) { build.project.pending_delete? },
- builds_disabled: -> (build, _) { !build.project.builds_enabled? },
- user_blocked: -> (build, _) { build.user&.blocked? }
+ missing_dependency_failure: ->(build, _) { !build.has_valid_build_dependencies? },
+ runner_unsupported: ->(build, params) { !build.supported_runner?(params.dig(:info, :features)) },
+ archived_failure: ->(build, _) { build.archived? },
+ project_deleted: ->(build, _) { build.project.pending_delete? },
+ builds_disabled: ->(build, _) { !build.project.builds_enabled? },
+ user_blocked: ->(build, _) { build.user&.blocked? }
}
end
end
diff --git a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
index d7078200c14..8da084ea554 100644
--- a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
+++ b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
@@ -3,22 +3,34 @@
module Ci
module ResourceGroups
class AssignResourceFromResourceGroupService < ::BaseService
- # rubocop: disable CodeReuse/ActiveRecord
+ RESPAWN_WAIT_TIME = 1.minute
+
def execute(resource_group)
release_resource_from_stale_jobs(resource_group)
free_resources = resource_group.resources.free.count
+ return if free_resources == 0
+
+ enqueue_upcoming_processables(free_resources, resource_group)
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def enqueue_upcoming_processables(free_resources, resource_group)
resource_group.upcoming_processables.take(free_resources).each do |upcoming|
Gitlab::OptimisticLocking.retry_lock(upcoming, name: 'enqueue_waiting_for_resource') do |processable|
- processable.enqueue_waiting_for_resource
+ if processable.has_outdated_deployment?
+ processable.drop!(:failed_outdated_deployment_job)
+ else
+ processable.enqueue_waiting_for_resource
+ end
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
- private
-
def release_resource_from_stale_jobs(resource_group)
resource_group.resources.stale_processables.find_each do |processable|
resource_group.release_resource_from(processable)
diff --git a/app/services/ci/retry_job_service.rb b/app/services/ci/retry_job_service.rb
index a8ea5ac6df0..bcfa2c52906 100644
--- a/app/services/ci/retry_job_service.rb
+++ b/app/services/ci/retry_job_service.rb
@@ -26,6 +26,7 @@ module Ci
raise TypeError unless job.instance_of?(Ci::Build) || job.instance_of?(Ci::Bridge)
check_access!(job)
+ variables = ensure_project_id!(variables)
new_job = job.clone(current_user: current_user, new_job_variables_attributes: variables)
if enqueue_if_actionable && new_job.action?
@@ -44,9 +45,15 @@ module Ci
.close(new_job)
end
- ::Ci::Pipelines::AddJobService.new(job.pipeline).execute!(new_job) do |processable|
- BulkInsertableAssociations.with_bulk_insert do
- processable.save!
+ # This method is called on the `drop!` state transition for Ci::Build which runs the retry in the
+ # `after_transition` block within a transaction.
+ # Ci::Pipelines::AddJobService then obtains the exclusive lease inside the same transaction.
+ # See issue: https://gitlab.com/gitlab-org/gitlab/-/issues/441525
+ Gitlab::ExclusiveLease.skipping_transaction_check do
+ ::Ci::Pipelines::AddJobService.new(job.pipeline).execute!(new_job) do |processable|
+ BulkInsertableAssociations.with_bulk_insert do
+ processable.save!
+ end
end
end
@@ -58,6 +65,12 @@ module Ci
private
+ def ensure_project_id!(variables)
+ variables.map do |variables|
+ variables.merge(project_id: project.id)
+ end
+ end
+
def check_assignable_runners!(job); end
def retry_job(job, variables: [])
diff --git a/app/services/ci/runners/assign_runner_service.rb b/app/services/ci/runners/assign_runner_service.rb
index 4e7b08bdd7a..b815e1048bc 100644
--- a/app/services/ci/runners/assign_runner_service.rb
+++ b/app/services/ci/runners/assign_runner_service.rb
@@ -2,35 +2,56 @@
module Ci
module Runners
+ # Service used to assign a runner to a project.
+ # This class can be reused by SetRunnerAssociatedProjectsService in the context of a bulk assignment.
class AssignRunnerService
# @param [Ci::Runner] runner: the runner to assign to a project
# @param [Project] project: the new project to assign the runner to
# @param [User] user: the user performing the operation
- def initialize(runner, project, user)
+ # @param [Boolean] quiet: true if service should avoid side-effects, such as logging
+ # (e.g. when used by another service)
+ def initialize(runner, project, user, quiet: false)
@runner = runner
@project = project
@user = user
+ @quiet = quiet
end
def execute
- unless @user.present? && @user.can?(:assign_runner, @runner)
- return ServiceResponse.error(message: 'user not allowed to assign runner', http_status: :forbidden)
- end
-
- unless @user.can?(:register_project_runners, @project)
- return ServiceResponse.error(message: 'user not allowed to add runners to project', http_status: :forbidden)
- end
+ response = validate
+ return response if response.error?
if @runner.assign_to(@project, @user)
ServiceResponse.success
else
- ServiceResponse.error(message: 'failed to assign runner')
+ ServiceResponse.error(
+ message: @runner.errors.full_messages_for(:assign_to).presence || _('failed to assign runner to project'),
+ reason: :runner_error)
end
end
private
- attr_reader :runner, :project, :user
+ attr_reader :runner, :project, :user, :quiet
+
+ def validate
+ unless @user.present? && @user.can?(:assign_runner, @runner)
+ return ServiceResponse.error(message: _('user not allowed to assign runner'),
+ reason: :not_authorized_to_assign_runner)
+ end
+
+ unless @user.can?(:create_runner, @project)
+ return ServiceResponse.error(message: _('user is not authorized to add runners to project'),
+ reason: :not_authorized_to_add_runner_in_project)
+ end
+
+ if runner.owner_project && project.organization_id != runner.owner_project.organization_id
+ return ServiceResponse.error(message: _('runner can only be assigned to projects in the same organization'),
+ reason: :project_not_in_same_organization)
+ end
+
+ ServiceResponse.success
+ end
end
end
end
diff --git a/app/services/ci/runners/reconcile_existing_runner_versions_service.rb b/app/services/ci/runners/reconcile_existing_runner_versions_service.rb
index 1950d82845b..0f67bf636b4 100644
--- a/app/services/ci/runners/reconcile_existing_runner_versions_service.rb
+++ b/app/services/ci/runners/reconcile_existing_runner_versions_service.rb
@@ -27,7 +27,7 @@ module Ci
def insert_runner_versions
versions_from_runners = Set[]
new_record_count = 0
- Ci::Runner.distinct_each_batch(column: :version, of: VERSION_BATCH_SIZE) do |version_batch|
+ Ci::RunnerManager.distinct_each_batch(column: :version, of: VERSION_BATCH_SIZE) do |version_batch|
batch_versions = version_batch.pluck(:version).to_set
versions_from_runners += batch_versions
diff --git a/app/services/ci/runners/register_runner_service.rb b/app/services/ci/runners/register_runner_service.rb
index 0c13c32e236..da9c555e8fb 100644
--- a/app/services/ci/runners/register_runner_service.rb
+++ b/app/services/ci/runners/register_runner_service.rb
@@ -48,6 +48,8 @@ module Ci
elsif runner_registrar_valid?('group') && group = ::Group.find_by_runners_token(registration_token)
# Create a group runner
{ runner_type: :group_type, groups: [group] }
+ elsif registration_token.present? && !Gitlab::CurrentSettings.allow_runner_registration_token
+ {} # Will result in a :runner_registration_disallowed response
end
end
strong_memoize_attr :attrs_from_token
@@ -64,6 +66,8 @@ module Ci
end
def runner_registration_token_valid?(registration_token)
+ return false if registration_token.nil? || Gitlab::CurrentSettings.runners_registration_token.nil?
+
ActiveSupport::SecurityUtils.secure_compare(registration_token, Gitlab::CurrentSettings.runners_registration_token)
end
diff --git a/app/services/ci/runners/set_runner_associated_projects_service.rb b/app/services/ci/runners/set_runner_associated_projects_service.rb
index 3608fdfac71..cc06669a560 100644
--- a/app/services/ci/runners/set_runner_associated_projects_service.rb
+++ b/app/services/ci/runners/set_runner_associated_projects_service.rb
@@ -14,7 +14,8 @@ module Ci
def execute
unless current_user&.can?(:assign_runner, runner)
- return ServiceResponse.error(message: 'user not allowed to assign runner', http_status: :forbidden)
+ return ServiceResponse.error(message: _('user not allowed to assign runner'),
+ reason: :not_authorized_to_assign_runner)
end
return ServiceResponse.success if project_ids.nil?
@@ -25,7 +26,7 @@ module Ci
private
def set_associated_projects
- new_project_ids = [runner.owner_project.id] + project_ids
+ new_project_ids = [runner.owner_project&.id].compact + project_ids
response = ServiceResponse.success
runner.transaction do
@@ -44,12 +45,17 @@ module Ci
def associate_new_projects(new_project_ids, current_project_ids)
missing_projects = Project.id_in(new_project_ids - current_project_ids)
- unless missing_projects.all? { |project| current_user.can?(:register_project_runners, project) }
- return ServiceResponse.error(message: 'user is not authorized to add runners to project')
- end
+ error_responses = missing_projects.map do |project|
+ Ci::Runners::AssignRunnerService.new(runner, project, current_user, quiet: true)
+ end.map(&:execute).select(&:error?)
+
+ if error_responses.any?
+ return error_responses.first if error_responses.count == 1
- unless missing_projects.all? { |project| runner.assign_to(project, current_user) }
- return ServiceResponse.error(message: 'failed to assign projects to runner')
+ return ServiceResponse.error(
+ message: error_responses.map(&:message).uniq,
+ reason: :multiple_errors
+ )
end
ServiceResponse.success
@@ -65,7 +71,7 @@ module Ci
.all?(&:destroyed?)
return ServiceResponse.success if all_destroyed
- ServiceResponse.error(message: 'failed to destroy runner project')
+ ServiceResponse.error(message: _('failed to destroy runner project'), reason: :failed_runner_project_destroy)
end
attr_reader :runner, :current_user, :project_ids
diff --git a/app/services/ci/stuck_builds/drop_canceling_service.rb b/app/services/ci/stuck_builds/drop_canceling_service.rb
new file mode 100644
index 00000000000..3a898130f8b
--- /dev/null
+++ b/app/services/ci/stuck_builds/drop_canceling_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Ci
+ module StuckBuilds
+ class DropCancelingService
+ include DropHelpers
+
+ TIMEOUT = 1.hour
+
+ def execute
+ Gitlab::AppLogger.info "#{self.class}: Cleaning canceling, timed-out builds"
+
+ drop(canceling_timed_out_builds, failure_reason: :stuck_or_timeout_failure)
+ end
+
+ private
+
+ def canceling_timed_out_builds
+ Ci::Build
+ .canceling
+ .created_at_before(TIMEOUT.ago)
+ .updated_at_before(TIMEOUT.ago)
+ .order(created_at: :asc, project_id: :asc) # rubocop:disable CodeReuse/ActiveRecord -- query optimization
+ end
+ end
+ end
+end
diff --git a/app/services/ci/trigger_downstream_pipeline_service.rb b/app/services/ci/trigger_downstream_pipeline_service.rb
new file mode 100644
index 00000000000..05f69613536
--- /dev/null
+++ b/app/services/ci/trigger_downstream_pipeline_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Ci
+ # Enqueues the downstream pipeline worker.
+ class TriggerDownstreamPipelineService
+ def initialize(bridge)
+ @bridge = bridge
+ @current_user = bridge.user
+ @project = bridge.project
+ @pipeline = bridge.pipeline
+ end
+
+ def execute
+ unless bridge.triggers_downstream_pipeline?
+ return ServiceResponse.success(message: 'Does not trigger a downstream pipeline')
+ end
+
+ if rate_limit_throttled?
+ bridge.drop!(:reached_downstream_pipeline_trigger_rate_limit)
+
+ return ServiceResponse.error(message: 'Reached downstream pipeline trigger rate limit')
+ end
+
+ CreateDownstreamPipelineWorker.perform_async(bridge.id)
+
+ ServiceResponse.success(message: 'Downstream pipeline enqueued')
+ end
+
+ private
+
+ attr_reader :bridge, :current_user, :project, :pipeline
+
+ def rate_limit_throttled?
+ scope = [project, current_user, pipeline.sha]
+
+ ::Gitlab::ApplicationRateLimiter.throttled?(:downstream_pipeline_trigger, scope: scope).tap do |throttled|
+ create_throttled_log_entry if throttled
+ end
+ end
+
+ def create_throttled_log_entry
+ ::Gitlab::AppJsonLogger.info(
+ class: self.class.name,
+ project_id: project.id,
+ current_user_id: current_user.id,
+ pipeline_sha: pipeline.sha,
+ subscription_plan: project.actual_plan_name,
+ downstream_type: bridge.triggers_child_pipeline? ? 'child' : 'multi-project',
+ message: 'Activated downstream pipeline trigger rate limit'
+ )
+ end
+ end
+end
diff --git a/app/services/ci/unlock_pipeline_service.rb b/app/services/ci/unlock_pipeline_service.rb
index bd42871ffbe..e0e4004f4b2 100644
--- a/app/services/ci/unlock_pipeline_service.rb
+++ b/app/services/ci/unlock_pipeline_service.rb
@@ -86,10 +86,12 @@ module Ci
builds_relation.each_batch(of: BATCH_SIZE) do |builds|
# rubocop: disable CodeReuse/ActiveRecord
- Ci::JobArtifact.where(job_id: builds.pluck(:id)).each_batch(of: BATCH_SIZE) do |job_artifacts|
- unlocked_count = Ci::JobArtifact
- .where(id: job_artifacts.pluck(:id))
- .update_all(locked: :unlocked)
+ Ci::JobArtifact.where(job_id: builds.pluck(:id), partition_id: partition_id)
+ .each_batch(of: BATCH_SIZE) do |job_artifacts|
+ unlocked_count = Ci::JobArtifact.where(
+ id: job_artifacts.pluck(:id),
+ partition_id: partition_id
+ ).update_all(locked: :unlocked)
@unlocked_job_artifacts_count ||= 0
@unlocked_job_artifacts_count += unlocked_count
@@ -110,6 +112,12 @@ module Ci
end
end
+ # All the partitionable entities connected to a pipeline
+ # belong to the same partition as the pipeline.
+ def partition_id
+ pipeline.partition_id
+ end
+
def unlock_pipeline_artifacts
@unlocked_pipeline_artifacts_count = pipeline.pipeline_artifacts.update_all(locked: :unlocked)
end
diff --git a/app/services/ci/update_build_names_service.rb b/app/services/ci/update_build_names_service.rb
new file mode 100644
index 00000000000..d040ef40785
--- /dev/null
+++ b/app/services/ci/update_build_names_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdateBuildNamesService
+ attr_reader :pipeline
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def execute
+ scope = pipeline.builds.latest
+ iterator = Gitlab::Pagination::Keyset::Iterator.new(scope: scope)
+
+ iterator.each_batch(of: 100) do |records|
+ upsert_records(records)
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord -- plucking attributes is more efficient than loading the records
+ # rubocop: disable Database/AvoidUsingPluckWithoutLimit -- plucking on batch
+ def upsert_records(batch)
+ keys = %i[build_id partition_id name project_id]
+
+ builds_upsert_data =
+ batch
+ .pluck(:id, :partition_id, :name, :project_id)
+ .map { |values| Hash[keys.zip(values)] }
+
+ return unless builds_upsert_data.any?
+
+ Ci::BuildName.upsert_all(builds_upsert_data, unique_by: [:build_id, :partition_id])
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ # rubocop: enable Database/AvoidUsingPluckWithoutLimit
+ end
+end
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index 40941dd4cd0..cfe841bd13f 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -50,18 +50,18 @@ module Ci
end
##
- # Add shared runner build tracking entry (used for queuing).
+ # Add runner build tracking entry (used for queuing and for runner fleet dashboard).
#
def track(build, transition)
- return unless build.shared_runner_build?
+ return if build.runner.nil?
raise InvalidQueueTransition unless transition.to == 'running'
transition.within_transaction do
- result = ::Ci::RunningBuild.upsert_shared_runner_build!(build)
+ result = ::Ci::RunningBuild.upsert_build!(build)
unless result.empty?
- metrics.increment_queue_operation(:shared_runner_build_new)
+ metrics.increment_queue_operation(:shared_runner_build_new) if build.shared_runner_build?
result.rows.dig(0, 0)
end
@@ -69,11 +69,10 @@ module Ci
end
##
- # Remove a runtime build tracking entry for a shared runner build (used for
- # queuing).
+ # Remove a runtime build tracking entry for a runner build (used for queuing and for runner fleet dashboard).
#
def untrack(build, transition)
- return unless build.shared_runner_build?
+ return if build.runner.nil?
raise InvalidQueueTransition unless transition.from == 'running'
@@ -81,7 +80,7 @@ module Ci
removed = build.all_runtime_metadata.delete_all
if removed > 0
- metrics.increment_queue_operation(:shared_runner_build_done)
+ metrics.increment_queue_operation(:shared_runner_build_done) if build.shared_runner_build?
build.id
end
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index bd76f6dbda8..b233fa69bfa 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -217,8 +217,7 @@ module Ci
end
def chunks_migration_enabled?
- ::Feature.enabled?(:ci_enable_live_trace, build.project) &&
- ::Feature.enabled?(:ci_accept_trace, build.project, type: :ops)
+ ::Feature.enabled?(:ci_enable_live_trace, build.project)
end
def log_invalid_chunks?
diff --git a/app/services/ci/update_group_pending_build_service.rb b/app/services/ci/update_group_pending_build_service.rb
new file mode 100644
index 00000000000..209c9dcc5e9
--- /dev/null
+++ b/app/services/ci/update_group_pending_build_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdateGroupPendingBuildService
+ BATCH_SIZE = 500
+ BATCH_QUIET_PERIOD = 2.seconds
+
+ def initialize(group, update_params)
+ @group = group
+ @update_params = update_params.symbolize_keys
+ end
+
+ def execute
+ Ci::UpdatePendingBuildService.new(@group, @update_params).execute
+
+ @group.descendants.each_batch(of: BATCH_SIZE) do |subgroups|
+ subgroups.each do |subgroup|
+ Ci::UpdatePendingBuildService.new(subgroup, update_params_for_group(subgroup)).execute
+ end
+
+ sleep BATCH_QUIET_PERIOD
+ end
+ end
+
+ private
+
+ def update_params_for_group(group)
# Update the params with a possibly updated version from Ci::PendingBuild.namespace_transfer_params
+ transfer_params = Ci::PendingBuild.namespace_transfer_params(group)
+ @update_params.merge(transfer_params.slice(*@update_params.keys))
+ end
+ end
+end
diff --git a/app/services/ci/update_instance_variables_service.rb b/app/services/ci/update_instance_variables_service.rb
index 2f941118a1c..a1cee950d21 100644
--- a/app/services/ci/update_instance_variables_service.rb
+++ b/app/services/ci/update_instance_variables_service.rb
@@ -7,7 +7,8 @@ module Ci
class UpdateInstanceVariablesService
UNASSIGNABLE_KEYS = %w[id _destroy].freeze
- def initialize(params)
+ def initialize(params, current_user)
+ @current_user = current_user
@params = params[:variables_attributes]
end
@@ -22,7 +23,7 @@ module Ci
private
- attr_reader :params
+ attr_reader :params, :current_user
def existing_records_by_id
@existing_records_by_id ||= Ci::InstanceVariable
@@ -49,20 +50,29 @@ module Ci
end
end
+ # overridden in EE
+ def audit_change(instance_variable); end
+
def persist_records
+ changes = []
+ success = false
+
Ci::InstanceVariable.transaction do
- success = @records.map do |record|
+ changes = @records.map do |record|
if record.marked_for_destruction?
- record.destroy
+ { action: record.destroy, record: record }
else
- record.save
+ { action: record.save, record: record }
end
- end.all?
+ end
+ success = changes.all? { |change| change[:action] }
raise ActiveRecord::Rollback unless success
-
- success
end
+
+ changes.each { |change| audit_change change[:record] }
+
+ success
end
def has_destroy_flag?(hash)
@@ -70,3 +80,4 @@ module Ci
end
end
end
+Ci::UpdateInstanceVariablesService.prepend_mod
diff --git a/app/services/ci/update_pending_build_service.rb b/app/services/ci/update_pending_build_service.rb
index 2118dbcc19e..40dd62d9408 100644
--- a/app/services/ci/update_pending_build_service.rb
+++ b/app/services/ci/update_pending_build_service.rb
@@ -35,7 +35,7 @@ module Ci
def validate_params!
extra_params = @update_params.keys - VALID_PARAMS
- raise InvalidParamsError, "Unvalid params: #{extra_params.join(', ')}" unless extra_params.empty?
+ raise InvalidParamsError, "Invalid params: #{extra_params.join(', ')}" if extra_params.any?
true
end
diff --git a/app/services/click_house/sync_strategies/audit_event_sync_strategy.rb b/app/services/click_house/sync_strategies/audit_event_sync_strategy.rb
new file mode 100644
index 00000000000..73e23f908d1
--- /dev/null
+++ b/app/services/click_house/sync_strategies/audit_event_sync_strategy.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+module ClickHouse
+ module SyncStrategies
+ class AuditEventSyncStrategy < BaseSyncStrategy
+ def execute(identifier)
+ @serialized_model = build_serialized_class(identifier)
+ @non_serialized_model = build_non_serialized_class(identifier)
+
+ super()
+ end
+
+ private
+
+ def build_serialized_class(identifier)
+ Class.new(ApplicationRecord) do
+ self.table_name = identifier
+
+ include EachBatch
+ self.primary_key = :id
+
+ serialize :details, Hash
+ end
+ end
+
+ def build_non_serialized_class(identifier)
+ Class.new(ApplicationRecord) do
+ self.table_name = identifier
+
+ include EachBatch
+ self.primary_key = :id
+
+ attr_accessor :casted_created_at
+ end
+ end
+
+ def model_class
+ @serialized_model
+ end
+
+ def enabled?
+ super && Feature.enabled?(:sync_audit_events_to_clickhouse, type: :gitlab_com_derisk)
+ end
+
+ def transform_row(row)
+ convert_to_non_serialized_model(row)
+ end
+
+ def convert_to_non_serialized_model(serialized_model)
+ non_serialized_model = @non_serialized_model.new(serialized_model.attributes)
+ non_serialized_model.details = serialized_model.details.to_json
+ non_serialized_model
+ end
+
+ def csv_mapping
+ {
+ id: :id,
+ author_id: :author_id,
+ author_name: :author_name,
+ details: :details,
+ entity_id: :entity_id,
+ entity_path: :entity_path,
+ entity_type: :entity_type,
+ ip_address: :ip_address,
+ target_details: :target_details,
+ target_id: :target_id,
+ target_type: :target_type,
+ created_at: :casted_created_at
+ }
+ end
+
+ def projections
+ [
+ :id,
+ :author_id,
+ :author_name,
+ :details,
+ :entity_id,
+ :entity_path,
+ :entity_type,
+ :ip_address,
+ :target_details,
+ :target_id,
+ :target_type,
+ 'EXTRACT(epoch FROM created_at) AS casted_created_at'
+ ]
+ end
+
+ def insert_query
+ <<~SQL.squish
+ INSERT INTO audit_events (#{csv_mapping.keys.join(', ')})
+ SETTINGS async_insert=1, wait_for_async_insert=1 FORMAT CSV
+ SQL
+ end
+ end
+ end
+end
diff --git a/app/services/click_house/sync_strategies/base_sync_strategy.rb b/app/services/click_house/sync_strategies/base_sync_strategy.rb
index 54f0f084d05..2a0b4cf7062 100644
--- a/app/services/click_house/sync_strategies/base_sync_strategy.rb
+++ b/app/services/click_house/sync_strategies/base_sync_strategy.rb
@@ -48,7 +48,7 @@ module ClickHouse
@context ||= ClickHouse::RecordSyncContext.new(
last_record_id: ClickHouse::SyncCursor.cursor_for(model_class.table_name),
max_records_per_batch: INSERT_BATCH_SIZE,
- runtime_limiter: Analytics::CycleAnalytics::RuntimeLimiter.new(MAX_RUNTIME)
+ runtime_limiter: Gitlab::Metrics::RuntimeLimiter.new(MAX_RUNTIME)
)
end
diff --git a/app/services/click_house/sync_strategies/event_sync_strategy.rb b/app/services/click_house/sync_strategies/event_sync_strategy.rb
deleted file mode 100644
index 3e86e8c52bc..00000000000
--- a/app/services/click_house/sync_strategies/event_sync_strategy.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-module ClickHouse
- module SyncStrategies
- class EventSyncStrategy < BaseSyncStrategy
- # transforms the traversal_ids to a String:
- # Example: group_id/subgroup_id/group_or_projectnamespace_id/
- PATH_COLUMN = <<~SQL
- (
- CASE
- WHEN project_id IS NOT NULL THEN (SELECT array_to_string(traversal_ids, '/') || '/' FROM namespaces WHERE id = (SELECT project_namespace_id FROM projects WHERE id = events.project_id LIMIT 1) LIMIT 1)
- WHEN group_id IS NOT NULL THEN (SELECT array_to_string(traversal_ids, '/') || '/' FROM namespaces WHERE id = events.group_id LIMIT 1)
- ELSE ''
- END
- ) AS path
- SQL
-
- private
-
- def csv_mapping
- {
- id: :id,
- path: :path,
- author_id: :author_id,
- target_id: :target_id,
- target_type: :target_type,
- action: :raw_action,
- created_at: :casted_created_at,
- updated_at: :casted_updated_at
- }
- end
-
- def projections
- [
- :id,
- PATH_COLUMN,
- :author_id,
- :target_id,
- :target_type,
- 'action AS raw_action',
- 'EXTRACT(epoch FROM created_at) AS casted_created_at',
- 'EXTRACT(epoch FROM updated_at) AS casted_updated_at'
- ]
- end
-
- def insert_query
- <<~SQL.squish
- INSERT INTO events (#{csv_mapping.keys.join(', ')})
- SETTINGS async_insert=1, wait_for_async_insert=1 FORMAT CSV
- SQL
- end
-
- def model_class
- ::Event
- end
-
- def enabled?
- super && Feature.enabled?(:event_sync_worker_for_click_house)
- end
- end
- end
-end
diff --git a/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb b/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb
index 8b967a2d551..62fb5ccbc30 100644
--- a/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb
+++ b/app/services/cloud_seed/google_cloud/create_cloudsql_instance_service.rb
@@ -34,12 +34,12 @@ module CloudSeed
current_user.id,
project.id,
{
- 'google_oauth2_token': google_oauth2_token,
- 'gcp_project_id': gcp_project_id,
- 'instance_name': instance_name,
- 'database_version': database_version,
- 'environment_name': environment_name,
- 'is_protected': protected?
+ google_oauth2_token: google_oauth2_token,
+ gcp_project_id: gcp_project_id,
+ instance_name: instance_name,
+ database_version: database_version,
+ environment_name: environment_name,
+ is_protected: protected?
}
)
end
diff --git a/app/services/clusters/agent_tokens/track_usage_service.rb b/app/services/clusters/agent_tokens/track_usage_service.rb
index 18fe236c44d..6359a009f52 100644
--- a/app/services/clusters/agent_tokens/track_usage_service.rb
+++ b/app/services/clusters/agent_tokens/track_usage_service.rb
@@ -4,7 +4,7 @@ module Clusters
module AgentTokens
class TrackUsageService
# The `UPDATE_USED_COLUMN_EVERY` defines how often the token DB entry can be updated
- UPDATE_USED_COLUMN_EVERY = (40.minutes..55.minutes)
+ UPDATE_USED_COLUMN_EVERY = ((40.minutes)..(55.minutes))
delegate :agent, to: :token
@@ -23,6 +23,10 @@ module Clusters
# Use update_column so updated_at is skipped
token.update_columns(track_values)
end
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(e, agent_id: token.agent_id)
+
+ ServiceResponse.error(message: e.message)
end
private
diff --git a/app/services/clusters/agents/authorizations/ci_access/filter_service.rb b/app/services/clusters/agents/authorizations/ci_access/filter_service.rb
index cd08aaa12d4..3ac3170a2b7 100644
--- a/app/services/clusters/agents/authorizations/ci_access/filter_service.rb
+++ b/app/services/clusters/agents/authorizations/ci_access/filter_service.rb
@@ -5,20 +5,26 @@ module Clusters
module Authorizations
module CiAccess
class FilterService
- def initialize(authorizations, filter_params)
+ def initialize(authorizations, filter_params, project)
@authorizations = authorizations
@filter_params = filter_params
+ @project = project
@environments_matcher = {}
end
def execute
- filter_by_environment(authorizations)
+ filtered_authorizations = filter_by_environment(authorizations)
+ if Feature.enabled?(:kubernetes_agent_protected_branches, project)
+ filtered_authorizations = filter_protected_ref(filtered_authorizations)
+ end
+
+ filtered_authorizations
end
private
- attr_reader :authorizations, :filter_params
+ attr_reader :authorizations, :filter_params, :project
def filter_by_environment(auths)
return auths unless filter_by_environment?
@@ -47,6 +53,26 @@ module Clusters
def environments_matcher(environment_pattern)
@environments_matcher[environment_pattern] ||= ::Gitlab::Ci::EnvironmentMatcher.new(environment_pattern)
end
+
+ def filter_protected_ref(authorizations)
+ # we deny all authorizations if protected_ref is not set, since we can't determine whether the branch is protected
+ return [] unless protected_ref_filter_present?
+
+ # when the branch is protected we don't need to check the authorization settings
+ return authorizations if filter_params[:protected_ref]
+
+ authorizations.reject do |authorization|
+ only_run_on_protected_ref?(authorization)
+ end
+ end
+
+ def protected_ref_filter_present?
+ filter_params.has_key?(:protected_ref)
+ end
+
+ def only_run_on_protected_ref?(authorization)
+ authorization.config['protected_branches_only']
+ end
end
end
end
diff --git a/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb b/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb
index 047a0725a2c..305eec7f7ca 100644
--- a/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb
+++ b/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb
@@ -7,7 +7,7 @@ module Clusters
class RefreshService
include Gitlab::Utils::StrongMemoize
- AUTHORIZED_ENTITY_LIMIT = 100
+ AUTHORIZED_ENTITY_LIMIT = 500
delegate :project, to: :agent, private: true
delegate :root_ancestor, to: :project, private: true
diff --git a/app/services/clusters/agents/authorizations/user_access/refresh_service.rb b/app/services/clusters/agents/authorizations/user_access/refresh_service.rb
index 4c3d059777a..ece49c936b1 100644
--- a/app/services/clusters/agents/authorizations/user_access/refresh_service.rb
+++ b/app/services/clusters/agents/authorizations/user_access/refresh_service.rb
@@ -7,7 +7,7 @@ module Clusters
class RefreshService
include Gitlab::Utils::StrongMemoize
- AUTHORIZED_ENTITY_LIMIT = 100
+ AUTHORIZED_ENTITY_LIMIT = 500
delegate :project, to: :agent, private: true
delegate :root_ancestor, to: :project, private: true
diff --git a/app/services/clusters/agents/create_service.rb b/app/services/clusters/agents/create_service.rb
index 568f168d63b..052f4caae5f 100644
--- a/app/services/clusters/agents/create_service.rb
+++ b/app/services/clusters/agents/create_service.rb
@@ -3,15 +3,15 @@
module Clusters
module Agents
class CreateService < BaseService
- def execute(name:)
+ def execute
return error_no_permissions unless cluster_agent_permissions?
- agent = ::Clusters::Agent.new(name: name, project: project, created_by_user: current_user)
+ agent = ::Clusters::Agent.new(name: params[:name], project: project, created_by_user: current_user)
if agent.save
- success.merge(cluster_agent: agent)
+ ServiceResponse.new(status: :success, payload: { cluster_agent: agent }, reason: :created)
else
- error(agent.errors.full_messages)
+ ServiceResponse.error(message: agent.errors.full_messages)
end
end
@@ -22,8 +22,12 @@ module Clusters
end
def error_no_permissions
- error(s_('ClusterAgent|You have insufficient permissions to create a cluster agent for this project'))
+ ServiceResponse.error(
+ message: s_('ClusterAgent|You have insufficient permissions to create a cluster agent for this project')
+ )
end
end
end
end
+
+Clusters::Agents::CreateService.prepend_mod
diff --git a/app/services/clusters/agents/create_url_configuration_service.rb b/app/services/clusters/agents/create_url_configuration_service.rb
new file mode 100644
index 00000000000..e6607c29f66
--- /dev/null
+++ b/app/services/clusters/agents/create_url_configuration_service.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class CreateUrlConfigurationService
+ attr_reader :agent, :current_user, :params
+
+ def initialize(agent:, current_user:, params:)
+ @agent = agent
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ return error_receptive_agents_disabled unless receptive_agents_enabled?
+ return error_no_permissions unless cluster_agent_permissions?
+ return error_already_receptive if agent.is_receptive
+
+ config = ::Clusters::Agents::UrlConfiguration.new(
+ agent: agent,
+ project: project,
+ created_by_user: current_user,
+ url: params[:url],
+ ca_cert: params[:ca_cert],
+ tls_host: params[:tls_host]
+ )
+
+ if params[:client_key]
+ config.client_key = params[:client_key]
+ config.client_cert = params[:client_cert]
+ else
+ private_key = Ed25519::SigningKey.generate
+ public_key = private_key.verify_key
+
+ config.private_key = private_key.to_bytes
+ config.public_key = public_key.to_bytes
+ end
+
+ if config.save
+ ServiceResponse.new(status: :success, payload: { url_configuration: config }, reason: :created)
+ else
+ ServiceResponse.error(message: config.errors.full_messages)
+ end
+ end
+
+ private
+
+ delegate :project, to: :agent
+
+ def cluster_agent_permissions?
+ current_user.can?(:admin_pipeline, project) && current_user.can?(:create_cluster, project)
+ end
+
+ def receptive_agents_enabled?
+ ::Gitlab::CurrentSettings.receptive_cluster_agents_enabled
+ end
+
+ def error_receptive_agents_disabled
+ ServiceResponse.error(
+ message: s_('ClusterAgent|Receptive agents are disabled for this GitLab instance')
+ )
+ end
+
+ def error_already_receptive
+ ServiceResponse.error(
+ message: s_('ClusterAgent|URL configuration already exists for this agent')
+ )
+ end
+
+ def error_no_permissions
+ ServiceResponse.error(
+ message: s_('ClusterAgent|You have insufficient permissions to create a URL configuration for this agent')
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/agents/delete_service.rb b/app/services/clusters/agents/delete_service.rb
index 2132dffa606..bf085455587 100644
--- a/app/services/clusters/agents/delete_service.rb
+++ b/app/services/clusters/agents/delete_service.rb
@@ -3,7 +3,9 @@
module Clusters
module Agents
class DeleteService < ::BaseContainerService
- def execute(cluster_agent)
+ def execute
+ cluster_agent = params[:cluster_agent]
+
return error_no_permissions unless current_user.can?(:admin_cluster, cluster_agent)
if cluster_agent.destroy
@@ -21,3 +23,5 @@ module Clusters
end
end
end
+
+Clusters::Agents::DeleteService.prepend_mod
diff --git a/app/services/clusters/agents/delete_url_configuration_service.rb b/app/services/clusters/agents/delete_url_configuration_service.rb
new file mode 100644
index 00000000000..909c20ece91
--- /dev/null
+++ b/app/services/clusters/agents/delete_url_configuration_service.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class DeleteUrlConfigurationService
+ attr_reader :agent, :current_user, :url_configuration
+
+ def initialize(agent:, current_user:, url_configuration:)
+ @agent = agent
+ @current_user = current_user
+ @url_configuration = url_configuration
+ end
+
+ def execute
+ return error_receptive_agents_disabled unless receptive_agents_enabled?
+ return error_no_permissions unless current_user.can?(:admin_cluster, agent)
+
+ if url_configuration.destroy
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: url_configuration.errors.full_messages)
+ end
+ end
+
+ private
+
+ delegate :project, to: :agent
+
+ def error_no_permissions
+ ServiceResponse.error(
+ message: s_('ClusterAgent|You have insufficient permissions to delete this agent url configuration'))
+ end
+
+ def receptive_agents_enabled?
+ ::Gitlab::CurrentSettings.receptive_cluster_agents_enabled
+ end
+
+ def error_receptive_agents_disabled
+ ServiceResponse.error(
+ message: s_('ClusterAgent|Receptive agents are disabled for this GitLab instance')
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/cohorts_service.rb b/app/services/cohorts_service.rb
index 1b1598b301c..8adc8d3913e 100644
--- a/app/services/cohorts_service.rb
+++ b/app/services/cohorts_service.rb
@@ -57,7 +57,7 @@ class CohortsService
month_totals =
all_months
.map { |activity_month| counts_by_month[[registration_month, activity_month]] }
- .reduce([]) { |result, total| result << result.last.to_i + total.to_i }
+ .reduce([]) { |result, total| result << (result.last.to_i + total.to_i) }
.reverse
overall_total = month_totals.first
diff --git a/app/services/commits/change_service.rb b/app/services/commits/change_service.rb
index 0b97aae9972..696e975af1a 100644
--- a/app/services/commits/change_service.rb
+++ b/app/services/commits/change_service.rb
@@ -9,25 +9,18 @@ module Commits
@message = params[:message]
end
+ def commit_message
+ raise NotImplementedError
+ end
+
private
+ attr_reader :commit
+
def commit_change(action)
- raise NotImplementedError unless repository.respond_to?(action)
-
- # rubocop:disable GitlabSecurity/PublicSend
- message =
- @message || @commit.public_send(:"#{action}_message", current_user)
-
- repository.public_send(
- action,
- current_user,
- @commit,
- @branch_name,
- message,
- start_project: @start_project,
- start_branch_name: @start_branch,
- dry_run: @dry_run
- )
+ message = @message || commit_message
+
+ yield message
rescue Gitlab::Git::Repository::CreateTreeError => ex
type = @commit.change_type_title(current_user)
diff --git a/app/services/commits/cherry_pick_service.rb b/app/services/commits/cherry_pick_service.rb
index 2a634c5ec71..40894cbb436 100644
--- a/app/services/commits/cherry_pick_service.rb
+++ b/app/services/commits/cherry_pick_service.rb
@@ -11,14 +11,28 @@ module Commits
def create_commit!
Gitlab::Git::CrossRepo.new(@project.repository, @source_project.repository).execute(@commit.id) do
- commit_change(:cherry_pick).tap do |sha|
- track_mr_picking(sha)
+ commit_sha = commit_change(:cherry_pick) do |message|
+ perform_cherry_pick(message)
end
+
+ track_mr_picking(commit_sha)
+
+ commit_sha
end
end
private
+ def commit_message
+ message = commit.cherry_pick_message(current_user)
+
+ return message unless ::Feature.enabled?(:web_ui_commit_author_change, project)
+
+ co_authored_trailer = "#{Commit::CO_AUTHORED_TRAILER}: #{commit.author_name} <#{commit.author_email}>"
+
+ "#{message}\n\n#{co_authored_trailer}"
+ end
+
def track_mr_picking(pick_sha)
merge_request = project.merge_requests.by_merge_commit_sha(@commit.sha).first
return unless merge_request
@@ -29,5 +43,19 @@ module Commits
author: current_user
).picked_into_branch(@branch_name, pick_sha)
end
+
+ def perform_cherry_pick(message)
+ author_kwargs =
+ if Feature.enabled?(:web_ui_commit_author_change, project)
+ { author_name: current_user.name, author_email: current_user.email }
+ else
+ {}
+ end
+
+ repository.cherry_pick(current_user, @commit, @branch_name, message,
+ start_project: @start_project, start_branch_name: @start_branch, dry_run: @dry_run,
+ **author_kwargs
+ )
+ end
end
end
diff --git a/app/services/commits/commit_patch_service.rb b/app/services/commits/commit_patch_service.rb
index 4fa6c30e901..657db3fa248 100644
--- a/app/services/commits/commit_patch_service.rb
+++ b/app/services/commits/commit_patch_service.rb
@@ -22,12 +22,11 @@ module Commits
end
def create_commit!
- if @start_branch && new_branch?
- prepare_branch!
- end
+ prepare_branch! if @start_branch && new_branch?
+ target_sha = @start_branch ? project.repository.commit(@start_branch).sha : nil
Gitlab::Git::Patches::CommitPatches
- .new(current_user, project.repository, @branch_name, @patches)
+ .new(current_user, project.repository, @branch_name, @patches, target_sha)
.commit
end
diff --git a/app/services/commits/create_service.rb b/app/services/commits/create_service.rb
index 5fc84e5aad7..c38b709556e 100644
--- a/app/services/commits/create_service.rb
+++ b/app/services/commits/create_service.rb
@@ -34,10 +34,10 @@ module Commits
Gitlab::ErrorTracking.log_exception(ex)
error(ex.message, pass_back: { error_code: ex.error_code })
rescue ValidationError,
- Gitlab::Git::Index::IndexError,
- Gitlab::Git::CommitError,
- Gitlab::Git::PreReceiveError,
- Gitlab::Git::CommandError => ex
+ Gitlab::Git::Index::IndexError,
+ Gitlab::Git::CommitError,
+ Gitlab::Git::PreReceiveError,
+ Gitlab::Git::CommandError => ex
Gitlab::ErrorTracking.log_exception(ex)
error(Gitlab::EncodingHelper.encode_utf8_no_detect(ex.message))
diff --git a/app/services/commits/revert_service.rb b/app/services/commits/revert_service.rb
index dddb8b24eac..951fd62dc21 100644
--- a/app/services/commits/revert_service.rb
+++ b/app/services/commits/revert_service.rb
@@ -3,7 +3,17 @@
module Commits
class RevertService < ChangeService
def create_commit!
- commit_change(:revert)
+ commit_change(:revert) do |message|
+ repository.revert(current_user, @commit, @branch_name, message,
+ start_project: @start_project, start_branch_name: @start_branch, dry_run: @dry_run
+ )
+ end
+ end
+
+ private
+
+ def commit_message
+ commit.revert_message(current_user)
end
end
end
diff --git a/app/services/concerns/base_service_utility.rb b/app/services/concerns/base_service_utility.rb
index 70b223a0289..e1bc59200e3 100644
--- a/app/services/concerns/base_service_utility.rb
+++ b/app/services/concerns/base_service_utility.rb
@@ -52,10 +52,10 @@ module BaseServiceUtility
# message - Error message to include in the Hash
# http_status - Optional HTTP status code override (default: nil)
# pass_back - Additional attributes to be included in the resulting Hash
- def error(message, http_status = nil, pass_back: {})
+ def error(message, http_status = nil, status: :error, pass_back: {})
result = {
message: message,
- status: :error
+ status: status
}.reverse_merge(pass_back)
result[:http_status] = http_status if http_status
diff --git a/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb b/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
index 427aebf397e..3fa3ce9812e 100644
--- a/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
+++ b/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
@@ -4,10 +4,16 @@ module Ci
module JobTokenScope
module EditScopeValidations
ValidationError = Class.new(StandardError)
+ NotFoundError = Class.new(StandardError)
TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND = "The target_project that you are attempting to access does " \
"not exist or you don't have permission to perform this action"
+ TARGET_GROUP_UNAUTHORIZED_OR_UNFOUND = "The target_group that you are attempting to access does " \
+ "not exist or you don't have permission to perform this action"
+
+ TARGET_DOES_NOT_EXIST = 'The target does not exist'
+
def validate_edit!(source_project, target_project, current_user)
unless can?(current_user, :admin_project, source_project)
raise ValidationError, "Insufficient permissions to modify the job token scope"
@@ -17,6 +23,25 @@ module Ci
raise ValidationError, TARGET_PROJECT_UNAUTHORIZED_OR_UNFOUND
end
end
+
+ def validate_group_add!(source_project, target_group, current_user)
+ unless can?(current_user, :admin_project, source_project)
+ raise ValidationError, "Insufficient permissions to modify the job token scope"
+ end
+
+ raise ValidationError, TARGET_GROUP_UNAUTHORIZED_OR_UNFOUND unless can?(current_user, :read_group,
+ target_group)
+ end
+
+ def validate_group_remove!(source_project, current_user)
+ unless can?(current_user, :admin_project, source_project)
+ raise ValidationError, "Insufficient permissions to modify the job token scope"
+ end
+ end
+
+ def validate_target_exists!(target)
+ raise NotFoundError, TARGET_DOES_NOT_EXIST if target.nil?
+ end
end
end
end
diff --git a/app/services/concerns/deploy_token_methods.rb b/app/services/concerns/deploy_token_methods.rb
index 578be53f82c..686bdd48800 100644
--- a/app/services/concerns/deploy_token_methods.rb
+++ b/app/services/concerns/deploy_token_methods.rb
@@ -2,7 +2,9 @@
module DeployTokenMethods
def create_deploy_token_for(entity, current_user, params)
- params[:deploy_token_type] = DeployToken.deploy_token_types["#{entity.class.name.downcase}_type".to_sym]
+ entity_name = entity.class.name.downcase
+ params[:deploy_token_type] = DeployToken.deploy_token_types["#{entity_name}_type".to_sym]
+ params["#{entity_name}_id".to_sym] = entity.id
entity.deploy_tokens.create(params) do |deploy_token|
deploy_token.username = params[:username].presence
diff --git a/app/services/concerns/exclusive_lease_guard.rb b/app/services/concerns/exclusive_lease_guard.rb
index 74acaa0522a..1f557ef70bc 100644
--- a/app/services/concerns/exclusive_lease_guard.rb
+++ b/app/services/concerns/exclusive_lease_guard.rb
@@ -20,14 +20,18 @@ module ExclusiveLeaseGuard
def try_obtain_lease
lease = exclusive_lease.try_obtain
+ Gitlab::Instrumentation::ExclusiveLock.increment_requested_count
+
unless lease
log_lease_taken
return
end
begin
+ lease_start_time = Time.current
yield lease
ensure
+ Gitlab::Instrumentation::ExclusiveLock.add_hold_duration(Time.current - lease_start_time)
release_lease(lease) if lease_release?
end
end
diff --git a/app/services/concerns/group_linkable.rb b/app/services/concerns/group_linkable.rb
index 43d10e01a4a..261e509df43 100644
--- a/app/services/concerns/group_linkable.rb
+++ b/app/services/concerns/group_linkable.rb
@@ -4,6 +4,8 @@ module GroupLinkable
extend ActiveSupport::Concern
def execute
+ remove_unallowed_params
+
return error('Not Found', 404) unless valid_to_create?
build_link
diff --git a/app/services/concerns/integrations/bulk_operation_hashes.rb b/app/services/concerns/integrations/bulk_operation_hashes.rb
deleted file mode 100644
index 3f13c764ebe..00000000000
--- a/app/services/concerns/integrations/bulk_operation_hashes.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-# Returns hashes of attributes suitable for passing to `.insert_all` or `update_all`
-module Integrations
- module BulkOperationHashes
- private
-
- def integration_hash(operation)
- integration
- .to_database_hash
- .merge('inherit_from_id' => integration.inherit_from_id || integration.id)
- .merge(update_timestamps(operation))
- end
-
- def data_fields_hash(operation)
- integration
- .data_fields
- .to_database_hash
- .merge(update_timestamps(operation))
- end
-
- def update_timestamps(operation)
- time_now = Time.current
-
- {
- 'created_at' => (time_now if operation == :create),
- 'updated_at' => time_now
- }.compact
- end
- end
-end
diff --git a/app/services/concerns/integrations/group_test_data.rb b/app/services/concerns/integrations/group_test_data.rb
new file mode 100644
index 00000000000..716580e07b5
--- /dev/null
+++ b/app/services/concerns/integrations/group_test_data.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Integrations
+ module GroupTestData
+ NoDataError = Class.new(ArgumentError)
+
+ private
+
+ def push_events_data
+ Gitlab::DataBuilder::Push.sample_data
+ end
+ end
+end
diff --git a/app/services/concerns/integrations/project_test_data.rb b/app/services/concerns/integrations/project_test_data.rb
index fcef22a8cab..fe13a74c392 100644
--- a/app/services/concerns/integrations/project_test_data.rb
+++ b/app/services/concerns/integrations/project_test_data.rb
@@ -14,12 +14,16 @@ module Integrations
Gitlab::DataBuilder::Push.build_sample(project, current_user)
end
+ def tag_push_events_data
+ Gitlab::DataBuilder::Push.build_sample(project, current_user, is_tag: true)
+ end
+
def note_events_data
note = NotesFinder.new(current_user, project: project, target: project, sort: 'id_desc').execute.first
no_data_error(s_('TestHooks|Ensure the project has notes.')) unless note.present?
- Gitlab::DataBuilder::Note.build(note, current_user)
+ Gitlab::DataBuilder::Note.build(note, current_user, :create)
end
def issues_events_data
@@ -27,7 +31,7 @@ module Integrations
no_data_error(s_('TestHooks|Ensure the project has issues.')) unless issue.present?
- issue.to_hook_data(current_user)
+ issue.to_hook_data(current_user, action: 'open')
end
def merge_requests_events_data
@@ -35,7 +39,7 @@ module Integrations
no_data_error(s_('TestHooks|Ensure the project has merge requests.')) unless merge_request.present?
- merge_request.to_hook_data(current_user)
+ merge_request.to_hook_data(current_user, action: 'open')
end
def job_events_data
@@ -92,5 +96,24 @@ module Integrations
Gitlab::DataBuilder::Emoji.build(award_emoji, current_user, 'award')
end
+
+ def access_tokens_events_data
+ resource_access_token = PersonalAccessToken.new(
+ id: 1,
+ name: 'pat_for_webhook_event',
+ user: project.bots.first,
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ expires_at: 2.days.from_now
+ )
+
+ Gitlab::DataBuilder::ResourceAccessToken.build(resource_access_token, :expiring, project)
+ end
+
+ def current_user_events_data
+ {
+ current_user: current_user
+ }
+ end
end
end
diff --git a/app/services/concerns/integrations/propagation/bulk_operation_hashes.rb b/app/services/concerns/integrations/propagation/bulk_operation_hashes.rb
new file mode 100644
index 00000000000..5ec0b25e9aa
--- /dev/null
+++ b/app/services/concerns/integrations/propagation/bulk_operation_hashes.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# Returns hashes of attributes suitable for passing to `.insert_all` or `update_all`
+module Integrations
+ module Propagation
+ module BulkOperationHashes
+ private
+
+ def integration_hash(operation)
+ integration
+ .to_database_hash
+ .merge('inherit_from_id' => integration.inherit_from_id || integration.id)
+ .merge(update_timestamps(operation))
+ end
+
+ def data_fields_hash(operation)
+ integration
+ .data_fields
+ .to_database_hash
+ .merge(update_timestamps(operation))
+ end
+
+ def update_timestamps(operation)
+ time_now = Time.current
+
+ {
+ 'created_at' => (time_now if operation == :create),
+ 'updated_at' => time_now
+ }.compact
+ end
+ end
+ end
+end
diff --git a/app/services/concerns/search/filter.rb b/app/services/concerns/search/filter.rb
index e234edcfce4..718318cb912 100644
--- a/app/services/concerns/search/filter.rb
+++ b/app/services/concerns/search/filter.rb
@@ -5,7 +5,13 @@ module Search
private
def filters
- { state: params[:state], confidential: params[:confidential], include_archived: params[:include_archived] }
+ {
+ state: params[:state],
+ confidential: params[:confidential],
+ include_archived: params[:include_archived],
+ num_context_lines: params[:num_context_lines]&.to_i,
+ hybrid_similarity: params[:hybrid_similarity]&.to_f
+ }
end
end
end
diff --git a/app/services/concerns/update_repository_storage_methods.rb b/app/services/concerns/update_repository_storage_methods.rb
index 8ed87fdb048..f1b614bfa32 100644
--- a/app/services/concerns/update_repository_storage_methods.rb
+++ b/app/services/concerns/update_repository_storage_methods.rb
@@ -27,19 +27,18 @@ module UpdateRepositoryStorageMethods
return response if response
unless same_filesystem?
+ # Mirror the object pool first, as we'll later provide the pool's disk path as
+ # partitioning hints when mirroring member repositories.
+ mirror_object_pool(destination_storage_name)
mirror_repositories
-
- repository_storage_move.transaction do
- mirror_object_pool(destination_storage_name)
- end
end
- repository_storage_move.finish_replication!
-
repository_storage_move.transaction do
track_repository(destination_storage_name)
end
+ repository_storage_move.finish_replication!
+
remove_old_paths unless same_filesystem?
repository_storage_move.finish_cleanup!
@@ -94,7 +93,14 @@ module UpdateRepositoryStorageMethods
full_path
)
- Repositories::ReplicateService.new(raw_repository).execute(new_repository, type.name)
+ # Provide the object pool's disk path as a partitioning hint to Gitaly. This
+ # ensures Gitaly creates the repository in the same partition as its pool, so
+ # they can be correctly linked.
+ object_pool = repository.project&.pool_repository&.object_pool
+ hint = object_pool ? object_pool.relative_path : ""
+
+ Repositories::ReplicateService.new(raw_repository)
+ .execute(new_repository, type.name, partition_hint: hint)
end
def same_filesystem?
diff --git a/app/services/concerns/users/participable_service.rb b/app/services/concerns/users/participable_service.rb
index f84793d869c..e18b7f5923f 100644
--- a/app/services/concerns/users/participable_service.rb
+++ b/app/services/concerns/users/participable_service.rb
@@ -44,6 +44,10 @@ module Users
end
end
+ def relation_at_search_limit?(users_relation)
+ params[:search] && users_relation.size >= SEARCH_LIMIT
+ end
+
def groups
return [] unless current_user
diff --git a/app/services/concerns/validates_classification_label.rb b/app/services/concerns/validates_classification_label.rb
index ebcf5c24ff8..35bbfe7fdbe 100644
--- a/app/services/concerns/validates_classification_label.rb
+++ b/app/services/concerns/validates_classification_label.rb
@@ -18,7 +18,7 @@ module ValidatesClassificationLabel
def rejection_reason_for_label(label)
reason_from_service = ::Gitlab::ExternalAuthorization.rejection_reason(current_user, label).presence
- reason_from_service || _("Access to '%{classification_label}' not allowed") % { classification_label: label }
+ reason_from_service || (_("Access to '%{classification_label}' not allowed") % { classification_label: label })
end
def classification_label_change?(record, attribute_name)
diff --git a/app/services/concerns/work_items/widgetable_service.rb b/app/services/concerns/work_items/widgetable_service.rb
index 9d1132b1aba..cfbd851671b 100644
--- a/app/services/concerns/work_items/widgetable_service.rb
+++ b/app/services/concerns/work_items/widgetable_service.rb
@@ -2,13 +2,19 @@
module WorkItems
module WidgetableService
+ extend ActiveSupport::Concern
+
+ included do
+ attr_reader :widget_params
+ end
+
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def initialize_callbacks!(work_item)
@callbacks = work_item.widgets.filter_map do |widget|
callback_class = widget.class.try(:callback_class)
- callback_params = @widget_params[widget.class.api_symbol]
+ callback_params = widget_params[widget.class.api_symbol]
- if new_type_excludes_widget?(widget)
+ if new_type_excludes_widget?(widget, work_item.resource_parent)
callback_params = {} if callback_params.nil?
callback_params[:excluded_in_new_type] = true
end
@@ -22,6 +28,14 @@ module WorkItems
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
+ def handle_quick_actions(work_item)
+ # Do not handle quick actions from params[:description] unless the work item is the default Issue.
+ super if work_item.work_item_type == WorkItems::Type.default_by_type(:issue)
+
+ # Handle quick actions from description widget depending on the available widgets for the type
+ handle_widget_quick_actions!(work_item)
+ end
+
def execute_widgets(work_item:, callback:, widget_params: {}, service_params: {})
work_item.widgets.each do |widget|
widget_service(widget, service_params).try(callback, params: widget_params[widget.class.api_symbol] || {})
@@ -49,10 +63,31 @@ module WorkItems
private
- def new_type_excludes_widget?(widget)
+ def new_type_excludes_widget?(widget, resource_parent)
return false unless params[:work_item_type]
- params[:work_item_type].widgets.exclude?(widget.class)
+ params[:work_item_type].widget_classes(resource_parent).exclude?(widget.class)
+ end
+
+ def handle_widget_quick_actions!(work_item)
+ return unless work_item.has_widget?(:description)
+
+ description_widget_params = widget_params[::WorkItems::Widgets::Description.api_symbol]
+ return unless description_widget_params
+
+ merge_quick_actions_into_params!(work_item, params: description_widget_params)
+
+ # When there are residual quick actions, `#handle_quick_actions` will set a description param
+ # with the sanitized description. We need to remove it here so it does not override the description
+ # value we are trying to set from the description widget. This description is also sanitized already
+ # since it uses the same `#merge_quick_actions_into_params!` method.
+ params.delete(:description) if description_widget_params[:description].present?
+
+ # exclude `description` param so that it is not passed into common params after transform_quick_action_params
+ parsed_params = work_item.transform_quick_action_params(description_widget_params.except(:description))
+
+ widget_params.merge!(parsed_params[:widgets])
+ params.merge!(parsed_params[:common])
end
end
end
diff --git a/app/services/container_registry/protection/create_rule_service.rb b/app/services/container_registry/protection/create_rule_service.rb
index 6aa9bd657f6..574f124cd81 100644
--- a/app/services/container_registry/protection/create_rule_service.rb
+++ b/app/services/container_registry/protection/create_rule_service.rb
@@ -5,8 +5,8 @@ module ContainerRegistry
class CreateRuleService < BaseService
ALLOWED_ATTRIBUTES = %i[
repository_path_pattern
- push_protected_up_to_access_level
- delete_protected_up_to_access_level
+ minimum_access_level_for_push
+ minimum_access_level_for_delete
].freeze
def execute
@@ -19,7 +19,7 @@ module ContainerRegistry
project.container_registry_protection_rules.create(params.slice(*ALLOWED_ATTRIBUTES))
unless container_registry_protection_rule.persisted?
- return service_response_error(message: container_registry_protection_rule.errors.full_messages.to_sentence)
+ return service_response_error(message: container_registry_protection_rule.errors.full_messages)
end
ServiceResponse.success(payload: { container_registry_protection_rule: container_registry_protection_rule })
diff --git a/app/services/container_registry/protection/update_rule_service.rb b/app/services/container_registry/protection/update_rule_service.rb
index af74e542ac7..70c29424e6f 100644
--- a/app/services/container_registry/protection/update_rule_service.rb
+++ b/app/services/container_registry/protection/update_rule_service.rb
@@ -7,8 +7,8 @@ module ContainerRegistry
ALLOWED_ATTRIBUTES = %i[
repository_path_pattern
- delete_protected_up_to_access_level
- push_protected_up_to_access_level
+ minimum_access_level_for_delete
+ minimum_access_level_for_push
].freeze
def initialize(container_registry_protection_rule, current_user:, params:)
diff --git a/app/services/database/consistency_check_service.rb b/app/services/database/consistency_check_service.rb
index fee2e79a6cb..63648810615 100644
--- a/app/services/database/consistency_check_service.rb
+++ b/app/services/database/consistency_check_service.rb
@@ -21,7 +21,7 @@ module Database
# It compares up to 25 batches (1000 records / batch), or up to 30 seconds
# for all the batches in total.
#
- # It saves the cursor of the next start_id (cusror) in Redis. If the start_id
+ # It saves the cursor of the next start_id (cursor) in Redis. If the start_id
# wasn't saved in Redis, for example, in the first run, it will choose some random start_id
#
# Example:
diff --git a/app/services/dependency_proxy/auth_token_service.rb b/app/services/dependency_proxy/auth_token_service.rb
index c6c9eb534bb..89e9e7712e7 100644
--- a/app/services/dependency_proxy/auth_token_service.rb
+++ b/app/services/dependency_proxy/auth_token_service.rb
@@ -12,6 +12,10 @@ module DependencyProxy
JSONWebToken::HMACToken.decode(token, ::Auth::DependencyProxyAuthenticationService.secret).first
end
+ # TODO: Rename to make it obvious how it's used in Gitlab::Auth::RequestAuthenticator
+ # which is to return an <object>.<id> that is used as a rack-attack discriminator
+ # that way it cannot be confused with `.user_or_token_from_jwt`
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/454518
def self.user_or_deploy_token_from_jwt(raw_jwt)
token_payload = self.new(raw_jwt).execute
@@ -23,5 +27,34 @@ module DependencyProxy
rescue JWT::DecodeError, JWT::ExpiredSignature, JWT::ImmatureSignature
nil
end
+
+ def self.user_or_token_from_jwt(raw_jwt)
+ token_payload = self.new(raw_jwt).execute
+
+ if token_payload['personal_access_token']
+ get_personal_access_token(token_payload['personal_access_token'])
+ elsif token_payload['group_access_token']
+ # a group access token is a personal access token in disguise
+ get_personal_access_token(token_payload['group_access_token'])
+ elsif token_payload['user_id']
+ get_user(token_payload['user_id'])
+ elsif token_payload['deploy_token']
+ get_deploy_token(token_payload['deploy_token'])
+ end
+ rescue JWT::DecodeError, JWT::ExpiredSignature, JWT::ImmatureSignature
+ nil
+ end
+
+ def self.get_user(user_id)
+ User.find(user_id)
+ end
+
+ def self.get_personal_access_token(raw_token)
+ PersonalAccessTokensFinder.new(state: 'active').find_by_token(raw_token)
+ end
+
+ def self.get_deploy_token(raw_token)
+ DeployToken.active.find_by_token(raw_token)
+ end
end
end
diff --git a/app/services/deployments/link_merge_requests_service.rb b/app/services/deployments/link_merge_requests_service.rb
index 40385418e48..69f807c9922 100644
--- a/app/services/deployments/link_merge_requests_service.rb
+++ b/app/services/deployments/link_merge_requests_service.rb
@@ -61,10 +61,8 @@ module Deployments
# deployment we may end up running a handful of queries to get and insert
# the data.
commits.each_slice(COMMITS_PER_QUERY) do |slice|
- merge_requests =
- project.merge_requests.merged.by_merge_commit_sha(slice)
-
- deployment.link_merge_requests(merge_requests)
+ link_merge_requests_by_merge_commits(slice)
+ link_fast_forward_merge_requests(slice)
# The cherry picked commits are tracked via `notes.commit_id`
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22209
@@ -89,6 +87,29 @@ module Deployments
private
+ def link_merge_requests_by_merge_commits(commits)
+ deployment.link_merge_requests(merge_requests_by_merge_commit_sha(commits))
+ end
+
+ def link_fast_forward_merge_requests(commits)
+ deployment.link_merge_requests(merge_requests_by_head_commit_sha(commits))
+ deployment.link_merge_requests(merge_requests_by_squash_commit_sha(commits))
+ end
+
+ def merge_requests_by_merge_commit_sha(commits)
+ project.merge_requests.merged.by_merge_commit_sha(commits)
+ end
+
+ def merge_requests_by_squash_commit_sha(commits)
+ project.merge_requests.merged.by_squash_commit_sha(commits)
+ end
+
+ def merge_requests_by_head_commit_sha(commits)
+ merge_request_diffs = MergeRequestDiff.by_head_commit_sha(commits)
+
+ project.merge_requests.merged.by_latest_merge_request_diffs(merge_request_diffs)
+ end
+
def project
deployment.project
end
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index 3d78ad80f0f..f1ec99a063b 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -47,6 +47,8 @@ module DesignManagement
end
ServiceResponse.success
+ rescue Gitlab::Git::CommandError => ex
+ error(message: ex.message)
rescue StandardError => error
log_exception(error)
@@ -118,7 +120,9 @@ module DesignManagement
def remove_temporary_branch!
return unless target_repository.branch_exists?(temporary_branch)
- target_repository.rm_branch(git_user, temporary_branch)
+ target_sha = target_repository.commit(temporary_branch).id
+
+ target_repository.rm_branch(git_user, temporary_branch, target_sha: target_sha)
end
# Merge the temporary branch containing the commits to default branch
@@ -268,7 +272,11 @@ module DesignManagement
oids = blobs.values.flat_map(&:values).map(&:lfs_oid)
repository_type = LfsObjectsProject.repository_types[:design]
- new_rows = LfsObject.where(oid: oids).find_each(batch_size: 1000).map do |lfs_object|
+ lfs_objects = oids.each_slice(1000).flat_map do |oids_batch|
+ LfsObject.for_oids(oids_batch).not_linked_to_project(target_project, repository_type: repository_type)
+ end
+
+ new_rows = lfs_objects.compact.map do |lfs_object|
{
project_id: target_project.id,
lfs_object_id: lfs_object.id,
diff --git a/app/services/design_management/delete_designs_service.rb b/app/services/design_management/delete_designs_service.rb
index a6a0f5e0252..c28c4e70a60 100644
--- a/app/services/design_management/delete_designs_service.rb
+++ b/app/services/design_management/delete_designs_service.rb
@@ -4,6 +4,7 @@ module DesignManagement
class DeleteDesignsService < DesignService
include RunsDesignActions
include OnSuccessCallbacks
+ include Gitlab::InternalEventsTracking
def initialize(project, user, params = {})
super
@@ -55,16 +56,12 @@ module DesignManagement
def design_action(design)
on_success do
- counter.count(:delete)
+ track_internal_event('delete_design_management_design', user: current_user, project: project)
end
DesignManagement::DesignAction.new(design, :delete)
end
- def counter
- ::Gitlab::UsageDataCounters::DesignsCounter
- end
-
def formatted_file_list
designs.map { |design| "- #{design.full_path}" }.join("\n")
end
diff --git a/app/services/design_management/save_designs_service.rb b/app/services/design_management/save_designs_service.rb
index f9f2f4bf290..438b07c1b1f 100644
--- a/app/services/design_management/save_designs_service.rb
+++ b/app/services/design_management/save_designs_service.rb
@@ -4,6 +4,7 @@ module DesignManagement
class SaveDesignsService < DesignService
include RunsDesignActions
include OnSuccessCallbacks
+ include Gitlab::InternalEventsTracking
MAX_FILES = 10
@@ -133,12 +134,12 @@ module DesignManagement
if action == :update
::Gitlab::UsageDataCounters::IssueActivityUniqueCounter
.track_issue_designs_modified_action(author: current_user, project: project)
+ track_internal_event('update_design_management_design', user: current_user, project: project)
else
::Gitlab::UsageDataCounters::IssueActivityUniqueCounter
.track_issue_designs_added_action(author: current_user, project: project)
+ track_internal_event('create_design_management_design', user: current_user, project: project)
end
-
- ::Gitlab::UsageDataCounters::DesignsCounter.count(action)
end
end
end
diff --git a/app/services/discussions/resolve_service.rb b/app/services/discussions/resolve_service.rb
index 20b4ec0921f..8f5260986c2 100644
--- a/app/services/discussions/resolve_service.rb
+++ b/app/services/discussions/resolve_service.rb
@@ -83,7 +83,15 @@ module Discussions
def process_auto_merge
return unless discussions_ready_to_merge?
- AutoMergeProcessWorker.perform_async(merge_request.id)
+ if Feature.enabled?(:merge_when_checks_pass, merge_request.project)
+ Gitlab::EventStore.publish(
+ MergeRequests::DiscussionsResolvedEvent.new(
+ data: { current_user_id: current_user.id, merge_request_id: merge_request.id }
+ )
+ )
+ else
+ AutoMergeProcessWorker.perform_async(merge_request.id)
+ end
end
def discussions_ready_to_merge?
diff --git a/app/services/draft_notes/create_service.rb b/app/services/draft_notes/create_service.rb
index e5a070e9db7..40d8edae2d0 100644
--- a/app/services/draft_notes/create_service.rb
+++ b/app/services/draft_notes/create_service.rb
@@ -36,11 +36,22 @@ module DraftNotes
merge_request_activity_counter.track_create_review_note_action(user: current_user)
end
+ after_execute
+
draft_note
end
private
+ def after_execute
+ # Update reviewer state to `REVIEW_STARTED` when a new review has started
+ return unless draft_notes.one?
+
+ ::MergeRequests::UpdateReviewerStateService
+ .new(project: merge_request.project, current_user: current_user)
+ .execute(merge_request, 'review_started')
+ end
+
def base_error(text)
DraftNote.new.tap do |draft|
draft.errors.add(:base, text)
diff --git a/app/services/draft_notes/destroy_service.rb b/app/services/draft_notes/destroy_service.rb
index 6c7b0dfdbd7..0cc1b7f6dd3 100644
--- a/app/services/draft_notes/destroy_service.rb
+++ b/app/services/draft_notes/destroy_service.rb
@@ -10,10 +10,21 @@ module DraftNotes
clear_highlight_diffs_cache(Array.wrap(drafts))
drafts.is_a?(DraftNote) ? drafts.destroy! : drafts.delete_all
+
+ after_execute
end
private
+ def after_execute
+ # Update reviewer state to `UNREVIEWED` when a review is deleted
+ return unless draft_notes.empty?
+
+ ::MergeRequests::UpdateReviewerStateService
+ .new(project: merge_request.project, current_user: current_user)
+ .execute(merge_request, 'unreviewed')
+ end
+
def clear_highlight_diffs_cache(drafts)
merge_request.diffs.clear_cache if unfolded_drafts?(drafts)
end
diff --git a/app/services/draft_notes/publish_service.rb b/app/services/draft_notes/publish_service.rb
index 5ba7f829c8e..5baaf0895f3 100644
--- a/app/services/draft_notes/publish_service.rb
+++ b/app/services/draft_notes/publish_service.rb
@@ -2,13 +2,15 @@
module DraftNotes
class PublishService < DraftNotes::BaseService
- def execute(draft = nil)
- return error('Not allowed to create notes') unless can?(current_user, :create_note, merge_request)
+ def execute(draft: nil, executing_user: nil)
+ executing_user ||= current_user
+
+ return error('Not allowed to create notes') unless can?(executing_user, :create_note, merge_request)
if draft
- publish_draft_note(draft)
+ publish_draft_note(draft, executing_user)
else
- publish_draft_notes
+ publish_draft_notes(executing_user)
merge_request_activity_counter.track_publish_review_action(user: current_user)
end
@@ -20,15 +22,15 @@ module DraftNotes
private
- def publish_draft_note(draft)
- create_note_from_draft(draft)
+ def publish_draft_note(draft, executing_user)
+ create_note_from_draft(draft, executing_user)
draft.delete
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
end
- def publish_draft_notes
- return if draft_notes.empty?
+ def publish_draft_notes(executing_user)
+ return if draft_notes.blank?
review = Review.create!(author: current_user, merge_request: merge_request, project: project)
@@ -36,6 +38,7 @@ module DraftNotes
draft_note.review = review
create_note_from_draft(
draft_note,
+ executing_user,
skip_capture_diff_note_position: true,
skip_keep_around_commits: true,
skip_merge_status_trigger: true
@@ -45,23 +48,22 @@ module DraftNotes
capture_diff_note_positions(created_notes)
keep_around_commits(created_notes)
draft_notes.delete_all
- set_reviewed
notification_service.async.new_review(review)
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
- after_publish(review)
+ after_publish
end
- def create_note_from_draft(draft, skip_capture_diff_note_position: false, skip_keep_around_commits: false, skip_merge_status_trigger: false)
+ def create_note_from_draft(draft, executing_user, skip_capture_diff_note_position: false, skip_keep_around_commits: false, skip_merge_status_trigger: false)
# Make sure the diff file is unfolded in order to find the correct line
# codes.
draft.diff_file&.unfold_diff_lines(draft.original_position)
note_params = draft.publish_params.merge(skip_keep_around_commits: skip_keep_around_commits)
- note = Notes::CreateService.new(draft.project, draft.author, note_params).execute(
+ note = Notes::CreateService.new(project, current_user, note_params).execute(
skip_capture_diff_note_position: skip_capture_diff_note_position,
skip_merge_status_trigger: skip_merge_status_trigger,
- skip_set_reviewed: true
+ executing_user: executing_user
)
set_discussion_resolve_status(note, draft)
@@ -80,12 +82,6 @@ module DraftNotes
end
end
- def set_reviewed
- return if Feature.enabled?(:mr_request_changes, current_user)
-
- ::MergeRequests::UpdateReviewerStateService.new(project: project, current_user: current_user).execute(merge_request, "reviewed")
- end
-
def capture_diff_note_positions(notes)
paths = notes.flat_map do |note|
note.diff_file&.paths if note.diff_note?
@@ -108,12 +104,16 @@ module DraftNotes
# We are allowing this since gitaly call will be created for each sha and
# even though they're unique, there will still be multiple Gitaly calls.
Gitlab::GitalyClient.allow_n_plus_1_calls do
- project.repository.keep_around(*shas)
+ project.repository.keep_around(*shas, source: self.class.name)
end
end
- def after_publish(review)
- # Overridden in EE
+ def after_publish
+ merge_request.assignees.each do |assignee|
+ next unless assignee.merge_request_dashboard_enabled?
+
+ assignee.invalidate_merge_request_cache_counts
+ end
end
end
end
diff --git a/app/services/emails/destroy_service.rb b/app/services/emails/destroy_service.rb
index d211c3470b2..8425c8779f9 100644
--- a/app/services/emails/destroy_service.rb
+++ b/app/services/emails/destroy_service.rb
@@ -5,11 +5,18 @@ module Emails
def execute(email)
raise StandardError, 'Cannot delete primary email' if email.user_primary_email?
- email.destroy && update_secondary_emails!(email.email)
+ return unless email.destroy
+
+ reset_email_in_notification_settings!(email)
+ update_secondary_emails!(email.email)
end
private
+ def reset_email_in_notification_settings!(deleted_email)
+ NotificationSetting.reset_email_for_user!(deleted_email)
+ end
+
def update_secondary_emails!(deleted_email)
result = ::Users::UpdateService.new(@current_user, user: @user).execute do |user|
user.unset_secondary_emails_matching_deleted_email!(deleted_email)
diff --git a/app/services/environments/auto_stop_service.rb b/app/services/environments/auto_stop_service.rb
index 686ba050326..cb9384ec491 100644
--- a/app/services/environments/auto_stop_service.rb
+++ b/app/services/environments/auto_stop_service.rb
@@ -34,8 +34,8 @@ module Environments
Environments::AutoStopWorker.bulk_perform_async_with_contexts(
environments,
- arguments_proc: -> (environment) { environment.id },
- context_proc: -> (environment) { { project: environment.project } }
+ arguments_proc: ->(environment) { environment.id },
+ context_proc: ->(environment) { { project: environment.project } }
)
true
diff --git a/app/services/environments/destroy_service.rb b/app/services/environments/destroy_service.rb
index f1530489a40..db9faf8d8ac 100644
--- a/app/services/environments/destroy_service.rb
+++ b/app/services/environments/destroy_service.rb
@@ -13,7 +13,7 @@ module Environments
unless environment.destroyed?
return ServiceResponse.error(
- message: 'Attemped to destroy the environment but failed'
+ message: 'Attempted to destroy the environment but failed'
)
end
diff --git a/app/services/environments/stop_service.rb b/app/services/environments/stop_service.rb
index 1b2e7ef3cf9..30df95abea9 100644
--- a/app/services/environments/stop_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -20,7 +20,7 @@ module Environments
unless environment.saved_change_to_attribute?(:state)
return ServiceResponse.error(
- message: 'Attemped to stop the environment but failed to change the status',
+ message: 'Attempted to stop the environment but failed to change the status',
payload: { environment: environment }
)
end
@@ -37,7 +37,7 @@ module Environments
end
def execute_for_merge_request_pipeline(merge_request)
- return unless merge_request.actual_head_pipeline&.merge_request?
+ return unless merge_request.diff_head_pipeline&.merge_request?
created_environments = merge_request.created_environments
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 1a4e691a059..1e56cf32fa1 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -257,6 +257,13 @@ class EventCreateService
.cache_last_push_event(event)
Users::ActivityService.new(author: current_user, namespace: namespace, project: project).execute
+
+ Gitlab::EventStore.publish(
+ Users::ActivityEvent.new(data: {
+ user_id: current_user.id,
+ namespace_id: project.namespace_id
+ })
+ )
end
def create_event(resource_parent, current_user, status, attributes = {})
diff --git a/app/services/export_csv/base_service.rb b/app/services/export_csv/base_service.rb
index 84d44fd75fc..cb2efd792b0 100644
--- a/app/services/export_csv/base_service.rb
+++ b/app/services/export_csv/base_service.rb
@@ -5,7 +5,7 @@ module ExportCsv
# Target attachment size before base64 encoding
TARGET_FILESIZE = 15.megabytes
- def initialize(relation, resource_parent, fields = [])
+ def initialize(relation, resource_parent = nil, fields = [])
@objects = relation
@resource_parent = resource_parent
@fields = fields
diff --git a/app/services/files/base_service.rb b/app/services/files/base_service.rb
index 613785d01cc..af681e842e8 100644
--- a/app/services/files/base_service.rb
+++ b/app/services/files/base_service.rb
@@ -18,7 +18,10 @@ module Files
@previous_path = params[:previous_path]
@file_content = params[:file_content]
- @file_content = Base64.decode64(@file_content) if params[:file_content_encoding] == 'base64'
+
+ if params[:file_content_encoding] == 'base64' && @file_content.present?
+ @file_content = Base64.decode64(@file_content)
+ end
@execute_filemode = params[:execute_filemode]
end
diff --git a/app/services/files/create_service.rb b/app/services/files/create_service.rb
index f9ced112896..983667e4d1a 100644
--- a/app/services/files/create_service.rb
+++ b/app/services/files/create_service.rb
@@ -3,7 +3,7 @@
module Files
class CreateService < Files::BaseService
def create_commit!
- transformer = Lfs::FileTransformer.new(project, repository, @branch_name)
+ transformer = Lfs::FileTransformer.new(project, repository, @branch_name, start_branch_name: @start_branch)
result = transformer.new_file(@file_path, @file_content)
@@ -12,6 +12,12 @@ module Files
private
+ def validate!
+ super
+
+ raise_error(_('You must provide a file path')) if @file_path.nil?
+ end
+
def create_transformed_commit(content_or_lfs_pointer)
repository.create_file(
current_user,
diff --git a/app/services/files/update_service.rb b/app/services/files/update_service.rb
index c11917b92ec..073c6af9b29 100644
--- a/app/services/files/update_service.rb
+++ b/app/services/files/update_service.rb
@@ -3,10 +3,18 @@
module Files
class UpdateService < Files::BaseService
def create_commit!
+ transformer = Lfs::FileTransformer.new(project, repository, @branch_name, start_branch_name: @start_branch)
+
+ result = transformer.new_file(@file_path, @file_content)
+
+ create_transformed_commit(result.content)
+ end
+
+ def create_transformed_commit(content_or_lfs_pointer)
repository.update_file(
current_user,
@file_path,
- @file_content,
+ content_or_lfs_pointer,
message: @commit_message,
branch_name: @branch_name,
previous_path: @previous_path,
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index c6214311692..7417f79e603 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -50,7 +50,7 @@ module Git
strong_memoize(:commits_count) do
next threshold_commits.count if
strong_memoized?(:threshold_commits) &&
- threshold_commits.count <= PROCESS_COMMIT_LIMIT
+ threshold_commits.count <= PROCESS_COMMIT_LIMIT
if creating_default_branch?
project.repository.commit_count_for_ref(ref)
@@ -81,7 +81,6 @@ module Git
project.repository.after_push_commit(branch_name)
branch_create_hooks if creating_branch?
- branch_update_hooks if updating_branch?
branch_change_hooks if creating_branch? || updating_branch?
branch_remove_hooks if removing_branch?
@@ -93,12 +92,6 @@ module Git
project.after_create_default_branch if default_branch?
end
- def branch_update_hooks
- # Update the bare repositories info/attributes file using the contents of
- # the default branch's .gitattributes file
- project.repository.copy_gitattributes(ref) if default_branch?
- end
-
def branch_change_hooks
enqueue_process_commit_messages
enqueue_jira_connect_sync_messages
@@ -106,6 +99,7 @@ module Git
end
def branch_remove_hooks
+ enqueue_jira_connect_remove_branches
project.repository.after_remove_branch(expire_cache: false)
end
@@ -175,6 +169,19 @@ module Git
end
end
+ def enqueue_jira_connect_remove_branches
+ return unless project.jira_subscription_exists?
+
+ return unless Atlassian::JiraIssueKeyExtractors::Branch.has_keys?(project, branch_name)
+
+ Integrations::JiraConnect::RemoveBranchWorker.perform_async(
+ project.id,
+ {
+ branch_name: branch_name
+ }
+ )
+ end
+
def filtered_commit_shas
limited_commits.select { |commit| Atlassian::JiraIssueKeyExtractor.has_keys?(commit.safe_message) }.map(&:sha)
end
diff --git a/app/services/git/process_ref_changes_service.rb b/app/services/git/process_ref_changes_service.rb
index d4081fc149b..2a5d94c91a5 100644
--- a/app/services/git/process_ref_changes_service.rb
+++ b/app/services/git/process_ref_changes_service.rb
@@ -9,6 +9,7 @@ module Git
process_changes_by_action(:branch, changes.branch_changes)
process_changes_by_action(:tag, changes.tag_changes)
+ warn_if_over_process_limit(changes.branch_changes + changes.tag_changes)
perform_housekeeping
end
@@ -63,7 +64,46 @@ module Git
end
def under_process_limit?(change)
- change[:index] < PIPELINE_PROCESS_LIMIT || Feature.enabled?(:git_push_create_all_pipelines, project)
+ change[:index] < process_limit || Feature.enabled?(:git_push_create_all_pipelines, project)
+ end
+
+ def process_limit
+ PIPELINE_PROCESS_LIMIT
+ end
+
+ def warn_if_over_process_limit(changes)
+ return unless process_limit > 0
+ return if changes.length <= process_limit
+
+ # We don't know for sure whether the project has CI enabled or CI rules
+ # that might exclude pipelines from being created.
+ omitted_refs = possible_omitted_pipeline_refs(changes)
+
+ return unless omitted_refs.present?
+
+ # This notification only lets the admin know that we might have skipped some refs.
+ Gitlab::AppJsonLogger.info(
+ message: "Some pipelines may not have been created because ref count exceeded limit",
+ ref_limit: process_limit,
+ total_ref_count: changes.length,
+ possible_omitted_refs: omitted_refs,
+ possible_omitted_ref_count: omitted_refs.length,
+ **Gitlab::ApplicationContext.current
+ )
+ end
+
+ def possible_omitted_pipeline_refs(changes)
+ # Pipelines can only be created on push for branch creation or updates
+ omitted_changes = changes.select do |change|
+ change[:index] >= process_limit &&
+ change_action(change) != :removed
+ end
+
+ # rubocop:disable CodeReuse/ActiveRecord -- not an ActiveRecord model
+ # rubocop:disable Database/AvoidUsingPluckWithoutLimit -- not an ActiveRecord model
+ omitted_changes.pluck(:ref).sort
+ # rubocop:enable CodeReuse/ActiveRecord
+ # rubocop:enable Database/AvoidUsingPluckWithoutLimit
end
def create_bulk_push_event(ref_type, action, changes)
diff --git a/app/services/git/tag_push_service.rb b/app/services/git/tag_push_service.rb
index 641fe8e3916..dcb2877d54e 100644
--- a/app/services/git/tag_push_service.rb
+++ b/app/services/git/tag_push_service.rb
@@ -10,6 +10,7 @@ module Git
project.repository.before_push_tag
TagHooksService.new(project, current_user, params).execute
+ destroy_releases
unlock_artifacts
true
@@ -17,6 +18,12 @@ module Git
private
+ def destroy_releases
+ return unless removing_tag?
+
+ Releases::DestroyService.new(project, current_user, tag: Gitlab::Git.tag_name(ref)).execute
+ end
+
def unlock_artifacts
return unless removing_tag?
diff --git a/app/services/google_cloud_platform/artifact_registry/list_docker_images_service.rb b/app/services/google_cloud_platform/artifact_registry/list_docker_images_service.rb
deleted file mode 100644
index 1e227a2fcb6..00000000000
--- a/app/services/google_cloud_platform/artifact_registry/list_docker_images_service.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-module GoogleCloudPlatform
- module ArtifactRegistry
- class ListDockerImagesService < BaseProjectService
- def execute(page_token: nil)
- return ServiceResponse.error(message: "Access denied") unless allowed?
-
- ServiceResponse.success(payload: client.list_docker_images(page_token: page_token))
- end
-
- private
-
- def allowed?
- can?(current_user, :read_container_image, project)
- end
-
- def client
- ::GoogleCloudPlatform::ArtifactRegistry::Client.new(
- project: project,
- user: current_user,
- gcp_project_id: gcp_project_id,
- gcp_location: gcp_location,
- gcp_repository: gcp_repository,
- gcp_wlif: gcp_wlif
- )
- end
-
- def gcp_project_id
- params[:gcp_project_id]
- end
-
- def gcp_location
- params[:gcp_location]
- end
-
- def gcp_repository
- params[:gcp_repository]
- end
-
- def gcp_wlif
- params[:gcp_wlif]
- end
- end
- end
-end
diff --git a/app/services/gpg_keys/create_service.rb b/app/services/gpg_keys/create_service.rb
index c061c92df3e..559b88bbfa4 100644
--- a/app/services/gpg_keys/create_service.rb
+++ b/app/services/gpg_keys/create_service.rb
@@ -16,6 +16,8 @@ module GpgKeys
private
def validate(key)
+ return false unless key.valid?
+
GpgKeys::ValidateIntegrationsService.new(key).execute
end
diff --git a/app/services/gpg_keys/validate_integrations_service.rb b/app/services/gpg_keys/validate_integrations_service.rb
index f593eb6925a..ab6eb6f5388 100644
--- a/app/services/gpg_keys/validate_integrations_service.rb
+++ b/app/services/gpg_keys/validate_integrations_service.rb
@@ -9,8 +9,6 @@ module GpgKeys
end
def execute
- return false unless key.valid?
-
validate_beyond_identity!
key.errors.empty?
@@ -26,8 +24,13 @@ module GpgKeys
return unless integration&.activated?
integration.execute({ key_id: key.primary_keyid, committer_email: key.user.email })
- rescue ::Gitlab::BeyondIdentity::Client::Error => e
- key.errors.add(:base, "BeyondIdentity: #{e.message}")
+
+ key.externally_verified_at = Time.current
+ key.externally_verified = true
+ rescue ::Gitlab::BeyondIdentity::Client::ApiError => e
+ key.errors.add(:base, "BeyondIdentity: #{e.message}") unless e.acceptable_error?
+
+ key.externally_verified = false
end
end
end
diff --git a/app/services/gravatar_service.rb b/app/services/gravatar_service.rb
index 9d5990f2c8a..c9c6fae5cc6 100644
--- a/app/services/gravatar_service.rb
+++ b/app/services/gravatar_service.rb
@@ -2,13 +2,12 @@
class GravatarService
def execute(email, size = nil, scale = 2, username: nil)
- return if Gitlab::FIPS.enabled?
return unless Gitlab::CurrentSettings.gravatar_enabled?
identifier = email.presence || username.presence
return unless identifier
- hash = Digest::MD5.hexdigest(identifier.strip.downcase)
+ hash = Digest::SHA256.hexdigest(identifier.strip.downcase)
size = Groups::GroupMembersHelper::AVATAR_SIZE unless size && size > 0
sprintf gravatar_url,
diff --git a/app/services/group_access_tokens/rotate_service.rb b/app/services/group_access_tokens/rotate_service.rb
new file mode 100644
index 00000000000..0e52d720aeb
--- /dev/null
+++ b/app/services/group_access_tokens/rotate_service.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module GroupAccessTokens
+ class RotateService < ::PersonalAccessTokens::RotateService
+ extend ::Gitlab::Utils::Override
+
+ alias_method :group, :resource
+
+ override :valid_access_level?
+ def valid_access_level?
+ return true if current_user.can_admin_all_resources?
+ return false unless current_user.can?(:manage_resource_access_tokens, group)
+
+ token_access_level = group.max_member_access_for_user(token.user).to_i
+ current_user_access_level = group.max_member_access_for_user(current_user).to_i
+
+ token_access_level <= current_user_access_level
+ end
+ end
+end
diff --git a/app/services/groups/agnostic_token_revocation_service.rb b/app/services/groups/agnostic_token_revocation_service.rb
new file mode 100644
index 00000000000..db481c5419e
--- /dev/null
+++ b/app/services/groups/agnostic_token_revocation_service.rb
@@ -0,0 +1,158 @@
+# frozen_string_literal: true
+
+# This Service takes an authentication token of one of several types,
+# and will call a revocation for it if the token has access to the
+# group or any of the group's descendants. If revocation is not
+# possible, the token will be rotated or otherwise made unusable.
+#
+# If the token provided has access to the group and is revoked, it will
+# be returned by the service with a :success status.
+# If the token type is not supported, if the token doesn't have access
+# to the group, or if any error occurs, a generic :failure status is
+# returned.
+#
+# This Service does not create logs or Audit events. Those can be found
+# at the API layer or in specific revocation services.
+#
+# This Service returns a ServiceResponse object.
+module Groups # rubocop:disable Gitlab/BoundedContexts -- This service is strictly related to groups
+ class AgnosticTokenRevocationService < Groups::BaseService
+ AUDIT_SOURCE = :group_token_revocation_service
+
+ attr_reader :revocable
+
+ def initialize(group, current_user, plaintext)
+ @group = group
+ @current_user = current_user
+ @plaintext = plaintext.to_s
+ end
+
+ def execute
+ return error("Feature not enabled") unless Feature.enabled?(:group_agnostic_token_revocation, group)
+ return error("Group cannot be a subgroup") if group.subgroup?
+ return error("Unauthorized") unless can?(current_user, :admin_group, group)
+
+ # Determine the type of token
+ if plaintext.start_with?(Gitlab::CurrentSettings.current_application_settings.personal_access_token_prefix,
+ ApplicationSetting.defaults[:personal_access_token_prefix])
+ @revocable = PersonalAccessToken.find_by_token(plaintext)
+ return error('PAT not found') unless revocable
+
+ handle_personal_access_token
+ elsif plaintext.start_with?(DeployToken::DEPLOY_TOKEN_PREFIX)
+ @revocable = DeployToken.find_by_token(plaintext)
+ return error('DeployToken not found') unless revocable && revocable.group_type?
+
+ handle_deploy_token
+ elsif plaintext.start_with?(User::FEED_TOKEN_PREFIX)
+ @revocable = User.find_by_feed_token(plaintext)
+ return error('Feed Token not found') unless revocable
+
+ handle_feed_token
+ else
+ error('Unsupported token type')
+ end
+ end
+
+ private
+
+ attr_reader :plaintext, :group, :current_user
+
+ def success(revocable, type, api_entity: nil)
+ api_entity ||= type
+ ServiceResponse.success(
+ message: "#{type} is revoked",
+ payload: {
+ revocable: revocable,
+ type: type,
+ api_entity: api_entity
+ }
+ )
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def handle_personal_access_token
+ if user_has_group_membership?(revocable.user)
+ # Only revoke active tokens. (Ignore expired tokens)
+ if revocable.active?
+ ::PersonalAccessTokens::RevokeService.new(
+ current_user,
+ token: revocable,
+ source: AUDIT_SOURCE
+ ).execute
+ end
+
+ # Always validate that, if we're returning token info, it
+ # has been successfully revoked
+ return success(revocable, 'PersonalAccessToken') if revocable.reset.revoked?
+ end
+
+ # If we get here the token exists but either:
+ # - didn't belong to the group or descendants
+ # - did, but was already expired
+ # - does and is active, but revocation failed for some reason
+ error('PAT revocation failed')
+ end
+
+ # Validate whether the user has access to a group or any of its
+ # descendants. Includes membership that might not be active, but
+ # could be later, e.g. bans. Includes membership of non-human
+ # users.
+ def user_has_group_membership?(user)
+ ::GroupMember
+ .with_user(user)
+ .with_source_id(group.self_and_descendants)
+ .any? ||
+ ::ProjectMember
+ .with_user(user)
+ .in_namespaces(group.self_and_descendants)
+ .any?
+ end
+
+ def handle_deploy_token
+ if group.self_and_descendants.include?(revocable.group)
+ if revocable.active?
+ service = ::Groups::DeployTokens::RevokeService.new(
+ revocable.group,
+ current_user,
+ { id: revocable.id }
+ )
+
+ service.source = AUDIT_SOURCE
+ service.execute
+ end
+
+ return success(revocable, 'DeployToken') if revocable.reset.revoked?
+ end
+
+ error('DeployToken revocation failed')
+ end
+
+ def handle_feed_token
+ if user_has_group_membership?(revocable)
+ current_token = revocable.feed_token
+
+ response = Users::ResetFeedTokenService.new(
+ current_user,
+ user: revocable,
+ source: AUDIT_SOURCE
+ ).execute
+
+ # Always validate that, if we're returning token info, it
+ # has been successfully revoked. Feed tokens can only be rotated,
+ # so we also check that the old and new values are different.
+ if response.success? && !ActiveSupport::SecurityUtils.secure_compare(current_token, revocable.reset.feed_token)
+ return success(revocable, 'FeedToken', api_entity: 'UserSafe')
+ end
+ end
+
+ # If we get here the feed token exists but either:
+ # - the user didn't belong to the group or descendants
+ # - rotation failed for some reason
+ error('Feed token revocation failed')
+ end
+ end
+end
diff --git a/app/services/groups/autocomplete_service.rb b/app/services/groups/autocomplete_service.rb
index 5b9d60495e9..79feec246c0 100644
--- a/app/services/groups/autocomplete_service.rb
+++ b/app/services/groups/autocomplete_service.rb
@@ -6,14 +6,24 @@ module Groups
# rubocop: disable CodeReuse/ActiveRecord
def issues(confidential_only: false, issue_types: nil)
- finder_params = { group_id: group.id, include_subgroups: true, state: 'opened' }
+ finder_params = { group_id: group.id, state: 'opened' }
finder_params[:confidential] = true if confidential_only.present?
finder_params[:issue_types] = issue_types if issue_types.present?
- IssuesFinder.new(current_user, finder_params)
- .execute
- .preload(project: :namespace)
- .select(:iid, :title, :project_id, :namespace_id)
+ finder_class =
+ if group.namespace_work_items_enabled?(current_user)
+ finder_params[:include_descendants] = true
+ WorkItems::WorkItemsFinder
+ else
+ finder_params[:include_subgroups] = true
+ IssuesFinder
+ end
+
+ finder_class.new(current_user, finder_params)
+ .execute
+ .preload(project: :namespace)
+ .with_work_item_type
+ .select(:iid, :title, :project_id, :namespace_id, 'work_item_types.icon_name')
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -41,7 +51,7 @@ module Groups
def commands(noteable)
return [] unless noteable
- QuickActions::InterpretService.new(nil, current_user).available_commands(noteable)
+ QuickActions::InterpretService.new(container: group, current_user: current_user).available_commands(noteable)
end
end
end
diff --git a/app/services/groups/base_service.rb b/app/services/groups/base_service.rb
index 9705f3a560d..fbe5b3f3ddc 100644
--- a/app/services/groups/base_service.rb
+++ b/app/services/groups/base_service.rb
@@ -21,7 +21,7 @@ module Groups
params.delete(nsp)
end
- ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
+ ::NamespaceSettings::AssignAttributesService.new(current_user, group, settings_params).execute
end
def remove_unallowed_params
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 06c6560f0fe..8e81329c0db 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -6,60 +6,79 @@ module Groups
@current_user = user
@params = params.dup
@chat_team = @params.delete(:create_chat_team)
- @create_event = @params.delete(:create_event)
end
def execute
- remove_unallowed_params
- set_visibility_level
+ build_group
+ after_build_hook
- @group = Group.new(params.except(*::NamespaceSetting.allowed_namespace_settings_params))
+ return error_response unless valid?
- @group.build_namespace_settings
- handle_namespace_settings
+ @group.name ||= @group.path.dup
- after_build_hook(@group, params)
+ create_chat_team
+ Namespace.with_disabled_organization_validation { create_group }
- inherit_group_shared_runners_settings
+ return error_response unless @group.persisted?
- unless can_use_visibility_level? && can_create_group?
- return @group
- end
+ after_successful_creation_hook
- @group.name ||= @group.path.dup
+ ServiceResponse.success(payload: { group: @group })
+ end
- if create_chat_team?
- response = ::Mattermost::CreateTeamService.new(@group, current_user).execute
- return @group if @group.errors.any?
+ private
- @group.build_chat_team(name: response['name'], team_id: response['id'])
- end
+ def valid?
+ valid_visibility_level? && valid_user_permissions?
+ end
+ def error_response
+ ServiceResponse.error(message: 'Group has errors', payload: { group: @group })
+ end
+
+ def create_chat_team
+ return unless valid_to_create_chat_team?
+
+ response = ::Mattermost::CreateTeamService.new(@group, current_user).execute
+ return ServiceResponse.error(message: 'Group has errors', payload: { group: @group }) if @group.errors.any?
+
+ @group.build_chat_team(name: response['name'], team_id: response['id'])
+ end
+
+ def build_group
+ remove_unallowed_params
+
+ set_visibility_level
+
+ except_keys = ::NamespaceSetting.allowed_namespace_settings_params + [:organization_id, :import_export_upload]
+ @group = Group.new(params.except(*except_keys))
+
+ set_organization
+
+ @group.import_export_uploads << params[:import_export_upload] if params[:import_export_upload]
+ @group.build_namespace_settings
+ handle_namespace_settings
+ end
+
+ def create_group
Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction(
%w[routes redirect_routes], url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424281'
) do
Group.transaction do
if @group.save
@group.add_owner(current_user)
- Integration.create_from_active_default_integrations(@group, :group_id)
+ @group.add_creator(current_user)
+ Integration.create_from_default_integrations(@group, :group_id)
end
end
end
-
- after_create_hook
-
- @group
end
- private
-
- attr_reader :create_event
-
- def after_build_hook(group, params)
- # overridden in EE
+ def after_build_hook
+ inherit_group_shared_runners_settings
end
- def after_create_hook
+ def after_successful_creation_hook
# overridden in EE
end
@@ -70,13 +89,17 @@ module Groups
end
params.delete(:allow_mfa_for_subgroups)
+ params.delete(:remove_dormant_members)
+ params.delete(:remove_dormant_members_period)
+ params.delete(:math_rendering_limits_enabled)
+ params.delete(:lock_math_rendering_limits_enabled)
end
- def create_chat_team?
- Gitlab.config.mattermost.enabled && @chat_team && group.chat_team.nil?
+ def valid_to_create_chat_team?
+ Gitlab.config.mattermost.enabled && @chat_team && @group.chat_team.nil?
end
- def can_create_group?
+ def valid_user_permissions?
if @group.subgroup?
unless can?(current_user, :create_subgroup, @group.parent)
@group.parent = nil
@@ -92,17 +115,32 @@ module Groups
end
end
- unless organization_setting_valid?
- # We are unsetting this here to match behavior of invalid parent_id above and protect against possible
- # committing to the database of a value that isn't allowed.
- @group.organization = nil
- message = s_("CreateGroup|You don't have permission to create a group in the provided organization.")
- @group.errors.add(:organization_id, message)
+ return true if organization_setting_valid?
- return false
- end
+ # We are unsetting this here to match behavior of invalid parent_id above and protect against possible
+ # committing to the database of a value that isn't allowed.
+ @group.organization = nil
+
+ false
+ end
+
+ def can_create_group_in_organization?
+ return true if can?(current_user, :create_group, @group.organization)
+
+ message = s_("CreateGroup|You don't have permission to create a group in the provided organization.")
+ @group.errors.add(:organization_id, message)
+
+ false
+ end
+
+ def matches_parent_organization?
+ return true if @group.parent_id.blank?
+ return true if @group.parent.organization_id == @group.organization_id
- true
+ message = s_("CreateGroup|You can't create a group in a different organization than the parent group.")
+ @group.errors.add(:organization_id, message)
+
+ false
end
def organization_setting_valid?
@@ -113,18 +151,21 @@ module Groups
# 2. We shouldn't need to check if this is allowed if the user didn't try to set it themselves. i.e.
# provided in the params
return true if params[:organization_id].blank?
- return true if @group.organization.blank?
+ # There is a chance the organization is still blank (if not default organization), but that is the only case
+ # where we should allow this to not actually be a record in the database.
+ # Otherwise it isn't valid to set this to a non-existent record id and we'll check that in the lines after
+ # this code.
+ return true if @group.organization.blank? && Organizations::Organization.default?(params[:organization_id])
- can?(current_user, :create_group, @group.organization)
+ can_create_group_in_organization? && matches_parent_organization?
end
- def can_use_visibility_level?
- unless Gitlab::VisibilityLevel.allowed_for?(current_user, visibility_level)
- deny_visibility_level(@group)
- return false
- end
+ def valid_visibility_level?
+ return true if Gitlab::VisibilityLevel.allowed_for?(current_user, visibility_level)
+
+ deny_visibility_level(@group)
- true
+ false
end
def set_visibility_level
@@ -139,6 +180,14 @@ module Groups
@group.shared_runners_enabled = @group.parent.shared_runners_enabled
@group.allow_descendants_override_disabled_shared_runners = @group.parent.allow_descendants_override_disabled_shared_runners
end
+
+ def set_organization
+ if params[:organization_id]
+ @group.organization_id = params[:organization_id]
+ elsif @group.parent_id
+ @group.organization = @group.parent.organization
+ end
+ end
end
end
diff --git a/app/services/groups/deploy_tokens/revoke_service.rb b/app/services/groups/deploy_tokens/revoke_service.rb
index cf91d3b27fa..0aa88f6190d 100644
--- a/app/services/groups/deploy_tokens/revoke_service.rb
+++ b/app/services/groups/deploy_tokens/revoke_service.rb
@@ -3,7 +3,7 @@
module Groups
module DeployTokens
class RevokeService < BaseService
- attr_accessor :token
+ attr_accessor :token, :source
def execute
@token = group.deploy_tokens.find(params[:id])
diff --git a/app/services/groups/destroy_service.rb b/app/services/groups/destroy_service.rb
index aeab2667737..d961effb669 100644
--- a/app/services/groups/destroy_service.rb
+++ b/app/services/groups/destroy_service.rb
@@ -5,6 +5,8 @@ module Groups
DestroyError = Class.new(StandardError)
def async_execute
+ mark_pending_delete
+
job_id = GroupDestroyWorker.perform_async(group.id, current_user.id)
Gitlab::AppLogger.info("User #{current_user.id} scheduled a deletion of group ID #{group.id} with job ID #{job_id}")
end
@@ -14,6 +16,8 @@ module Groups
# TODO - add a policy check here https://gitlab.com/gitlab-org/gitlab/-/issues/353082
raise DestroyError, "You can't delete this group because you're blocked." if current_user.blocked?
+ mark_pending_delete
+
group.projects.includes(:project_feature).each do |project|
# Execute the destruction of the models immediately to ensure atomic cleanup.
success = ::Projects::DestroyService.new(project, current_user).execute
@@ -46,11 +50,22 @@ module Groups
publish_event
group
+ rescue Exception # rubocop:disable Lint/RescueException -- Namespace.transaction can raise Exception
+ unmark_pending_delete
+ raise
end
# rubocop: enable CodeReuse/ActiveRecord
private
+ def mark_pending_delete
+ group.update_attribute(:pending_delete, true)
+ end
+
+ def unmark_pending_delete
+ group.update_attribute(:pending_delete, false)
+ end
+
def any_groups_shared_with_this_group?
group.shared_group_links.any?
end
diff --git a/app/services/groups/group_links/create_service.rb b/app/services/groups/group_links/create_service.rb
index a6e2c0b952e..b7e0d0001bb 100644
--- a/app/services/groups/group_links/create_service.rb
+++ b/app/services/groups/group_links/create_service.rb
@@ -22,11 +22,14 @@ module Groups
end
def build_link
+ remove_unallowed_params
+
@link = GroupGroupLink.new(
shared_group: group,
shared_with_group: shared_with_group,
group_access: params[:shared_group_access],
- expires_at: params[:expires_at]
+ expires_at: params[:expires_at],
+ member_role_id: params[:member_role_id]
)
end
diff --git a/app/services/groups/group_links/destroy_service.rb b/app/services/groups/group_links/destroy_service.rb
index 8eed46b28ca..b2de99c86c0 100644
--- a/app/services/groups/group_links/destroy_service.rb
+++ b/app/services/groups/group_links/destroy_service.rb
@@ -4,7 +4,7 @@ module Groups
module GroupLinks
class DestroyService < ::Groups::BaseService
def execute(one_or_more_links, skip_authorization: false)
- unless skip_authorization || group && can?(current_user, :admin_group_member, group)
+ unless skip_authorization || (group && can?(current_user, :admin_group_member, group))
return error('Not Found', 404)
end
diff --git a/app/services/groups/group_links/update_service.rb b/app/services/groups/group_links/update_service.rb
index 913bf2bfce7..a2272fc6153 100644
--- a/app/services/groups/group_links/update_service.rb
+++ b/app/services/groups/group_links/update_service.rb
@@ -10,6 +10,10 @@ module Groups
end
def execute(group_link_params)
+ @group_link_params = group_link_params
+
+ remove_unallowed_params
+
group_link.update!(group_link_params)
if requires_authorization_refresh?(group_link_params)
@@ -21,7 +25,7 @@ module Groups
private
- attr_accessor :group_link
+ attr_accessor :group_link, :group_link_params
def requires_authorization_refresh?(params)
params.include?(:group_access)
diff --git a/app/services/groups/import_export/export_service.rb b/app/services/groups/import_export/export_service.rb
index 2d88283661c..cab7e9c248f 100644
--- a/app/services/groups/import_export/export_service.rb
+++ b/app/services/groups/import_export/export_service.rb
@@ -3,22 +3,27 @@
module Groups
module ImportExport
class ExportService
- def initialize(group:, user:, params: {})
+ def initialize(group:, user:, exported_by_admin:, params: {})
@group = group
@current_user = user
+ @exported_by_admin = exported_by_admin
@params = params
@shared = @params[:shared] || Gitlab::ImportExport::Shared.new(@group)
@logger = Gitlab::Export::Logger.build
end
def async_execute
- GroupExportWorker.perform_async(current_user.id, group.id, params)
+ GroupExportWorker.perform_async(
+ current_user.id,
+ group.id,
+ params.merge(exported_by_admin: @exported_by_admin)
+ )
end
def execute
validate_user_permissions
- remove_existing_export! if group.export_file_exists?
+ remove_existing_export! if group.export_file_exists?(current_user)
save!
ensure
@@ -27,7 +32,7 @@ module Groups
private
- attr_reader :group, :current_user, :params
+ attr_reader :group, :current_user, :exported_by_admin, :params
attr_accessor :shared
def validate_user_permissions
@@ -39,7 +44,7 @@ module Groups
end
def remove_existing_export!
- import_export_upload = group.import_export_upload
+ import_export_upload = group.import_export_upload_by_user(current_user)
import_export_upload.remove_export_file!
import_export_upload.save
@@ -50,6 +55,7 @@ module Groups
# it removes the tmp dir. This means that if we want to add new savers
# in EE the data won't be available.
if save_exporters && file_saver.save
+ audit_export
notify_success
else
notify_error!
@@ -84,7 +90,7 @@ module Groups
end
def file_saver
- Gitlab::ImportExport::Saver.new(exportable: group, shared: shared)
+ Gitlab::ImportExport::Saver.new(exportable: group, shared: shared, user: current_user)
end
def remove_archive_tmp_dir
@@ -105,6 +111,20 @@ module Groups
)
end
+ def audit_export
+ return if exported_by_admin && Gitlab::CurrentSettings.silent_admin_exports_enabled?
+
+ audit_context = {
+ name: 'group_export_created',
+ author: current_user,
+ scope: group,
+ target: group,
+ message: 'Group file export was created'
+ }
+
+ Gitlab::Audit::Auditor.audit(audit_context)
+ end
+
def notify_success
log_info('Group Export succeeded')
diff --git a/app/services/groups/import_export/import_service.rb b/app/services/groups/import_export/import_service.rb
index 15948ab82a2..8bf5a6ed5cd 100644
--- a/app/services/groups/import_export/import_service.rb
+++ b/app/services/groups/import_export/import_service.rb
@@ -10,7 +10,7 @@ module Groups
@current_user = user
@user_role = user_role
@shared = Gitlab::ImportExport::Shared.new(@group)
- @logger = Gitlab::Import::Logger.build
+ @logger = ::Import::Framework::Logger.build
end
def async_execute
@@ -65,7 +65,8 @@ module Groups
@import_file ||= Gitlab::ImportExport::FileImporter.import(
importable: group,
archive_file: nil,
- shared: shared
+ shared: shared,
+ user: current_user
)
end
@@ -83,7 +84,7 @@ module Groups
end
def remove_import_file
- upload = group.import_export_upload
+ upload = group.import_export_upload_by_user(current_user)
return unless upload&.import_file&.file
diff --git a/app/services/groups/nested_create_service.rb b/app/services/groups/nested_create_service.rb
index 35d45aaf0cc..b91fba2d669 100644
--- a/app/services/groups/nested_create_service.rb
+++ b/app/services/groups/nested_create_service.rb
@@ -39,9 +39,10 @@ module Groups
parent: last_group,
visibility_level: visibility_level
)
+ new_params.delete(:organization_id) if new_params[:parent]
last_group = namespace_or_group(partial_path) ||
- Groups::CreateService.new(current_user, new_params).execute
+ Groups::CreateService.new(current_user, new_params).execute[:group]
end
last_group
diff --git a/app/services/groups/open_issues_count_service.rb b/app/services/groups/open_issues_count_service.rb
index 17cf3d38987..19c3859099b 100644
--- a/app/services/groups/open_issues_count_service.rb
+++ b/app/services/groups/open_issues_count_service.rb
@@ -29,13 +29,15 @@ module Groups
end
def relation_for_count
+ confidential_filter = public_only? ? false : nil
+
IssuesFinder.new(
user,
group_id: group.id,
state: 'opened',
non_archived: true,
include_subgroups: true,
- public_only: public_only?
+ confidential: confidential_filter
).execute
end
diff --git a/app/services/groups/participants_service.rb b/app/services/groups/participants_service.rb
index ae1a917f022..47a5bcd07d1 100644
--- a/app/services/groups/participants_service.rb
+++ b/app/services/groups/participants_service.rb
@@ -12,8 +12,9 @@ module Groups
noteable_owner +
participants_in_noteable +
all_members +
- group_hierarchy_users +
- groups
+ group_hierarchy_users
+
+ participants += groups unless relation_at_search_limit?(group_hierarchy_users)
render_participants_as_hash(participants.uniq)
end
@@ -33,5 +34,6 @@ module Groups
filter_and_sort_users(relation)
end
+ strong_memoize_attr :group_hierarchy_users
end
end
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index c81a879ad1a..4bf14516dde 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -73,6 +73,7 @@ module Groups
end
end
+ transfer_labels
remove_paid_features_for_projects(old_root_ancestor_id)
post_update_hooks(@updated_project_ids, old_root_ancestor_id)
propagate_integrations
@@ -81,6 +82,14 @@ module Groups
true
end
+ def transfer_labels
+ @group.all_projects.each_batch(of: 10) do |projects|
+ projects.each do |project|
+ Labels::TransferService.new(current_user, @group, project).execute
+ end
+ end
+ end
+
# Overridden in EE
def post_update_hooks(updated_project_ids, old_root_ancestor_id)
refresh_project_authorizations
@@ -138,7 +147,7 @@ module Groups
def transfer_to_subgroup?
@new_parent_group && \
- @group.self_and_descendants.pluck_primary_key.include?(@new_parent_group.id)
+ @group.self_and_descendants.pluck_primary_key.include?(@new_parent_group.id)
end
def valid_policies?
@@ -284,7 +293,7 @@ module Groups
def update_integrations
@group.integrations.with_default_settings.delete_all
- Integration.create_from_active_default_integrations(@group, :group_id)
+ Integration.create_from_default_integrations(@group, :group_id)
end
def propagate_integrations
@@ -298,10 +307,7 @@ module Groups
end
def pending_builds_params
- {
- namespace_traversal_ids: group.traversal_ids,
- namespace_id: group.id
- }
+ ::Ci::PendingBuild.namespace_transfer_params(group)
end
def update_crm_objects(was_root_group)
diff --git a/app/services/groups/update_service.rb b/app/services/groups/update_service.rb
index a6ef8c8743b..8072baa5f2e 100644
--- a/app/services/groups/update_service.rb
+++ b/app/services/groups/update_service.rb
@@ -4,7 +4,12 @@ module Groups
class UpdateService < Groups::BaseService
include UpdateVisibilityLevel
- SETTINGS_PARAMS = [:allow_mfa_for_subgroups].freeze
+ SETTINGS_PARAMS = [
+ :allow_mfa_for_subgroups,
+ :remove_dormant_members,
+ :remove_dormant_members_period,
+ :early_access_program_participant
+ ].freeze
def execute
reject_parent_id!
@@ -34,7 +39,7 @@ module Groups
group.assign_attributes(params)
begin
- success = group.save
+ success = Namespace.with_disabled_organization_validation { group.save }
after_update if success
@@ -61,7 +66,8 @@ module Groups
params[:namespace_descendants_attributes] = {
traversal_ids: group.traversal_ids,
all_project_ids: [],
- self_and_descendant_group_ids: []
+ self_and_descendant_group_ids: [],
+ outdated_at: Time.current
}
else
return unless group.namespace_descendants
@@ -134,12 +140,19 @@ module Groups
# overridden in EE
def remove_unallowed_params
- params.delete(:emails_disabled) unless can?(current_user, :set_emails_disabled, group)
+ params.delete(:emails_enabled) unless can?(current_user, :set_emails_disabled, group)
+ params.delete(:max_artifacts_size) unless can?(current_user, :update_max_artifacts_size, group)
unless can?(current_user, :update_default_branch_protection, group)
params.delete(:default_branch_protection)
params.delete(:default_branch_protection_defaults)
end
+
+ unless can?(current_user, :admin_namespace, group)
+ params.delete(:math_rendering_limits_enabled)
+ params.delete(:lock_math_rendering_limits_enabled)
+ params.delete(:allow_runner_registration_token)
+ end
end
def handle_changes
@@ -152,7 +165,7 @@ module Groups
settings_params.merge!({ default_branch_protection: params[:default_branch_protection] }.compact)
allowed_settings_params.each { |param| params.delete(param) }
- ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
+ ::NamespaceSettings::AssignAttributesService.new(current_user, group, settings_params).execute
end
def handle_crm_settings_update
diff --git a/app/services/groups/update_shared_runners_service.rb b/app/services/groups/update_shared_runners_service.rb
index 08b43037c4c..2a5ba214690 100644
--- a/app/services/groups/update_shared_runners_service.rb
+++ b/app/services/groups/update_shared_runners_service.rb
@@ -3,7 +3,7 @@
module Groups
class UpdateSharedRunnersService < Groups::BaseService
def execute
- return error('Operation not allowed', 403) unless can?(current_user, :admin_group, group)
+ return error('Operation not allowed', 403) unless can?(current_user, :admin_runner, group)
validate_params
@@ -28,7 +28,7 @@ module Groups
case params[:shared_runners_setting]
when Namespace::SR_DISABLED_AND_UNOVERRIDABLE
set_shared_runners_enabled!(false)
- when Namespace::SR_DISABLED_WITH_OVERRIDE, Namespace::SR_DISABLED_AND_OVERRIDABLE
+ when Namespace::SR_DISABLED_AND_OVERRIDABLE
disable_shared_runners_and_allow_override!
when Namespace::SR_ENABLED
set_shared_runners_enabled!(true)
@@ -45,7 +45,7 @@ module Groups
group.run_after_commit_or_now do |group|
pending_builds_params = { instance_runners_enabled: group.shared_runners_enabled }
- ::Ci::UpdatePendingBuildService.new(group, pending_builds_params).execute
+ ::Ci::PendingBuilds::UpdateGroupWorker.perform_async(group.id, pending_builds_params)
end
end
diff --git a/app/services/ide/base_config_service.rb b/app/services/ide/base_config_service.rb
index 0501fab53af..1094abd2622 100644
--- a/app/services/ide/base_config_service.rb
+++ b/app/services/ide/base_config_service.rb
@@ -41,12 +41,12 @@ module Ide
end
def load_config!
- @config = Gitlab::WebIde::Config.new(config_content)
+ @config = WebIde::Config.new(config_content)
unless @config.valid?
raise ValidationError, @config.errors.first
end
- rescue Gitlab::WebIde::Config::ConfigError => e
+ rescue WebIde::Config::ConfigError => e
raise ValidationError, e.message
end
diff --git a/app/services/import/base_service.rb b/app/services/import/base_service.rb
index 64cf3cfa04a..535ceeaeec1 100644
--- a/app/services/import/base_service.rb
+++ b/app/services/import/base_service.rb
@@ -19,7 +19,11 @@ module Import
return current_user.namespace if namespace == owner
- group = Groups::NestedCreateService.new(current_user, group_path: namespace).execute
+ group = Groups::NestedCreateService.new(
+ current_user,
+ organization_id: params[:organization_id],
+ group_path: namespace
+ ).execute
group.errors.any? ? current_user.namespace : group
rescue StandardError => e
@@ -32,8 +36,8 @@ module Import
project.errors.full_messages.join(', ')
end
- def success(project)
- super().merge(project: project, status: :success)
+ def success(project, warning: nil)
+ super().merge(project: project, status: :success, warning: warning)
end
def track_access_level(import_type)
diff --git a/app/services/import/bitbucket_server_service.rb b/app/services/import/bitbucket_server_service.rb
index d8f39d7b963..27af8100027 100644
--- a/app/services/import/bitbucket_server_service.rb
+++ b/app/services/import/bitbucket_server_service.rb
@@ -89,7 +89,8 @@ module Import
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w[http https],
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?
+ deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: Gitlab::CurrentSettings.outbound_local_requests_whitelist # rubocop:disable Naming/InclusiveLanguage -- existing setting
)
end
@@ -99,7 +100,7 @@ module Import
end
def log_error(message)
- Gitlab::Import::Logger.error(
+ ::Import::Framework::Logger.error(
message: 'Import failed due to a BitBucket Server error',
error: message
)
diff --git a/app/services/import/bitbucket_service.rb b/app/services/import/bitbucket_service.rb
new file mode 100644
index 00000000000..c910dbff15b
--- /dev/null
+++ b/app/services/import/bitbucket_service.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+# Imports a project from Bitbucket Cloud using
+# username and app password (not OAuth)
+module Import
+ class BitbucketService < Import::BaseService
+ attr_reader :current_user, :params
+
+ # @param [User] current_user
+ # @param [Hash] params
+ # @option params [String] bitbucket_username - Bitbucket Cloud username
+ # @option params [String] bitbucket_app_password - Bitbucket Cloud user app password
+ def initialize(current_user, params)
+ @current_user = current_user
+ @params = params
+ end
+
+ # rubocop:disable Style/IfUnlessModifier -- line becomes too long
+ def execute
+ unless authorized?
+ return log_and_return_error("You don't have permissions to import this project", :unauthorized)
+ end
+
+ unless bitbucket_user.present?
+ return log_and_return_error('Unable to authorize with Bitbucket. Check your credentials', :unauthorized)
+ end
+
+ if bitbucket_repo.error
+ return log_and_return_error(
+ Kernel.format("Project %{repo_path} could not be found", repo_path: normalized_repo_path),
+ :unprocessable_entity
+ )
+ end
+
+ project = create_project
+
+ track_access_level('bitbucket')
+
+ if project.persisted?
+ success(project)
+ elsif project.errors[:import_source_disabled].present?
+ error(project.errors[:import_source_disabled], :forbidden)
+ else
+ log_and_return_error(project_save_error(project), :unprocessable_entity)
+ end
+ rescue StandardError => e
+ log_and_return_error("Import failed due to an error: #{e}", :bad_request)
+ end
+ # rubocop:enable Style/IfUnlessModifier
+
+ private
+
+ def client
+ @client ||= Bitbucket::Client.new(credentials)
+ end
+
+ def credentials
+ {
+ username: params[:bitbucket_username],
+ app_password: params[:bitbucket_app_password]
+ }
+ end
+
+ def create_project
+ Gitlab::BitbucketImport::ProjectCreator.new(
+ bitbucket_repo,
+ project_name,
+ target_namespace,
+ current_user,
+ credentials
+ ).execute
+ end
+
+ def bitbucket_repo
+ @bitbucket_repo ||= client.repo(normalized_repo_path)
+ end
+
+ def bitbucket_user
+ @bitbucket_user = client.user
+ end
+
+ def normalized_repo_path
+ @normalized_repo_path ||= params[:repo_path].to_s.gsub('___', '/')
+ end
+
+ def project_name
+ @project_name ||= params[:new_name].presence || bitbucket_repo.name
+ end
+
+ def target_namespace
+ @target_namespace ||= find_or_create_namespace(params[:target_namespace], current_user.namespace_path)
+ end
+
+ def log_and_return_error(message, error_type)
+ log_error(message)
+ error(_(message), error_type)
+ end
+
+ def log_error(message)
+ ::Import::Framework::Logger.error(
+ message: 'BitBucket Cloud import failed',
+ error: message
+ )
+ end
+ end
+end
diff --git a/app/services/import/fogbugz_service.rb b/app/services/import/fogbugz_service.rb
index 52d9cb77c0a..fa31d9f7248 100644
--- a/app/services/import/fogbugz_service.rb
+++ b/app/services/import/fogbugz_service.rb
@@ -89,7 +89,8 @@ module Import
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w[http https],
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?
+ deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: Gitlab::CurrentSettings.outbound_local_requests_whitelist # rubocop:disable Naming/InclusiveLanguage -- existing setting
)
end
@@ -99,7 +100,7 @@ module Import
end
def log_error(message)
- Gitlab::Import::Logger.error(
+ ::Import::Framework::Logger.error(
message: 'Import failed due to a Fogbugz error',
error: message
)
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index b8389192b18..d705a2d801f 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -4,6 +4,7 @@ module Import
class GithubService < Import::BaseService
include ActiveSupport::NumberHelper
include Gitlab::Utils::StrongMemoize
+ include SafeFormatHelper
attr_accessor :client
attr_reader :params, :current_user
@@ -12,8 +13,13 @@ module Import
context_error = validate_context
return context_error if context_error
+ if provider == :github # we skip access token validation for Gitea importer calls
+ access_token_error = validate_access_token
+ return access_token_error if access_token_error
+ end
+
project = create_project(access_params, provider)
- track_access_level('github')
+ track_access_level(provider.to_s) # provider may be :gitea
if project.persisted?
store_import_settings(project)
@@ -59,7 +65,7 @@ module Import
end
def oversize_error_message
- _('"%{repository_name}" size (%{repository_size}) is larger than the limit of %{limit}.') % {
+ s_('GithubImport|"%{repository_name}" size (%{repository_size}) is larger than the limit of %{limit}.') % {
repository_name: repo[:name],
repository_size: number_to_human_size(repo[:size]),
limit: number_to_human_size(repository_size_limit)
@@ -92,38 +98,66 @@ module Import
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w[http https],
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?
+ deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: Gitlab::CurrentSettings.outbound_local_requests_whitelist # rubocop:disable Naming/InclusiveLanguage -- existing setting
)
end
private
+ def validate_access_token
+ begin
+ client.octokit.repository(params[:repo_id].to_i)
+ rescue Octokit::Forbidden, Octokit::Unauthorized
+ return error(repository_access_error_message, :unprocessable_entity)
+ end
+
+ return unless Gitlab::Utils.to_boolean(params.dig(:optional_stages, :collaborators_import))
+
+ begin
+ client.octokit.collaborators(params[:repo_id].to_i)
+ rescue Octokit::Forbidden, Octokit::Unauthorized
+ return error(collaborators_access_error_message, :unprocessable_entity)
+ end
+ nil # intentionally return nil when both checks pass (rescues above return error responses)
+ end
+
+ def repository_access_error_message
+ s_("GithubImport|Your GitHub personal access token does not have read access to the repository. " \
+ "Please use a classic GitHub personal access token with the `repo` scope. Fine-grained tokens are not supported.")
+ end
+
+ def collaborators_access_error_message
+ s_("GithubImport|Your GitHub personal access token does not have read access to collaborators. " \
+ "Please use a classic GitHub personal access token with the `read:org` scope. Fine-grained tokens are not supported.")
+ end
+
def validate_context
if blocked_url?
log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request)
elsif target_namespace.nil?
- error(_('Namespace or group to import repository into does not exist.'), :unprocessable_entity)
+ error(s_('GithubImport|Namespace or group to import repository into does not exist.'), :unprocessable_entity)
elsif !authorized?
- error(_('You are not allowed to import projects in this namespace.'), :unprocessable_entity)
+ error(s_('GithubImport|You are not allowed to import projects in this namespace.'), :unprocessable_entity)
elsif oversized?
error(oversize_error_message, :unprocessable_entity)
end
end
def target_namespace_path
- raise ArgumentError, 'Target namespace is required' if params[:target_namespace].blank?
+ raise ArgumentError, s_('GithubImport|Target namespace is required') if params[:target_namespace].blank?
params[:target_namespace]
end
def log_error(exception)
Gitlab::GithubImport::Logger.error(
- message: 'Import failed due to a GitHub error',
+ message: 'Import failed because of a GitHub error',
status: exception.response_status,
error: exception.response_body
)
- error(_('Import failed due to a GitHub error: %{original} (HTTP %{code})') % { original: exception.response_body, code: exception.response_status }, :unprocessable_entity)
+ error(s_('GithubImport|Import failed because of a GitHub error: %{original} (HTTP %{code})') % { original: exception.response_body, code: exception.response_status }, :unprocessable_entity)
end
def log_and_return_error(message, translated_message, http_status)
@@ -140,8 +174,7 @@ module Import
.new(project)
.write(
timeout_strategy: params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT,
- optional_stages: params[:optional_stages],
- extended_events: Feature.enabled?(:github_import_extended_events, current_user)
+ optional_stages: params[:optional_stages]
)
end
end
diff --git a/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb b/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb
index ed99d20d67f..609f8207e17 100644
--- a/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb
+++ b/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb
@@ -21,11 +21,12 @@ module Import
def initialize(params:, current_user: nil)
@params = params
+ @current_user = current_user
end
def project_params
- @project_parms ||= {
- import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url)
+ @project_params ||= {
+ import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url, user: current_user)
}
end
@@ -43,7 +44,7 @@ module Import
private
- attr_reader :params
+ attr_reader :params, :current_user
def s3_request?
headers['Server'] == 'AmazonS3' && headers['x-amz-request-id'].present?
diff --git a/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb b/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb
index 57ed717b966..c8bda2b2bc7 100644
--- a/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb
+++ b/app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb
@@ -27,11 +27,12 @@ module Import
def initialize(params:, current_user: nil)
@params = params
+ @current_user = current_user
end
def project_params
- @project_parms ||= {
- import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url)
+ @project_params ||= {
+ import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url, user: current_user)
}
end
@@ -56,7 +57,7 @@ module Import
private
- attr_reader :params
+ attr_reader :params, :current_user
def s3_object
strong_memoize(:s3_object) do
diff --git a/app/services/import/placeholder_references/base_service.rb b/app/services/import/placeholder_references/base_service.rb
new file mode 100644
index 00000000000..681298f1d99
--- /dev/null
+++ b/app/services/import/placeholder_references/base_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Import
+ module PlaceholderReferences
+ class BaseService
+ include Services::ReturnServiceResponses
+
+ def initialize(import_source:, import_uid:)
+ @import_source = import_source
+ @import_uid = import_uid
+ end
+
+ private
+
+ attr_reader :import_source, :import_uid
+
+ def store
+ @store ||= PlaceholderReferences::Store.new(import_source: import_source, import_uid: import_uid)
+ end
+
+ def logger
+ Framework::Logger
+ end
+
+ def log_info(...)
+ logger.info(logger_params(...))
+ end
+
+ def log_error(...)
+ logger.error(logger_params(...))
+ end
+
+ def logger_params(message:, **params)
+ params.merge(
+ message: message,
+ import_source: import_source,
+ import_uid: import_uid
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/import/placeholder_references/load_service.rb b/app/services/import/placeholder_references/load_service.rb
new file mode 100644
index 00000000000..6a382c033e1
--- /dev/null
+++ b/app/services/import/placeholder_references/load_service.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+module Import
+ module PlaceholderReferences
+ class LoadService < BaseService
+ BATCH_LIMIT = 500
+
+ def initialize(import_source:, import_uid:)
+ super(import_source: import_source, import_uid: import_uid)
+
+ @processed_count = 0
+ @error_count = 0
+ end
+
+ def execute
+ log_info(message: 'Processing placeholder references')
+
+ while (batch = next_batch).present?
+ load!(batch)
+
+ # End this loop if we know that we cleared the set earlier.
+ # This prevents processing just a few records at a time if an import is simultaneously writing data to Redis.
+ break if batch.size < BATCH_LIMIT
+ end
+
+ log_info(
+ message: 'Processed placeholder references',
+ processed_count: processed_count,
+ error_count: error_count
+ )
+
+ success(processed_count: processed_count, error_count: error_count)
+ end
+
+ private
+
+ attr_accessor :error_count, :processed_count
+
+ def next_batch
+ store.get(BATCH_LIMIT)
+ end
+
+ def load!(batch)
+ to_load = batch.filter_map do |item|
+ SourceUserPlaceholderReference.from_serialized(item)
+ rescue JSON::ParserError, SourceUserPlaceholderReference::SerializationError => e
+ log_error(item, e)
+ nil
+ end
+
+ begin
+ bulk_insert!(to_load)
+ rescue ActiveRecord::RecordInvalid => e
+ # We optimise for all records being valid and only filter for validity
+ # when there was a problem
+ to_load.reject! do |item|
+ next false if item.valid?
+
+ log_error(item.attributes, e)
+ true
+ end
+
+ # Try again
+ bulk_insert!(to_load)
+ rescue ActiveRecord::InvalidForeignKey => e
+ # This is an unrecoverable situation where we allow the error to clear the batch
+ log_error(to_load, e)
+ end
+
+ clear_batch!(batch)
+ end
+
+ def bulk_insert!(to_load)
+ Import::SourceUserPlaceholderReference.bulk_insert!(to_load)
+ end
+
+ def clear_batch!(batch)
+ processed_count = batch.size
+
+ self.processed_count += processed_count
+
+ store.remove(batch)
+ end
+
+ def log_error(item, exception)
+ super(
+ message: 'Error processing placeholder reference',
+ item: item,
+ exception: {
+ class: exception.class,
+ message: exception.message
+ }
+ )
+
+ self.error_count += 1
+ end
+ end
+ end
+end
diff --git a/app/services/import/placeholder_references/push_service.rb b/app/services/import/placeholder_references/push_service.rb
new file mode 100644
index 00000000000..419d8d242ed
--- /dev/null
+++ b/app/services/import/placeholder_references/push_service.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Import
+ module PlaceholderReferences
+ InvalidReferenceError = Class.new(StandardError)
+
+ class PushService < BaseService
+ class << self
+ def from_record(import_source:, import_uid:, source_user:, record:, user_reference_column:)
+ if record.is_a?(IssueAssignee)
+ composite_key = { 'issue_id' => record.issue_id, 'user_id' => record.user_id }
+ elsif record.respond_to?(:id) && record.id.is_a?(Integer)
+ numeric_key = record.id
+ end
+
+ new(
+ import_source: import_source,
+ import_uid: import_uid,
+ model: record.class,
+ composite_key: composite_key,
+ numeric_key: numeric_key,
+ source_user_id: source_user.id,
+ source_user_namespace_id: source_user.namespace_id,
+ user_reference_column: user_reference_column
+ )
+ end
+ end
+
+ def initialize(import_source:, import_uid:, source_user_id:, source_user_namespace_id:, model:, user_reference_column:, numeric_key: nil, composite_key: nil) # rubocop:disable Layout/LineLength -- It's easier to read being on one line
+ super(import_source: import_source, import_uid: import_uid)
+
+ @reference = Import::SourceUserPlaceholderReference.new(
+ model: model.name,
+ source_user_id: source_user_id,
+ namespace_id: source_user_namespace_id,
+ user_reference_column: user_reference_column,
+ numeric_key: numeric_key,
+ composite_key: composite_key,
+ alias_version: PlaceholderReferences::AliasResolver.version_for_model(model.name)
+ )
+ end
+
+ def execute
+ if reference.invalid?
+ track_error(reference)
+
+ return error(reference.errors.full_messages, :bad_request)
+ end
+
+ serialized_reference = reference.to_serialized
+
+ store.add(serialized_reference)
+
+ success(serialized_reference: serialized_reference)
+ end
+
+ private
+
+ attr_reader :reference
+
+ def track_error(reference)
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
+ InvalidReferenceError.new('Invalid placeholder user reference'),
+ model: reference.model,
+ errors: reference.errors.full_messages.join(', ')
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/import/reassign_placeholder_user_records_service.rb b/app/services/import/reassign_placeholder_user_records_service.rb
new file mode 100644
index 00000000000..7af4f753406
--- /dev/null
+++ b/app/services/import/reassign_placeholder_user_records_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Import
+ class ReassignPlaceholderUserRecordsService
+ NoReassignToUser = Class.new(StandardError)
+
+ attr_accessor :import_source_user
+
+ def initialize(import_source_user)
+ @import_source_user = import_source_user
+ end
+
+ def execute
+ return unless import_source_user.reassignment_in_progress?
+
+ Import::SourceUserPlaceholderReference.model_groups_for_source_user(import_source_user).each do |reference_group|
+ model = reference_group.model
+ user_reference_column = reference_group.user_reference_column
+
+ begin
+ Import::SourceUserPlaceholderReference.model_relations_for_source_user_reference(
+ model: model,
+ source_user: import_source_user,
+ user_reference_column: user_reference_column
+ ) do |model_relation, placeholder_references|
+ reassign_placeholder_records_batch(model_relation, placeholder_references, user_reference_column)
+ end
+ rescue NameError => e
+ ::Import::Framework::Logger.error(
+ message: "#{model} is not a model, #{user_reference_column} cannot be reassigned.",
+ error: e.message,
+ source_user_id: import_source_user&.id
+ )
+
+ next
+ end
+ end
+
+ import_source_user.complete!
+ end
+
+ private
+
+ def reassign_placeholder_records_batch(model_relation, placeholder_references, user_reference_column)
+ model_relation.klass.transaction do
+ model_relation.update_all({ user_reference_column => import_source_user.reassign_to_user_id })
+ end
+ placeholder_references.delete_all
+ rescue ActiveRecord::RecordNotUnique
+ placeholder_references.each do |placeholder_reference|
+ reassign_placeholder_record(placeholder_reference, user_reference_column)
+ end
+ end
+
+ def reassign_placeholder_record(placeholder_reference, user_reference_column)
+ placeholder_reference.model_record.update!({ user_reference_column => import_source_user.reassign_to_user_id })
+ placeholder_reference.destroy!
+ rescue ActiveRecord::RecordNotUnique, ActiveRecord::RecordInvalid
+ ::Import::Framework::Logger.warn(
+ message: "Unable to reassign record, reassigned user is invalid or not unique",
+ source_user_id: import_source_user.id
+ )
+ end
+ end
+end
diff --git a/app/services/import/source_users/accept_reassignment_service.rb b/app/services/import/source_users/accept_reassignment_service.rb
new file mode 100644
index 00000000000..f0bbab94c35
--- /dev/null
+++ b/app/services/import/source_users/accept_reassignment_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class AcceptReassignmentService < BaseService
+ def initialize(import_source_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user_matches_reassign_to_user
+
+ if import_source_user.accept
+ Import::ReassignPlaceholderUserRecordsWorker.perform_async(import_source_user.id)
+ ServiceResponse.success(payload: import_source_user)
+ else
+ ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)
+ end
+ end
+
+ private
+
+ def current_user_matches_reassign_to_user
+ return false if current_user.nil?
+
+ current_user.id == import_source_user.reassign_to_user_id
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/base_service.rb b/app/services/import/source_users/base_service.rb
new file mode 100644
index 00000000000..94b784b904d
--- /dev/null
+++ b/app/services/import/source_users/base_service.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class BaseService
+ private
+
+ attr_reader :import_source_user, :current_user
+
+ def error_invalid_permissions
+ ServiceResponse.error(
+ message: s_('Import|You have insufficient permissions to update the import source user'),
+ reason: :forbidden
+ )
+ end
+
+ def error_invalid_status
+ ServiceResponse.error(
+ message: s_('Import|Import source user has an invalid status for this operation'),
+ reason: :invalid_status,
+ payload: import_source_user
+ )
+ end
+
+ def send_user_reassign_email
+ Notify.import_source_user_reassign(import_source_user.id).deliver_now
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/cancel_reassignment_service.rb b/app/services/import/source_users/cancel_reassignment_service.rb
new file mode 100644
index 00000000000..65069d29742
--- /dev/null
+++ b/app/services/import/source_users/cancel_reassignment_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class CancelReassignmentService < BaseService
+ def initialize(import_source_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
+ return error_invalid_status unless import_source_user.cancelable_status?
+
+ if cancel_reassignment
+ ServiceResponse.success(payload: import_source_user)
+ else
+ ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)
+ end
+ end
+
+ private
+
+ def cancel_reassignment
+ import_source_user.reassign_to_user = nil
+ import_source_user.reassigned_by_user = nil
+ import_source_user.cancel_reassignment
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/generate_csv_service.rb b/app/services/import/source_users/generate_csv_service.rb
new file mode 100644
index 00000000000..115600200af
--- /dev/null
+++ b/app/services/import/source_users/generate_csv_service.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ # This class generates CSV data for `Import::SourceUser` records associated
+ # with a namespace. This spreadsheet is filled in and re-uploaded to
+ # facilitate the user mapping flow.
+ class GenerateCsvService
+ HEADERS = [
+ 'Source host',
+ 'Import type',
+ 'Source user identifier',
+ 'Source user name',
+ 'Source username',
+ 'GitLab username',
+ 'GitLab public email'
+ ].freeze
+
+ # @param namespace [Namespace, Group] The namespace where the import source users are associated
+ # @param current_user [User] The user performing the CSV export
+ def initialize(namespace, current_user:)
+ @namespace = namespace
+ @current_user = current_user
+ end
+
+ def execute
+ # We use :owner_access here because it's shared between GroupPolicy and
+ # NamespacePolicy.
+ return error_invalid_permissions unless current_user.can?(:owner_access, namespace)
+
+ ServiceResponse.success(payload: csv_data)
+ end
+
+ private
+
+ attr_reader :namespace, :current_user
+
+ def csv_data
+ CSV.generate do |csv|
+ csv << HEADERS
+
+ import_source_users.each_batch(of: 1000) do |batch|
+ batch.each do |source_user|
+ csv << [
+ source_user.source_hostname,
+ source_user.import_type,
+ source_user.source_user_identifier,
+ source_user.source_name,
+ source_user.source_username,
+ '',
+ ''
+ ]
+ end
+ end
+ end
+ end
+
+ def import_source_users
+ statuses = Import::SourceUser::STATUSES.slice(*Import::SourceUser::REASSIGNABLE_STATUSES).values
+ namespace.import_source_users.by_statuses(statuses)
+ end
+
+ def error_invalid_permissions
+ ServiceResponse.error(
+ message: s_('Import|You do not have permission to view import source users for this namespace'),
+ reason: :forbidden
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/keep_as_placeholder_service.rb b/app/services/import/source_users/keep_as_placeholder_service.rb
new file mode 100644
index 00000000000..3773dc622c4
--- /dev/null
+++ b/app/services/import/source_users/keep_as_placeholder_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class KeepAsPlaceholderService < BaseService
+ def initialize(import_source_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
+ return error_invalid_status unless import_source_user.reassignable_status?
+
+ if keep_as_placeholder
+ ServiceResponse.success(payload: import_source_user)
+ else
+ ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)
+ end
+ end
+
+ private
+
+ def keep_as_placeholder
+ import_source_user.reassign_to_user = nil
+ import_source_user.reassigned_by_user = current_user
+ import_source_user.keep_as_placeholder
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/reassign_service.rb b/app/services/import/source_users/reassign_service.rb
new file mode 100644
index 00000000000..e872bd124ec
--- /dev/null
+++ b/app/services/import/source_users/reassign_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class ReassignService < BaseService
+ def initialize(import_source_user, assignee_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ @assignee_user = assignee_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
+ return error_invalid_status unless import_source_user.reassignable_status?
+ return error_invalid_assignee unless valid_assignee?(assignee_user)
+
+ if reassign_user
+ send_user_reassign_email
+
+ ServiceResponse.success(payload: import_source_user)
+ else
+ ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)
+ end
+ end
+
+ private
+
+ attr_reader :assignee_user
+
+ def reassign_user
+ import_source_user.reassign_to_user = assignee_user
+ import_source_user.reassigned_by_user = current_user
+ import_source_user.reassign
+ end
+
+ def error_invalid_assignee
+ ServiceResponse.error(
+ message: s_('Import|Only active regular, auditor, or administrator users can be assigned'),
+ reason: :invalid_assignee,
+ payload: import_source_user
+ )
+ end
+
+ def valid_assignee?(user)
+ user.present? && user.human? && user.active?
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/reject_reassignment_service.rb b/app/services/import/source_users/reject_reassignment_service.rb
new file mode 100644
index 00000000000..ad99163ad6d
--- /dev/null
+++ b/app/services/import/source_users/reject_reassignment_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class RejectReassignmentService < BaseService
+ def initialize(import_source_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user_matches_reassign_to_user
+ return error_invalid_status unless import_source_user.awaiting_approval?
+
+ if reject
+ send_user_reassign_rejected_email
+
+ ServiceResponse.success(payload: import_source_user)
+ else
+ ServiceResponse.error(payload: import_source_user, message: import_source_user.errors.full_messages)
+ end
+ end
+
+ def send_user_reassign_rejected_email
+ Notify.import_source_user_rejected(import_source_user.id).deliver_now
+ end
+
+ private
+
+ def current_user_matches_reassign_to_user
+ return false if current_user.nil?
+
+ current_user.id == import_source_user.reassign_to_user_id
+ end
+
+ def reject
+ import_source_user.reject
+ end
+ end
+ end
+end
diff --git a/app/services/import/source_users/resend_notification_service.rb b/app/services/import/source_users/resend_notification_service.rb
new file mode 100644
index 00000000000..af714948c1d
--- /dev/null
+++ b/app/services/import/source_users/resend_notification_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Import
+ module SourceUsers
+ class ResendNotificationService < BaseService
+ def initialize(import_source_user, current_user:)
+ @import_source_user = import_source_user
+ @current_user = current_user
+ end
+
+ def execute
+ return error_invalid_permissions unless current_user.can?(:admin_import_source_user, import_source_user)
+ return error_invalid_status unless import_source_user.awaiting_approval?
+
+ send_user_reassign_email
+
+ ServiceResponse.success(payload: import_source_user)
+ end
+ end
+ end
+end
diff --git a/app/services/import/validate_remote_git_endpoint_service.rb b/app/services/import/validate_remote_git_endpoint_service.rb
index 8297757997f..5eb65b6dc08 100644
--- a/app/services/import/validate_remote_git_endpoint_service.rb
+++ b/app/services/import/validate_remote_git_endpoint_service.rb
@@ -18,6 +18,7 @@ module Import
def initialize(params)
@params = params
+ @auth = nil
end
def execute
@@ -27,6 +28,11 @@ module Import
return ServiceResponse.error(message: "#{@params[:url]} is not a valid URL")
end
+ # Credentials extracted from URL will be rewritten
+ # if credentials were also set via params
+ extract_auth_credentials!(uri)
+ set_auth_from_params
+
return ServiceResponse.success if uri.scheme == 'git'
uri.fragment = nil
@@ -46,6 +52,22 @@ module Import
private
+ attr_reader :auth
+
+ def extract_auth_credentials!(uri)
+ if uri.userinfo.present?
+ @auth = { username: uri.user, password: uri.password }
+
+ # Remove username/password params from URL after extraction,
+ # because they will be sent via Basic authorization header
+ uri.userinfo = nil
+ end
+ end
+
+ def set_auth_from_params
+ @auth = { username: @params[:user], password: @params[:password] } if @params[:user].present?
+ end
+
def http_get_and_extract_first_chunks(url)
# We are interested only in the first chunks of the response
# So we're using stream_body: true and breaking when receive enough body
@@ -61,15 +83,6 @@ module Import
[response, response_body]
end
- def auth
- unless @params[:user].to_s.blank?
- {
- username: @params[:user],
- password: @params[:password]
- }
- end
- end
-
def validate(uri, response, response_body)
return status_code_error(uri, response) unless status_code_is_valid?(response)
return error_result(INVALID_CONTENT_TYPE_MESSAGE) unless content_type_is_valid?(response)
diff --git a/app/services/import_export_clean_up_service.rb b/app/services/import_export_clean_up_service.rb
index 5ee2f70ec4c..74ca3f5df92 100644
--- a/app/services/import_export_clean_up_service.rb
+++ b/app/services/import_export_clean_up_service.rb
@@ -64,6 +64,6 @@ class ImportExportCleanUpService
end
def logger
- @logger ||= Gitlab::Import::Logger.build
+ @logger ||= ::Import::Framework::Logger.build
end
end
diff --git a/app/services/incident_management/timeline_events/create_service.rb b/app/services/incident_management/timeline_events/create_service.rb
index b2ea1f1b020..f8c7197df0b 100644
--- a/app/services/incident_management/timeline_events/create_service.rb
+++ b/app/services/incident_management/timeline_events/create_service.rb
@@ -65,7 +65,7 @@ module IncidentManagement
return ServiceResponse.error(message: _('There are no changed labels'))
end
- labels_note = -> (verb, labels) {
+ labels_note = ->(verb, labels) {
"#{verb} #{labels.map(&:to_reference).join(' ')} #{'label'.pluralize(labels.count)}" if labels.present?
}
diff --git a/app/services/integrations/exclusions/base_service.rb b/app/services/integrations/exclusions/base_service.rb
new file mode 100644
index 00000000000..2a7cf6d1c3b
--- /dev/null
+++ b/app/services/integrations/exclusions/base_service.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Integrations
+ module Exclusions
+ class BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(current_user:, integration_name:, projects: [], groups: [])
+ @user = current_user
+ @integration_name = integration_name
+ @projects = projects
+ @groups = groups
+ end
+
+ private
+
+ attr_reader :user, :integration_name, :projects, :groups
+
+ def validate
+ return ServiceResponse.error(message: 'not authorized') unless allowed?
+ return ServiceResponse.error(message: 'not instance specific') unless instance_specific_integration?
+
+ ServiceResponse.success(payload: []) unless projects.present? || groups.present?
+ end
+
+ def allowed?
+ user.can?(:admin_all_resources)
+ end
+
+ def instance_specific_integration?
+ Integration.instance_specific_integration_names.include?(integration_name)
+ end
+
+ def instance_integration
+ integration_model.for_instance.first
+ end
+ strong_memoize_attr :instance_integration
+
+ def integration_model
+ Integration.integration_name_to_model(integration_name)
+ end
+ strong_memoize_attr :integration_model
+
+ def integration_type
+ Integration.integration_name_to_type(integration_name)
+ end
+ strong_memoize_attr :integration_type
+ end
+ end
+end
diff --git a/app/services/integrations/exclusions/create_service.rb b/app/services/integrations/exclusions/create_service.rb
new file mode 100644
index 00000000000..e204250d091
--- /dev/null
+++ b/app/services/integrations/exclusions/create_service.rb
@@ -0,0 +1,121 @@
# frozen_string_literal: true

# Exclusions are custom settings at the group or project level used to selectively deactivate an instance integration
# https://gitlab.com/gitlab-org/gitlab/-/issues/454372
module Integrations
  module Exclusions
    # Creates exclusion records (deactivated, non-inheriting integrations) for
    # the given projects and groups. Returns a ServiceResponse whose payload is
    # the collection of created/updated integration records.
    class CreateService < BaseService
      # Upper bounds on how many records one call may create.
      MAX_PROJECTS = 100
      MAX_GROUPS = 100

      def execute
        result = validate
        return result if result.present?

        return ServiceResponse.error(message: 'project limit exceeded') if projects_over_limit?
        return ServiceResponse.error(message: 'group limit exceeded') if groups_over_limit?

        project_integrations = create_project_integrations
        group_integrations = create_group_integrations
        ServiceResponse.success(payload: group_integrations + project_integrations)
      end

      private

      # Upserts a deactivated, non-inheriting integration row per filtered project.
      # Returns the affected Integration records (Integration.none when empty).
      def create_project_integrations
        projects = filtered_projects
        return Integration.none unless projects.present?

        integration_attrs = projects.map do |project|
          {
            project_id: project.id,
            type_new: integration_type,
            active: false,
            inherit_from_id: nil
          }
        end

        # upsert_all: existing (project_id, type_new) rows are updated in place.
        result = Integration.upsert_all(integration_attrs, unique_by: [:project_id, :type_new])
        Integration.id_in(result.rows.flatten)
      end

      # For each filtered group, either converts an existing group-level
      # integration into an exclusion (active: false, no inheritance) or
      # inserts a new one; then enqueues propagation to descendants.
      def create_group_integrations
        groups = filtered_groups
        return Integration.none unless groups.present?

        integrations_for_groups = integration_model.for_group(groups)
        existing_group_ids = integrations_for_groups.map(&:group_id).to_set
        groups_missing_integrations = groups.reject do |g|
          existing_group_ids.include?(g.id)
        end

        # Existing records that are inherited or active need to be flipped into
        # exclusion state; already-excluded records are left untouched.
        integrations_to_update = integrations_for_groups.select do |integration|
          integration.inherit_from_id.present? || integration.activated?
        end
        integration_ids_to_update = integrations_to_update.map(&:id)
        integration_model.id_in(integration_ids_to_update).update_all(inherit_from_id: nil, active: false)

        integration_attrs = groups_missing_integrations.map do |g|
          {
            group_id: g.id,
            active: false,
            inherit_from_id: nil,
            type_new: integration_type
          }
        end

        created_group_integration_ids = []
        if integration_attrs.present?
          created_group_integration_ids = Integration.insert_all(integration_attrs,
            returning: :id).rows.flatten
        end

        new_exclusions = Integration.id_in(integration_ids_to_update + created_group_integration_ids)
        new_exclusions.each do |integration|
          PropagateIntegrationWorker.perform_async(integration.id)
        end
        new_exclusions
      end

      # Exclusions for groups should propagate to subgroup children.
      # Skip creating integrations for subgroups and projects that would already be deactivated by an ancestor
      # integration.
      # Also skip for projects and groups that would be deactivated by creating an integration for another group in the
      # same call to #execute.
      def filtered_groups
        group_ids = groups.map(&:id) + ancestor_integration_group_ids
        groups.reject do |g|
          g.ancestor_ids.intersect?(group_ids)
        end
      end
      strong_memoize_attr :filtered_groups

      # Projects whose group hierarchy is not already covered by a group exclusion.
      def filtered_projects
        filtered_group_ids = filtered_groups.map(&:id) + ancestor_integration_group_ids

        projects.reject do |p|
          # Safe navigation: personal (non-group) projects have no group.
          p&.group&.self_and_ancestor_ids&.intersect?(filtered_group_ids)
        end
      end
      strong_memoize_attr :filtered_projects

      # Group IDs of ancestor groups that already carry a custom-settings
      # integration of this type (memoized). Used to avoid redundant exclusions.
      def ancestor_integration_group_ids
        integration_model
          .with_custom_settings
          .for_group(
            (groups.flat_map(&:traversal_ids) + projects.flat_map { |p| p&.group&.traversal_ids }).compact.uniq
          ).limit(MAX_GROUPS + MAX_PROJECTS)
          .pluck_group_id
      end
      strong_memoize_attr :ancestor_integration_group_ids

      def projects_over_limit?
        projects.size > MAX_PROJECTS
      end

      def groups_over_limit?
        groups.size > MAX_GROUPS
      end
    end
  end
end
diff --git a/app/services/integrations/exclusions/destroy_service.rb b/app/services/integrations/exclusions/destroy_service.rb
new file mode 100644
index 00000000000..f09d509e88c
--- /dev/null
+++ b/app/services/integrations/exclusions/destroy_service.rb
@@ -0,0 +1,40 @@
# frozen_string_literal: true

module Integrations
  module Exclusions
    # Removes exclusions for the given projects/groups. When an instance-level
    # integration exists, exclusion records are reset to inherit from it
    # (via BulkUpdateService) instead of being destroyed outright.
    class DestroyService < BaseService
      def execute
        result = validate
        return result if result.present?

        destroy_exclusions
      end

      private

      # Returns a success ServiceResponse whose payload is the destroyed or
      # re-inherited exclusion records (empty array when none matched).
      def destroy_exclusions
        # Union of group-level deactivated records and project-level exclusions
        # that carry custom settings for this integration type.
        exclusions = integration_model.from_union([
          integration_model.with_custom_settings.by_active_flag(false).for_group(groups),
          integration_model.with_custom_settings.exclusions_for_project(projects)
        ])

        return ServiceResponse.success(payload: []) unless exclusions.present?

        unless instance_integration
          # No instance integration to inherit from: delete the records entirely.
          # rubocop:disable Cop/DestroyAll -- loading objects into memory to run callbacks and return objects
          return ServiceResponse.success(payload: exclusions.destroy_all)
          # rubocop:enable Cop/DestroyAll
        end

        # Re-sync the exclusions with the instance-level integration's settings.
        ::Integrations::Propagation::BulkUpdateService.new(instance_integration, exclusions).execute

        # Group-level records must additionally propagate to their descendants.
        group_exclusions = exclusions.select(&:group_level?)
        group_exclusions.each do |exclusion|
          PropagateIntegrationWorker.perform_async(exclusion.id)
        end

        ServiceResponse.success(payload: exclusions)
      end
    end
  end
end
diff --git a/app/services/integrations/propagate_service.rb b/app/services/integrations/propagate_service.rb
index 6d27929d2d0..8cce1801506 100644
--- a/app/services/integrations/propagate_service.rb
+++ b/app/services/integrations/propagate_service.rb
@@ -8,26 +8,29 @@ module Integrations
@integration = integration
end
- def propagate
- if integration.instance_level?
- update_inherited_integrations
- create_integration_for_groups_without_integration
- create_integration_for_projects_without_integration
+ def execute
+ return propagate_instance_level_integration if integration.instance_level?
+
+ if integration.class.instance_specific?
+ update_descendant_integrations
else
update_inherited_descendant_integrations
- create_integration_for_groups_without_integration_belonging_to_group
- create_integration_for_projects_without_integration_belonging_to_group
end
- end
- def self.propagate(integration)
- new(integration).propagate
+ create_integration_for_groups_without_integration_belonging_to_group
+ create_integration_for_projects_without_integration_belonging_to_group
end
private
attr_reader :integration
+ def propagate_instance_level_integration
+ update_inherited_integrations
+ create_integration_for_groups_without_integration
+ create_integration_for_projects_without_integration
+ end
+
def create_integration_for_projects_without_integration
propagate_integrations(
Project.without_integration(integration),
@@ -42,6 +45,13 @@ module Integrations
)
end
+ def update_descendant_integrations
+ propagate_integrations(
+ Integration.descendants_from_self_or_ancestors_from(integration),
+ PropagateIntegrationDescendantWorker
+ )
+ end
+
def update_inherited_descendant_integrations
propagate_integrations(
Integration.inherited_descendants_from_self_or_ancestors_from(integration),
diff --git a/app/services/integrations/propagation/bulk_create_service.rb b/app/services/integrations/propagation/bulk_create_service.rb
new file mode 100644
index 00000000000..e9c192633c8
--- /dev/null
+++ b/app/services/integrations/propagation/bulk_create_service.rb
@@ -0,0 +1,90 @@
# frozen_string_literal: true

module Integrations
  module Propagation
    # Bulk-creates copies of a source integration for a batch of records
    # (projects or groups), including associated data fields and — for the
    # GitLab for Slack app — SlackIntegration rows and their API scopes.
    # All inserts run in a single database transaction.
    class BulkCreateService
      include BulkOperationHashes

      # @param integration [Integration] the source integration to copy
      # @param batch [Enumerable] records to receive copies (projects or groups)
      # @param association [String, Symbol] "project" or "group"; used to build
      #   the foreign-key column name ("#{association}_id")
      def initialize(integration, batch, association)
        @integration = integration
        @batch = batch.to_a
        @association = association
      end

      def execute
        Integration.transaction do
          inserted_ids = bulk_insert_integrations

          bulk_insert_data_fields(inserted_ids) if integration.data_fields_present?

          if integration.is_a?(GitlabSlackApplication) && integration.active? &&
              Feature.enabled?(:gitlab_for_slack_app_instance_and_group_level, type: :beta)
            inserted_slack_ids = bulk_insert_slack_integrations(inserted_ids)
            bulk_insert_slack_integration_scopes(inserted_slack_ids)
          end
        end
      end

      private

      attr_reader :integration, :batch, :association

      # Inserts rows and returns the new primary-key IDs (skips AR callbacks).
      def bulk_insert_new(model, items_to_insert)
        model.insert_all(
          items_to_insert,
          returning: [:id]
        ).rows.flatten
      end

      # One Integration row per batch record, copying the source's attributes.
      def bulk_insert_integrations
        attributes = integration_hash(:create)

        items_to_insert = batch.map do |record|
          attributes.merge("#{association}_id" => record.id)
        end

        bulk_insert_new(Integration, items_to_insert)
      end

      # Copies the source integration's data fields for each new integration ID.
      def bulk_insert_data_fields(integration_ids)
        model = integration.data_fields.class
        # FK column name differs per data-fields model (e.g. service_id vs integration_id).
        integration_fk_name = model.reflections['integration'].foreign_key
        attributes = data_fields_hash(:create)

        items_to_insert = integration_ids.map do |id|
          attributes.merge(integration_fk_name => id)
        end

        bulk_insert_new(model, items_to_insert)
      end

      # One SlackIntegration row per new integration; relies on insert_all
      # preserving insertion order so IDs zip positionally with batch records.
      def bulk_insert_slack_integrations(integration_ids)
        hash = integration.slack_integration.to_database_hash

        items_to_insert = integration_ids.zip(batch).map do |integration_id, record|
          hash.merge(
            'integration_id' => integration_id,
            'alias' => record.full_path
          )
        end

        bulk_insert_new(SlackIntegration, items_to_insert)
      end

      # Cross product of new SlackIntegration rows and the source's API scopes.
      def bulk_insert_slack_integration_scopes(inserted_slack_ids)
        scopes = integration.slack_integration.slack_api_scopes

        items_to_insert = scopes.flat_map do |scope|
          inserted_slack_ids.map do |record_id|
            {
              'slack_integration_id' => record_id,
              'slack_api_scope_id' => scope.id
            }
          end
        end

        bulk_insert_new(SlackWorkspace::IntegrationApiScope, items_to_insert)
      end
    end
  end
end
diff --git a/app/services/integrations/propagation/bulk_update_service.rb b/app/services/integrations/propagation/bulk_update_service.rb
new file mode 100644
index 00000000000..615dce6d937
--- /dev/null
+++ b/app/services/integrations/propagation/bulk_update_service.rb
@@ -0,0 +1,73 @@
# frozen_string_literal: true

module Integrations
  module Propagation
    # Bulk-updates a batch of existing integrations (and their data fields and
    # Slack records) to match a source integration's settings, inside a single
    # transaction. Uses update_all/delete_all, so no AR callbacks run.
    class BulkUpdateService
      include BulkOperationHashes

      # @param integration [Integration] the source of truth to copy from
      # @param batch [ActiveRecord::Relation, Array<Integration>] targets
      def initialize(integration, batch)
        @integration = integration
        @batch = batch
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def execute
        Integration.transaction do
          Integration.where(id: batch_ids).update_all(integration_hash(:update))

          if integration.data_fields_present?
            integration.data_fields.class.where(data_fields_foreign_key => batch_ids)
              .update_all(
                data_fields_hash(:update)
              )
          end

          if integration.is_a?(GitlabSlackApplication) &&
              Feature.enabled?(:gitlab_for_slack_app_instance_and_group_level, type: :beta)
            # Active source: sync Slack records; inactive source: remove them.
            if integration.active? # rubocop: disable Cop/LineBreakAroundConditionalBlock -- Misidentified
              bulk_update_slack_integrations
            else
              bulk_delete_slack_integrations
            end
          end
        end
      end
      # rubocop: enable CodeReuse/ActiveRecord

      private

      attr_reader :integration, :batch

      # service_id or integration_id
      def data_fields_foreign_key
        integration.data_fields.class.reflections['integration'].foreign_key
      end

      # IDs of the batch; kept as a subquery for relations to avoid loading rows.
      def batch_ids
        @batch_ids ||=
          if batch.is_a?(ActiveRecord::Relation)
            batch.select(:id)
          else
            batch.map(&:id)
          end
      end

      # Copies the source's SlackIntegration attributes and API scopes onto
      # the batch's SlackIntegration rows.
      def bulk_update_slack_integrations
        slack_integration_batch = SlackIntegration.by_integration(batch_ids)

        slack_integration_batch.update_all(
          integration.slack_integration.to_database_hash
        )

        Integrations::SlackWorkspace::IntegrationApiScope.update_scopes(
          slack_integration_batch.pluck_primary_key,
          integration.slack_integration.slack_api_scopes
        )
      end

      def bulk_delete_slack_integrations
        SlackIntegration.by_integration(batch_ids).delete_all
      end
    end
  end
end
diff --git a/app/services/integrations/slack_installation/base_service.rb b/app/services/integrations/slack_installation/base_service.rb
new file mode 100644
index 00000000000..c9781533cdf
--- /dev/null
+++ b/app/services/integrations/slack_installation/base_service.rb
@@ -0,0 +1,117 @@
# frozen_string_literal: true

# Base class for services that handle enabling the GitLab for Slack app integration.
#
# Inheriting services should define these methods:
# - `#authorized?` return true if the user is authorized to install the app
# - `#redirect_uri` return the redirect URI for the OAuth flow
# - `#find_or_create_integration` find or create the Integrations::GitlabSlackApplication record
# - `#installation_alias` return the alias property for the SlackIntegration record
module Integrations
  module SlackInstallation
    class BaseService
      include Gitlab::Routing

      # Endpoint to initiate the OAuth flow, redirects to Slack's authorization screen
      # https://api.slack.com/authentication/oauth-v2#asking
      SLACK_AUTHORIZE_URL = 'https://slack.com/oauth/v2/authorize'

      # Endpoint to exchange the temporary authorization code for an access token
      # https://api.slack.com/authentication/oauth-v2#exchanging
      SLACK_EXCHANGE_TOKEN_URL = 'https://slack.com/api/oauth.v2.access'

      # @param current_user [User] the user completing the OAuth flow
      # @param params [Hash] OAuth callback params; `params[:code]` is the
      #   temporary authorization code from Slack
      def initialize(current_user:, params:)
        @current_user = current_user
        @params = params
      end

      # Exchanges the OAuth code for a token, persists the SlackIntegration
      # record, and propagates changes. Returns a ServiceResponse.
      def execute
        unless Gitlab::CurrentSettings.slack_app_enabled
          return ServiceResponse.error(message: s_('SlackIntegration|Slack app not enabled on GitLab instance'))
        end

        return ServiceResponse.error(message: s_('SlackIntegration|Unauthorized')) unless authorized?

        begin
          slack_data = exchange_slack_token
        rescue *::Gitlab::HTTP::HTTP_ERRORS => e
          return ServiceResponse
            .error(message: s_('SlackIntegration|Error exchanging OAuth token with Slack'))
            .track_exception(as: e.class)
        end

        # Slack reports application-level failures via an 'ok' flag in the body.
        unless slack_data['ok']
          return ServiceResponse.error(
            message: format(
              s_('SlackIntegration|Error exchanging OAuth token with Slack: %{error}'),
              error: slack_data['error']
            )
          )
        end

        integration = find_or_create_integration!
        installation = integration.slack_integration || integration.build_slack_integration

        installation.update!(
          bot_user_id: slack_data['bot_user_id'],
          bot_access_token: slack_data['access_token'],
          team_id: slack_data.dig('team', 'id'),
          team_name: slack_data.dig('team', 'name'),
          alias: installation_alias,
          user_id: slack_data.dig('authed_user', 'id'),
          authorized_scope_names: slack_data['scope']
        )

        update_other_installations!(installation)

        PropagateIntegrationWorker.perform_async(integration.id) unless integration.project_level?

        ServiceResponse.success
      end

      private

      attr_reader :current_user, :params

      # Calls Slack's oauth.v2.access endpoint; returns the parsed response hash.
      # May raise any of Gitlab::HTTP::HTTP_ERRORS (handled by #execute).
      def exchange_slack_token
        query = {
          client_id: Gitlab::CurrentSettings.slack_app_id,
          client_secret: Gitlab::CurrentSettings.slack_app_secret,
          code: params[:code],
          redirect_uri: redirect_uri
        }

        Gitlab::HTTP.get(SLACK_EXCHANGE_TOKEN_URL, query: query).to_hash
      end

      # Due to our modelling (mentioned in epic 9418) we create a SlackIntegration record
      # for a Slack workspace (team_id) for every GitLab for Slack integration.
      # The repetition is redundant, and we should more correctly only create
      # a single record per workspace.
      #
      # Records that share a team_id (Slack workspace ID) should have identical bot token
      # and permission scope data. We currently paper over the modelling problem
      # by mass-updating all records that share a team_id so they always reflect the same
      # state for this data. This means if we release a new version of the GitLab for Slack
      # app that has a new required permission scope, the first time the workspace authorizes
      # the new scope all other records for their workspace will be updated with the latest
      # authorization data for that workspace.
      def update_other_installations!(installation)
        updatable_attributes = installation.attributes.slice(
          'user_id',
          'bot_user_id',
          'encrypted_bot_access_token',
          'encrypted_bot_access_token_iv',
          'updated_at'
        )

        SlackIntegration.by_team(installation.team_id).id_not_in(installation.id).each_batch do |batch|
          batch_ids = batch.pluck_primary_key
          batch.update_all(updatable_attributes)

          Integrations::SlackWorkspace::IntegrationApiScope.update_scopes(batch_ids, installation.slack_api_scopes)
        end
      end
    end
  end
end
diff --git a/app/services/integrations/slack_installation/group_service.rb b/app/services/integrations/slack_installation/group_service.rb
new file mode 100644
index 00000000000..8e721859ff1
--- /dev/null
+++ b/app/services/integrations/slack_installation/group_service.rb
@@ -0,0 +1,33 @@
# frozen_string_literal: true

module Integrations
  module SlackInstallation
    # Installs the GitLab for Slack app at the group level.
    # Supplies the group-specific pieces of the OAuth flow to BaseService.
    class GroupService < BaseService
      # @param group [Group] the group the app is being installed for
      def initialize(group, current_user:, params:)
        @group = group

        super(current_user: current_user, params: params)
      end

      private

      attr_reader :group

      # OAuth redirect back to the group's Slack settings page.
      def redirect_uri
        slack_auth_group_settings_slack_url(group)
      end

      def installation_alias
        group.full_path
      end

      # Requires group admin rights.
      def authorized?
        current_user.can?(:admin_group, group)
      end

      def find_or_create_integration!
        GitlabSlackApplication.for_group(group).first_or_create!
      end
    end
  end
end
diff --git a/app/services/integrations/slack_installation/instance_service.rb b/app/services/integrations/slack_installation/instance_service.rb
new file mode 100644
index 00000000000..6b6e2950ca2
--- /dev/null
+++ b/app/services/integrations/slack_installation/instance_service.rb
@@ -0,0 +1,25 @@
# frozen_string_literal: true

module Integrations
  module SlackInstallation
    # Installs the GitLab for Slack app at the instance level.
    # Supplies the instance-specific pieces of the OAuth flow to BaseService.
    class InstanceService < BaseService
      private

      # OAuth redirect back to the admin application settings page.
      def redirect_uri
        slack_auth_admin_application_settings_slack_url
      end

      # Fixed alias for the single instance-level installation record.
      def installation_alias
        '_gitlab-instance'
      end

      # Requires instance admin rights.
      def authorized?
        current_user.can_admin_all_resources?
      end

      def find_or_create_integration!
        GitlabSlackApplication.for_instance.first_or_create!
      end
    end
  end
end
diff --git a/app/services/integrations/slack_installation/project_service.rb b/app/services/integrations/slack_installation/project_service.rb
new file mode 100644
index 00000000000..b90624a8906
--- /dev/null
+++ b/app/services/integrations/slack_installation/project_service.rb
@@ -0,0 +1,33 @@
# frozen_string_literal: true

module Integrations
  module SlackInstallation
    # Installs the GitLab for Slack app at the project level.
    # Supplies the project-specific pieces of the OAuth flow to BaseService.
    class ProjectService < BaseService
      # @param project [Project] the project the app is being installed for
      def initialize(project, current_user:, params:)
        @project = project

        super(current_user: current_user, params: params)
      end

      private

      attr_reader :project

      # OAuth redirect back to the project's Slack settings page.
      def redirect_uri
        slack_auth_project_settings_slack_url(project)
      end

      def installation_alias
        project.full_path
      end

      # Requires project admin rights.
      def authorized?
        current_user.can?(:admin_project, project)
      end

      def find_or_create_integration!
        project.gitlab_slack_application_integration || project.create_gitlab_slack_application_integration!
      end
    end
  end
end
diff --git a/app/services/integrations/test/group_service.rb b/app/services/integrations/test/group_service.rb
new file mode 100644
index 00000000000..d10b4609199
--- /dev/null
+++ b/app/services/integrations/test/group_service.rb
@@ -0,0 +1,25 @@
# frozen_string_literal: true

module Integrations
  module Test
    # Builds test payload data for group-level integration "Test" buttons.
    class GroupService < Integrations::Test::BaseService
      include Integrations::GroupTestData
      include Gitlab::Utils::StrongMemoize

      # The group that owns the integration under test (memoized).
      def group
        integration.group
      end
      strong_memoize_attr :group

      private

      # Sample event payload for the requested (or default) test event.
      # Only push-style events are supported at group level; other events
      # yield nil (memoized).
      def data
        case event || integration.default_test_event
        when 'push', 'tag_push'
          push_events_data
        end
      end
      strong_memoize_attr :data
    end
  end
end
diff --git a/app/services/integrations/test/project_service.rb b/app/services/integrations/test/project_service.rb
index 48240f297fe..1e077d49e5a 100644
--- a/app/services/integrations/test/project_service.rb
+++ b/app/services/integrations/test/project_service.rb
@@ -37,6 +37,8 @@ module Integrations
releases_events_data
when 'award_emoji'
emoji_events_data
+ when 'current_user'
+ current_user_events_data
end
end
end
diff --git a/app/services/integrations/update_service.rb b/app/services/integrations/update_service.rb
new file mode 100644
index 00000000000..ba8af9c5cfb
--- /dev/null
+++ b/app/services/integrations/update_service.rb
@@ -0,0 +1,69 @@
# frozen_string_literal: true

module Integrations
  # Updates an integration's settings, handling the toggle between inheriting
  # from a parent/default integration and using the integration's own settings.
  class UpdateService
    include ::Services::ReturnServiceResponses
    include Gitlab::Utils::StrongMemoize

    # @param current_user [User] the acting user
    # @param integration [Integration, nil] the record to update
    # @param attributes [Hash] new attributes; may contain the pseudo-attribute
    #   :use_inherited_settings to switch inheritance on or off
    def initialize(current_user:, integration:, attributes:)
      @current_user = current_user
      @integration = integration
      @attributes = attributes
    end

    def execute
      return error('Integration not found.', :not_found) unless integration

      if handle_inherited_settings?
        handle_inherited_settings
      else
        handle_default_settings
      end
    end

    private

    attr_reader :current_user, :integration, :attributes

    # Explicit :use_inherited_settings param wins; otherwise keep the record's
    # current inheritance state.
    def handle_inherited_settings?
      if attributes.key?(:use_inherited_settings)
        Gitlab::Utils.to_boolean(attributes[:use_inherited_settings], default: false)
      else
        integration.inherit_from_id?
      end
    end

    # The parent/default integration this record would inherit from (memoized).
    def default_integration
      ::Integration.default_integration(integration.type, integration.parent)
    end
    strong_memoize_attr :default_integration

    # Points the integration at its default and, for project-level records,
    # immediately syncs settings from the default via BulkUpdateService.
    def handle_inherited_settings
      return error('Default integration not found.', :not_found) unless default_integration

      integration.inherit_from_id = default_integration.id

      unless integration.save(context: :manual_change)
        return error("Failed to update integration. #{integration.errors.messages}", :bad_request)
      end

      if integration.project_level?
        ::Integrations::Propagation::BulkUpdateService.new(default_integration, [integration]).execute
      end

      success(integration)
    end

    # Applies the given attributes directly and severs inheritance.
    def handle_default_settings
      attributes.delete(:use_inherited_settings)
      integration.inherit_from_id = nil
      integration.attributes = attributes

      if integration.save(context: :manual_change)
        success(integration)
      else
        error("Failed to update integration. #{integration.errors.messages}", :bad_request)
      end
    end
  end
end
diff --git a/app/services/issuable/callbacks/base.rb b/app/services/issuable/callbacks/base.rb
index 368dd76c16c..024fa20ecbc 100644
--- a/app/services/issuable/callbacks/base.rb
+++ b/app/services/issuable/callbacks/base.rb
@@ -3,18 +3,21 @@
module Issuable
module Callbacks
class Base
+ Error = Class.new(StandardError)
include Gitlab::Allowable
- def initialize(issuable:, current_user:, params:)
+ def initialize(issuable:, current_user:, params: {})
@issuable = issuable
@current_user = current_user
@params = params
end
def after_initialize; end
+ def before_create; end
def before_update; end
def after_update_commit; end
def after_save_commit; end
+ def after_save; end
private
@@ -27,6 +30,10 @@ module Issuable
def has_permission?(permission)
can?(current_user, permission, issuable)
end
+
+ def raise_error(message)
+ raise ::Issuable::Callbacks::Base::Error, message
+ end
end
end
end
diff --git a/app/services/issuable/callbacks/time_tracking.rb b/app/services/issuable/callbacks/time_tracking.rb
new file mode 100644
index 00000000000..4d2d0d9d8ae
--- /dev/null
+++ b/app/services/issuable/callbacks/time_tracking.rb
@@ -0,0 +1,58 @@
# frozen_string_literal: true

module Issuable
  module Callbacks
    # Callback that normalizes time-tracking params (time estimate and time
    # spent) on issuables during create/update, including values supplied via
    # the work item `time_tracking_widget` GraphQL argument.
    class TimeTracking < Base
      ALLOWED_PARAMS = %i[time_estimate spend_time timelog].freeze

      def after_initialize
        # Drop time-tracking params entirely when converting to a work item
        # type that does not support this widget.
        if excluded_in_new_type?
          params.delete(:time_estimate)
          params.delete(:spend_time)
          params.delete(:timelog)
        end

        return unless has_permission?(:"admin_#{issuable.to_ability_name}")

        # The two parse_*_data methods below parse the data coming in from the
        # `time_tracking_widget` argument of the work item update mutation.
        parse_timelog_data if params.key?(:timelog) && !params[:spend_time]
        parse_time_estimate_data if params.key?(:time_estimate) && params[:time_estimate].is_a?(String)

        # We still need to set the data here for the case where no data came in
        # from the `time_tracking_widget` argument, but data was still set by
        # updating the description and using quick actions.
        issuable.time_estimate = params[:time_estimate] if params.has_key?(:time_estimate)
        issuable.spend_time = params[:spend_time] if params[:spend_time].present?
      end

      private

      # Converts params[:timelog] (human-duration string or the ":reset"
      # sentinel) into the params[:spend_time] hash the model expects.
      # Raises via raise_error when the duration cannot be parsed.
      def parse_timelog_data
        time_spent = params.dig(:timelog, :time_spent)
        parsed_time_spent = if time_spent == ":reset"
                              :reset
                            else
                              Gitlab::TimeTrackingFormatter.parse(time_spent)
                            end

        raise_error(invalid_time_spent_format('Time spent')) if parsed_time_spent.nil?

        params[:spend_time] = { duration: parsed_time_spent, user_id: current_user.id }.merge(params[:timelog])
      end

      # Coerces a string time estimate to seconds: plain integers pass through,
      # otherwise the value is parsed as a human duration (e.g. "1h 30m").
      # Raises via raise_error when neither form parses.
      def parse_time_estimate_data
        params[:time_estimate] = begin
          Integer(params[:time_estimate] || '')
        rescue ArgumentError
          parsed_time_estimate = Gitlab::TimeTrackingFormatter.parse(params[:time_estimate])
          raise_error(invalid_time_spent_format('Time estimate')) if parsed_time_estimate.nil?
          parsed_time_estimate
        end
      end

      # User-facing validation message for unparseable durations.
      def invalid_time_spent_format(argument_name)
        format(_("%{argument_name} must be formatted correctly. For example: 1h 30m."), argument_name: argument_name)
      end
    end
  end
end
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index d7fdd235db1..5cbb1fb3df6 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -13,7 +13,7 @@ module Issuable
# Using transaction because of a high resources footprint
# on rewriting notes (unfolding references)
- #
+
ApplicationRecord.transaction do
@new_entity = create_new_entity
@new_entity.system_note_timestamp = nil
@@ -21,6 +21,7 @@ module Issuable
update_new_entity
update_old_entity
create_notes
+ after_clone_actions
end
end
@@ -28,6 +29,9 @@ module Issuable
attr_reader :target_parent
+ # Overwritten in child class
+ def after_clone_actions; end
+
def rewritten_old_entity_attributes(include_milestone: true)
Gitlab::Issuable::Clone::AttributesRewriter.new(
current_user,
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index a0fa1616f7b..2620c7746ab 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -32,8 +32,15 @@ module Issuable
def handle_start_date_or_due_date_change_note
# Type check needed as some issuables do their own date change handling for date fields other than due_date
- change_date_fields = issuable.is_a?(Issue) ? %w[due_date start_date] : %w[due_date]
- changed_dates = issuable.previous_changes.slice(*change_date_fields)
+ changed_dates =
+ if issuable.is_a?(WorkItem) && issuable.dates_source.present?
+ issuable.dates_source.previous_changes&.slice(*%w[due_date start_date])
+ elsif issuable.is_a?(Issue)
+ issuable.previous_changes&.slice(*%w[due_date start_date])
+ else
+ issuable.previous_changes.slice(:due_date)
+ end
+
create_start_date_or_due_date_note(changed_dates)
end
diff --git a/app/services/issuable/destroy_service.rb b/app/services/issuable/destroy_service.rb
index 47770d101f9..38515149d07 100644
--- a/app/services/issuable/destroy_service.rb
+++ b/app/services/issuable/destroy_service.rb
@@ -8,6 +8,10 @@ module Issuable
end
def execute(issuable)
+ # load sync object before destroy otherwise we cannot access it for
+ # deletion of label links in delete_label_links
+ @synced_object_to_delete = issuable.try(:sync_object)
+
before_destroy(issuable)
after_destroy(issuable) if issuable.destroy
end
@@ -23,26 +27,38 @@ module Issuable
issuable.assignees.each(&:invalidate_cache_counts)
end
- def group_for(issuable)
- issuable.resource_parent.group
- end
-
def delete_associated_records(issuable)
- actor = group_for(issuable)
-
- delete_todos(actor, issuable)
- delete_label_links(actor, issuable)
+ delete_todos(issuable)
+ delete_label_links(issuable)
end
- def delete_todos(actor, issuable)
+ def delete_todos(issuable)
+ synced_object_to_delete = @synced_object_to_delete
+
issuable.run_after_commit_or_now do
- TodosDestroyer::DestroyedIssuableWorker.perform_async(issuable.id, issuable.class.name)
+ TodosDestroyer::DestroyedIssuableWorker.perform_async(issuable.id, issuable.class.base_class.name)
+
+ # if there is a sync object, we need to cleanup its todos as well
+ next unless synced_object_to_delete
+
+ TodosDestroyer::DestroyedIssuableWorker.perform_async(
+ synced_object_to_delete.id, synced_object_to_delete.class.base_class.name
+ )
end
end
- def delete_label_links(actor, issuable)
+ def delete_label_links(issuable)
+ synced_object_to_delete = @synced_object_to_delete
+
issuable.run_after_commit_or_now do
- Issuable::LabelLinksDestroyWorker.perform_async(issuable.id, issuable.class.name)
+ Issuable::LabelLinksDestroyWorker.perform_async(issuable.id, issuable.class.base_class.name)
+
+ # if there is a sync object, we need to cleanup its label links as well
+ next unless synced_object_to_delete
+
+ Issuable::LabelLinksDestroyWorker.perform_async(
+ synced_object_to_delete.id, synced_object_to_delete.class.base_class.name
+ )
end
end
end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 27c52fc7303..f3296b42383 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -5,7 +5,8 @@ class IssuableBaseService < ::BaseContainerService
def available_callbacks
[
- Issuable::Callbacks::Milestone
+ Issuable::Callbacks::Milestone,
+ Issuable::Callbacks::TimeTracking
].freeze
end
@@ -58,6 +59,10 @@ class IssuableBaseService < ::BaseContainerService
can?(current_user, ability_name, issuable)
end
+ def can_set_confidentiality?(issuable)
+ can?(current_user, :set_confidentiality, issuable)
+ end
+
def filter_params(issuable)
unless can_set_issuable_metadata?(issuable)
params.delete(:labels)
@@ -78,7 +83,7 @@ class IssuableBaseService < ::BaseContainerService
# confidential attribute is a special type of metadata and needs to be allowed to be set
# by non-members on issues in public projects so that security issues can be reported as confidential.
- params.delete(:confidential) unless can?(current_user, :set_confidentiality, issuable)
+ params.delete(:confidential) unless can_set_confidentiality?(issuable)
filter_contact_params(issuable)
filter_assignees(issuable)
filter_labels
@@ -204,18 +209,24 @@ class IssuableBaseService < ::BaseContainerService
end
def handle_quick_actions(issuable)
- merge_quick_actions_into_params!(issuable)
+ merge_quick_actions_into_params!(issuable, params: params)
end
- def merge_quick_actions_into_params!(issuable, only: nil)
- original_description = params.fetch(:description, issuable.description)
+ # Notes: When the description has been edited, then we need to sanitize and compare with
+ # the original description, removing any extra quick actions.
+ # If the description has not been edited, then just remove any quick actions
+ # in the current description.
+ def merge_quick_actions_into_params!(issuable, params:, only: nil)
+ target_description = params.fetch(:description, issuable.description)
- description, command_params =
- QuickActions::InterpretService.new(project, current_user, quick_action_options)
- .execute(original_description, issuable, only: only)
+ description, command_params = QuickActions::InterpretService.new(
+ container: container,
+ current_user: current_user,
+ params: quick_action_options
+ ).execute_with_original_text(target_description, issuable, only: only, original_text: issuable.description_was)
# Avoid a description already set on an issuable to be overwritten by a nil
- params[:description] = description if description && description != original_description
+ params[:description] = description if description && description != target_description
params.merge!(command_params)
end
@@ -225,10 +236,8 @@ class IssuableBaseService < ::BaseContainerService
end
def create(issuable, skip_system_notes: false)
- initialize_callbacks!(issuable)
-
- prepare_create_params(issuable)
handle_quick_actions(issuable)
+ prepare_create_params(issuable)
filter_params(issuable)
params.delete(:state_event)
@@ -242,11 +251,13 @@ class IssuableBaseService < ::BaseContainerService
params.delete(:remove_contacts)
add_crm_contact_emails = params.delete(:add_contacts)
+ initialize_callbacks!(issuable)
issuable.assign_attributes(allowed_create_params(params))
before_create(issuable)
issuable_saved = issuable.with_transaction_returning_status do
+ @callbacks.each(&:before_create)
transaction_create(issuable)
end
@@ -271,7 +282,7 @@ class IssuableBaseService < ::BaseContainerService
def set_crm_contacts(issuable, add_crm_contact_emails, remove_crm_contact_emails = [])
return unless add_crm_contact_emails.present? || remove_crm_contact_emails.present?
- ::Issues::SetCrmContactsService.new(project: project, current_user: current_user, params: { add_emails: add_crm_contact_emails, remove_emails: remove_crm_contact_emails }).execute(issuable)
+ ::Issues::SetCrmContactsService.new(container: project, current_user: current_user, params: { add_emails: add_crm_contact_emails, remove_emails: remove_crm_contact_emails }).execute(issuable)
end
def before_create(issuable)
@@ -306,12 +317,12 @@ class IssuableBaseService < ::BaseContainerService
end
def update(issuable)
- old_associations = associations_before_update(issuable)
+ ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
- initialize_callbacks!(issuable)
+ old_associations = associations_before_update(issuable)
- prepare_update_params(issuable)
handle_quick_actions(issuable)
+ prepare_update_params(issuable)
filter_params(issuable)
change_additional_attributes(issuable)
@@ -321,12 +332,12 @@ class IssuableBaseService < ::BaseContainerService
assign_requested_crm_contacts(issuable)
widget_params = filter_widget_params
+ initialize_callbacks!(issuable)
+
if issuable.changed? || params.present? || widget_params.present? || @callbacks.present?
issuable.assign_attributes(allowed_update_params(params))
- if issuable.description_changed?
- issuable.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
- end
+ assign_last_edited(issuable)
before_update(issuable)
@@ -372,17 +383,26 @@ class IssuableBaseService < ::BaseContainerService
end
end
+ trigger_update_subscriptions(issuable, old_associations)
+
issuable
end
+ # Overriden in child class
+ def trigger_update_subscriptions(issuable, old_associations); end
+
def transaction_update(issuable, opts = {})
touch = opts[:save_with_touch] || false
- issuable.save(touch: touch)
+ issuable.save(touch: touch).tap do |saved|
+ @callbacks.each(&:after_save) if saved
+ end
end
def transaction_create(issuable)
- issuable.save
+ issuable.save.tap do |saved|
+ @callbacks.each(&:after_save) if saved
+ end
end
def update_task(issuable)
@@ -397,7 +417,7 @@ class IssuableBaseService < ::BaseContainerService
before_update(issuable, skip_spam_check: true)
- if issuable.with_transaction_returning_status { issuable.save }
+ if issuable.with_transaction_returning_status { transaction_update_task(issuable) }
create_system_notes(issuable, old_labels: nil)
handle_task_changes(issuable)
@@ -416,6 +436,10 @@ class IssuableBaseService < ::BaseContainerService
issuable
end
+ def transaction_update_task(issuable)
+ issuable.save
+ end
+
# Handle the `update_task` event sent from UI. Attempts to update a specific
# line in the markdown and cached html, bypassing any unnecessary updates or checks.
def update_task_event(issuable)
@@ -513,6 +537,12 @@ class IssuableBaseService < ::BaseContainerService
end
end
+ def assign_last_edited(issuable)
+ return unless issuable.description_changed?
+
+ issuable.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
+ end
+
# Arrays of ids are used, but we should really use sets of ids, so
# let's have an helper to properly check if some ids are changing
def ids_changing?(old_array, new_array)
@@ -541,6 +571,7 @@ class IssuableBaseService < ::BaseContainerService
associations[:description] = issuable.description
associations[:reviewers] = issuable.reviewers.to_a if issuable.allows_reviewers?
associations[:severity] = issuable.severity if issuable.supports_severity?
+ associations[:target_branch] = issuable.target_branch if issuable.is_a?(MergeRequest)
if issuable.supports_escalation? && issuable.escalation_status
associations[:escalation_status] = issuable.escalation_status.status_name
@@ -630,7 +661,7 @@ class IssuableBaseService < ::BaseContainerService
end
def allowed_create_params(params)
- params
+ params.except(:observability_links)
end
def allowed_update_params(params)
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index c855a58522c..77492cc850e 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -79,10 +79,10 @@ module IssuableLinks
link = relate_issuables(referenced_object)
if link.errors.any?
- @errors << _("%{ref} cannot be added: %{error}") % {
+ @errors << (_("%{ref} cannot be added: %{error}") % {
ref: referenced_object.to_reference,
error: link.errors.messages.values.flatten.to_sentence
- }
+ })
else
after_create_for(link)
end
diff --git a/app/services/issue_email_participants/base_service.rb b/app/services/issue_email_participants/base_service.rb
index c9847bae537..4591048e339 100644
--- a/app/services/issue_email_participants/base_service.rb
+++ b/app/services/issue_email_participants/base_service.rb
@@ -4,31 +4,35 @@ module IssueEmailParticipants
class BaseService < ::BaseProjectService
MAX_NUMBER_OF_EMAILS = 6
- attr_reader :target, :emails
+ attr_reader :target, :emails, :options
- def initialize(target:, current_user:, emails:)
+ def initialize(target:, current_user:, emails:, options: {})
super(project: target.project, current_user: current_user)
@target = target
@emails = emails
+ @options = options
end
private
- def response_from_guard_checks
- return error_feature_flag unless Feature.enabled?(:issue_email_participants, target.project)
- return error_underprivileged unless current_user.can?(:"admin_#{target.to_ability_name}", target)
+ def add_system_note(emails, user: nil)
+ return unless emails.present?
- nil
- end
-
- def add_system_note(emails)
message = format(system_note_text, emails: emails.to_sentence)
- ::SystemNoteService.email_participants(target, project, current_user, message)
+ ::SystemNoteService.email_participants(target, project, (user || current_user), message)
message
end
+ def user_privileged?
+ current_user&.can?(:"admin_#{target.to_ability_name}", target) || skip_permission_check?
+ end
+
+ def skip_permission_check?
+ options[:skip_permission_check] == true
+ end
+
def error(message)
ServiceResponse.error(message: message)
end
diff --git a/app/services/issue_email_participants/create_service.rb b/app/services/issue_email_participants/create_service.rb
index aac396ba226..f2d91b5cbc4 100644
--- a/app/services/issue_email_participants/create_service.rb
+++ b/app/services/issue_email_participants/create_service.rb
@@ -7,8 +7,8 @@ module IssueEmailParticipants
MAX_NUMBER_OF_RECORDS = 10
def execute
- response = response_from_guard_checks
- return response unless response.nil?
+ return error_feature_flag unless Feature.enabled?(:issue_email_participants, target.project)
+ return error_underprivileged unless user_privileged?
return error_no_participants_added unless emails.present?
added_emails = add_participants(deduplicate_and_limit_emails)
@@ -41,10 +41,13 @@ module IssueEmailParticipants
end
new_participant = target.issue_email_participants.create(email: email)
- if new_participant.persisted?
- added_emails << email
- existing_emails_count += 1
- end
+ next unless new_participant.persisted?
+
+ added_emails << email
+ existing_emails_count += 1
+
+ Notify.service_desk_new_participant_email(target.id, new_participant).deliver_later
+ Gitlab::Metrics::BackgroundTransaction.current&.add_event(:service_desk_new_participant_email)
end
added_emails
diff --git a/app/services/issue_email_participants/destroy_service.rb b/app/services/issue_email_participants/destroy_service.rb
index 8cd0178da00..fd0a62117c8 100644
--- a/app/services/issue_email_participants/destroy_service.rb
+++ b/app/services/issue_email_participants/destroy_service.rb
@@ -3,14 +3,13 @@
module IssueEmailParticipants
class DestroyService < BaseService
def execute
- response = response_from_guard_checks
- return response unless response.nil?
+ return error_underprivileged unless user_privileged?
return error_no_participants_removed unless emails.present?
removed_emails = remove_participants(emails.first(MAX_NUMBER_OF_EMAILS))
if removed_emails.any?
- message = add_system_note(removed_emails)
+ message = add_system_note(removed_emails, user: system_note_author)
ServiceResponse.success(message: message.upcase_first << ".")
else
error_no_participants_removed
@@ -34,9 +33,23 @@ module IssueEmailParticipants
end
def system_note_text
+ return system_note_unsubscribe_text if unsubscribe_context?
+
_("removed %{emails}")
end
+ def system_note_unsubscribe_text
+ _("unsubscribed %{emails}")
+ end
+
+ def system_note_author
+ Users::Internal.support_bot if unsubscribe_context?
+ end
+
+ def unsubscribe_context?
+ options[:context] == :unsubscribe
+ end
+
def error_no_participants_removed
error(_("No email participants were removed. Either none were provided, or they don't exist."))
end
diff --git a/app/services/issue_links/create_service.rb b/app/services/issue_links/create_service.rb
index 3523e945d37..ce18ce94c23 100644
--- a/app/services/issue_links/create_service.rb
+++ b/app/services/issue_links/create_service.rb
@@ -25,6 +25,10 @@ module IssueLinks
def link_class
IssueLink
end
+
+ def issuables_no_permission_error_message
+ _("Couldn't link issues. You must have at least the Guest role in both projects.")
+ end
end
end
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index d87dc013c7f..a01f7cb799d 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -6,11 +6,10 @@ module Issues
include IncidentManagement::UsageData
include IssueTypeHelpers
- def hook_data(issue, action, old_associations: {})
- hook_data = issue.to_hook_data(current_user, old_associations: old_associations)
- hook_data[:object_attributes][:action] = action
+ EpicAssignmentError = Class.new(::ArgumentError)
- hook_data
+ def hook_data(issue, action, old_associations: {})
+ issue.to_hook_data(current_user, old_associations: old_associations, action: action)
end
def reopen_service
diff --git a/app/services/issues/build_service.rb b/app/services/issues/build_service.rb
index 63cad593936..69dd2ce90b7 100644
--- a/app/services/issues/build_service.rb
+++ b/app/services/issues/build_service.rb
@@ -62,9 +62,8 @@ module Issues
discussion_info = ["- [ ] #{first_note_to_resolve.author.to_reference} #{action} a [discussion](#{note_url}): "]
discussion_info << "(+#{other_note_count} #{'comment'.pluralize(other_note_count)})" if other_note_count > 0
- note_without_block_quotes = Banzai::Filter::BlockquoteFenceFilter.new(first_note_to_resolve.note).call
spaces = ' ' * 4
- quote = note_without_block_quotes.lines.map { |line| "#{spaces}> #{line}" }.join
+ quote = first_note_to_resolve.note.lines.map { |line| "#{spaces}> #{line}" }.join
[discussion_info.join(' '), quote].join("\n\n")
end
diff --git a/app/services/issues/clone_service.rb b/app/services/issues/clone_service.rb
index 8af44fb1e3c..d20485dbe54 100644
--- a/app/services/issues/clone_service.rb
+++ b/app/services/issues/clone_service.rb
@@ -65,6 +65,7 @@ module Issues
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
new_params = new_params.merge(rewritten_old_entity_attributes)
+ new_params.delete(:imported_from)
new_params.delete(:created_at)
new_params.delete(:updated_at)
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index ef43e707a21..09325ec4e00 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -6,12 +6,7 @@ module Issues
def execute(issue, commit: nil, notifications: true, system_note: true, skip_authorization: false)
return issue unless can_close?(issue, skip_authorization: skip_authorization)
- close_issue(
- issue,
- closed_via: commit,
- notifications: notifications,
- system_note: system_note
- )
+ close_issue(issue, closed_via: commit, notifications: notifications, system_note: system_note)
end
# Closes the supplied issue without checking if the user is authorized to
@@ -26,38 +21,45 @@ module Issues
return issue
end
- if issue.close(current_user)
- event_service.close_issue(issue, current_user)
- create_note(issue, closed_via) if system_note
+ return issue unless handle_closing_issue!(issue, current_user)
- if current_user.project_bot?
- log_audit_event(issue, current_user, "#{issue.issue_type}_closed_by_project_bot",
- "Closed #{issue.issue_type.humanize(capitalize: false)} #{issue.title}")
- end
+ after_close(issue, closed_via: closed_via, notifications: notifications, system_note: system_note)
+ end
- closed_via = _("commit %{commit_id}") % { commit_id: closed_via.id } if closed_via.is_a?(Commit)
+ private
- notification_service.async.close_issue(issue, current_user, { closed_via: closed_via }) if notifications
- todo_service.close_issue(issue, current_user)
- perform_incident_management_actions(issue)
- execute_hooks(issue, 'close')
- invalidate_cache_counts(issue, users: issue.assignees)
- issue.update_project_counter_caches
- track_incident_action(current_user, issue, :incident_closed)
+ # overriden in EE
+ def handle_closing_issue!(issue, current_user)
+ issue.close(current_user)
+ end
- if closed_via.is_a?(MergeRequest)
- store_first_mentioned_in_commit_at(issue, closed_via)
- Onboarding::ProgressService.new(project.namespace).execute(action: :issue_auto_closed)
- end
+ # overriden in EE
+ def after_close(issue, closed_via: nil, notifications: true, system_note: true)
+ event_service.close_issue(issue, current_user)
+ create_note(issue, closed_via) if system_note
- Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
+ if current_user.project_bot?
+ log_audit_event(issue, current_user, "#{issue.issue_type}_closed_by_project_bot",
+ "Closed #{issue.issue_type.humanize(capitalize: false)} #{issue.title}")
end
+ closed_via = _("commit %{commit_id}") % { commit_id: closed_via.id } if closed_via.is_a?(Commit)
+
+ notification_service.async.close_issue(issue, current_user, { closed_via: closed_via }) if notifications
+ todo_service.close_issue(issue, current_user)
+ perform_incident_management_actions(issue)
+ execute_hooks(issue, 'close')
+ invalidate_cache_counts(issue, users: issue.assignees)
+ issue.update_project_counter_caches
+ track_incident_action(current_user, issue, :incident_closed)
+
+ store_first_mentioned_in_commit_at(issue, closed_via) if closed_via.is_a?(MergeRequest)
+
+ Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
+
issue
end
- private
-
def can_close?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue)
end
diff --git a/app/services/issues/convert_to_ticket_service.rb b/app/services/issues/convert_to_ticket_service.rb
new file mode 100644
index 00000000000..d3d36fc2f14
--- /dev/null
+++ b/app/services/issues/convert_to_ticket_service.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+module Issues
+ class ConvertToTicketService < ::BaseContainerService
+ def initialize(target:, current_user:, email:)
+ super(container: target.resource_parent, current_user: current_user)
+
+ @target = target
+ @email = email
+ @original_author = target.author
+ end
+
+ def execute
+ return error_underprivileged unless current_user.can?(:"admin_#{target.to_ability_name}", target)
+ return error_already_ticket if ticket?
+ return error_invalid_email unless valid_email?
+
+ update_target
+ add_note
+
+ ServiceResponse.success(message: success_message)
+ end
+
+ private
+
+ attr_reader :target, :email, :original_author
+
+ def update_target
+ target.update!(
+ service_desk_reply_to: email,
+ author: Users::Internal.support_bot,
+ confidential: target_confidentiality
+ )
+
+ # Migrate to IssueEmailParticipants::CreateService
+ # once :issue_email_participants feature flag has been removed
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/440456
+ IssueEmailParticipant.create!(issue: target, email: email)
+ end
+
+ def add_note
+ message = s_(
+ "ServiceDesk|This issue has been converted to a Service Desk ticket. " \
+ "The email address `%{email}` is the new author of this issue. " \
+ "GitLab didn't send a `thank_you` Service Desk email. " \
+ "The original author of this issue was `%{original_author}`."
+ )
+
+ ::Notes::CreateService.new(
+ project,
+ Users::Internal.support_bot,
+ noteable: target,
+ note: format(message, email: email, original_author: original_author.to_reference),
+ internal: true
+ ).execute
+ end
+
+ def ticket?
+ target.from_service_desk?
+ end
+
+ def valid_email?
+ email.present? && IssueEmailParticipant.new(issue: target, email: email).valid?
+ end
+
+ def target_confidentiality
+ return true if project.service_desk_setting.nil?
+ # This quick action runs on existing issues so
+ # don't change the confidentiality of an already confidential issue.
+ return true if target.confidential?
+
+ project.service_desk_setting.tickets_confidential_by_default?
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def error_underprivileged
+ error(_("You don't have permission to manage this issue."))
+ end
+
+ def error_already_ticket
+ error(s_("ServiceDesk|Cannot convert to ticket because it is already a ticket."))
+ end
+
+ def error_invalid_email
+ error(
+ s_("ServiceDesk|Cannot convert issue to ticket because no email was provided or the format was invalid.")
+ )
+ end
+
+ def success_message
+ s_('ServiceDesk|Converted issue to Service Desk ticket.')
+ end
+ end
+end
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index c828c156d50..aa01a459b0f 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -49,15 +49,8 @@ module Issues
def before_create(issue)
issue.check_for_spam(user: current_user, action: :create) if perform_spam_check
- # current_user (defined in BaseService) is not available within run_after_commit block
- user = current_user
assign_description_from_template(issue)
- issue.run_after_commit do
- NewIssueWorker.perform_async(issue.id, user.id, issue.class.to_s)
- Issues::PlacementWorker.perform_async(nil, issue.project_id)
- # issue.namespace_id can point to either a project through project namespace or a group.
- Onboarding::IssueCreatedWorker.perform_async(issue.namespace_id)
- end
+ after_commit_tasks(current_user, issue)
end
# Add new items to Issues::AfterCreateService if they can be performed in Sidekiq
@@ -160,6 +153,15 @@ module Issues
issue.description = default_template.content if default_template.present?
end
+
+ def after_commit_tasks(user, issue)
+ issue.run_after_commit do
+ NewIssueWorker.perform_async(issue.id, user.id, issue.class.to_s)
+ Issues::PlacementWorker.perform_async(nil, issue.project_id)
+ # issue.namespace_id can point to either a project through project namespace or a group.
+ Onboarding::IssueCreatedWorker.perform_async(issue.namespace_id)
+ end
+ end
end
end
diff --git a/app/services/issues/export_csv_service.rb b/app/services/issues/export_csv_service.rb
index 99c0e9f1a37..4e1a713c496 100644
--- a/app/services/issues/export_csv_service.rb
+++ b/app/services/issues/export_csv_service.rb
@@ -26,23 +26,23 @@ module Issues
'Title' => 'title',
'Description' => 'description',
'Issue ID' => 'iid',
- 'URL' => -> (issue) { issue_url(issue) },
- 'State' => -> (issue) { issue.closed? ? 'Closed' : 'Open' },
+ 'URL' => ->(issue) { issue_url(issue) },
+ 'State' => ->(issue) { issue.closed? ? 'Closed' : 'Open' },
'Author' => 'author_name',
- 'Author Username' => -> (issue) { issue.author&.username },
- 'Assignee' => -> (issue) { issue.assignees.map(&:name).join(', ') },
- 'Assignee Username' => -> (issue) { issue.assignees.map(&:username).join(', ') },
- 'Confidential' => -> (issue) { issue.confidential? ? 'Yes' : 'No' },
- 'Locked' => -> (issue) { issue.discussion_locked? ? 'Yes' : 'No' },
- 'Due Date' => -> (issue) { issue.due_date&.to_fs(:csv) },
- 'Created At (UTC)' => -> (issue) { issue.created_at&.to_fs(:csv) },
- 'Updated At (UTC)' => -> (issue) { issue.updated_at&.to_fs(:csv) },
- 'Closed At (UTC)' => -> (issue) { issue.closed_at&.to_fs(:csv) },
- 'Milestone' => -> (issue) { issue.milestone&.title },
- 'Weight' => -> (issue) { issue.weight },
- 'Labels' => -> (issue) { issue_labels(issue) },
+ 'Author Username' => ->(issue) { issue.author&.username },
+ 'Assignee' => ->(issue) { issue.assignees.map(&:name).join(', ') },
+ 'Assignee Username' => ->(issue) { issue.assignees.map(&:username).join(', ') },
+ 'Confidential' => ->(issue) { issue.confidential? ? 'Yes' : 'No' },
+ 'Locked' => ->(issue) { issue.discussion_locked? ? 'Yes' : 'No' },
+ 'Due Date' => ->(issue) { issue.due_date&.to_fs(:csv) },
+ 'Created At (UTC)' => ->(issue) { issue.created_at&.to_fs(:csv) },
+ 'Updated At (UTC)' => ->(issue) { issue.updated_at&.to_fs(:csv) },
+ 'Closed At (UTC)' => ->(issue) { issue.closed_at&.to_fs(:csv) },
+ 'Milestone' => ->(issue) { issue.milestone&.title },
+ 'Weight' => ->(issue) { issue.weight },
+ 'Labels' => ->(issue) { issue_labels(issue) },
'Time Estimate' => ->(issue) { issue.time_estimate.to_fs(:csv) },
- 'Time Spent' => -> (issue) { issue_time_spent(issue) }
+ 'Time Spent' => ->(issue) { issue_time_spent(issue) }
}
end
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index c3ddf7b6709..f39098c034c 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -6,12 +6,13 @@ module Issues
MoveError = Class.new(StandardError)
- def execute(issue, target_project)
+ def execute(issue, target_project, move_any_issue_type = false)
+ @move_any_issue_type = move_any_issue_type
@target_project = target_project
verify_can_move_issue!(issue, target_project)
- super
+ super(issue, target_project)
notify_participants
@@ -21,16 +22,37 @@ module Issues
copy_email_participants
queue_copy_designs
+ copy_timelogs
new_entity
end
private
- attr_reader :target_project
+ attr_reader :target_project, :move_any_issue_type
+
+ override :after_clone_actions
+ def after_clone_actions
+ move_children
+ end
+
+ def move_children
+ WorkItems::ParentLink.for_parents(original_entity).each do |link|
+ new_child = self.class.new(
+ container: container,
+ current_user: current_user
+ ).execute(
+ ::Issue.find(link.work_item_id),
+ target_project,
+ true
+ )
+
+ WorkItems::ParentLink.create!(work_item_id: new_child.id, work_item_parent_id: new_entity.id)
+ end
+ end
def verify_can_move_issue!(issue, target_project)
- unless issue.supports_move_and_clone?
+ unless issue.supports_move_and_clone? || move_any_issue_type
raise MoveError, s_('MoveIssue|Cannot move issues of \'%{issue_type}\' type.') % { issue_type: issue.issue_type }
end
@@ -44,10 +66,13 @@ module Issues
end
def update_service_desk_sent_notifications
- return unless original_entity.from_service_desk?
+ context = { project_id: new_entity.project_id, noteable_id: new_entity.id }
+
+ original_entity.run_after_commit_or_now do
+ next unless from_service_desk?
- original_entity
- .sent_notifications.update_all(project_id: new_entity.project_id, noteable_id: new_entity.id)
+ sent_notifications.update_all(**context)
+ end
end
def copy_email_participants
@@ -85,7 +110,8 @@ module Issues
project: target_project,
author: original_entity.author,
assignee_ids: original_entity.assignee_ids,
- moved_issue: true
+ moved_issue: true,
+ imported_from: :none
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
@@ -118,6 +144,12 @@ module Issues
log_error(response.message) if response.error?
end
+ def copy_timelogs
+ return if original_entity.timelogs.empty?
+
+ WorkItems::CopyTimelogsWorker.perform_async(original_entity.id, new_entity.id)
+ end
+
def mark_as_moved
original_entity.update(moved_to: new_entity)
end
@@ -137,7 +169,11 @@ module Issues
end
def notify_participants
- notification_service.async.issue_moved(original_entity, new_entity, @current_user)
+ context = { original: original_entity, new: new_entity, user: @current_user, service: notification_service }
+
+ original_entity.run_after_commit_or_now do
+ context[:service].async.issue_moved(context[:original], context[:new], context[:user])
+ end
end
def add_note_from
diff --git a/app/services/issues/referenced_merge_requests_service.rb b/app/services/issues/referenced_merge_requests_service.rb
index ff7cf65e757..d8767f9a0b4 100644
--- a/app/services/issues/referenced_merge_requests_service.rb
+++ b/app/services/issues/referenced_merge_requests_service.rb
@@ -19,7 +19,7 @@ module Issues
def referenced_merge_requests(issue)
merge_requests = extract_merge_requests(issue)
- cross_project_filter = -> (merge_requests) do
+ cross_project_filter = ->(merge_requests) do
merge_requests.select { |mr| mr.target_project == project }
end
@@ -40,7 +40,10 @@ module Issues
return [] if merge_requests.empty?
- ids = MergeRequestsClosingIssues.where(merge_request_id: merge_requests.map(&:id), issue_id: issue.id).pluck(:merge_request_id)
+ ids = MergeRequestsClosingIssues.where(
+ merge_request_id: merge_requests.map(&:id),
+ issue_id: issue.id
+ ).pluck(:merge_request_id)
merge_requests.select { |mr| mr.id.in?(ids) }
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/issues/relative_position_rebalancing_service.rb b/app/services/issues/relative_position_rebalancing_service.rb
index e165cb36634..dad20fb66c0 100644
--- a/app/services/issues/relative_position_rebalancing_service.rb
+++ b/app/services/issues/relative_position_rebalancing_service.rb
@@ -161,7 +161,7 @@ module Issues
end
def start_position
- @start_position ||= (RelativePositioning::START_POSITION - (gaps / 2) * gap_size).to_i
+ @start_position ||= (RelativePositioning::START_POSITION - ((gaps / 2) * gap_size)).to_i
end
def with_retry(initial_batch_size, exit_batch_size)
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index d71ba4e3414..07f389cab08 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -5,29 +5,37 @@ module Issues
def execute(issue, skip_authorization: false)
return issue unless can_reopen?(issue, skip_authorization: skip_authorization)
- if issue.reopen
- event_service.reopen_issue(issue, current_user)
-
- if current_user.project_bot?
- log_audit_event(issue, current_user, "#{issue.issue_type}_reopened_by_project_bot",
- "Reopened #{issue.issue_type.humanize(capitalize: false)} #{issue.title}")
- end
-
- create_note(issue, 'reopened')
- notification_service.async.reopen_issue(issue, current_user)
- perform_incident_management_actions(issue)
- execute_hooks(issue, 'reopen')
- invalidate_cache_counts(issue, users: issue.assignees)
- issue.update_project_counter_caches
- Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
- track_incident_action(current_user, issue, :incident_reopened)
- end
+ after_reopen(issue) if reopen_issue(issue)
issue
end
private
+ # overriden in EE
+ def after_reopen(issue)
+ event_service.reopen_issue(issue, current_user)
+
+ if current_user.project_bot?
+ log_audit_event(issue, current_user, "#{issue.issue_type}_reopened_by_project_bot",
+ "Reopened #{issue.issue_type.humanize(capitalize: false)} #{issue.title}")
+ end
+
+ create_note(issue, 'reopened')
+ notification_service.async.reopen_issue(issue, current_user)
+ perform_incident_management_actions(issue)
+ execute_hooks(issue, 'reopen')
+ invalidate_cache_counts(issue, users: issue.assignees)
+ issue.update_project_counter_caches
+ Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
+ track_incident_action(current_user, issue, :incident_reopened)
+ end
+
+ # overriden in EE
+ def reopen_issue(issue)
+ issue.reopen
+ end
+
def can_reopen?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :reopen_issue, issue)
end
diff --git a/app/services/issues/set_crm_contacts_service.rb b/app/services/issues/set_crm_contacts_service.rb
index c2ed7c554be..1a697678ac0 100644
--- a/app/services/issues/set_crm_contacts_service.rb
+++ b/app/services/issues/set_crm_contacts_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Issues
- class SetCrmContactsService < ::BaseProjectService
+ class SetCrmContactsService < ::BaseContainerService
MAX_ADDITIONAL_CONTACTS = 6
# Replacing contacts by email is not currently supported
@@ -52,7 +52,7 @@ module Issues
end
def add_by_email
- contact_ids = ::CustomerRelations::Contact.find_ids_by_emails(project_group.root_ancestor, emails(:add_emails))
+ contact_ids = ::CustomerRelations::Contact.find_ids_by_emails(container.root_ancestor, emails(:add_emails))
add_by_id(contact_ids)
end
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index 839d0e664a4..f5e9953915f 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -100,8 +100,8 @@ module Issues
target_project = params.delete(:target_project)
return unless target_project &&
- issue.can_move?(current_user, target_project) &&
- target_project != issue.project
+ issue.can_move?(current_user, target_project) &&
+ target_project != issue.project
update(issue)
Issues::MoveService.new(container: project, current_user: current_user).execute(issue, target_project)
diff --git a/app/services/jira/requests/base.rb b/app/services/jira/requests/base.rb
index 8d7b460bf69..a0a3fecb61b 100644
--- a/app/services/jira/requests/base.rb
+++ b/app/services/jira/requests/base.rb
@@ -83,7 +83,7 @@ module Jira
def error_message(error)
reportable_error_message(error) ||
- s_('JiraRequest|An error occurred while requesting data from Jira. Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe }
+ (s_('JiraRequest|An error occurred while requesting data from Jira. Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe })
end
# Returns a user-facing error message if possible, otherwise `nil`.
@@ -113,9 +113,7 @@ module Jira
when 'Forbidden'
s_('JiraRequest|The credentials for accessing Jira are not allowed to access the data. Check your %{docs_link_start}Jira integration credentials%{docs_link_end} and try again.').html_safe % { docs_link_start: auth_docs_link_start, docs_link_end: '</a>'.html_safe }
when 'Bad Request'
- s_('JiraRequest|An error occurred while requesting data from Jira. Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe }
- when /errorMessages/
- jira_ruby_json_error_message(error.message)
+ jira_ruby_json_error_message(error.response.body) || (s_('JiraRequest|An error occurred while requesting data from Jira. Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe })
end
end
@@ -127,7 +125,7 @@ module Jira
messages = Rails::Html::FullSanitizer.new.sanitize(messages).presence
return unless messages
- s_('JiraRequest|An error occurred while requesting data from Jira: %{messages}. Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { messages: messages, docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe }
+ s_('JiraRequest|An error occurred while requesting data from Jira: %{messages} Check your %{docs_link_start}Jira integration configuration%{docs_link_end} and try again.').html_safe % { messages: messages, docs_link_start: config_docs_link_start, docs_link_end: '</a>'.html_safe }
rescue JSON::ParserError
end
end
diff --git a/app/services/jira_connect_installations/destroy_service.rb b/app/services/jira_connect_installations/destroy_service.rb
index cfe58575dcf..7f063eb311a 100644
--- a/app/services/jira_connect_installations/destroy_service.rb
+++ b/app/services/jira_connect_installations/destroy_service.rb
@@ -18,7 +18,20 @@ module JiraConnectInstallations
return true
end
- @installation.destroy
+ # rubocop:disable Database/AvoidUsingPluckWithoutLimit, CodeReuse/ActiveRecord -- Limit of 100 max per page is defined in kaminari config
+ subscriptions_namespace_ids = @installation.subscriptions.pluck(:namespace_id)
+ # rubocop:enable Database/AvoidUsingPluckWithoutLimit, CodeReuse/ActiveRecord
+
+ return false unless @installation.destroy
+
+ deactivate_jira_cloud_app_integrations(subscriptions_namespace_ids)
+ true
+ end
+
+ def deactivate_jira_cloud_app_integrations(subscriptions_namespace_ids)
+ subscriptions_namespace_ids.each do |namespace_id|
+ JiraConnect::JiraCloudAppDeactivationWorker.perform_async(namespace_id)
+ end
end
end
end
diff --git a/app/services/jira_connect_subscriptions/create_service.rb b/app/services/jira_connect_subscriptions/create_service.rb
index f537da5c091..cd588387acb 100644
--- a/app/services/jira_connect_subscriptions/create_service.rb
+++ b/app/services/jira_connect_subscriptions/create_service.rb
@@ -5,6 +5,7 @@ module JiraConnectSubscriptions
include Gitlab::Utils::StrongMemoize
MERGE_REQUEST_SYNC_BATCH_SIZE = 20
MERGE_REQUEST_SYNC_BATCH_DELAY = 1.minute.freeze
+ BATCH_SIZE = 1_000
def execute
if !params[:jira_user]
@@ -32,6 +33,7 @@ module JiraConnectSubscriptions
subscription = JiraConnectSubscription.new(installation: jira_connect_installation, namespace: namespace)
if subscription.save
+ create_jira_cloud_integration!
schedule_sync_project_jobs
success
@@ -40,6 +42,38 @@ module JiraConnectSubscriptions
end
end
+ # We must make all GitLab for Jira app integrations active (or inactive in the DestroyService)
+ # regardless of whether those integration inherit, or have defined their own custom settings.
+ # Unless the group namespace is linked in Jira,
+ # the project integrations do not work, even if they are non-inheriting.
+ #
+ # Using Integration.descendants_from_self_or_ancestors_from we update
+ # all integrations of all subgroups and sub projects to be active.
+ #
+ # We keep their inherit_from_id in tact, as they might have custom service_ids fields.
+ # We also still queue a PropagateIntegrationWorker in order to create integrations
+ # (the Integration.descendants_from_self_or_ancestors_from only updates existing ones).
+ def create_jira_cloud_integration!
+ integration = Integration.find_or_initialize_non_project_specific_integration(
+ 'jira_cloud_app',
+ group_id: namespace.id
+ )
+
+ return unless integration
+
+ Integrations::JiraCloudApp.transaction do
+ integration.inherit_from_id = nil
+ integration.activate!
+
+ Integration.descendants_from_self_or_ancestors_from(integration).each_batch(of: BATCH_SIZE) do |records|
+ records.update!(active: true)
+ end
+ end
+
+ # This worker must be queued outside of the PostgreSQL transaction.
+ PropagateIntegrationWorker.perform_async(integration.id) if integration.persisted?
+ end
+
def namespace
strong_memoize(:namespace) do
Namespace.find_by_full_path(params[:namespace_path])
@@ -51,8 +85,8 @@ module JiraConnectSubscriptions
JiraConnect::SyncProjectWorker.bulk_perform_in_with_contexts(
index * MERGE_REQUEST_SYNC_BATCH_DELAY,
projects,
- arguments_proc: -> (project) { [project.id, Atlassian::JiraConnect::Client.generate_update_sequence_id] },
- context_proc: -> (project) { { project: project } }
+ arguments_proc: ->(project) { [project.id, Atlassian::JiraConnect::Client.generate_update_sequence_id] },
+ context_proc: ->(project) { { project: project } }
)
end
end
diff --git a/app/services/jira_connect_subscriptions/destroy_service.rb b/app/services/jira_connect_subscriptions/destroy_service.rb
new file mode 100644
index 00000000000..9dcd17e1936
--- /dev/null
+++ b/app/services/jira_connect_subscriptions/destroy_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module JiraConnectSubscriptions
+ class DestroyService
+ attr_accessor :subscription, :jira_user
+
+ def initialize(subscription, jira_user)
+ @subscription = subscription
+ @jira_user = jira_user
+ end
+
+ def execute
+ unless subscription
+ return ServiceResponse.error(message: _('Invalid JiraConnectSubscriptions'),
+ reason: :unprocessable_entity)
+ end
+
+ return ServiceResponse.error(message: _('Forbidden'), reason: :forbidden) unless can_administer_jira?
+
+ namespace_id = subscription.namespace_id
+
+ if subscription.destroy
+ deactivate_jira_cloud_app_integrations(namespace_id)
+
+ return ServiceResponse.success
+ end
+
+ ServiceResponse.error(
+ message: subscription.errors.full_messages.to_sentence,
+ reason: :unprocessable_entity
+ )
+ end
+
+ private
+
+ def can_administer_jira?
+ jira_user&.jira_admin?
+ end
+
+ def deactivate_jira_cloud_app_integrations(namespace_id)
+ JiraConnect::JiraCloudAppDeactivationWorker.perform_async(namespace_id)
+ end
+ end
+end
diff --git a/app/services/labels/create_service.rb b/app/services/labels/create_service.rb
index c69b9bd8de7..b283e8acd73 100644
--- a/app/services/labels/create_service.rb
+++ b/app/services/labels/create_service.rb
@@ -27,5 +27,3 @@ module Labels
end
end
end
-
-Labels::CreateService.prepend_mod_with('Labels::CreateService')
diff --git a/app/services/labels/promote_service.rb b/app/services/labels/promote_service.rb
index 2786a2e357e..5970dfbf51c 100644
--- a/app/services/labels/promote_service.rb
+++ b/app/services/labels/promote_service.rb
@@ -7,7 +7,7 @@ module Labels
# rubocop: disable CodeReuse/ActiveRecord
def execute(label)
return unless project.group &&
- label.is_a?(ProjectLabel)
+ label.is_a?(ProjectLabel)
ProjectLabel.transaction do
# use the existing group label if it exists
diff --git a/app/services/labels/transfer_service.rb b/app/services/labels/transfer_service.rb
index 79e807d8010..993e565645a 100644
--- a/app/services/labels/transfer_service.rb
+++ b/app/services/labels/transfer_service.rb
@@ -17,7 +17,7 @@ module Labels
# rubocop: disable CodeReuse/ActiveRecord
link_ids = group_labels_applied_to_issues.pluck("label_links.id") +
- group_labels_applied_to_merge_requests.pluck("label_links.id")
+ group_labels_applied_to_merge_requests.pluck("label_links.id")
# rubocop: disable CodeReuse/ActiveRecord
Label.transaction do
@@ -40,9 +40,9 @@ module Labels
def labels_to_transfer
Label
.from_union([
- group_labels_applied_to_issues,
- group_labels_applied_to_merge_requests
- ])
+ group_labels_applied_to_issues,
+ group_labels_applied_to_merge_requests
+ ])
.reorder(nil)
.distinct
end
diff --git a/app/services/lfs/file_transformer.rb b/app/services/lfs/file_transformer.rb
index a02fce552cf..ef227b6e20f 100644
--- a/app/services/lfs/file_transformer.rb
+++ b/app/services/lfs/file_transformer.rb
@@ -22,11 +22,12 @@ module Lfs
class FileTransformer
attr_reader :project, :repository, :repository_type, :branch_name
- def initialize(project, repository, branch_name)
+ def initialize(project, repository, branch_name, start_branch_name: nil)
@project = project
@repository = repository
@repository_type = repository.repo_type.name
@branch_name = branch_name
+ @start_branch_name = start_branch_name
end
def new_file(file_path, file_content, encoding: nil, detect_content_type: false)
@@ -43,6 +44,10 @@ module Lfs
end
end
+ def branch_to_base_off
+ @branch_to_base_off ||= (start_branch_name || branch_name)
+ end
+
class Result
attr_reader :content, :encoding
@@ -54,12 +59,14 @@ module Lfs
private
+ attr_reader :start_branch_name
+
def lfs_file?(file_path)
cached_attributes.attributes(file_path)['filter'] == 'lfs'
end
def cached_attributes
- @cached_attributes ||= repository.attributes_at(branch_name)
+ @cached_attributes ||= repository.attributes_at(branch_to_base_off)
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/lfs/finalize_upload_service.rb b/app/services/lfs/finalize_upload_service.rb
new file mode 100644
index 00000000000..d0685403394
--- /dev/null
+++ b/app/services/lfs/finalize_upload_service.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Lfs
+ class FinalizeUploadService
+ InvalidUploadedFile = Class.new(StandardError)
+
+ def initialize(oid:, size:, uploaded_file:, project:)
+ @oid = oid
+ @size = size
+ @uploaded_file = uploaded_file
+ @project = project
+ end
+
+ def execute
+ validate_uploaded_file!
+
+ if store_file!
+ ServiceResponse.success
+ else
+ ServiceResponse.error(reason: :unprocessable_entity, message: 'Unprocessable entity')
+ end
+ rescue ActiveRecord::RecordInvalid
+ ServiceResponse.error(reason: :invalid_record, message: 'Invalid record')
+ rescue UploadedFile::InvalidPathError
+ ServiceResponse.error(reason: :invalid_path, message: 'Invalid path')
+ rescue ObjectStorage::RemoteStoreError
+ ServiceResponse.error(reason: :remote_store_error, message: 'Remote store error')
+ rescue InvalidUploadedFile
+ ServiceResponse.error(reason: :invalid_uploaded_file, message: 'SHA256 or size mismatch')
+ end
+
+ private
+
+ attr_reader :oid, :size, :uploaded_file, :project
+
+ def store_file!
+ object = LfsObject.for_oid_and_size(oid, size)
+
+ if object
+ replace_file!(object) unless object.file&.exists?
+ else
+ object = create_file!
+ end
+
+ return unless object
+
+ link_to_project!(object)
+ end
+
+ def create_file!
+ return unless uploaded_file.is_a?(UploadedFile)
+
+ LfsObject.create!(oid: oid, size: size, file: uploaded_file)
+ end
+
+ def replace_file!(lfs_object)
+ raise UploadedFile::InvalidPathError unless uploaded_file.is_a?(UploadedFile)
+
+ Gitlab::AppJsonLogger.info(message: "LFS file replaced because it did not exist", oid: oid, size: size)
+ lfs_object.file = uploaded_file
+ lfs_object.save!
+ end
+
+ def link_to_project!(object)
+ LfsObjectsProject.safe_find_or_create_by!( # rubocop:disable Performance/ActiveRecordSubtransactionMethods -- Used in the original controller: https://gitlab.com/gitlab-org/gitlab/-/blob/3841ce47b1d6d4611067ff5b8b86dc9cbf290641/app/controllers/repositories/lfs_storage_controller.rb#L118
+ project: project,
+ lfs_object: object
+ )
+ end
+
+ def validate_uploaded_file!
+ return unless uploaded_file
+
+ return unless size != uploaded_file.size || oid != uploaded_file.sha256
+
+ raise InvalidUploadedFile
+ end
+ end
+end
diff --git a/app/services/lfs/lock_file_service.rb b/app/services/lfs/lock_file_service.rb
index 54f193c86e6..b4505e2e49d 100644
--- a/app/services/lfs/lock_file_service.rb
+++ b/app/services/lfs/lock_file_service.rb
@@ -25,8 +25,9 @@ module Lfs
# rubocop: enable CodeReuse/ActiveRecord
def create_lock!
- lock = project.lfs_file_locks.create!(user: current_user,
- path: params[:path])
+ lock = project.lfs_file_locks.create!(user: current_user, path: params[:path])
+
+ project.refresh_lfs_file_locks_changed_epoch
success(http_status: 201, lock: lock)
end
diff --git a/app/services/lfs/unlock_file_service.rb b/app/services/lfs/unlock_file_service.rb
index 7a3025ee7ea..a96994af7a9 100644
--- a/app/services/lfs/unlock_file_service.rb
+++ b/app/services/lfs/unlock_file_service.rb
@@ -24,11 +24,13 @@ module Lfs
if lock.can_be_unlocked_by?(current_user, forced)
lock.destroy!
+ project.refresh_lfs_file_locks_changed_epoch
+
success(lock: lock, http_status: :ok)
elsif forced
error(_('You must have maintainer access to force delete a lock'), 403)
else
- error(_("%{lock_path} is locked by GitLab User %{lock_user_id}") % { lock_path: lock.path, lock_user_id: lock.user_id }, 403)
+ error(format(_("'%{lock_path}' is locked by @%{lock_user_name}"), lock_path: lock.path, lock_user_name: lock.user.username), 403)
end
end
diff --git a/app/services/loose_foreign_keys/batch_cleaner_service.rb b/app/services/loose_foreign_keys/batch_cleaner_service.rb
index 95eb8b47009..d43ee5cf508 100644
--- a/app/services/loose_foreign_keys/batch_cleaner_service.rb
+++ b/app/services/loose_foreign_keys/batch_cleaner_service.rb
@@ -5,11 +5,20 @@ module LooseForeignKeys
CLEANUP_ATTEMPTS_BEFORE_RESCHEDULE = 3
CONSUME_AFTER_RESCHEDULE = 5.minutes
- def initialize(parent_table:, loose_foreign_key_definitions:, deleted_parent_records:, modification_tracker: LooseForeignKeys::ModificationTracker.new)
+ def initialize(
+ parent_table:,
+ loose_foreign_key_definitions:,
+ deleted_parent_records:,
+ connection:,
+ logger: Sidekiq.logger,
+ modification_tracker: LooseForeignKeys::ModificationTracker.new
+ )
@parent_table = parent_table
@loose_foreign_key_definitions = loose_foreign_key_definitions
@deleted_parent_records = deleted_parent_records
@modification_tracker = modification_tracker
+ @connection = connection
+ @logger = logger
@deleted_records_counter = Gitlab::Metrics.counter(
:loose_foreign_key_processed_deleted_records,
'The number of processed loose foreign key deleted records'
@@ -44,14 +53,16 @@ module LooseForeignKeys
return if modification_tracker.over_limit?
# At this point, all associations are cleaned up, we can update the status of the parent records
- update_count = LooseForeignKeys::DeletedRecord.mark_records_processed(deleted_parent_records)
+ update_count = Gitlab::Database::SharedModel.using_connection(connection) do
+ LooseForeignKeys::DeletedRecord.mark_records_processed(deleted_parent_records)
+ end
deleted_records_counter.increment({ table: parent_table, db_config_name: db_config_name }, update_count)
end
private
- attr_reader :parent_table, :loose_foreign_key_definitions, :deleted_parent_records, :modification_tracker, :deleted_records_counter, :deleted_records_rescheduled_count, :deleted_records_incremented_count
+ attr_reader :parent_table, :loose_foreign_key_definitions, :deleted_parent_records, :modification_tracker, :deleted_records_counter, :deleted_records_rescheduled_count, :deleted_records_incremented_count, :connection, :logger
def handle_over_limit
records_to_reschedule = []
@@ -65,11 +76,13 @@ module LooseForeignKeys
end
end
- reschedule_count = LooseForeignKeys::DeletedRecord.reschedule(records_to_reschedule, CONSUME_AFTER_RESCHEDULE.from_now)
- deleted_records_rescheduled_count.increment({ table: parent_table, db_config_name: db_config_name }, reschedule_count)
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ reschedule_count = LooseForeignKeys::DeletedRecord.reschedule(records_to_reschedule, CONSUME_AFTER_RESCHEDULE.from_now)
+ deleted_records_rescheduled_count.increment({ table: parent_table, db_config_name: db_config_name }, reschedule_count)
- increment_count = LooseForeignKeys::DeletedRecord.increment_attempts(records_to_increment)
- deleted_records_incremented_count.increment({ table: parent_table, db_config_name: db_config_name }, increment_count)
+ increment_count = LooseForeignKeys::DeletedRecord.increment_attempts(records_to_increment)
+ deleted_records_incremented_count.increment({ table: parent_table, db_config_name: db_config_name }, increment_count)
+ end
end
def record_result(cleaner, result)
@@ -77,30 +90,48 @@ module LooseForeignKeys
modification_tracker.add_deletions(result[:table], result[:affected_rows])
elsif cleaner.async_nullify?
modification_tracker.add_updates(result[:table], result[:affected_rows])
+ else
+ logger.error("Invalid on_delete argument for definition: #{result[:table]}")
+ false
end
end
def run_cleaner_service(loose_foreign_key_definition, with_skip_locked:)
base_models_for_gitlab_schema = Gitlab::Database.schemas_to_base_models.fetch(loose_foreign_key_definition.options[:gitlab_schema])
+
base_models_for_gitlab_schema.each do |base_model|
- cleaner = CleanerService.new(
+ table_partitioned = Gitlab::Database::SharedModel.using_connection(base_model.connection) do
+ Gitlab::Database::PostgresPartitionedTable.find_by_name_in_current_schema(loose_foreign_key_definition.from_table).present?
+ end
+
+ klass =
+ if table_partitioned
+ PartitionCleanerService
+ else
+ CleanerService
+ end
+
+ cleaner = klass.new(
loose_foreign_key_definition: loose_foreign_key_definition,
connection: base_model.connection,
deleted_parent_records: deleted_parent_records,
- with_skip_locked: with_skip_locked
+ with_skip_locked: with_skip_locked,
+ logger: logger
)
loop do
result = cleaner.execute
- record_result(cleaner, result)
+ recorded = record_result(cleaner, result)
- break if modification_tracker.over_limit? || result[:affected_rows] == 0
+ break if modification_tracker.over_limit? || result[:affected_rows] == 0 || !recorded
end
end
end
def db_config_name
- LooseForeignKeys::DeletedRecord.connection.pool.db_config.name
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ LooseForeignKeys::DeletedRecord.connection.pool.db_config.name
+ end
end
end
end
diff --git a/app/services/loose_foreign_keys/cleaner_service.rb b/app/services/loose_foreign_keys/cleaner_service.rb
index 44a922aad87..d12f7c2b6ea 100644
--- a/app/services/loose_foreign_keys/cleaner_service.rb
+++ b/app/services/loose_foreign_keys/cleaner_service.rb
@@ -6,11 +6,12 @@ module LooseForeignKeys
DELETE_LIMIT = 1000
UPDATE_LIMIT = 500
- def initialize(loose_foreign_key_definition:, connection:, deleted_parent_records:, with_skip_locked: false)
+ def initialize(loose_foreign_key_definition:, connection:, deleted_parent_records:, logger: Sidekiq.logger, with_skip_locked: false)
@loose_foreign_key_definition = loose_foreign_key_definition
@connection = connection
@deleted_parent_records = deleted_parent_records
@with_skip_locked = with_skip_locked
+ @logger = logger
end
def execute
@@ -29,7 +30,7 @@ module LooseForeignKeys
private
- attr_reader :loose_foreign_key_definition, :connection, :deleted_parent_records, :with_skip_locked
+ attr_reader :loose_foreign_key_definition, :connection, :deleted_parent_records, :with_skip_locked, :logger
def build_query
query = if async_delete?
@@ -37,11 +38,13 @@ module LooseForeignKeys
elsif async_nullify?
update_query
else
- raise "Invalid on_delete argument: #{loose_foreign_key_definition.on_delete}"
+ logger.error("Invalid on_delete argument: #{loose_foreign_key_definition.on_delete}")
+ return ""
end
unless query.include?(%{"#{loose_foreign_key_definition.column}" IN (})
- raise("FATAL: foreign key condition is missing from the generated query: #{query}")
+ logger.error("FATAL: foreign key condition is missing from the generated query: #{query}")
+ return ""
end
query
diff --git a/app/services/loose_foreign_keys/partition_cleaner_service.rb b/app/services/loose_foreign_keys/partition_cleaner_service.rb
new file mode 100644
index 00000000000..7a8a9e6e594
--- /dev/null
+++ b/app/services/loose_foreign_keys/partition_cleaner_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module LooseForeignKeys
+ class PartitionCleanerService < CleanerService
+ def execute
+ result = execute_partitioned_queries
+
+ { affected_rows: result, table: loose_foreign_key_definition.from_table }
+ end
+
+ private
+
+ def arel_table
+ Arel::Table.new(@partition_identifier)
+ end
+
+ def primary_keys
+ connection.primary_keys(@partition_identifier).map { |key| arel_table[key] }
+ end
+
+ def quoted_table_name
+ Arel.sql(connection.quote_table_name(@partition_identifier))
+ end
+
+ def execute_partitioned_queries
+ sum = 0
+
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ target_table = loose_foreign_key_definition.from_table
+
+ Gitlab::Database::PostgresPartitionedTable.each_partition(target_table) do |partition|
+ @partition_identifier = partition.identifier
+
+ result = connection.execute(build_query)
+ sum += result.cmd_tuples
+ end
+ end
+
+ sum
+ end
+ end
+end
diff --git a/app/services/loose_foreign_keys/process_deleted_records_service.rb b/app/services/loose_foreign_keys/process_deleted_records_service.rb
index e0c9c19f5b9..b3e79815ac9 100644
--- a/app/services/loose_foreign_keys/process_deleted_records_service.rb
+++ b/app/services/loose_foreign_keys/process_deleted_records_service.rb
@@ -4,9 +4,10 @@ module LooseForeignKeys
class ProcessDeletedRecordsService
BATCH_SIZE = 1000
- def initialize(connection:, modification_tracker: LooseForeignKeys::ModificationTracker.new)
+ def initialize(connection:, logger: Sidekiq.logger, modification_tracker: LooseForeignKeys::ModificationTracker.new)
@connection = connection
@modification_tracker = modification_tracker
+ @logger = logger
end
def execute
@@ -30,6 +31,8 @@ module LooseForeignKeys
parent_table: table,
loose_foreign_key_definitions: loose_foreign_key_definitions,
deleted_parent_records: records,
+ connection: connection,
+ logger: logger,
modification_tracker: modification_tracker)
.execute
@@ -54,15 +57,17 @@ module LooseForeignKeys
private
- attr_reader :connection, :modification_tracker
+ attr_reader :connection, :logger, :modification_tracker
def db_config_name
::Gitlab::Database.db_config_name(connection)
end
def load_batch_for_table(table)
- fully_qualified_table_name = "#{current_schema}.#{table}"
- LooseForeignKeys::DeletedRecord.load_batch_for_table(fully_qualified_table_name, BATCH_SIZE)
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ fully_qualified_table_name = "#{current_schema}.#{table}"
+ LooseForeignKeys::DeletedRecord.load_batch_for_table(fully_qualified_table_name, BATCH_SIZE)
+ end
end
def current_schema
diff --git a/app/services/markdown_content_rewriter_service.rb b/app/services/markdown_content_rewriter_service.rb
index 4d8f523fa77..6d47b89f260 100644
--- a/app/services/markdown_content_rewriter_service.rb
+++ b/app/services/markdown_content_rewriter_service.rb
@@ -67,6 +67,6 @@ class MarkdownContentRewriterService
end
attr_reader :current_user, :content, :source_parent,
- :target_parent, :rewriters, :content_html,
- :field, :html_field, :object, :result
+ :target_parent, :rewriters, :content_html,
+ :field, :html_field, :object, :result
end
diff --git a/app/services/members/activity_service.rb b/app/services/members/activity_service.rb
new file mode 100644
index 00000000000..effa2b819cf
--- /dev/null
+++ b/app/services/members/activity_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Members
+ class ActivityService
+ include ExclusiveLeaseGuard
+
+ def initialize(user, namespace)
+ @user = user
+ @namespace = namespace&.root_ancestor
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'Invalid params') unless namespace && user
+
+ try_obtain_lease do
+ find_members
+
+ break unless members.any?
+
+ # Rails throws away the `in_hierarchy` scope, so this generates a WHERE IN instead
+ # rubocop:disable CodeReuse/ActiveRecord -- Scope is lost
+ Member.where(id: members.select(:id)).touch_all(:last_activity_on)
+ # rubocop:enable CodeReuse/ActiveRecord
+ end
+
+ ServiceResponse.success(message: 'Member activity tracked')
+ end
+
+ private
+
+ attr_reader :user, :namespace, :members
+
+ def lease_timeout
+ (Time.current.end_of_day - Time.current).to_i
+ end
+
+ def lease_key
+ "members_activity_event:#{namespace.id}:#{user.id}"
+ end
+
+ # Used by ExclusiveLeaseGuard
+ # Overriding value as we only release the lease
+ # before the timeout if there was no members found, in order to prevent multiple
+ # updates in a short span of time but allow an update if the member is added later
+ def lease_release?
+ members.empty?
+ end
+
+ def find_members
+ @members = Member.in_hierarchy(namespace).with_user(user).no_activity_today
+ end
+ end
+end
diff --git a/app/services/members/approve_access_request_service.rb b/app/services/members/approve_access_request_service.rb
index f8c91fbae7d..906162eb93d 100644
--- a/app/services/members/approve_access_request_service.rb
+++ b/app/services/members/approve_access_request_service.rb
@@ -25,7 +25,7 @@ module Members
raise Gitlab::Access::AccessDeniedError unless can_approve_access_requester?(access_requester)
if approving_member_with_owner_access_level?(access_requester) &&
- cannot_assign_owner_responsibilities_to_member_in_project?(access_requester)
+ cannot_assign_owner_responsibilities_to_member_in_project?(access_requester)
raise Gitlab::Access::AccessDeniedError
end
end
diff --git a/app/services/members/base_service.rb b/app/services/members/base_service.rb
index 80fba33b20e..8d5996fda2b 100644
--- a/app/services/members/base_service.rb
+++ b/app/services/members/base_service.rb
@@ -76,6 +76,6 @@ module Members
end
alias_method :cannot_revoke_owner_responsibilities_from_member_in_project?,
- :cannot_assign_owner_responsibilities_to_member_in_project?
+ :cannot_assign_owner_responsibilities_to_member_in_project?
end
end
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index ec9a4f9f4a6..fcf4dd8e0dd 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -5,6 +5,7 @@ module Members
BlankInvitesError = Class.new(StandardError)
TooManyInvitesError = Class.new(StandardError)
MembershipLockedError = Class.new(StandardError)
+ SeatLimitExceededError = Class.new(StandardError)
DEFAULT_INVITE_LIMIT = 100
@@ -37,13 +38,14 @@ module Members
publish_event!
result
- rescue BlankInvitesError, TooManyInvitesError, MembershipLockedError => e
+ rescue BlankInvitesError, TooManyInvitesError, MembershipLockedError, SeatLimitExceededError => e
Gitlab::ErrorTracking.log_exception(e, class: self.class.to_s, user_id: current_user.id)
- error(e.message)
+
+ error(e.message, pass_back: { reason: e.class.name.demodulize.underscore.to_sym })
end
def single_member
- members.last
+ members&.last
end
private
@@ -60,10 +62,18 @@ module Members
def invites_from_params
# String, Nil, Array, Integer
- return params[:user_id] if params[:user_id].is_a?(Array)
- return [] unless params[:user_id]
+ users = param_to_array(params[:user_id] || params[:username])
+ if params.key?(:username)
+ User.by_username(users).pluck_primary_key
+ else
+ users.to_a
+ end
+ end
+
+ def param_to_array(param)
+ return param if param.is_a?(Array)
- params[:user_id].to_s.split(',').uniq
+ param.to_s.split(',').uniq
end
def validate_source_type!
diff --git a/app/services/members/creator_service.rb b/app/services/members/creator_service.rb
index 57159c14b3b..30a83d8f5d0 100644
--- a/app/services/members/creator_service.rb
+++ b/app/services/members/creator_service.rb
@@ -56,20 +56,24 @@ module Members
existing_members: existing_members
}.merge(parsed_args(args))
- members = emails.map do |email|
- new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
- end
-
- members += users.map do |user|
- new(invitee: user, **common_arguments).execute
- end
-
- members
+ build_members(emails, users, common_arguments)
end
end
end
end
+ def build_members(emails, users, common_arguments)
+ members = emails.map do |email|
+ new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
+ end
+
+ members += users.map do |user|
+ new(invitee: user, **common_arguments).execute
+ end
+
+ members
+ end
+
def add_member(source, invitee, access_level, **args)
add_members(source, [invitee], access_level, **args).first
end
@@ -112,11 +116,23 @@ module Members
# de-duplicate just in case as there is no controlling if user records and ids are sent multiple times
users.uniq!
- users_by_emails = source.users_by_emails(emails) # preloads our request store for all emails
+ # We need to downcase any input of emails here for our caching so that emails sent in with uppercase
+ # are also found since all emails are stored in users, emails tables downcase. user.private_commit_emails are
+ # not though, so we'll never cache those I guess at this layer for now.
+ # Since there is possibility of duplicate values once we downcase, we'll de-duplicate.
+ # Uniq call here has no current testable impact as it will get the same parsed_emails
+ # result without it, but it merely helps it do a bit less work.
+ case_insensitive_emails = emails.map(&:downcase).uniq
+ users_by_emails = source.users_by_emails(case_insensitive_emails) # preloads our request store for all emails
# in case emails belong to a user that is being invited by user or user_id, remove them from
# emails and let users/user_ids handle it.
+ # parsed_emails have to preserve casing due to the invite process also being used to update
+ # existing members and we have to let them be found if not lowercased.
parsed_emails = emails.select do |email|
- user = users_by_emails[email]
+ # Since we are caching by lowercased emails as a key for the users as they only
+ # ever have lowercased emails(except for private_commit_emails), we need to then
+ # operate against that cache for lookups like here with a matching lowercase.
+ user = users_by_emails[email.downcase]
!user || (users.exclude?(user) && user_ids.exclude?(user.id))
end
diff --git a/app/services/members/destroy_service.rb b/app/services/members/destroy_service.rb
index 14cdb83b4e5..b1e77048603 100644
--- a/app/services/members/destroy_service.rb
+++ b/app/services/members/destroy_service.rb
@@ -206,9 +206,15 @@ module Members
def enqueue_unassign_issuables(member)
source_type = member.is_a?(GroupMember) ? 'Group' : 'Project'
+ current_user_id = current_user.id
member.run_after_commit_or_now do
- MembersDestroyer::UnassignIssuablesWorker.perform_async(member.user_id, member.source_id, source_type)
+ MembersDestroyer::UnassignIssuablesWorker.perform_async(
+ member.user_id,
+ member.source_id,
+ source_type,
+ current_user_id
+ )
end
end
end
diff --git a/app/services/members/import_project_team_service.rb b/app/services/members/import_project_team_service.rb
index ef43d8206a9..21320fc3b35 100644
--- a/app/services/members/import_project_team_service.rb
+++ b/app/services/members/import_project_team_service.rb
@@ -3,6 +3,7 @@
module Members
class ImportProjectTeamService < BaseService
ImportProjectTeamForbiddenError = Class.new(StandardError)
+ SeatLimitExceededError = Class.new(StandardError)
def initialize(*args)
super
@@ -13,13 +14,14 @@ module Members
def execute
check_target_and_source_projects_exist!
check_user_permissions!
+ check_seats!
import_project_team
process_import_result
result
- rescue ArgumentError, ImportProjectTeamForbiddenError => e
- ServiceResponse.error(message: e.message, reason: :unprocessable_entity)
+ rescue ArgumentError, ImportProjectTeamForbiddenError, SeatLimitExceededError => e
+ ServiceResponse.error(message: e.message, reason: e.class.name.demodulize.underscore.to_sym)
end
private
@@ -32,7 +34,7 @@ module Members
if members.is_a?(Array)
members.each { |member| check_member_validity(member) }
else
- @result = ServiceResponse.error(message: 'Import failed', reason: :unprocessable_entity)
+ @result = ServiceResponse.error(message: 'Import failed', reason: :import_failed_error)
end
end
@@ -44,6 +46,10 @@ module Members
end
end
+ def check_seats!
+ # Overridden in EE
+ end
+
def check_user_permissions!
return if can?(current_user, :read_project_member, source_project) &&
can?(current_user, :import_project_members_from_another_project, target_project)
@@ -82,3 +88,5 @@ module Members
end
end
end
+
+Members::ImportProjectTeamService.prepend_mod_with('Members::ImportProjectTeamService')
diff --git a/app/services/members/invite_member_builder.rb b/app/services/members/invite_member_builder.rb
index e925121bb1e..5164ab443ab 100644
--- a/app/services/members/invite_member_builder.rb
+++ b/app/services/members/invite_member_builder.rb
@@ -6,14 +6,20 @@ module Members
if user_by_email
find_or_initialize_member_by_user(user_by_email.id)
else
- source.members_and_requesters.find_or_initialize_by(invite_email: invitee) # rubocop:disable CodeReuse/ActiveRecord
+ source.members_and_requesters.find_or_initialize_by(invite_email: invitee).tap do |record| # rubocop:disable CodeReuse/ActiveRecord -- TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/462165
+ # We do not want to cause misses for existing records as the invite process is also sometimes used
+ # as a way to update existing invites.
+ record.invite_email = invitee.downcase if record.new_record?
+ end
end
end
private
def user_by_email
- source.users_by_emails([invitee])[invitee]
+ # Since we cache the user lookups for the emails in lowercase format, we
+ # now need to look them up the same way to ensure we don't get cache misses.
+ source.users_by_emails([invitee.downcase])[invitee.downcase]
end
end
end
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
index 6d23a9bc2dc..4426bb8ae6f 100644
--- a/app/services/members/invite_service.rb
+++ b/app/services/members/invite_service.rb
@@ -56,7 +56,14 @@ module Members
end
def invited_object(member)
- return member.invite_email if member.invite_email
+ if member.invite_email
+        # We reverse here because, in the case of duplicate emails on the same request, the last
+        # one is likely the issue: the first one will be committed to the db first, so it is the
+        # last instance of that email that has the error.
+        # For updates, they can still have an upper-case email, so we need to compare
+        # case-insensitively on both sides of this find.
+ return invites.reverse.find { |email| email.casecmp?(member.invite_email) }
+ end
# There is a case where someone was invited by email, but the `user` record exists.
# The member record returned will not have an invite_email attribute defined since
@@ -71,8 +78,14 @@ module Members
if member.user_id.to_s.in?(invites)
member.user.username
else
- member.user.all_emails.detect { |email| email.in?(invites) }
+          # We find the correct match here case insensitively against user.all_emails, since it
+          # can have an uppercase email for private_commit_email.
+          # We need to downcase our invites against the rest, since the user could input an
+          # uppercase invite and we need to find the case-insensitive match on that.
+ invites.find { |email| email.downcase.in?(member.user.all_emails.map(&:downcase)) }
end
end
end
end
+
+Members::InviteService.prepend_mod
diff --git a/app/services/members/unassign_issuables_service.rb b/app/services/members/unassign_issuables_service.rb
index 95e07deb761..1cbbe6859b3 100644
--- a/app/services/members/unassign_issuables_service.rb
+++ b/app/services/members/unassign_issuables_service.rb
@@ -2,22 +2,78 @@
module Members
class UnassignIssuablesService
- attr_reader :user, :entity
+ attr_reader :user, :entity, :requesting_user
- def initialize(user, entity)
+ # @param [User] user user whose membership is being deleted from entity
+ # @param [Group, Project] entity
+ # @param [User] requesting_user user who initiated the membership deletion of `user`
+ def initialize(user, entity, requesting_user)
@user = user
+ @requesting_user = requesting_user
@entity = entity
end
def execute
+ raise ArgumentError, 'requesting_user must be given' unless requesting_user
+
return unless entity && user
project_ids = entity.is_a?(Group) ? entity.all_projects.select(:id) : [entity.id]
- user.issue_assignees.on_issues(Issue.in_projects(project_ids).select(:id)).delete_all
- user.merge_request_assignees.in_projects(project_ids).delete_all
+ unassign_from_issues(project_ids)
+ unassign_from_merge_requests(project_ids)
user.invalidate_cache_counts
end
+
+ private
+
+ def unassign_from_issues(project_ids)
+ IssueAssignee
+ .for_assignee(user)
+ .in_projects(project_ids)
+ .each_batch(column: :issue_id) do |assignees|
+ assignees.each do |assignee|
+ issue = assignee.issue
+ next unless issue
+
+ Issues::UpdateService.new(
+ container: issue.project,
+ current_user: requesting_user,
+ params: { assignee_ids: new_assignee_ids(issue) }
+ ).execute(issue)
+
+ rescue ActiveRecord::StaleObjectError
+ # It's possible for `issue` to be stale (removed) by the time Issues::UpdateService attempts to update it.
+ # Continue to the next item.
+ end
+ end
+ end
+
+ def unassign_from_merge_requests(project_ids)
+ MergeRequestAssignee
+ .for_assignee(user)
+ .in_projects(project_ids)
+ .each_batch(column: :merge_request_id) do |assignees|
+ assignees.each do |assignee|
+ merge_request = assignee.merge_request
+ next unless merge_request
+
+ ::MergeRequests::UpdateAssigneesService.new(
+ project: merge_request.project,
+ current_user: requesting_user,
+ params: { assignee_ids: new_assignee_ids(merge_request) }
+ ).execute(merge_request)
+
+ rescue ActiveRecord::StaleObjectError
+ # It's possible for `merge_request` to be stale (removed) by the time
+ # MergeRequests::UpdateAssigneesService attempts to update it. Continue to the next item.
+ end
+ end
+ end
+
+ def new_assignee_ids(issuable)
+ issuable.assignees.map(&:id) - [user.id]
+ end
end
end
diff --git a/app/services/members/update_service.rb b/app/services/members/update_service.rb
index 3a3d0e53aae..e126f062be1 100644
--- a/app/services/members/update_service.rb
+++ b/app/services/members/update_service.rb
@@ -8,7 +8,7 @@ module Members
members = Array.wrap(members)
old_access_level_expiry_map = members.to_h do |member|
- [member.id, { human_access: member.human_access, expires_at: member.expires_at }]
+ [member.id, { human_access: member.human_access_labeled, expires_at: member.expires_at }]
end
updated_members = update_members(members, permission)
@@ -17,6 +17,9 @@ module Members
end
prepare_response(members)
+
+ rescue ActiveRecord::RecordInvalid
+ prepare_response(members)
end
private
@@ -26,8 +29,6 @@ module Members
Member.transaction do
members.filter_map { |member| update_member(member, permission) }
end
- rescue ActiveRecord::RecordInvalid
- []
end
def update_member(member, permission)
@@ -37,6 +38,7 @@ module Members
return unless member.changed?
member.expiry_notified_at = nil if member.expires_at_changed?
+
member.tap(&:save!)
end
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index 5c1ec5add73..fdbcc03f288 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -7,13 +7,14 @@ module MergeRequests
def execute(merge_request)
merge_request.ensure_merge_request_diff
- logger.info(**log_payload(merge_request, 'Executing hooks'))
- execute_hooks(merge_request)
- logger.info(**log_payload(merge_request, 'Executed hooks'))
prepare_for_mergeability(merge_request)
prepare_merge_request(merge_request)
mark_merge_request_as_prepared(merge_request)
+
+ logger.info(**log_payload(merge_request, 'Executing hooks'))
+ execute_hooks(merge_request)
+ logger.info(**log_payload(merge_request, 'Executed hooks'))
end
private
@@ -42,7 +43,11 @@ module MergeRequests
todo_service.new_merge_request(merge_request, current_user)
merge_request.cache_merge_request_closes_issues!(current_user)
- Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
+ Gitlab::InternalEvents.track_event(
+ 'create_merge_request',
+ user: current_user,
+ project: merge_request.target_project
+ )
link_lfs_objects(merge_request)
end
diff --git a/app/services/merge_requests/approval_service.rb b/app/services/merge_requests/approval_service.rb
index 8458eaeaf57..2770bb95c70 100644
--- a/app/services/merge_requests/approval_service.rb
+++ b/app/services/merge_requests/approval_service.rb
@@ -13,12 +13,13 @@ module MergeRequests
return success unless save_approval(approval)
+ update_reviewer_state(merge_request, current_user, 'approved')
+
reset_approvals_cache(merge_request)
merge_request_activity_counter.track_approve_mr_action(user: current_user, merge_request: merge_request)
trigger_merge_request_merge_status_updated(merge_request)
- trigger_merge_request_reviewers_updated(merge_request)
trigger_merge_request_approval_state_updated(merge_request)
# Approval side effects (things not required to be done immediately but
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 1477307c67b..26fee25b890 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -17,8 +17,8 @@ module MergeRequests
end
def hook_data(merge_request, action, old_rev: nil, old_associations: {})
- hook_data = merge_request.to_hook_data(current_user, old_associations: old_associations)
- hook_data[:object_attributes][:action] = action
+ hook_data = merge_request.to_hook_data(current_user, old_associations: old_associations, action: action)
+
if old_rev && !Gitlab::Git.blank_ref?(old_rev)
hook_data[:object_attributes][:oldrev] = old_rev
end
@@ -88,6 +88,7 @@ module MergeRequests
trigger_merge_request_reviewers_updated(merge_request)
capture_suggested_reviewers_accepted(merge_request)
+ set_first_reviewer_assigned_at_metrics(merge_request) if new_reviewers.any?
end
def cleanup_environments(merge_request)
@@ -120,7 +121,7 @@ module MergeRequests
end
def deactivate_pages_deployments(merge_request)
- Pages::DeactivateMrDeploymentsWorker.perform_async(merge_request)
+ Pages::DeactivateMrDeploymentsWorker.perform_async(merge_request.id)
end
private
@@ -278,9 +279,34 @@ module MergeRequests
# Implemented in EE
end
- def remove_approval(merge_request)
- MergeRequests::RemoveApprovalService.new(project: project, current_user: current_user)
- .execute(merge_request)
+ def set_first_reviewer_assigned_at_metrics(merge_request)
+ metrics = merge_request.metrics
+ return unless metrics
+
+ current_time = Time.current
+
+ return if metrics.reviewer_first_assigned_at && metrics.reviewer_first_assigned_at <= current_time
+
+ metrics.update(reviewer_first_assigned_at: current_time)
+ end
+
+ def remove_approval(merge_request, user)
+ MergeRequests::RemoveApprovalService.new(project: project, current_user: user)
+ .execute(merge_request, skip_system_note: true, skip_notification: true, skip_updating_state: true)
+ end
+
+ def update_reviewer_state(merge_request, user, state)
+ ::MergeRequests::UpdateReviewerStateService
+ .new(project: merge_request.project, current_user: user)
+ .execute(merge_request, state)
+ end
+
+ def abort_auto_merge_with_todo(merge_request, reason)
+ response = abort_auto_merge(merge_request, reason)
+ response = ServiceResponse.new(**response)
+ return unless response.success?
+
+ todo_service.merge_request_became_unmergeable(merge_request)
end
end
end
diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb
index bb347096274..b8f9ae7a362 100644
--- a/app/services/merge_requests/build_service.rb
+++ b/app/services/merge_requests/build_service.rb
@@ -9,7 +9,7 @@ module MergeRequests
self.merge_request = MergeRequest.new
# TODO: this should handle all quick actions that don't have side effects
# https://gitlab.com/gitlab-org/gitlab-foss/issues/53658
- merge_quick_actions_into_params!(merge_request, only: [:target_branch])
+ merge_quick_actions_into_params!(merge_request, params: params, only: [:target_branch])
# Assign the projects first so we can use policies for `filter_params`
merge_request.author = current_user
@@ -202,11 +202,11 @@ module MergeRequests
end
def source_branch_exists?
- source_branch.blank? || source_project.commit(source_branch)
+ source_branch.blank? || source_project.branch_exists?(source_branch)
end
def target_branch_exists?
- target_branch.blank? || target_project.commit(target_branch)
+ target_branch.blank? || target_project.branch_exists?(target_branch)
end
def set_draft_title_if_needed
diff --git a/app/services/merge_requests/cleanup_refs_service.rb b/app/services/merge_requests/cleanup_refs_service.rb
index 5081655601b..95c9164fa8c 100644
--- a/app/services/merge_requests/cleanup_refs_service.rb
+++ b/app/services/merge_requests/cleanup_refs_service.rb
@@ -66,7 +66,7 @@ module MergeRequests
end
def keep_around
- repository.keep_around(ref_head_sha, merge_ref_sha)
+ repository.keep_around(ref_head_sha, merge_ref_sha, source: self.class.name)
end
def cache_merge_ref_sha
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index 4f20ade2a42..72114d05e11 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -21,7 +21,7 @@ module MergeRequests
##
# UpdateMergeRequestsWorker could be retried by an exception.
# pipelines for merge request should not be recreated in such case.
- return false if !allow_duplicate && merge_request.find_actual_head_pipeline&.merge_request?
+ return false if !allow_duplicate && merge_request.find_diff_head_pipeline&.merge_request?
return false if merge_request.has_no_commits?
true
diff --git a/app/services/merge_requests/create_ref_service.rb b/app/services/merge_requests/create_ref_service.rb
index 1e5e127072e..de6e2f81595 100644
--- a/app/services/merge_requests/create_ref_service.rb
+++ b/app/services/merge_requests/create_ref_service.rb
@@ -20,8 +20,11 @@ module MergeRequests
end
def execute
- # TODO: Update this message with the removal of FF merge_trains_create_ref_service and update tests
- # This is for compatibility with MergeToRefService during the rollout.
+ # The "3:" prefix is for compatibility with the output of
+ # MergeToRefService, which is still used to create merge refs and some
+ # merge train refs. The prefix can be dropped once MergeToRefService is no
+ # longer used. See https://gitlab.com/gitlab-org/gitlab/-/issues/455421
+ # and https://gitlab.com/gitlab-org/gitlab/-/issues/421025
return ServiceResponse.error(message: '3:Invalid merge source') unless first_parent_sha.present?
result = {
diff --git a/app/services/merge_requests/export_csv_service.rb b/app/services/merge_requests/export_csv_service.rb
index 96b4cdd0fe5..22276b3287d 100644
--- a/app/services/merge_requests/export_csv_service.rb
+++ b/app/services/merge_requests/export_csv_service.rb
@@ -16,23 +16,23 @@ module MergeRequests
'Title' => 'title',
'Description' => 'description',
'MR IID' => 'iid',
- 'URL' => -> (merge_request) { merge_request_url(merge_request) },
+ 'URL' => ->(merge_request) { merge_request_url(merge_request) },
'State' => 'state',
'Source Branch' => 'source_branch',
'Target Branch' => 'target_branch',
'Source Project ID' => 'source_project_id',
'Target Project ID' => 'target_project_id',
- 'Author' => -> (merge_request) { merge_request.author.name },
- 'Author Username' => -> (merge_request) { merge_request.author.username },
- 'Assignees' => -> (merge_request) { merge_request.assignees.map(&:name).join(', ') },
- 'Assignee Usernames' => -> (merge_request) { merge_request.assignees.map(&:username).join(', ') },
- 'Approvers' => -> (merge_request) { merge_request.approved_by_users.map(&:name).join(', ') },
- 'Approver Usernames' => -> (merge_request) { merge_request.approved_by_users.map(&:username).join(', ') },
- 'Merged User' => -> (merge_request) { merge_request.metrics&.merged_by&.name.to_s },
- 'Merged Username' => -> (merge_request) { merge_request.metrics&.merged_by&.username.to_s },
- 'Milestone ID' => -> (merge_request) { merge_request&.milestone&.id || '' },
- 'Created At (UTC)' => -> (merge_request) { merge_request.created_at.utc },
- 'Updated At (UTC)' => -> (merge_request) { merge_request.updated_at.utc }
+ 'Author' => ->(merge_request) { merge_request.author.name },
+ 'Author Username' => ->(merge_request) { merge_request.author.username },
+ 'Assignees' => ->(merge_request) { merge_request.assignees.map(&:name).join(', ') },
+ 'Assignee Usernames' => ->(merge_request) { merge_request.assignees.map(&:username).join(', ') },
+ 'Approvers' => ->(merge_request) { merge_request.approved_by_users.map(&:name).join(', ') },
+ 'Approver Usernames' => ->(merge_request) { merge_request.approved_by_users.map(&:username).join(', ') },
+ 'Merged User' => ->(merge_request) { merge_request.metrics&.merged_by&.name.to_s },
+ 'Merged Username' => ->(merge_request) { merge_request.metrics&.merged_by&.username.to_s },
+ 'Milestone ID' => ->(merge_request) { merge_request&.milestone&.id || '' },
+ 'Created At (UTC)' => ->(merge_request) { merge_request.created_at.utc },
+ 'Updated At (UTC)' => ->(merge_request) { merge_request.updated_at.utc }
}
end
end
diff --git a/app/services/merge_requests/handle_assignees_change_service.rb b/app/services/merge_requests/handle_assignees_change_service.rb
index 835d56a7070..ba1e56a3d71 100644
--- a/app/services/merge_requests/handle_assignees_change_service.rb
+++ b/app/services/merge_requests/handle_assignees_change_service.rb
@@ -22,6 +22,10 @@ module MergeRequests
merge_request_activity_counter.track_users_assigned_to_mr(users: new_assignees)
merge_request_activity_counter.track_assignees_changed_action(user: current_user)
+ if current_user.merge_request_dashboard_enabled?
+ invalidate_cache_counts(merge_request, users: merge_request.assignees)
+ end
+
execute_assignees_hooks(merge_request, old_assignees) if options['execute_hooks']
end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index 89e5920a4fb..2fa9e77e5ca 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -11,13 +11,12 @@ module MergeRequests
include Gitlab::Utils::StrongMemoize
GENERIC_ERROR_MESSAGE = 'An error occurred while merging'
- LEASE_TIMEOUT = 15.minutes.to_i
delegate :merge_jid, :state, to: :@merge_request
def execute(merge_request, options = {})
return if merge_request.merged?
- return unless exclusive_lease(merge_request.id).try_obtain
+ return unless exclusive_lease(merge_request).try_obtain
merge_strategy_class = options[:merge_strategy] || MergeRequests::MergeStrategies::FromSourceBranch
@merge_strategy = merge_strategy_class.new(merge_request, current_user, merge_params: params, options: options)
@@ -31,7 +30,6 @@ module MergeRequests
merge_request.in_locked_state do
if commit
after_merge
- clean_merge_jid
success
end
end
@@ -40,7 +38,7 @@ module MergeRequests
rescue MergeError, MergeRequests::MergeStrategies::StrategyError => e
handle_merge_error(log_message: e.message, save_message_on_model: true)
ensure
- exclusive_lease(merge_request.id).cancel
+ exclusive_lease(merge_request).cancel
end
private
@@ -102,9 +100,14 @@ module MergeRequests
end
def after_merge
- log_info("Post merge started on JID #{merge_jid} with state #{state}")
- MergeRequests::PostMergeService.new(project: project, current_user: current_user).execute(merge_request)
- log_info("Post merge finished on JID #{merge_jid} with state #{state}")
+ # Need to store `merge_jid` in a variable since `MergeRequests::PostMergeService`
+ # will call `MergeRequest#mark_as_merged` and will unset `merge_jid`.
+ jid = merge_jid
+
+ log_info("Post merge started on JID #{jid} with state #{state}")
+ MergeRequests::PostMergeService.new(project: project, current_user: current_user, params: { delete_source_branch:
+ delete_source_branch? }).execute(merge_request)
+ log_info("Post merge finished on JID #{jid} with state #{state}")
if delete_source_branch?
MergeRequests::DeleteSourceBranchWorker.perform_async(@merge_request.id, @merge_request.source_branch_sha, branch_deletion_user.id)
@@ -113,10 +116,6 @@ module MergeRequests
merge_request_merge_param
end
- def clean_merge_jid
- merge_request.update_column(:merge_jid, nil)
- end
-
def branch_deletion_user
@merge_request.force_remove_source_branch? ? @merge_request.author : current_user
end
@@ -172,11 +171,9 @@ module MergeRequests
params.with_indifferent_access[:sha] == merge_request.diff_head_sha
end
- def exclusive_lease(merge_request_id)
- strong_memoize(:"exclusive_lease_#{merge_request_id}") do
- lease_key = ['merge_requests_merge_service', merge_request_id].join(':')
-
- Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT)
+ def exclusive_lease(merge_request)
+ strong_memoize(:"exclusive_lease_#{merge_request.id}") do
+ merge_request.merge_exclusive_lease
end
end
end
diff --git a/app/services/merge_requests/merge_strategies/from_source_branch.rb b/app/services/merge_requests/merge_strategies/from_source_branch.rb
index fe0e4d8a90c..731a66ac01d 100644
--- a/app/services/merge_requests/merge_strategies/from_source_branch.rb
+++ b/app/services/merge_requests/merge_strategies/from_source_branch.rb
@@ -18,21 +18,17 @@ module MergeRequests
end
def validate!
- error_message =
- if source_sha.blank?
- 'No source for merge'
- elsif merge_request.should_be_rebased?
- 'Only fast-forward merge is allowed for your project. Please update your source branch'
- elsif !merge_request.mergeable?(
- skip_discussions_check: @options[:skip_discussions_check],
- check_mergeability_retry_lease: @options[:check_mergeability_retry_lease]
- )
- 'Merge request is not mergeable'
- elsif merge_request.missing_required_squash?
- 'This project requires squashing commits when merge requests are accepted.'
- end
+ raise_error('No source for merge') if source_sha.blank?
+
+ if merge_request.should_be_rebased?
+ raise_error('Only fast-forward merge is allowed for your project. Please update your source branch')
+ end
+
+ raise_error('Merge request is not mergeable') unless mergeable?
- raise_error(error_message) if error_message
+ return unless merge_request.missing_required_squash?
+
+ raise_error('This project requires squashing commits when merge requests are accepted.')
end
def execute_git_merge!
@@ -104,6 +100,13 @@ module MergeRequests
merge_request.default_merge_commit_message(user: current_user)
end
+ def mergeable?
+ merge_request.mergeable?(
+ skip_discussions_check: options[:skip_discussions_check],
+ check_mergeability_retry_lease: options[:check_mergeability_retry_lease]
+ )
+ end
+
def raise_error(message)
raise ::MergeRequests::MergeStrategies::StrategyError, message
end
diff --git a/app/services/merge_requests/mergeability/check_base_service.rb b/app/services/merge_requests/mergeability/check_base_service.rb
index 8f8ba812246..495e305806d 100644
--- a/app/services/merge_requests/mergeability/check_base_service.rb
+++ b/app/services/merge_requests/mergeability/check_base_service.rb
@@ -49,6 +49,11 @@ module MergeRequests
.inactive(payload: default_payload(args))
end
+ def warning(**args)
+ Gitlab::MergeRequests::Mergeability::CheckResult
+ .warning(payload: default_payload(args))
+ end
+
def default_payload(args)
args.merge(identifier: self.class.identifier)
end
diff --git a/app/services/merge_requests/mergeability/check_ci_status_service.rb b/app/services/merge_requests/mergeability/check_ci_status_service.rb
index 5c1aaaaf885..15807d4945d 100644
--- a/app/services/merge_requests/mergeability/check_ci_status_service.rb
+++ b/app/services/merge_requests/mergeability/check_ci_status_service.rb
@@ -7,7 +7,7 @@ module MergeRequests
description 'Checks whether CI has passed'
def execute
- return inactive unless merge_request.only_allow_merge_if_pipeline_succeeds?
+ return inactive unless merge_request.auto_merge_enabled? || merge_request.only_allow_merge_if_pipeline_succeeds?
if merge_request.mergeable_ci_state?
success
diff --git a/app/services/merge_requests/mergeability/check_broken_status_service.rb b/app/services/merge_requests/mergeability/check_commits_status_service.rb
index d432375c423..5b480423778 100644
--- a/app/services/merge_requests/mergeability/check_broken_status_service.rb
+++ b/app/services/merge_requests/mergeability/check_commits_status_service.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
+
module MergeRequests
module Mergeability
- class CheckBrokenStatusService < CheckBaseService
- identifier :broken_status
- description 'Checks whether the merge request is broken'
+ class CheckCommitsStatusService < CheckBaseService
+ identifier :commits_status
+ description 'Checks source branch exists and contains commits.'
def execute
- if merge_request.broken?
+ if merge_request.has_no_commits? || merge_request.branch_missing?
failure
else
success
diff --git a/app/services/merge_requests/mergeability/check_conflict_status_service.rb b/app/services/merge_requests/mergeability/check_conflict_status_service.rb
index b60bb0cecb5..d8ee79c3f4c 100644
--- a/app/services/merge_requests/mergeability/check_conflict_status_service.rb
+++ b/app/services/merge_requests/mergeability/check_conflict_status_service.rb
@@ -15,7 +15,7 @@ module MergeRequests
end
def skip?
- false
+ params[:skip_conflict_check].present?
end
def cacheable?
diff --git a/app/services/merge_requests/mergeability/check_lfs_file_locks_service.rb b/app/services/merge_requests/mergeability/check_lfs_file_locks_service.rb
new file mode 100644
index 00000000000..23c4defe4c9
--- /dev/null
+++ b/app/services/merge_requests/mergeability/check_lfs_file_locks_service.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ module Mergeability
+ class CheckLfsFileLocksService < CheckBaseService
+ include ::Gitlab::Utils::StrongMemoize
+
+ identifier :locked_lfs_files
+ description <<~DESC.chomp
+ Checks whether the merge request contains locked LFS files that are locked by users other than the merge request author
+ DESC
+
+ CACHE_KEY = 'merge_request:%{id}:%{sha}:lfs_file_locks_mergeability:%{epoch}'
+
+ def execute
+ return inactive if check_inactive?
+ return failure if contains_locked_lfs_files?
+
+ success
+ end
+
+ def skip?
+ params[:skip_locked_lfs_files_check].present?
+ end
+
+ def cacheable?
+ true
+ end
+
+ def cache_key
+ # If the feature is disabled we will return inactive so we don't need to
+ # link the cache key to a specific MR.
+ return 'inactive_lfs_file_locks_mergeability_check' if check_inactive?
+
+ # Cache is linked to a specific MR
+ id = merge_request.id
+ # Cache is invalidated when new changes are added
+ sha = merge_request.diff_head_sha
+ # Cache is invalidated when lfs_file_locks are added or removed
+ epoch = project.lfs_file_locks_changed_epoch
+
+ format(CACHE_KEY, id: id, sha: sha, epoch: epoch)
+ end
+
+ private
+
+ delegate :project, :author_id, :changed_paths, to: :merge_request
+
+ def contains_locked_lfs_files?
+ project.lfs_file_locks.for_paths(changed_paths.map(&:path)).not_for_users(author_id).exists?
+ end
+
+ def check_inactive?
+ !project.lfs_enabled?
+ end
+ strong_memoize_attr :check_inactive?
+ end
+ end
+end
diff --git a/app/services/merge_requests/mergeability/detailed_merge_status_service.rb b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
index 2e28ffc4363..c9f70d52a94 100644
--- a/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
+++ b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
@@ -15,7 +15,6 @@ module MergeRequests
return :unchecked if unchecked?
if check_results.success?
-
# If everything else is mergeable, but CI is not, the frontend expects two potential states to be returned
# See discussion: gitlab.com/gitlab-org/gitlab/-/merge_requests/96778#note_1093063523
if check_ci_results.failed?
@@ -24,6 +23,12 @@ module MergeRequests
:mergeable
end
else
+ # This check can only fail in EE
+ if check_results.payload[:failed_check] == :not_approved &&
+ merge_request.temporarily_unapproved?
+ return :approvals_syncing
+ end
+
check_results.payload[:failed_check]
end
end
@@ -33,7 +38,7 @@ module MergeRequests
attr_reader :merge_request, :checks, :ci_check
def preparing?
- merge_request.preparing? && !merge_request.merge_request_diff.persisted?
+ merge_request.preparing?
end
def checking?
@@ -48,7 +53,7 @@ module MergeRequests
strong_memoize(:check_results) do
merge_request
.execute_merge_checks(
- MergeRequest.mergeable_state_checks,
+ MergeRequest.all_mergeability_checks,
params: { skip_ci_check: true }
)
end
@@ -61,7 +66,7 @@ module MergeRequests
end
def ci_check_failed_check
- if merge_request.actual_head_pipeline&.active?
+ if merge_request.diff_head_pipeline_considered_in_progress?
:ci_still_running
else
check_ci_results.payload.fetch(:identifier)
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index 2a3f417a33b..99adc5e0725 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -114,11 +114,11 @@ module MergeRequests
merge_to_ref_success = merge_to_ref
- reload_merge_head_diff
update_diff_discussion_positions! if merge_to_ref_success
if merge_to_ref_success && can_git_merge?
merge_request.mark_as_mergeable
+ reload_merge_head_diff
else
merge_request.mark_as_unmergeable
end
diff --git a/app/services/merge_requests/post_merge_service.rb b/app/services/merge_requests/post_merge_service.rb
index d2bfadc2205..f63bd704e80 100644
--- a/app/services/merge_requests/post_merge_service.rb
+++ b/app/services/merge_requests/post_merge_service.rb
@@ -11,7 +11,7 @@ module MergeRequests
MAX_RETARGET_MERGE_REQUESTS = 4
- def execute(merge_request)
+ def execute(merge_request, source = nil)
return if merge_request.merged?
# Mark the merge request as merged, everything that happens afterwards is
@@ -23,7 +23,7 @@ module MergeRequests
merge_request_activity_counter.track_merge_mr_action(user: current_user)
- create_note(merge_request)
+ create_note(merge_request, source)
close_issues(merge_request)
notification_service.merge_mr(merge_request, current_user)
invalidate_cache_counts(merge_request, users: merge_request.assignees | merge_request.reviewers)
@@ -33,32 +33,67 @@ module MergeRequests
cleanup_environments(merge_request)
cleanup_refs(merge_request)
deactivate_pages_deployments(merge_request)
+ cancel_auto_merges_targeting_source_branch(merge_request)
execute_hooks(merge_request, 'merge')
end
+ def create_note(merge_request, source)
+ SystemNoteService.change_status(
+ merge_request,
+ merge_request.target_project,
+ current_user,
+ merge_request.state,
+ source
+ )
+ end
+
private
def close_issues(merge_request)
return unless merge_request.target_branch == project.default_branch
- closed_issues = merge_request.visible_closing_issues_for(current_user)
-
- closed_issues.each do |issue|
- # We are intentionally only closing Issues asynchronously (excluding ExternalIssues)
- # as the worker only supports finding an Issue. We are also only experiencing
- # SQL timeouts when closing an Issue.
- if issue.is_a?(Issue)
- MergeRequests::CloseIssueWorker.perform_async(
- project.id,
- current_user.id,
- issue.id,
- merge_request.id
- )
- else
- Issues::CloseService.new(container: project, current_user: current_user).execute(issue, commit: merge_request)
+ if merge_request.target_project.has_external_issue_tracker?
+ merge_request.closes_issues(current_user).each do |issue|
+ close_issue(issue, merge_request)
end
+ else
+ merge_request.merge_requests_closing_issues.preload_issue.find_each(batch_size: 100) do |closing_issue| # rubocop:disable CodeReuse/ActiveRecord -- Would require exact redefinition of the method
+ close_issue(closing_issue.issue, merge_request, !closing_issue.from_mr_description)
+ end
+ end
+ end
+
+ def close_issue(issue, merge_request, skip_authorization = false)
+ # We are intentionally only closing Issues asynchronously (excluding ExternalIssues)
+ # as the worker only supports finding an Issue. We are also only experiencing
+ # SQL timeouts when closing an Issue.
+ if issue.is_a?(Issue)
+ # Doing this check here only to save a scheduled worker. The worker will also do this policy check.
+ return if !skip_authorization && !current_user.can?(:update_issue, issue)
+ return unless issue.autoclose_by_merged_closing_merge_request?
+
+ MergeRequests::CloseIssueWorker.perform_async(
+ *close_worker_arguments(issue, merge_request, skip_authorization)
+ )
+ else
+ Issues::CloseService.new(container: project, current_user: current_user).execute(issue, commit: merge_request)
+ end
+ end
+
+ def close_worker_arguments(issue, merge_request, skip_authorization)
+ worker_arguments = [
+ project.id,
+ current_user.id,
+ issue.id,
+ merge_request.id
+ ]
+
+ if Feature.enabled?(:mr_merge_skips_close_issue_authorization, project)
+ worker_arguments << { skip_authorization: skip_authorization }
end
+
+ worker_arguments
end
def delete_non_latest_diffs(merge_request)
@@ -77,6 +112,20 @@ module MergeRequests
merge_request_metrics_service(merge_request).merge(merge_event)
end
end
+
+ def cancel_auto_merges_targeting_source_branch(merge_request)
+ return unless Feature.enabled?(:merge_when_checks_pass, merge_request.project)
+ return unless params[:delete_source_branch]
+
+ merge_request.source_project
+ .merge_requests
+ .by_target_branch(merge_request.source_branch)
+ .with_auto_merge_enabled.each do |targetting_merge_request|
+ if targetting_merge_request.auto_merge_strategy == ::AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS
+ abort_auto_merge_with_todo(targetting_merge_request, "target branch was merged in !#{merge_request.iid}")
+ end
+ end
+ end
end
end
diff --git a/app/services/merge_requests/push_options_handler_service.rb b/app/services/merge_requests/push_options_handler_service.rb
index 3f972e747b9..9d8dc000bf7 100644
--- a/app/services/merge_requests/push_options_handler_service.rb
+++ b/app/services/merge_requests/push_options_handler_service.rb
@@ -147,6 +147,7 @@ module MergeRequests
draft: push_options[:draft],
target_branch: push_options[:target],
force_remove_source_branch: push_options[:remove_source_branch],
+ squash: push_options[:squash],
label: push_options[:label],
unlabel: push_options[:unlabel],
assign: push_options[:assign],
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 7a7d0dbfef2..8f6979460a9 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -31,7 +31,7 @@ module MergeRequests
mark_pending_todos_done(mr)
end
- abort_ff_merge_requests_with_when_pipeline_succeeds
+ abort_ff_merge_requests_with_auto_merges
cache_merge_requests_closing_issues
merge_requests_for_source_branch.each do |mr|
@@ -98,9 +98,24 @@ module MergeRequests
merge_request.merge_commit_sha = sha
merge_request.merged_commit_sha = sha
+ # Look for a merged MR that includes the SHA so we can associate it with
+ # the MR we're about to mark as merged.
+ # Only merged MRs without an event source are considered, to avoid
+ # associating the SHA with other MRs that we may have already marked as merged here.
+ source_merge_request = MergeRequestsFinder.new(
+ @current_user,
+ project_id: @project.id,
+ merged_without_event_source: true,
+ state: 'merged',
+ sort: 'merged_at',
+ commit_sha: sha
+ ).execute.first
+
+ source = source_merge_request || @project.commit(sha)
+
MergeRequests::PostMergeService
.new(project: merge_request.target_project, current_user: @current_user)
- .execute(merge_request)
+ .execute(merge_request, source)
end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -178,28 +193,29 @@ module MergeRequests
def abort_auto_merges(merge_request)
return unless abort_auto_merges?(merge_request)
- abort_auto_merge(merge_request, 'source branch was updated')
+ learn_more_url = Rails.application.routes.url_helpers.help_page_url(
+ 'ci/pipelines/merge_trains',
+ anchor: 'merge-request-dropped-from-the-merge-train'
+ )
+
+ abort_auto_merge(merge_request, "the source branch was updated. [Learn more](#{learn_more_url}).")
end
- def abort_ff_merge_requests_with_when_pipeline_succeeds
+ def abort_ff_merge_requests_with_auto_merges
return unless @project.ff_merge_must_be_possible?
merge_requests_with_auto_merge_enabled_to(@push.branch_name).each do |merge_request|
- next unless merge_request.auto_merge_strategy == AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
+ unless merge_request.auto_merge_strategy == AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS ||
+ merge_request.auto_merge_strategy == AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS
+ next
+ end
+
next unless merge_request.should_be_rebased?
abort_auto_merge_with_todo(merge_request, 'target branch was updated')
end
end
- def abort_auto_merge_with_todo(merge_request, reason)
- response = abort_auto_merge(merge_request, reason)
- response = ServiceResponse.new(**response)
- return unless response.success?
-
- todo_service.merge_request_became_unmergeable(merge_request)
- end
-
def merge_requests_with_auto_merge_enabled_to(target_branch)
@project
.merge_requests
diff --git a/app/services/merge_requests/remove_approval_service.rb b/app/services/merge_requests/remove_approval_service.rb
index b8f512bdb2c..9bf582e2091 100644
--- a/app/services/merge_requests/remove_approval_service.rb
+++ b/app/services/merge_requests/remove_approval_service.rb
@@ -3,7 +3,7 @@
module MergeRequests
class RemoveApprovalService < MergeRequests::BaseService
# rubocop: disable CodeReuse/ActiveRecord
- def execute(merge_request)
+ def execute(merge_request, skip_updating_state: false, skip_system_note: false, skip_notification: false)
return unless merge_request.approved_by?(current_user)
return if merge_request.merged?
@@ -12,14 +12,18 @@ module MergeRequests
approval = merge_request.approvals.where(user: current_user)
- trigger_approval_hooks(merge_request) do
+ trigger_approval_hooks(merge_request, skip_notification) do
next unless approval.destroy_all # rubocop: disable Cop/DestroyAll
+ update_reviewer_state(merge_request, current_user, 'unapproved') unless skip_updating_state
reset_approvals_cache(merge_request)
- create_note(merge_request)
- merge_request_activity_counter.track_unapprove_mr_action(user: current_user)
+
+ unless skip_system_note
+ create_note(merge_request)
+ merge_request_activity_counter.track_unapprove_mr_action(user: current_user)
+ end
+
trigger_merge_request_merge_status_updated(merge_request)
- trigger_merge_request_reviewers_updated(merge_request)
trigger_merge_request_approval_state_updated(merge_request)
end
@@ -33,9 +37,11 @@ module MergeRequests
merge_request.approvals.reset
end
- def trigger_approval_hooks(merge_request)
+ def trigger_approval_hooks(merge_request, skip_notification)
yield
+ return if skip_notification
+
notification_service.async.unapprove_mr(merge_request, current_user)
execute_hooks(merge_request, 'unapproved')
end
diff --git a/app/services/merge_requests/request_review_service.rb b/app/services/merge_requests/request_review_service.rb
index 6af728f7039..681d40b83ff 100644
--- a/app/services/merge_requests/request_review_service.rb
+++ b/app/services/merge_requests/request_review_service.rb
@@ -8,13 +8,17 @@ module MergeRequests
reviewer = merge_request.find_reviewer(user)
if reviewer
+ has_unapproved = remove_approval(merge_request, user).present?
+
return error("Failed to update reviewer") unless reviewer.update(state: :unreviewed)
notify_reviewer(merge_request, user)
+ trigger_merge_request_merge_status_updated(merge_request)
trigger_merge_request_reviewers_updated(merge_request)
- create_system_note(merge_request, user)
+ trigger_merge_request_approval_state_updated(merge_request)
+ create_system_note(merge_request, user, has_unapproved)
- remove_approval(merge_request) if Feature.enabled?(:mr_request_changes, current_user)
+ user.invalidate_merge_request_cache_counts if user.merge_request_dashboard_enabled?
success
else
@@ -29,8 +33,8 @@ module MergeRequests
todo_service.create_request_review_todo(merge_request, current_user, reviewer)
end
- def create_system_note(merge_request, user)
- ::SystemNoteService.request_review(merge_request, merge_request.project, current_user, user)
+ def create_system_note(merge_request, user, has_unapproved)
+ ::SystemNoteService.request_review(merge_request, merge_request.project, current_user, user, has_unapproved)
end
end
end
diff --git a/app/services/merge_requests/retarget_chain_service.rb b/app/services/merge_requests/retarget_chain_service.rb
index b6434819914..b4b05ffb08c 100644
--- a/app/services/merge_requests/retarget_chain_service.rb
+++ b/app/services/merge_requests/retarget_chain_service.rb
@@ -29,8 +29,6 @@ module MergeRequests
target_branch_was_deleted: true
}
).execute(other_merge_request)
-
- other_merge_request.rebase_async(current_user.id)
end
end
end
diff --git a/app/services/merge_requests/unstick_locked_merge_requests_service.rb b/app/services/merge_requests/unstick_locked_merge_requests_service.rb
new file mode 100644
index 00000000000..77202338709
--- /dev/null
+++ b/app/services/merge_requests/unstick_locked_merge_requests_service.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class UnstickLockedMergeRequestsService
+ include BaseServiceUtility
+
+ def execute
+ Gitlab::MergeRequests::LockedSet.each_batch(100) do |batch|
+ merge_requests = merge_requests_batch(batch)
+ merge_requests_with_merge_jid = merge_requests.select { |mr| mr.locked? && mr.merge_jid.present? }
+ merge_requests_without_merge_jid = merge_requests.select { |mr| mr.locked? && mr.merge_jid.blank? }
+ unlocked_merge_requests = merge_requests.select { |mr| !mr.locked? }
+
+ attempt_to_unstick_mrs_with_merge_jid(merge_requests_with_merge_jid)
+ attempt_to_unstick_mrs_without_merge_jid(merge_requests_without_merge_jid)
+ remove_from_locked_set(unlocked_merge_requests)
+ end
+ end
+
+ private
+
+ # This method is overridden in EE to extend its functionality like preloading
+ # associations.
+ def merge_requests_batch(ids)
+ MergeRequest.id_in(ids)
+ end
+
+ # The logic in this method is the same as in `StuckMergeJobsWorker`. This service
+ # is intended to replace the logic in that worker once feature flag is fully rolled out.
+ #
+ # Any changes that needs to be applied here should be applied to the worker as well.
+ #
+ # rubocop: disable CodeReuse/ActiveRecord -- TODO: Introduce new AR scopes for queries used in this method
+ def attempt_to_unstick_mrs_with_merge_jid(merge_requests)
+ return if merge_requests.empty?
+
+ jids = merge_requests.map(&:merge_jid)
+
+ # Find the jobs that aren't currently running or that exceeded the threshold.
+ completed_jids = Gitlab::SidekiqStatus.completed_jids(jids)
+
+ return if completed_jids.empty?
+
+ completed_ids = merge_requests.select do |merge_request|
+ completed_jids.include?(merge_request.merge_jid)
+ end.map(&:id)
+
+ completed_merge_requests = MergeRequest.id_in(completed_ids)
+
+ mark_merge_requests_as_merged(completed_merge_requests.where.not(merge_commit_sha: nil))
+ unlock_merge_requests(completed_merge_requests.where(merge_commit_sha: nil))
+
+ log_info("Updated state of locked merge jobs. JIDs: #{completed_jids.join(', ')}")
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def attempt_to_unstick_mrs_without_merge_jid(merge_requests)
+ return if merge_requests.empty?
+
+ merge_requests_to_reopen = []
+ merge_request_ids_to_mark_as_merged = []
+
+ merge_requests.each do |merge_request|
+ next unless Feature.enabled?(:unstick_locked_mrs_without_merge_jid, merge_request.project)
+ next unless should_unstick?(merge_request)
+
+ # Reset merge request record to ensure we get updated record state before
+ # we check attributes. It is possible that after we queried the MRs, they
+ # got merged or unlocked and marked as such successfully. If so, skip MR.
+ next unless merge_request.reset.locked?
+
+ # Set MR to be marked as merged if one of the following is true:
+ # - it already has merged_commit_sha in the DB
+ # - it already has merge_commit_sha in the DB
+ # - it has no diffs where source and target branches are compared
+ #
+ # This means the MR changes were already merged.
+ #
+ # We read the value of the column from the DB instead of MergeRequest#merged_commit_sha
+ # as that method can return nil when MR is still not merged.
+ #
+ # We also check the `merge_commit_sha` if present as there are older MRs that do not have
+ # `merged_commit_sha` set on merge.
+ #
+ # When both attributes aren't set, we check if the MR still has diffs to see
+ # if the MR changes are already merged or not.
+ if merge_request.read_attribute(:merged_commit_sha).present? ||
+ merge_request.merge_commit_sha.present? ||
+ (merge_request.source_and_target_branches_exist? && !merge_request.has_diffs?)
+ merge_request_ids_to_mark_as_merged << merge_request.id
+ else
+ # Set MR to be unlocked since it's stuck and maybe not merged yet.
+ merge_requests_to_reopen << merge_request
+ end
+ end
+
+ mark_merge_requests_as_merged(MergeRequest.id_in(merge_request_ids_to_mark_as_merged))
+ unlock_merge_requests(merge_requests_to_reopen)
+
+ updated_mr_ids = merge_request_ids_to_mark_as_merged | merge_requests_to_reopen.map(&:id)
+ log_info("Updated state of locked MRs without JIDs. IDs: #{updated_mr_ids.join(', ')}")
+ end
+
+ # Check if MR is still in the process of merging so we don't interrupt the process.
+ # MergeRequest::MergeService will acquire a lease when merging and keep it for
+ # 15 minutes so we can check if the lease still exists and we can consider
+ # the MR as still merging.
+ def should_unstick?(merge_request)
+ !merge_request.merge_exclusive_lease.exists?
+ end
+
+ def mark_merge_requests_as_merged(merge_requests)
+ return if merge_requests.empty?
+
+ merge_requests.update_all(state_id: MergeRequest.available_states[:merged])
+ remove_from_locked_set(merge_requests)
+ end
+
+ # Do not reopen merge requests using direct queries.
+ # We rely on state machine callbacks to update head_pipeline_id
+ def unlock_merge_requests(merge_requests)
+ errors = Hash.new { |h, k| h[k] = [] }
+
+ merge_requests.each do |mr|
+ mjid = mr.merge_jid
+
+ if mr.unlock_mr
+ mr.remove_from_locked_set
+ next
+ end
+
+ mr.errors.full_messages.each do |msg|
+ errors[msg] << if mjid.present?
+ ["#{mjid}|#{mr.id}"]
+ else
+ [mr.id]
+ end
+ end
+ end
+
+ built_errors = errors.map { |k, v| "#{k} - IDS: #{v.join(',')}\n" }.join
+ log_info("Errors:\n#{built_errors}")
+ end
+
+ def remove_from_locked_set(merge_requests)
+ return if merge_requests.empty?
+
+ Gitlab::MergeRequests::LockedSet.remove(merge_requests.map(&:id))
+ end
+ end
+end
+
+MergeRequests::UnstickLockedMergeRequestsService.prepend_mod
diff --git a/app/services/merge_requests/update_assignees_service.rb b/app/services/merge_requests/update_assignees_service.rb
index d45d55cbebc..c1e970740b9 100644
--- a/app/services/merge_requests/update_assignees_service.rb
+++ b/app/services/merge_requests/update_assignees_service.rb
@@ -7,7 +7,7 @@ module MergeRequests
# This saves a lot of queries for irrelevant things that cannot possibly
# change in the execution of this service.
def execute(merge_request)
- return merge_request unless current_user&.can?(:update_merge_request, merge_request)
+ return merge_request unless current_user&.can?(:set_merge_request_metadata, merge_request)
old_assignees = merge_request.assignees.to_a
old_ids = old_assignees.map(&:id)
diff --git a/app/services/merge_requests/update_reviewer_state_service.rb b/app/services/merge_requests/update_reviewer_state_service.rb
index e59c2b178db..1955d532abf 100644
--- a/app/services/merge_requests/update_reviewer_state_service.rb
+++ b/app/services/merge_requests/update_reviewer_state_service.rb
@@ -7,14 +7,22 @@ module MergeRequests
reviewer = merge_request.find_reviewer(current_user)
+ create_requested_changes(merge_request) if state == 'requested_changes'
+ destroy_requested_changes(merge_request) if state == 'approved'
+
if reviewer
return error("Failed to update reviewer") unless reviewer.update(state: state)
trigger_merge_request_reviewers_updated(merge_request)
+ if current_user.merge_request_dashboard_enabled?
+ invalidate_cache_counts(merge_request, users: merge_request.assignees)
+ current_user.invalidate_merge_request_cache_counts
+ end
+
return success if state != 'requested_changes'
- if merge_request.approved_by?(current_user) && !remove_approval(merge_request)
+ if merge_request.approved_by?(current_user) && !remove_approval(merge_request, current_user)
return error("Failed to remove approval")
end
@@ -23,5 +31,19 @@ module MergeRequests
error("Reviewer not found")
end
end
+
+ private
+
+ def create_requested_changes(merge_request)
+ merge_request.create_requested_changes(current_user)
+
+ SystemNoteService.requested_changes(merge_request, current_user)
+
+ trigger_merge_request_merge_status_updated(merge_request)
+ end
+
+ def destroy_requested_changes(merge_request)
+ merge_request.destroy_requested_changes(current_user)
+ end
end
end
diff --git a/app/services/merge_requests/update_reviewers_service.rb b/app/services/merge_requests/update_reviewers_service.rb
index 8e974d75676..3a4ac831db3 100644
--- a/app/services/merge_requests/update_reviewers_service.rb
+++ b/app/services/merge_requests/update_reviewers_service.rb
@@ -3,7 +3,7 @@
module MergeRequests
class UpdateReviewersService < UpdateService
def execute(merge_request)
- return merge_request unless current_user&.can?(:update_merge_request, merge_request)
+ return merge_request unless current_user&.can?(:set_merge_request_metadata, merge_request)
old_reviewers = merge_request.reviewers.to_a
old_ids = old_reviewers.map(&:id)
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index fb6544a910a..ae937daffca 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -69,15 +69,51 @@ module MergeRequests
MergeRequests::CloseService
end
- def after_update(issuable, old_associations)
+ def after_update(merge_request, old_associations)
super
- issuable.cache_merge_request_closes_issues!(current_user)
+
+ merge_request.cache_merge_request_closes_issues!(current_user)
+ @trigger_work_item_updated = true
end
private
attr_reader :target_branch_was_deleted
+ def trigger_updated_work_item_on_closing_issues(merge_request, old_closing_issues_ids)
+ new_issue_ids = merge_request.merge_requests_closing_issues.limit(1000).pluck(:issue_id) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
+ all_issue_ids = new_issue_ids | old_closing_issues_ids
+ return if all_issue_ids.blank?
+
+ WorkItem.id_in(all_issue_ids).find_each(batch_size: 100) do |work_item| # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
+ GraphqlTriggers.work_item_updated(work_item)
+ end
+ end
+
+ override :associations_before_update
+ def associations_before_update(merge_request)
+ super.merge(
+ closing_issues_ids: merge_request.merge_requests_closing_issues.limit(1000).pluck(:issue_id) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be the same in the model
+ )
+ end
+
+ override :change_state
+ def change_state(merge_request)
+ return unless super
+
+ @trigger_work_item_updated = true
+ end
+
+ override :trigger_update_subscriptions
+ def trigger_update_subscriptions(merge_request, old_associations)
+ return unless @trigger_work_item_updated
+
+ trigger_updated_work_item_on_closing_issues(
+ merge_request,
+ old_associations.fetch(:closing_issues_ids, [])
+ )
+ end
+
def general_fallback(merge_request)
# We don't allow change of source/target projects and source branch
# after merge request was created
@@ -169,7 +205,18 @@ module MergeRequests
)
delete_approvals_on_target_branch_change(merge_request)
- refresh_pipelines_on_merge_requests(merge_request, allow_duplicate: true)
+
+ # `target_branch_was_deleted` is set to true when MR gets re-targeted due to
+ # deleted target branch. In this case we don't want to create a new pipeline
+ # on behalf of MR author.
+ # We nullify head_pipeline_id to force that a new pipeline is explicitly
+ # created in order to pass mergeability checks.
+ if target_branch_was_deleted
+ merge_request.head_pipeline_id = nil
+ merge_request.retargeted = true
+ else
+ refresh_pipelines_on_merge_requests(merge_request, allow_duplicate: true)
+ end
abort_auto_merge(merge_request, 'target branch was changed')
end
@@ -186,7 +233,7 @@ module MergeRequests
# email template itself, see `change_in_merge_request_draft_status_email` template.
notify_draft_status_changed(merge_request)
trigger_merge_request_status_updated(merge_request)
- publish_draft_change_event(merge_request) if Feature.enabled?(:additional_merge_when_checks_ready, project)
+ publish_draft_change_event(merge_request) if Feature.enabled?(:merge_when_checks_pass, project)
end
if !old_title_draft && new_title_draft
diff --git a/app/services/milestones/destroy_service.rb b/app/services/milestones/destroy_service.rb
index aa122b1282a..6966764634f 100644
--- a/app/services/milestones/destroy_service.rb
+++ b/app/services/milestones/destroy_service.rb
@@ -2,24 +2,68 @@
module Milestones
class DestroyService < Milestones::BaseService
+ BATCH_SIZE = 500
+
def execute(milestone)
Milestone.transaction do
- update_params = { milestone_id: nil, skip_milestone_email: true }
+ update_issues(milestone)
+ update_merge_requests(milestone)
- milestone.issues.each do |issue|
- Issues::UpdateService.new(container: parent, current_user: current_user, params: update_params).execute(issue)
- end
+ log_destroy_event_for(milestone) if milestone.destroy
+ end
+
+ return unless milestone.destroyed?
+
+ milestone
+ end
+
+ private
+
+ def update_issues(milestone)
+ batched_issue_ids = []
+ milestone.issues.each_batch(of: BATCH_SIZE) do |issues|
+ batched_issue_ids << issues.map do |issue|
+ Issues::UpdateService.new(
+ container: parent,
+ current_user: current_user,
+ params: update_params
+ ).execute(issue)
- milestone.merge_requests.each do |merge_request|
- MergeRequests::UpdateService.new(project: merge_request.project, current_user: current_user, params: update_params).execute(merge_request)
+ issue.id
end
+ end
+
+ publish_events(milestone, batched_issue_ids)
+ end
+
+ def publish_events(milestone, batched_issue_ids)
+ root_namespace_id = (milestone.group || milestone.project).root_ancestor.id
- log_destroy_event_for(milestone)
+ milestone.run_after_commit do
+ Gitlab::EventStore.publish_group(batched_issue_ids.map do |issue_ids|
+ WorkItems::BulkUpdatedEvent.new(data: {
+ work_item_ids: issue_ids,
+ root_namespace_id: root_namespace_id,
+ updated_attributes: %w[milestone_id]
+ })
+ end)
+ end
+ end
- milestone.destroy
+ def update_merge_requests(milestone)
+ milestone.merge_requests.each do |merge_request|
+ MergeRequests::UpdateService.new(
+ project: merge_request.project,
+ current_user: current_user,
+ params: update_params
+ ).execute(merge_request)
end
end
+ def update_params
+ @update_params ||= { milestone_id: nil, skip_milestone_email: true }
+ end
+
def log_destroy_event_for(milestone)
return if milestone.group_milestone?
diff --git a/app/services/milestones/update_service.rb b/app/services/milestones/update_service.rb
index 90cb8ea9f5c..1e42bfe3e37 100644
--- a/app/services/milestones/update_service.rb
+++ b/app/services/milestones/update_service.rb
@@ -12,15 +12,10 @@ module Milestones
Milestones::CloseService.new(parent, current_user, {}).execute(milestone)
end
- if params.present?
- milestone.assign_attributes(params.except(:state_event))
- end
-
- if milestone.changed?
- before_update(milestone)
- end
+ milestone.assign_attributes(params.except(:state_event)) if params.present?
+ before_update(milestone) if milestone.changed?
+ publish_event(milestone) if milestone.save
- milestone.save
milestone
end
@@ -29,6 +24,17 @@ module Milestones
def before_update(milestone)
milestone.check_for_spam(user: current_user, action: :update)
end
+
+ def publish_event(milestone)
+ Gitlab::EventStore.publish(
+ Milestones::MilestoneUpdatedEvent.new(data: {
+ id: milestone.id,
+ group_id: milestone.group_id,
+ project_id: milestone.project_id,
+ updated_attributes: milestone.previous_changes&.keys&.map(&:to_s)
+ }.tap(&:compact_blank!))
+ )
+ end
end
end
diff --git a/app/services/ml/create_candidate_service.rb b/app/services/ml/create_candidate_service.rb
index 53913c3fb19..28870bce827 100644
--- a/app/services/ml/create_candidate_service.rb
+++ b/app/services/ml/create_candidate_service.rb
@@ -28,7 +28,7 @@ module Ml
end
def random_candidate_name
- parts = Array.new(3).map { FFaker::Animal.common_name.downcase.delete(' ') } << rand(10000)
+ parts = Array.new(3).map { FFaker::AnimalUS.common_name.downcase.delete(' ') } << rand(10000)
parts.join('-').truncate(255)
end
diff --git a/app/services/ml/create_experiment_service.rb b/app/services/ml/create_experiment_service.rb
new file mode 100644
index 00000000000..ed312b6e965
--- /dev/null
+++ b/app/services/ml/create_experiment_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Ml
+ class CreateExperimentService
+ def initialize(project, experiment_name, user = nil)
+ @project = project
+ @name = experiment_name
+ @user = user
+ end
+
+ def execute
+ experiment = Ml::Experiment.new(project: project, name: name, user: user)
+ experiment.save
+
+ return error(experiment.errors.full_messages) unless experiment.persisted?
+
+ success(experiment)
+ end
+
+ private
+
+ def success(model)
+ ServiceResponse.success(payload: model)
+ end
+
+ def error(reason)
+ ServiceResponse.error(message: reason)
+ end
+
+ attr_reader :project, :name, :user
+ end
+end
diff --git a/app/services/ml/create_model_service.rb b/app/services/ml/create_model_service.rb
index 7ac9c2a2737..46035194a12 100644
--- a/app/services/ml/create_model_service.rb
+++ b/app/services/ml/create_model_service.rb
@@ -12,34 +12,42 @@ module Ml
def execute
ApplicationRecord.transaction do
+ experiment_result = Ml::CreateExperimentService.new(@project, experiment_name, @user).execute
+
+ next experiment_result if experiment_result.error?
+
model = Ml::Model.new(
project: @project,
name: @name,
user: @user,
description: @description,
- default_experiment: default_experiment
+ default_experiment: experiment_result.payload
)
model.save
- if model.persisted?
- add_metadata(model, @metadata)
+ next error(model.errors.full_messages) unless model.persisted?
+
+ Gitlab::InternalEvents.track_event(
+ 'model_registry_ml_model_created',
+ project: @project,
+ user: @user
+ )
- Gitlab::InternalEvents.track_event(
- 'model_registry_ml_model_created',
- project: @project,
- user: @user
- )
- end
+ add_metadata(model, @metadata)
- model
+ success(model)
end
end
private
- def default_experiment
- @default_experiment ||= Ml::FindOrCreateExperimentService.new(@project, @name).execute
+ def success(model)
+ ServiceResponse.success(payload: model)
+ end
+
+ def error(reason)
+ ServiceResponse.error(message: reason)
end
def add_metadata(model, metadata_key_value)
@@ -57,5 +65,9 @@ module Ml
::Ml::ModelMetadata.create!(entry)
end
end
+
+ def experiment_name
+ Ml::Model.prefixed_experiment(@name)
+ end
end
end
diff --git a/app/services/ml/create_model_version_service.rb b/app/services/ml/create_model_version_service.rb
index 4af9dd40d12..74490f00024 100644
--- a/app/services/ml/create_model_version_service.rb
+++ b/app/services/ml/create_model_version_service.rb
@@ -15,17 +15,27 @@ module Ml
ApplicationRecord.transaction do
@version ||= Ml::IncrementVersionService.new(@model.latest_version.try(:version)).execute
+ error(_("Version must be semantic version")) unless Packages::SemVer.match(@version)
+
package = @package || find_or_create_package(@model.name, @version)
- model_version = Ml::ModelVersion.create!(model: @model, project: @model.project, version: @version,
+ error(_("Can't create model version package")) unless package
+
+ @model_version = Ml::ModelVersion.new(model: @model, project: @model.project, version: @version,
package: package, description: @description)
- model_version.candidate = ::Ml::CreateCandidateService.new(
+ @model_version.save
+
+ error(@model_version.errors.full_messages) unless @model_version.persisted?
+
+ @model_version.candidate = ::Ml::CreateCandidateService.new(
@model.default_experiment,
- { model_version: model_version }
+ { model_version: @model_version }
).execute
- model_version.add_metadata(@metadata)
+ error(_("Version must be semantic version")) unless @model_version.candidate
+
+ @model_version.add_metadata(@metadata)
Gitlab::InternalEvents.track_event(
'model_registry_ml_model_version_created',
@@ -33,8 +43,12 @@ module Ml
user: @user
)
- model_version
+ ServiceResponse.success(message: [], payload: { model_version: @model_version })
end
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(message: [e.message], payload: { model_version: nil })
+ rescue ModelVersionCreationError => e
+ ServiceResponse.error(message: e.errors, payload: { model_version: nil })
end
private
@@ -49,5 +63,17 @@ module Ml
.new(@model.project, @user, package_params)
.execute
end
+
+ def error(errors)
+ raise ModelVersionCreationError.new(Array.wrap(errors)) # rubocop:disable Style/RaiseArgs -- This is a custom error and is handled in this class
+ end
+
+ class ModelVersionCreationError < StandardError
+ attr_reader :errors
+
+ def initialize(errors)
+ @errors = errors
+ end
+ end
end
end
diff --git a/app/services/ml/destroy_model_service.rb b/app/services/ml/destroy_model_service.rb
index 308d289fbe1..f2a33d58cd8 100644
--- a/app/services/ml/destroy_model_service.rb
+++ b/app/services/ml/destroy_model_service.rb
@@ -8,12 +8,34 @@ module Ml
end
def execute
- return unless @model.destroy
-
- ::Packages::MarkPackagesForDestructionService.new(
+ package_deletion_result = ::Packages::MarkPackagesForDestructionService.new(
packages: @model.all_packages,
current_user: @user
).execute
+
+ return packages_not_deleted(package_deletion_result.message) if package_deletion_result.error?
+
+ return error unless @model.destroy
+
+ success
+ end
+
+ private
+
+ def success
+ ServiceResponse.success(payload: payload)
+ end
+
+ def error
+ ServiceResponse.error(message: @model.errors.full_messages, payload: payload)
+ end
+
+ def packages_not_deleted(error_message)
+ ServiceResponse.error(message: error_message, payload: payload)
+ end
+
+ def payload
+ { model: @model }
end
end
end
diff --git a/app/services/ml/destroy_model_version_service.rb b/app/services/ml/destroy_model_version_service.rb
new file mode 100644
index 00000000000..157cb6f2db9
--- /dev/null
+++ b/app/services/ml/destroy_model_version_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Ml
+ class DestroyModelVersionService
+ def initialize(model_version, user)
+ @model_version = model_version
+ @user = user
+ end
+
+ def execute
+ if model_version.package.present?
+ result = ::Packages::MarkPackageForDestructionService
+ .new(container: model_version.package, current_user: @user)
+ .execute
+
+ return ServiceResponse.error(message: result.message, payload: payload) unless result.success?
+ end
+
+ if model_version.destroy
+ ServiceResponse.success(payload: payload)
+ else
+ ServiceResponse.error(message: model_version.errors.full_messages, payload: payload)
+ end
+ end
+
+ private
+
+ def payload
+ { model_version: model_version }
+ end
+
+ attr_reader :model_version, :user
+ end
+end
diff --git a/app/services/ml/experiment_tracking/experiment_repository.rb b/app/services/ml/experiment_tracking/experiment_repository.rb
index 90f4cf1abec..767eda13934 100644
--- a/app/services/ml/experiment_tracking/experiment_repository.rb
+++ b/app/services/ml/experiment_tracking/experiment_repository.rb
@@ -17,13 +17,13 @@ module Ml
end
def all
- ::Ml::Experiment.by_project_id(project.id)
+ Projects::Ml::ExperimentFinder.new(@project).execute
end
def create!(name, tags = nil)
experiment = ::Ml::Experiment.create!(name: name,
- user: user,
- project: project)
+ user: user,
+ project: project)
add_tags(experiment, tags)
diff --git a/app/services/ml/find_model_service.rb b/app/services/ml/find_model_service.rb
index 23ca0266629..cd376daa31c 100644
--- a/app/services/ml/find_model_service.rb
+++ b/app/services/ml/find_model_service.rb
@@ -2,12 +2,24 @@
module Ml
class FindModelService
- def initialize(project, name)
+ def initialize(project, name = nil, model_id = nil)
@project = project
@name = name
+ @model_id = model_id
end
def execute
+ return find_by_model_id if @model_id
+ return find_by_project_and_name if @name
+
+ nil
+ end
+
+ def find_by_model_id
+ Ml::Model.by_project_id_and_id(@project.id, @model_id)
+ end
+
+ def find_by_project_and_name
Ml::Model.by_project_id_and_name(@project.id, @name)
end
end
diff --git a/app/services/ml/find_or_create_experiment_service.rb b/app/services/ml/find_or_create_experiment_service.rb
deleted file mode 100644
index 1fe10c7f856..00000000000
--- a/app/services/ml/find_or_create_experiment_service.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module Ml
- class FindOrCreateExperimentService
- def initialize(project, experiment_name, user = nil)
- @project = project
- @name = experiment_name
- @user = user
- end
-
- def execute
- Ml::Experiment.find_or_create(project, name, user)
- end
-
- private
-
- attr_reader :project, :name, :user
- end
-end
diff --git a/app/services/ml/find_or_create_model_service.rb b/app/services/ml/find_or_create_model_service.rb
deleted file mode 100644
index 9199730e84b..00000000000
--- a/app/services/ml/find_or_create_model_service.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-module Ml
- class FindOrCreateModelService
- def initialize(project, name, user = nil, description = nil, metadata = [])
- @project = project
- @name = name
- @description = description
- @metadata = metadata
- @user = user
- end
-
- def execute
- FindModelService.new(@project, @name).execute ||
- CreateModelService.new(@project, @name, @user, @description, @metadata).execute
- end
- end
-end
diff --git a/app/services/ml/find_or_create_model_version_service.rb b/app/services/ml/find_or_create_model_version_service.rb
deleted file mode 100644
index 61782166726..00000000000
--- a/app/services/ml/find_or_create_model_version_service.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-module Ml
- class FindOrCreateModelVersionService
- def initialize(project, params = {})
- @project = project
- @name = params[:model_name]
- @version = params[:version]
- @package = params[:package]
- @description = params[:description]
- @user = params[:user]
- @params = params
- end
-
- def execute
- model_version = Ml::ModelVersion.by_project_id_name_and_version(@project.id, @name, @version)
-
- return model_version if model_version
-
- model = Ml::Model.by_project_id_and_name(@project.id, @name)
-
- return unless model
-
- Ml::CreateModelVersionService.new(model, @params).execute
- end
- end
-end
diff --git a/app/services/ml/model_versions/delete_service.rb b/app/services/ml/model_versions/delete_service.rb
deleted file mode 100644
index 4eb8d367a19..00000000000
--- a/app/services/ml/model_versions/delete_service.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Ml
- module ModelVersions
- class DeleteService
- def initialize(project, name, version, user)
- @project = project
- @name = name
- @version = version
- @user = user
- end
-
- def execute
- model_version = Ml::ModelVersion
- .by_project_id_name_and_version(@project.id, @name, @version)
- return ServiceResponse.error(message: 'Model not found') unless model_version
-
- if model_version.package.present?
- result = ::Packages::MarkPackageForDestructionService
- .new(container: model_version.package, current_user: @user)
- .execute
-
- return ServiceResponse.error(message: result.message) unless result.success?
- end
-
- return ServiceResponse.error(message: 'Could not destroy the model version') unless model_version.destroy
-
- ServiceResponse.success
- end
- end
- end
-end
diff --git a/app/services/ml/update_model_service.rb b/app/services/ml/update_model_service.rb
index dade6c72588..c40872a4f35 100644
--- a/app/services/ml/update_model_service.rb
+++ b/app/services/ml/update_model_service.rb
@@ -8,9 +8,18 @@ module Ml
end
def execute
+ return error('Model not found') unless @model
+
@model.update!(description: @description)
+ success(@model)
+ end
+
+ def success(model)
+ ServiceResponse.success(payload: model)
+ end
- @model
+ def error(reason)
+ ServiceResponse.error(message: reason)
end
end
end
diff --git a/app/services/namespace_settings/update_service.rb b/app/services/namespace_settings/assign_attributes_service.rb
index f6f59738d44..e7dcddd4356 100644
--- a/app/services/namespace_settings/update_service.rb
+++ b/app/services/namespace_settings/assign_attributes_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module NamespaceSettings
- class UpdateService
+ class AssignAttributesService
include ::Gitlab::Allowable
attr_reader :current_user, :group, :settings_params
@@ -20,14 +20,18 @@ module NamespaceSettings
user_policy: :change_prevent_sharing_groups_outside_hierarchy
)
validate_settings_param_for_root_group(
+ param_key: :seat_control,
+ user_policy: :change_seat_control
+ )
+ validate_settings_param_for_root_group(
param_key: :new_user_signups_cap,
user_policy: :change_new_user_signups_cap
)
- validate_settings_param_for_root_group(
+ validate_settings_param_for_admin(
param_key: :default_branch_protection,
user_policy: :update_default_branch_protection
)
- validate_settings_param_for_root_group(
+ validate_settings_param_for_admin(
param_key: :default_branch_protection_defaults,
user_policy: :update_default_branch_protection
)
@@ -36,7 +40,9 @@ module NamespaceSettings
user_policy: :update_git_access_protocol
)
+ handle_default_branch_name
handle_default_branch_protection unless settings_params[:default_branch_protection].blank?
+ handle_early_access_program_participation
if group.namespace_settings
group.namespace_settings.attributes = settings_params
@@ -47,6 +53,17 @@ module NamespaceSettings
private
+ def handle_default_branch_name
+ default_branch_key = :default_branch_name
+
+ return if settings_params[default_branch_key].blank?
+
+ unless Gitlab::GitRefValidator.validate(settings_params[default_branch_key])
+ settings_params.delete(default_branch_key)
+ group.namespace_settings.errors.add(default_branch_key, _('is invalid.'))
+ end
+ end
+
def handle_default_branch_protection
# We are migrating default_branch_protection from an integer
# column to a jsonb column. While completing the rest of the
@@ -58,22 +75,34 @@ module NamespaceSettings
settings_params[:default_branch_protection_defaults] = protection.to_hash
end
- def validate_resource_access_token_creation_allowed_param
- return if settings_params[:resource_access_token_creation_allowed].nil?
+ def handle_early_access_program_participation
+ want_participate = Gitlab::Utils.to_boolean(settings_params[:early_access_program_participant])
+ return unless want_participate
- unless can?(current_user, :admin_group, group)
- settings_params.delete(:resource_access_token_creation_allowed)
- group.namespace_settings.errors.add(:resource_access_token_creation_allowed, _('can only be changed by a group admin.'))
- end
+ not_participant = !group.namespace_settings&.early_access_program_participant
+ settings_params[:early_access_program_joined_by_id] = current_user.id if not_participant
end
- def validate_settings_param_for_root_group(param_key:, user_policy:)
+ def validate_resource_access_token_creation_allowed_param
+ validate_settings_param_for_admin(
+ param_key: :resource_access_token_creation_allowed,
+ user_policy: :admin_group
+ )
+ end
+
+ def validate_settings_param_for_admin(param_key:, user_policy:)
return if settings_params[param_key].nil?
unless can?(current_user, user_policy, group)
settings_params.delete(param_key)
group.namespace_settings.errors.add(param_key, _('can only be changed by a group admin.'))
end
+ end
+
+ def validate_settings_param_for_root_group(param_key:, user_policy:)
+ return if settings_params[param_key].nil?
+
+ validate_settings_param_for_admin(param_key: param_key, user_policy: user_policy)
unless group.root?
settings_params.delete(param_key)
@@ -83,4 +112,4 @@ module NamespaceSettings
end
end
-NamespaceSettings::UpdateService.prepend_mod_with('NamespaceSettings::UpdateService')
+NamespaceSettings::AssignAttributesService.prepend_mod_with('NamespaceSettings::AssignAttributesService')
diff --git a/app/services/namespaces/package_settings/update_service.rb b/app/services/namespaces/package_settings/update_service.rb
index 06a15671f25..579873c9711 100644
--- a/app/services/namespaces/package_settings/update_service.rb
+++ b/app/services/namespaces/package_settings/update_service.rb
@@ -6,20 +6,20 @@ module Namespaces
include Gitlab::Utils::StrongMemoize
ALLOWED_ATTRIBUTES = %i[maven_duplicates_allowed
- maven_duplicate_exception_regex
- generic_duplicates_allowed
- generic_duplicate_exception_regex
- maven_package_requests_forwarding
- nuget_duplicates_allowed
- nuget_duplicate_exception_regex
- terraform_module_duplicates_allowed
- terraform_module_duplicate_exception_regex
- npm_package_requests_forwarding
- pypi_package_requests_forwarding
- lock_maven_package_requests_forwarding
- lock_npm_package_requests_forwarding
- lock_pypi_package_requests_forwarding
- nuget_symbol_server_enabled].freeze
+ maven_duplicate_exception_regex
+ generic_duplicates_allowed
+ generic_duplicate_exception_regex
+ maven_package_requests_forwarding
+ nuget_duplicates_allowed
+ nuget_duplicate_exception_regex
+ terraform_module_duplicates_allowed
+ terraform_module_duplicate_exception_regex
+ npm_package_requests_forwarding
+ pypi_package_requests_forwarding
+ lock_maven_package_requests_forwarding
+ lock_npm_package_requests_forwarding
+ lock_pypi_package_requests_forwarding
+ nuget_symbol_server_enabled].freeze
def execute
return ServiceResponse.error(message: 'Access Denied', http_status: 403) unless allowed?
diff --git a/app/services/namespaces/update_denormalized_descendants_service.rb b/app/services/namespaces/update_denormalized_descendants_service.rb
new file mode 100644
index 00000000000..5a6b9fd3f96
--- /dev/null
+++ b/app/services/namespaces/update_denormalized_descendants_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Namespaces
+ class UpdateDenormalizedDescendantsService
+ include Gitlab::Utils::StrongMemoize
+
+ NAMESPACE_TYPE_MAPPING = {
+ 'Project' => :all_project_ids,
+ 'Group' => :self_and_descendant_group_ids
+ }.freeze
+
+ def initialize(namespace_id:)
+ @namespace_id = namespace_id
+ end
+
+ def execute
+ Namespaces::Descendants.transaction do
+ namespace = Namespace.primary_key_in(namespace_id).lock.first # rubocop: disable CodeReuse/ActiveRecord -- this is a special service for updating records
+ # If there is another process updating the hierarchy, this query will return nil and we just
+ # stop the processing.
+ descendants = Namespaces::Descendants.primary_key_in(namespace_id).lock('FOR UPDATE SKIP LOCKED').first # rubocop: disable CodeReuse/ActiveRecord -- this is a special service for updating records
+ next unless descendants
+
+ if namespace
+ update_namespace_descendants(namespace)
+ else
+ descendants.destroy
+ end
+ end
+ end
+
+ private
+
+ attr_reader :namespace_id
+
+ def update_namespace_descendants(namespace)
+ ids = collect_namespace_ids
+
+ Namespaces::Descendants.upsert_with_consistent_data(
+ namespace: namespace,
+ self_and_descendant_group_ids: ids[:self_and_descendant_group_ids].sort,
+ all_project_ids: Project.where(project_namespace_id: ids[:all_project_ids]).order(:id).pluck_primary_key # rubocop: disable CodeReuse/ActiveRecord -- Service specific record lookup
+ )
+ end
+
+ def collect_namespace_ids
+ denormalized_ids = { self_and_descendant_group_ids: [], all_project_ids: [] }
+
+ iterator.each_batch do |ids|
+ namespaces = Namespace.primary_key_in(ids).select(:id, :type)
+ namespaces.each do |namespace|
+ denormalized_attribute = NAMESPACE_TYPE_MAPPING[namespace.type]
+ denormalized_ids[denormalized_attribute] << namespace.id if denormalized_attribute
+ end
+ end
+
+ denormalized_ids
+ end
+
+ def iterator
+ Gitlab::Database::NamespaceEachBatch
+ .new(namespace_class: Namespace, cursor: { current_id: namespace_id, depth: [namespace_id] })
+ end
+ end
+end
diff --git a/app/services/notes/abuse_report/build_service.rb b/app/services/notes/abuse_report/build_service.rb
new file mode 100644
index 00000000000..4f85e416d1e
--- /dev/null
+++ b/app/services/notes/abuse_report/build_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Notes
+ module AbuseReport
+ class BuildService < ::Notes::BuildService
+ extend ::Gitlab::Utils::Override
+
+ def initialize(user = nil, params = {})
+ @current_user = user
+ @params = params.dup
+ end
+
+ private
+
+ override :handle_external_author
+ def handle_external_author; end
+
+ override :handle_confidentiality_params
+ def handle_confidentiality_params; end
+
+ override :new_note
+ def new_note(params, _discussion)
+ AntiAbuse::Reports::Note.new(params.merge(author: current_user))
+ end
+
+ override :find_discussion
+ def find_discussion(discussion_id)
+ AntiAbuse::Reports::Note.find_discussion(discussion_id)
+ end
+
+ override :discussion_not_found
+ def discussion_not_found
+ note = AntiAbuse::Reports::Note.new
+ note.errors.add(:base, _('Discussion to reply to cannot be found'))
+ note
+ end
+ end
+ end
+end
diff --git a/app/services/notes/abuse_report/create_service.rb b/app/services/notes/abuse_report/create_service.rb
new file mode 100644
index 00000000000..014ce78170f
--- /dev/null
+++ b/app/services/notes/abuse_report/create_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Notes
+ module AbuseReport
+ class CreateService < ::Notes::CreateService
+ def initialize(user = nil, params = {})
+ @current_user = user
+ @params = params.dup
+ end
+
+ private
+
+ def build_note(_executing_user)
+ Notes::AbuseReport::BuildService.new(current_user, params).execute
+ end
+
+ def after_commit(note)
+ note.run_after_commit do
+ # TODO: enqueue creation of todos, NewNoteWorker or similar (create a new one)
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/477320
+ end
+ end
+
+ def quick_actions_supported?(_note)
+ false
+ end
+
+ def check_for_spam?(_only_commands)
+ false
+ end
+
+ def when_saved(note, _additional_params)
+ # add todos and events tracking
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/477320
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/477322
+ end
+ end
+ end
+end
diff --git a/app/services/notes/base_service.rb b/app/services/notes/base_service.rb
index 87f7cb0e8ac..8265fda10a1 100644
--- a/app/services/notes/base_service.rb
+++ b/app/services/notes/base_service.rb
@@ -2,6 +2,8 @@
module Notes
class BaseService < ::BaseService
+ include Gitlab::InternalEventsTracking
+
def clear_noteable_diffs_cache(note)
if note.is_a?(DiffNote) &&
note.start_of_discussion? &&
@@ -11,7 +13,14 @@ module Notes
end
def increment_usage_counter(note)
- Gitlab::UsageDataCounters::NoteCounter.count(:create, note.noteable_type)
+ case note.noteable_type
+ when 'Commit'
+ track_internal_event('create_commit_note', project: project, user: current_user)
+ when 'Snippet'
+ track_internal_event('create_snippet_note', project: project, user: current_user)
+ when 'MergeRequest'
+ track_internal_event('create_merge_request_note', project: project, user: current_user)
+ end
end
end
end
diff --git a/app/services/notes/build_service.rb b/app/services/notes/build_service.rb
index 91993700e25..59f130704e3 100644
--- a/app/services/notes/build_service.rb
+++ b/app/services/notes/build_service.rb
@@ -2,27 +2,45 @@
module Notes
class BuildService < ::BaseService
- def execute
+ def execute(executing_user: nil)
in_reply_to_discussion_id = params.delete(:in_reply_to_discussion_id)
- external_author = params.delete(:external_author)
+ handle_external_author
- discussion = nil
+ executing_user ||= current_user
- if external_author.present?
- note_metadata = Notes::NoteMetadata.new(email_participant: external_author)
- params[:note_metadata] = note_metadata
- end
+ discussion = nil
if in_reply_to_discussion_id.present?
discussion = find_discussion(in_reply_to_discussion_id)
- return discussion_not_found unless discussion && can?(current_user, :create_note, discussion.noteable)
+ return discussion_not_found unless discussion && can?(executing_user, :create_note, discussion.noteable)
discussion = discussion.convert_to_discussion! if discussion.can_convert_to_discussion?
- params.merge!(discussion.reply_attributes)
+ reply_attributes = discussion.reply_attributes
+ # NOTE: Avoid overriding noteable if it already exists so that we don't have to reload noteable.
+ reply_attributes = reply_attributes.except(:noteable_id, :noteable_type) if params[:noteable]
+
+ params.merge!(reply_attributes)
end
+ handle_confidentiality_params
+
+ new_note(params, discussion)
+ end
+
+ private
+
+ def handle_external_author
+ external_author = params.delete(:external_author)
+
+ return unless external_author.present?
+
+ note_metadata = Notes::NoteMetadata.new(email_participant: external_author)
+ params[:note_metadata] = note_metadata
+ end
+
+ def handle_confidentiality_params
# The `confidential` param for notes is deprecated with 15.3
# and renamed to `internal`.
# We still accept `confidential` until the param gets removed from the API.
@@ -30,12 +48,8 @@ module Notes
# the parameter. Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/367923.
params[:confidential] = params[:internal] || params[:confidential]
params.delete(:internal)
-
- new_note(params, discussion)
end
- private
-
def new_note(params, discussion)
note = Note.new(params)
note.project = project
diff --git a/app/services/notes/copy_service.rb b/app/services/notes/copy_service.rb
index e7182350837..3a7961d96e8 100644
--- a/app/services/notes/copy_service.rb
+++ b/app/services/notes/copy_service.rb
@@ -37,16 +37,16 @@ module Notes
end
def params_from_note(note, new_note)
- new_discussion_ids[note.discussion_id] ||= Discussion.discussion_id(new_note)
+ new_discussion_ids[note.discussion_id] ||= ::Discussion.discussion_id(new_note)
new_params = sanitized_note_params(note)
-
new_params.merge!(
project: to_noteable.project,
noteable: to_noteable,
discussion_id: new_discussion_ids[note.discussion_id],
created_at: note.created_at,
- updated_at: note.updated_at
+ updated_at: note.updated_at,
+ imported_from: :none
)
if note.system_note_metadata
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 0ccb25c2335..cb8c7c447da 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -4,8 +4,8 @@ module Notes
class CreateService < ::Notes::BaseService
include IncidentManagement::UsageData
- def execute(skip_capture_diff_note_position: false, skip_merge_status_trigger: false, skip_set_reviewed: false)
- note = Notes::BuildService.new(project, current_user, params.except(:merge_request_diff_head_sha)).execute
+ def execute(skip_capture_diff_note_position: false, skip_merge_status_trigger: false, executing_user: nil)
+ note = build_note(executing_user)
# n+1: https://gitlab.com/gitlab-org/gitlab-foss/issues/37440
note_valid = Gitlab::GitalyClient.allow_n_plus_1_calls do
@@ -21,12 +21,9 @@ module Notes
# only, there is no need be create a note!
execute_quick_actions(note) do |only_commands|
- note.check_for_spam(action: :create, user: current_user) unless only_commands
+ note.check_for_spam(action: :create, user: current_user) if check_for_spam?(only_commands)
- note.run_after_commit do
- # Finish the harder work in the background
- NewNoteWorker.perform_async(note.id)
- end
+ after_commit(note)
note_saved = note.with_transaction_returning_status do
break false if only_commands
@@ -40,8 +37,7 @@ module Notes
when_saved(
note,
skip_capture_diff_note_position: skip_capture_diff_note_position,
- skip_merge_status_trigger: skip_merge_status_trigger,
- skip_set_reviewed: skip_set_reviewed
+ skip_merge_status_trigger: skip_merge_status_trigger
)
end
end
@@ -51,6 +47,24 @@ module Notes
private
+ def build_note(executing_user)
+ Notes::BuildService
+ .new(project, current_user, params.except(:merge_request_diff_head_sha))
+ .execute(executing_user: executing_user)
+ end
+
+ def check_for_spam?(only_commands)
+ !only_commands
+ end
+
+ def after_commit(note)
+ note.run_after_commit do
+ # Complete more expensive operations like sending
+ # notifications and post processing in a background worker.
+ NewNoteWorker.perform_async(note.id)
+ end
+ end
+
def execute_quick_actions(note)
return yield(false) unless quick_actions_supported?(note)
@@ -83,8 +97,7 @@ module Notes
end
def when_saved(
- note, skip_capture_diff_note_position: false, skip_merge_status_trigger: false,
- skip_set_reviewed: false)
+ note, skip_capture_diff_note_position: false, skip_merge_status_trigger: false)
todo_service.new_note(note, current_user)
clear_noteable_diffs_cache(note)
Suggestions::CreateService.new(note).execute
@@ -92,8 +105,6 @@ module Notes
track_event(note, current_user)
if note.for_merge_request? && note.start_of_discussion?
- set_reviewed(note) unless skip_set_reviewed
-
if !skip_capture_diff_note_position && note.diff_note?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
@@ -176,19 +187,6 @@ module Notes
track_note_creation_usage_for_merge_requests(note) if note.for_merge_request?
track_incident_action(user, note.noteable, 'incident_comment') if note.for_issue?
track_note_creation_in_ipynb(note)
- track_note_creation_visual_review(note)
-
- metric_key_path = 'counts.commit_comment'
-
- Gitlab::Tracking.event(
- 'Notes::CreateService',
- 'create_commit_comment',
- project: project,
- namespace: project&.namespace,
- user: user,
- label: metric_key_path,
- context: [Gitlab::Usage::MetricDefinition.context_for(metric_key_path).to_context]
- )
end
def tracking_data_for(note)
@@ -220,17 +218,6 @@ module Notes
Gitlab::UsageDataCounters::IpynbDiffActivityCounter.note_created(note)
end
-
- def track_note_creation_visual_review(note)
- Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
- end
-
- def set_reviewed(note)
- return if Feature.enabled?(:mr_request_changes, current_user)
-
- ::MergeRequests::UpdateReviewerStateService.new(project: project, current_user: current_user)
- .execute(note.noteable, "reviewed")
- end
end
end
diff --git a/app/services/notes/post_process_service.rb b/app/services/notes/post_process_service.rb
index 6e92a887cdd..b4e5816bf4d 100644
--- a/app/services/notes/post_process_service.rb
+++ b/app/services/notes/post_process_service.rb
@@ -29,7 +29,7 @@ module Notes
end
def hook_data
- Gitlab::DataBuilder::Note.build(note, note.author)
+ Gitlab::DataBuilder::Note.build(note, note.author, :create)
end
def execute_note_hooks
diff --git a/app/services/notes/quick_actions_service.rb b/app/services/notes/quick_actions_service.rb
index 1b852710677..d78e53909df 100644
--- a/app/services/notes/quick_actions_service.rb
+++ b/app/services/notes/quick_actions_service.rb
@@ -34,9 +34,16 @@ module Notes
def execute(note, options = {})
return [note.note, {}] unless supported?(note)
- @interpret_service = QuickActions::InterpretService.new(project, current_user, options)
+ @interpret_service = QuickActions::InterpretService.new(
+ container: note.resource_parent,
+ current_user: current_user,
+ params: options
+ )
- interpret_service.execute(note.note, note.noteable)
+ # NOTE: old_note would be nil if the note hasn't changed or it is a new record
+ old_note, _ = note.note_change
+
+ interpret_service.execute_with_original_text(note.note, note.noteable, original_text: old_note)
end
# Applies updates extracted to note#noteable
@@ -90,4 +97,4 @@ module Notes
end
end
-Notes::QuickActionsService.prepend_mod_with('Notes::QuickActionsService')
+Notes::QuickActionsService.prepend_mod
diff --git a/app/services/notes/update_service.rb b/app/services/notes/update_service.rb
index 52940281018..60e902051c9 100644
--- a/app/services/notes/update_service.rb
+++ b/app/services/notes/update_service.rb
@@ -34,6 +34,8 @@ module Notes
update_todos(note, old_mentioned_users)
update_suggestions(note)
+
+ execute_note_webhook(note)
end
if quick_actions_service.commands_executed_count.to_i > 0
@@ -97,6 +99,16 @@ module Notes
)
end
+ def execute_note_webhook(note)
+ return unless note.project && note.previous_changes.include?('note')
+
+ note_data = Gitlab::DataBuilder::Note.build(note, note.author, :update)
+ is_confidential = note.confidential?(include_noteable: true)
+ hooks_scope = is_confidential ? :confidential_note_hooks : :note_hooks
+
+ note.project.execute_hooks(note_data, hooks_scope)
+ end
+
def track_note_edit_usage_for_merge_requests(note)
Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_edit_comment_action(note: note)
end
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 3c40707d0c6..4d3bf1efedc 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -75,15 +75,17 @@ class NotificationService
end
end
- def resource_access_tokens_about_to_expire(bot_user, token_names)
- recipients = bot_user.resource_bot_owners.select { |owner| owner.can?(:receive_notifications) }
+ def bot_resource_access_token_about_to_expire(bot_user, token_name)
+ recipients = bot_user.resource_bot_owners_and_maintainers.select { |user| user.can?(:receive_notifications) }
resource = bot_user.resource_bot_resource
recipients.each do |recipient|
- mailer.resource_access_tokens_about_to_expire_email(
+ log_info("Notifying resource access token owner about expiring tokens", recipient)
+
+ mailer.bot_resource_access_token_about_to_expire_email(
recipient,
resource,
- token_names
+ token_name
).deliver_later
end
end
@@ -100,6 +102,8 @@ class NotificationService
def access_token_about_to_expire(user, token_names)
return unless user.can?(:receive_notifications)
+ log_info("Notifying User about expiring tokens", user)
+
mailer.access_token_about_to_expire_email(user, token_names).deliver_later
end
@@ -443,7 +447,7 @@ class NotificationService
def send_service_desk_notification(note)
return unless note.noteable_type == 'Issue'
return if note.confidential
- return unless note.project.service_desk_enabled?
+ return unless note.project&.service_desk_enabled?
issue = note.noteable
recipients = issue.issue_email_participants
@@ -526,10 +530,6 @@ class NotificationService
).deliver_later
end
- def invite_member(member, token)
- mailer.member_invited_email(member.real_source_type, member.id, token).deliver_later
- end
-
def new_member(member)
notifiable_options = case member.source
when Group
@@ -568,10 +568,6 @@ class NotificationService
mailer.member_about_to_expire_email(member.real_source_type, member.id).deliver_later
end
- def invite_member_reminder(group_member, token, reminder_index)
- mailer.member_invited_reminder_email(group_member.real_source_type, group_member.id, token, reminder_index).deliver_later
- end
-
def project_was_moved(project, old_path_with_namespace)
recipients = project_moved_recipients(project)
recipients = notifiable_users(recipients, :custom, custom_action: :moved_project, project: project)
@@ -894,6 +890,14 @@ class NotificationService
private
+ def log_info(message_text, user)
+ Gitlab::AppLogger.info(
+ message: message_text,
+ class: self.class,
+ user_id: user.id
+ )
+ end
+
def approve_mr_email(merge_request, project, current_user)
recipients = ::NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: 'approve')
diff --git a/app/services/organizations/create_service.rb b/app/services/organizations/create_service.rb
index f29065b8ffd..59f88196559 100644
--- a/app/services/organizations/create_service.rb
+++ b/app/services/organizations/create_service.rb
@@ -3,13 +3,18 @@
module Organizations
class CreateService < ::Organizations::BaseService
def execute
- return error_no_permissions unless current_user&.can?(:create_organization)
-
- organization = Organization.create(params)
+ return error_no_permissions unless can?(current_user, :create_organization)
+ return error_feature_flag unless Feature.enabled?(:allow_organization_creation, current_user)
+
+ add_organization_owner_attributes
+ organization = Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification
+ .allow_cross_database_modification_within_transaction(
+ url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/438757'
+ ) do
+ Organization.create(params)
+ end
if organization.persisted?
- add_organization_owner(organization)
-
ServiceResponse.success(payload: { organization: organization })
else
error_creating(organization)
@@ -18,8 +23,8 @@ module Organizations
private
- def add_organization_owner(organization)
- organization.organization_users.create(user: current_user, access_level: :owner)
+ def add_organization_owner_attributes
+ @params[:organization_users_attributes] = [{ user: current_user, access_level: :owner }]
end
def error_no_permissions
@@ -31,5 +36,10 @@ module Organizations
ServiceResponse.error(message: Array(message))
end
+
+ def error_feature_flag
+ # Don't translate feature flag error because it's temporary.
+ ServiceResponse.error(message: ['Feature flag `allow_organization_creation` is not enabled for this user.'])
+ end
end
end
diff --git a/app/services/packages/cleanup/execute_policy_service.rb b/app/services/packages/cleanup/execute_policy_service.rb
index 891866bce5f..f10da978083 100644
--- a/app/services/packages/cleanup/execute_policy_service.rb
+++ b/app/services/packages/cleanup/execute_policy_service.rb
@@ -10,6 +10,8 @@ module Packages
DUPLICATED_FILES_BATCH_SIZE = 10_000
MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE = 200
+ delegate :project, to: :@policy, private: true
+
def initialize(policy)
@policy = policy
@counts = {
@@ -34,6 +36,12 @@ module Packages
response_success(timeout: result == :timeout)
end
+ def installable_package_files
+ ::Packages::PackageFile
+ .installable
+ .for_package_ids(project.packages.installable)
+ end
+
def cleanup_duplicated_files_on(package_files)
unique_package_id_and_file_name_from(package_files).each do |package_id, file_name|
result = remove_duplicated_files_for(package_id: package_id, file_name: file_name)
@@ -45,11 +53,10 @@ module Packages
end
def unique_package_id_and_file_name_from(package_files)
- # This is a highly custom query for this service, that's why it's not in the model.
- # rubocop: disable CodeReuse/ActiveRecord
+ # rubocop: disable CodeReuse/ActiveRecord -- This is a highly custom query for this service, that's why it's not in the model.
package_files.group(:package_id, :file_name)
.having("COUNT(*) > #{@policy.keep_n_duplicated_package_files}")
- .pluck(:package_id, :file_name)
+ .pluck(:package_id, :file_name) # rubocop:disable Database/AvoidUsingPluckWithoutLimit -- package_files is already in batches
# rubocop: enable CodeReuse/ActiveRecord
end
@@ -61,21 +68,16 @@ module Packages
.limit(@policy.keep_n_duplicated_package_files)
.pluck_primary_key
+ keep_conan_manifest_file(base, ids_to_keep) if file_name == ::Packages::Conan::FileMetadatum::CONAN_MANIFEST
+
duplicated_package_files = base.id_not_in(ids_to_keep)
::Packages::MarkPackageFilesForDestructionService.new(duplicated_package_files)
.execute(batch_deadline: batch_deadline, batch_size: MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE)
end
- def project
- @policy.project
- end
-
- def installable_package_files
- ::Packages::PackageFile.installable
- .for_package_ids(
- ::Packages::Package.installable
- .for_projects(project.id)
- )
+ def keep_conan_manifest_file(base, ids)
+ recipe_manifest_id = base.with_conan_file_type(:recipe_file).recent.limit(1).pluck_primary_key
+ ids.concat(recipe_manifest_id)
end
def batch_deadline
diff --git a/app/services/packages/conan/search_service.rb b/app/services/packages/conan/search_service.rb
index c65c9a85da8..28d9878325d 100644
--- a/app/services/packages/conan/search_service.rb
+++ b/app/services/packages/conan/search_service.rb
@@ -6,7 +6,6 @@ module Packages
include ActiveRecord::Sanitization::ClassMethods
WILDCARD = '*'
- RECIPE_SEPARATOR = '@'
def execute
ServiceResponse.success(payload: { results: search_results })
@@ -17,8 +16,6 @@ module Packages
def search_results
return [] if wildcard_query?
- return search_for_single_package(sanitized_query) if params[:query].include?(RECIPE_SEPARATOR)
-
search_packages
end
@@ -30,12 +27,6 @@ module Packages
@sanitized_query ||= sanitize_sql_like(params[:query].delete(WILDCARD))
end
- def search_for_single_package(query)
- ::Packages::Conan::SinglePackageSearchService
- .new(query, current_user)
- .execute[:results]
- end
-
def search_packages
::Packages::Conan::PackageFinder
.new(current_user, { query: build_query }, project: project)
diff --git a/app/services/packages/conan/single_package_search_service.rb b/app/services/packages/conan/single_package_search_service.rb
deleted file mode 100644
index e133b35c2cf..00000000000
--- a/app/services/packages/conan/single_package_search_service.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-module Packages
- module Conan
- class SinglePackageSearchService # rubocop:disable Search/NamespacedClass
- include Gitlab::Utils::StrongMemoize
-
- def initialize(query, current_user)
- @name, @version, @username, _ = query.split(%r{[@/]})
- @current_user = current_user
- end
-
- def execute
- ServiceResponse.success(payload: { results: search_results })
- end
-
- private
-
- attr_reader :name, :version, :username, :current_user
-
- def search_results
- return [] unless can_access_project_package?
-
- [package&.conan_recipe].compact
- end
-
- def package
- project
- .packages
- .with_name(name)
- .with_version(version)
- .order_created
- .last
- end
-
- def project
- Project.find_by_full_path(full_path)
- end
- strong_memoize_attr :project
-
- def full_path
- ::Packages::Conan::Metadatum.full_path_from(package_username: username)
- end
-
- def can_access_project_package?
- Ability.allowed?(current_user, :read_package, project.try(:packages_policy_subject))
- end
- end
- end
-end
diff --git a/app/services/packages/create_dependency_service.rb b/app/services/packages/create_dependency_service.rb
index 51f8a514c55..72084292af3 100644
--- a/app/services/packages/create_dependency_service.rb
+++ b/app/services/packages/create_dependency_service.rb
@@ -1,9 +1,12 @@
# frozen_string_literal: true
+
module Packages
# rubocop: disable Gitlab/BulkInsert
class CreateDependencyService < BaseService
attr_reader :package, :dependencies
+ delegate :project_id, to: :package, private: true
+
def initialize(package, dependencies)
@package = package
@dependencies = dependencies
@@ -35,7 +38,7 @@ module Packages
end
def find_existing_ids_and_names(names_and_version_patterns)
- ids_and_names = Packages::Dependency.for_package_names_and_version_patterns(names_and_version_patterns)
+ ids_and_names = Packages::Dependency.for_package_project_id_names_and_version_patterns(project_id, names_and_version_patterns)
.pluck_ids_and_names
ids = ids_and_names.map(&:first) || []
names = ids_and_names.map(&:second) || []
@@ -48,7 +51,8 @@ module Packages
rows = names_and_version_patterns.map do |name, version_pattern|
{
name: name,
- version_pattern: version_pattern
+ version_pattern: version_pattern,
+ project_id: project_id
}
end
@@ -60,7 +64,7 @@ module Packages
# sure that the results are fresh from the database and not from a stalled
# and potentially wrong cache, this query has to be done with the query
# cache disabled.
- Packages::Dependency.ids_for_package_names_and_version_patterns(names_and_version_patterns)
+ Packages::Dependency.ids_for_package_project_id_names_and_version_patterns(project_id, names_and_version_patterns)
end
end
diff --git a/app/services/packages/create_event_service.rb b/app/services/packages/create_event_service.rb
index 8eac30f0022..a47a7359e91 100644
--- a/app/services/packages/create_event_service.rb
+++ b/app/services/packages/create_event_service.rb
@@ -2,14 +2,33 @@
module Packages
class CreateEventService < BaseService
+ INTERNAL_EVENTS_NAMES = {
+ 'delete_package' => 'delete_package_from_registry',
+ 'pull_package' => 'pull_package_from_registry',
+ 'push_package' => 'push_package_to_registry',
+ 'push_symbol_package' => 'push_symbol_package_to_registry',
+ 'pull_symbol_package' => 'pull_symbol_package_from_registry'
+ }.freeze
+
def execute
::Packages::Event.unique_counters_for(event_scope, event_name, originator_type).each do |event_name|
::Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name, values: current_user.id)
end
- ::Packages::Event.counters_for(event_scope, event_name, originator_type).each do |event_name|
- ::Gitlab::UsageDataCounters::PackageEventCounter.count(event_name)
- end
+ return unless INTERNAL_EVENTS_NAMES.key?(event_name)
+
+ user = current_user if current_user.is_a?(User)
+
+ Gitlab::InternalEvents.track_event(
+ INTERNAL_EVENTS_NAMES[event_name],
+ user: user,
+ project: project,
+ namespace: params[:namespace],
+ additional_properties: {
+ label: event_scope.to_s,
+ property: originator_type.to_s
+ }
+ )
end
def originator_type
diff --git a/app/services/packages/debian/extract_changes_metadata_service.rb b/app/services/packages/debian/extract_changes_metadata_service.rb
index 5f06f46de58..a8e4784a02e 100644
--- a/app/services/packages/debian/extract_changes_metadata_service.rb
+++ b/app/services/packages/debian/extract_changes_metadata_service.rb
@@ -107,7 +107,7 @@ module Packages
end
def incoming
- @package_file.package.project.packages.debian_incoming_package!
+ ::Packages::Debian::Package.for_projects(@package_file.package.project).incoming_package!
end
strong_memoize_attr(:incoming)
end
diff --git a/app/services/packages/debian/process_package_file_service.rb b/app/services/packages/debian/process_package_file_service.rb
index 684192f6006..a1bcc0a6f73 100644
--- a/app/services/packages/debian/process_package_file_service.rb
+++ b/app/services/packages/debian/process_package_file_service.rb
@@ -32,7 +32,7 @@ module Packages
cleanup_temp_package
end
- ::Packages::Debian::GenerateDistributionWorker.perform_async(:project, package.debian_distribution.id)
+ ::Packages::Debian::GenerateDistributionWorker.perform_async(:project, package.distribution.id)
end
end
@@ -90,10 +90,9 @@ module Packages
end
def package
- packages = temp_package.project
- .packages
- .existing_debian_packages_with(name: package_name, version: package_version)
- package = packages.with_debian_codename_or_suite(package_distribution)
+ packages = ::Packages::Debian::Package.for_projects(temp_package.project)
+ .existing_packages_with(name: package_name, version: package_version)
+ package = packages.with_codename_or_suite(package_distribution)
.first
unless package
@@ -101,7 +100,7 @@ module Packages
if package_in_other_distribution
raise ArgumentError, "Debian package #{package_name} #{package_version} exists " \
- "in distribution #{package_in_other_distribution.debian_distribution.codename}"
+ "in distribution #{package_in_other_distribution.distribution.codename}"
end
end
@@ -153,7 +152,7 @@ module Packages
return unless using_temporary_package?
package.update!(
- debian_publication_attributes: { distribution_id: distribution.id }
+ publication_attributes: { distribution_id: distribution.id }
)
end
diff --git a/app/services/packages/mark_packages_for_destruction_service.rb b/app/services/packages/mark_packages_for_destruction_service.rb
index 2c81a52ea24..11722f696f6 100644
--- a/app/services/packages/mark_packages_for_destruction_service.rb
+++ b/app/services/packages/mark_packages_for_destruction_service.rb
@@ -60,8 +60,8 @@ module Packages
def mark_package_files_for_destruction(packages)
::Packages::MarkPackageFilesForDestructionWorker.bulk_perform_async_with_contexts(
packages,
- arguments_proc: -> (package) { package.id },
- context_proc: -> (package) { { project: package.project, user: @current_user } }
+ arguments_proc: ->(package) { package.id },
+ context_proc: ->(package) { { project: package.project, user: @current_user } }
)
end
@@ -69,8 +69,8 @@ module Packages
maven_packages_with_version = packages.select { |pkg| pkg.maven? && pkg.version? }
::Packages::Maven::Metadata::SyncWorker.bulk_perform_async_with_contexts(
maven_packages_with_version,
- arguments_proc: -> (package) { [@current_user.id, package.project_id, package.name] },
- context_proc: -> (package) { { project: package.project, user: @current_user } }
+ arguments_proc: ->(package) { [@current_user.id, package.project_id, package.name] },
+ context_proc: ->(package) { { project: package.project, user: @current_user } }
)
end
@@ -78,8 +78,8 @@ module Packages
npm_packages = packages.select(&:npm?)
::Packages::Npm::CreateMetadataCacheWorker.bulk_perform_async_with_contexts(
npm_packages,
- arguments_proc: -> (package) { [package.project_id, package.name] },
- context_proc: -> (package) { { project: package.project, user: @current_user } }
+ arguments_proc: ->(package) { [package.project_id, package.name] },
+ context_proc: ->(package) { { project: package.project, user: @current_user } }
)
end
diff --git a/app/services/packages/maven/create_package_service.rb b/app/services/packages/maven/create_package_service.rb
index 540c7b1d4da..14a915e165b 100644
--- a/app/services/packages/maven/create_package_service.rb
+++ b/app/services/packages/maven/create_package_service.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
module Packages
module Maven
class CreatePackageService < ::Packages::CreatePackageService
@@ -6,7 +7,7 @@ module Packages
app_group, _, app_name = params[:name].rpartition('/')
app_group.tr!('/', '.')
- create_package!(:maven,
+ package = create_package!(:maven,
maven_metadatum_attributes: {
path: params[:path],
app_group: app_group,
@@ -14,6 +15,10 @@ module Packages
app_version: params[:version]
}
)
+
+ ServiceResponse.success(payload: { package: package })
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(message: e.message, reason: :invalid_parameter)
end
end
end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index 2ff3ebc3bb2..abaa5d93407 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
module Packages
module Maven
class FindOrCreatePackageService < BaseService
@@ -12,8 +13,8 @@ module Packages
::Packages::Maven::PackageFinder.new(current_user, project, path: path)
.execute&.last
- unless Namespace::PackageSetting.duplicates_allowed?(package)
- return ServiceResponse.error(message: 'Duplicate package is not allowed') if target_package_is_duplicate?(package)
+ if !Namespace::PackageSetting.duplicates_allowed?(package) && target_package_is_duplicate?(package)
+ return ServiceResponse.error(message: 'Duplicate package is not allowed')
end
unless package
@@ -49,9 +50,13 @@ module Packages
version: version
}
- package =
+ service_response =
::Packages::Maven::CreatePackageService.new(project, current_user, package_params)
.execute
+
+ return service_response if service_response.error?
+
+ package = service_response[:package]
end
package.create_build_infos!(params[:build])
@@ -94,7 +99,7 @@ module Packages
match_data = file_name.match(Gitlab::Regex::Packages::MAVEN_SNAPSHOT_DYNAMIC_PARTS)
if match_data
- file_name.gsub(match_data.captures.last, "")
+ file_name.gsub(match_data.captures.last, '')
else
file_name
end
diff --git a/app/services/packages/ml_model/create_package_file_service.rb b/app/services/packages/ml_model/create_package_file_service.rb
index ee2f3077e4c..2cecf87d5b1 100644
--- a/app/services/packages/ml_model/create_package_file_service.rb
+++ b/app/services/packages/ml_model/create_package_file_service.rb
@@ -4,7 +4,7 @@ module Packages
module MlModel
class CreatePackageFileService < BaseService
def execute
- @package = params[:model_version]&.package
+ @package = params[:package]
return unless @package
@@ -29,7 +29,7 @@ module Packages
file: params[:file],
size: params[:file].size,
file_sha256: params[:file].sha256,
- file_name: params[:file_name],
+ file_name: URI.encode_uri_component(params[:file_name]),
build: params[:build]
}
diff --git a/app/services/packages/ml_model/package_for_candidate_service.rb b/app/services/packages/ml_model/package_for_candidate_service.rb
new file mode 100644
index 00000000000..2bb3d9e66a4
--- /dev/null
+++ b/app/services/packages/ml_model/package_for_candidate_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Packages
+ module MlModel
+ class PackageForCandidateService < ::Packages::CreatePackageService
+ def execute
+ candidate = params[:candidate]
+
+ return unless candidate&.for_model?
+
+ package = find_or_create_package!(
+ ::Packages::Package.package_types['ml_model'],
+ name: candidate.package_name,
+ version: candidate.package_version
+ )
+
+ candidate.update!(package: package) if candidate.package_id != package.id
+
+ package
+ end
+ end
+ end
+end
diff --git a/app/services/packages/npm/check_manifest_coherence_service.rb b/app/services/packages/npm/check_manifest_coherence_service.rb
new file mode 100644
index 00000000000..d8cf0d2440e
--- /dev/null
+++ b/app/services/packages/npm/check_manifest_coherence_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Packages
+ module Npm
+ class CheckManifestCoherenceService
+ MismatchError = Class.new(StandardError)
+
+ PKG_TYPE = 'npm'
+ MANIFEST_NOT_COHERENT_ERROR = 'Package manifest is not coherent'
+ VERSION_NOT_COMPLIANT_ERROR = 'Version in package.json is not SemVer compliant'
+
+ def initialize(package, package_json_entry)
+ @package = package
+ @package_json_entry = package_json_entry
+ end
+
+ def execute
+ parsed_package_json = Gitlab::Json.parse(package_json_entry.read)
+
+ raise MismatchError, MANIFEST_NOT_COHERENT_ERROR unless coherent?(parsed_package_json)
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :package, :package_json_entry
+
+ def coherent?(package_json)
+ package_json['name'] == package.name &&
+ same_version?(package_json['version'], package.version)
+ end
+
+ def same_version?(version1, version2)
+ v1 = SemverDialects.parse_version(PKG_TYPE, version1)
+ v2 = SemverDialects.parse_version(PKG_TYPE, version2)
+
+ v1 == v2
+ rescue SemverDialects::InvalidVersionError
+ raise MismatchError, VERSION_NOT_COMPLIANT_ERROR
+ end
+ end
+ end
+end
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index b1970053745..bb9432f033c 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -1,17 +1,19 @@
# frozen_string_literal: true
+
module Packages
module Npm
class CreatePackageService < ::Packages::CreatePackageService
include Gitlab::Utils::StrongMemoize
include ExclusiveLeaseGuard
+ INSTALL_SCRIPT_KEYS = %w[preinstall install postinstall].freeze
PACKAGE_JSON_NOT_ALLOWED_FIELDS = %w[readme readmeFilename licenseText contributors exports].freeze
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i
ERROR_REASON_INVALID_PARAMETER = :invalid_parameter
ERROR_REASON_PACKAGE_EXISTS = :package_already_exists
ERROR_REASON_PACKAGE_LEASE_TAKEN = :package_lease_taken
- ERROR_REASON_PACKAGE_PROTECTED = :package_attachment_data_empty
+ ERROR_REASON_PACKAGE_PROTECTED = :package_protected
def execute
return error('Version is empty.', ERROR_REASON_INVALID_PARAMETER) if version.blank?
@@ -20,7 +22,7 @@ module Packages
return error('Package protected.', ERROR_REASON_PACKAGE_PROTECTED) if current_package_protected?
return error('File is too large.', ERROR_REASON_INVALID_PARAMETER) if file_size_exceeded?
- package = try_obtain_lease do
+ package, package_file = try_obtain_lease do
ApplicationRecord.transaction { create_npm_package! }
end
@@ -28,6 +30,8 @@ module Packages
return error('Could not obtain package lease. Please try again.', ERROR_REASON_PACKAGE_LEASE_TAKEN)
end
+ ::Packages::Npm::ProcessPackageFileWorker.perform_async(package_file.id) if package.processing?
+
ServiceResponse.success(payload: { package: package })
end
@@ -38,21 +42,22 @@ module Packages
end
def create_npm_package!
- package = create_package!(:npm, name: name, version: version)
+ package = create_package!(:npm, name: name, version: version, status: :processing)
- ::Packages::CreatePackageFileService.new(package, file_params).execute
+ package_file = ::Packages::CreatePackageFileService.new(package, file_params).execute
::Packages::CreateDependencyService.new(package, package_dependencies).execute
::Packages::Npm::CreateTagService.new(package, dist_tag).execute
create_npm_metadatum!(package)
- package
+ [package, package_file]
end
def create_npm_metadatum!(package)
package.create_npm_metadatum!(package_json: package_json)
rescue ActiveRecord::RecordInvalid => e
- if package.npm_metadatum && package.npm_metadatum.errors.where(:package_json, :too_large).any? # rubocop: disable CodeReuse/ActiveRecord
+
+ if package.npm_metadatum && package.npm_metadatum.errors.added?(:package_json, :too_large)
Gitlab::ErrorTracking.track_exception(e, field_sizes: field_sizes_for_error_tracking)
end
@@ -71,8 +76,18 @@ module Packages
def current_package_protected?
return false if Feature.disabled?(:packages_protected_packages, project)
+ unless current_user.is_a?(User)
+ return project.package_protection_rules.for_package_type(:npm).for_package_name(name).exists?
+ end
+
+ return false if current_user.can_admin_all_resources?
+
user_project_authorization_access_level = current_user.max_member_access_for_project(project.id)
- project.package_protection_rules.for_push_exists?(access_level: user_project_authorization_access_level, package_name: name, package_type: :npm)
+
+ project.package_protection_rules.for_push_exists?(
+ access_level: user_project_authorization_access_level,
+ package_name: name, package_type: :npm
+ )
end
def name
@@ -89,6 +104,10 @@ module Packages
end
def package_json
+ if version_data['scripts'] && (version_data['scripts'].keys & INSTALL_SCRIPT_KEYS).any?
+ version_data['hasInstallScript'] = true
+ end
+
version_data.except(*PACKAGE_JSON_NOT_ALLOWED_FIELDS)
end
@@ -110,7 +129,8 @@ module Packages
def calculated_package_file_size
# This calculation is based on:
# 1. 4 chars in a Base64 encoded string are 3 bytes in the original string. Meaning 1 char is 0.75 bytes.
- # 2. The encoded string may have 1 or 2 extra '=' chars used for padding. Each padding char means 1 byte less in the original string.
+ # 2. The encoded string may have 1 or 2 extra '=' chars used for padding. Each padding char means 1 byte less in
+ # the original string.
# Reference:
# - https://blog.aaronlenoir.com/2017/11/10/get-original-length-from-base-64-string/
# - https://en.wikipedia.org/wiki/Base64#Decoding_Base64_with_padding
diff --git a/app/services/packages/npm/generate_metadata_service.rb b/app/services/packages/npm/generate_metadata_service.rb
index 240c657039f..7c0a81b0941 100644
--- a/app/services/packages/npm/generate_metadata_service.rb
+++ b/app/services/packages/npm/generate_metadata_service.rb
@@ -9,13 +9,17 @@ module Packages
# Allowed fields are those defined in the abbreviated form
# defined here: https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
# except: name, version, dist, dependencies and xDependencies. Those are generated by this service.
- PACKAGE_JSON_ALLOWED_FIELDS = %w[deprecated bin directories dist engines _hasShrinkwrap].freeze
+ PACKAGE_JSON_ALLOWED_FIELDS = %w[deprecated bin directories dist engines _hasShrinkwrap hasInstallScript].freeze
def initialize(name, packages)
@name = name
@packages = packages
@dependencies = {}
@dependency_ids = Hash.new { |h, key| h[key] = {} }
+ @tags = {}
+ @tags_updated_at = {}
+ @versions = {}
+ @latest_version = nil
end
def execute(only_dist_tags: false)
@@ -24,47 +28,83 @@ module Packages
private
- attr_reader :name, :packages, :dependencies, :dependency_ids
+ attr_reader :name, :packages, :dependencies, :dependency_ids, :tags, :tags_updated_at, :versions
+ attr_accessor :latest_version
def metadata(only_dist_tags)
- result = { dist_tags: dist_tags }
+ packages.each_batch do |batch|
+ relation = preload_needed_relations(batch, only_dist_tags)
- unless only_dist_tags
- result[:name] = name
- result[:versions] = versions
+ relation.each do |package|
+ build_tags(package)
+ store_latest_version(package.version)
+ next if only_dist_tags
+
+ build_versions(package)
+ end
end
- result
+ {
+ name: only_dist_tags ? nil : name,
+ versions: versions,
+ dist_tags: tags.tap { |t| t['latest'] ||= latest_version }
+ }.compact_blank
end
- def versions
- package_versions = {}
-
- packages.each_batch do |relation|
- load_dependencies(relation)
- load_dependency_ids(relation)
+ def preload_needed_relations(batch, only_dist_tags)
+ relation = batch.preload_tags
+ return relation if only_dist_tags
- batched_packages = relation.preload_files
- .preload_npm_metadatum
+ load_dependencies(relation)
+ load_dependency_ids(relation)
- batched_packages.each do |package|
- package_file = package.installable_package_files.last
+ relation.preload_files.preload_npm_metadatum
+ end
- next unless package_file
+ def load_dependencies(packages)
+ Packages::Dependency
+ .id_in(
+ Packages::DependencyLink
+ .for_packages(packages)
+ .select_dependency_id
+ )
+ .id_not_in(dependencies.keys)
+ .each_batch do |relation|
+ relation.each do |dependency|
+ dependencies[dependency.id] = { dependency.name => dependency.version_pattern }
+ end
+ end
+ end
- package_versions[package.version] = build_package_version(package, package_file)
+ def load_dependency_ids(packages)
+ Packages::DependencyLink
+ .dependency_ids_grouped_by_type(packages)
+ .each_batch(column: :package_id) do |relation|
+ relation.each do |dependency_link|
+ dependency_ids[dependency_link.package_id] = dependency_link.dependency_ids_by_type
+ end
end
- end
+ end
- package_versions
+ def build_tags(package)
+ package.tags.each do |tag|
+ next if tags.key?(tag.name) && tags_updated_at[tag.name] > tag.updated_at
+
+ tags[tag.name] = package.version
+ tags_updated_at[tag.name] = tag.updated_at
+ end
end
- def dist_tags
- build_package_tags.tap { |t| t['latest'] ||= sorted_versions.last }
+ def store_latest_version(version)
+ self.latest_version = version if latest_version.blank? || VersionSorter.compare(version, latest_version) == 1
end
- def build_package_tags
- package_tags.to_h { |tag| [tag.name, tag.package.version] }
+ def build_versions(package)
+ package_file = package.installable_package_files.last
+
+ return unless package_file
+
+ versions[package.version] = build_package_version(package, package_file)
end
def build_package_version(package, package_file)
@@ -80,6 +120,11 @@ module Packages
end
end
+ def abbreviated_package_json(package)
+ json = package.npm_metadatum&.package_json || {}
+ json.slice(*PACKAGE_JSON_ALLOWED_FIELDS)
+ end
+
def tarball_url(package, package_file)
expose_url api_v4_projects_packages_npm_package_name___file_name_path(
{ id: package.project_id, package_name: package.name, file_name: package_file.file_name }, true
@@ -98,46 +143,6 @@ module Packages
Packages::DependencyLink.dependency_types.invert.stringify_keys
end
strong_memoize_attr :inverted_dependency_types
-
- def sorted_versions
- versions = packages.pluck_versions.compact
- VersionSorter.sort(versions)
- end
-
- def package_tags
- Packages::Tag.for_package_ids_with_distinct_names(packages)
- .preload_package
- end
-
- def abbreviated_package_json(package)
- json = package.npm_metadatum&.package_json || {}
- json.slice(*PACKAGE_JSON_ALLOWED_FIELDS)
- end
-
- def load_dependencies(packages)
- Packages::Dependency
- .id_in(
- Packages::DependencyLink
- .for_packages(packages)
- .select_dependency_id
- )
- .id_not_in(dependencies.keys)
- .each_batch do |relation|
- relation.each do |dependency|
- dependencies[dependency.id] = { dependency.name => dependency.version_pattern }
- end
- end
- end
-
- def load_dependency_ids(packages)
- Packages::DependencyLink
- .dependency_ids_grouped_by_type(packages)
- .each_batch(column: :package_id) do |relation|
- relation.each do |dependency_link|
- dependency_ids[dependency_link.package_id] = dependency_link.dependency_ids_by_type
- end
- end
- end
end
end
end
diff --git a/app/services/packages/npm/process_package_file_service.rb b/app/services/packages/npm/process_package_file_service.rb
new file mode 100644
index 00000000000..7b6a2ff0d2d
--- /dev/null
+++ b/app/services/packages/npm/process_package_file_service.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module Packages
+ module Npm
+ class ProcessPackageFileService
+ ExtractionError = Class.new(StandardError)
+ PACKAGE_JSON_ENTRY_REGEX = %r{^[^/]+/package.json$}
+ MAX_FILE_SIZE = 4.megabytes
+
+ delegate :package, to: :package_file
+
+ def initialize(package_file)
+ @package_file = package_file
+ end
+
+ def execute
+ raise ExtractionError, 'invalid package file' unless valid_package_file?
+
+ with_package_json_entry do |entry|
+ raise ExtractionError, 'package.json not found' unless entry
+ raise ExtractionError, 'package.json file too large' if entry.size > MAX_FILE_SIZE
+
+ ::Packages::Npm::CheckManifestCoherenceService.new(package, entry).execute
+ end
+
+ package.default!
+
+ ::Packages::Npm::CreateMetadataCacheWorker.perform_async(package.project_id, package.name)
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :package_file
+
+ def valid_package_file?
+ package_file && !package_file.file.empty_size? && package&.npm? && package&.processing?
+ end
+
+ def with_package_json_entry
+ package_file.file.use_open_file(unlink_early: false) do |open_file|
+ Zlib::GzipReader.open(open_file.file_path) do |gz|
+ tar_reader = Gem::Package::TarReader.new(gz)
+
+ entry_path = entry_full_name(tar_reader)
+ yield unless entry_path.is_a?(String)
+
+ tar_reader.rewind
+ entry = tar_reader.find { |e| path_for(e) == entry_path }
+
+ yield entry
+ end
+ end
+ end
+
+ def entry_full_name(tar_reader)
+ # We need to reverse the entries to find the last package.json file in the tarball,
+ # as the last one is the one that's used by npm.
+ # We cannot get the entry directly when using #reverse_each because
+ # TarReader closes the stream after iterating over all entries
+ tar_reader.reverse_each do |entry|
+ entry_path = path_for(entry)
+ break entry_path if entry_path.match?(PACKAGE_JSON_ENTRY_REGEX)
+ end
+ end
+
+ def path_for(entry)
+ entry.full_name
+ rescue ::Gem::Package::TarInvalidError
+ entry.header.name
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/search_service.rb b/app/services/packages/nuget/search_service.rb
index 7d1585f8903..1b1568feeef 100644
--- a/app/services/packages/nuget/search_service.rb
+++ b/app/services/packages/nuget/search_service.rb
@@ -102,15 +102,19 @@ module Packages
def nuget_packages
Packages::Package.nuget
- .displayable
+ .installable
.has_version
- .without_nuget_temporary_name
end
def project_ids_cte
return unless use_project_ids_cte?
- query = projects_visible_to_user(@current_user, within_group: @project_or_group)
+ query = if Feature.enabled?(:allow_anyone_to_pull_public_nuget_packages_on_group_level, @project_or_group)
+ projects_visible_to_user_including_public_registries(@current_user, within_group: @project_or_group)
+ else
+ projects_visible_to_user(@current_user, within_group: @project_or_group)
+ end
+
Gitlab::SQL::CTE.new(:project_ids, query.select(:id))
end
strong_memoize_attr :project_ids_cte
diff --git a/app/services/packages/protection/create_rule_service.rb b/app/services/packages/protection/create_rule_service.rb
index e69eb8faf60..22149b2be1c 100644
--- a/app/services/packages/protection/create_rule_service.rb
+++ b/app/services/packages/protection/create_rule_service.rb
@@ -6,7 +6,7 @@ module Packages
ALLOWED_ATTRIBUTES = %i[
package_name_pattern
package_type
- push_protected_up_to_access_level
+ minimum_access_level_for_push
].freeze
def execute
diff --git a/app/services/packages/protection/update_rule_service.rb b/app/services/packages/protection/update_rule_service.rb
index 0dc7eb6a7b9..9bfa91dc902 100644
--- a/app/services/packages/protection/update_rule_service.rb
+++ b/app/services/packages/protection/update_rule_service.rb
@@ -8,7 +8,7 @@ module Packages
ALLOWED_ATTRIBUTES = %i[
package_name_pattern
package_type
- push_protected_up_to_access_level
+ minimum_access_level_for_push
].freeze
def initialize(package_protection_rule, current_user:, params:)
diff --git a/app/services/packages/pypi/create_package_service.rb b/app/services/packages/pypi/create_package_service.rb
index 3c582b40203..2c943fa7d4e 100644
--- a/app/services/packages/pypi/create_package_service.rb
+++ b/app/services/packages/pypi/create_package_service.rb
@@ -12,15 +12,15 @@ module Packages
required_python: params[:requires_python] || '',
metadata_version: params[:metadata_version],
author_email: params[:author_email],
- description: params[:description]&.truncate(::Packages::Pypi::Metadatum::MAX_DESCRIPTION_LENGTH),
+ description: params[:description],
description_content_type: params[:description_content_type],
summary: params[:summary],
keywords: params[:keywords]
)
- unless meta.valid?
- raise ActiveRecord::RecordInvalid, meta
- end
+ truncate_fields(meta)
+
+ raise ActiveRecord::RecordInvalid, meta unless meta.valid?
params.delete(:md5_digest) if Gitlab::FIPS.enabled?
@@ -28,8 +28,10 @@ module Packages
::Packages::CreatePackageFileService.new(created_package, file_params).execute
- created_package
+ ServiceResponse.success(payload: { package: created_package })
end
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(message: e.message, reason: :invalid_parameter)
end
private
@@ -48,6 +50,16 @@ module Packages
file_sha256: params[:sha256_digest]
}
end
+
+ def truncate_fields(meta)
+ return if meta.valid?
+
+ meta.errors.select { |error| error.type == :too_long }.each do |error|
+ field = error.attribute
+
+ meta[field] = meta[field].truncate(error.options[:count])
+ end
+ end
end
end
end
diff --git a/app/services/packages/rpm/parse_package_service.rb b/app/services/packages/rpm/parse_package_service.rb
index 3995eedef53..dd5eb0e4e4d 100644
--- a/app/services/packages/rpm/parse_package_service.rb
+++ b/app/services/packages/rpm/parse_package_service.rb
@@ -7,7 +7,7 @@ module Packages
BUILD_ATTRIBUTES_METHOD_NAMES = %i[changelogs requirements provides].freeze
STATIC_ATTRIBUTES = %i[name version release summary description arch
- license sourcerpm group buildhost packager vendor].freeze
+ license sourcerpm group buildhost packager vendor].freeze
CHANGELOGS_RPM_KEYS = %i[changelogtext changelogtime].freeze
REQUIREMENTS_RPM_KEYS = %i[requirename requireversion requireflags].freeze
diff --git a/app/services/packages/rubygems/create_dependencies_service.rb b/app/services/packages/rubygems/create_dependencies_service.rb
index 0b2ae56bf45..0b1f2672bbf 100644
--- a/app/services/packages/rubygems/create_dependencies_service.rb
+++ b/app/services/packages/rubygems/create_dependencies_service.rb
@@ -9,33 +9,19 @@ module Packages
end
def execute
- set_dependencies
+ ::Packages::CreateDependencyService.new(package, dependencies).execute
end
private
attr_reader :package, :gemspec
- def set_dependencies
- Packages::Dependency.transaction do
- dependency_type_rows = gemspec.dependencies.map do |dependency|
- dependency = Packages::Dependency.safe_find_or_create_by!(
- name: dependency.name,
- version_pattern: dependency.requirement.to_s
- )
-
- {
- dependency_id: dependency.id,
- package_id: package.id,
- dependency_type: :dependencies
- }
- end
-
- package.dependency_links.upsert_all(
- dependency_type_rows,
- unique_by: %i[package_id dependency_id dependency_type]
- )
+ def dependencies
+ names_and_versions = gemspec.dependencies.to_h do |dependency|
+ [dependency.name, dependency.requirement.to_s]
end
+
+ { 'dependencies' => names_and_versions }
end
end
end
diff --git a/app/services/packages/terraform_module/create_package_service.rb b/app/services/packages/terraform_module/create_package_service.rb
index eb48b481dd8..00255e204fc 100644
--- a/app/services/packages/terraform_module/create_package_service.rb
+++ b/app/services/packages/terraform_module/create_package_service.rb
@@ -21,17 +21,21 @@ module Packages
return ServiceResponse.error(message: 'Package version already exists.', reason: :forbidden)
end
- ApplicationRecord.transaction { create_terraform_module_package! }
+ package, package_file = ApplicationRecord.transaction { create_terraform_module_package! }
+
+ ::Packages::TerraformModule::ProcessPackageFileWorker.perform_async(package_file.id)
+
+ ServiceResponse.success(payload: { package: package })
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(message: e.message, reason: :unprocessable_entity)
end
private
def create_terraform_module_package!
package = create_package!(:terraform_module, name: name, version: params[:module_version])
-
- ::Packages::CreatePackageFileService.new(package, file_params).execute
-
- package
+ package_file = ::Packages::CreatePackageFileService.new(package, file_params).execute
+ [package, package_file]
end
def duplicates_not_allowed?
diff --git a/app/services/packages/terraform_module/metadata/create_service.rb b/app/services/packages/terraform_module/metadata/create_service.rb
new file mode 100644
index 00000000000..95f9935e995
--- /dev/null
+++ b/app/services/packages/terraform_module/metadata/create_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Packages
+ module TerraformModule
+ module Metadata
+ class CreateService
+ def initialize(package, metadata_hash)
+ @package = package
+ @metadata_hash = metadata_hash
+ end
+
+ def execute
+ metadata = ::Packages::TerraformModule::Metadatum.new(
+ package: package,
+ project: package.project,
+ fields: metadata_hash,
+ updated_at: Time.current,
+ created_at: Time.current
+ )
+
+ if metadata.valid?
+ ::Packages::TerraformModule::Metadatum.upsert(metadata.attributes, returning: false)
+
+ ServiceResponse.success(payload: { metadata: metadata })
+ else
+ Gitlab::ErrorTracking.track_exception(
+ ActiveRecord::RecordInvalid.new(metadata),
+ class: self.class.name,
+ package_id: package.id
+ )
+
+ ServiceResponse.error(message: metadata.errors.full_messages, reason: :bad_request)
+ end
+ end
+
+ private
+
+ attr_reader :package, :metadata_hash
+ end
+ end
+ end
+end
diff --git a/app/services/packages/terraform_module/metadata/extract_files_service.rb b/app/services/packages/terraform_module/metadata/extract_files_service.rb
new file mode 100644
index 00000000000..9471f80a114
--- /dev/null
+++ b/app/services/packages/terraform_module/metadata/extract_files_service.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+module Packages
+ module TerraformModule
+ module Metadata
+ class ExtractFilesService
+ MAX_FILE_SIZE = 3.megabytes
+ MAX_PROCESSED_FILES_COUNT = 400
+ README_FILES = %w[README.md README].freeze
+ SUBMODULES_REGEX = /\bmodules\b/
+ EXAMPLES_REGEX = /\bexamples\b/
+
+ ExtractionError = Class.new(StandardError)
+
+ def initialize(archive_file)
+ @archive_file = archive_file
+ @metadata = {}
+ end
+
+ def execute
+ parse_file
+ aggregate_metadata_into_root
+
+ ServiceResponse.success(payload: metadata)
+ end
+
+ private
+
+ attr_reader :archive_file, :metadata
+
+ def parse_file
+ Tempfile.create('extracted_terraform_module_metadata') do |tmp_file|
+ process_archive do |entry|
+ case entry
+ when Gem::Package::TarReader::Entry
+ process_tar_entry(tmp_file, entry)
+ when Zip::Entry
+ process_zip_entry(tmp_file, entry)
+ end
+ end
+ end
+ end
+
+ def process_archive
+ archive_file.each_with_index do |entry, index|
+ raise ExtractionError, 'Too many files to process' if index >= MAX_PROCESSED_FILES_COUNT
+
+ next unless entry.file? && entry.size <= MAX_FILE_SIZE
+
+ yield(entry)
+ end
+ end
+
+ def process_tar_entry(tmp_file, entry)
+ module_type = module_type_from_path(entry.full_name)
+ return unless module_type
+
+ File.open(tmp_file.path, 'w+') do |file|
+ IO.copy_stream(entry, file)
+ file.rewind
+ raise ExtractionError, 'metadata file has the wrong entry size' if File.size(file) > MAX_FILE_SIZE
+
+ parse_and_merge_metadata(file, entry.full_name, module_type)
+ end
+ end
+
+ def process_zip_entry(tmp_file, entry)
+ module_type = module_type_from_path(entry.name)
+ return unless module_type
+
+ entry.extract(tmp_file.path) { true }
+ File.open(tmp_file.path) do |file|
+ parse_and_merge_metadata(file, entry.name, module_type)
+ end
+ rescue Zip::EntrySizeError => e
+ raise ExtractionError, "metadata file has the wrong entry size: #{e.message}"
+ end
+
+ def module_type_from_path(path)
+ return unless File.extname(path) == '.tf' || File.basename(path).in?(README_FILES)
+
+ %i[root submodule example].detect do |type|
+ method(:"#{type}?").call(path)
+ end
+ end
+
+ def root?(path)
+ File.dirname(path).exclude?('/') || (File.dirname(path).count('/') == 1 && path.start_with?('./'))
+ end
+
+ def submodule?(path)
+ match_directory_pattern?(path, SUBMODULES_REGEX, 'modules')
+ end
+
+ def example?(path)
+ match_directory_pattern?(path, EXAMPLES_REGEX, 'examples')
+ end
+
+ def match_directory_pattern?(path, regex, suffix)
+ File.dirname(path).match?(regex) &&
+ !File.dirname(path).end_with?(suffix) &&
+ (File.dirname(path).count('/').in?([1, 2]) ||
+ (File.dirname(path).count('/') == 3 && path.start_with?('./')))
+ end
+
+ def parse_and_merge_metadata(file, entry_name, module_type)
+ parsed_content = ::Packages::TerraformModule::Metadata::ProcessFileService
+ .new(file, entry_name, module_type)
+ .execute
+ .payload
+
+ deep_merge_metadata(parsed_content)
+ end
+
+ def deep_merge_metadata(parsed_content)
+ return if parsed_content.empty?
+
+ metadata.deep_merge!(parsed_content) do |_, old, new|
+ [old, new].all?(Array) ? old.concat(new) : new
+ end
+ end
+
+ def aggregate_metadata_into_root
+ aggregate_submodules_and_examples(metadata[:submodules])
+ aggregate_submodules_and_examples(metadata[:examples], clear_data: true)
+ end
+
+ def aggregate_submodules_and_examples(data, clear_data: false)
+ return unless data
+
+ ensure_root_metadata_exists
+
+ data.each_value do |val|
+ metadata[:root][:resources] |= val[:resources] || []
+ metadata[:root][:dependencies][:modules] |= val.dig(:dependencies, :modules) || []
+ metadata[:root][:dependencies][:providers] |= val.dig(:dependencies, :providers) || []
+
+ val.except!(:resources, :dependencies) if clear_data
+ end
+ end
+
+ def ensure_root_metadata_exists
+ metadata[:root] ||= {}
+ metadata[:root][:resources] ||= []
+ metadata[:root][:dependencies] ||= {}
+ metadata[:root][:dependencies][:modules] ||= []
+ metadata[:root][:dependencies][:providers] ||= []
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/terraform_module/metadata/parse_hcl_file_service.rb b/app/services/packages/terraform_module/metadata/parse_hcl_file_service.rb
new file mode 100644
index 00000000000..a632538e9cd
--- /dev/null
+++ b/app/services/packages/terraform_module/metadata/parse_hcl_file_service.rb
@@ -0,0 +1,211 @@
+# frozen_string_literal: true
+
+module Packages
+ module TerraformModule
+ module Metadata
+ class ParseHclFileService
+ COMMENT_NOTATIONS = %w[// #].freeze
+ RESOURCE_REGEX = /resource\s+"([^"]+)"\s+"([^"]+)"/
+ QUOTED_STRING_BOUNDARIES_REGEX = /\A"|"\Z/
+ TO_KEEP_ARGUMENTS = {
+ variable: %w[name type description default].freeze,
+ output: %w[name description].freeze
+ }.freeze
+ DEPENDENCY_REGEXES = {
+ source: /source\s*=\s*"([^"]+)"/,
+ version: /version\s*=\s*"([^"]+)"/
+ }.freeze
+ ARGUMENTS = {
+ variable: ['default =', 'type =', 'description =', 'validation {', 'sensitive =', 'nullable ='].freeze,
+ output: ['description =', 'value ='].freeze
+ }.freeze
+ HEREDOC_PREFIX_REGEX = /^<<-?/
+ PROVIDER_REGEXES = {
+ v012: /(\w+-?\w*)\s*=\s*"([^"]+)"/,
+ v013: /\s*([\w-]+)\s*=(?=\s*{)/
+ }.freeze
+
+ def initialize(file)
+ @file = file
+ @resources = []
+ @modules = []
+ @providers = []
+ @variables = []
+ @outputs = []
+ @block_data = {}
+ @current_block = nil
+ @current_argument = nil
+ @heredoc_tag = nil
+ end
+
+ def execute
+ return ServiceResponse.success(payload: {}) if file.blank?
+
+ file.each do |line|
+ next if skip_line?(line)
+
+ process_line(line)
+ end
+
+ ServiceResponse.success(payload: { resources: resources, modules: modules, providers: providers,
+ variables: variables, outputs: outputs })
+ end
+
+ private
+
+ attr_reader :file, :resources, :modules, :providers, :variables, :outputs
+ attr_accessor :block_data, :current_block, :current_argument, :heredoc_tag
+
+ def skip_line?(line)
+ line.strip.empty? || line.strip.start_with?(*COMMENT_NOTATIONS)
+ end
+
+ def process_line(line)
+ case line
+ when /^resource/, /^module/, /^provider/, /^variable/, /^output/, /^terraform/
+ start_new_block(determine_block_type(line), line)
+ else
+ process_block_content(line)
+ end
+ end
+
+ def determine_block_type(line)
+ line.split.first.to_sym
+ end
+
+ def start_new_block(block_type, line)
+ self.current_block = block_type
+ resources << line.match(RESOURCE_REGEX).captures.join('.') if block_type == :resource
+ block_data['name'] = line.sub(block_type.to_s, '').split.first if %i[resource terraform].exclude?(block_type)
+ end
+
+ def process_block_content(line)
+ block_end?(line) ? finalize_current_block : process_block_arguments(line)
+ end
+
+ def block_end?(line)
+ cond = line.start_with?('}') || (current_argument == :required_providers && line.strip.start_with?('}'))
+ cond && line.sub('}', '').strip.empty? && !heredoc_tag
+ end
+
+ def finalize_current_block
+ return if block_data.empty?
+
+ clean_block_data
+ store_block_data
+ reset_block_state
+ end
+
+ def clean_block_data
+ self.block_data = block_data.compact_blank.each_value do |v|
+ v.gsub!(QUOTED_STRING_BOUNDARIES_REGEX, '')&.strip!
+ end
+ end
+
+ def store_block_data
+ case current_block
+ when :module
+ modules << block_data unless block_data['source']&.start_with?('.')
+ when :provider, :terraform
+ providers << block_data
+ when :variable
+ variables << block_data.slice(*TO_KEEP_ARGUMENTS[:variable])
+ when :output
+ outputs << block_data.slice(*TO_KEEP_ARGUMENTS[:output])
+ end
+ end
+
+ def reset_block_state
+ self.block_data = {}
+ self.current_block = nil
+ self.current_argument = nil
+ end
+
+ def process_block_arguments(line)
+ case current_block
+ when :module
+ process_module_arguments(line)
+ when :variable, :output
+ process_variable_or_output_arguments(line)
+ when :terraform
+ process_terraform_arguments(line)
+ end
+ end
+
+ def process_module_arguments(line)
+ DEPENDENCY_REGEXES.each do |key, regex|
+ block_data[key.to_s] = Regexp.last_match(1) if line =~ regex
+ end
+ end
+
+ def process_variable_or_output_arguments(line)
+ args = ARGUMENTS[current_block.to_sym]
+ return process_argument_declaration(line, args) if argument_declared?(line, args)
+ return process_heredoc if current_argument && heredoc_tag && line.squish == heredoc_tag
+
+ append_argument_value(line)
+ end
+
+ def process_argument_declaration(line, args)
+ self.current_argument, argument_value = extract_argument(line, args)
+ is_heredoc = current_argument == 'description' && argument_value.start_with?('<<')
+
+ if is_heredoc
+ self.heredoc_tag = argument_value.sub(HEREDOC_PREFIX_REGEX, '').strip
+ block_data[current_argument] = +''
+ else
+ block_data[current_argument] = argument_value
+ end
+ end
+
+ def process_heredoc
+ self.heredoc_tag = nil
+ self.current_argument = nil
+ end
+
+ def append_argument_value(line)
+ return unless block_data[current_argument]
+
+ block_data[current_argument] << " #{line.squish}"
+ end
+
+ def process_terraform_arguments(line)
+ if line.strip.start_with?('required_providers')
+ self.current_argument = :required_providers
+ elsif current_argument
+ process_provider_arguments(line)
+ end
+ end
+
+ def process_provider_arguments(line)
+ if line =~ PROVIDER_REGEXES[:v012] && current_argument == :required_providers
+ block_data.merge!('name' => Regexp.last_match(1), 'version' => Regexp.last_match(2))
+ finalize_provider_block
+ elsif line =~ PROVIDER_REGEXES[:v013]
+ finalize_provider_block if block_data.any?
+ block_data['name'] = Regexp.last_match(1)
+ self.current_argument = block_data['name'].to_sym
+ elsif line =~ DEPENDENCY_REGEXES[:source]
+ block_data['source'] = Regexp.last_match(1)
+ elsif line =~ DEPENDENCY_REGEXES[:version]
+ block_data['version'] = Regexp.last_match(1)
+ end
+ end
+
+ def finalize_provider_block
+ providers << block_data
+ self.block_data = {}
+ end
+
+ def argument_declared?(line, args)
+ args.any? { |arg| line.squish.start_with?(arg) && current_argument != arg.split(' ').first }
+ end
+
+ def extract_argument(line, args)
+ arg = args.find { |arg| line.squish.start_with?(arg) }
+ [arg.split(' ').first, line.squish.sub(arg, '').strip]
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/terraform_module/metadata/process_file_service.rb b/app/services/packages/terraform_module/metadata/process_file_service.rb
new file mode 100644
index 00000000000..8524ca7c436
--- /dev/null
+++ b/app/services/packages/terraform_module/metadata/process_file_service.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Packages
+ module TerraformModule
+ module Metadata
+ class ProcessFileService
+ README_FILES = %w[README.md README].freeze
+
+ def initialize(file, path, module_type)
+ @file = file
+ @path = path
+ @module_type = module_type
+ end
+
+ def execute
+ result = README_FILES.include?(file_name) ? parse_readme : parse_tf_file
+
+ ServiceResponse.success(payload: result)
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(
+ e,
+ class: self.class.name
+ )
+ end
+
+ private
+
+ attr_reader :file, :path, :module_type
+
+ def file_name
+ File.basename(path)
+ end
+
+ def module_name
+ File.basename(dirname)
+ end
+
+ def dirname
+ File.dirname(path)
+ end
+
+ def parse_readme
+ build_module_type_hash(:readme, file.read)
+ end
+
+ def parse_tf_file
+ parsed_hcl = ::Packages::TerraformModule::Metadata::ParseHclFileService.new(file).execute.payload
+
+ merge_module_type_hashes(parsed_hcl)
+ end
+
+ def merge_module_type_hashes(parsed_hcl)
+ build_module_type_hash(:resources, parsed_hcl[:resources])
+ .deep_merge(build_module_type_hash(:dependencies,
+ { providers: parsed_hcl[:providers], modules: parsed_hcl[:modules] }))
+ .deep_merge(build_module_type_hash(:inputs, parsed_hcl[:variables]))
+ .deep_merge(build_module_type_hash(:outputs, parsed_hcl[:outputs]))
+ end
+
+ def build_module_type_hash(key, content)
+ case module_type
+ when :root
+ { root: { key => content } }
+ when :submodule
+ { submodules: { module_name => { key => content } } }
+ when :example
+ { examples: { module_name => { key => content } } }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/terraform_module/process_package_file_service.rb b/app/services/packages/terraform_module/process_package_file_service.rb
new file mode 100644
index 00000000000..ae4b1f02c90
--- /dev/null
+++ b/app/services/packages/terraform_module/process_package_file_service.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module Packages
+ module TerraformModule
+ class ProcessPackageFileService
+ include Gitlab::Utils::StrongMemoize
+
+ ExtractionError = Class.new(StandardError)
+
+ def initialize(package_file)
+ @package_file = package_file
+ end
+
+ def execute
+ raise ExtractionError, 'invalid package file' unless valid_package_file?
+
+ result = nil
+
+ with_archive_file do |archive_file|
+ result = ::Packages::TerraformModule::Metadata::ExtractFilesService.new(archive_file).execute
+ end
+
+ if result&.success?
+ ::Packages::TerraformModule::Metadata::CreateService.new(package_file.package, result.payload).execute
+ end
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :package_file
+
+ def valid_package_file?
+ package_file && package_file.package&.terraform_module? && !package_file.file.empty_size?
+ end
+
+ def with_archive_file(&block)
+ package_file.file.use_open_file(unlink_early: false) do |open_file|
+ success = process_as_gzip(open_file, &block)
+ process_as_zip(open_file, &block) unless success
+ end
+ end
+
+ def process_as_gzip(open_file, &block)
+ Zlib::GzipReader.open(open_file.file_path) do |gzip_file|
+ Gem::Package::TarReader.new(gzip_file, &block)
+ end
+ true
+ rescue Zlib::GzipFile::Error => e
+ return false if e.message == 'not in gzip format'
+
+ raise ExtractionError, e.message
+ end
+
+ def process_as_zip(open_file, &block)
+ Zip::File.open(open_file.file_path, &block) # rubocop:disable Performance/Rubyzip -- Zip::InputStream has some limitations
+ rescue Zip::Error => e
+ raise ExtractionError, e.message
+ end
+ end
+ end
+end
diff --git a/app/services/pages/update_service.rb b/app/services/pages/update_service.rb
new file mode 100644
index 00000000000..f27b1c29442
--- /dev/null
+++ b/app/services/pages/update_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Pages
+ class UpdateService < BaseService
+ include Gitlab::Allowable
+
+ def execute
+ unless can_update_page_settings?
+ return ServiceResponse.error(message: _('The current user is not authorized to update the page settings'),
+ reason: :forbidden)
+ end
+
+ Project.transaction do
+ update_pages_unique_domain_enabled!
+ update_pages_https_only!
+ end
+
+ ServiceResponse.success(payload: { project: project })
+ end
+
+ private
+
+ def update_pages_unique_domain_enabled!
+ return unless params.key?(:pages_unique_domain_enabled)
+
+ project.project_setting.update!(pages_unique_domain_enabled: params[:pages_unique_domain_enabled])
+ end
+
+ def update_pages_https_only!
+ return unless params.key?(:pages_https_only)
+
+ project.update!(pages_https_only: params[:pages_https_only])
+ end
+
+ def can_update_page_settings?
+ current_user&.can_read_all_resources? && can?(current_user, :update_pages, project)
+ end
+ end
+end
diff --git a/app/services/pages_domains/create_acme_order_service.rb b/app/services/pages_domains/create_acme_order_service.rb
index c600f497fa5..ebda46ec6c4 100644
--- a/app/services/pages_domains/create_acme_order_service.rb
+++ b/app/services/pages_domains/create_acme_order_service.rb
@@ -25,7 +25,10 @@ module PagesDomains
)
challenge.request_validation
- saved_order
+
+ ServiceResponse.success(payload: { acme_order: saved_order })
+ rescue Acme::Client::Error => e
+ ServiceResponse.error(message: e.message, payload: { acme_order: saved_order })
end
end
end
diff --git a/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb b/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
index 1733021cbb5..46dc463fa69 100644
--- a/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
+++ b/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
@@ -21,7 +21,12 @@ module PagesDomains
acme_order = pages_domain.acme_orders.first
unless acme_order
- ::PagesDomains::CreateAcmeOrderService.new(pages_domain).execute
+ service_response = ::PagesDomains::CreateAcmeOrderService.new(pages_domain).execute
+ if service_response.error?
+ save_order_error(service_response[:acme_order], service_response.message)
+ return
+ end
+
PagesDomainSslRenewalWorker.perform_in(CHALLENGE_PROCESSING_DELAY, pages_domain.id)
return
end
@@ -37,7 +42,7 @@ module PagesDomains
save_certificate(acme_order.private_key, api_order)
acme_order.destroy!
when 'invalid'
- save_order_error(acme_order, api_order)
+ save_order_error(acme_order, get_challenge_error(api_order))
end
end
@@ -48,27 +53,32 @@ module PagesDomains
pages_domain.update!(gitlab_provided_key: private_key, gitlab_provided_certificate: certificate)
end
- def save_order_error(acme_order, api_order)
- log_error(api_order)
+ def save_order_error(acme_order, acme_error_message)
+ log_error(acme_error_message)
pages_domain.assign_attributes(auto_ssl_failed: true)
pages_domain.save!(validate: false)
- acme_order.destroy!
+ acme_order&.destroy!
NotificationService.new.pages_domain_auto_ssl_failed(pages_domain)
end
- def log_error(api_order)
+ def log_error(acme_error_message)
Gitlab::AppLogger.error(
message: "Failed to obtain Let's Encrypt certificate",
- acme_error: api_order.challenge_error,
+ acme_error: acme_error_message,
project_id: pages_domain.project_id,
pages_domain: pages_domain.domain
)
+ end
+
+ def get_challenge_error(api_order)
+ api_order.challenge_error
rescue StandardError => e
# getting authorizations is an additional network request which can raise errors
Gitlab::ErrorTracking.track_exception(e)
+ e.message
end
end
end
diff --git a/app/services/personal_access_tokens/create_service.rb b/app/services/personal_access_tokens/create_service.rb
index 095cfadf02c..9fd9c135640 100644
--- a/app/services/personal_access_tokens/create_service.rb
+++ b/app/services/personal_access_tokens/create_service.rb
@@ -2,12 +2,13 @@
module PersonalAccessTokens
class CreateService < BaseService
- def initialize(current_user:, target_user:, params: {}, concatenate_errors: true)
+ def initialize(current_user:, target_user:, organization_id:, params: {}, concatenate_errors: true)
@current_user = current_user
@target_user = target_user
@params = params.dup
@ip_address = @params.delete(:ip_address)
@concatenate_errors = concatenate_errors
+ @organization_id = organization_id
end
def execute
@@ -29,19 +30,24 @@ module PersonalAccessTokens
private
- attr_reader :target_user, :ip_address
+ attr_reader :target_user, :ip_address, :organization_id
def personal_access_token_params
{
name: params[:name],
impersonation: params[:impersonation] || false,
scopes: params[:scopes],
- expires_at: pat_expiration
+ expires_at: pat_expiration,
+ organization_id: organization_id
}
end
def pat_expiration
- params[:expires_at].presence || max_expiry_date
+ return params[:expires_at] if params[:expires_at].present?
+
+ return max_expiry_date if Gitlab::CurrentSettings.require_personal_access_token_expiry?
+
+ nil
end
def max_expiry_date
diff --git a/app/services/personal_access_tokens/last_used_service.rb b/app/services/personal_access_tokens/last_used_service.rb
index 3b075364458..4e402a44dec 100644
--- a/app/services/personal_access_tokens/last_used_service.rb
+++ b/app/services/personal_access_tokens/last_used_service.rb
@@ -2,6 +2,10 @@
module PersonalAccessTokens
class LastUsedService
+ include ExclusiveLeaseGuard
+
+ LEASE_TIMEOUT = 60.seconds.to_i
+
def initialize(personal_access_token)
@personal_access_token = personal_access_token
end
@@ -12,11 +16,25 @@ module PersonalAccessTokens
# We _only_ want to update last_used_at and not also updated_at (which
# would be updated when using #touch).
- @personal_access_token.update_column(:last_used_at, Time.zone.now) if update?
+ return unless update?
+
+ try_obtain_lease do
+ ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
+ @personal_access_token.update_column(:last_used_at, Time.zone.now)
+ end
+ end
end
private
+ def lease_timeout
+ LEASE_TIMEOUT
+ end
+
+ def lease_key
+ @lease_key ||= "pat:last_used_update_lock:#{@personal_access_token.id}"
+ end
+
def update?
return false if ::Gitlab::Database.read_only?
diff --git a/app/services/personal_access_tokens/revoke_service.rb b/app/services/personal_access_tokens/revoke_service.rb
index 237c95bc456..897a2b12caa 100644
--- a/app/services/personal_access_tokens/revoke_service.rb
+++ b/app/services/personal_access_tokens/revoke_service.rb
@@ -2,9 +2,9 @@
module PersonalAccessTokens
class RevokeService < BaseService
- attr_reader :token, :current_user, :group
+ attr_reader :token, :current_user, :group, :source
- VALID_SOURCES = %i[self secret_detection].freeze
+ VALID_SOURCES = %i[self secret_detection group_token_revocation_service].freeze
def initialize(current_user = nil, token: nil, group: nil, source: nil)
@current_user = current_user
@@ -22,7 +22,7 @@ module PersonalAccessTokens
if token.revoke!
log_event
- notification_service.access_token_revoked(token.user, token.name, @source)
+ notification_service.access_token_revoked(token.user, token.name, source)
ServiceResponse.success(message: success_message)
else
ServiceResponse.error(message: error_message)
@@ -40,10 +40,10 @@ module PersonalAccessTokens
end
def revocation_permitted?
- case @source
+ case source
when :self
Ability.allowed?(current_user, :revoke_token, token)
- when :secret_detection
+ when :secret_detection, :group_token_revocation_service
true
else
false
@@ -54,10 +54,16 @@ module PersonalAccessTokens
Gitlab::AppLogger.info(
class: self.class.name,
message: "PAT Revoked",
- revoked_by: current_user&.username || @source,
+ revoked_by: revoked_by,
revoked_for: token.user.username,
token_id: token.id)
end
+
+ def revoked_by
+ return current_user&.username if source == :self
+
+ source
+ end
end
end
diff --git a/app/services/personal_access_tokens/rotate_service.rb b/app/services/personal_access_tokens/rotate_service.rb
index e381d86fbed..65098b06ce1 100644
--- a/app/services/personal_access_tokens/rotate_service.rb
+++ b/app/services/personal_access_tokens/rotate_service.rb
@@ -4,12 +4,15 @@ module PersonalAccessTokens
class RotateService
EXPIRATION_PERIOD = 1.week
- def initialize(current_user, token)
+ def initialize(current_user, token, resource = nil, params = {})
@current_user = current_user
@token = token
+ @resource = resource
+ @params = params.dup
+ @target_user = token.user
end
- def execute(params = {})
+ def execute
return error_response(_('token already revoked')) if token.revoked?
response = ServiceResponse.success
@@ -20,7 +23,7 @@ module PersonalAccessTokens
raise ActiveRecord::Rollback
end
- response = create_access_token(params)
+ response = create_access_token
raise ActiveRecord::Rollback unless response.success?
end
@@ -30,22 +33,52 @@ module PersonalAccessTokens
private
- attr_reader :current_user, :token
+ attr_reader :current_user, :token, :resource, :params, :target_user
- def create_access_token(params)
- target_user = token.user
+ def create_access_token
+ unless valid_access_level?
+ return error_response(_('Not eligible to rotate token with access level higher than the user'))
+ end
+
+ new_token = target_user.personal_access_tokens.create(create_token_params)
- new_token = target_user.personal_access_tokens.create(create_token_params(token, params))
+ if new_token.persisted?
+ update_bot_membership(target_user, new_token.expires_at)
- return success_response(new_token) if new_token.persisted?
+ return success_response(new_token)
+ end
error_response(new_token.errors.full_messages.to_sentence)
end
- def expires_at(params)
- return params[:expires_at] if params[:expires_at]
+ def valid_access_level?
+ true
+ end
+
+ def update_bot_membership(target_user, expires_at)
+ return if target_user.human?
+
+ if resource && Feature.enabled?(:retain_resource_access_token_user_after_revoke, resource.root_ancestor)
+ # Tokens created before the feature flag is enabled will have an
+ # expiring membership. We must explicitly set it to nil to
+ # - stop the membership from expiring on its old expiry date
+ # - retain the membership when this token does eventually expire
+ # or get revoked.
+ #
+ # Applies only to resource (group and project) access tokens
+ # not personal access tokens.
+ expires_at = nil
+ end
+
+ target_user.members.update(expires_at: expires_at)
+ end
+
+ def expires_at
+ return params[:expires_at] if params[:expires_at].present?
- params[:expires_at] || EXPIRATION_PERIOD.from_now.to_date
+ return default_expiration_date if Gitlab::CurrentSettings.require_personal_access_token_expiry?
+
+ nil
end
def success_response(new_token)
@@ -56,12 +89,17 @@ module PersonalAccessTokens
ServiceResponse.error(message: message)
end
- def create_token_params(token, params)
+ def create_token_params
{ name: token.name,
previous_personal_access_token_id: token.id,
impersonation: token.impersonation,
scopes: token.scopes,
- expires_at: expires_at(params) }
+ expires_at: expires_at,
+ organization: token.organization }
+ end
+
+ def default_expiration_date
+ EXPIRATION_PERIOD.from_now.to_date
end
end
end
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index 7cf1855988e..16a09e3d616 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -27,7 +27,7 @@ class PostReceiveService
repository&.expire_branches_cache if mr_options&.fetch(:create, false)
PostReceive.perform_async(params[:gl_repository], params[:identifier],
- params[:changes], push_options.as_json)
+ params[:changes], push_options.as_json)
if mr_options.present?
message = process_mr_push_options(mr_options, params[:changes])
@@ -102,6 +102,10 @@ class PostReceiveService
def record_onboarding_progress
return unless project
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/456533 we should remove from here and place this
+ # when repository is created instead.
+ # In order to do that, we need to check for all onboarded namespaces where this action is not
+ # completed and then see if any project underneath them has a repository.
Onboarding::ProgressService.new(project.namespace).execute(action: :git_write)
end
end
diff --git a/app/services/preview_markdown_service.rb b/app/services/preview_markdown_service.rb
index 31f79bc7164..a8247fdb908 100644
--- a/app/services/preview_markdown_service.rb
+++ b/app/services/preview_markdown_service.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class PreviewMarkdownService < BaseService
+class PreviewMarkdownService < BaseContainerService
def execute
text, commands = explain_quick_actions(params[:text])
users = find_user_references(text)
@@ -23,7 +23,7 @@ class PreviewMarkdownService < BaseService
def explain_quick_actions(text)
return text, [] unless quick_action_types.include?(target_type)
- quick_actions_service = QuickActions::InterpretService.new(project, current_user)
+ quick_actions_service = QuickActions::InterpretService.new(container: container, current_user: current_user)
quick_actions_service.explain(text, find_commands_target, keep_actions: params[:render_quick_actions])
end
@@ -37,14 +37,14 @@ class PreviewMarkdownService < BaseService
return [] unless preview_sugestions?
position = Gitlab::Diff::Position.new(new_path: params[:file_path],
- new_line: params[:line].to_i,
- base_sha: params[:base_sha],
- head_sha: params[:head_sha],
- start_sha: params[:start_sha])
+ new_line: params[:line].to_i,
+ base_sha: params[:base_sha],
+ head_sha: params[:head_sha],
+ start_sha: params[:start_sha])
Gitlab::Diff::SuggestionsParser.parse(text, position: position,
- project: project,
- supports_suggestion: params[:preview_suggestions])
+ project: project,
+ supports_suggestion: params[:preview_suggestions])
end
def preview_sugestions?
@@ -55,7 +55,7 @@ class PreviewMarkdownService < BaseService
def find_commands_target
QuickActions::TargetService
- .new(container: project, current_user: current_user, params: { group: params[:group] })
+ .new(container: container, current_user: current_user)
.execute(target_type, target_id)
end
@@ -68,4 +68,4 @@ class PreviewMarkdownService < BaseService
end
end
-PreviewMarkdownService.prepend_mod_with('PreviewMarkdownService')
+PreviewMarkdownService.prepend_mod
diff --git a/app/services/project_access_tokens/rotate_service.rb b/app/services/project_access_tokens/rotate_service.rb
index 63d8d2a82cc..c8a4454faf3 100644
--- a/app/services/project_access_tokens/rotate_service.rb
+++ b/app/services/project_access_tokens/rotate_service.rb
@@ -4,45 +4,9 @@ module ProjectAccessTokens
class RotateService < ::PersonalAccessTokens::RotateService
extend ::Gitlab::Utils::Override
- def initialize(current_user, token, resource = nil)
- @current_user = current_user
- @token = token
- @project = resource
- end
-
- def execute(params = {})
- super
- end
-
- attr_reader :project
-
- private
-
- override :create_access_token
- def create_access_token(params)
- target_user = token.user
-
- unless valid_access_level?
- return error_response(
- _("Not eligible to rotate token with access level higher than the user")
- )
- end
-
- new_token = target_user.personal_access_tokens.create(create_token_params(token, params))
-
- if new_token.persisted?
- update_bot_membership(target_user, new_token.expires_at)
-
- return success_response(new_token)
- end
-
- error_response(new_token.errors.full_messages.to_sentence)
- end
-
- def update_bot_membership(target_user, expires_at)
- target_user.members.update(expires_at: expires_at)
- end
+ alias_method :project, :resource
+ override :valid_access_level?
def valid_access_level?
return true if current_user.can_admin_all_resources?
return false unless current_user.can?(:manage_resource_access_tokens, project)
@@ -50,9 +14,7 @@ module ProjectAccessTokens
token_access_level = project.team.max_member_access(token.user.id).to_i
current_user_access_level = project.team.max_member_access(current_user.id).to_i
- return true if token_access_level.to_i <= current_user_access_level
-
- false
+ token_access_level <= current_user_access_level
end
end
end
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index 5cd30689faf..3f079a14b65 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -97,7 +97,6 @@ module Projects
def update_repository_configuration
project.reload_repository!
- project.set_full_path
project.track_project_repository
end
diff --git a/app/services/projects/autocomplete_service.rb b/app/services/projects/autocomplete_service.rb
index 11437ad90fc..1db342ccb26 100644
--- a/app/services/projects/autocomplete_service.rb
+++ b/app/services/projects/autocomplete_service.rb
@@ -3,8 +3,13 @@
module Projects
class AutocompleteService < BaseService
include LabelsAsHash
+ include Routing::WikiHelper
+
def issues
- IssuesFinder.new(current_user, project_id: project.id, state: 'opened').execute.select([:iid, :title])
+ IssuesFinder.new(current_user, project_id: project.id, state: 'opened')
+ .execute
+ .with_work_item_type
+ .select([:iid, :title, 'work_item_types.icon_name'])
end
def milestones
@@ -26,13 +31,23 @@ module Projects
def commands(noteable)
return [] unless noteable && current_user
- QuickActions::InterpretService.new(project, current_user).available_commands(noteable)
+ QuickActions::InterpretService.new(container: project, current_user: current_user).available_commands(noteable)
end
def snippets
SnippetsFinder.new(current_user, project: project).execute.select([:id, :title])
end
+ def wikis
+ wiki = Wiki.for_container(project, current_user)
+ return [] unless can?(current_user, :read_wiki, wiki.container)
+
+ wiki
+ .list_pages(limit: 5000, load_content: true, size_limit: 512)
+ .reject { |page| page.slug.start_with?('templates/') }
+ .map { |page| { path: wiki_page_path(page.wiki, page), slug: page.slug, title: page.human_title } }
+ end
+
def contacts(target)
available_contacts = Crm::ContactsFinder.new(current_user, group: project.group).execute
.select([:id, :email, :first_name, :last_name, :state])
diff --git a/app/services/projects/cleanup_service.rb b/app/services/projects/cleanup_service.rb
index 75be3425029..d0f6e5331ea 100644
--- a/app/services/projects/cleanup_service.rb
+++ b/app/services/projects/cleanup_service.rb
@@ -20,7 +20,7 @@ module Projects
RepositoryCleanupWorker.perform_async(project.id, current_user.id)
end
rescue Project::RepositoryReadOnlyError => err
- { status: :error, message: (_('Failed to make repository read-only. %{reason}') % { reason: err.message }) }
+ { status: :error, message: (_('Failed to make repository read-only: %{reason}') % { reason: err.message }) }
end
def cleanup_after(project)
diff --git a/app/services/projects/container_repository/delete_tags_service.rb b/app/services/projects/container_repository/delete_tags_service.rb
index 9378bb31360..23814edf19b 100644
--- a/app/services/projects/container_repository/delete_tags_service.rb
+++ b/app/services/projects/container_repository/delete_tags_service.rb
@@ -13,8 +13,7 @@ module Projects
end
@tag_names = params[:tags]
- return error('not tags specified') if @tag_names.blank?
- return error('repository importing') if cancel_while_importing?
+ return error('no tags specified') if @tag_names.blank?
delete_tags
end
@@ -51,19 +50,11 @@ module Projects
end
end
- def cancel_while_importing?
- return true if @container_repository.importing?
-
- if container_expiration_policy?
- return @container_repository.pre_importing? || @container_repository.pre_import_done?
- end
-
- false
- end
-
def container_expiration_policy?
params[:container_expiration_policy].present?
end
end
end
end
+
+Projects::ContainerRepository::DeleteTagsService.prepend_mod_with('Projects::ContainerRepository::DeleteTagsService')
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 6f29c72e25a..844c4e8574d 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -6,6 +6,7 @@ module Projects
ImportSourceDisabledError = Class.new(StandardError)
INTERNAL_IMPORT_SOURCES = %w[gitlab_custom_project_template gitlab_project_migration].freeze
+ README_FILE = 'README.md'
def initialize(user, params)
@current_user = user
@@ -18,6 +19,7 @@ module Projects
@default_branch = @params.delete(:default_branch)
@readme_template = @params.delete(:readme_template)
@repository_object_format = @params.delete(:repository_object_format)
+ @import_export_upload = @params.delete(:import_export_upload)
build_topics
end
@@ -35,6 +37,10 @@ module Projects
@project = Project.new(params.merge(creator: current_user))
+ if @import_export_upload
+ @import_export_upload.project = project
+ end
+
validate_import_source_enabled!
@project.visibility_level = @project.group.visibility_level unless @project.visibility_level_allowed_by_group?
@@ -51,9 +57,8 @@ module Projects
set_project_name_from_path
- # get namespace id
- namespace_id = params[:namespace_id] || current_user.namespace_id
- @project.namespace_id = namespace_id.to_i
+ @project.namespace_id = (params[:namespace_id] || current_user.namespace_id).to_i
+ @project.organization_id = (params[:organization_id] || @project.namespace.organization_id).to_i
@project.check_personal_projects_limit
return @project if @project.errors.any?
@@ -102,6 +107,7 @@ module Projects
def validate_import_permissions
return unless @project.import?
+ return if @project.gitlab_project_import?
return if current_user.can?(:import_projects, parent_namespace)
@project.errors.add(:user, 'is not allowed to import projects')
@@ -112,11 +118,6 @@ module Projects
if @project.import?
Gitlab::Tracking.event(self.class.name, 'import_project', user: current_user)
- else
- # Skip writing the config for project imports/forks because it
- # will always fail since the Git directory doesn't exist until
- # a background job creates it (see Project#add_import_job).
- @project.set_full_path
end
unless @project.gitlab_project_import?
@@ -202,7 +203,7 @@ module Projects
commit_attrs = {
branch_name: default_branch,
commit_message: 'Initial commit',
- file_path: 'README.md',
+ file_path: README_FILE,
file_content: readme_content
}
@@ -244,8 +245,9 @@ module Projects
Namespaces::ProjectNamespace.create_from_project!(@project) if @project.valid?
if @project.saved?
- Integration.create_from_active_default_integrations(@project, :project_id)
+ Integration.create_from_default_integrations(@project, :project_id)
+ @import_export_upload.save if @import_export_upload
@project.create_labels unless @project.gitlab_project_import?
next if @project.import?
@@ -313,9 +315,12 @@ module Projects
return if INTERNAL_IMPORT_SOURCES.include?(import_type)
# Skip validation when creating project from a built in template
- return if @params[:import_export_upload].present? && import_type == 'gitlab_project'
+ return if @import_export_upload.present? && import_type == 'gitlab_project'
unless ::Gitlab::CurrentSettings.import_sources&.include?(import_type)
+ return if import_type == 'github' && Feature.enabled?(:override_github_disabled, current_user, type: :ops)
+ return if import_type == 'bitbucket_server' && Feature.enabled?(:override_bitbucket_server_disabled, current_user, type: :ops)
+
raise ImportSourceDisabledError, "#{import_type} import source is disabled"
end
end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 033d90abc7a..6b91e4d85e8 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -172,6 +172,10 @@ module Projects
# called multiple times, and it doesn't destroy any database records.
project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
project.destroy!
+ rescue ActiveRecord::RecordNotDestroyed => e
+ raise_error(
+ e.record.errors.full_messages.to_sentence
+ )
end
def log_destroy_event
diff --git a/app/services/projects/detect_repository_languages_service.rb b/app/services/projects/detect_repository_languages_service.rb
index 9db0b71d106..194d0b4c640 100644
--- a/app/services/projects/detect_repository_languages_service.rb
+++ b/app/services/projects/detect_repository_languages_service.rb
@@ -66,3 +66,5 @@ module Projects
end
end
end
+
+Projects::DetectRepositoryLanguagesService.prepend_mod_with('Projects::DetectRepositoryLanguagesService')
diff --git a/app/services/projects/fetch_statistics_increment_service.rb b/app/services/projects/fetch_statistics_increment_service.rb
index 3354a074d1e..d7d334dd035 100644
--- a/app/services/projects/fetch_statistics_increment_service.rb
+++ b/app/services/projects/fetch_statistics_increment_service.rb
@@ -9,13 +9,19 @@ module Projects
end
def execute
- increment_fetch_count_sql = <<~SQL
- INSERT INTO #{table_name} (project_id, date, fetch_count)
- VALUES (#{project.id}, '#{Date.today}', 1)
- ON CONFLICT (project_id, date) DO UPDATE SET fetch_count = #{table_name}.fetch_count + 1
- SQL
+ if Feature.enabled?(:project_daily_statistic_counter_attribute_fetch, project)
+ ProjectDailyStatistic
+ .find_or_create_project_daily_statistic(project.id, Date.today)
+ .increment_fetch_count(1)
+ else
+ increment_fetch_count_sql = <<~SQL
+ INSERT INTO #{table_name} (project_id, date, fetch_count)
+ VALUES (#{project.id}, '#{Date.today}', 1)
+ ON CONFLICT (project_id, date) DO UPDATE SET fetch_count = #{table_name}.fetch_count + 1
+ SQL
- ProjectDailyStatistic.connection.execute(increment_fetch_count_sql)
+ ProjectDailyStatistic.connection.execute(increment_fetch_count_sql)
+ end
end
private
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index 168420b17bf..23028e5a0c8 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -3,14 +3,11 @@
module Projects
class ForkService < BaseService
def execute(fork_to_project = nil)
- forked_project = fork_to_project ? link_existing_project(fork_to_project) : fork_new_project
+ response = fork_to_project ? link_existing_project(fork_to_project) : fork_new_project
- if forked_project&.saved?
- refresh_forks_count
- stream_audit_event(forked_project)
- end
+ after_fork(response[:project]) if response.success?
- forked_project
+ response
end
def valid_fork_targets(options = {})
@@ -29,23 +26,43 @@ module Projects
private
+ def after_fork(project)
+ return unless project&.saved?
+
+ refresh_forks_count
+ stream_audit_event(project)
+ end
+
def link_existing_project(fork_to_project)
- return if fork_to_project.forked?
+ if fork_to_project.forked?
+ return ServiceResponse.error(message: _('Project already forked'), reason: :already_forked)
+ end
+
+ if fork_to_project == @project
+ return ServiceResponse.error(message: _('Target project cannot be equal to source project'), reason: :self_fork)
+ end
build_fork_network_member(fork_to_project)
- fork_to_project if link_fork_network(fork_to_project)
+ if link_fork_network(fork_to_project)
+ ServiceResponse.success(payload: { project: fork_to_project })
+ else
+ ServiceResponse.error(message: fork_to_project.errors.full_messages)
+ end
end
def fork_new_project
new_project = CreateService.new(current_user, new_fork_params).execute
- return new_project unless new_project.persisted?
+
+ unless new_project.persisted?
+ return ServiceResponse.error(message: new_project.errors.full_messages)
+ end
new_project.project_feature.update!(
@project.project_feature.slice(ProjectFeature::FEATURES.map { |f| "#{f}_access_level" })
)
- new_project
+ ServiceResponse.success(payload: { project: new_project })
end
def new_fork_params
@@ -67,7 +84,7 @@ module Projects
# been instantiated to avoid ActiveRecord trying to create it when
# initializing the project, as that would cause a foreign key constraint
# exception.
- relations_block: -> (project) { build_fork_network_member(project) },
+ relations_block: ->(project) { build_fork_network_member(project) },
skip_disk_validation: skip_disk_validation,
external_authorization_classification_label: @project.external_authorization_classification_label,
suggestion_commit_message: @project.suggestion_commit_message,
diff --git a/app/services/projects/gitlab_projects_import_service.rb b/app/services/projects/gitlab_projects_import_service.rb
index 63a41d172ea..ad5882b36ad 100644
--- a/app/services/projects/gitlab_projects_import_service.rb
+++ b/app/services/projects/gitlab_projects_import_service.rb
@@ -17,7 +17,7 @@ module Projects
end
def execute
- prepare_template_environment(template_file)
+ prepare_template_environment(template_file, current_user)
prepare_import_params
@@ -72,6 +72,8 @@ module Projects
params[:import_type] = 'gitlab_project'
end
+ params[:organization_id] = current_namespace.organization_id
+
params[:import_data] = { data: data } if data.present?
end
end
diff --git a/app/services/projects/group_links/create_service.rb b/app/services/projects/group_links/create_service.rb
index cc7478540d2..b51bdc94648 100644
--- a/app/services/projects/group_links/create_service.rb
+++ b/app/services/projects/group_links/create_service.rb
@@ -57,6 +57,10 @@ module Projects
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
end
+
+ def remove_unallowed_params
+ # no-op
+ end
end
end
end
diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb
index 40c4fd5376c..a61ea459533 100644
--- a/app/services/projects/hashed_storage/migrate_attachments_service.rb
+++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb
@@ -63,5 +63,3 @@ module Projects
end
end
end
-
-Projects::HashedStorage::MigrateAttachmentsService.prepend_mod_with('Projects::HashedStorage::MigrateAttachmentsService')
diff --git a/app/services/projects/import_export/export_service.rb b/app/services/projects/import_export/export_service.rb
index a1f55f547a1..2261fd14f1e 100644
--- a/app/services/projects/import_export/export_service.rb
+++ b/app/services/projects/import_export/export_service.rb
@@ -70,7 +70,7 @@ module Projects
end
def save_export_archive
- @export_saver ||= Gitlab::ImportExport::Saver.save(exportable: project, shared: shared)
+ @export_saver ||= Gitlab::ImportExport::Saver.save(exportable: project, shared: shared, user: current_user)
end
def version_saver
diff --git a/app/services/projects/import_export/parallel_export_service.rb b/app/services/projects/import_export/parallel_export_service.rb
index 7e4c0279b06..6f5cf1f12d3 100644
--- a/app/services/projects/import_export/parallel_export_service.rb
+++ b/app/services/projects/import_export/parallel_export_service.rb
@@ -50,7 +50,7 @@ module Projects
end
def save_export_archive
- Gitlab::ImportExport::Saver.save(exportable: project, shared: shared)
+ Gitlab::ImportExport::Saver.save(exportable: project, shared: shared, user: current_user)
end
def version_saver
diff --git a/app/services/projects/import_export/prune_expired_export_jobs_service.rb b/app/services/projects/import_export/prune_expired_export_jobs_service.rb
new file mode 100644
index 00000000000..1a9daf1d12c
--- /dev/null
+++ b/app/services/projects/import_export/prune_expired_export_jobs_service.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Projects
+ module ImportExport
+ class PruneExpiredExportJobsService
+ BATCH_SIZE = 1000
+
+ class << self
+ def execute
+ delete_uploads_for_expired_jobs
+ delete_expired_jobs
+ end
+
+ private
+
+ def delete_expired_jobs
+ loop do
+ deleted_count = ProjectExportJob.prunable.limit(BATCH_SIZE).delete_all
+ break if deleted_count == 0
+ end
+ end
+
+ def delete_uploads_for_expired_jobs
+ prunable_scope = ProjectExportJob.prunable.select(:id, :updated_at)
+ iterator = Gitlab::Pagination::Keyset::Iterator.new(scope: prunable_scope.order_by_updated_at)
+
+ iterator.each_batch(of: BATCH_SIZE) do |prunable_job_batch_scope|
+ prunable_job_batch = prunable_job_batch_scope.to_a
+
+ loop do
+ prunable_uploads = uploads_for_expired_jobs(prunable_job_batch)
+ prunable_upload_keys = prunable_uploads.begin_fast_destroy
+
+ deleted_count = prunable_uploads.delete_all
+
+ break if deleted_count == 0
+
+ Upload.finalize_fast_destroy(prunable_upload_keys)
+ end
+ end
+ end
+
+ def uploads_for_expired_jobs(prunable_jobs)
+ prunable_export_uploads = Projects::ImportExport::RelationExportUpload
+ .for_project_export_jobs(prunable_jobs.map(&:id))
+
+ Upload.for_model_type_and_id(
+ Projects::ImportExport::RelationExportUpload,
+ prunable_export_uploads.select(:id)
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/projects/import_export/relation_export_service.rb b/app/services/projects/import_export/relation_export_service.rb
index a38d66e5ff7..079489df74f 100644
--- a/app/services/projects/import_export/relation_export_service.rb
+++ b/app/services/projects/import_export/relation_export_service.rb
@@ -24,10 +24,9 @@ module Projects
upload_compressed_file
relation_export.finish!
else
- fail_export(shared.errors.join(', '))
+ raise_error(shared.errors.join(', '))
end
- rescue StandardError => e
- fail_export(e.message)
+
ensure
FileUtils.remove_entry(shared.export_path) if File.exist?(shared.export_path)
FileUtils.remove_entry(shared.archive_path) if File.exist?(shared.archive_path)
@@ -83,10 +82,8 @@ module Projects
@archive_file ||= File.join(shared.archive_path, "#{relation}.tar.gz")
end
- def fail_export(error_message)
- relation_export.update!(status_event: :fail_op, export_error: error_message.truncate(300))
-
- logger.error(
+ def raise_error(error_message)
+ logger.warn(
message: 'Project relation export failed',
export_error: error_message,
relation: relation_export.relation,
@@ -94,6 +91,8 @@ module Projects
project_name: project.name,
project_id: project.id
)
+
+ raise ::Gitlab::ImportExport::Error.new, error_message
end
end
end
diff --git a/app/services/projects/import_export/relation_import_service.rb b/app/services/projects/import_export/relation_import_service.rb
new file mode 100644
index 00000000000..fc663332d2c
--- /dev/null
+++ b/app/services/projects/import_export/relation_import_service.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+# Imports a selected relation into an existing project, skipping any identified
+# duplicates. Duplicates are matched on the `iid` of the record being imported.
+module Projects
+ module ImportExport
+ class RelationImportService
+ include ::Services::ReturnServiceResponses
+
+ IMPORTABLE_RELATIONS = %w[issues merge_requests milestones ci_pipelines].freeze
+
+ # Creates a new RelationImportService.
+ #
+ # @param [User] current_user
+ # @param [Hash] params
+ # @option params [String] path The full path of the project
+ # @option params [String] relation The relation to import. See IMPORTABLE_RELATIONS for permitted values.
+ # @option params [UploadedFile] file The export archive containing the data to import
+ def initialize(current_user:, params:)
+ @current_user = current_user
+ @params = params
+ end
+
+ # Checks the validity of the chosen project and triggers the re-import of
+ # the chosen relation.
+ #
+ # @return [Services::ServiceResponse]
+ def execute
+ return error(_('Project not found'), :not_found) unless project
+
+ unless relation_valid?
+ return error(
+ format(
+ _('Imported relation must be one of %{relations}'),
+ relations: IMPORTABLE_RELATIONS.to_sentence(last_word_connector: ', or ')
+ ),
+ :bad_request
+ )
+ end
+
+ return error(_('You are not authorized to perform this action'), :forbidden) unless user_permitted?
+ return error(_('A relation import is already in progress for this project'), :conflict) if import_in_progress?
+
+ tracker = create_status_tracker
+
+ attach_import_file
+
+ Projects::ImportExport::RelationImportWorker.perform_async(
+ tracker.id,
+ current_user.id
+ )
+
+ success(tracker)
+ end
+
+ private
+
+ attr_reader :current_user, :params
+
+ def user_permitted?
+ Ability.allowed?(current_user, :admin_project, project)
+ end
+
+ def relation_valid?
+ IMPORTABLE_RELATIONS.include?(params[:relation])
+ end
+
+ def attach_import_file
+ import_export_upload = project.import_export_upload_by_user(current_user) ||
+ project.import_export_uploads.new(user: current_user)
+
+ import_export_upload.import_file = params[:file]
+ import_export_upload.save
+ end
+
+ def create_status_tracker
+ project.relation_import_trackers.create(
+ relation: params[:relation]
+ )
+ end
+
+ def project
+ @project ||= Project.find_by_full_path(params[:path])
+ end
+
+ def import_in_progress?
+ project.any_import_in_progress?
+ end
+ end
+ end
+end
diff --git a/app/services/projects/import_service.rb b/app/services/projects/import_service.rb
index 0da3326a100..c8e322b920c 100644
--- a/app/services/projects/import_service.rb
+++ b/app/services/projects/import_service.rb
@@ -174,7 +174,8 @@ module Projects
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
dns_rebind_protection: dns_rebind_protection?,
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?)
+ deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: Gitlab::CurrentSettings.outbound_local_requests_whitelist) # rubocop:disable Naming/InclusiveLanguage -- existing setting
.then do |(import_url, resolved_host)|
next '' if resolved_host.nil? || !import_url.scheme.in?(%w[http https])
@@ -183,8 +184,7 @@ module Projects
end
def allow_local_requests?
- Rails.env.development? && # There is no known usecase for this in non-development environments
- Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
def dns_rebind_protection?
diff --git a/app/services/projects/lfs_pointers/lfs_download_service.rb b/app/services/projects/lfs_pointers/lfs_download_service.rb
index 26352198e5c..1fdd744f107 100644
--- a/app/services/projects/lfs_pointers/lfs_download_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_service.rb
@@ -23,7 +23,7 @@ module Projects
def execute
return unless project&.lfs_enabled? && lfs_download_object
return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
- return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project) && lfs_size > LARGE_FILE_SIZE && lfs_object
+ return link_existing_lfs_object! if lfs_size > LARGE_FILE_SIZE && lfs_object
wrap_download_errors do
download_lfs_file!
diff --git a/app/services/projects/lfs_pointers/lfs_link_service.rb b/app/services/projects/lfs_pointers/lfs_link_service.rb
index 852f5e0222e..daa3b1f6e1e 100644
--- a/app/services/projects/lfs_pointers/lfs_link_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_link_service.rb
@@ -26,8 +26,12 @@ module Projects
private
def validate!(oids)
- return if oids.size <= MAX_OIDS
+ if oids.size <= MAX_OIDS
+ Gitlab::Metrics::Lfs.validate_link_objects_error_rate.increment(error: false, labels: {})
+ return
+ end
+ Gitlab::Metrics::Lfs.validate_link_objects_error_rate.increment(error: true, labels: {})
raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually'
end
@@ -56,7 +60,7 @@ module Projects
end
def log_lfs_link_results(lfs_objects_linked_count, iterations)
- Gitlab::Import::Logger.info(
+ ::Import::Framework::Logger.info(
class: self.class.name,
project_id: project.id,
project_path: project.full_path,
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index e7a8d5305ea..8c37287d709 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -19,7 +19,7 @@ module Projects
end
def alerting_setting_params
- return {} unless can?(current_user, :read_prometheus_alerts, project)
+ return {} unless can?(current_user, :admin_operations, project)
attr = params[:alerting_setting_attributes]
return {} unless attr
diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb
index aff258c418b..3a48e9d18f3 100644
--- a/app/services/projects/overwrite_project_service.rb
+++ b/app/services/projects/overwrite_project_service.rb
@@ -41,8 +41,6 @@ module Projects
private
def track_service(start_time, source_project, exception)
- return if ::Feature.disabled?(:project_overwrite_service_tracking, source_project)
-
duration = ::Gitlab::Metrics::System.monotonic_time - start_time
Gitlab::AppJsonLogger.info(
diff --git a/app/services/projects/participants_service.rb b/app/services/projects/participants_service.rb
index 188f12a287b..d27c2ff38cf 100644
--- a/app/services/projects/participants_service.rb
+++ b/app/services/projects/participants_service.rb
@@ -2,6 +2,7 @@
module Projects
class ParticipantsService < BaseService
+ include Gitlab::Utils::StrongMemoize
include Users::ParticipableService
def execute(noteable)
@@ -11,8 +12,9 @@ module Projects
noteable_owner +
participants_in_noteable +
all_members +
- project_members +
- groups
+ project_members
+
+ participants += groups unless relation_at_search_limit?(project_members)
render_participants_as_hash(participants.uniq)
end
@@ -20,6 +22,7 @@ module Projects
def project_members
filter_and_sort_users(project_members_relation)
end
+ strong_memoize_attr :project_members
def all_members
return [] if Feature.enabled?(:disable_all_mention)
diff --git a/app/services/projects/prometheus/metrics/base_service.rb b/app/services/projects/prometheus/metrics/base_service.rb
deleted file mode 100644
index 15247d45776..00000000000
--- a/app/services/projects/prometheus/metrics/base_service.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- module Prometheus
- module Metrics
- class BaseService
- include Gitlab::Utils::StrongMemoize
-
- def initialize(metric, params = {})
- @metric = metric
- @params = params.dup
- end
-
- protected
-
- attr_reader :metric, :params
- end
- end
- end
-end
diff --git a/app/services/projects/prometheus/metrics/destroy_service.rb b/app/services/projects/prometheus/metrics/destroy_service.rb
deleted file mode 100644
index d85499dc4ae..00000000000
--- a/app/services/projects/prometheus/metrics/destroy_service.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- module Prometheus
- module Metrics
- class DestroyService < Metrics::BaseService
- def execute
- metric.destroy
- end
- end
- end
- end
-end
diff --git a/app/services/projects/protect_default_branch_service.rb b/app/services/projects/protect_default_branch_service.rb
index 0aca525921c..126e3879590 100644
--- a/app/services/projects/protect_default_branch_service.rb
+++ b/app/services/projects/protect_default_branch_service.rb
@@ -10,8 +10,9 @@ module Projects
def initialize(project)
@project = project
- @default_branch_protection = Gitlab::Access::BranchProtection
- .new(project.namespace.default_branch_protection)
+ @default_branch_protection = Gitlab::Access::DefaultBranchProtection.new(
+ project.namespace.default_branch_protection_settings
+ )
end
def execute
@@ -29,7 +30,9 @@ module Projects
params = {
name: default_branch,
push_access_levels_attributes: [{ access_level: push_access_level }],
- merge_access_levels_attributes: [{ access_level: merge_access_level }]
+ merge_access_levels_attributes: [{ access_level: merge_access_level }],
+ code_owner_approval_required: code_owner_approval_required?,
+ allow_force_push: allow_force_push?
}
# The creator of the project is always allowed to create protected
@@ -39,6 +42,15 @@ module Projects
.execute(skip_authorization: true)
end
+ # overriden in EE
+ def code_owner_approval_required?
+ false
+ end
+
+ def allow_force_push?
+ default_branch_protection.allow_force_push?
+ end
+
def protect_branch?
default_branch_protection.any? &&
!ProtectedBranch.protected?(project, default_branch)
@@ -53,18 +65,26 @@ module Projects
end
def push_access_level
- if default_branch_protection.developer_can_push?
+ if default_branch_protection.no_one_can_push?
+ Gitlab::Access::NO_ACCESS
+ elsif default_branch_protection.developer_can_push?
Gitlab::Access::DEVELOPER
- else
+ elsif default_branch_protection.maintainer_can_push?
Gitlab::Access::MAINTAINER
+ else
+ Gitlab::Access::ADMIN
end
end
def merge_access_level
- if default_branch_protection.developer_can_merge?
+ if default_branch_protection.no_one_can_merge?
+ Gitlab::Access::NO_ACCESS
+ elsif default_branch_protection.developer_can_merge?
Gitlab::Access::DEVELOPER
- else
+ elsif default_branch_protection.maintainer_can_merge?
Gitlab::Access::MAINTAINER
+ else
+ Gitlab::Access::ADMIN
end
end
end
diff --git a/app/services/projects/slack_application_install_service.rb b/app/services/projects/slack_application_install_service.rb
deleted file mode 100644
index 812b8b0a082..00000000000
--- a/app/services/projects/slack_application_install_service.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- class SlackApplicationInstallService < BaseService
- include Gitlab::Routing
-
- # Endpoint to initiate the OAuth flow, redirects to Slack's authorization screen
- # https://api.slack.com/authentication/oauth-v2#asking
- SLACK_AUTHORIZE_URL = 'https://slack.com/oauth/v2/authorize'
-
- # Endpoint to exchange the temporary authorization code for an access token
- # https://api.slack.com/authentication/oauth-v2#exchanging
- SLACK_EXCHANGE_TOKEN_URL = 'https://slack.com/api/oauth.v2.access'
-
- def execute
- slack_data = exchange_slack_token
-
- return error("Slack: #{slack_data['error']}") unless slack_data['ok']
-
- integration = project.gitlab_slack_application_integration \
- || project.create_gitlab_slack_application_integration!
-
- installation = integration.slack_integration || integration.build_slack_integration
-
- installation.update!(
- bot_user_id: slack_data['bot_user_id'],
- bot_access_token: slack_data['access_token'],
- team_id: slack_data.dig('team', 'id'),
- team_name: slack_data.dig('team', 'name'),
- alias: project.full_path,
- user_id: slack_data.dig('authed_user', 'id'),
- authorized_scope_names: slack_data['scope']
- )
-
- update_legacy_installations!(installation)
-
- success
- end
-
- private
-
- def exchange_slack_token
- query = {
- client_id: Gitlab::CurrentSettings.slack_app_id,
- client_secret: Gitlab::CurrentSettings.slack_app_secret,
- code: params[:code],
- # NOTE: Needs to match the `redirect_uri` passed to the authorization endpoint,
- # otherwise we get a `bad_redirect_uri` error.
- redirect_uri: slack_auth_project_settings_slack_url(project)
- }
-
- Gitlab::HTTP.get(SLACK_EXCHANGE_TOKEN_URL, query: query).to_hash
- end
-
- # Update any legacy SlackIntegration records for the Slack Workspace. Legacy SlackIntegration records
- # are any created before our Slack App was upgraded to use Granular Bot Permissions and issue a
- # bot_access_token. Any SlackIntegration records for the Slack Workspace will already have the same
- # bot_access_token.
- def update_legacy_installations!(installation)
- updatable_attributes = installation.attributes.slice(
- 'user_id',
- 'bot_user_id',
- 'encrypted_bot_access_token',
- 'encrypted_bot_access_token_iv',
- 'updated_at'
- )
-
- SlackIntegration.by_team(installation.team_id).id_not_in(installation.id).each_batch do |batch|
- batch_ids = batch.pluck(:id) # rubocop: disable CodeReuse/ActiveRecord
- batch.update_all(updatable_attributes)
-
- ::Integrations::SlackWorkspace::IntegrationApiScope.update_scopes(batch_ids, installation.slack_api_scopes)
- end
- end
- end
-end
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index 49648216808..86cbacced8c 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -133,7 +133,7 @@ module Projects
project.old_path_with_namespace = @old_path
- update_repository_configuration(@new_path)
+ update_repository_configuration
remove_issue_contacts
@@ -196,8 +196,7 @@ module Projects
project.visibility_level = to_namespace.visibility_level unless project.visibility_level_allowed_by_group?
end
- def update_repository_configuration(full_path)
- project.set_full_path(gl_full_path: full_path)
+ def update_repository_configuration
project.track_project_repository
end
@@ -233,7 +232,7 @@ module Projects
def rollback_side_effects
project.reset
update_namespace_and_visibility(@old_namespace)
- update_repository_configuration(@old_path)
+ update_repository_configuration
end
def execute_system_hooks
@@ -268,7 +267,7 @@ module Projects
def update_integrations
project.integrations.with_default_settings.delete_all
- Integration.create_from_active_default_integrations(project, :project_id)
+ Integration.create_from_default_integrations(project, :project_id)
end
def update_pending_builds
@@ -276,10 +275,7 @@ module Projects
end
def pending_builds_params
- {
- namespace_id: new_namespace.id,
- namespace_traversal_ids: new_namespace.traversal_ids
- }
+ ::Ci::PendingBuild.namespace_transfer_params(new_namespace)
end
def remove_issue_contacts
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index fd6c9a86540..5513363ba01 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -2,6 +2,7 @@
module Projects
class UpdatePagesService < BaseService
+ include Gitlab::InternalEventsTracking
include Gitlab::Utils::StrongMemoize
# old deployment can be cached by pages daemon
@@ -9,12 +10,12 @@ module Projects
# 10 minutes is enough, but 30 feels safer
OLD_DEPLOYMENTS_DESTRUCTION_DELAY = 30.minutes
- attr_reader :build, :deployment_update
+ attr_reader :build, :deployment_validations
def initialize(project, build)
@project = project
@build = build
- @deployment_update = ::Gitlab::Pages::DeploymentUpdate.new(project, build)
+ @deployment_validations = ::Gitlab::Pages::DeploymentValidations.new(project, build)
end
def execute
@@ -25,13 +26,29 @@ module Projects
job.run!
end
- return error(deployment_update.errors.first.full_message) unless deployment_update.valid?
+ return error(deployment_validations.errors.first.full_message) unless deployment_validations.valid?
build.artifacts_file.use_file do |artifacts_path|
deployment = create_pages_deployment(artifacts_path, build)
break error('The uploaded artifact size does not match the expected value') unless deployment
- break error(deployment_update.errors.first.full_message) unless deployment_update.valid?
+ break error(deployment_validations.errors.first.full_message) unless deployment_validations.valid?
+
+ track_internal_event(
+ 'create_pages_deployment',
+ project: project,
+ namespace: project.namespace,
+ user: build.user
+ )
+
+ if extra_deployment?
+ track_internal_event(
+ 'create_pages_extra_deployment',
+ project: project,
+ namespace: project.namespace,
+ user: build.user
+ )
+ end
deactive_old_deployments(deployment)
success
@@ -43,6 +60,14 @@ module Projects
private
+ def extra_deployment?
+ path_prefix.present?
+ end
+
+ def path_prefix
+ ::Gitlab::Utils.slugify(build.pages&.fetch(:path_prefix, nil) || '')
+ end
+
def success
commit_status.success
publish_deployed_event
@@ -52,7 +77,7 @@ module Projects
def error(message)
register_failure
log_error("Projects::UpdatePagesService: #{message}")
- commit_status.allow_failure = !deployment_update.latest?
+ commit_status.allow_failure = !deployment_validations.latest_build?
commit_status.description = message
commit_status.drop(:script_failure)
super
@@ -77,7 +102,7 @@ module Projects
stage.project = build.project
end
end
- strong_memoize_attr :commit_status
+ strong_memoize_attr :stage
# rubocop: enable Performance/ActiveRecordSubtransactionMethods
def create_pages_deployment(artifacts_path, build)
@@ -95,11 +120,10 @@ module Projects
def pages_deployment_attributes(file, build)
{
file: file,
- file_count: deployment_update.entries_count,
+ file_count: deployment_validations.entries_count,
file_sha256: build.job_artifacts_archive.file_sha256,
ci_build_id: build.id,
- root_directory: build.options[:publish],
- upload_ready: false
+ root_directory: build.options[:publish]
}
end
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index 6053e834f68..0463c3aac0b 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -13,7 +13,10 @@ module Projects
if Gitlab::HTTP_V2::UrlBlocker.blocked_url?(
normalized_url(remote_mirror.url),
schemes: Project::VALID_MIRROR_PROTOCOLS,
- deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?
+ allow_localhost: Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?,
+ allow_local_network: Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?,
+ deny_all_requests_except_allowed: Gitlab::CurrentSettings.deny_all_requests_except_allowed?,
+ outbound_local_requests_allowlist: Gitlab::CurrentSettings.outbound_local_requests_whitelist # rubocop:disable Naming/InclusiveLanguage -- existing setting
)
hard_retry_or_fail(remote_mirror, _('The remote mirror URL is invalid.'), tries)
return error(remote_mirror.last_error)
@@ -36,7 +39,7 @@ module Projects
private
def normalized_url(url)
- strong_memoize(:normalized_url) do
+ strong_memoize_with(:normalized_url, url) do
CGI.unescape(Gitlab::UrlSanitizer.sanitize(url))
end
end
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
index 9c65b261274..8387ed593dc 100644
--- a/app/services/projects/update_repository_storage_service.rb
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -49,8 +49,14 @@ module Projects
pool_repository: pool_repository
)
- Repositories::ReplicateService.new(pool_repository.object_pool.repository)
- .execute(target_pool_repository.object_pool.repository, :object_pool)
+ begin
+ Repositories::ReplicateService.new(pool_repository.object_pool.repository)
+ .execute(target_pool_repository.object_pool.repository, :object_pool)
+ rescue StandardError => e
+ target_pool_repository.destroy!
+
+ raise e
+ end
end
def remove_old_paths
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index 1366370527d..1180416e5ee 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -6,6 +6,7 @@ module Projects
include ValidatesClassificationLabel
ValidationError = Class.new(StandardError)
+ ApiError = Class.new(StandardError)
def execute
build_topics
@@ -38,12 +39,14 @@ module Projects
else
update_failed!
end
- rescue ValidationError => e
+ rescue ApiError => e
+ error(e.message, status: :api_error)
+ rescue ValidationError, Gitlab::Pages::UniqueDomainGenerationFailure => e
error(e.message)
end
def run_auto_devops_pipeline?
- return false if project.repository.gitlab_ci_yml || !project.auto_devops&.previous_changes&.include?('enabled')
+ return false if project.has_ci_config_file? || !project.auto_devops&.previous_changes&.include?('enabled')
project.auto_devops_enabled?
end
@@ -63,6 +66,21 @@ module Projects
validate_default_branch_change
validate_renaming_project_with_tags
+ validate_restrict_user_defined_variables_change
+ end
+
+ def validate_restrict_user_defined_variables_change
+ return unless changing_restrict_user_defined_variables? || changing_pipeline_variables_minimum_override_role?
+
+ if changing_pipeline_variables_minimum_override_role? &&
+ params[:ci_pipeline_variables_minimum_override_role] == 'owner' &&
+ !can?(current_user, :owner_access, project)
+ raise_api_error(s_("UpdateProject|Changing the ci_pipeline_variables_minimum_override_role to the owner role is not allowed"))
+ end
+
+ return if can?(current_user, :change_restrict_user_defined_variables, project)
+
+ raise_api_error(s_("UpdateProject|Changing the restrict_user_defined_variables or ci_pipeline_variables_minimum_override_role is not allowed"))
end
def validate_default_branch_change
@@ -126,6 +144,9 @@ module Projects
end
# overridden by EE module
+ def audit_topic_change(from:); end
+
+ # overridden by EE module
def remove_unallowed_params
params.delete(:emails_enabled) unless can?(current_user, :set_emails_disabled, project)
@@ -156,6 +177,8 @@ module Projects
update_pending_builds if runners_settings_toggled?
+ audit_topic_change(from: @previous_topics)
+
publish_events
end
@@ -167,6 +190,10 @@ module Projects
raise ValidationError, message
end
+ def raise_api_error(message)
+ raise ApiError, message
+ end
+
def update_failed!
model_errors = project.errors.full_messages.to_sentence
error_message = model_errors.presence || s_('UpdateProject|Project could not be updated!')
@@ -188,6 +215,20 @@ module Projects
new_branch != project.default_branch
end
+ def changing_restrict_user_defined_variables?
+ new_restrict_user_defined_variables = params[:restrict_user_defined_variables]
+ return false if new_restrict_user_defined_variables.nil?
+
+ project.restrict_user_defined_variables != new_restrict_user_defined_variables
+ end
+
+ def changing_pipeline_variables_minimum_override_role?
+ new_pipeline_variables_minimum_override_role = params[:ci_pipeline_variables_minimum_override_role]
+ return false if new_pipeline_variables_minimum_override_role.nil?
+
+ project.ci_pipeline_variables_minimum_override_role != new_pipeline_variables_minimum_override_role
+ end
+
def enabling_wiki?
return false if project.wiki_enabled?
@@ -210,6 +251,9 @@ module Projects
end
def build_topics
+ # Used in EE. Can't be cached in override due to Gitlab/ModuleWithInstanceVariables cop
+ @previous_topics = project.topic_list
+
topics = params.delete(:topics)
tag_list = params.delete(:tag_list)
topic_list = topics || tag_list
diff --git a/app/services/projects/update_statistics_service.rb b/app/services/projects/update_statistics_service.rb
index 0d51de4d26e..ce775a74bd5 100644
--- a/app/services/projects/update_statistics_service.rb
+++ b/app/services/projects/update_statistics_service.rb
@@ -50,9 +50,19 @@ module Projects
end
def record_onboarding_progress
- return unless repository.commit_count > 1 || repository.branch_count > 1
+ return unless repository.commit_count > 1 ||
+ repository.branch_count > 1 ||
+ !initialized_repository_with_no_or_only_readme_file?
Onboarding::ProgressService.new(project.namespace).execute(action: :code_added)
end
+
+ def initialized_repository_with_no_or_only_readme_file?
+ return true if repository.empty?
+
+ !repository.ls_files(project.default_branch).reject do |file|
+ file == ::Projects::CreateService::README_FILE
+ end.any?
+ end
end
end
diff --git a/app/services/protected_branches/api_service.rb b/app/services/protected_branches/api_service.rb
index 0a7777c7fed..51cef412a88 100644
--- a/app/services/protected_branches/api_service.rb
+++ b/app/services/protected_branches/api_service.rb
@@ -8,7 +8,7 @@ module ProtectedBranches
def update(protected_branch)
::ProtectedBranches::UpdateService.new(project_or_group, @current_user,
-protected_branch_params(with_defaults: false)).execute(protected_branch)
+ protected_branch_params(with_defaults: false)).execute(protected_branch)
end
private
diff --git a/app/services/protected_branches/base_service.rb b/app/services/protected_branches/base_service.rb
index 62d5e04b499..0ab46bf236c 100644
--- a/app/services/protected_branches/base_service.rb
+++ b/app/services/protected_branches/base_service.rb
@@ -18,20 +18,9 @@ module ProtectedBranches
def refresh_cache
CacheService.new(@project_or_group, @current_user, @params).refresh
- refresh_cache_for_groups_projects
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
end
-
- private
-
- def refresh_cache_for_groups_projects
- return unless @project_or_group.is_a?(Group)
-
- @project_or_group.all_projects.find_each do |project|
- CacheService.new(project, @current_user, @params).refresh
- end
- end
end
end
diff --git a/app/services/protected_branches/cache_service.rb b/app/services/protected_branches/cache_service.rb
index cb2977796d7..cebc1eda0a6 100644
--- a/app/services/protected_branches/cache_service.rb
+++ b/app/services/protected_branches/cache_service.rb
@@ -2,7 +2,8 @@
module ProtectedBranches
class CacheService < ProtectedBranches::BaseService
- CACHE_ROOT_KEY = 'cache:gitlab:protected_branch'
+ include Gitlab::Utils::StrongMemoize
+
TTL_UNSET = -1
CACHE_EXPIRE_IN = 1.day
CACHE_LIMIT = 1000
@@ -49,6 +50,14 @@ module ProtectedBranches
def refresh
with_redis { |redis| redis.unlink(redis_key) }
+
+ return unless (group = project_or_group).is_a?(Group)
+
+ group.all_projects.find_each do |project|
+ with_redis do |redis|
+ redis.unlink redis_key(project)
+ end
+ end
end
private
@@ -72,18 +81,10 @@ module ProtectedBranches
)
end
- def redis_key
- group = project_or_group.is_a?(Group) ? project_or_group : project_or_group.group
- @redis_key ||= if allow_protected_branches_for_group?(group)
- [CACHE_ROOT_KEY, project_or_group.class.name, project_or_group.id].join(':')
- else
- [CACHE_ROOT_KEY, project_or_group.id].join(':')
- end
- end
-
- def allow_protected_branches_for_group?(group)
- Feature.enabled?(:group_protected_branches, group) ||
- Feature.enabled?(:allow_protected_branches_for_group, group)
+ def redis_key(entity = project_or_group)
+ strong_memoize_with(:redis_key, entity) do
+ ProtectedBranch::CacheKey.new(entity).to_s
+ end
end
def metrics
diff --git a/app/services/protected_branches/create_service.rb b/app/services/protected_branches/create_service.rb
index 46585e0b65d..42b178546d0 100644
--- a/app/services/protected_branches/create_service.rb
+++ b/app/services/protected_branches/create_service.rb
@@ -19,7 +19,10 @@ module ProtectedBranches
private
def save_protected_branch
- protected_branch.save
+ protected_branch.save.tap do
+ # Refresh all_protected_branches association as it is not automatically updated
+ project_or_group.all_protected_branches.reset if project_or_group.is_a?(Project)
+ end
end
def protected_branch
diff --git a/app/services/protected_branches/legacy_api_create_service.rb b/app/services/protected_branches/legacy_api_create_service.rb
index f662d9d1bf0..b4cf5da251b 100644
--- a/app/services/protected_branches/legacy_api_create_service.rb
+++ b/app/services/protected_branches/legacy_api_create_service.rb
@@ -22,7 +22,7 @@ module ProtectedBranches
end
@params.merge!(push_access_levels_attributes: [{ access_level: push_access_level }],
- merge_access_levels_attributes: [{ access_level: merge_access_level }])
+ merge_access_levels_attributes: [{ access_level: merge_access_level }])
service = ProtectedBranches::CreateService.new(project_or_group, @current_user, @params)
service.execute
diff --git a/app/services/protected_branches/update_service.rb b/app/services/protected_branches/update_service.rb
index 4b54bf92989..0c44871f6dc 100644
--- a/app/services/protected_branches/update_service.rb
+++ b/app/services/protected_branches/update_service.rb
@@ -2,8 +2,8 @@
module ProtectedBranches
class UpdateService < ProtectedBranches::BaseService
- def execute(protected_branch)
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, :update_protected_branch, protected_branch)
+ def execute(protected_branch, skip_authorization: false)
+ raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?(protected_branch)
old_merge_access_levels = protected_branch.merge_access_levels.map(&:clone)
old_push_access_levels = protected_branch.push_access_levels.map(&:clone)
@@ -16,6 +16,10 @@ module ProtectedBranches
protected_branch
end
+
+ def authorized?(protected_branch)
+ can?(current_user, :update_protected_branch, protected_branch)
+ end
end
end
diff --git a/app/services/quick_actions/interpret_service.rb b/app/services/quick_actions/interpret_service.rb
index b5f6bff756b..b47b84c5f94 100644
--- a/app/services/quick_actions/interpret_service.rb
+++ b/app/services/quick_actions/interpret_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module QuickActions
- class InterpretService < BaseService
+ class InterpretService < BaseContainerService
include Gitlab::Utils::StrongMemoize
include Gitlab::QuickActions::Dsl
include Gitlab::QuickActions::IssueActions
@@ -32,6 +32,9 @@ module QuickActions
end.compact
end
+ # IMPORTANT: unsafe! Use `execute_with_original_text` instead as it handles cleanup of any residual quick actions
+ # left in the original description.
+ #
# Takes a text and interprets the commands that are extracted from it.
# Returns the content without commands, a hash of changes to be applied to a record
# and a string containing the execution_message to show to the user.
@@ -48,6 +51,26 @@ module QuickActions
[content, @updates, execution_messages_for(commands), command_names(commands)]
end
+ # Similar to `execute` except also tries to extract any quick actions from original_text,
+ # and if found removes them from the main list of quick actions.
+ def execute_with_original_text(new_text, quick_action_target, only: nil, original_text: nil)
+ sanitized_new_text, new_command_params, execution_messages, command_names = execute(
+ new_text, quick_action_target, only: only
+ )
+
+ if original_text
+ _, original_command_params = self.class.new(
+ container: container,
+ current_user: current_user,
+ params: params
+ ).execute(original_text, quick_action_target, only: only)
+
+ new_command_params = (new_command_params.to_a - original_command_params.to_a).to_h if original_command_params
+ end
+
+ [sanitized_new_text, new_command_params, execution_messages, command_names]
+ end
+
# Takes a text and interprets the commands that are extracted from it.
# Returns the content without commands, and array of changes explained.
# `keep_actions: true` will keep the quick actions in the content.
@@ -89,10 +112,10 @@ module QuickActions
failed_parse(format(_("Failed to find users for %{missing}"), missing: err.message))
when Gitlab::QuickActions::UsersExtractor::TooManyRefsError
failed_parse(format(_('Too many references. Quick actions are limited to at most %{max_count} user references'),
- max_count: err.limit))
+ max_count: err.limit))
when Gitlab::QuickActions::UsersExtractor::TooManyFoundError
failed_parse(format(_("Too many users found. Quick actions are limited to at most %{max_count} users"),
- max_count: err.limit))
+ max_count: err.limit))
else
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(err)
failed_parse(_('Something went wrong'))
@@ -109,12 +132,6 @@ module QuickActions
project || group
end
- def group
- strong_memoize(:group) do
- quick_action_target.group if quick_action_target.respond_to?(:group)
- end
- end
-
def find_labels(labels_params = nil)
extract_references(labels_params, :label) | find_labels_by_name_no_tilde(labels_params)
end
@@ -160,7 +177,7 @@ module QuickActions
end
def map_commands(commands, method)
- commands.map do |name_or_alias, arg|
+ commands.flat_map do |name_or_alias, arg|
definition = self.class.definition_by_name(name_or_alias)
next unless definition
@@ -205,6 +222,12 @@ module QuickActions
# rubocop: enable CodeReuse/ActiveRecord
def usage_ping_tracking(quick_action_name, arg)
+ # Need to add this guard clause as `duo_code_review` quick action will fail
+ # if we continue to track its usage. This is because we don't have a metric
+ # for it and this is something that can change soon (e.g. quick action may
+ # be replaced by a UI component).
+ return if quick_action_name == :duo_code_review
+
Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter.track_unique_action(
quick_action_name.to_s,
args: arg&.strip,
@@ -218,4 +241,4 @@ module QuickActions
end
end
-QuickActions::InterpretService.prepend_mod_with('QuickActions::InterpretService')
+QuickActions::InterpretService.prepend_mod
diff --git a/app/services/quick_actions/target_service.rb b/app/services/quick_actions/target_service.rb
index 63e2c58fc55..089b1736b65 100644
--- a/app/services/quick_actions/target_service.rb
+++ b/app/services/quick_actions/target_service.rb
@@ -19,6 +19,14 @@ module QuickActions
# rubocop: disable CodeReuse/ActiveRecord
def work_item(type_iid)
+ if type_iid.blank?
+ parent = group_container? ? { namespace: group } : { project: project, namespace: project.project_namespace }
+ return WorkItem.new(
+ work_item_type_id: params[:work_item_type_id] || WorkItems::Type.default_issue_type.id,
+ **parent
+ )
+ end
+
WorkItems::WorkItemsFinder.new(current_user, **parent_params).find_by(iid: type_iid)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index 38c9e6d60a7..7063fa8447b 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -50,8 +50,8 @@ module Releases
def create_release(tag, evidence_pipeline)
release = build_release(tag)
- if project.catalog_resource && release.valid?
- response = Ci::Catalog::Resources::ReleaseService.new(release).execute
+ if publish_catalog?(release)
+ response = Ci::Catalog::Resources::ReleaseService.new(release, current_user, nil).execute
return error(response.message, 422) if response.error?
end
@@ -99,5 +99,11 @@ module Releases
::Releases::CreateEvidenceWorker.perform_async(release.id, pipeline&.id)
end
+
+ def publish_catalog?(release)
+ return false unless project.catalog_resource && release.valid?
+
+ ::Feature.enabled?(:ci_release_cli_catalog_publish_option, project) ? params[:legacy_catalog_publish] : true
+ end
end
end
diff --git a/app/services/releases/destroy_service.rb b/app/services/releases/destroy_service.rb
index 1e8338651a8..8522f6c5776 100644
--- a/app/services/releases/destroy_service.rb
+++ b/app/services/releases/destroy_service.rb
@@ -26,7 +26,7 @@ module Releases
return unless project.catalog_resource.versions.none?
- project.catalog_resource.update!(state: 'draft')
+ project.catalog_resource.update!(state: 'unpublished')
end
def allowed?
diff --git a/app/services/remote_mirrors/create_service.rb b/app/services/remote_mirrors/create_service.rb
new file mode 100644
index 00000000000..8819cdfc0fb
--- /dev/null
+++ b/app/services/remote_mirrors/create_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module RemoteMirrors
+ class CreateService < BaseService
+ def execute
+ return ServiceResponse.error(message: _('Access Denied')) unless allowed?
+
+ remote_mirror = project.remote_mirrors.create(allowed_attributes)
+
+ if remote_mirror.persisted?
+ ServiceResponse.success(payload: { remote_mirror: remote_mirror })
+ else
+ ServiceResponse.error(message: remote_mirror.errors)
+ end
+ end
+
+ private
+
+ def allowed_attributes
+ RemoteMirrors::Attributes.new(params).allowed
+ end
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_remote_mirror, project)
+ end
+ end
+end
diff --git a/app/services/remote_mirrors/destroy_service.rb b/app/services/remote_mirrors/destroy_service.rb
new file mode 100644
index 00000000000..05e28e9ea99
--- /dev/null
+++ b/app/services/remote_mirrors/destroy_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module RemoteMirrors # rubocop:disable Gitlab/BoundedContexts -- https://gitlab.com/gitlab-org/gitlab/-/issues/462816
+ class DestroyService < BaseService
+ def execute(remote_mirror)
+ return ServiceResponse.error(message: _('Access Denied')) unless allowed?
+ return ServiceResponse.error(message: _('Remote mirror is missing')) unless remote_mirror
+ return ServiceResponse.error(message: _('Project mismatch')) unless remote_mirror.project == project
+
+ if remote_mirror.destroy
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: remote_mirror.errors)
+ end
+ end
+
+ private
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_remote_mirror, project)
+ end
+ end
+end
diff --git a/app/services/remote_mirrors/sync_service.rb b/app/services/remote_mirrors/sync_service.rb
new file mode 100644
index 00000000000..a541b3fcdda
--- /dev/null
+++ b/app/services/remote_mirrors/sync_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module RemoteMirrors
+ class SyncService < BaseService
+ def execute(remote_mirror)
+ return ServiceResponse.error(message: _('Access Denied')) unless allowed?
+ return ServiceResponse.error(message: _('Mirror does not exist')) unless remote_mirror
+
+ if remote_mirror.disabled?
+ return ServiceResponse.error(
+ message: _('Cannot proceed with the push mirroring. Please verify your mirror configuration.')
+ )
+ end
+
+ remote_mirror.sync unless remote_mirror.update_in_progress?
+
+ ServiceResponse.success
+ end
+
+ private
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_remote_mirror, project)
+ end
+ end
+end
diff --git a/app/services/remote_mirrors/update_service.rb b/app/services/remote_mirrors/update_service.rb
new file mode 100644
index 00000000000..dc7de04576c
--- /dev/null
+++ b/app/services/remote_mirrors/update_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module RemoteMirrors # rubocop:disable Gitlab/BoundedContexts -- https://gitlab.com/gitlab-org/gitlab/-/issues/462816
+ class UpdateService < BaseService
+ def execute(remote_mirror)
+ return ServiceResponse.error(message: _('Access Denied')) unless allowed?
+ return ServiceResponse.error(message: _('Remote mirror is missing')) unless remote_mirror
+ return ServiceResponse.error(message: _('Project mismatch')) unless remote_mirror.project == project
+
+ if remote_mirror.update(allowed_attributes)
+ ServiceResponse.success(payload: { remote_mirror: remote_mirror })
+ else
+ ServiceResponse.error(message: remote_mirror.errors)
+ end
+ end
+
+ private
+
+ def allowed_attributes
+ RemoteMirrors::Attributes.new(params).allowed
+ end
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_remote_mirror, project)
+ end
+ end
+end
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index 447d4d979a6..1bda1185da7 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -142,8 +142,6 @@ module Repositories
end
def verify_commit_range!(from, to)
- return unless Feature.enabled?(:changelog_commits_limitation, @project)
-
commits = @project.repository.commits_by(oids: [from, to])
raise Gitlab::Changelog::Error, "Invalid or not found commit value in the given range" unless commits.count == 2
diff --git a/app/services/repositories/replicate_service.rb b/app/services/repositories/replicate_service.rb
index 0148223910f..ee9088dd86a 100644
--- a/app/services/repositories/replicate_service.rb
+++ b/app/services/repositories/replicate_service.rb
@@ -4,8 +4,8 @@ module Repositories
class ReplicateService < Repositories::BaseService
Error = Class.new(StandardError)
- def execute(new_repository, type)
- new_repository.replicate(repository)
+ def execute(new_repository, type, partition_hint: "")
+ new_repository.replicate(repository, partition_hint: partition_hint)
new_checksum = new_repository.checksum
checksum = repository.checksum
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index 824b1a8c377..a69f3405996 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -84,13 +84,15 @@ module ResourceAccessTokens
email: username_and_email_generator.email,
username: username_and_email_generator.username,
user_type: :project_bot,
- skip_confirmation: true # Bot users should always have their emails confirmed.
+ skip_confirmation: true, # Bot users should always have their emails confirmed.
+ organization_id: resource.organization_id
}
end
def create_personal_access_token(user)
+ organization_id = resource.organization_id || params[:organization_id]
PersonalAccessTokens::CreateService.new(
- current_user: user, target_user: user, params: personal_access_token_params
+ current_user: user, target_user: user, organization_id: organization_id, params: personal_access_token_params
).execute
end
@@ -108,11 +110,21 @@ module ResourceAccessTokens
end
def create_membership(resource, user, access_level)
- resource.add_member(user, access_level, expires_at: pat_expiration)
+ if Feature.enabled?(:retain_resource_access_token_user_after_revoke, resource.root_ancestor)
+ resource.add_member(user, access_level)
+ else
+ resource.add_member(user, access_level, expires_at: pat_expiration)
+ end
end
def pat_expiration
- params[:expires_at].presence || PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
+ return params[:expires_at] if params[:expires_at].present?
+
+ if Gitlab::CurrentSettings.require_personal_access_token_expiry?
+ return PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
+ end
+
+ nil
end
def log_event(token)
@@ -128,11 +140,10 @@ module ResourceAccessTokens
end
def validate_access_level(access_level)
- return true unless resource.is_a?(Project)
return true if current_user.bot?
- return true if current_user.can?(:manage_owners, resource)
+ return true if current_user.can?(:owner_access, resource)
- current_user.authorized_project?(resource, access_level.to_i)
+ resource.member?(current_user, access_level.to_i)
end
def do_not_allow_owner_access_level_for_project_bot?(access_level)
diff --git a/app/services/resource_access_tokens/revoke_service.rb b/app/services/resource_access_tokens/revoke_service.rb
index 46c71b04632..b6c402428d1 100644
--- a/app/services/resource_access_tokens/revoke_service.rb
+++ b/app/services/resource_access_tokens/revoke_service.rb
@@ -19,11 +19,15 @@ module ResourceAccessTokens
access_token.revoke!
- destroy_bot_user
+ success_message = "Access token #{access_token.name} has been revoked"
+ unless Feature.enabled?(:retain_resource_access_token_user_after_revoke, resource.root_ancestor)
+ destroy_bot_user
+ success_message += " and the bot user has been scheduled for deletion"
+ end
log_event
- success("Access token #{access_token.name} has been revoked and the bot user has been scheduled for deletion.")
+ success("#{success_message}.")
rescue StandardError => error
log_error("Failed to revoke access token for #{bot_user.name}: #{error.message}")
error(error.message)
diff --git a/app/services/saved_replies/create_service.rb b/app/services/saved_replies/create_service.rb
new file mode 100644
index 00000000000..57f1bca9a08
--- /dev/null
+++ b/app/services/saved_replies/create_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module SavedReplies
+ class CreateService < BaseService
+ def initialize(object:, name:, content:)
+ @object = object
+ @name = name
+ @content = content
+ end
+
+ def execute
+ unless object.try(:supports_saved_replies?)
+ return error(_('You have insufficient permissions to create a saved reply'))
+ end
+
+ saved_reply = saved_replies.build(name: name, content: content)
+
+ if saved_reply.save
+ success(saved_reply: saved_reply)
+ else
+ error(saved_reply.errors.full_messages)
+ end
+ end
+
+ private
+
+ attr_reader :object, :name, :content
+
+ delegate :saved_replies, to: :object
+ end
+end
diff --git a/app/services/saved_replies/destroy_service.rb b/app/services/saved_replies/destroy_service.rb
new file mode 100644
index 00000000000..e54f55e556c
--- /dev/null
+++ b/app/services/saved_replies/destroy_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module SavedReplies
+ class DestroyService < BaseService
+ def initialize(saved_reply:)
+ @saved_reply = saved_reply
+ end
+
+ def execute
+ if saved_reply.destroy
+ success(saved_reply: saved_reply)
+ else
+ error(saved_reply.errors.full_messages)
+ end
+ end
+
+ private
+
+ attr_reader :saved_reply
+ end
+end
diff --git a/app/services/saved_replies/update_service.rb b/app/services/saved_replies/update_service.rb
new file mode 100644
index 00000000000..0cf2eeeee77
--- /dev/null
+++ b/app/services/saved_replies/update_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module SavedReplies
+ class UpdateService < BaseService
+ def initialize(saved_reply:, name:, content:)
+ @saved_reply = saved_reply
+ @name = name
+ @content = content
+ end
+
+ def execute
+ if saved_reply.update(name: name, content: content)
+ success(saved_reply: saved_reply.reset)
+ else
+ error(saved_reply.errors.full_messages)
+ end
+ end
+
+ private
+
+ attr_reader :saved_reply, :name, :content
+ end
+end
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 24549b1498b..937d363faf3 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -17,16 +17,16 @@ module Search
def execute
Gitlab::SearchResults.new(current_user,
- params[:search],
- projects,
- order_by: params[:order_by],
- sort: params[:sort],
- filters: filters)
+ params[:search],
+ projects,
+ order_by: params[:order_by],
+ sort: params[:sort],
+ filters: filters)
end
# rubocop: disable CodeReuse/ActiveRecord
def projects
- @projects ||= ProjectsFinder.new(current_user: current_user).execute.preload(:topics, :project_topics)
+ @projects ||= ProjectsFinder.new(current_user: current_user).execute.preload(:topics, :project_topics, :route)
end
def allowed_scopes
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 3d413ed9f7b..6f4a8aa689b 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -40,6 +40,10 @@ class SearchService
# overridden in EE
end
+ def search_type_errors
+ # overridden in EE
+ end
+
def global_search?
project.blank? && group.blank?
end
@@ -58,10 +62,9 @@ class SearchService
delegate :valid_terms_count?, :valid_query_length?, to: :params
def search_results
- strong_memoize(:search_results) do
- abuse_detected? ? Gitlab::EmptySearchResults.new : search_service.execute
- end
+ abuse_detected? ? ::Search::EmptySearchResults.new : search_service.execute
end
+ strong_memoize_attr :search_results
def search_objects(preload_method = nil)
@search_objects ||= redact_unauthorized_results(
diff --git a/app/services/security/ci_configuration/base_create_service.rb b/app/services/security/ci_configuration/base_create_service.rb
index a205a68532b..81b6eb118ef 100644
--- a/app/services/security/ci_configuration/base_create_service.rb
+++ b/app/services/security/ci_configuration/base_create_service.rb
@@ -15,11 +15,14 @@ module Security
def execute
if project.repository.empty? && !(@params && @params[:initialize_with_sast])
- docs_link = ActionController::Base.helpers.link_to _('add at least one file to the repository'),
- Rails.application.routes.url_helpers.help_page_url('user/project/repository/index.md',
- anchor: 'add-files-to-a-repository'),
- target: '_blank',
- rel: 'noopener noreferrer'
+ docs_link = ActionController::Base.helpers.link_to(
+ _('add at least one file to the repository'),
+ Rails.application.routes.url_helpers.help_page_url(
+ 'user/project/repository/index.md', anchor: 'add-files-to-a-repository'
+ ),
+ target: '_blank',
+ rel: 'noopener noreferrer'
+ )
return ServiceResponse.error(
message: _(format('You must %s before using Security features.', docs_link)).html_safe
@@ -79,7 +82,10 @@ module Security
end
def remove_branch_on_exception
- project.repository.rm_branch(current_user, branch_name) if project.repository.branch_exists?(branch_name)
+ return unless project.repository.branch_exists?(branch_name)
+
+ target_sha = project.repository.commit(branch_name).sha
+ project.repository.rm_branch(current_user, branch_name, target_sha: target_sha)
end
def track_event(attributes_for_commit)
diff --git a/app/services/security/ci_configuration/container_scanning_create_service.rb b/app/services/security/ci_configuration/container_scanning_create_service.rb
index 4dfd05451ad..b1dd9d9c750 100644
--- a/app/services/security/ci_configuration/container_scanning_create_service.rb
+++ b/app/services/security/ci_configuration/container_scanning_create_service.rb
@@ -6,8 +6,11 @@ module Security
private
def action
- Security::CiConfiguration::ContainerScanningBuildAction.new(project.auto_devops_enabled?, existing_gitlab_ci_content,
- project.ci_config_path).generate
+ Security::CiConfiguration::ContainerScanningBuildAction.new(
+ project.auto_devops_enabled?,
+ existing_gitlab_ci_content,
+ project.ci_config_path
+ ).generate
end
def next_branch
diff --git a/app/services/security/ci_configuration/dependency_scanning_create_service.rb b/app/services/security/ci_configuration/dependency_scanning_create_service.rb
index 66dd76c4b5d..c10d57974b7 100644
--- a/app/services/security/ci_configuration/dependency_scanning_create_service.rb
+++ b/app/services/security/ci_configuration/dependency_scanning_create_service.rb
@@ -6,8 +6,11 @@ module Security
private
def action
- Security::CiConfiguration::DependencyScanningBuildAction.new(project.auto_devops_enabled?, existing_gitlab_ci_content,
- project.ci_config_path).generate
+ Security::CiConfiguration::DependencyScanningBuildAction.new(
+ project.auto_devops_enabled?,
+ existing_gitlab_ci_content,
+ project.ci_config_path
+ ).generate
end
def next_branch
diff --git a/app/services/security/ci_configuration/sast_iac_create_service.rb b/app/services/security/ci_configuration/sast_iac_create_service.rb
index 61bbebd77d0..ab667581d81 100644
--- a/app/services/security/ci_configuration/sast_iac_create_service.rb
+++ b/app/services/security/ci_configuration/sast_iac_create_service.rb
@@ -6,8 +6,11 @@ module Security
private
def action
- Security::CiConfiguration::SastIacBuildAction.new(project.auto_devops_enabled?, existing_gitlab_ci_content,
- project.ci_config_path).generate
+ Security::CiConfiguration::SastIacBuildAction.new(
+ project.auto_devops_enabled?,
+ existing_gitlab_ci_content,
+ project.ci_config_path
+ ).generate
end
def next_branch
diff --git a/app/services/security/ci_configuration/secret_detection_create_service.rb b/app/services/security/ci_configuration/secret_detection_create_service.rb
index 792fe4986e9..b4bd0ee3d4b 100644
--- a/app/services/security/ci_configuration/secret_detection_create_service.rb
+++ b/app/services/security/ci_configuration/secret_detection_create_service.rb
@@ -6,8 +6,11 @@ module Security
private
def action
- Security::CiConfiguration::SecretDetectionBuildAction.new(project.auto_devops_enabled?, existing_gitlab_ci_content,
- project.ci_config_path).generate
+ Security::CiConfiguration::SecretDetectionBuildAction.new(
+ project.auto_devops_enabled?,
+ existing_gitlab_ci_content,
+ project.ci_config_path
+ ).generate
end
def next_branch
@@ -19,7 +22,7 @@ module Security
end
def description
- _('Configure Secret Detection in `.gitlab-ci.yml` using the GitLab managed template. You can [add variable overrides](https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings) to customize Secret Detection settings.')
+ _('Configure Secret Detection in `.gitlab-ci.yml` using the GitLab managed template. You can [add variable overrides](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#configuration) to customize Secret Detection settings.')
end
def name
diff --git a/app/services/service_desk_settings/update_service.rb b/app/services/service_desk_settings/update_service.rb
index f8b825923f3..3b0c8e01410 100644
--- a/app/services/service_desk_settings/update_service.rb
+++ b/app/services/service_desk_settings/update_service.rb
@@ -9,7 +9,10 @@ module ServiceDeskSettings
params[:project_key] = nil if params[:project_key].blank?
- apply_feature_flag_restrictions!
+ apply_feature_flag_restrictions!(
+ feature_flag: :issue_email_participants,
+ field: :add_external_participants_from_cc
+ )
# We want to know when custom email got enabled
write_log_message = params[:custom_email_enabled].present? && !settings.custom_email_enabled?
@@ -25,11 +28,11 @@ module ServiceDeskSettings
private
- def apply_feature_flag_restrictions!
- return if Feature.enabled?(:issue_email_participants, project)
- return unless params.include?(:add_external_participants_from_cc)
+ def apply_feature_flag_restrictions!(feature_flag:, field:)
+ return if Feature.enabled?(feature_flag, project)
+ return unless params.include?(field)
- params.delete(:add_external_participants_from_cc)
+ params.delete(field)
end
end
end
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 7243bc411f7..3d48cd01210 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -10,8 +10,7 @@ module ServicePing
SubmissionError = Class.new(StandardError)
- def initialize(skip_db_write: false, payload: nil)
- @skip_db_write = skip_db_write
+ def initialize(payload: nil)
@payload = payload
end
@@ -36,7 +35,7 @@ module ServicePing
private
- attr_reader :payload, :skip_db_write
+ attr_reader :payload
def metadata(service_ping_payload)
{
@@ -65,7 +64,10 @@ module ServicePing
URI.join(base_url, path),
body: Gitlab::Json.dump(payload),
allow_local_requests: true,
- headers: { 'Content-type' => 'application/json' }
+ headers: {
+ 'Content-type' => 'application/json',
+ 'Accept' => 'application/json'
+ }
)
end
@@ -83,8 +85,6 @@ module ServicePing
raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
end
- return if skip_db_write
-
raw_usage_data = save_raw_usage_data(payload)
raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
ServicePing::DevopsReport.new(response).execute
@@ -114,6 +114,7 @@ module ServicePing
# rubocop: disable CodeReuse/ActiveRecord
RawUsageData.find_or_create_by(recorded_at: usage_data[:recorded_at]) do |record|
record.payload = usage_data
+ record.organization_id = Organizations::Organization::DEFAULT_ORGANIZATION_ID
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/app/services/service_response.rb b/app/services/service_response.rb
index fbc5660315b..fa495d16468 100644
--- a/app/services/service_response.rb
+++ b/app/services/service_response.rb
@@ -78,6 +78,10 @@ class ServiceResponse
Array.wrap(message)
end
+ def cause
+ ActiveSupport::StringInquirer.new(reason.to_s)
+ end
+
private
attr_writer :status, :message, :http_status, :payload, :reason
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index 569b8b76518..58da5195c44 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -2,6 +2,8 @@
module Snippets
class CreateService < Snippets::BaseService
+ include Gitlab::InternalEventsTracking
+
def initialize(project:, current_user: nil, params: {}, perform_spam_check: true)
super(project: project, current_user: current_user, params: params)
@perform_spam_check = perform_spam_check
@@ -24,7 +26,7 @@ module Snippets
if save_and_commit
UserAgentDetailService.new(spammable: @snippet, perform_spam_check: perform_spam_check).create
- Gitlab::UsageDataCounters::SnippetCounter.count(:create)
+ track_internal_event('create_snippet', project: project, user: current_user)
move_temporary_files
@@ -38,14 +40,26 @@ module Snippets
attr_reader :snippet, :perform_spam_check
+ # If the snippet is a "project snippet", specifically set it to nil to override the default database value of 1.
+ # We only want organization_id on the PersonalSnippet subclass.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/460827
def build_from_params
if project
- project.snippets.build(create_params)
+ project.snippets.build(
+ create_params.merge(organization_id: nil)
+ )
else
- PersonalSnippet.new(create_params)
+ PersonalSnippet.new(
+ create_params.merge(organization_id: organization_id)
+ )
end
end
+ def organization_id
+ params[:organization_id].presence || Organizations::Organization::DEFAULT_ORGANIZATION_ID
+ end
+
# If the snippet_actions param is present
# we need to fill content and file_name from
# the model
@@ -110,5 +124,9 @@ module Snippets
def restricted_files_actions
:create
end
+
+ def commit_attrs(snippet, msg)
+ super.merge(skip_target_sha: true)
+ end
end
end
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 8cc6458227f..ccf073d363a 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -2,6 +2,8 @@
module Snippets
class UpdateService < Snippets::BaseService
+ include Gitlab::InternalEventsTracking
+
COMMITTABLE_ATTRIBUTES = %w[file_name content].freeze
UpdateError = Class.new(StandardError)
@@ -27,7 +29,7 @@ module Snippets
end
if save_and_commit(snippet)
- Gitlab::UsageDataCounters::SnippetCounter.count(:update)
+ track_internal_event('update_snippet', project: project, user: current_user)
ServiceResponse.success(payload: { snippet: snippet })
else
diff --git a/app/services/spam/ham_service.rb b/app/services/spam/ham_service.rb
index 87069d5cd54..515f7203418 100644
--- a/app/services/spam/ham_service.rb
+++ b/app/services/spam/ham_service.rb
@@ -10,8 +10,8 @@ module Spam
@spam_log = spam_log
@user = spam_log.user
@options = {
- ip_address: spam_log.source_ip,
- user_agent: spam_log.user_agent
+ ip_address: spam_log.source_ip,
+ user_agent: spam_log.user_agent
}
end
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index cca0bb709aa..1e4ca55b08d 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -79,7 +79,7 @@ module Spam
target.spam!
create_spam_log
create_spam_abuse_event(result)
- ban_user!
+ ban_user
when DISALLOW
target.spam!
create_spam_log
@@ -132,15 +132,17 @@ module Spam
verdict: result
}
+ base_class = Feature.enabled?(:rename_abuse_workers, user, type: :worker) ? AntiAbuse : Abuse
+
target.run_after_commit_or_now do
- Abuse::SpamAbuseEventsWorker.perform_async(params)
+ base_class::SpamAbuseEventsWorker.perform_async(params)
end
end
- def ban_user!
+ def ban_user
UserCustomAttribute.set_banned_by_spam_log(target.spam_log)
- user.ban!
+ Users::AutoBanService.new(user: user, reason: 'spam').execute
end
def spam_verdict_service
diff --git a/app/services/spam/spam_verdict_service.rb b/app/services/spam/spam_verdict_service.rb
index f69ee255e01..a8381589cc9 100644
--- a/app/services/spam/spam_verdict_service.rb
+++ b/app/services/spam/spam_verdict_service.rb
@@ -71,7 +71,8 @@ module Spam
if result.evaluated?
correlation_id = Labkit::Correlation::CorrelationId.current_id || ''
- Abuse::TrustScoreWorker.perform_async(user.id, :spamcheck, result.score, correlation_id)
+ base_class = Feature.enabled?(:rename_abuse_workers, user, type: :worker) ? AntiAbuse : Abuse
+ base_class::TrustScoreWorker.perform_async(user.id, :spamcheck, result.score, correlation_id)
end
result.verdict
diff --git a/app/services/suggestions/apply_service.rb b/app/services/suggestions/apply_service.rb
index a20eb6b79c5..4895e2c2b91 100644
--- a/app/services/suggestions/apply_service.rb
+++ b/app/services/suggestions/apply_service.rb
@@ -54,6 +54,13 @@ module Suggestions
author_email: author&.email
}
+ if ::Feature.enabled?(:web_ui_commit_author_change, project)
+ params.merge!({
+ author_name: current_user.name,
+ author_email: current_user.commit_email_or_default
+ })
+ end
+
::Files::MultiService.new(suggestion_set.source_project, current_user, params)
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index fc27303792b..edfce0ec13c 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -45,8 +45,8 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_reviewers(old_reviewers)
end
- def request_review(issuable, project, author, user)
- ::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).request_review(user)
+ def request_review(issuable, project, author, user, has_unapproved)
+ ::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).request_review(user, has_unapproved)
end
def change_issuable_contacts(issuable, project, author, added_count, removed_count)
@@ -160,6 +160,21 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_status(status, source)
end
+ # Called when 'merge when checks pass' is executed
+ def merge_when_checks_pass(noteable, project, author, sha)
+ ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).merge_when_checks_pass(sha)
+ end
+
+ # Called when 'auto merge' is canceled
+ def cancel_auto_merge(noteable, project, author)
+ ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).cancel_auto_merge
+ end
+
+ # Called when 'auto merge' is aborted
+ def abort_auto_merge(noteable, project, author, reason)
+ ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).abort_auto_merge(reason)
+ end
+
# Called when 'merge when pipeline succeeds' is executed
def merge_when_pipeline_succeeds(noteable, project, author, sha)
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).merge_when_pipeline_succeeds(sha)
@@ -368,6 +383,10 @@ module SystemNoteService
merge_requests_service(noteable, noteable.project, user).unapprove_mr
end
+ def requested_changes(noteable, user)
+ merge_requests_service(noteable, noteable.project, user).requested_changes
+ end
+
def change_alert_status(alert, author, reason = nil)
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project, author: author).change_alert_status(reason)
end
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 3f96ca9cefb..2beb473b085 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -133,10 +133,12 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'reviewer'))
end
- def request_review(user)
- body = "#{self.class.issuable_events[:review_requested]} #{user.to_reference}"
+ def request_review(user, has_unapproved)
+ body = ["#{self.class.issuable_events[:review_requested]} #{user.to_reference}"]
- create_note(NoteSummary.new(noteable, project, author, body, action: 'reviewer'))
+ body << "removed approval" if has_unapproved
+
+ create_note(NoteSummary.new(noteable, project, author, body.to_sentence, action: 'reviewer'))
end
# Called when the contacts of an issuable are changed or removed
@@ -208,7 +210,7 @@ module SystemNotes
params = hierarchy_note_params(action, noteable, work_item)
create_note(NoteSummary.new(noteable, project, author, params[:parent_note_body], action: params[:parent_action]))
- create_note(NoteSummary.new(work_item, project, author, params[:child_note_body], action: params[:child_action]))
+ create_note(NoteSummary.new(work_item, work_item.project, author, params[:child_note_body], action: params[:child_action]))
end
# Called when the description of a Noteable is changed
@@ -262,7 +264,7 @@ module SystemNotes
else
track_cross_reference_action
- created_at = mentioner.created_at if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioner.is_a?(Commit)
+ created_at = mentioned_in.created_at if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioned_in.is_a?(Commit)
create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference', created_at: created_at))
end
end
@@ -438,7 +440,7 @@ module SystemNotes
end
def email_participants(body)
- create_note(NoteSummary.new(noteable, project, author, body))
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'issue_email_participants'))
end
def discussion_lock
@@ -517,18 +519,23 @@ module SystemNotes
child_type = child.issue_type.humanize(capitalize: false)
parent_type = parent.issue_type.humanize(capitalize: false)
+ child_reference, parent_reference = if child.namespace_id == parent.namespace_id
+ [child.to_reference, parent.to_reference]
+ else
+ [child.to_reference(full: true), parent.to_reference(full: true)]
+ end
if action == 'relate'
{
- parent_note_body: "added #{child.to_reference} as child #{child_type}",
- child_note_body: "added #{parent.to_reference} as parent #{parent_type}",
+ parent_note_body: "added #{child_reference} as child #{child_type}",
+ child_note_body: "added #{parent_reference} as parent #{parent_type}",
parent_action: 'relate_to_child',
child_action: 'relate_to_parent'
}
else
{
- parent_note_body: "removed child #{child_type} #{child.to_reference}",
- child_note_body: "removed parent #{parent_type} #{parent.to_reference}",
+ parent_note_body: "removed child #{child_type} #{child_reference}",
+ child_note_body: "removed parent #{parent_type} #{parent_reference}",
parent_action: 'unrelate_from_child',
child_action: 'unrelate_from_parent'
}
diff --git a/app/services/system_notes/merge_requests_service.rb b/app/services/system_notes/merge_requests_service.rb
index d71388a1552..8738d5bbcb0 100644
--- a/app/services/system_notes/merge_requests_service.rb
+++ b/app/services/system_notes/merge_requests_service.rb
@@ -2,6 +2,30 @@
module SystemNotes
class MergeRequestsService < ::SystemNotes::BaseService
+ # Called when the auto merge is executed
+ def merge_when_checks_pass(sha)
+ body = "enabled an automatic merge when all merge checks for #{sha} pass"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
+ end
+
+ # Called when the auto merge is canceled
+ def cancel_auto_merge
+ body = 'canceled the automatic merge'
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
+ end
+
+ # Called when the auto merge is aborted
+ def abort_auto_merge(reason)
+ body = "aborted the automatic merge because #{format_reason(reason)}"
+
+ ##
+ # TODO: Abort message should be sent by the system, not a particular user.
+ # See https://gitlab.com/gitlab-org/gitlab-foss/issues/63187.
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
+ end
+
# Called when 'merge when pipeline succeeds' is executed
def merge_when_pipeline_succeeds(sha)
body = "enabled an automatic merge when the pipeline for #{sha} succeeds"
@@ -18,7 +42,7 @@ module SystemNotes
# Called when 'merge when pipeline succeeds' is aborted
def abort_merge_when_pipeline_succeeds(reason)
- body = "aborted the automatic merge because #{reason}"
+ body = "aborted the automatic merge because #{format_reason(reason)}"
##
# TODO: Abort message should be sent by the system, not a particular user.
@@ -179,6 +203,23 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'unapproved'))
end
+
+ def requested_changes
+ body = "requested changes"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'requested_changes'))
+ end
+
+ private
+
+ def format_reason(reason)
+ return if reason.blank?
+
+ formatted_reason = +reason
+ formatted_reason[0] = formatted_reason[0].downcase
+
+ formatted_reason
+ end
end
end
diff --git a/app/services/test_hooks/project_service.rb b/app/services/test_hooks/project_service.rb
index 42af65ebd57..e161ac145fa 100644
--- a/app/services/test_hooks/project_service.rb
+++ b/app/services/test_hooks/project_service.rb
@@ -16,8 +16,10 @@ module TestHooks
def data
strong_memoize(:data) do
case trigger
- when 'push_events', 'tag_push_events'
+ when 'push_events'
push_events_data
+ when 'tag_push_events'
+ tag_push_events_data
when 'note_events'
note_events_data
when 'issues_events', 'confidential_issues_events'
@@ -34,6 +36,8 @@ module TestHooks
releases_events_data
when 'emoji_events'
emoji_events_data
+ when 'resource_access_token_events'
+ access_tokens_events_data
end
end
end
diff --git a/app/services/timelogs/create_service.rb b/app/services/timelogs/create_service.rb
index f65f9482d76..e349f44ac74 100644
--- a/app/services/timelogs/create_service.rb
+++ b/app/services/timelogs/create_service.rb
@@ -42,6 +42,8 @@ module Timelogs
if !timelog.save
error_in_save(timelog)
else
+ issuable.reset
+
SystemNoteService.created_timelog(issuable, issuable.project, current_user, timelog)
issuable_base_service.execute_hooks(issuable, 'update', old_associations: old_associations)
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index 168b36ea4d1..596cd26798c 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -288,7 +288,7 @@ class TodoService
).distinct_user_ids
end
- if users_multiple_todos.present? && !Todo::ACTIONS_MULTIPLE_ALLOWED.include?(attributes.fetch(:action))
+ if users_multiple_todos.present? && Todo::ACTIONS_MULTIPLE_ALLOWED.exclude?(attributes.fetch(:action))
excluded_user_ids += pending_todos(
users_multiple_todos,
attributes.slice(:project_id, :target_id, :target_type, :commit_id, :discussion, :action)
@@ -331,10 +331,16 @@ class TodoService
return unless note.can_create_todo?
project = note.project
- target = note.noteable
+ noteable = note.noteable
+ discussion = note.discussion
+
+ # Only update todos associated with the discussion if note is part of a thread
+ # Otherwise, update all todos associated with the noteable
+ #
+ target = discussion.individual_note? ? noteable : discussion
resolve_todos_for_target(target, author)
- create_mention_todos(project, target, author, note, skip_users)
+ create_mention_todos(project, noteable, author, note, skip_users)
end
def create_assignment_todo(target, author, old_assignees = [])
@@ -391,6 +397,8 @@ class TodoService
when Issue
attributes[:issue_type] = target.issue_type
attributes[:group] = target.namespace if target.project.blank?
+ when DiscussionNote
+ attributes.merge!(target_type: nil, target_id: nil, discussion: target.discussion)
when Discussion
attributes.merge!(target_type: nil, target_id: nil, discussion: target)
end
diff --git a/app/services/todos/allowed_target_filter_service.rb b/app/services/todos/allowed_target_filter_service.rb
index dfed616710b..058ff7bc9e3 100644
--- a/app/services/todos/allowed_target_filter_service.rb
+++ b/app/services/todos/allowed_target_filter_service.rb
@@ -7,12 +7,42 @@ module Todos
def initialize(todos, current_user)
@todos = todos
@current_user = current_user
+ @project_can_read_by_id = {}
end
def execute
- Preloaders::UserMaxAccessLevelInProjectsPreloader.new(@todos.map(&:project).compact, @current_user).execute
+ Preloaders::ProjectPolicyPreloader.new(projects, @current_user).execute
- @todos.select { |todo| can?(@current_user, :read_todo, todo) }
+ @todos.select do |todo|
+ can_read_target_project?(todo) && can?(@current_user, :read_todo, todo)
+ end
+ end
+
+ private
+
+ def projects
+ @projects ||= Project.id_in(@todos.map(&:project_id).compact)
+ end
+
+ def projects_by_id
+ @projects_by_id ||= projects.index_by(&:id)
+ end
+
+ def can_read_target_project?(todo)
+ project_id = todo.target.try(:project_id)
+
+ return true unless project_id
+
+ can_read_project?(project_id)
+ end
+
+ def can_read_project?(project_id)
+ unless @project_can_read_by_id.has_key?(project_id)
+ project = projects_by_id[project_id]
+ @project_can_read_by_id[project_id] = can?(@current_user, :read_project, project)
+ end
+
+ @project_can_read_by_id[project_id]
end
end
end
diff --git a/app/services/upload_service.rb b/app/services/upload_service.rb
index 39d1ffa4d6b..dbeca2fac01 100644
--- a/app/services/upload_service.rb
+++ b/app/services/upload_service.rb
@@ -1,9 +1,6 @@
# frozen_string_literal: true
class UploadService
- # Temporarily introduced for upload API: https://gitlab.com/gitlab-org/gitlab/-/issues/325788
- attr_accessor :override_max_attachment_size
-
def initialize(model, file, uploader_class = FileUploader, **uploader_context)
@model = model
@file = file
@@ -25,6 +22,6 @@ class UploadService
attr_reader :model, :file, :uploader_class, :uploader_context
def max_attachment_size
- override_max_attachment_size || Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
+ Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
end
end
diff --git a/app/services/uploads/destroy_service.rb b/app/services/uploads/destroy_service.rb
index 1f0d99ff7bb..91d50b1d58c 100644
--- a/app/services/uploads/destroy_service.rb
+++ b/app/services/uploads/destroy_service.rb
@@ -9,9 +9,7 @@ module Uploads
@current_user = user
end
- def execute(secret, filename)
- upload = find_upload(secret, filename)
-
+ def execute(upload)
unless current_user && upload && current_user.can?(:destroy_upload, upload)
return error(_("The resource that you are attempting to access does not "\
"exist or you don't have permission to perform this action."))
@@ -23,29 +21,5 @@ module Uploads
error(_('Upload could not be deleted.'))
end
end
-
- private
-
- # rubocop: disable CodeReuse/ActiveRecord
- def find_upload(secret, filename)
- uploader = uploader_class.new(model, secret: secret)
- upload_paths = uploader.upload_paths(filename)
-
- Upload.find_by(model: model, uploader: uploader_class.to_s, path: upload_paths)
- rescue FileUploader::InvalidSecret
- nil
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def uploader_class
- case model
- when Group
- NamespaceFileUploader
- when Project
- FileUploader
- else
- raise ArgumentError, "unknown uploader for #{model.class.name}"
- end
- end
end
end
diff --git a/app/services/users/activate_service.rb b/app/services/users/activate_service.rb
index dfc2996bcce..9453760ea10 100644
--- a/app/services/users/activate_service.rb
+++ b/app/services/users/activate_service.rb
@@ -35,8 +35,14 @@ module Users
end
def log_event(user)
- Gitlab::AppLogger.info(message: 'User activated', user: user.username.to_s, email: user.email.to_s,
- activated_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ Gitlab::AppLogger.info(
+ message: 'User activated',
+ username: user.username.to_s,
+ user_id: user.id,
+ email: user.email.to_s,
+ activated_by: current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
def success(message)
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index b490df6a134..8f3c2ea3137 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -18,6 +18,7 @@ module Users
def execute
return unless user
+ return if user.last_activity_on == Date.today
::Gitlab::Database::LoadBalancing::Session.without_sticky_writes { record_activity }
end
@@ -29,13 +30,12 @@ module Users
def record_activity
return if Gitlab::Database.read_only?
- today = Date.today
- return if user.last_activity_on == today
-
lease = Gitlab::ExclusiveLease.new("activity_service:#{user.id}", timeout: LEASE_TIMEOUT)
- return unless lease.try_obtain
+ # Skip transaction checks for exclusive lease as it is breaking system specs.
+ # See issue: https://gitlab.com/gitlab-org/gitlab/-/issues/441536
+ return unless Gitlab::ExclusiveLease.skipping_transaction_check { lease.try_obtain }
- user.update_attribute(:last_activity_on, today)
+ user.update_attribute(:last_activity_on, Date.today)
Gitlab::UsageDataCounters::HLLRedisCounter.track_event('unique_active_user', values: user.id)
diff --git a/app/services/users/approve_service.rb b/app/services/users/approve_service.rb
index 53ec37d0ff7..3b8229ec113 100644
--- a/app/services/users/approve_service.rb
+++ b/app/services/users/approve_service.rb
@@ -47,7 +47,14 @@ module Users
end
def log_event(user)
- Gitlab::AppLogger.info(message: "User instance access request approved", user: user.username.to_s, email: user.email.to_s, approved_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ Gitlab::AppLogger.info(
+ message: "User instance access request approved",
+ username: user.username.to_s,
+ user_id: user.id,
+ email: user.email.to_s,
+ approved_by: current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
end
end
diff --git a/app/services/users/auto_ban_service.rb b/app/services/users/auto_ban_service.rb
index fa3b738b4cd..523cc9aca99 100644
--- a/app/services/users/auto_ban_service.rb
+++ b/app/services/users/auto_ban_service.rb
@@ -17,6 +17,12 @@ module Users
end
end
+ def execute!
+ user.ban!
+ record_custom_attribute
+ success
+ end
+
private
attr_reader :user, :reason
@@ -31,3 +37,5 @@ module Users
end
end
end
+
+Users::AutoBanService.prepend_mod
diff --git a/app/services/users/ban_service.rb b/app/services/users/ban_service.rb
index 20c34b15f15..b5751dda8fc 100644
--- a/app/services/users/ban_service.rb
+++ b/app/services/users/ban_service.rb
@@ -17,11 +17,6 @@ module Users
def action
:ban
end
-
- override :track_event
- def track_event(user)
- experiment(:phone_verification_for_low_risk_users, user: user).track(:banned)
- end
end
end
diff --git a/app/services/users/banned_user_base_service.rb b/app/services/users/banned_user_base_service.rb
index cec351904a9..e53c3cf57ba 100644
--- a/app/services/users/banned_user_base_service.rb
+++ b/app/services/users/banned_user_base_service.rb
@@ -40,7 +40,14 @@ module Users
end
def log_event(user)
- Gitlab::AppLogger.info(message: "User #{action}", user: user.username.to_s, email: user.email.to_s, "#{action}_by": current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ Gitlab::AppLogger.info(
+ message: "User #{action}",
+ username: user.username.to_s,
+ user_id: user.id,
+ email: user.email.to_s,
+ "#{action}_by": current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
end
end
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 2a9e4be91d3..5c7a9518d74 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -11,12 +11,14 @@ module Users
def initialize(current_user, params = {})
@current_user = current_user
@params = params.dup
+ @organization_id = params.delete(:organization_id)
@identity_params = params.slice(*identity_attributes)
end
def execute
build_user
build_identity
+ build_user_detail
update_canonical_email
user
@@ -24,7 +26,7 @@ module Users
private
- attr_reader :identity_params, :user_params, :user
+ attr_reader :identity_params, :user_params, :user, :organization_id
def identity_attributes
[:extern_uid, :provider]
@@ -37,7 +39,9 @@ module Users
standard_build_user
end
- user.assign_personal_namespace
+ organization = Organizations::Organization.find_by_id(organization_id) if organization_id
+
+ user.assign_personal_namespace(organization)
end
def admin?
@@ -138,6 +142,11 @@ module Users
user.identities.build(identity_params)
end
+ def build_user_detail
+ # This will ensure we either load an existing record or create it.
+ user.user_detail
+ end
+
def update_canonical_email
Users::UpdateCanonicalEmailService.new(user: user).execute
end
@@ -150,6 +159,7 @@ module Users
:avatar,
:bio,
:can_create_group,
+ :color_mode_id,
:color_scheme_id,
:email,
:external,
diff --git a/app/services/users/create_service.rb b/app/services/users/create_service.rb
index 591d88b275e..bcae6e8ce3f 100644
--- a/app/services/users/create_service.rb
+++ b/app/services/users/create_service.rb
@@ -13,7 +13,9 @@ module Users
user = build_class.new(current_user, params).execute
reset_token = user.generate_reset_token if user.recently_sent_password_reset?
- after_create_hook(user, reset_token) if user.save
+ Namespace.with_disabled_organization_validation do
+ after_create_hook(user, reset_token) if user.save
+ end
user
end
diff --git a/app/services/users/deactivate_service.rb b/app/services/users/deactivate_service.rb
index e69ce13d3cc..6fc04eab1b4 100644
--- a/app/services/users/deactivate_service.rb
+++ b/app/services/users/deactivate_service.rb
@@ -56,8 +56,14 @@ module Users
end
def log_event(user)
- Gitlab::AppLogger.info(message: 'User deactivated', user: user.username.to_s, email: user.email.to_s,
- deactivated_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ Gitlab::AppLogger.info(
+ message: 'User deactivated',
+ username: user.username.to_s,
+ user_id: user.id,
+ email: user.email.to_s,
+ deactivated_by: current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
end
end
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index e4b593e3140..ae6af875ab4 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -43,11 +43,16 @@ module Users
raise Gitlab::Access::AccessDeniedError, "#{current_user} tried to destroy user #{user}!"
end
+ if user.solo_owned_organizations.present?
+ user.errors.add(:base, 'You must transfer ownership of organizations before you can remove user')
+ end
+
if !delete_solo_owned_groups && user.solo_owned_groups.present?
user.errors.add(:base, 'You must transfer ownership or delete groups before you can remove user')
- return user
end
+ return user if user.errors.any?
+
user.block
# Load the records. Groups are unavailable after membership is destroyed.
diff --git a/app/services/users/dismiss_broadcast_message_service.rb b/app/services/users/dismiss_broadcast_message_service.rb
new file mode 100644
index 00000000000..9c95fd1e3a8
--- /dev/null
+++ b/app/services/users/dismiss_broadcast_message_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Users
+ class DismissBroadcastMessageService
+ def initialize(current_user:, params: {})
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ result = dismissal.tap do |record|
+ record.expires_at = params[:expires_at]
+ end.save
+
+ return ServiceResponse.success if result
+
+ ServiceResponse.error(message: _('Failed to save dismissal'))
+ end
+
+ private
+
+ attr_reader :current_user, :params
+
+ def dismissal
+ Users::BroadcastMessageDismissal.find_or_initialize_dismissal(current_user, params[:broadcast_message_id])
+ end
+ end
+end
diff --git a/app/services/users/email_verification/validate_token_service.rb b/app/services/users/email_verification/validate_token_service.rb
index 0cfd4876abf..30413de805c 100644
--- a/app/services/users/email_verification/validate_token_service.rb
+++ b/app/services/users/email_verification/validate_token_service.rb
@@ -5,7 +5,7 @@ module Users
class ValidateTokenService < EmailVerification::BaseService
include ActionView::Helpers::DateHelper
- TOKEN_VALID_FOR_MINUTES = 240
+ TOKEN_VALID_FOR_MINUTES = 60
def initialize(attr:, user:, token:)
super(attr: attr, user: user)
diff --git a/app/services/users/reject_service.rb b/app/services/users/reject_service.rb
index dc22b2ec21d..1b1af99422b 100644
--- a/app/services/users/reject_service.rb
+++ b/app/services/users/reject_service.rb
@@ -34,7 +34,14 @@ module Users
end
def log_event(user)
- Gitlab::AppLogger.info(message: "User instance access request rejected", user: user.username.to_s, email: user.email.to_s, rejected_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ Gitlab::AppLogger.info(
+ message: "User instance access request rejected",
+ username: user.username.to_s,
+ user_id: user.id,
+ email: user.email.to_s,
+ rejected_by: current_user.username.to_s,
+ ip_address: current_user.current_sign_in_ip.to_s
+ )
end
end
end
diff --git a/app/services/users/reset_feed_token_service.rb b/app/services/users/reset_feed_token_service.rb
new file mode 100644
index 00000000000..fae9db6d19e
--- /dev/null
+++ b/app/services/users/reset_feed_token_service.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Users
+ class ResetFeedTokenService < BaseService
+ VALID_SOURCES = %i[self group_token_revocation_service].freeze
+
+ def initialize(current_user = nil, user: nil, source: nil)
+ @current_user = current_user
+ @user = user
+ @source = source
+
+ @source = :self if @current_user && !@source
+
+ raise ArgumentError unless user
+ raise ArgumentError unless VALID_SOURCES.include?(@source)
+ end
+
+ def execute
+ return ServiceResponse.error(message: s_('Not permitted to reset user feed token')) unless reset_permitted?
+
+ result = Users::UpdateService.new(current_user, user: user).execute(&:reset_feed_token!)
+ if result[:status] == :success
+ log_event
+ ServiceResponse.success(message: success_message)
+ else
+ ServiceResponse.error(message: error_message)
+ end
+ end
+
+ private
+
+ attr_reader :user, :source
+
+ def error_message
+ s_('Profiles|Feed token could not be reset')
+ end
+
+ def success_message
+ s_('Profiles|Feed token was successfully reset')
+ end
+
+ def reset_permitted?
+ case source
+ when :self
+ Ability.allowed?(current_user, :update_user, user)
+ when :group_token_revocation_service
+ true
+ end
+ end
+
+ def log_event
+ Gitlab::AppLogger.info(
+ class: self.class.name,
+ message: "User Feed Token Reset",
+ source: source,
+ reset_by: current_user&.username,
+ reset_for: user.username,
+ user_id: user.id)
+ end
+ end
+end
diff --git a/app/services/users/saved_replies/create_service.rb b/app/services/users/saved_replies/create_service.rb
deleted file mode 100644
index 21378ec4435..00000000000
--- a/app/services/users/saved_replies/create_service.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-module Users
- module SavedReplies
- class CreateService
- def initialize(current_user:, name:, content:)
- @current_user = current_user
- @name = name
- @content = content
- end
-
- def execute
- saved_reply = saved_replies.build(name: name, content: content)
-
- if saved_reply.save
- ServiceResponse.success(payload: { saved_reply: saved_reply })
- else
- ServiceResponse.error(message: saved_reply.errors.full_messages)
- end
- end
-
- private
-
- attr_reader :current_user, :name, :content
-
- delegate :saved_replies, to: :current_user
- end
- end
-end
diff --git a/app/services/users/saved_replies/destroy_service.rb b/app/services/users/saved_replies/destroy_service.rb
deleted file mode 100644
index ac08cddad0c..00000000000
--- a/app/services/users/saved_replies/destroy_service.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module Users
- module SavedReplies
- class DestroyService
- def initialize(saved_reply:)
- @saved_reply = saved_reply
- end
-
- def execute
- if saved_reply.destroy
- ServiceResponse.success(payload: { saved_reply: saved_reply })
- else
- ServiceResponse.error(message: saved_reply.errors.full_messages)
- end
- end
-
- private
-
- attr_reader :saved_reply
- end
- end
-end
diff --git a/app/services/users/saved_replies/update_service.rb b/app/services/users/saved_replies/update_service.rb
deleted file mode 100644
index 80d3da8a0a3..00000000000
--- a/app/services/users/saved_replies/update_service.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-module Users
- module SavedReplies
- class UpdateService
- def initialize(saved_reply:, name:, content:)
- @saved_reply = saved_reply
- @name = name
- @content = content
- end
-
- def execute
- if saved_reply.update(name: name, content: content)
- ServiceResponse.success(payload: { saved_reply: saved_reply.reset })
- else
- ServiceResponse.error(message: saved_reply.errors.full_messages)
- end
- end
-
- private
-
- attr_reader :saved_reply, :name, :content
- end
- end
-end
diff --git a/app/services/users/set_namespace_commit_email_service.rb b/app/services/users/set_namespace_commit_email_service.rb
index 775db364625..30ee597120d 100644
--- a/app/services/users/set_namespace_commit_email_service.rb
+++ b/app/services/users/set_namespace_commit_email_service.rb
@@ -20,7 +20,7 @@ module Users
return error(_("User doesn't exist or you don't have permission to change namespace commit emails."))
end
- unless can?(target_user, :read_namespace_via_membership, namespace)
+ unless can?(target_user, :read_namespace, namespace)
return error(_("Namespace doesn't exist or you don't have permission."))
end
diff --git a/app/services/users/unfollow_service.rb b/app/services/users/unfollow_service.rb
new file mode 100644
index 00000000000..1f04e452962
--- /dev/null
+++ b/app/services/users/unfollow_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Users
+ class UnfollowService
+ def initialize(params)
+ @follower = params[:follower]
+ @followee = params[:followee]
+ end
+
+ def execute
+ # rubocop: disable CodeReuse/ActiveRecord -- This is special service for unfollowing users
+ deleted_rows = Users::UserFollowUser.where(
+ follower_id: @follower.id,
+ followee_id: @followee.id
+ ).delete_all
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ if deleted_rows > 0
+ @follower.followees.reset
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: _('Failed to unfollow user'))
+ end
+ end
+ end
+end
diff --git a/app/services/users/update_canonical_email_service.rb b/app/services/users/update_canonical_email_service.rb
index c4b7a98f60b..c7db3d3d821 100644
--- a/app/services/users/update_canonical_email_service.rb
+++ b/app/services/users/update_canonical_email_service.rb
@@ -16,9 +16,9 @@ module Users
return unless user.email
return unless user.email.match? Devise.email_regexp
- canonical_email = canonicalize_email
+ canonical_email = ::Gitlab::Utils::Email.normalize_email(user.email)
- unless canonical_email
+ unless Regexp.union(INCLUDED_DOMAINS_PATTERN).match?(canonical_email)
# the canonical email doesn't exist, probably because the domain doesn't match
# destroy any UserCanonicalEmail record associated with this user
user.user_canonical_email&.delete
@@ -37,20 +37,5 @@ module Users
private
attr_reader :user
-
- def canonicalize_email
- email = user.email
-
- portions = email.split('@')
- username = portions.shift
- rest = portions.join
-
- regex = Regexp.union(INCLUDED_DOMAINS_PATTERN)
- return unless regex.match?(rest)
-
- no_dots = username.tr('.', '')
- before_plus = no_dots.split('+')[0]
- "#{before_plus}@#{rest}"
- end
end
end
diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb
index cc179ba964a..05388ce9491 100644
--- a/app/services/users/update_service.rb
+++ b/app/services/users/update_service.rb
@@ -88,10 +88,14 @@ module Users
end
def discard_synced_attributes
- if (metadata = @user.user_synced_attributes_metadata)
- read_only = metadata.read_only_attributes
+ params.reject! { |key, _| synced_attributes.include?(key.to_sym) }
+ end
- params.reject! { |key, _| read_only.include?(key.to_sym) }
+ def synced_attributes
+ if (metadata = @user.user_synced_attributes_metadata)
+ metadata.read_only_attributes
+ else
+ []
end
end
diff --git a/app/services/users/upsert_credit_card_validation_service.rb b/app/services/users/upsert_credit_card_validation_service.rb
index e0f81971944..a49f8c10032 100644
--- a/app/services/users/upsert_credit_card_validation_service.rb
+++ b/app/services/users/upsert_credit_card_validation_service.rb
@@ -11,18 +11,26 @@ module Users
def execute
credit_card = Users::CreditCardValidation.find_or_initialize_by_user(user_id)
- credit_card_params = {
+ credit_card_attributes = {
credit_card_validated_at: credit_card_validated_at,
last_digits: last_digits,
holder_name: holder_name,
network: network,
- expiration_date: expiration_date
+ expiration_date: expiration_date,
+ zuora_payment_method_xid: zuora_payment_method_xid,
+ stripe_setup_intent_xid: stripe_setup_intent_xid,
+ stripe_payment_method_xid: stripe_payment_method_xid,
+ stripe_card_fingerprint: stripe_card_fingerprint
}
- credit_card.update(credit_card_params)
+ credit_card.assign_attributes(credit_card_attributes)
+
+ return blocked if credit_card.exceeded_daily_verification_limit?
+
+ credit_card.save!
success
- rescue ActiveRecord::InvalidForeignKey, ActiveRecord::NotNullViolation
+ rescue ActiveRecord::InvalidForeignKey, ActiveRecord::NotNullViolation, ActiveRecord::RecordInvalid
error
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
@@ -51,6 +59,22 @@ module Users
params.fetch(:credit_card_type)
end
+ def zuora_payment_method_xid
+ params[:zuora_payment_method_xid]
+ end
+
+ def stripe_setup_intent_xid
+ params[:stripe_setup_intent_xid]
+ end
+
+ def stripe_payment_method_xid
+ params[:stripe_payment_method_xid]
+ end
+
+ def stripe_card_fingerprint
+ params[:stripe_card_fingerprint]
+ end
+
def expiration_date
year = params.fetch(:credit_card_expiration_year)
month = params.fetch(:credit_card_expiration_month)
@@ -65,5 +89,9 @@ module Users
def error
ServiceResponse.error(message: _('Error saving credit card validation record'))
end
+
+ def blocked
+ ServiceResponse.error(message: 'Credit card verification limit exceeded', reason: :rate_limited)
+ end
end
end
diff --git a/app/services/virtual_registries/packages/maven/handle_file_request_service.rb b/app/services/virtual_registries/packages/maven/handle_file_request_service.rb
new file mode 100644
index 00000000000..b3917d13d12
--- /dev/null
+++ b/app/services/virtual_registries/packages/maven/handle_file_request_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module VirtualRegistries
+ module Packages
+ module Maven
+ class HandleFileRequestService < ::BaseContainerService
+ alias_method :registry, :container
+
+ TIMEOUT = 5
+
+ def initialize(registry:, current_user: nil, params: {})
+ super(container: registry, current_user: current_user, params: params)
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'Path not present', reason: :path_not_present) unless path.present?
+ return ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized) unless allowed?
+
+ unless registry.upstream.present?
+ return ServiceResponse.error(message: 'No upstreams set', reason: :no_upstreams)
+ end
+
+ # TODO check cached responses here
+ # If one exists and can be used, return it.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/467983
+ handle_upstream(registry.upstream)
+ end
+
+ private
+
+ def handle_upstream(upstream)
+ url = upstream.url_for(path)
+ headers = upstream.headers
+ response = head_upstream(url: url, headers: headers)
+
+ if response.success?
+ workhorse_send_url_response(url: url, headers: headers)
+ else
+ ServiceResponse.error(message: 'File not found on any upstream', reason: :file_not_found_on_upstreams)
+ end
+ rescue *::Gitlab::HTTP::HTTP_ERRORS
+ ServiceResponse.error(message: 'Upstream not available', reason: :upstream_not_available)
+ end
+
+ def head_upstream(url:, headers:)
+ ::Gitlab::HTTP.head(url, headers: headers, follow_redirects: true, timeout: TIMEOUT)
+ end
+
+ def allowed?
+ can?(current_user, :read_virtual_registry, registry)
+ end
+
+ def path
+ params[:path]
+ end
+
+ def workhorse_send_url_response(url:, headers:)
+ ServiceResponse.success(
+ payload: { action: :workhorse_send_url, action_params: { url: url, headers: headers } }
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index 035f1754cbb..9659a871c7f 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class WebHookService
+ include Gitlab::Utils::StrongMemoize
+
class InternalErrorResponse
ERROR_MESSAGE = 'internal error'
@@ -33,8 +35,10 @@ class WebHookService
RESPONSE_HEADERS_COUNT_LIMIT = 50
RESPONSE_HEADERS_SIZE_LIMIT = 1.kilobytes
+ CUSTOM_TEMPLATE_INTERPOLATION_REGEX = /{{(.+?)}}/
+
attr_accessor :hook, :data, :hook_name, :request_options
- attr_reader :uniqueness_token
+ attr_reader :uniqueness_token, :idempotency_key
def self.hook_to_event(hook_name, hook = nil)
return hook.class.name.titleize if hook.is_a?(SystemHook)
@@ -42,11 +46,12 @@ class WebHookService
hook_name.to_s.singularize.titleize
end
- def initialize(hook, data, hook_name, uniqueness_token = nil, force: false)
+ def initialize(hook, data, hook_name, uniqueness_token = nil, idempotency_key: nil, force: false)
@hook = hook
@data = data.to_h
@hook_name = hook_name.to_s
@uniqueness_token = uniqueness_token
+ @idempotency_key = idempotency_key || generate_idempotency_key
@force = force
@request_options = {
timeout: Gitlab.config.gitlab.webhook_timeout,
@@ -87,15 +92,19 @@ class WebHookService
)
ServiceResponse.success(message: response.body, payload: { http_status: response.code })
- rescue *Gitlab::HTTP::HTTP_ERRORS,
- Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
+ rescue *Gitlab::HTTP::HTTP_ERRORS, JSON::ParserError, Zlib::DataError,
+ Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
execution_duration = ::Gitlab::Metrics::System.monotonic_time - start_time
error_message = e.to_s
+ # An exception raised while rendering the custom template prevents us from calling `#request_payload`
+ request_data = e.instance_of?(JSON::ParserError) ? {} : request_payload
+
log_execution(
response: InternalErrorResponse.new,
execution_duration: execution_duration,
- error_message: error_message
+ error_message: error_message,
+ request_data: request_data
)
Gitlab::AppLogger.error("WebHook Error after #{execution_duration.to_i.seconds}s => #{e}")
@@ -110,7 +119,8 @@ class WebHookService
break log_recursion_blocked if recursion_blocked?
params = {
- "recursion_detection_request_uuid" => Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid
+ "recursion_detection_request_uuid" => Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid,
+ "idempotency_key" => idempotency_key
}.compact
WebHookWorker.perform_async(hook.id, data.deep_stringify_keys, hook_name.to_s, params)
@@ -127,10 +137,14 @@ class WebHookService
@parsed_url = URI.parse(hook.url)
end
+ def generate_idempotency_key
+ SecureRandom.uuid
+ end
+
def make_request(url, basic_auth = false)
Gitlab::HTTP.post(url,
- body: Gitlab::Json::LimitedEncoder.encode(data, limit: REQUEST_BODY_SIZE_LIMIT),
- headers: build_headers,
+ body: Gitlab::Json::LimitedEncoder.encode(request_payload, limit: REQUEST_BODY_SIZE_LIMIT),
+ headers: build_custom_headers.merge(build_headers),
verify: hook.enable_ssl_verification,
basic_auth: basic_auth,
**request_options)
@@ -145,15 +159,15 @@ class WebHookService
make_request(post_url, basic_auth)
end
- def log_execution(response:, execution_duration:, error_message: nil)
+ def log_execution(response:, execution_duration:, error_message: nil, request_data: request_payload)
category = response_category(response)
log_data = {
trigger: hook_name,
url: hook.url,
interpolated_url: hook.interpolated_url,
execution_duration: execution_duration,
- request_headers: build_headers,
- request_data: data,
+ request_headers: build_custom_headers(values_redacted: true).merge(build_headers),
+ request_data: request_data,
response_headers: safe_response_headers(response),
response_body: safe_response_body(response),
response_status: response.code,
@@ -198,6 +212,7 @@ class WebHookService
headers = {
'Content-Type' => 'application/json',
'User-Agent' => "GitLab/#{Gitlab::VERSION}",
+ 'Idempotency-Key' => idempotency_key,
Gitlab::WebHooks::GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name, hook),
Gitlab::WebHooks::GITLAB_UUID_HEADER => SecureRandom.uuid,
Gitlab::WebHooks::GITLAB_INSTANCE_HEADER => Gitlab.config.gitlab.base_url
@@ -208,6 +223,14 @@ class WebHookService
end
end
+ def build_custom_headers(values_redacted: false)
+ return {} unless hook.custom_headers.present?
+
+ return hook.custom_headers.transform_values { '[REDACTED]' } if values_redacted
+
+ hook.custom_headers
+ end
+
# Make response headers more stylish
# Net::HTTPHeader has downcased hash with arrays: { 'content-type' => ['text/html; charset=utf-8'] }
# This method format response to capitalized hash with strings: { 'Content-Type' => 'text/html; charset=utf-8' }
@@ -267,4 +290,26 @@ class WebHookService
def enforce_utf8(str)
Gitlab::EncodingHelper.encode_utf8(str)
end
+
+ def request_payload
+ return data unless hook.custom_webhook_template.present?
+
+ start_time = Gitlab::Metrics::System.monotonic_time
+ rendered_template = render_custom_template(hook.custom_webhook_template, data.deep_stringify_keys)
+ duration = Gitlab::Metrics::System.monotonic_time - start_time
+
+ Gitlab::AppLogger.info(
+ message: "Rendered custom webhook template",
+ hook_id: hook.id,
+ duration_s: duration
+ )
+ Gitlab::Json.parse(rendered_template)
+ rescue JSON::ParserError => e
+ raise JSON::ParserError, "Error while parsing rendered custom webhook template: #{e.message}"
+ end
+ strong_memoize_attr :request_payload
+
+ def render_custom_template(template, params)
+ template.gsub(CUSTOM_TEMPLATE_INTERPOLATION_REGEX) { params.dig(*Regexp.last_match(1).split('.')) }
+ end
end
diff --git a/app/services/web_hooks/create_service.rb b/app/services/web_hooks/create_service.rb
new file mode 100644
index 00000000000..c187590fd61
--- /dev/null
+++ b/app/services/web_hooks/create_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class CreateService
+ include Services::ReturnServiceResponses
+
+ def initialize(current_user)
+ @current_user = current_user
+ end
+
+ def execute(hook_params, relation)
+ hook = relation.new(hook_params)
+
+ if hook.save
+ after_create(hook)
+ else
+ return error("Invalid url given", 422) if hook.errors[:url].present?
+ return error("Invalid branch filter given", 422) if hook.errors[:push_events_branch_filter].present?
+
+ error(hook.errors.full_messages.to_sentence, 422)
+ end
+ end
+
+ private
+
+ def after_create(hook)
+ success({ hook: hook, async: false })
+ end
+
+ attr_reader :current_user
+ end
+end
+
+WebHooks::CreateService.prepend_mod_with('WebHooks::CreateService')
diff --git a/app/services/web_hooks/destroy_service.rb b/app/services/web_hooks/destroy_service.rb
index dbd164ab20e..788463bded1 100644
--- a/app/services/web_hooks/destroy_service.rb
+++ b/app/services/web_hooks/destroy_service.rb
@@ -16,13 +16,8 @@ module WebHooks
def execute(web_hook)
return error(DENIED, 401) unless authorized?(web_hook)
- hook_id = web_hook.id
-
if web_hook.destroy
- WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => hook_id })
- Gitlab::AppLogger.info(log_message(web_hook))
-
- success({ async: false })
+ after_destroy(web_hook)
else
error("Unable to destroy #{web_hook.model_name.human}", 500)
end
@@ -30,12 +25,21 @@ module WebHooks
private
+ def after_destroy(web_hook)
+ WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => web_hook.id })
+ Gitlab::AppLogger.info(log_message(web_hook))
+
+ success({ async: false })
+ end
+
def log_message(hook)
"User #{current_user&.id} scheduled a deletion of logs for hook ID #{hook.id}"
end
def authorized?(web_hook)
- Ability.allowed?(current_user, :destroy_web_hook, web_hook)
+ Ability.allowed?(current_user, :admin_web_hook, web_hook)
end
end
end
+
+WebHooks::DestroyService.prepend_mod_with('WebHooks::DestroyService')
diff --git a/app/services/web_hooks/events/resend_service.rb b/app/services/web_hooks/events/resend_service.rb
new file mode 100644
index 00000000000..2168c8035de
--- /dev/null
+++ b/app/services/web_hooks/events/resend_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module WebHooks
+ module Events
+ class ResendService
+ def initialize(web_hook_log, current_user:)
+ @web_hook_log = web_hook_log
+ @current_user = current_user
+ end
+
+ def execute
+ return unauthorized_response unless authorized?
+ return url_changed_response unless web_hook_log.url_current?
+
+ web_hook_log.web_hook.execute(web_hook_log.request_data, web_hook_log.trigger,
+ idempotency_key: web_hook_log.idempotency_key)
+ end
+
+ private
+
+ def authorized?
+ case web_hook_log.web_hook.type
+ when 'ServiceHook'
+ current_user.can?(:admin_integrations, web_hook_log.web_hook.integration)
+ else
+ current_user.can?(:admin_web_hook, web_hook_log.web_hook)
+ end
+ end
+
+ def unauthorized_response
+ ServiceResponse.error(message: s_('WebHooks|The current user is not authorized to resend a hook event'))
+ end
+
+ def url_changed_response
+ ServiceResponse.error(
+ message: _('The hook URL has changed, and this log entry cannot be retried')
+ )
+ end
+
+ attr_reader :web_hook_log, :current_user
+ end
+ end
+end
diff --git a/app/services/web_hooks/log_execution_service.rb b/app/services/web_hooks/log_execution_service.rb
index b1da0c1642f..de87d6ce30c 100644
--- a/app/services/web_hooks/log_execution_service.rb
+++ b/app/services/web_hooks/log_execution_service.rb
@@ -46,7 +46,7 @@ module WebHooks
# Perform this operation within an `Gitlab::ExclusiveLease` lock to make it
# safe to be called concurrently from different workers.
def update_hook_failure_state
- in_lock(lock_name, ttl: LOCK_TTL, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRY) do |retried|
+ in_lock(lock_name, ttl: LOCK_TTL, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRY) do |_retried|
hook.reset # Reload within the lock so properties are guaranteed to be current.
case response_category
@@ -58,7 +58,7 @@ module WebHooks
hook.failed!
end
- hook.update_last_failure
+ hook.parent.update_last_webhook_failure(hook) if hook.parent
end
rescue Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError
raise if raise_lock_error?
diff --git a/app/services/wiki_pages/base_service.rb b/app/services/wiki_pages/base_service.rb
index 891e18c0acc..d4d6cace4af 100644
--- a/app/services/wiki_pages/base_service.rb
+++ b/app/services/wiki_pages/base_service.rb
@@ -4,7 +4,7 @@ module WikiPages
# There are 3 notions of 'action' that inheriting classes must implement:
#
# - external_action: the action we report to external clients with webhooks
- # - usage_counter_action: the action that we count in out internal counters
+ # - internal_event_name: the action that we count in out internal counters
# - event_action: what we record as the value of `Event#action`
class BaseService < ::BaseContainerService
private
@@ -13,7 +13,7 @@ module WikiPages
page_data = payload(page)
container.execute_hooks(page_data, :wiki_page_hooks)
container.execute_integrations(page_data, :wiki_page_hooks)
- increment_usage
+ increment_usage(page)
create_wiki_event(page)
end
@@ -22,9 +22,8 @@ module WikiPages
raise NotImplementedError
end
- # Passed to the WikiPageCounter to count events.
- # Must be one of WikiPageCounter::KNOWN_EVENTS
- def usage_counter_action
+ # Should return a valid event name to be used with Gitlab::InternalEvents
+ def internal_event_name
raise NotImplementedError
end
@@ -38,9 +37,20 @@ module WikiPages
Gitlab::DataBuilder::WikiPage.build(page, current_user, external_action)
end
- # This method throws an error if the action is an unanticipated value.
- def increment_usage
- Gitlab::UsageDataCounters::WikiPageCounter.count(usage_counter_action)
+ # This method throws an error if internal_event_name returns an unknown event name
+ def increment_usage(page)
+ label = 'template' if page.template?
+
+ Gitlab::InternalEvents.track_event(
+ internal_event_name,
+ user: current_user,
+ project: project,
+ namespace: group,
+ additional_properties: {
+ label: label,
+ property: page[:format].to_s
+ }
+ )
end
def create_wiki_event(page)
diff --git a/app/services/wiki_pages/create_service.rb b/app/services/wiki_pages/create_service.rb
index d14d94d77df..6ec9be37d09 100644
--- a/app/services/wiki_pages/create_service.rb
+++ b/app/services/wiki_pages/create_service.rb
@@ -14,12 +14,13 @@ module WikiPages
execute_hooks(page)
ServiceResponse.success(payload: { page: page })
else
- ServiceResponse.error(message: _('Could not create wiki page'), payload: { page: page })
+ message = page.template? ? _('Could not create wiki template') : _('Could not create wiki page')
+ ServiceResponse.error(message: message, payload: { page: page })
end
end
- def usage_counter_action
- :create
+ def internal_event_name
+ 'create_wiki_page'
end
def external_action
diff --git a/app/services/wiki_pages/destroy_service.rb b/app/services/wiki_pages/destroy_service.rb
index 1d566f98760..8669098f8cb 100644
--- a/app/services/wiki_pages/destroy_service.rb
+++ b/app/services/wiki_pages/destroy_service.rb
@@ -7,14 +7,13 @@ module WikiPages
execute_hooks(page)
ServiceResponse.success(payload: { page: page })
else
- ServiceResponse.error(
- message: _('Could not delete wiki page'), payload: { page: page }
- )
+ message = page.template? ? _('Could not delete wiki template') : _('Could not delete wiki page')
+ ServiceResponse.error(message: message, payload: { page: page })
end
end
- def usage_counter_action
- :delete
+ def internal_event_name
+ 'delete_wiki_page'
end
def external_action
diff --git a/app/services/wiki_pages/update_service.rb b/app/services/wiki_pages/update_service.rb
index cf9eddbd13f..d3afe9401d1 100644
--- a/app/services/wiki_pages/update_service.rb
+++ b/app/services/wiki_pages/update_service.rb
@@ -12,7 +12,8 @@ module WikiPages
execute_hooks(page)
ServiceResponse.success(payload: { page: page })
else
- raise UpdateError, _('Could not update wiki page')
+ message = page.template? ? _('Could not update wiki template') : _('Could not update wiki page')
+ raise UpdateError, message
end
rescue UpdateError, WikiPage::PageChangedError, WikiPage::PageRenameError => e
page.update_attributes(@params) # rubocop:disable Rails/ActiveRecordAliases
@@ -23,8 +24,8 @@ module WikiPages
)
end
- def usage_counter_action
- :update
+ def internal_event_name
+ 'update_wiki_page'
end
def external_action
diff --git a/app/services/work_items/bulk_update_service.rb b/app/services/work_items/bulk_update_service.rb
new file mode 100644
index 00000000000..b0b7958e686
--- /dev/null
+++ b/app/services/work_items/bulk_update_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module WorkItems
+ class BulkUpdateService
+ def initialize(parent:, current_user:, work_item_ids:, widget_params: {})
+ @parent = parent
+ @work_item_ids = work_item_ids
+ @current_user = current_user
+ @widget_params = widget_params.dup
+ end
+
+ def execute
+ unless @current_user.can?(:"read_#{@parent.to_ability_name}", @parent)
+ return ServiceResponse.error(message: "User can't read parent", reason: :authorization)
+ end
+
+ updated_work_items = scoped_work_items.find_each(batch_size: 100) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be identical in model
+ .filter_map do |work_item|
+ next unless @current_user.can?(:update_work_item, work_item)
+
+ update_result = WorkItems::UpdateService.new(
+ container: work_item.resource_parent,
+ widget_params: @widget_params,
+ current_user: @current_user
+ ).execute(work_item)
+
+ work_item if update_result[:status] == :success
+ end
+
+ ServiceResponse.success(payload: { updated_work_item_count: updated_work_items.count })
+ end
+
+ private
+
+ def scoped_work_items
+ ids = WorkItem.id_in(@work_item_ids)
+ cte = Gitlab::SQL::CTE.new(:work_item_ids_cte, ids)
+ work_item_scope = WorkItem.all
+ cte.apply_to(work_item_scope).in_namespaces_with_cte(namespaces)
+ end
+
+ def namespaces
+ relations = [group_namespaces, project_namespaces].compact
+
+ Namespace.from_union(relations, remove_duplicates: false)
+ end
+
+ def group_namespaces
+ return unless @parent.is_a?(Group)
+
+ @parent.self_and_descendants.select(:id)
+ end
+
+ def project_namespaces
+ if @parent.is_a?(Project)
+ Project.id_in(@parent)
+ else
+ Project.in_namespace(@parent.self_and_descendant_ids)
+ end.select('projects.project_namespace_id as id')
+ end
+ end
+end
diff --git a/app/services/work_items/callbacks/assignees.rb b/app/services/work_items/callbacks/assignees.rb
index 14755ff0b46..0febcfb2e79 100644
--- a/app/services/work_items/callbacks/assignees.rb
+++ b/app/services/work_items/callbacks/assignees.rb
@@ -3,32 +3,30 @@
module WorkItems
module Callbacks
class Assignees < Base
- def before_update
- params[:assignee_ids] = [] if excluded_in_new_type?
-
- return unless params.present? && params.has_key?(:assignee_ids)
- return unless has_permission?(:set_work_item_metadata)
-
- assignee_ids = filter_assignees_count(params[:assignee_ids])
- assignee_ids = filter_assignee_permissions(assignee_ids)
-
- return if assignee_ids.sort == work_item.assignee_ids.sort
+ def before_create
+ set_assignee_ids
+ end
- work_item.assignee_ids = assignee_ids
- work_item.touch
+ def before_update
+ set_assignee_ids
end
private
- def filter_assignees_count(assignee_ids)
- return assignee_ids if work_item.allows_multiple_assignees?
+ def set_assignee_ids
+ params[:assignee_ids] = [] if excluded_in_new_type?
+ return unless params.has_key?(:assignee_ids) && has_permission?(:set_work_item_metadata)
+
+ new_assignee_ids = filter_assignee_ids(params[:assignee_ids])
+ return if new_assignee_ids.sort == work_item.assignee_ids.sort
- assignee_ids.first(1)
+ work_item.assignee_ids = new_assignee_ids
end
- def filter_assignee_permissions(assignee_ids)
- assignees = User.id_in(assignee_ids)
+ def filter_assignee_ids(assignee_ids)
+ assignee_ids = assignee_ids.first(1) unless work_item.allows_multiple_assignees?
+ assignees = User.id_in(assignee_ids)
assignees.select { |assignee| assignee.can?(:read_work_item, work_item) }.map(&:id)
end
end
diff --git a/app/services/work_items/callbacks/crm_contacts.rb b/app/services/work_items/callbacks/crm_contacts.rb
new file mode 100644
index 00000000000..a78e2fc1bd9
--- /dev/null
+++ b/app/services/work_items/callbacks/crm_contacts.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Callbacks
+ class CrmContacts < Base
+ OPERATION_MODES = {
+ 'APPEND' => :add_ids,
+ 'REMOVE' => :remove_ids,
+ 'REPLACE' => :replace_ids
+ }.freeze
+
+ def after_save
+ return clear_contacts if excluded_in_new_type?
+
+ set_contacts
+ end
+
+ private
+
+ def clear_contacts
+ return unless work_item.customer_relations_contact_ids.present?
+
+ call_service({ replace_ids: [] })
+ end
+
+ def set_contacts
+ return unless params.present?
+
+ contact_ids = params[:contact_ids]
+ return if contact_ids.nil?
+ return if operation_mode_attribute.nil?
+ return if work_item.customer_relations_contact_ids.sort == contact_ids.sort
+
+ raise_error(unsupported_work_item_message) if group.nil?
+ raise_error(feature_disabled_message) unless feature_enabled?
+
+ call_service({ operation_mode_attribute => contact_ids })
+ end
+
+ def call_service(params)
+ response = ::Issues::SetCrmContactsService.new(
+ container: work_item.resource_parent,
+ current_user: current_user,
+ params: params
+ ).execute(work_item)
+
+ raise_error(response.message) unless response.success?
+ end
+
+ def feature_enabled?
+ group&.crm_enabled?
+ end
+
+ def group
+ @group ||= work_item.resource_parent.root_ancestor
+ end
+
+ def operation_mode_attribute
+ @operation_mode_attribute = OPERATION_MODES[params[:operation_mode] || 'REPLACE']
+ end
+
+ def feature_disabled_message
+ _('Feature disabled')
+ end
+
+ def unsupported_work_item_message
+ _('Work item not supported')
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/callbacks/description.rb b/app/services/work_items/callbacks/description.rb
index b9620c65214..4a8e4d71769 100644
--- a/app/services/work_items/callbacks/description.rb
+++ b/app/services/work_items/callbacks/description.rb
@@ -3,15 +3,20 @@
module WorkItems
module Callbacks
class Description < Base
- def before_update
+ def after_initialize
params[:description] = nil if excluded_in_new_type?
- return unless params.present? && params.key?(:description)
- return unless has_permission?(:update_work_item)
+ return unless update_description?
work_item.description = params[:description]
work_item.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
+
+ private
+
+ def update_description?
+ params.present? && params.key?(:description) && has_permission?(:update_work_item)
+ end
end
end
end
diff --git a/app/services/work_items/callbacks/linked_items.rb b/app/services/work_items/callbacks/linked_items.rb
new file mode 100644
index 00000000000..903317d6f4b
--- /dev/null
+++ b/app/services/work_items/callbacks/linked_items.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Callbacks
+ class LinkedItems < Base
+ def after_save_commit
+ return unless params.present? && params.key?(:work_items_ids)
+ return unless has_permission?(:set_work_item_metadata)
+
+ execute_linked_items_service(params[:work_items_ids], params[:link_type])
+ end
+
+ private
+
+ def execute_linked_items_service(item_ids, link_type)
+ items_to_link = WorkItem.id_in(item_ids)
+
+ result = ::WorkItems::RelatedWorkItemLinks::CreateService
+ .new(work_item, current_user, { target_issuable: items_to_link, link_type: link_type })
+ .execute
+
+ raise_error(result[:message]) if result[:status] == :error
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/callbacks/start_and_due_date.rb b/app/services/work_items/callbacks/start_and_due_date.rb
index b7318dcfcf4..31da54d139f 100644
--- a/app/services/work_items/callbacks/start_and_due_date.rb
+++ b/app/services/work_items/callbacks/start_and_due_date.rb
@@ -3,13 +3,57 @@
module WorkItems
module Callbacks
class StartAndDueDate < Base
+ include ::Gitlab::Utils::StrongMemoize
+
def before_update
- return work_item.assign_attributes({ start_date: nil, due_date: nil }) if excluded_in_new_type?
+ assign_attributes
+ end
+
+ def before_create
+ assign_attributes
+ end
+
+ private
- return if params.blank?
+ def assign_attributes
return unless has_permission?(:set_work_item_metadata)
+ return if dates_source_params.blank?
+ return if work_item.invalid?
+
+ # Although we have the database trigger to ensure the sync between the
+ # work_items_dates_sources[start_date, due_date] and issues[start_date, due_date]
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/157993
+ # for now, here we also assign the values directly to work_item to avoid
+ # having to reload this object after the Update service is finished.
+ #
+ # This is important for places like the GraphQL where we use the same
+ # instance in memory for all the changes and then use the same object
+ # to build the GraphQL response
+ work_item.assign_attributes(dates_source_params.slice(:start_date, :due_date))
+ (work_item.dates_source || work_item.build_dates_source).then do |dates_source|
+ dates_source.assign_attributes(dates_source_params)
+ end
+ end
+
+ def dates_source_params
+ return empty_dates_source if excluded_in_new_type?
+
+ params[:start_date_fixed] = params[:start_date] if params.key?(:start_date)
+ params[:due_date_fixed] = params[:due_date] if params.key?(:due_date)
+
+ params.merge(due_date_is_fixed: true, start_date_is_fixed: true)
+ end
+ strong_memoize_attr :dates_source_params
- work_item.assign_attributes(params.slice(:start_date, :due_date))
+ def empty_dates_source
+ {
+ due_date: nil,
+ due_date_fixed: nil,
+ due_date_is_fixed: true,
+ start_date: nil,
+ start_date_fixed: nil,
+ start_date_is_fixed: true
+ }
end
end
end
diff --git a/app/services/work_items/closing_merge_requests/create_service.rb b/app/services/work_items/closing_merge_requests/create_service.rb
new file mode 100644
index 00000000000..fa371893b82
--- /dev/null
+++ b/app/services/work_items/closing_merge_requests/create_service.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module ClosingMergeRequests
+ class CreateService
+ ResourceNotAvailable = Class.new(StandardError)
+
+ def initialize(current_user:, work_item:, merge_request_reference:, namespace_path: nil)
+ @current_user = current_user
+ @work_item = work_item
+ @merge_request_reference = merge_request_reference
+ @namespace_path = namespace_path
+ end
+
+ def execute
+ raise ResourceNotAvailable, 'Cannot update work item' unless @current_user.can?(:update_work_item, @work_item)
+
+ if @work_item.get_widget(:development).blank?
+ return ServiceResponse.error(message: _('Development widget is not enabled for this work item type'))
+ end
+
+ merge_request = merge_request_from_reference
+ raise ResourceNotAvailable, 'Merge request not available' if merge_request.blank?
+
+ mr_closing_issue = MergeRequestsClosingIssues.new(
+ merge_request: merge_request,
+ issue_id: @work_item.id,
+ from_mr_description: false
+ )
+
+ if mr_closing_issue.save
+ GraphqlTriggers.work_item_updated(@work_item)
+
+ ServiceResponse.success(payload: { merge_request_closing_issue: mr_closing_issue })
+ else
+ ServiceResponse.error(message: mr_closing_issue.errors.full_messages)
+ end
+ end
+
+ private
+
+ def merge_request_from_reference
+ parent = parent_from_path
+
+ extractor = if parent.is_a?(Project)
+ ::Gitlab::ReferenceExtractor.new(parent, @current_user)
+ else
+ ::Gitlab::ReferenceExtractor.new(nil, @current_user)
+ end
+
+ extractor.analyze(@merge_request_reference, extractor_params_for(parent))
+ extractor.merge_requests.first
+ end
+
+ def parent_from_path
+ parent = Routable.find_by_full_path(@namespace_path)
+ return parent if parent.present?
+
+ # We fall back to the work item's parent, as the reference extractor always needs a parent to work
+ @work_item.project || @work_item.namespace
+ end
+
+ def extractor_params_for(parent)
+ if parent.is_a?(Group)
+ { group: parent }
+ else
+ {}
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/create_service.rb b/app/services/work_items/create_service.rb
index e1e6063c8ac..df797eec6c1 100644
--- a/app/services/work_items/create_service.rb
+++ b/app/services/work_items/create_service.rb
@@ -2,7 +2,6 @@
module WorkItems
class CreateService < Issues::CreateService
- extend ::Gitlab::Utils::Override
include WidgetableService
def initialize(container:, perform_spam_check: true, current_user: nil, params: {}, widget_params: {})
@@ -16,18 +15,19 @@ module WorkItems
@widget_params = widget_params
end
- def execute
- result = super
+ def execute(skip_system_notes: false)
+ result = skip_system_notes? ? super(skip_system_notes: true) : super
return result if result.error?
work_item = result[:issue]
if work_item.valid?
+ publish_event(work_item)
success(payload(work_item))
else
error(work_item.errors.full_messages, :unprocessable_entity, pass_back: payload(work_item))
end
- rescue ::WorkItems::Widgets::BaseService::WidgetError => e
+ rescue ::WorkItems::Widgets::BaseService::WidgetError, ::Issuable::Callbacks::Base::Error => e
error(e.message, :unprocessable_entity)
end
@@ -70,15 +70,6 @@ module WorkItems
private
- override :handle_quick_actions
- def handle_quick_actions(work_item)
- # Do not handle quick actions unless the work item is the default Issue.
- # The available quick actions for a work item depend on its type and widgets.
- return if work_item.work_item_type != WorkItems::Type.default_by_type(:issue)
-
- super
- end
-
def authorization_action
:create_work_item
end
@@ -86,6 +77,21 @@ module WorkItems
def payload(work_item)
{ work_item: work_item }
end
+
+ def skip_system_notes?
+ false
+ end
+
+ def publish_event(work_item)
+ work_item.run_after_commit_or_now do
+ Gitlab::EventStore.publish(
+ WorkItems::WorkItemCreatedEvent.new(data: {
+ id: work_item.id,
+ namespace_id: work_item.namespace_id
+ })
+ )
+ end
+ end
end
end
diff --git a/app/services/work_items/delete_service.rb b/app/services/work_items/delete_service.rb
index 1093a403a1c..2813e4c00c0 100644
--- a/app/services/work_items/delete_service.rb
+++ b/app/services/work_items/delete_service.rb
@@ -8,10 +8,23 @@ module WorkItems
end
if super
+ publish_event(work_item)
::ServiceResponse.success
else
::ServiceResponse.error(message: work_item.errors.full_messages)
end
end
+
+ private
+
+ def publish_event(work_item)
+ Gitlab::EventStore.publish(
+ WorkItems::WorkItemDeletedEvent.new(data: {
+ id: work_item.id,
+ namespace_id: work_item.namespace_id,
+ work_item_parent_id: work_item.work_item_parent&.id
+ }.tap(&:compact_blank!))
+ )
+ end
end
end
diff --git a/app/services/work_items/parent_links/base_service.rb b/app/services/work_items/parent_links/base_service.rb
index 6f22e09a3fc..92903bf6e06 100644
--- a/app/services/work_items/parent_links/base_service.rb
+++ b/app/services/work_items/parent_links/base_service.rb
@@ -51,3 +51,5 @@ module WorkItems
end
end
end
+
+WorkItems::ParentLinks::BaseService.prepend_mod
diff --git a/app/services/work_items/parent_links/create_service.rb b/app/services/work_items/parent_links/create_service.rb
index 4747d2f17e4..cd21f36674d 100644
--- a/app/services/work_items/parent_links/create_service.rb
+++ b/app/services/work_items/parent_links/create_service.rb
@@ -9,20 +9,16 @@ module WorkItems
def relate_issuables(work_item)
link = set_parent(issuable, work_item)
- link.move_to_end
-
- if link.changed? && link.save
- relate_child_note = create_notes(work_item)
-
- ResourceLinkEvent.create(
- user: current_user,
- work_item: link.work_item_parent,
- child_work_item: link.work_item,
- action: ResourceLinkEvent.actions[:add],
- system_note_metadata_id: relate_child_note&.system_note_metadata&.id
- )
+ # It's possible to force the relative_position. This is for example used when importing parent links from
+ # legacy epics.
+ if params[:relative_position]
+ link.relative_position = params[:relative_position]
+ else
+ link.move_to_start
end
+ create_notes_and_resource_event(work_item, link) if link.changed? && link.save
+
link
end
@@ -30,6 +26,27 @@ module WorkItems
def extract_references
params[:issuable_references]
end
+
+ def create_notes_and_resource_event(work_item, link)
+ relate_child_note = create_notes(work_item)
+
+ ResourceLinkEvent.create(
+ user: current_user,
+ work_item: link.work_item_parent,
+ child_work_item: link.work_item,
+ action: ResourceLinkEvent.actions[:add],
+ system_note_metadata_id: relate_child_note&.system_note_metadata&.id
+ )
+ end
+
+ override :after_create_for
+ def after_create_for(link)
+ super
+
+ GraphqlTriggers.work_item_updated(link.work_item_parent)
+ end
end
end
end
+
+WorkItems::ParentLinks::CreateService.prepend_mod
diff --git a/app/services/work_items/parent_links/destroy_service.rb b/app/services/work_items/parent_links/destroy_service.rb
index 97145d0b360..b3d49e2ab14 100644
--- a/app/services/work_items/parent_links/destroy_service.rb
+++ b/app/services/work_items/parent_links/destroy_service.rb
@@ -3,11 +3,14 @@
module WorkItems
module ParentLinks
class DestroyService < IssuableLinks::DestroyService
+ extend ::Gitlab::Utils::Override
+
attr_reader :link, :current_user, :parent, :child
- def initialize(link, user)
+ def initialize(link, user, params = {})
@link = link
@current_user = user
+ @params = params
@parent = link.work_item_parent
@child = link.work_item
end
@@ -33,6 +36,15 @@ module WorkItems
def permission_to_remove_relation?
can?(current_user, :admin_parent_link, child) && can?(current_user, :admin_parent_link, parent)
end
+
+ override :after_destroy
+ def after_destroy
+ super
+
+ GraphqlTriggers.work_item_updated(@link.work_item_parent)
+ end
end
end
end
+
+WorkItems::ParentLinks::DestroyService.prepend_mod
diff --git a/app/services/work_items/parent_links/reorder_service.rb b/app/services/work_items/parent_links/reorder_service.rb
index 0ee650bd8ab..749a5a477eb 100644
--- a/app/services/work_items/parent_links/reorder_service.rb
+++ b/app/services/work_items/parent_links/reorder_service.rb
@@ -9,9 +9,13 @@ module WorkItems
def relate_issuables(work_item)
notes_are_expected = work_item.work_item_parent != issuable
link = set_parent(issuable, work_item)
- reorder(link, params[:adjacent_work_item], params[:relative_position])
- create_notes(work_item) if link.save && notes_are_expected
+ if reorder(link, params[:adjacent_work_item], params[:relative_position])
+ create_notes(work_item) if notes_are_expected
+ # When the hierarchy is changed from the children list,
+ # we have to trigger the update on the parent to update the view
+ GraphqlTriggers.work_item_updated(issuable)
+ end
link
end
@@ -19,8 +23,20 @@ module WorkItems
def reorder(link, adjacent_work_item, relative_position)
WorkItems::ParentLink.move_nulls_to_end(RelativePositioning.mover.context(link).relative_siblings)
- link.move_before(adjacent_work_item.parent_link) if relative_position == 'BEFORE'
- link.move_after(adjacent_work_item.parent_link) if relative_position == 'AFTER'
+ move_link(link, adjacent_work_item, relative_position)
+ end
+
+ # overridden in EE
+ def move_link(link, adjacent_work_item, relative_position)
+ if relative_position
+ link.move_before(adjacent_work_item.parent_link) if relative_position == 'BEFORE'
+ link.move_after(adjacent_work_item.parent_link) if relative_position == 'AFTER'
+ elsif link.changes.include?(:work_item_parent_id)
+ # position item at the start of the list if parent changed and relative_position is not provided
+ link.move_to_start
+ end
+
+ link.save
end
override :render_conflict_error?
@@ -37,3 +53,5 @@ module WorkItems
end
end
end
+
+WorkItems::ParentLinks::ReorderService.prepend_mod
diff --git a/app/services/work_items/related_work_item_links/create_service.rb b/app/services/work_items/related_work_item_links/create_service.rb
index 38e5ba3be7f..42998d50751 100644
--- a/app/services/work_items/related_work_item_links/create_service.rb
+++ b/app/services/work_items/related_work_item_links/create_service.rb
@@ -6,10 +6,10 @@ module WorkItems
extend ::Gitlab::Utils::Override
def execute
- return error(_('No matching work item found.'), 404) unless can?(current_user, :admin_work_item_link, issuable)
+ return error(_('No matching work item found.'), 404) unless can_admin_work_item_link?(issuable)
response = super
- create_notes_async if new_links.any?
+ after_execute
if response[:status] == :success
response[:message] = format(
@@ -31,6 +31,14 @@ module WorkItems
private
+ def after_execute
+ create_notes_async if new_links.any?
+ end
+
+ def can_admin_work_item_link?(work_item)
+ can?(current_user, :admin_work_item_link, work_item)
+ end
+
def create_notes(_issuable_link)
# no-op notes are created asynchronously
end
@@ -40,7 +48,7 @@ module WorkItems
end
def can_link_item?(work_item)
- return true if can?(current_user, :admin_work_item_link, work_item)
+ return true if can_admin_work_item_link?(work_item)
@errors << format(
_("Item with ID: %{id} cannot be added. You don't have permission to perform this action."),
diff --git a/app/services/work_items/related_work_item_links/destroy_service.rb b/app/services/work_items/related_work_item_links/destroy_service.rb
index 6d1920d01b2..519070c45ff 100644
--- a/app/services/work_items/related_work_item_links/destroy_service.rb
+++ b/app/services/work_items/related_work_item_links/destroy_service.rb
@@ -12,7 +12,7 @@ module WorkItems
end
def execute
- return error(_('No work item found.'), 403) unless can?(current_user, :admin_work_item_link, work_item)
+ return error(_('No work item found.'), 403) unless can_admin_work_item_link?(work_item)
return error(_('No work item IDs provided.'), 409) if params[:item_ids].empty?
destroy_links_for(params[:item_ids])
@@ -28,6 +28,10 @@ module WorkItems
attr_reader :work_item, :current_user, :failed_ids, :removed_ids
+ def can_admin_work_item_link?(resource)
+ can?(current_user, :admin_work_item_link, resource)
+ end
+
def destroy_links_for(item_ids)
destroy_links(source: work_item, target: item_ids, direction: :target)
destroy_links(source: item_ids, target: work_item, direction: :source)
@@ -37,16 +41,22 @@ module WorkItems
WorkItems::RelatedWorkItemLink.for_source_and_target(source, target).each do |link|
linked_item = link.try(direction)
- if can?(current_user, :admin_work_item_link, linked_item)
- link.destroy!
- removed_ids << linked_item.id
- create_notes(link)
+ if can_admin_work_item_link?(linked_item)
+ create_notes(link) if perform_destroy_link(link, linked_item)
else
failed_ids << linked_item.id
end
end
end
+ # Overridden in EE to sync deletion with
+ # related epic links records
+ def perform_destroy_link(link, linked_item)
+ link.destroy!
+ removed_ids << linked_item.id
+ true
+ end
+
def create_notes(link)
SystemNoteService.unrelate_issuable(link.source, link.target, current_user)
SystemNoteService.unrelate_issuable(link.target, link.source, current_user)
@@ -83,3 +93,5 @@ module WorkItems
end
end
end
+
+WorkItems::RelatedWorkItemLinks::DestroyService.prepend_mod_with('WorkItems::RelatedWorkItemLinks::DestroyService')
diff --git a/app/services/work_items/update_service.rb b/app/services/work_items/update_service.rb
index 27b318d280f..5b3b643b819 100644
--- a/app/services/work_items/update_service.rb
+++ b/app/services/work_items/update_service.rb
@@ -6,6 +6,7 @@ module WorkItems
include WidgetableService
def initialize(container:, current_user: nil, params: {}, perform_spam_check: false, widget_params: {})
+ @extra_params = params.delete(:extra_params) || {}
params[:widget_params] = true if widget_params.present?
super(container: container, current_user: current_user, params: params, perform_spam_check: perform_spam_check)
@@ -21,19 +22,20 @@ module WorkItems
else
error(updated_work_item.errors.full_messages, :unprocessable_entity, pass_back: payload(updated_work_item))
end
- rescue ::WorkItems::Widgets::BaseService::WidgetError => e
+ rescue ::WorkItems::Widgets::BaseService::WidgetError, ::Issuable::Callbacks::Base::Error => e
error(e.message, :unprocessable_entity)
end
private
- override :handle_quick_actions
- def handle_quick_actions(work_item)
- # Do not handle quick actions unless the work item is the default Issue.
- # The available quick actions for a work item depend on its type and widgets.
- return unless work_item.work_item_type.default_issue?
+ attr_reader :extra_params
- super
+ override :handle_date_changes
+ def handle_date_changes(work_item)
+ return if work_item.dates_source&.previous_changes.blank? &&
+ work_item.previous_changes.slice('due_date', 'start_date').none?
+
+ GraphqlTriggers.issuable_dates_updated(work_item)
end
def prepare_update_params(work_item)
@@ -53,6 +55,13 @@ module WorkItems
super
end
+ override :associations_before_update
+ def associations_before_update(work_item)
+ super.merge(
+ work_item_parent_id: work_item.work_item_parent&.id
+ )
+ end
+
def transaction_update(work_item, opts = {})
execute_widgets(work_item: work_item, callback: :before_update_in_transaction, widget_params: @widget_params)
@@ -64,6 +73,7 @@ module WorkItems
super
GraphqlTriggers.issuable_title_updated(work_item) if work_item.previous_changes.key?(:title)
+ publish_event(work_item, old_associations)
end
def payload(work_item)
@@ -77,5 +87,25 @@ module WorkItems
author: current_user
)
end
+
+ def publish_event(work_item, old_associations)
+ event = WorkItems::WorkItemUpdatedEvent.new(data: {
+ id: work_item.id,
+ namespace_id: work_item.namespace_id,
+ previous_work_item_parent_id: old_associations[:work_item_parent_id],
+ updated_attributes: work_item.previous_changes&.keys&.map(&:to_s),
+ updated_widgets: @widget_params&.keys&.map(&:to_s)
+ }.tap(&:compact_blank!))
+
+ work_item.run_after_commit_or_now do
+ Gitlab::EventStore.publish(event)
+ end
+ end
+
+ def parent
+ container
+ end
end
end
+
+WorkItems::UpdateService.prepend_mod
diff --git a/app/services/work_items/widgets/base_service.rb b/app/services/work_items/widgets/base_service.rb
index cae6ed7646f..52d52b62b2f 100644
--- a/app/services/work_items/widgets/base_service.rb
+++ b/app/services/work_items/widgets/base_service.rb
@@ -19,7 +19,7 @@ module WorkItems
def new_type_excludes_widget?
return false unless service_params[:work_item_type]
- service_params[:work_item_type].widgets.exclude?(@widget.class)
+ service_params[:work_item_type].widget_classes(work_item.resource_parent).exclude?(@widget.class)
end
def has_permission?(permission)
diff --git a/app/services/work_items/widgets/hierarchy_service/base_service.rb b/app/services/work_items/widgets/hierarchy_service/base_service.rb
index 45393eab58c..94734e51240 100644
--- a/app/services/work_items/widgets/hierarchy_service/base_service.rb
+++ b/app/services/work_items/widgets/hierarchy_service/base_service.rb
@@ -7,12 +7,14 @@ module WorkItems
private
def handle_hierarchy_changes(params)
- return incompatible_args_error if incompatible_args?(params)
+ return incompatible_args_error if params.slice(*mutually_exclusive_args).size > 1
if params.key?(:parent)
update_work_item_parent(params.delete(:parent))
elsif params.key?(:children)
update_work_item_children(params.delete(:children))
+ elsif params.key?(:remove_child)
+ remove_child(params.delete(:remove_child))
else
invalid_args_error(params)
end
@@ -27,32 +29,50 @@ module WorkItems
end
def set_parent(parent)
- ::WorkItems::ParentLinks::CreateService
+ service_response = ::WorkItems::ParentLinks::CreateService
.new(parent, current_user, { target_issuable: work_item })
.execute
+
+ # Reference the parent instead because the error is returned in the child context
+ if service_response[:status] == :error
+ service_response[:message].sub!(/#.* cannot be added/, "#{parent.to_reference} cannot be added")
+ end
+
+ service_response
end
# rubocop: disable CodeReuse/ActiveRecord
- def remove_parent
- link = ::WorkItems::ParentLink.find_by(work_item: work_item)
+ def remove_parent_link(child)
+ link = ::WorkItems::ParentLink.find_by(work_item: child)
return success unless link.present?
::WorkItems::ParentLinks::DestroyService.new(link, current_user).execute
end
# rubocop: enable CodeReuse/ActiveRecord
+ def remove_parent
+ remove_parent_link(work_item)
+ end
+
+ def remove_child(child)
+ remove_parent_link(child)
+ end
+
def update_work_item_children(children)
::WorkItems::ParentLinks::CreateService
.new(work_item, current_user, { issuable_references: children })
.execute
end
- def incompatible_args?(params)
- params[:children] && params[:parent]
+ def mutually_exclusive_args
+ [:children, :parent, :remove_child]
end
def incompatible_args_error
- error(_('A Work Item can be a parent or a child, but not both.'))
+ error(format(
+ _("One and only one of %{params} is required"),
+ params: mutually_exclusive_args.to_sentence(last_word_connector: ' or ')
+ ))
end
def invalid_args_error(params)
diff --git a/app/services/work_items/widgets/labels_service/base_service.rb b/app/services/work_items/widgets/labels_service/base_service.rb
index 2d679c1f18c..a8ce195a7da 100644
--- a/app/services/work_items/widgets/labels_service/base_service.rb
+++ b/app/services/work_items/widgets/labels_service/base_service.rb
@@ -7,18 +7,11 @@ module WorkItems
private
def prepare_params(params: {}, permitted_params: [])
- clear_label_params(params) if new_type_excludes_widget?
-
return if params.blank?
return unless has_permission?(:set_work_item_metadata)
service_params.merge!(params.slice(*permitted_params))
end
-
- def clear_label_params(params)
- params[:remove_label_ids] = @work_item.labels.map(&:id)
- params[:add_label_ids] = []
- end
end
end
end
diff --git a/app/services/work_items/widgets/labels_service/update_service.rb b/app/services/work_items/widgets/labels_service/update_service.rb
index 780451e3eae..241a156aa06 100644
--- a/app/services/work_items/widgets/labels_service/update_service.rb
+++ b/app/services/work_items/widgets/labels_service/update_service.rb
@@ -5,8 +5,17 @@ module WorkItems
module LabelsService
class UpdateService < BaseService
def prepare_update_params(params: {})
+ clear_label_params(params) if new_type_excludes_widget?
+
prepare_params(params: params, permitted_params: %i[add_label_ids remove_label_ids])
end
+
+ private
+
+ def clear_label_params(params)
+ params[:remove_label_ids] = @work_item.labels.map(&:id)
+ params[:add_label_ids] = []
+ end
end
end
end