gitlab.com/gitlab-org/gitlab-foss.git

author     GitLab Bot <gitlab-bot@gitlab.com>  2022-11-17 14:33:21 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-11-17 14:33:21 +0300
commit     7021455bd1ed7b125c55eb1b33c5a01f2bc55ee0 (patch)
tree       5bdc2229f5198d516781f8d24eace62fc7e589e9 /app/services
parent     185b095e93520f96e9cfc31d9c3e69b498cdab7c (diff)

Add latest changes from gitlab-org/gitlab@15-6-stable-ee (v15.6.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/bulk_imports/create_pipeline_trackers_service.rb | 2
-rw-r--r--  app/services/bulk_imports/lfs_objects_export_service.rb | 2
-rw-r--r--  app/services/ci/after_requeue_job_service.rb | 20
-rw-r--r--  app/services/ci/archive_trace_service.rb | 2
-rw-r--r--  app/services/ci/build_erase_service.rb | 4
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 12
-rw-r--r--  app/services/ci/job_artifacts/destroy_associations_service.rb | 2
-rw-r--r--  app/services/ci/job_artifacts/destroy_batch_service.rb | 59
-rw-r--r--  app/services/ci/job_artifacts/track_artifact_report_service.rb | 2
-rw-r--r--  app/services/ci/list_config_variables_service.rb | 2
-rw-r--r--  app/services/ci/pipeline_artifacts/coverage_report_service.rb | 2
-rw-r--r--  app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb | 4
-rw-r--r--  app/services/ci/pipeline_schedules/take_ownership_service.rb | 34
-rw-r--r--  app/services/ci/pipeline_trigger_service.rb | 2
-rw-r--r--  app/services/ci/play_build_service.rb | 24
-rw-r--r--  app/services/ci/process_build_service.rb | 4
-rw-r--r--  app/services/ci/register_job_service.rb | 4
-rw-r--r--  app/services/ci/retry_job_service.rb | 16
-rw-r--r--  app/services/ci/runners/bulk_delete_runners_service.rb | 54
-rw-r--r--  app/services/ci/runners/set_runner_associated_projects_service.rb | 2
-rw-r--r--  app/services/clusters/applications/check_ingress_ip_address_service.rb | 46
-rw-r--r--  app/services/clusters/applications/check_installation_progress_service.rb | 42
-rw-r--r--  app/services/clusters/applications/check_uninstall_progress_service.rb | 42
-rw-r--r--  app/services/clusters/applications/check_upgrade_progress_service.rb | 71
-rw-r--r--  app/services/clusters/applications/create_service.rb | 18
-rw-r--r--  app/services/clusters/applications/patch_service.rb | 32
-rw-r--r--  app/services/clusters/applications/prometheus_update_service.rb | 38
-rw-r--r--  app/services/clusters/applications/update_service.rb | 17
-rw-r--r--  app/services/clusters/kubernetes/configure_istio_ingress_service.rb | 112
-rw-r--r--  app/services/concerns/alert_management/responses.rb | 4
-rw-r--r--  app/services/dependency_proxy/find_cached_manifest_service.rb | 1
-rw-r--r--  app/services/deployments/create_for_build_service.rb | 66
-rw-r--r--  app/services/environments/create_for_build_service.rb | 40
-rw-r--r--  app/services/environments/schedule_to_delete_review_apps_service.rb | 2
-rw-r--r--  app/services/event_create_service.rb | 67
-rw-r--r--  app/services/git/base_hooks_service.rb | 14
-rw-r--r--  app/services/google_cloud/create_service_accounts_service.rb | 4
-rw-r--r--  app/services/google_cloud/generate_pipeline_service.rb | 7
-rw-r--r--  app/services/google_cloud/setup_cloudsql_instance_service.rb | 8
-rw-r--r--  app/services/groups/create_service.rb | 11
-rw-r--r--  app/services/groups/update_service.rb | 4
-rw-r--r--  app/services/incident_management/timeline_event_tags/base_service.rb | 27
-rw-r--r--  app/services/incident_management/timeline_event_tags/create_service.rb | 32
-rw-r--r--  app/services/incident_management/timeline_events/create_service.rb | 66
-rw-r--r--  app/services/incident_management/timeline_events/update_service.rb | 9
-rw-r--r--  app/services/issuable/bulk_update_service.rb | 5
-rw-r--r--  app/services/issuable/discussions_list_service.rb | 70
-rw-r--r--  app/services/issues/update_service.rb | 1
-rw-r--r--  app/services/jira_import/start_import_service.rb | 2
-rw-r--r--  app/services/labels/transfer_service.rb | 4
-rw-r--r--  app/services/loose_foreign_keys/process_deleted_records_service.rb | 18
-rw-r--r--  app/services/markup/rendering_service.rb | 79
-rw-r--r--  app/services/members/approve_access_request_service.rb | 6
-rw-r--r--  app/services/members/destroy_service.rb | 12
-rw-r--r--  app/services/members/update_service.rb | 89
-rw-r--r--  app/services/merge_requests/add_context_service.rb | 2
-rw-r--r--  app/services/merge_requests/after_create_service.rb | 2
-rw-r--r--  app/services/merge_requests/approval_service.rb | 2
-rw-r--r--  app/services/merge_requests/base_service.rb | 6
-rw-r--r--  app/services/merge_requests/create_service.rb | 21
-rw-r--r--  app/services/merge_requests/mergeability/run_checks_service.rb | 3
-rw-r--r--  app/services/merge_requests/mergeability_check_service.rb | 3
-rw-r--r--  app/services/merge_requests/remove_approval_service.rb | 2
-rw-r--r--  app/services/merge_requests/update_assignees_service.rb | 13
-rw-r--r--  app/services/metrics/dashboard/self_monitoring_dashboard_service.rb | 2
-rw-r--r--  app/services/milestones/transfer_service.rb | 2
-rw-r--r--  app/services/namespaces/statistics_refresher_service.rb | 1
-rw-r--r--  app/services/notes/create_service.rb | 2
-rw-r--r--  app/services/notification_service.rb | 10
-rw-r--r--  app/services/packages/debian/create_distribution_service.rb | 2
-rw-r--r--  app/services/packages/debian/update_distribution_service.rb | 2
-rw-r--r--  app/services/packages/maven/metadata/base_create_xml_service.rb | 5
-rw-r--r--  app/services/packages/maven/metadata/create_versions_xml_service.rb | 15
-rw-r--r--  app/services/packages/npm/create_package_service.rb | 2
-rw-r--r--  app/services/packages/rpm/parse_package_service.rb | 3
-rw-r--r--  app/services/packages/rpm/repository_metadata/base_builder.rb | 46
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_filelist_xml.rb | 14
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_filelist_xml_service.rb | 39
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_other_xml.rb | 14
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_other_xml_service.rb | 31
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_primary_xml_service.rb (renamed from app/services/packages/rpm/repository_metadata/build_primary_xml.rb) | 28
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_repomd_xml_service.rb (renamed from app/services/packages/rpm/repository_metadata/build_repomd_xml.rb) | 14
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_xml_base_service.rb | 22
-rw-r--r--  app/services/packages/rpm/repository_metadata/update_xml_service.rb | 62
-rw-r--r--  app/services/personal_access_tokens/revoke_service.rb | 2
-rw-r--r--  app/services/projects/lfs_pointers/lfs_download_link_list_service.rb | 6
-rw-r--r--  app/services/projects/move_users_star_projects_service.rb | 4
-rw-r--r--  app/services/projects/prometheus/alerts/notify_service.rb | 10
-rw-r--r--  app/services/projects/unlink_fork_service.rb | 2
-rw-r--r--  app/services/protected_branches/api_service.rb | 33
-rw-r--r--  app/services/protected_branches/cache_service.rb | 29
-rw-r--r--  app/services/protected_refs/access_level_params.rb | 4
-rw-r--r--  app/services/resource_events/base_change_timebox_service.rb | 4
-rw-r--r--  app/services/resource_events/change_milestone_service.rb | 6
-rw-r--r--  app/services/service_ping/submit_service.rb | 2
-rw-r--r--  app/services/snippets/create_service.rb | 2
-rw-r--r--  app/services/spam/spam_verdict_service.rb | 2
-rw-r--r--  app/services/system_notes/issuables_service.rb | 8
-rw-r--r--  app/services/tags/create_service.rb | 2
-rw-r--r--  app/services/todo_service.rb | 7
-rw-r--r--  app/services/two_factor/base_service.rb | 4
-rw-r--r--  app/services/two_factor/destroy_service.rb | 6
-rw-r--r--  app/services/user_project_access_changed_service.rb | 5
-rw-r--r--  app/services/users/build_service.rb | 5
-rw-r--r--  app/services/users/destroy_service.rb | 65
-rw-r--r--  app/services/users/migrate_records_to_ghost_user_in_batches_service.rb | 25
-rw-r--r--  app/services/users/migrate_to_ghost_user_service.rb | 113
-rw-r--r--  app/services/web_hook_service.rb | 15
-rw-r--r--  app/services/work_items/create_service.rb | 7
-rw-r--r--  app/services/work_items/widgets/hierarchy_service/base_service.rb | 11
-rw-r--r--  app/services/work_items/widgets/milestone_service/base_service.rb | 39
-rw-r--r--  app/services/work_items/widgets/milestone_service/create_service.rb | 13
-rw-r--r--  app/services/work_items/widgets/milestone_service/update_service.rb | 13
113 files changed, 1161 insertions, 972 deletions
diff --git a/app/services/bulk_imports/create_pipeline_trackers_service.rb b/app/services/bulk_imports/create_pipeline_trackers_service.rb
index f5b944e6df5..7fa62e0ce8a 100644
--- a/app/services/bulk_imports/create_pipeline_trackers_service.rb
+++ b/app/services/bulk_imports/create_pipeline_trackers_service.rb
@@ -55,6 +55,8 @@ module BulkImports
message: 'Pipeline skipped as source instance version not compatible with pipeline',
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
+ bulk_import_entity_type: entity.source_type,
+ source_full_path: entity.source_full_path,
pipeline_name: pipeline[:pipeline],
minimum_source_version: minimum_version,
maximum_source_version: maximum_version,
diff --git a/app/services/bulk_imports/lfs_objects_export_service.rb b/app/services/bulk_imports/lfs_objects_export_service.rb
index 1f745201c8a..b3b7cddf2d9 100644
--- a/app/services/bulk_imports/lfs_objects_export_service.rb
+++ b/app/services/bulk_imports/lfs_objects_export_service.rb
@@ -60,7 +60,7 @@ module BulkImports
def write_lfs_json
filepath = File.join(export_path, "#{BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION}.json")
- File.write(filepath, lfs_json.to_json)
+ File.write(filepath, Gitlab::Json.dump(lfs_json))
end
end
end
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
index 9d54207d75d..4374ccd52e0 100644
--- a/app/services/ci/after_requeue_job_service.rb
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -23,8 +23,6 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def dependent_jobs
- return legacy_dependent_jobs unless ::Feature.enabled?(:ci_requeue_with_dag_object_hierarchy, project)
-
ordered_by_dag(
@processable.pipeline.processables
.from_union(needs_dependent_jobs, stage_dependent_jobs)
@@ -50,24 +48,6 @@ module Ci
).descendants
end
- def legacy_skipped_jobs
- @legacy_skipped_jobs ||= @processable.pipeline.processables.skipped
- end
-
- def legacy_dependent_jobs
- ordered_by_dag(
- legacy_stage_dependent_jobs.or(legacy_needs_dependent_jobs).ordered_by_stage.preload(:needs)
- )
- end
-
- def legacy_stage_dependent_jobs
- legacy_skipped_jobs.after_stage(@processable.stage_idx)
- end
-
- def legacy_needs_dependent_jobs
- legacy_skipped_jobs.scheduling_type_dag.with_needs([@processable.name])
- end
-
def ordered_by_dag(jobs)
sorted_job_names = sort_jobs(jobs).each_with_index.to_h
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 566346a4b09..3d548c824c8 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -68,7 +68,7 @@ module Ci
Gitlab::ErrorTracking
.track_and_raise_for_dev_exception(error,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
- job_id: job.id )
+ job_id: job.id)
end
end
end
diff --git a/app/services/ci/build_erase_service.rb b/app/services/ci/build_erase_service.rb
index 8a468e094eb..71b4c5481b3 100644
--- a/app/services/ci/build_erase_service.rb
+++ b/app/services/ci/build_erase_service.rb
@@ -33,9 +33,7 @@ module Ci
attr_reader :build, :current_user
def destroy_artifacts
- # fix_expire_at is false because in this case we want to explicitly delete the job artifacts
- # this flag is a workaround that will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/355833
- Ci::JobArtifacts::DestroyBatchService.new(build.job_artifacts, fix_expire_at: false).execute
+ Ci::JobArtifacts::DestroyBatchService.new(build.job_artifacts).execute
end
def erase_trace!
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 0b49beffcb5..4106dfe0ecc 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -30,6 +30,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Limit::Deployments,
Gitlab::Ci::Pipeline::Chain::Validate::External,
Gitlab::Ci::Pipeline::Chain::Populate,
+ Gitlab::Ci::Pipeline::Chain::PopulateMetadata,
Gitlab::Ci::Pipeline::Chain::StopDryRun,
Gitlab::Ci::Pipeline::Chain::EnsureEnvironments,
Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
@@ -118,17 +119,6 @@ module Ci
end
# rubocop: enable Metrics/ParameterLists
- def execute!(*args, &block)
- source = args[0]
- params = Hash(args[1])
-
- execute(source, **params, &block).tap do |response|
- unless response.payload.persisted?
- raise CreateError, pipeline.full_error_messages
- end
- end
- end
-
private
def commit
diff --git a/app/services/ci/job_artifacts/destroy_associations_service.rb b/app/services/ci/job_artifacts/destroy_associations_service.rb
index 08d7f7f6f02..794d24eadf2 100644
--- a/app/services/ci/job_artifacts/destroy_associations_service.rb
+++ b/app/services/ci/job_artifacts/destroy_associations_service.rb
@@ -12,7 +12,7 @@ module Ci
def destroy_records
@job_artifacts_relation.each_batch(of: BATCH_SIZE) do |relation|
- service = Ci::JobArtifacts::DestroyBatchService.new(relation, pick_up_at: Time.current, fix_expire_at: false)
+ service = Ci::JobArtifacts::DestroyBatchService.new(relation, pick_up_at: Time.current)
result = service.execute(update_stats: false)
updates = result[:statistics_updates]
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
index 54ec2c671c6..e0307d9bd53 100644
--- a/app/services/ci/job_artifacts/destroy_batch_service.rb
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -17,10 +17,9 @@ module Ci
# +pick_up_at+:: When to pick up for deletion of files
# Returns:
# +Hash+:: A hash with status and destroyed_artifacts_count keys
- def initialize(job_artifacts, pick_up_at: nil, fix_expire_at: fix_expire_at?, skip_projects_on_refresh: false)
+ def initialize(job_artifacts, pick_up_at: nil, skip_projects_on_refresh: false)
@job_artifacts = job_artifacts.with_destroy_preloads.to_a
@pick_up_at = pick_up_at
- @fix_expire_at = fix_expire_at
@skip_projects_on_refresh = skip_projects_on_refresh
end
@@ -32,9 +31,7 @@ module Ci
track_artifacts_undergoing_stats_refresh
end
- # Detect and fix artifacts that had `expire_at` wrongly backfilled by migration
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47723
- detect_and_fix_wrongly_expired_artifacts
+ exclude_trace_artifacts
return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?
@@ -113,55 +110,9 @@ module Ci
end
end
- # This detects and fixes job artifacts that have `expire_at` wrongly backfilled by the migration
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47723.
- # These job artifacts will not be deleted and will have their `expire_at` removed.
- #
- # The migration would have backfilled `expire_at`
- # to midnight on the 22nd of the month of the local timezone,
- # storing it as UTC time in the database.
- #
- # If the timezone setting has changed since the migration,
- # the `expire_at` stored in the database could have changed to a different local time other than midnight.
- # For example:
- # - changing timezone from UTC+02:00 to UTC+02:30 would change the `expire_at` in local time 00:00:00 to 00:30:00.
- # - changing timezone from UTC+00:00 to UTC-01:00 would change the `expire_at` in local time 00:00:00 to 23:00:00 on the previous day (21st).
- #
- # Therefore job artifacts that have `expire_at` exactly on the 00, 30 or 45 minute mark
- # on the dates 21, 22, 23 of the month will not be deleted.
- # https://en.wikipedia.org/wiki/List_of_UTC_time_offsets
- def detect_and_fix_wrongly_expired_artifacts
- return unless @fix_expire_at
-
- wrongly_expired_artifacts, @job_artifacts = @job_artifacts.partition { |artifact| wrongly_expired?(artifact) }
-
- remove_expire_at(wrongly_expired_artifacts) if wrongly_expired_artifacts.any?
- end
-
- def fix_expire_at?
- Feature.enabled?(:ci_detect_wrongly_expired_artifacts)
- end
-
- def wrongly_expired?(artifact)
- return false unless artifact.expire_at.present?
-
- # Although traces should never have expiration dates that don't match time & date here.
- # we can explicitly exclude them by type since they should never be destroyed.
- artifact.trace? || (match_date?(artifact.expire_at) && match_time?(artifact.expire_at))
- end
-
- def match_date?(expire_at)
- [21, 22, 23].include?(expire_at.day)
- end
-
- def match_time?(expire_at)
- %w[00:00.000 30:00.000 45:00.000].include?(expire_at.strftime('%M:%S.%L'))
- end
-
- def remove_expire_at(artifacts)
- Ci::JobArtifact.id_in(artifacts).update_all(expire_at: nil)
-
- Gitlab::AppLogger.info(message: "Fixed expire_at from artifacts.", fixed_artifacts_expire_at_count: artifacts.count)
+ # Traces should never be destroyed.
+ def exclude_trace_artifacts
+ _trace_artifacts, @job_artifacts = @job_artifacts.partition(&:trace?)
end
def track_artifacts_undergoing_stats_refresh
diff --git a/app/services/ci/job_artifacts/track_artifact_report_service.rb b/app/services/ci/job_artifacts/track_artifact_report_service.rb
index 1be1d98394f..0230a5e19ce 100644
--- a/app/services/ci/job_artifacts/track_artifact_report_service.rb
+++ b/app/services/ci/job_artifacts/track_artifact_report_service.rb
@@ -5,7 +5,7 @@ module Ci
class TrackArtifactReportService
include Gitlab::Utils::UsageData
- REPORT_TRACKED = %i[test].freeze
+ REPORT_TRACKED = %i[test coverage].freeze
def execute(pipeline)
REPORT_TRACKED.each do |report|
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
index 3890882b3d4..df4963d1b33 100644
--- a/app/services/ci/list_config_variables_service.rb
+++ b/app/services/ci/list_config_variables_service.rb
@@ -30,7 +30,7 @@ module Ci
user: current_user,
sha: sha).execute
- result.valid? ? result.variables_with_data : {}
+ result.valid? ? result.root_variables_with_prefill_data : {}
end
# Required for ReactiveCaching, it is also used in `reactive_cache_worker_finder`
diff --git a/app/services/ci/pipeline_artifacts/coverage_report_service.rb b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
index 9c6fdb7a405..de66a4cb045 100644
--- a/app/services/ci/pipeline_artifacts/coverage_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
@@ -39,7 +39,7 @@ module Ci
def carrierwave_file
strong_memoize(:carrier_wave_file) do
CarrierWaveStringFile.new_file(
- file_content: report.to_json,
+ file_content: Gitlab::Json.dump(report),
filename: Ci::PipelineArtifact::DEFAULT_FILE_NAMES.fetch(:code_coverage),
content_type: 'application/json'
)
diff --git a/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb b/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
index a0746ef32b2..57b663dc293 100644
--- a/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
@@ -77,9 +77,9 @@ module Ci
end
def build_quality_mr_diff_report(mr_diff_report)
- mr_diff_report.each_with_object({}) do |diff_report, hash|
+ Gitlab::Json.dump(mr_diff_report.each_with_object({}) do |diff_report, hash|
hash[diff_report.first] = Ci::CodequalityMrDiffReportSerializer.new.represent(diff_report.second) # rubocop: disable CodeReuse/Serializer
- end.to_json
+ end)
end
end
end
diff --git a/app/services/ci/pipeline_schedules/take_ownership_service.rb b/app/services/ci/pipeline_schedules/take_ownership_service.rb
new file mode 100644
index 00000000000..9b4001c74bd
--- /dev/null
+++ b/app/services/ci/pipeline_schedules/take_ownership_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineSchedules
+ class TakeOwnershipService
+ def initialize(schedule, user)
+ @schedule = schedule
+ @user = user
+ end
+
+ def execute
+ return forbidden unless allowed?
+
+ if schedule.update(owner: user)
+ ServiceResponse.success(payload: schedule)
+ else
+ ServiceResponse.error(message: schedule.errors.full_messages)
+ end
+ end
+
+ private
+
+ attr_reader :schedule, :user
+
+ def allowed?
+ user.can?(:take_ownership_pipeline_schedule, schedule)
+ end
+
+ def forbidden
+ ServiceResponse.error(message: _('Failed to change the owner'), reason: :access_denied)
+ end
+ end
+ end
+end
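
Usage sketch for the new Ci::PipelineSchedules::TakeOwnershipService added above (illustrative only, not part of the commit; `schedule` and `user` are assumed to be an existing Ci::PipelineSchedule and a User):

    # Transfer ownership of a pipeline schedule to `user`; the service checks the
    # :take_ownership_pipeline_schedule ability before updating the owner.
    response = Ci::PipelineSchedules::TakeOwnershipService.new(schedule, user).execute

    if response.success?
      response.payload   # the schedule, now owned by `user`
    else
      response.message   # validation errors, or 'Failed to change the owner'
      response.reason    # :access_denied when the permission check fails
    end
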
diff --git a/app/services/ci/pipeline_trigger_service.rb b/app/services/ci/pipeline_trigger_service.rb
index 39ac9bf33e9..d7065680053 100644
--- a/app/services/ci/pipeline_trigger_service.rb
+++ b/app/services/ci/pipeline_trigger_service.rb
@@ -93,7 +93,7 @@ module Ci
def payload_variable
{ key: PAYLOAD_VARIABLE_KEY,
- value: params.except(*PAYLOAD_VARIABLE_HIDDEN_PARAMS).to_json,
+ value: Gitlab::Json.dump(params.except(*PAYLOAD_VARIABLE_HIDDEN_PARAMS)),
variable_type: :file }
end
diff --git a/app/services/ci/play_build_service.rb b/app/services/ci/play_build_service.rb
index fbf2aad1991..b7aec57f3e3 100644
--- a/app/services/ci/play_build_service.rb
+++ b/app/services/ci/play_build_service.rb
@@ -5,21 +5,27 @@ module Ci
def execute(build, job_variables_attributes = nil)
check_access!(build, job_variables_attributes)
- # Try to enqueue the build, otherwise create a duplicate.
- #
- if build.enqueue
- build.tap do |build|
- build.update!(user: current_user, job_variables_attributes: job_variables_attributes || [])
-
- AfterRequeueJobService.new(project, current_user).execute(build)
- end
+ if build.can_enqueue?
+ build.user = current_user
+ build.job_variables_attributes = job_variables_attributes || []
+ build.enqueue!
+
+ AfterRequeueJobService.new(project, current_user).execute(build)
+
+ build
else
- Ci::RetryJobService.new(project, current_user).execute(build)[:job]
+ retry_build(build)
end
+ rescue StateMachines::InvalidTransition
+ retry_build(build.reset)
end
private
+ def retry_build(build)
+ Ci::RetryJobService.new(project, current_user).execute(build)[:job]
+ end
+
def check_access!(build, job_variables_attributes)
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, build)
diff --git a/app/services/ci/process_build_service.rb b/app/services/ci/process_build_service.rb
index 22cd267806d..cb51d918fc2 100644
--- a/app/services/ci/process_build_service.rb
+++ b/app/services/ci/process_build_service.rb
@@ -15,6 +15,8 @@ module Ci
private
def process(build)
+ return enqueue(build) if Feature.enabled?(:ci_retry_job_fix, project) && build.enqueue_immediately?
+
if build.schedulable?
build.schedule
elsif build.action?
@@ -25,7 +27,7 @@ module Ci
end
def enqueue(build)
- return build.drop!(:failed_outdated_deployment_job) if build.prevent_rollback_deployment?
+ return build.drop!(:failed_outdated_deployment_job) if build.outdated_deployment?
build.enqueue
end
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 0bd4bf8cc86..f11577feb88 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -42,7 +42,7 @@ module Ci
if !db_all_caught_up && !result.build
metrics.increment_queue_operation(:queue_replication_lag)
- ::Ci::RegisterJobService::Result.new(nil, false) # rubocop:disable Cop/AvoidReturnFromBlocks
+ ::Ci::RegisterJobService::Result.new(nil, nil, false) # rubocop:disable Cop/AvoidReturnFromBlocks
else
result
end
@@ -226,7 +226,7 @@ module Ci
log_artifacts_context(build)
log_build_dependencies_size(presented_build)
- build_json = ::API::Entities::Ci::JobRequest::Response.new(presented_build).to_json
+ build_json = Gitlab::Json.dump(::API::Entities::Ci::JobRequest::Response.new(presented_build))
Result.new(build, build_json, true)
end
diff --git a/app/services/ci/retry_job_service.rb b/app/services/ci/retry_job_service.rb
index 25bda8a6380..74ebaef48b1 100644
--- a/app/services/ci/retry_job_service.rb
+++ b/app/services/ci/retry_job_service.rb
@@ -19,7 +19,7 @@ module Ci
end
# rubocop: disable CodeReuse/ActiveRecord
- def clone!(job, variables: [])
+ def clone!(job, variables: [], enqueue_if_actionable: false)
# Cloning a job requires a strict type check to ensure
# the attributes being used for the clone are taken straight
# from the model and not overridden by other abstractions.
@@ -28,6 +28,9 @@ module Ci
check_access!(job)
new_job = job.clone(current_user: current_user, new_job_variables_attributes: variables)
+ if Feature.enabled?(:ci_retry_job_fix, project) && enqueue_if_actionable && new_job.action?
+ new_job.set_enqueue_immediately!
+ end
new_job.run_after_commit do
::Ci::CopyCrossDatabaseAssociationsService.new.execute(job, new_job)
@@ -56,13 +59,20 @@ module Ci
def check_assignable_runners!(job); end
def retry_job(job, variables: [])
- clone!(job, variables: variables).tap do |new_job|
+ clone!(job, variables: variables, enqueue_if_actionable: true).tap do |new_job|
check_assignable_runners!(new_job) if new_job.is_a?(Ci::Build)
next if new_job.failed?
- Gitlab::OptimisticLocking.retry_lock(new_job, name: 'retry_build', &:enqueue)
+ Gitlab::OptimisticLocking.retry_lock(new_job, name: 'retry_build', &:enqueue) if Feature.disabled?(
+ :ci_retry_job_fix, project)
+
AfterRequeueJobService.new(project, current_user).execute(job)
+
+ if Feature.enabled?(:ci_retry_job_fix, project)
+ Ci::PipelineCreation::StartPipelineService.new(job.pipeline).execute
+ new_job.reset
+ end
end
end
diff --git a/app/services/ci/runners/bulk_delete_runners_service.rb b/app/services/ci/runners/bulk_delete_runners_service.rb
index ce07aa541c2..b6b07746e61 100644
--- a/app/services/ci/runners/bulk_delete_runners_service.rb
+++ b/app/services/ci/runners/bulk_delete_runners_service.rb
@@ -7,29 +7,69 @@ module Ci
RUNNER_LIMIT = 50
- # @param runners [Array<Ci::Runner, Integer>] the runners to unregister/destroy
- def initialize(runners:)
+ # @param runners [Array<Ci::Runner>] the runners to unregister/destroy
+ # @param current_user [User] the user performing the operation
+ def initialize(runners:, current_user:)
@runners = runners
+ @current_user = current_user
end
def execute
if @runners
# Delete a few runners immediately
- return ServiceResponse.success(payload: delete_runners)
+ return delete_runners
end
- ServiceResponse.success(payload: { deleted_count: 0, deleted_ids: [] })
+ ServiceResponse.success(payload: { deleted_count: 0, deleted_ids: [], errors: [] })
end
private
def delete_runners
+ runner_count = @runners.limit(RUNNER_LIMIT + 1).count
+ authorized_runners_ids, unauthorized_runners_ids = compute_authorized_runners
# rubocop:disable CodeReuse/ActiveRecord
- runners_to_be_deleted = Ci::Runner.where(id: @runners).limit(RUNNER_LIMIT)
+ runners_to_be_deleted =
+ Ci::Runner
+ .where(id: authorized_runners_ids)
+ .preload([:taggings, :runner_namespaces, :runner_projects])
# rubocop:enable CodeReuse/ActiveRecord
- deleted_ids = runners_to_be_deleted.destroy_all.map(&:id) # rubocop: disable Cop/DestroyAll
+ deleted_ids = runners_to_be_deleted.destroy_all.map(&:id) # rubocop:disable Cop/DestroyAll
- { deleted_count: deleted_ids.count, deleted_ids: deleted_ids }
+ ServiceResponse.success(
+ payload: {
+ deleted_count: deleted_ids.count,
+ deleted_ids: deleted_ids,
+ errors: error_messages(runner_count, authorized_runners_ids, unauthorized_runners_ids)
+ })
+ end
+
+ def compute_authorized_runners
+ # rubocop:disable CodeReuse/ActiveRecord
+ @current_user.ci_owned_runners.load # preload the owned runners to avoid an N+1
+ authorized_runners, unauthorized_runners =
+ @runners.limit(RUNNER_LIMIT)
+ .partition { |runner| Ability.allowed?(@current_user, :delete_runner, runner) }
+ # rubocop:enable CodeReuse/ActiveRecord
+
+ [authorized_runners.map(&:id), unauthorized_runners.map(&:id)]
+ end
+
+ def error_messages(runner_count, authorized_runners_ids, unauthorized_runners_ids)
+ errors = []
+
+ if runner_count > RUNNER_LIMIT
+ errors << "Can only delete up to #{RUNNER_LIMIT} runners per call. Ignored the remaining runner(s)."
+ end
+
+ if authorized_runners_ids.empty?
+ errors << "User does not have permission to delete any of the runners"
+ elsif unauthorized_runners_ids.any?
+ failed_ids = unauthorized_runners_ids.map { |runner_id| "##{runner_id}" }.join(', ')
+ errors << "User does not have permission to delete runner(s) #{failed_ids}"
+ end
+
+ errors
end
end
end
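
Usage sketch for the updated Ci::Runners::BulkDeleteRunnersService (illustrative only, not part of the commit; `runners` is assumed to be a Ci::Runner relation and `current_user` a User):

    # The service now authorizes each runner against `current_user` and reports
    # per-call errors alongside the deleted ids.
    result = Ci::Runners::BulkDeleteRunnersService
      .new(runners: runners, current_user: current_user)
      .execute

    result.payload[:deleted_count]  # number of runners destroyed (at most RUNNER_LIMIT)
    result.payload[:deleted_ids]    # ids of the destroyed runners
    result.payload[:errors]         # limit/permission messages; empty when everything was allowed
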
diff --git a/app/services/ci/runners/set_runner_associated_projects_service.rb b/app/services/ci/runners/set_runner_associated_projects_service.rb
index 7930776749d..5e33fdae2f4 100644
--- a/app/services/ci/runners/set_runner_associated_projects_service.rb
+++ b/app/services/ci/runners/set_runner_associated_projects_service.rb
@@ -17,7 +17,7 @@ module Ci
return ServiceResponse.error(message: 'user not allowed to assign runner', http_status: :forbidden)
end
- return ServiceResponse.success if project_ids.blank?
+ return ServiceResponse.success if project_ids.nil?
set_associated_projects
end
diff --git a/app/services/clusters/applications/check_ingress_ip_address_service.rb b/app/services/clusters/applications/check_ingress_ip_address_service.rb
deleted file mode 100644
index e254a0358a0..00000000000
--- a/app/services/clusters/applications/check_ingress_ip_address_service.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class CheckIngressIpAddressService < BaseHelmService
- include Gitlab::Utils::StrongMemoize
-
- Error = Class.new(StandardError)
-
- LEASE_TIMEOUT = 15.seconds.to_i
-
- def execute
- return if app.external_ip
- return if app.external_hostname
- return unless try_obtain_lease
-
- app.external_ip = ingress_ip if ingress_ip
- app.external_hostname = ingress_hostname if ingress_hostname
-
- app.save! if app.changed?
- end
-
- private
-
- def try_obtain_lease
- Gitlab::ExclusiveLease
- .new("check_ingress_ip_address_service:#{app.id}", timeout: LEASE_TIMEOUT)
- .try_obtain
- end
-
- def ingress_ip
- ingress_service&.ip
- end
-
- def ingress_hostname
- ingress_service&.hostname
- end
-
- def ingress_service
- strong_memoize(:ingress_service) do
- app.ingress_service.status.loadBalancer.ingress&.first
- end
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/check_installation_progress_service.rb b/app/services/clusters/applications/check_installation_progress_service.rb
deleted file mode 100644
index 10a12f30956..00000000000
--- a/app/services/clusters/applications/check_installation_progress_service.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class CheckInstallationProgressService < CheckProgressService
- private
-
- def operation_in_progress?
- app.installing? || app.updating?
- end
-
- def on_success
- app.make_installed!
-
- Gitlab::Tracking.event('cluster:applications', "cluster_application_#{app.name}_installed")
- ensure
- remove_installation_pod
- end
-
- def check_timeout
- if timed_out?
- app.make_errored!("Operation timed out. Check pod logs for #{pod_name} for more details.")
- else
- ClusterWaitForAppInstallationWorker.perform_in(
- ClusterWaitForAppInstallationWorker::INTERVAL, app.name, app.id)
- end
- end
-
- def pod_name
- install_command.pod_name
- end
-
- def timed_out?
- Time.current.utc - app.updated_at.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
- end
-
- def remove_installation_pod
- helm_api.delete_pod!(pod_name)
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/check_uninstall_progress_service.rb b/app/services/clusters/applications/check_uninstall_progress_service.rb
deleted file mode 100644
index cd213c3ebbf..00000000000
--- a/app/services/clusters/applications/check_uninstall_progress_service.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class CheckUninstallProgressService < CheckProgressService
- private
-
- def operation_in_progress?
- app.uninstalling?
- end
-
- def on_success
- app.post_uninstall
- app.destroy!
- rescue StandardError => e
- app.make_errored!(_('Application uninstalled but failed to destroy: %{error_message}') % { error_message: e.message })
- ensure
- remove_uninstallation_pod
- end
-
- def check_timeout
- if timed_out?
- app.make_errored!(_('Operation timed out. Check pod logs for %{pod_name} for more details.') % { pod_name: pod_name })
- else
- WaitForUninstallAppWorker.perform_in(WaitForUninstallAppWorker::INTERVAL, app.name, app.id)
- end
- end
-
- def pod_name
- app.uninstall_command.pod_name
- end
-
- def timed_out?
- Time.current.utc - app.updated_at.utc > WaitForUninstallAppWorker::TIMEOUT
- end
-
- def remove_uninstallation_pod
- helm_api.delete_pod!(pod_name)
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/check_upgrade_progress_service.rb b/app/services/clusters/applications/check_upgrade_progress_service.rb
deleted file mode 100644
index c4fd234b302..00000000000
--- a/app/services/clusters/applications/check_upgrade_progress_service.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class CheckUpgradeProgressService < BaseHelmService
- def execute
- return unless app.updating?
-
- case phase
- when ::Gitlab::Kubernetes::Pod::SUCCEEDED
- on_success
- when ::Gitlab::Kubernetes::Pod::FAILED
- on_failed
- else
- check_timeout
- end
- rescue ::Kubeclient::HttpError => e
- app.make_update_errored!("Kubernetes error: #{e.message}") unless app.update_errored?
- end
-
- private
-
- def on_success
- app.make_installed!
- ensure
- remove_pod
- end
-
- def on_failed
- app.make_update_errored!(errors || 'Update silently failed')
- ensure
- remove_pod
- end
-
- def check_timeout
- if timed_out?
- begin
- app.make_update_errored!('Update timed out')
- ensure
- remove_pod
- end
- else
- ::ClusterWaitForAppUpdateWorker.perform_in(
- ::ClusterWaitForAppUpdateWorker::INTERVAL, app.name, app.id)
- end
- end
-
- def timed_out?
- Time.current.utc - app.updated_at.to_time.utc > ::ClusterWaitForAppUpdateWorker::TIMEOUT
- end
-
- def remove_pod
- helm_api.delete_pod!(pod_name)
- rescue StandardError
- # no-op
- end
-
- def phase
- helm_api.status(pod_name)
- end
-
- def errors
- helm_api.log(pod_name)
- end
-
- def pod_name
- @pod_name ||= patch_command.pod_name
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/create_service.rb b/app/services/clusters/applications/create_service.rb
deleted file mode 100644
index 2a626a402e4..00000000000
--- a/app/services/clusters/applications/create_service.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class CreateService < Clusters::Applications::BaseService
- private
-
- def worker_class(application)
- application.updateable? ? ClusterUpgradeAppWorker : ClusterInstallAppWorker
- end
-
- def builder
- cluster.public_send(application_class.association_name) || # rubocop:disable GitlabSecurity/PublicSend
- cluster.public_send(:"build_application_#{application_name}") # rubocop:disable GitlabSecurity/PublicSend
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/patch_service.rb b/app/services/clusters/applications/patch_service.rb
deleted file mode 100644
index fbea18bae6b..00000000000
--- a/app/services/clusters/applications/patch_service.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class PatchService < BaseHelmService
- def execute
- return unless app.scheduled?
-
- app.make_updating!
-
- patch
- end
-
- private
-
- def patch
- log_event(:begin_patch)
- helm_api.update(update_command)
-
- log_event(:schedule_wait_for_patch)
- ClusterWaitForAppInstallationWorker.perform_in(
- ClusterWaitForAppInstallationWorker::INTERVAL, app.name, app.id)
- rescue Kubeclient::HttpError => e
- log_error(e)
- app.make_errored!(_('Kubernetes error: %{error_code}') % { error_code: e.error_code })
- rescue StandardError => e
- log_error(e)
- app.make_errored!(_('Failed to update.'))
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/prometheus_update_service.rb b/app/services/clusters/applications/prometheus_update_service.rb
deleted file mode 100644
index b8b50f06d72..00000000000
--- a/app/services/clusters/applications/prometheus_update_service.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- # Deprecated, to be removed in %14.0 as part of https://gitlab.com/groups/gitlab-org/-/epics/4280
- class PrometheusUpdateService < BaseHelmService
- attr_accessor :project
-
- def initialize(app, project)
- super(app)
- @project = project
- end
-
- def execute
- raise NotImplementedError, 'Externally installed prometheus should not be modified!' unless app.managed_prometheus?
-
- app.make_updating!
-
- helm_api.update(patch_command(values))
-
- ::ClusterWaitForAppUpdateWorker.perform_in(::ClusterWaitForAppUpdateWorker::INTERVAL, app.name, app.id)
- rescue ::Kubeclient::HttpError => ke
- app.make_update_errored!("Kubernetes error: #{ke.message}")
- rescue StandardError => e
- app.make_update_errored!(e.message)
- end
-
- private
-
- def values
- PrometheusConfigService
- .new(project, cluster, app)
- .execute
- .to_yaml
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/update_service.rb b/app/services/clusters/applications/update_service.rb
deleted file mode 100644
index 7a36401f156..00000000000
--- a/app/services/clusters/applications/update_service.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class UpdateService < Clusters::Applications::BaseService
- private
-
- def worker_class(application)
- ClusterPatchAppWorker
- end
-
- def builder
- cluster.public_send(application_class.association_name) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
- end
-end
diff --git a/app/services/clusters/kubernetes/configure_istio_ingress_service.rb b/app/services/clusters/kubernetes/configure_istio_ingress_service.rb
deleted file mode 100644
index 3b7e094bc97..00000000000
--- a/app/services/clusters/kubernetes/configure_istio_ingress_service.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-# frozen_string_literal: true
-
-require 'openssl'
-
-module Clusters
- module Kubernetes
- class ConfigureIstioIngressService
- PASSTHROUGH_RESOURCE = Kubeclient::Resource.new(
- mode: 'PASSTHROUGH'
- ).freeze
-
- MTLS_RESOURCE = Kubeclient::Resource.new(
- mode: 'MUTUAL',
- privateKey: '/etc/istio/ingressgateway-certs/tls.key',
- serverCertificate: '/etc/istio/ingressgateway-certs/tls.crt',
- caCertificates: '/etc/istio/ingressgateway-ca-certs/cert.pem'
- ).freeze
-
- def initialize(cluster:)
- @cluster = cluster
- @platform = cluster.platform
- @kubeclient = platform.kubeclient
- @knative = cluster.application_knative
- end
-
- def execute
- return configure_certificates if serverless_domain_cluster
-
- configure_passthrough
- rescue Kubeclient::HttpError => e
- knative.make_errored!(_('Kubernetes error: %{error_code}') % { error_code: e.error_code })
- rescue StandardError
- knative.make_errored!(_('Failed to update.'))
- end
-
- private
-
- attr_reader :cluster, :platform, :kubeclient, :knative
-
- def serverless_domain_cluster
- knative&.serverless_domain_cluster
- end
-
- def configure_certificates
- create_or_update_istio_cert_and_key
- set_gateway_wildcard_https(MTLS_RESOURCE)
- end
-
- def create_or_update_istio_cert_and_key
- name = OpenSSL::X509::Name.parse("CN=#{knative.hostname}")
-
- key = OpenSSL::PKey::RSA.new(2048)
-
- cert = OpenSSL::X509::Certificate.new
- cert.version = 2
- cert.serial = 0
- cert.not_before = Time.current
- cert.not_after = Time.current + 1000.years
-
- cert.public_key = key.public_key
- cert.subject = name
- cert.issuer = name
- cert.sign(key, OpenSSL::Digest.new('SHA256'))
-
- serverless_domain_cluster.update!(
- key: key.to_pem,
- certificate: cert.to_pem
- )
-
- kubeclient.create_or_update_secret(istio_ca_certs_resource)
- kubeclient.create_or_update_secret(istio_certs_resource)
- end
-
- def istio_ca_certs_resource
- Gitlab::Kubernetes::GenericSecret.new(
- 'istio-ingressgateway-ca-certs',
- {
- 'cert.pem': Base64.strict_encode64(serverless_domain_cluster.certificate)
- },
- Clusters::Kubernetes::ISTIO_SYSTEM_NAMESPACE
- ).generate
- end
-
- def istio_certs_resource
- Gitlab::Kubernetes::TlsSecret.new(
- 'istio-ingressgateway-certs',
- serverless_domain_cluster.certificate,
- serverless_domain_cluster.key,
- Clusters::Kubernetes::ISTIO_SYSTEM_NAMESPACE
- ).generate
- end
-
- def set_gateway_wildcard_https(tls_resource)
- gateway_resource = gateway
- gateway_resource.spec.servers.each do |server|
- next unless server.hosts == ['*'] && server.port.name == 'https'
-
- server.tls = tls_resource
- end
- kubeclient.update_gateway(gateway_resource)
- end
-
- def configure_passthrough
- set_gateway_wildcard_https(PASSTHROUGH_RESOURCE)
- end
-
- def gateway
- kubeclient.get_gateway('knative-ingress-gateway', Clusters::Kubernetes::KNATIVE_SERVING_NAMESPACE)
- end
- end
- end
-end
diff --git a/app/services/concerns/alert_management/responses.rb b/app/services/concerns/alert_management/responses.rb
index 183a831a00a..e48d07d26c0 100644
--- a/app/services/concerns/alert_management/responses.rb
+++ b/app/services/concerns/alert_management/responses.rb
@@ -7,6 +7,10 @@ module AlertManagement
ServiceResponse.success(payload: { alerts: Array(alerts) })
end
+ def created
+ ServiceResponse.success(http_status: :created)
+ end
+
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end
diff --git a/app/services/dependency_proxy/find_cached_manifest_service.rb b/app/services/dependency_proxy/find_cached_manifest_service.rb
index faf0402edaa..ea09445584a 100644
--- a/app/services/dependency_proxy/find_cached_manifest_service.rb
+++ b/app/services/dependency_proxy/find_cached_manifest_service.rb
@@ -19,6 +19,7 @@ module DependencyProxy
head_result = DependencyProxy::HeadManifestService.new(@image, @tag, @token).execute
return respond if cached_manifest_matches?(head_result)
+ return respond if @manifest && head_result[:status] == :error
success(manifest: nil, from_cache: false)
rescue Timeout::Error, *Gitlab::HTTP::HTTP_ERRORS
diff --git a/app/services/deployments/create_for_build_service.rb b/app/services/deployments/create_for_build_service.rb
index 76d871161e3..7bc0ea88910 100644
--- a/app/services/deployments/create_for_build_service.rb
+++ b/app/services/deployments/create_for_build_service.rb
@@ -8,28 +8,62 @@ module Deployments
def execute(build)
return unless build.instance_of?(::Ci::Build) && build.persisted_environment.present?
- # TODO: Move all buisness logic in `Seed::Deployment` to this class after
- # `create_deployment_in_separate_transaction` feature flag has been removed.
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/348778
-
- # If build.persisted_environment is a BatchLoader, we need to remove
- # the method proxy in order to clone into new item here
- # https://github.com/exAspArk/batch-loader/issues/31
- environment = if build.persisted_environment.respond_to?(:__sync)
- build.persisted_environment.__sync
- else
- build.persisted_environment
- end
-
- deployment = ::Gitlab::Ci::Pipeline::Seed::Deployment
- .new(build, environment).to_resource
+ environment = build.actual_persisted_environment
+
+ deployment = to_resource(build, environment)
return unless deployment
- build.create_deployment!(deployment.attributes)
+ deployment.save!
+ build.association(:deployment).target = deployment
+ build.association(:deployment).loaded!
+
+ deployment
rescue ActiveRecord::RecordInvalid => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
DeploymentCreationError.new(e.message), build_id: build.id)
end
+
+ private
+
+ def to_resource(build, environment)
+ return build.deployment if build.deployment
+ return unless build.starts_environment?
+
+ deployment = ::Deployment.new(attributes(build, environment))
+
+ # If there is a validation error on environment creation, such as
+ # the name contains invalid character, the job will fall back to a
+ # non-environment job.
+ return unless deployment.valid? && deployment.environment.persisted?
+
+ if cluster = deployment.environment.deployment_platform&.cluster
+ # double write cluster_id until 12.9: https://gitlab.com/gitlab-org/gitlab/issues/202628
+ deployment.cluster_id = cluster.id
+ deployment.deployment_cluster = ::DeploymentCluster.new(
+ cluster_id: cluster.id,
+ kubernetes_namespace: cluster.kubernetes_namespace_for(deployment.environment, deployable: build)
+ )
+ end
+
+ # Allocate IID for deployments.
+ # This operation must be outside of transactions of pipeline creations.
+ deployment.ensure_project_iid!
+
+ deployment
+ end
+
+ def attributes(build, environment)
+ {
+ project: build.project,
+ environment: environment,
+ deployable: build,
+ user: build.user,
+ ref: build.ref,
+ tag: build.tag,
+ sha: build.sha,
+ on_stop: build.on_stop
+ }
+ end
end
end
diff --git a/app/services/environments/create_for_build_service.rb b/app/services/environments/create_for_build_service.rb
new file mode 100644
index 00000000000..c46b66ac5b3
--- /dev/null
+++ b/app/services/environments/create_for_build_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Environments
+ # This class creates an environment record for a build (a pipeline job).
+ class CreateForBuildService
+ def execute(build, merge_request: nil)
+ return unless build.instance_of?(::Ci::Build) && build.has_environment_keyword?
+
+ environment = to_resource(build, merge_request)
+
+ if environment.persisted?
+ build.persisted_environment = environment
+ build.assign_attributes(metadata_attributes: { expanded_environment_name: environment.name })
+ else
+ build.assign_attributes(status: :failed, failure_reason: :environment_creation_failure)
+ end
+
+ environment
+ end
+
+ private
+
+ # rubocop: disable Performance/ActiveRecordSubtransactionMethods
+ def to_resource(build, merge_request)
+ build.project.environments.safe_find_or_create_by(name: build.expanded_environment_name) do |environment|
+ # Initialize the attributes at creation
+ environment.auto_stop_in = expanded_auto_stop_in(build)
+ environment.tier = build.environment_tier_from_options
+ environment.merge_request = merge_request
+ end
+ end
+ # rubocop: enable Performance/ActiveRecordSubtransactionMethods
+
+ def expanded_auto_stop_in(build)
+ return unless build.environment_auto_stop_in
+
+ ExpandVariables.expand(build.environment_auto_stop_in, -> { build.simple_variables.sort_and_expand_all })
+ end
+ end
+end
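
Usage sketch for the new Environments::CreateForBuildService (illustrative only, not part of the commit; `build` is assumed to be a Ci::Build whose job definition carries an `environment:` keyword, and `merge_request` an optional MergeRequest):

    # Finds or creates the environment for the build and assigns it as the build's
    # persisted environment; on a validation failure the build is marked failed
    # with :environment_creation_failure.
    environment = Environments::CreateForBuildService.new.execute(build)

    # For merge request pipelines the MR can be passed through:
    # Environments::CreateForBuildService.new.execute(build, merge_request: merge_request)
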
diff --git a/app/services/environments/schedule_to_delete_review_apps_service.rb b/app/services/environments/schedule_to_delete_review_apps_service.rb
index b3b86689748..041b834f11b 100644
--- a/app/services/environments/schedule_to_delete_review_apps_service.rb
+++ b/app/services/environments/schedule_to_delete_review_apps_service.rb
@@ -58,7 +58,7 @@ module Environments
else
result.set_status(
:bad_request,
- error_message: "Failed to authorize deletions for some or all of the environments. Ask someone with more permissions to delete the environments."
+ error_message: "No environments found for scheduled deletion. Either your query did not match any environments (default parameters match environments that are 30 days or older), or you have insufficient permissions to delete matching environments."
)
result.set_unprocessable_entries(failed)
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 019246dfc9f..662980fe506 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -25,18 +25,22 @@ class EventCreateService
def open_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :created).tap do
track_event(event_action: :created, event_target: MergeRequest, author_id: current_user.id)
- track_snowplow_event(merge_request, current_user,
- Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
- :create, 'merge_requests_users')
+ track_snowplow_event(
+ :created,
+ merge_request,
+ current_user
+ )
end
end
def close_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :closed).tap do
track_event(event_action: :closed, event_target: MergeRequest, author_id: current_user.id)
- track_snowplow_event(merge_request, current_user,
- Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
- :close, 'merge_requests_users')
+ track_snowplow_event(
+ :closed,
+ merge_request,
+ current_user
+ )
end
end
@@ -47,9 +51,11 @@ class EventCreateService
def merge_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :merged).tap do
track_event(event_action: :merged, event_target: MergeRequest, author_id: current_user.id)
- track_snowplow_event(merge_request, current_user,
- Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
- :merge, 'merge_requests_users')
+ track_snowplow_event(
+ :merged,
+ merge_request,
+ current_user
+ )
end
end
@@ -73,9 +79,12 @@ class EventCreateService
create_record_event(note, current_user, :commented).tap do
if note.is_a?(DiffNote) && note.for_merge_request?
track_event(event_action: :commented, event_target: MergeRequest, author_id: current_user.id)
- track_snowplow_event(note, current_user,
- Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
- :comment, 'merge_requests_users')
+ track_snowplow_event(
+ :commented,
+ note,
+ current_user
+ )
+
end
end
end
@@ -109,13 +118,13 @@ class EventCreateService
return [] if records.empty?
if create.any?
- track_snowplow_event(create.first, current_user,
+ old_track_snowplow_event(create.first, current_user,
Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
:create, 'design_users')
end
if update.any?
- track_snowplow_event(update.first, current_user,
+ old_track_snowplow_event(update.first, current_user,
Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
:update, 'design_users')
end
@@ -126,7 +135,7 @@ class EventCreateService
def destroy_designs(designs, current_user)
return [] unless designs.present?
- track_snowplow_event(designs.first, current_user,
+ old_track_snowplow_event(designs.first, current_user,
Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
:destroy, 'design_users')
create_record_events(designs.zip([:destroyed].cycle), current_user)
@@ -213,7 +222,15 @@ class EventCreateService
namespace = project.namespace
if Feature.enabled?(:route_hll_to_snowplow, namespace)
- Gitlab::Tracking.event(self.class.to_s, 'action_active_users_project_repo', namespace: namespace, user: current_user, project: project)
+ Gitlab::Tracking.event(
+ self.class.to_s,
+ :push,
+ label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo',
+ namespace: namespace,
+ user: current_user,
+ project: project,
+ context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'action_active_users_project_repo').to_context]
+ )
end
Users::LastPushEventService.new(current_user)
@@ -253,7 +270,10 @@ class EventCreateService
Gitlab::UsageDataCounters::TrackUniqueEvents.track_event(**params)
end
- def track_snowplow_event(record, current_user, category, action, label)
+ # This will be deleted as a part of
+ # https://gitlab.com/groups/gitlab-org/-/epics/8641
+ # once all the events are fixed
+ def old_track_snowplow_event(record, current_user, category, action, label)
return unless Feature.enabled?(:route_hll_to_snowplow_phase2)
project = record.project
@@ -266,6 +286,19 @@ class EventCreateService
user: current_user
)
end
+
+ def track_snowplow_event(action, record, user)
+ project = record.project
+ Gitlab::Tracking.event(
+ self.class.to_s,
+ action.to_s,
+ label: 'usage_activity_by_stage_monthly.create.merge_requests_users',
+ namespace: project.namespace,
+ user: user,
+ project: project,
+ context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
+ )
+ end
end
EventCreateService.prepend_mod_with('EventCreateService')
diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb
index 269637805ad..7158116fde1 100644
--- a/app/services/git/base_hooks_service.rb
+++ b/app/services/git/base_hooks_service.rb
@@ -53,11 +53,11 @@ module Git
def create_pipelines
return unless params.fetch(:create_pipelines, true)
- Ci::CreatePipelineService
- .new(project, current_user, pipeline_params)
- .execute!(:push, pipeline_options)
- rescue Ci::CreatePipelineService::CreateError => ex
- log_pipeline_errors(ex)
+ response = Ci::CreatePipelineService
+ .new(project, current_user, pipeline_params)
+ .execute(:push, **pipeline_options)
+
+ log_pipeline_errors(response.message) unless response.payload.persisted?
end
def execute_project_hooks
@@ -148,14 +148,14 @@ module Git
{}
end
- def log_pipeline_errors(exception)
+ def log_pipeline_errors(error_message)
data = {
class: self.class.name,
correlation_id: Labkit::Correlation::CorrelationId.current_id.to_s,
project_id: project.id,
project_path: project.full_path,
message: "Error creating pipeline",
- errors: exception.to_s,
+ errors: error_message,
pipeline_params: sanitized_pipeline_params
}
diff --git a/app/services/google_cloud/create_service_accounts_service.rb b/app/services/google_cloud/create_service_accounts_service.rb
index 9617161b8e9..ca0aa7c91df 100644
--- a/app/services/google_cloud/create_service_accounts_service.rb
+++ b/app/services/google_cloud/create_service_accounts_service.rb
@@ -10,8 +10,8 @@ module GoogleCloud
service_accounts_service.add_for_project(
environment_name,
service_account.project_id,
- service_account.to_json,
- service_account_key.to_json,
+ Gitlab::Json.dump(service_account),
+ Gitlab::Json.dump(service_account_key),
ProtectedBranch.protected?(project, environment_name) || ProtectedTag.protected?(project, environment_name)
)
diff --git a/app/services/google_cloud/generate_pipeline_service.rb b/app/services/google_cloud/generate_pipeline_service.rb
index be0c7a783c9..b6438d6f501 100644
--- a/app/services/google_cloud/generate_pipeline_service.rb
+++ b/app/services/google_cloud/generate_pipeline_service.rb
@@ -34,7 +34,8 @@ module GoogleCloud
end
def generate_commit_attributes
- if action == ACTION_DEPLOY_TO_CLOUD_RUN
+ case action
+ when ACTION_DEPLOY_TO_CLOUD_RUN
branch_name = "deploy-to-cloud-run-#{SecureRandom.hex(8)}"
{
commit_message: 'Enable Cloud Run deployments',
@@ -43,7 +44,7 @@ module GoogleCloud
branch_name: branch_name,
start_branch: branch_name
}
- elsif action == ACTION_DEPLOY_TO_CLOUD_STORAGE
+ when ACTION_DEPLOY_TO_CLOUD_STORAGE
branch_name = "deploy-to-cloud-storage-#{SecureRandom.hex(8)}"
{
commit_message: 'Enable Cloud Storage deployments',
@@ -73,7 +74,7 @@ module GoogleCloud
includes << { 'remote' => include_url }
gitlab_ci_yml['include'] = includes.uniq
- gitlab_ci_yml.to_yaml
+ gitlab_ci_yml.deep_stringify_keys.to_yaml
end
end
end
diff --git a/app/services/google_cloud/setup_cloudsql_instance_service.rb b/app/services/google_cloud/setup_cloudsql_instance_service.rb
index 10237f83b37..40184b927ad 100644
--- a/app/services/google_cloud/setup_cloudsql_instance_service.rb
+++ b/app/services/google_cloud/setup_cloudsql_instance_service.rb
@@ -13,7 +13,7 @@ module GoogleCloud
get_instance_response = google_api_client.get_cloudsql_instance(gcp_project_id, instance_name)
if get_instance_response.state != INSTANCE_STATE_RUNNABLE
- return error("CloudSQL instance not RUNNABLE: #{get_instance_response.to_json}")
+ return error("CloudSQL instance not RUNNABLE: #{Gitlab::Json.dump(get_instance_response)}")
end
save_instance_ci_vars(get_instance_response)
@@ -42,7 +42,7 @@ module GoogleCloud
success
rescue Google::Apis::Error => err
- error(message: err.to_json)
+ error(message: Gitlab::Json.dump(err))
end
private
@@ -97,7 +97,7 @@ module GoogleCloud
database_response = google_api_client.create_cloudsql_database(gcp_project_id, instance_name, database_name)
if database_response.status != OPERATION_STATE_DONE
- return error("Database creation failed: #{database_response.to_json}")
+ return error("Database creation failed: #{Gitlab::Json.dump(database_response)}")
end
success
@@ -109,7 +109,7 @@ module GoogleCloud
user_response = google_api_client.create_cloudsql_user(gcp_project_id, instance_name, username, password)
if user_response.status != OPERATION_STATE_DONE
- return error("User creation failed: #{user_response.to_json}")
+ return error("User creation failed: #{Gitlab::Json.dump(user_response)}")
end
success
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index d508865ef32..68bb6427350 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -57,7 +57,7 @@ module Groups
end
def after_create_hook
- track_experiment_event
+ # overridden in EE
end
def remove_unallowed_params
@@ -109,15 +109,6 @@ module Groups
@group.shared_runners_enabled = @group.parent.shared_runners_enabled
@group.allow_descendants_override_disabled_shared_runners = @group.parent.allow_descendants_override_disabled_shared_runners
end
-
- def track_experiment_event
- return unless group.persisted?
-
- # Track namespace created events to relate them with signed up events for
- # the same experiment. This will let us associate created namespaces to
- # users that signed up from the experimental logged out header.
- experiment(:logged_out_marketing_header, actor: current_user).track(:namespace_created, namespace: group)
- end
end
end
diff --git a/app/services/groups/update_service.rb b/app/services/groups/update_service.rb
index 2135892a95a..925a2acbb58 100644
--- a/app/services/groups/update_service.rb
+++ b/app/services/groups/update_service.rb
@@ -10,6 +10,8 @@ module Groups
reject_parent_id!
remove_unallowed_params
+ before_assignment_hook(group, params)
+
if renaming_group_with_container_registry_images?
group.errors.add(:base, container_images_error)
return false
@@ -25,8 +27,6 @@ module Groups
handle_changes
- before_assignment_hook(group, params)
-
handle_namespace_settings
group.assign_attributes(params)
diff --git a/app/services/incident_management/timeline_event_tags/base_service.rb b/app/services/incident_management/timeline_event_tags/base_service.rb
new file mode 100644
index 00000000000..7bb596dcd92
--- /dev/null
+++ b/app/services/incident_management/timeline_event_tags/base_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module IncidentManagement
+ module TimelineEventTags
+ class BaseService
+ def allowed?
+ user&.can?(:admin_incident_management_timeline_event_tag, project)
+ end
+
+ def success(timeline_event_tag)
+ ServiceResponse.success(payload: { timeline_event_tag: timeline_event_tag })
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def error_no_permissions
+ error(_('You have insufficient permissions to manage timeline event tags for this project'))
+ end
+
+ def error_in_save(timeline_event_tag)
+ error(timeline_event_tag.errors.full_messages.to_sentence)
+ end
+ end
+ end
+end
diff --git a/app/services/incident_management/timeline_event_tags/create_service.rb b/app/services/incident_management/timeline_event_tags/create_service.rb
new file mode 100644
index 00000000000..6742bb6ba5c
--- /dev/null
+++ b/app/services/incident_management/timeline_event_tags/create_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module IncidentManagement
+ module TimelineEventTags
+ class CreateService < TimelineEventTags::BaseService
+ attr_reader :project, :user, :params
+
+ def initialize(project, user, params)
+ @project = project
+ @user = user
+ @params = params
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ timeline_event_tag_params = {
+ project: project,
+ name: params[:name]
+ }
+
+ timeline_event_tag = IncidentManagement::TimelineEventTag.new(timeline_event_tag_params)
+
+ if timeline_event_tag.save
+ success(timeline_event_tag)
+ else
+ error_in_save(timeline_event_tag)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/incident_management/timeline_events/create_service.rb b/app/services/incident_management/timeline_events/create_service.rb
index 5422b4ad6d2..71ff5b64515 100644
--- a/app/services/incident_management/timeline_events/create_service.rb
+++ b/app/services/incident_management/timeline_events/create_service.rb
@@ -5,6 +5,7 @@ module IncidentManagement
DEFAULT_ACTION = 'comment'
DEFAULT_EDITABLE = false
DEFAULT_AUTO_CREATED = false
+ AUTOCREATE_TAGS = [TimelineEventTag::START_TIME_TAG_NAME, TimelineEventTag::END_TIME_TAG_NAME].freeze
class CreateService < TimelineEvents::BaseService
def initialize(incident, user, params)
@@ -49,6 +50,15 @@ module IncidentManagement
new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
end
+ def change_severity(incident, user)
+ severity_label = IssuableSeverity::SEVERITY_LABELS[incident.severity.to_sym]
+ note = "@#{user.username} changed the incident severity to **#{severity_label}**"
+ occurred_at = incident.updated_at
+ action = 'severity'
+
+ new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
+ end
+
def change_labels(incident, user, added_labels: [], removed_labels: [])
return if Feature.disabled?(:incident_timeline_events_from_labels, incident.project)
@@ -85,10 +95,17 @@ module IncidentManagement
editable: params.fetch(:editable, DEFAULT_EDITABLE)
}
+ non_existing_tags = validate_tags(project, params[:timeline_event_tag_names])
+
+ return error("#{_("Following tags don't exist")}: #{non_existing_tags}") unless non_existing_tags.empty?
+
timeline_event = IncidentManagement::TimelineEvent.new(timeline_event_params)
- if timeline_event.save
+ if timeline_event.save(context: validation_context)
add_system_note(timeline_event)
+
+ create_timeline_event_tag_links(timeline_event, params[:timeline_event_tag_names])
+
track_usage_event(:incident_management_timeline_event_created, user.id)
success(timeline_event)
@@ -112,6 +129,53 @@ module IncidentManagement
SystemNoteService.add_timeline_event(timeline_event)
end
+
+ def validation_context
+ :user_input if !auto_created && params[:promoted_from_note].blank?
+ end
+
+ def create_timeline_event_tag_links(timeline_event, tag_names)
+ return unless tag_names&.any?
+
+ auto_create_predefined_tags(tag_names)
+
+ # Refetches the tag objects to consider predefined tags as well
+ tags = project.incident_management_timeline_event_tags.by_names(tag_names)
+
+ tag_links = tags.select(:id).map do |tag|
+ {
+ timeline_event_id: timeline_event.id,
+ timeline_event_tag_id: tag.id,
+ created_at: DateTime.current
+ }
+ end
+
+ IncidentManagement::TimelineEventTagLink.insert_all(tag_links) if tag_links.any?
+ end
+
+ def auto_create_predefined_tags(new_tags)
+ new_tags = new_tags.map(&:downcase)
+
+ tags_to_create = AUTOCREATE_TAGS.select { |tag| tag.downcase.in?(new_tags) }
+
+ tags_to_create.each do |name|
+ project.incident_management_timeline_event_tags.create(name: name)
+ end
+ end
+
+ def validate_tags(project, tag_names)
+ return [] unless tag_names&.any?
+
+ start_time_tag = AUTOCREATE_TAGS[0].downcase
+ end_time_tag = AUTOCREATE_TAGS[1].downcase
+
+ tag_names_downcased = tag_names.map(&:downcase)
+
+ tags = project.incident_management_timeline_event_tags.by_names(tag_names).pluck_names.map(&:downcase)
+
+      # Remove existing tags from the given tag_names, and also remove predefined tags which can be auto-created
+ tag_names_downcased - tags - [start_time_tag, end_time_tag]
+ end
end
end
end
diff --git a/app/services/incident_management/timeline_events/update_service.rb b/app/services/incident_management/timeline_events/update_service.rb
index 012e2f0e260..8d4e29c6857 100644
--- a/app/services/incident_management/timeline_events/update_service.rb
+++ b/app/services/incident_management/timeline_events/update_service.rb
@@ -8,18 +8,23 @@ module IncidentManagement
# @option params [string] note
# @option params [datetime] occurred_at
class UpdateService < TimelineEvents::BaseService
+ VALIDATION_CONTEXT = :user_input
+
def initialize(timeline_event, user, params)
@timeline_event = timeline_event
@incident = timeline_event.incident
@user = user
@note = params[:note]
@occurred_at = params[:occurred_at]
+ @validation_context = VALIDATION_CONTEXT
end
def execute
return error_no_permissions unless allowed?
- if timeline_event.update(update_params)
+ timeline_event.assign_attributes(update_params)
+
+ if timeline_event.save(context: validation_context)
add_system_note(timeline_event)
track_usage_event(:incident_management_timeline_event_edited, user.id)
@@ -31,7 +36,7 @@ module IncidentManagement
private
- attr_reader :timeline_event, :incident, :user, :note, :occurred_at
+ attr_reader :timeline_event, :incident, :user, :note, :occurred_at, :validation_context
def update_params
{ updated_by_user: user, note: note, occurred_at: occurred_at }.compact
diff --git a/app/services/issuable/bulk_update_service.rb b/app/services/issuable/bulk_update_service.rb
index 238f5ebddae..30444fa3938 100644
--- a/app/services/issuable/bulk_update_service.rb
+++ b/app/services/issuable/bulk_update_service.rb
@@ -68,9 +68,10 @@ module Issuable
end
def find_issuables(parent, model_class, ids)
- if parent.is_a?(Project)
+ case parent
+ when Project
projects = parent
- elsif parent.is_a?(Group)
+ when Group
projects = parent.all_projects
else
return
diff --git a/app/services/issuable/discussions_list_service.rb b/app/services/issuable/discussions_list_service.rb
new file mode 100644
index 00000000000..7aa0363af01
--- /dev/null
+++ b/app/services/issuable/discussions_list_service.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+# This service returns notes grouped by discussion ID and paginated per discussion.
+# System notes, including synthetic system notes, also have a discussion ID assigned.
+module Issuable
+ class DiscussionsListService
+ include RendersNotes
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :current_user, :issuable, :params
+
+ def initialize(current_user, issuable, params = {})
+ @current_user = current_user
+ @issuable = issuable
+ @params = params.dup
+ end
+
+ def execute
+ return Note.none unless can_read_issuable?
+
+ notes = NotesFinder.new(current_user, params.merge({ target: issuable, project: issuable.project }))
+ .execute.with_web_entity_associations.inc_relations_for_view.fresh
+
+ if paginator
+ paginated_discussions_by_type = paginator.records.group_by(&:table_name)
+
+ notes = if paginated_discussions_by_type['notes'].present?
+ notes.with_discussion_ids(paginated_discussions_by_type['notes'].map(&:discussion_id))
+ else
+ notes.none
+ end
+ end
+
+ if params[:notes_filter] != UserPreference::NOTES_FILTERS[:only_comments]
+ notes = ResourceEvents::MergeIntoNotesService.new(
+ issuable, current_user, paginated_notes: paginated_discussions_by_type
+ ).execute(notes)
+ end
+
+ notes = prepare_notes_for_rendering(notes)
+
+ # TODO: optimize this permission check.
+    # Given this loads notes for a single issuable and the current permission system, we should not have to check
+ # permission on every single note. We should be able to check permission on the given issuable or its container,
+ # which should result in just one permission check. Perhaps that should also either be passed to NotesFinder or
+ # should be done in NotesFinder, which would decide right away if it would need to return no notes
+ # or if it should just filter out internal notes.
+ notes = notes.select { |n| n.readable_by?(current_user) }
+
+ Discussion.build_collection(notes, issuable)
+ end
+
+ def paginator
+ return if params[:per_page].blank?
+ return if issuable.instance_of?(MergeRequest) && Feature.disabled?(:paginated_mr_discussions, issuable.project)
+
+ strong_memoize(:paginator) do
+ issuable
+ .discussion_root_note_ids(notes_filter: params[:notes_filter])
+ .keyset_paginate(cursor: params[:cursor], per_page: params[:per_page].to_i)
+ end
+ end
+
+ def can_read_issuable?
+ return Ability.allowed?(current_user, :read_security_resource, issuable) if issuable.is_a?(Vulnerability)
+
+ Ability.allowed?(current_user, :"read_#{issuable.to_ability_name}", issuable)
+ end
+ end
+end
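
A hedged usage sketch of the new service; `issue` and `current_user` are illustrative names, and the params hash mirrors the keys the service reads (`:notes_filter`, `:per_page`, `:cursor`):

    # Illustrative only: issue and current_user are assumed to already exist.
    discussions = Issuable::DiscussionsListService.new(
      current_user,
      issue,
      { notes_filter: UserPreference::NOTES_FILTERS[:only_comments],
        per_page: 20, # enables keyset pagination through #paginator
        cursor: nil } # cursor from the previous page, if any
    ).execute

    discussions.size # collection of Discussion objects built from readable notes
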
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index e5feb4422f6..0aed9e3ba40 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -163,6 +163,7 @@ module Issues
invalidate_milestone_issue_counters(issue)
send_milestone_change_notification(issue)
+ GraphqlTriggers.issuable_milestone_updated(issue)
end
def invalidate_milestone_issue_counters(issue)
diff --git a/app/services/jira_import/start_import_service.rb b/app/services/jira_import/start_import_service.rb
index 4d1f2c94ac8..9cd56cf339e 100644
--- a/app/services/jira_import/start_import_service.rb
+++ b/app/services/jira_import/start_import_service.rb
@@ -40,7 +40,7 @@ module JiraImport
project.import_type = 'jira'
project.save! && jira_import.schedule!
- ServiceResponse.success(payload: { import_data: jira_import } )
+ ServiceResponse.success(payload: { import_data: jira_import })
rescue StandardError => ex
# in case project.save! raises an error
Gitlab::ErrorTracking.track_exception(ex, project_id: project.id)
diff --git a/app/services/labels/transfer_service.rb b/app/services/labels/transfer_service.rb
index a79e5b00232..79e807d8010 100644
--- a/app/services/labels/transfer_service.rb
+++ b/app/services/labels/transfer_service.rb
@@ -51,7 +51,7 @@ module Labels
# rubocop: disable CodeReuse/ActiveRecord
def group_labels_applied_to_issues
@labels_applied_to_issues ||= Label.joins(:issues)
- .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'" )
+ .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'")
.where(issues: { project_id: project.id }).reorder(nil)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -59,7 +59,7 @@ module Labels
# rubocop: disable CodeReuse/ActiveRecord
def group_labels_applied_to_merge_requests
@labels_applied_to_mrs ||= Label.joins(:merge_requests)
- .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'" )
+ .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'")
.where(merge_requests: { target_project_id: project.id }).reorder(nil)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/loose_foreign_keys/process_deleted_records_service.rb b/app/services/loose_foreign_keys/process_deleted_records_service.rb
index 54f54d99afb..8700276c982 100644
--- a/app/services/loose_foreign_keys/process_deleted_records_service.rb
+++ b/app/services/loose_foreign_keys/process_deleted_records_service.rb
@@ -9,6 +9,7 @@ module LooseForeignKeys
end
def execute
+ raised_error = false
modification_tracker = ModificationTracker.new
tracked_tables.cycle do |table|
records = load_batch_for_table(table)
@@ -35,13 +36,30 @@ module LooseForeignKeys
break if modification_tracker.over_limit?
end
+ ::Gitlab::Metrics::LooseForeignKeysSlis.record_apdex(
+ success: !modification_tracker.over_limit?,
+ db_config_name: db_config_name
+ )
+
modification_tracker.stats
+ rescue StandardError
+ raised_error = true
+ raise
+ ensure
+ ::Gitlab::Metrics::LooseForeignKeysSlis.record_error_rate(
+ error: raised_error,
+ db_config_name: db_config_name
+ )
end
private
attr_reader :connection
+ def db_config_name
+ ::Gitlab::Database.db_config_name(connection)
+ end
+
def load_batch_for_table(table)
fully_qualified_table_name = "#{current_schema}.#{table}"
LooseForeignKeys::DeletedRecord.load_batch_for_table(fully_qualified_table_name, BATCH_SIZE)
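
The `raised_error` flag plus `ensure` records the error-rate SLI exactly once, whichever way the method exits. A stripped-down sketch of the control flow, with generic method names standing in for the real calls:

    # Generic pattern: report success or failure once, whether the work
    # completes, stops early, or raises.
    def process_with_error_tracking
      raised_error = false

      do_work # stands in for the batch-deletion loop above
    rescue StandardError
      raised_error = true
      raise # re-raise so callers and error tracking still see the failure
    ensure
      record_error_rate(error: raised_error) # runs on both the success and error paths
    end
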
diff --git a/app/services/markup/rendering_service.rb b/app/services/markup/rendering_service.rb
new file mode 100644
index 00000000000..0142d600522
--- /dev/null
+++ b/app/services/markup/rendering_service.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Markup
+ class RenderingService
+ include ActionView::Helpers::TextHelper
+
+ # Let's increase the render timeout
+    # With a smaller value, a test that renders the blob content statically fails
+ # We can consider removing this custom timeout when markup_rendering_timeout FF is removed:
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/365358
+ RENDER_TIMEOUT = 5.seconds
+
+ def initialize(text, file_name: nil, context: {}, postprocess_context: {})
+ @text = text
+ @file_name = file_name
+ @context = context
+ @postprocess_context = postprocess_context
+ end
+
+ def execute
+ return '' unless text.present?
+ return context.delete(:rendered) if context.has_key?(:rendered)
+
+ html = file_name ? markup_unsafe : markdown_unsafe
+
+ return '' unless html.present?
+
+ postprocess_context ? postprocess(html) : html
+ end
+
+ private
+
+ def markup_unsafe
+ markup = proc do
+ if Gitlab::MarkupHelper.gitlab_markdown?(file_name)
+ markdown_unsafe
+ elsif Gitlab::MarkupHelper.asciidoc?(file_name)
+ asciidoc_unsafe
+ elsif Gitlab::MarkupHelper.plain?(file_name)
+ plain_unsafe
+ else
+ other_markup_unsafe
+ end
+ end
+
+ if Feature.enabled?(:markup_rendering_timeout, context[:project])
+ Gitlab::RenderTimeout.timeout(foreground: RENDER_TIMEOUT, &markup)
+ else
+ markup.call
+ end
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(e, project_id: context[:project]&.id, file_name: file_name)
+
+ simple_format(text)
+ end
+
+ def markdown_unsafe
+ Banzai.render(text, context)
+ end
+
+ def asciidoc_unsafe
+ Gitlab::Asciidoc.render(text, context)
+ end
+
+ def plain_unsafe
+ "<pre class=\"plain-readme\">#{text}</pre>"
+ end
+
+ def other_markup_unsafe
+ Gitlab::OtherMarkup.render(file_name, text, context)
+ end
+
+ def postprocess(html)
+ Banzai.post_process(html, context.reverse_merge(postprocess_context))
+ end
+
+ attr_reader :text, :file_name, :context, :postprocess_context
+ end
+end
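
A usage sketch for the new renderer; the text and `project` are illustrative:

    # Illustrative usage; `project` is assumed to be an existing Project record.
    html = Markup::RenderingService.new(
      "# Hello\n\nRendered through the new service.",
      file_name: 'README.md',
      context: { project: project },
      postprocess_context: { current_user: nil }
    ).execute
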
diff --git a/app/services/members/approve_access_request_service.rb b/app/services/members/approve_access_request_service.rb
index 5337279f702..51f9492ec91 100644
--- a/app/services/members/approve_access_request_service.rb
+++ b/app/services/members/approve_access_request_service.rb
@@ -16,7 +16,7 @@ module Members
private
def validate_access!(access_requester)
- raise Gitlab::Access::AccessDeniedError unless can_update_access_requester?(access_requester)
+ raise Gitlab::Access::AccessDeniedError unless can_approve_access_requester?(access_requester)
if approving_member_with_owner_access_level?(access_requester) &&
cannot_assign_owner_responsibilities_to_member_in_project?(access_requester)
@@ -24,8 +24,8 @@ module Members
end
end
- def can_update_access_requester?(access_requester)
- can?(current_user, update_member_permission(access_requester), access_requester)
+ def can_approve_access_requester?(access_requester)
+ can?(current_user, :admin_member_access_request, access_requester.source)
end
def approving_member_with_owner_access_level?(access_requester)
diff --git a/app/services/members/destroy_service.rb b/app/services/members/destroy_service.rb
index ce79907e8a8..f18269454e3 100644
--- a/app/services/members/destroy_service.rb
+++ b/app/services/members/destroy_service.rb
@@ -48,6 +48,10 @@ module Members
def authorized?(member, destroy_bot)
return can_destroy_bot_member?(member) if destroy_bot
+ if member.request?
+ return can_destroy_member_access_request?(member) || can_withdraw_member_access_request?(member)
+ end
+
can_destroy_member?(member)
end
@@ -106,6 +110,14 @@ module Members
can?(current_user, destroy_bot_member_permission(member), member)
end
+ def can_destroy_member_access_request?(member)
+ can?(current_user, :admin_member_access_request, member.source)
+ end
+
+ def can_withdraw_member_access_request?(member)
+ can?(current_user, :withdraw_member_access_request, member)
+ end
+
def destroying_member_with_owner_access_level?(member)
member.owner?
end
diff --git a/app/services/members/update_service.rb b/app/services/members/update_service.rb
index 8ef3e307519..0e6b02f7a80 100644
--- a/app/services/members/update_service.rb
+++ b/app/services/members/update_service.rb
@@ -2,37 +2,84 @@
module Members
class UpdateService < Members::BaseService
- # returns the updated member
- def execute(member, permission: :update)
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, action_member_permission(permission, member), member)
- raise Gitlab::Access::AccessDeniedError if prevent_upgrade_to_owner?(member) || prevent_downgrade_from_owner?(member)
+ # @param members [Member, Array<Member>]
+ # returns the updated member(s)
+ def execute(members, permission: :update)
+ members = Array.wrap(members)
- return success(member: member) if update_results_in_no_change?(member)
-
- old_access_level = member.human_access
- old_expiry = member.expires_at
-
- if member.update(params)
- after_execute(action: permission, old_access_level: old_access_level, old_expiry: old_expiry, member: member)
-
- # Deletes only confidential issues todos for guests
- enqueue_delete_todos(member) if downgrading_to_guest?
+ old_access_level_expiry_map = members.to_h do |member|
+ [member.id, { human_access: member.human_access, expires_at: member.expires_at }]
end
- if member.errors.any?
- error(member.errors.full_messages.to_sentence, pass_back: { member: member })
+ if Feature.enabled?(:bulk_update_membership_roles, current_user)
+ multiple_members_update(members, permission, old_access_level_expiry_map)
else
- success(member: member)
+ single_member_update(members.first, permission, old_access_level_expiry_map)
end
+
+ prepare_response(members)
end
private
- def update_results_in_no_change?(member)
- return false if params[:expires_at]&.to_date != member.expires_at
- return false if params[:access_level] != member.access_level
+ def single_member_update(member, permission, old_access_level_expiry_map)
+ raise Gitlab::Access::AccessDeniedError unless has_update_permissions?(member, permission)
+
+ member.attributes = params
+ return success(member: member) unless member.changed?
+
+ post_update(member, permission, old_access_level_expiry_map) if member.save
+ end
+
+ def multiple_members_update(members, permission, old_access_level_expiry_map)
+ begin
+ updated_members =
+ Member.transaction do
+ # Using `next` with `filter_map` avoids the `post_update` call for the member that resulted in no change
+ members.filter_map do |member|
+ raise Gitlab::Access::AccessDeniedError unless has_update_permissions?(member, permission)
+
+ member.attributes = params
+ next unless member.changed?
+
+ member.save!
+ member
+ end
+ end
+ rescue ActiveRecord::RecordInvalid
+ return
+ end
+
+ updated_members.each { |member| post_update(member, permission, old_access_level_expiry_map) }
+ end
+
+ def post_update(member, permission, old_access_level_expiry_map)
+ old_access_level = old_access_level_expiry_map[member.id][:human_access]
+ old_expiry = old_access_level_expiry_map[member.id][:expires_at]
+
+ after_execute(action: permission, old_access_level: old_access_level, old_expiry: old_expiry, member: member)
+ enqueue_delete_todos(member) if downgrading_to_guest? # Deletes only confidential issues todos for guests
+ end
+
+ def prepare_response(members)
+ errored_member = members.detect { |member| member.errors.any? }
+ if errored_member.present?
+ return error(errored_member.errors.full_messages.to_sentence, pass_back: { member: errored_member })
+ end
+
+ # TODO: Remove the :member key when removing the bulk_update_membership_roles FF and update where it's used.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/373257
+ if members.one?
+ success(member: members.first)
+ else
+ success(members: members)
+ end
+ end
- true
+ def has_update_permissions?(member, permission)
+ can?(current_user, action_member_permission(permission, member), member) &&
+ !prevent_upgrade_to_owner?(member) &&
+ !prevent_downgrade_from_owner?(member)
end
def downgrading_to_guest?
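
With the `bulk_update_membership_roles` flag enabled, `execute` now accepts either a single member or an array. A hedged sketch, assuming the usual `Members::BaseService` signature of `new(current_user, params)`:

    # Assumption: Members::BaseService#initialize(current_user, params), as elsewhere in app/services.
    service = Members::UpdateService.new(current_user, { access_level: Gitlab::Access::DEVELOPER })

    service.execute(member)               # response payload: { member: ... }
    service.execute([member_a, member_b]) # response payload: { members: [...] }
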
diff --git a/app/services/merge_requests/add_context_service.rb b/app/services/merge_requests/add_context_service.rb
index 7b441ddf5e4..2ce6073050e 100644
--- a/app/services/merge_requests/add_context_service.rb
+++ b/app/services/merge_requests/add_context_service.rb
@@ -65,7 +65,7 @@ module MergeRequests
sha: sha,
authored_date: Gitlab::Database.sanitize_timestamp(commit_hash[:authored_date]),
committed_date: Gitlab::Database.sanitize_timestamp(commit_hash[:committed_date]),
- trailers: commit_hash.fetch(:trailers, {}).to_json
+ trailers: Gitlab::Json.dump(commit_hash.fetch(:trailers, {}))
)
end
end
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index 9d12eb80eb6..20b32dbc2a0 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -5,6 +5,8 @@ module MergeRequests
include Gitlab::Utils::StrongMemoize
def execute(merge_request)
+ merge_request.ensure_merge_request_diff
+
prepare_for_mergeability(merge_request)
prepare_merge_request(merge_request)
end
diff --git a/app/services/merge_requests/approval_service.rb b/app/services/merge_requests/approval_service.rb
index 5761e34caff..72f398ce415 100644
--- a/app/services/merge_requests/approval_service.rb
+++ b/app/services/merge_requests/approval_service.rb
@@ -11,6 +11,8 @@ module MergeRequests
reset_approvals_cache(merge_request)
merge_request_activity_counter.track_approve_mr_action(user: current_user, merge_request: merge_request)
+ trigger_merge_request_merge_status_updated(merge_request)
+ trigger_merge_request_reviewers_updated(merge_request)
# Approval side effects (things not required to be done immediately but
# should happen after a successful approval) should be done asynchronously
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index cfd7c645b7e..e7ab2c062ee 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -20,7 +20,7 @@ module MergeRequests
end
def execute_hooks(merge_request, action = 'open', old_rev: nil, old_associations: {})
- merge_data = hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations)
+ merge_data = Gitlab::Lazy.new { hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations) }
merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
merge_request.project.execute_integrations(merge_data, :merge_request_hooks)
@@ -249,6 +249,10 @@ module MergeRequests
def trigger_merge_request_reviewers_updated(merge_request)
GraphqlTriggers.merge_request_reviewers_updated(merge_request)
end
+
+ def trigger_merge_request_merge_status_updated(merge_request)
+ GraphqlTriggers.merge_request_merge_status_updated(merge_request)
+ end
end
end
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index 8e0f72eb380..04d08f257f1 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -14,11 +14,15 @@ module MergeRequests
end
def after_create(issuable)
- issuable.mark_as_preparing
+ current_user_id = current_user.id
+
+ issuable.run_after_commit do
+ # Add new items to MergeRequests::AfterCreateService if they can
+ # be performed in Sidekiq
+ NewMergeRequestWorker.perform_async(issuable.id, current_user_id)
+ end
- # Add new items to MergeRequests::AfterCreateService if they can
- # be performed in Sidekiq
- NewMergeRequestWorker.perform_async(issuable.id, current_user.id)
+ issuable.mark_as_preparing
super
end
@@ -34,7 +38,12 @@ module MergeRequests
# callback (e.g. after_create), a database transaction will be
# open while the Gitaly RPC waits. To avoid an idle in transaction
# timeout, we do this before we attempt to save the merge request.
- merge_request.eager_fetch_ref!
+
+ if Feature.enabled?(:async_merge_request_diff_creation, current_user)
+ merge_request.skip_ensure_merge_request_diff = true
+ else
+ merge_request.eager_fetch_ref!
+ end
end
def set_projects!
@@ -59,4 +68,4 @@ module MergeRequests
end
end
-MergeRequests::CreateService.include_mod_with('MergeRequests::CreateService')
+MergeRequests::CreateService.prepend_mod_with('MergeRequests::CreateService')
diff --git a/app/services/merge_requests/mergeability/run_checks_service.rb b/app/services/merge_requests/mergeability/run_checks_service.rb
index 7f205c8dd6c..740a6feac2c 100644
--- a/app/services/merge_requests/mergeability/run_checks_service.rb
+++ b/app/services/merge_requests/mergeability/run_checks_service.rb
@@ -38,7 +38,7 @@ module MergeRequests
def failure_reason
raise 'Execute needs to be called before' if results.nil?
- results.find(&:failed?)&.payload&.fetch(:reason)
+ results.find(&:failed?)&.payload&.fetch(:reason)&.to_sym
end
private
@@ -46,7 +46,6 @@ module MergeRequests
attr_reader :merge_request, :params, :results
def run_check(check)
- return check.execute unless Feature.enabled?(:mergeability_caching, merge_request.project)
return check.execute unless check.cacheable?
cached_result = cached_results.read(merge_check: check)
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index 1ce44f465cd..2a3f417a33b 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -156,8 +156,7 @@ module MergeRequests
end
def merge_to_ref
- params = { allow_conflicts: Feature.enabled?(:display_merge_conflicts_in_diff, project) }
- result = MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author, params: params).execute(merge_request)
+ result = MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author, params: {}).execute(merge_request)
result[:status] == :success
end
diff --git a/app/services/merge_requests/remove_approval_service.rb b/app/services/merge_requests/remove_approval_service.rb
index 52628729519..8387c23fe3f 100644
--- a/app/services/merge_requests/remove_approval_service.rb
+++ b/app/services/merge_requests/remove_approval_service.rb
@@ -17,6 +17,8 @@ module MergeRequests
reset_approvals_cache(merge_request)
create_note(merge_request)
merge_request_activity_counter.track_unapprove_mr_action(user: current_user)
+ trigger_merge_request_merge_status_updated(merge_request)
+ trigger_merge_request_reviewers_updated(merge_request)
end
success
diff --git a/app/services/merge_requests/update_assignees_service.rb b/app/services/merge_requests/update_assignees_service.rb
index 79a3e9f3c22..d45d55cbebc 100644
--- a/app/services/merge_requests/update_assignees_service.rb
+++ b/app/services/merge_requests/update_assignees_service.rb
@@ -19,16 +19,9 @@ module MergeRequests
attrs = update_attrs.merge(assignee_ids: new_ids)
- # We now have assignees validation on merge request
- # If we use an update with bang, it will explode,
- # instead we need to check if its valid then return if its not valid.
- if Feature.enabled?(:limit_assignees_per_issuable)
- merge_request.update(**attrs)
-
- return merge_request unless merge_request.valid?
- else
- merge_request.update!(**attrs)
- end
+ merge_request.update(**attrs)
+
+ return merge_request unless merge_request.valid?
# Defer the more expensive operations (handle_assignee_changes) to the background
MergeRequests::HandleAssigneesChangeService
diff --git a/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb b/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb
index 0651e569d07..62264281a02 100644
--- a/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb
+++ b/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# Fetches the self monitoring metrics dashboard and formats the output.
+# Fetches the self-monitoring metrics dashboard and formats the output.
# Use Gitlab::Metrics::Dashboard::Finder to retrieve dashboards.
module Metrics
module Dashboard
diff --git a/app/services/milestones/transfer_service.rb b/app/services/milestones/transfer_service.rb
index bbf6920f83b..fa6b461b75d 100644
--- a/app/services/milestones/transfer_service.rb
+++ b/app/services/milestones/transfer_service.rb
@@ -62,7 +62,7 @@ module Milestones
# rubocop: enable CodeReuse/ActiveRecord
def find_or_create_milestone(milestone)
- params = milestone.attributes.slice('title', 'description', 'start_date', 'due_date')
+ params = milestone.attributes.slice('title', 'description', 'start_date', 'due_date', 'state')
FindOrCreateService.new(project, current_user, params).execute
end
diff --git a/app/services/namespaces/statistics_refresher_service.rb b/app/services/namespaces/statistics_refresher_service.rb
index 805060cdee9..2580d2f09fd 100644
--- a/app/services/namespaces/statistics_refresher_service.rb
+++ b/app/services/namespaces/statistics_refresher_service.rb
@@ -5,6 +5,7 @@ module Namespaces
RefresherError = Class.new(StandardError)
def execute(root_namespace)
+ root_namespace = root_namespace.root_ancestor # just in case the true root isn't passed
root_storage_statistics = find_or_create_root_storage_statistics(root_namespace.id)
root_storage_statistics.recalculate!
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 1aaf7fb769a..555d60dc291 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -137,8 +137,6 @@ module Notes
end
def invalid_assignees?(update_params)
- return false unless Feature.enabled?(:limit_assignees_per_issuable)
-
if update_params.key?(:assignee_ids)
possible_assignees = update_params[:assignee_ids]&.uniq&.size
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 1224cf80b76..660d9891e46 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -16,6 +16,16 @@
# NotificationService.new.async.new_issue(issue, current_user)
#
class NotificationService
+ # These should not be called by the MailScheduler::NotificationServiceWorker -
+ # what would it even mean?
+ EXCLUDED_ACTIONS = %i[async].freeze
+
+ def self.permitted_actions
+ @permitted_actions ||= gitlab_extensions.flat_map do |klass|
+ klass.public_instance_methods(false) - EXCLUDED_ACTIONS
+ end.to_set
+ end
+
class Async
attr_reader :parent
diff --git a/app/services/packages/debian/create_distribution_service.rb b/app/services/packages/debian/create_distribution_service.rb
index b4b1ec952ef..218423bb8e3 100644
--- a/app/services/packages/debian/create_distribution_service.rb
+++ b/app/services/packages/debian/create_distribution_service.rb
@@ -61,7 +61,7 @@ module Packages
create_objects(distribution.architectures, architectures, error_label: 'Architecture')
end
- def create_objects(objects, object_names_from_params, error_label: )
+ def create_objects(objects, object_names_from_params, error_label:)
object_names_from_params.each do |name|
new_object = objects.create(name: name)
append_errors(new_object, error_label)
diff --git a/app/services/packages/debian/update_distribution_service.rb b/app/services/packages/debian/update_distribution_service.rb
index 218167ecdc5..5096bd5361f 100644
--- a/app/services/packages/debian/update_distribution_service.rb
+++ b/app/services/packages/debian/update_distribution_service.rb
@@ -58,7 +58,7 @@ module Packages
update_objects(distribution.architectures, architectures, error_label: 'Architecture')
end
- def update_objects(objects, object_names_from_params, error_label: )
+ def update_objects(objects, object_names_from_params, error_label:)
current_object_names = objects.map(&:name)
missing_object_names = object_names_from_params - current_object_names
extra_object_names = current_object_names - object_names_from_params
diff --git a/app/services/packages/maven/metadata/base_create_xml_service.rb b/app/services/packages/maven/metadata/base_create_xml_service.rb
index 4d5cab4978e..3b0d93e1dfb 100644
--- a/app/services/packages/maven/metadata/base_create_xml_service.rb
+++ b/app/services/packages/maven/metadata/base_create_xml_service.rb
@@ -8,13 +8,16 @@ module Packages
INDENT_SPACE = 2
- def initialize(metadata_content:, package:)
+ def initialize(metadata_content:, package:, logger: nil)
@metadata_content = metadata_content
@package = package
+ @logger = logger || Gitlab::AppJsonLogger
end
private
+ attr_reader :logger
+
def xml_doc
strong_memoize(:xml_doc) do
Nokogiri::XML(@metadata_content) do |config|
diff --git a/app/services/packages/maven/metadata/create_versions_xml_service.rb b/app/services/packages/maven/metadata/create_versions_xml_service.rb
index 13b6efa8650..c2ac7fea703 100644
--- a/app/services/packages/maven/metadata/create_versions_xml_service.rb
+++ b/app/services/packages/maven/metadata/create_versions_xml_service.rb
@@ -67,6 +67,12 @@ module Packages
def update_release
return false if release_coherent?
+ unless release_xml_node.present?
+ log_malformed_content('Missing release tag')
+
+ return false
+ end
+
if release_from_database
release_xml_node.content = release_from_database
else
@@ -159,6 +165,15 @@ module Packages
non_snapshot_versions_from_database.last
end
end
+
+ def log_malformed_content(reason)
+ logger.warn(
+ message: 'A malformed metadata file has been encountered',
+ reason: reason,
+ project_id: @package.project_id,
+ package_id: @package.id
+ )
+ end
end
end
end
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index a3596314199..dd074f7472b 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -81,7 +81,7 @@ module Packages
# - https://blog.aaronlenoir.com/2017/11/10/get-original-length-from-base-64-string/
# - https://en.wikipedia.org/wiki/Base64#Decoding_Base64_with_padding
encoded_data = attachment['data']
- ((encoded_data.length * 0.75 ) - encoded_data[-2..].count('=')).to_i
+ ((encoded_data.length * 0.75) - encoded_data[-2..].count('=')).to_i
end
end
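
The size estimate works because every 4 Base64 characters encode 3 bytes, and each trailing '=' is padding rather than data. A worked example:

    require 'base64'

    encoded_data = 'SGVsbG8=' # Base64 for "Hello" (5 bytes)

    ((encoded_data.length * 0.75) - encoded_data[-2..].count('=')).to_i
    # 8 * 0.75 = 6, minus 1 padding '=' => 5

    Base64.decode64(encoded_data).bytesize
    # => 5, matching the estimate without decoding
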
diff --git a/app/services/packages/rpm/parse_package_service.rb b/app/services/packages/rpm/parse_package_service.rb
index 689a161a81a..18b916a9d8b 100644
--- a/app/services/packages/rpm/parse_package_service.rb
+++ b/app/services/packages/rpm/parse_package_service.rb
@@ -25,7 +25,8 @@ module Packages
epoch: package_tags[:epoch] || '0',
changelogs: build_changelogs,
requirements: build_requirements,
- provides: build_provides
+ provides: build_provides,
+ directories: package_tags[:dirnames]
}.merge(extract_static_attributes)
end
diff --git a/app/services/packages/rpm/repository_metadata/base_builder.rb b/app/services/packages/rpm/repository_metadata/base_builder.rb
deleted file mode 100644
index 2c0a11457ec..00000000000
--- a/app/services/packages/rpm/repository_metadata/base_builder.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-module Packages
- module Rpm
- module RepositoryMetadata
- class BaseBuilder
- def initialize(xml: nil, data: {})
- @xml = Nokogiri::XML(xml) if xml.present?
- @data = data
- end
-
- def execute
- return build_empty_structure if xml.blank?
-
- update_xml_document
- update_package_count
- xml.to_xml
- end
-
- private
-
- attr_reader :xml, :data
-
- def build_empty_structure
- Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
- xml.method_missing(self.class::ROOT_TAG, self.class::ROOT_ATTRIBUTES)
- end.to_xml
- end
-
- def update_xml_document
- # Add to the root xml element a new package metadata node
- xml.at(self.class::ROOT_TAG).add_child(build_new_node)
- end
-
- def update_package_count
- packages_count = xml.css("//#{self.class::ROOT_TAG}/package").count
-
- xml.at(self.class::ROOT_TAG).attributes["packages"].value = packages_count.to_s
- end
-
- def build_new_node
- raise NotImplementedError, "#{self.class} should implement #{__method__}"
- end
- end
- end
- end
-end
diff --git a/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb b/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb
deleted file mode 100644
index 01fb36f4b91..00000000000
--- a/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-module Packages
- module Rpm
- module RepositoryMetadata
- class BuildFilelistXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
- ROOT_TAG = 'filelists'
- ROOT_ATTRIBUTES = {
- xmlns: 'http://linux.duke.edu/metadata/filelists',
- packages: '0'
- }.freeze
- end
- end
- end
-end
diff --git a/app/services/packages/rpm/repository_metadata/build_filelist_xml_service.rb b/app/services/packages/rpm/repository_metadata/build_filelist_xml_service.rb
new file mode 100644
index 00000000000..47cbba76fa4
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_filelist_xml_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildFilelistXmlService < BuildXmlBaseService
+ ROOT_TAG = 'filelists'
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/filelists',
+ packages: '0'
+ }.freeze
+
+ def execute
+ super do |xml|
+ xml.package(pkgid: data[:pkgid], name: data[:name], arch: data[:arch]) do
+ xml.version epoch: data[:epoch], ver: data[:version], rel: data[:release]
+ build_file_nodes(xml)
+ end
+ end
+ end
+
+ private
+
+ def build_file_nodes(xml)
+ data[:files].each do |path|
+ attributes = dir?(path) ? { type: 'dir' } : {}
+
+ xml.file path, **attributes
+ end
+ end
+
+ def dir?(path)
+          # Append a trailing slash to the path to check
+          # whether it exists in the directories list
+ data[:directories].include? File.join(path, '')
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_other_xml.rb b/app/services/packages/rpm/repository_metadata/build_other_xml.rb
deleted file mode 100644
index 4bf61c901a3..00000000000
--- a/app/services/packages/rpm/repository_metadata/build_other_xml.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-module Packages
- module Rpm
- module RepositoryMetadata
- class BuildOtherXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
- ROOT_TAG = 'otherdata'
- ROOT_ATTRIBUTES = {
- xmlns: 'http://linux.duke.edu/metadata/other',
- packages: '0'
- }.freeze
- end
- end
- end
-end
diff --git a/app/services/packages/rpm/repository_metadata/build_other_xml_service.rb b/app/services/packages/rpm/repository_metadata/build_other_xml_service.rb
new file mode 100644
index 00000000000..00e88f4f548
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_other_xml_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildOtherXmlService < BuildXmlBaseService
+ ROOT_TAG = 'otherdata'
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/other',
+ packages: '0'
+ }.freeze
+
+ def execute
+ super do |xml|
+ xml.package(pkgid: data[:pkgid], name: data[:name], arch: data[:arch]) do
+ xml.version epoch: data[:epoch], ver: data[:version], rel: data[:release]
+ build_changelog_nodes(xml)
+ end
+ end
+ end
+
+ private
+
+ def build_changelog_nodes(xml)
+ data[:changelogs].each do |changelog|
+ xml.changelog changelog[:changelogtext], date: changelog[:changelogtime]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_primary_xml.rb b/app/services/packages/rpm/repository_metadata/build_primary_xml_service.rb
index 580bf844a0c..1044ab3997a 100644
--- a/app/services/packages/rpm/repository_metadata/build_primary_xml.rb
+++ b/app/services/packages/rpm/repository_metadata/build_primary_xml_service.rb
@@ -2,7 +2,7 @@
module Packages
module Rpm
module RepositoryMetadata
- class BuildPrimaryXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
+ class BuildPrimaryXmlService < BuildXmlBaseService
ROOT_TAG = 'metadata'
ROOT_ATTRIBUTES = {
xmlns: 'http://linux.duke.edu/metadata/common',
@@ -11,37 +11,27 @@ module Packages
}.freeze
# Nodes that have only text without attributes
- REQUIRED_BASE_ATTRIBUTES = %i[name arch summary description].freeze
- NOT_REQUIRED_BASE_ATTRIBUTES = %i[url packager].freeze
+ BASE_ATTRIBUTES = %i[name arch summary description url packager].freeze
FORMAT_NODE_BASE_ATTRIBUTES = %i[license vendor group buildhost sourcerpm].freeze
- private
-
- def build_new_node
- builder = Nokogiri::XML::Builder.new do |xml|
+ def execute
+ super do |xml|
xml.package(type: :rpm, 'xmlns:rpm': 'http://linux.duke.edu/metadata/rpm') do
- build_required_base_attributes(xml)
- build_not_required_base_attributes(xml)
+ build_base_attributes(xml)
xml.version epoch: data[:epoch], ver: data[:version], rel: data[:release]
- xml.checksum data[:checksum], type: 'sha256', pkgid: 'YES'
+ xml.checksum data[:pkgid], type: 'sha256', pkgid: 'YES'
xml.size package: data[:packagesize], installed: data[:installedsize], archive: data[:archivesize]
xml.time file: data[:filetime], build: data[:buildtime]
xml.location href: data[:location] if data[:location].present?
build_format_node(xml)
end
end
-
- Nokogiri::XML(builder.to_xml).at('package')
end
- def build_required_base_attributes(xml)
- REQUIRED_BASE_ATTRIBUTES.each do |attribute|
- xml.method_missing(attribute, data[attribute])
- end
- end
+ private
- def build_not_required_base_attributes(xml)
- NOT_REQUIRED_BASE_ATTRIBUTES.each do |attribute|
+ def build_base_attributes(xml)
+ BASE_ATTRIBUTES.each do |attribute|
xml.method_missing(attribute, data[attribute]) if data[attribute].present?
end
end
diff --git a/app/services/packages/rpm/repository_metadata/build_repomd_xml.rb b/app/services/packages/rpm/repository_metadata/build_repomd_xml_service.rb
index 84614196254..cb80faa12c0 100644
--- a/app/services/packages/rpm/repository_metadata/build_repomd_xml.rb
+++ b/app/services/packages/rpm/repository_metadata/build_repomd_xml_service.rb
@@ -2,9 +2,7 @@
module Packages
module Rpm
module RepositoryMetadata
- class BuildRepomdXml
- attr_reader :data
-
+ class BuildRepomdXmlService
ROOT_ATTRIBUTES = {
xmlns: 'http://linux.duke.edu/metadata/repo',
'xmlns:rpm': 'http://linux.duke.edu/metadata/rpm'
@@ -26,12 +24,6 @@ module Packages
end
def execute
- build_repomd
- end
-
- private
-
- def build_repomd
Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
xml.repomd(ROOT_ATTRIBUTES) do
xml.revision Time.now.to_i
@@ -40,6 +32,10 @@ module Packages
end.to_xml
end
+ private
+
+ attr_reader :data
+
def build_data_info(xml)
data.each do |filename, info|
xml.data(type: filename) do
diff --git a/app/services/packages/rpm/repository_metadata/build_xml_base_service.rb b/app/services/packages/rpm/repository_metadata/build_xml_base_service.rb
new file mode 100644
index 00000000000..4dfb4087f1b
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_xml_base_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildXmlBaseService
+ def initialize(data)
+ @data = data
+ end
+
+ def execute
+ builder = Nokogiri::XML::Builder.new { |xml| yield xml } # rubocop:disable Style/ExplicitBlockArgument
+
+ Nokogiri::XML(builder.to_xml).at('package')
+ end
+
+ private
+
+ attr_reader :data
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/update_xml_service.rb b/app/services/packages/rpm/repository_metadata/update_xml_service.rb
new file mode 100644
index 00000000000..8fef425195f
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/update_xml_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class UpdateXmlService
+ BUILDERS = {
+ other: ::Packages::Rpm::RepositoryMetadata::BuildOtherXmlService,
+ primary: ::Packages::Rpm::RepositoryMetadata::BuildPrimaryXmlService,
+ filelist: ::Packages::Rpm::RepositoryMetadata::BuildFilelistXmlService
+ }.freeze
+
+ def initialize(filename:, xml: nil, data: {})
+ @builder_class = BUILDERS[filename]
+ raise ArgumentError, "Filename must be one of: #{BUILDERS.keys.join(', ')}" if @builder_class.nil?
+
+ @xml = Nokogiri::XML(xml) if xml.present?
+ @data = data
+ @filename = filename
+ end
+
+ def execute
+ return build_empty_structure if xml.blank?
+
+ remove_existing_packages
+ update_xml_document
+ update_package_count
+ xml.to_xml
+ end
+
+ private
+
+ attr_reader :xml, :data, :builder_class, :filename
+
+ def build_empty_structure
+ Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
+ xml.method_missing(builder_class::ROOT_TAG, builder_class::ROOT_ATTRIBUTES)
+ end.to_xml
+ end
+
+ def update_xml_document
+        # Add a new package metadata node to the root XML element
+ xml.at(builder_class::ROOT_TAG).add_child(builder_class.new(data).execute)
+ end
+
+ def update_package_count
+ packages_count = xml.css("//#{builder_class::ROOT_TAG}/package").count
+
+ xml.at(builder_class::ROOT_TAG).attributes["packages"].value = packages_count.to_s
+ end
+
+ def remove_existing_packages
+ case filename
+ when :primary
+ xml.search("checksum:contains('#{data[:pkgid]}')").each { _1.parent&.remove }
+ else
+ xml.search("[pkgid='#{data[:pkgid]}']").each(&:remove)
+ end
+ end
+ end
+ end
+ end
+end
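
A hedged usage sketch of the new dispatcher; the data keys are illustrative but mirror the ones the builders above read:

    # Illustrative only; existing_xml would normally be the stored metadata file,
    # or nil to start from an empty root document.
    updated_xml = Packages::Rpm::RepositoryMetadata::UpdateXmlService.new(
      filename: :other,
      xml: existing_xml,
      data: {
        pkgid: 'abc123',
        name: 'hello',
        arch: 'x86_64',
        epoch: '0',
        version: '1.0',
        release: '1',
        changelogs: [{ changelogtext: 'Initial release', changelogtime: 1_666_000_000 }]
      }
    ).execute
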
diff --git a/app/services/personal_access_tokens/revoke_service.rb b/app/services/personal_access_tokens/revoke_service.rb
index 732da75da3a..5371b6c91ef 100644
--- a/app/services/personal_access_tokens/revoke_service.rb
+++ b/app/services/personal_access_tokens/revoke_service.rb
@@ -4,7 +4,7 @@ module PersonalAccessTokens
class RevokeService < BaseService
attr_reader :token, :current_user, :group
- def initialize(current_user = nil, token: nil, group: nil )
+ def initialize(current_user = nil, token: nil, group: nil)
@current_user = current_user
@token = token
@group = group
diff --git a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
index c82ed97203f..c91103f897f 100644
--- a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
@@ -95,10 +95,12 @@ module Projects
end
def request_body(oids)
- {
+ body = {
operation: DOWNLOAD_ACTION,
objects: oids.map { |oid, size| { oid: oid, size: size } }
- }.to_json
+ }
+
+ Gitlab::Json.dump(body)
end
def headers
diff --git a/app/services/projects/move_users_star_projects_service.rb b/app/services/projects/move_users_star_projects_service.rb
index 5490448553f..4f1580c5f53 100644
--- a/app/services/projects/move_users_star_projects_service.rb
+++ b/app/services/projects/move_users_star_projects_service.rb
@@ -12,8 +12,8 @@ module Projects
Project.transaction do
user_stars.update_all(project_id: @project.id)
- Project.reset_counters @project.id, :users_star_projects
- Project.reset_counters source_project.id, :users_star_projects
+ @project.update(star_count: @project.starrers.with_state(:active).size)
+ source_project.update(star_count: source_project.starrers.with_state(:active).size)
success
end
diff --git a/app/services/projects/prometheus/alerts/notify_service.rb b/app/services/projects/prometheus/alerts/notify_service.rb
index 9f260345937..1e084c0e5eb 100644
--- a/app/services/projects/prometheus/alerts/notify_service.rb
+++ b/app/services/projects/prometheus/alerts/notify_service.rb
@@ -36,9 +36,9 @@ module Projects
truncate_alerts! if max_alerts_exceeded?
- alert_responses = process_prometheus_alerts
+ process_prometheus_alerts
- alert_response(alert_responses)
+ created
end
def self.processable?(payload)
@@ -152,12 +152,6 @@ module Projects
.execute
end
end
-
- def alert_response(alert_responses)
- alerts = alert_responses.flat_map { |resp| resp.payload[:alerts] }.compact
-
- success(alerts)
- end
end
end
end
diff --git a/app/services/projects/unlink_fork_service.rb b/app/services/projects/unlink_fork_service.rb
index 9eccc16a8b2..898421364db 100644
--- a/app/services/projects/unlink_fork_service.rb
+++ b/app/services/projects/unlink_fork_service.rb
@@ -60,3 +60,5 @@ module Projects
end
end
end
+
+Projects::UnlinkForkService.prepend_mod_with('Projects::UnlinkForkService')
diff --git a/app/services/protected_branches/api_service.rb b/app/services/protected_branches/api_service.rb
index f604a57bcd1..b8fe9bac13e 100644
--- a/app/services/protected_branches/api_service.rb
+++ b/app/services/protected_branches/api_service.rb
@@ -6,17 +6,32 @@ module ProtectedBranches
::ProtectedBranches::CreateService.new(@project, @current_user, protected_branch_params).execute
end
- def protected_branch_params
- {
- name: params[:name],
- allow_force_push: allow_force_push?,
- push_access_levels_attributes: ::ProtectedRefs::AccessLevelParams.new(:push, params).access_levels,
- merge_access_levels_attributes: ::ProtectedRefs::AccessLevelParams.new(:merge, params).access_levels
- }
+ def update(protected_branch)
+ ::ProtectedBranches::UpdateService.new(@project, @current_user,
+protected_branch_params(with_defaults: false)).execute(protected_branch)
end
- def allow_force_push?
- params[:allow_force_push] || false
+ private
+
+ def protected_branch_params(with_defaults: true)
+ params.slice(*attributes).merge(
+ {
+ push_access_levels_attributes: access_level_attributes(:push, with_defaults),
+ merge_access_levels_attributes: access_level_attributes(:merge, with_defaults)
+ }
+ )
+ end
+
+ def access_level_attributes(type, with_defaults)
+ ::ProtectedRefs::AccessLevelParams.new(
+ type,
+ params,
+ with_defaults: with_defaults
+ ).access_levels
+ end
+
+ def attributes
+ [:name, :allow_force_push]
end
end
end
diff --git a/app/services/protected_branches/cache_service.rb b/app/services/protected_branches/cache_service.rb
index 8c521f4ebcb..66ca549c508 100644
--- a/app/services/protected_branches/cache_service.rb
+++ b/app/services/protected_branches/cache_service.rb
@@ -7,20 +7,26 @@ module ProtectedBranches
CACHE_EXPIRE_IN = 1.day
CACHE_LIMIT = 1000
- def fetch(ref_name, dry_run: false)
+ def fetch(ref_name, dry_run: false, &block)
record = OpenSSL::Digest::SHA256.hexdigest(ref_name)
- Gitlab::Redis::Cache.with do |redis|
+ with_redis do |redis|
cached_result = redis.hget(redis_key, record)
- decoded_result = Gitlab::Redis::Boolean.decode(cached_result) unless cached_result.nil?
+ if cached_result.nil?
+ metrics.increment_cache_miss
+ else
+ metrics.increment_cache_hit
+
+ decoded_result = Gitlab::Redis::Boolean.decode(cached_result)
+ end
# If we're dry-running, don't break because we need to check against
# the real value to ensure the cache is working properly.
# If the result is nil we'll need to run the block, so don't break yet.
break decoded_result unless dry_run || decoded_result.nil?
- calculated_value = yield
+ calculated_value = metrics.observe_cache_generation(&block)
check_and_log_discrepancy(decoded_result, calculated_value, ref_name) if dry_run
@@ -42,11 +48,15 @@ module ProtectedBranches
end
def refresh
- Gitlab::Redis::Cache.with { |redis| redis.unlink(redis_key) }
+ with_redis { |redis| redis.unlink(redis_key) }
end
private
+ def with_redis(&block)
+ Gitlab::Redis::Cache.with(&block) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
def check_and_log_discrepancy(cached_value, real_value, ref_name)
return if cached_value.nil?
return if cached_value == real_value
@@ -64,5 +74,14 @@ module ProtectedBranches
def redis_key
@redis_key ||= [CACHE_ROOT_KEY, @project.id].join(':')
end
+
+ def metrics
+ @metrics ||= Gitlab::Cache::Metrics.new(
+ caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id),
+ cache_identifier: "#{self.class}#fetch",
+ feature_category: :source_code_management,
+ backing_resource: :cpu
+ )
+ end
end
end
diff --git a/app/services/protected_refs/access_level_params.rb b/app/services/protected_refs/access_level_params.rb
index 59fc17868d1..a421964a6ab 100644
--- a/app/services/protected_refs/access_level_params.rb
+++ b/app/services/protected_refs/access_level_params.rb
@@ -4,9 +4,9 @@ module ProtectedRefs
class AccessLevelParams
attr_reader :type, :params
- def initialize(type, params)
+ def initialize(type, params, with_defaults: true)
@type = type
- @params = params_with_default(params)
+ @params = with_defaults ? params_with_default(params) : params
end
def access_levels
diff --git a/app/services/resource_events/base_change_timebox_service.rb b/app/services/resource_events/base_change_timebox_service.rb
index 372f1c9d816..ba7c9d90713 100644
--- a/app/services/resource_events/base_change_timebox_service.rb
+++ b/app/services/resource_events/base_change_timebox_service.rb
@@ -12,11 +12,15 @@ module ResourceEvents
def execute
create_event
+ track_event
+
resource.expire_note_etag_cache
end
private
+ def track_event; end
+
def create_event
raise NotImplementedError
end
diff --git a/app/services/resource_events/change_milestone_service.rb b/app/services/resource_events/change_milestone_service.rb
index 24935a3327a..a092d807d8f 100644
--- a/app/services/resource_events/change_milestone_service.rb
+++ b/app/services/resource_events/change_milestone_service.rb
@@ -13,6 +13,12 @@ module ResourceEvents
private
+ def track_event
+ return unless resource.is_a?(WorkItem)
+
+ Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter.track_work_item_milestone_changed_action(author: user)
+ end
+
def create_event
ResourceMilestoneEvent.create(build_resource_args)
end
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 7fd0fb10b4b..da2a51562f8 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -63,7 +63,7 @@ module ServicePing
def submit_payload(payload, path: USAGE_DATA_PATH)
Gitlab::HTTP.post(
URI.join(base_url, path),
- body: payload.to_json,
+ body: Gitlab::Json.dump(payload),
allow_local_requests: true,
headers: { 'Content-type' => 'application/json' }
)
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index e0bab4cd6ad..5cadff42958 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -34,7 +34,7 @@ module Snippets
move_temporary_files
- ServiceResponse.success(payload: { snippet: @snippet } )
+ ServiceResponse.success(payload: { snippet: @snippet })
else
snippet_error_response(@snippet, 400)
end
diff --git a/app/services/spam/spam_verdict_service.rb b/app/services/spam/spam_verdict_service.rb
index 08634ec840c..0dcb3546034 100644
--- a/app/services/spam/spam_verdict_service.rb
+++ b/app/services/spam/spam_verdict_service.rb
@@ -24,7 +24,7 @@ module Spam
label = spamcheck_error ? 'ERROR' : spamcheck_result.to_s.upcase
- histogram.observe( { result: label }, external_spam_check_round_trip_time )
+ histogram.observe({ result: label }, external_spam_check_round_trip_time)
# assign result to a var for logging it before reassigning to nil when monitorMode is true
original_spamcheck_result = spamcheck_result
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 7275a05d2ce..ad9f0dd0368 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -16,6 +16,8 @@ module SystemNotes
def self.issuable_events
{
+ assigned: s_('IssuableEvents|assigned to'),
+ unassigned: s_('IssuableEvents|unassigned'),
review_requested: s_('IssuableEvents|requested review from'),
review_request_removed: s_('IssuableEvents|removed review request for')
}.freeze
@@ -83,7 +85,7 @@ module SystemNotes
#
# "assigned to @user1 additionally to @user2"
#
- # "assigned to @user1, @user2 and @user3 and unassigned from @user4 and @user5"
+ # "assigned to @user1, @user2 and @user3 and unassigned @user4 and @user5"
#
# "assigned to @user1 and @user2"
#
@@ -94,8 +96,8 @@ module SystemNotes
text_parts = []
Gitlab::I18n.with_default_locale do
- text_parts << "assigned to #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
- text_parts << "unassigned #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
+ text_parts << "#{self.class.issuable_events[:assigned]} #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
+ text_parts << "#{self.class.issuable_events[:unassigned]} #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
end
body = text_parts.join(' and ')
diff --git a/app/services/tags/create_service.rb b/app/services/tags/create_service.rb
index 8a7b98ab944..e332b51ae94 100644
--- a/app/services/tags/create_service.rb
+++ b/app/services/tags/create_service.rb
@@ -3,6 +3,8 @@
module Tags
class CreateService < BaseService
def execute(tag_name, target, message)
+ return error('Target is empty', 400) if target.blank?
+
valid_tag = Gitlab::GitRefValidator.validate(tag_name)
return error('Tag name invalid', 400) unless valid_tag
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index 6ae394072c6..06352d36215 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -329,11 +329,12 @@ class TodoService
commit_id: nil
}
- if target.is_a?(Commit)
+ case target
+ when Commit
attributes.merge!(target_id: nil, commit_id: target.id)
- elsif target.is_a?(Issue)
+ when Issue
attributes[:issue_type] = target.issue_type
- elsif target.is_a?(Discussion)
+ when Discussion
attributes.merge!(target_type: nil, target_id: nil, discussion: target)
end
diff --git a/app/services/two_factor/base_service.rb b/app/services/two_factor/base_service.rb
index 0957d7ebabd..50a3a5c099c 100644
--- a/app/services/two_factor/base_service.rb
+++ b/app/services/two_factor/base_service.rb
@@ -4,12 +4,12 @@ module TwoFactor
class BaseService
include BaseServiceUtility
- attr_reader :current_user, :params, :user
+ attr_reader :current_user, :user, :group
def initialize(current_user, params = {})
@current_user = current_user
- @params = params
@user = params.delete(:user)
+ @group = params.delete(:group)
end
end
end
diff --git a/app/services/two_factor/destroy_service.rb b/app/services/two_factor/destroy_service.rb
index 859012c2153..3db9aefbe70 100644
--- a/app/services/two_factor/destroy_service.rb
+++ b/app/services/two_factor/destroy_service.rb
@@ -3,7 +3,7 @@
module TwoFactor
class DestroyService < ::TwoFactor::BaseService
def execute
- return error(_('You are not authorized to perform this action')) unless can?(current_user, :disable_two_factor, user)
+ return error(_('You are not authorized to perform this action')) unless authorized?
return error(_('Two-factor authentication is not enabled for this user')) unless user.two_factor_enabled?
result = disable_two_factor
@@ -15,6 +15,10 @@ module TwoFactor
private
+ def authorized?
+ can?(current_user, :disable_two_factor, user)
+ end
+
def disable_two_factor
::Users::UpdateService.new(current_user, user: user).execute do |user|
user.disable_two_factor!
diff --git a/app/services/user_project_access_changed_service.rb b/app/services/user_project_access_changed_service.rb
index ceaf21bb926..f7178ee9bb6 100644
--- a/app/services/user_project_access_changed_service.rb
+++ b/app/services/user_project_access_changed_service.rb
@@ -21,9 +21,10 @@ class UserProjectAccessChangedService
if blocking
AuthorizedProjectsWorker.bulk_perform_and_wait(bulk_args)
else
- if priority == HIGH_PRIORITY
+ case priority
+ when HIGH_PRIORITY
AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
- elsif priority == MEDIUM_PRIORITY
+ when MEDIUM_PRIORITY
AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.bulk_perform_in(MEDIUM_DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds) # rubocop:disable Scalability/BulkPerformWithContext
else
with_related_class_context do
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 0fa1bb96b13..8ef1b3e0613 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -177,7 +177,7 @@ module Users
# Allowed params for user signup
def signup_params
- [
+ signup_params = [
:email,
:name,
:password,
@@ -187,6 +187,9 @@ module Users
:first_name,
:last_name
]
+ signup_params << :preferred_language if ::Feature.enabled?(:preferred_language_switcher)
+
+ signup_params
end
end
end
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index a378cb09854..d4c00a4dcec 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -8,9 +8,20 @@ module Users
def initialize(current_user)
@current_user = current_user
+
+ @scheduled_records_gauge = Gitlab::Metrics.gauge(
+ :gitlab_ghost_user_migration_scheduled_records_total,
+ 'The total number of scheduled ghost user migrations'
+ )
+ @lag_gauge = Gitlab::Metrics.gauge(
+ :gitlab_ghost_user_migration_lag_seconds,
+ 'The waiting time in seconds of the oldest scheduled record for ghost user migration'
+ )
end
- # Synchronously destroys +user+
+ # Asynchronously destroys +user+
+ # Migrating the associated user records and post-migration cleanup are
+ # handled by the Users::MigrateRecordsToGhostUserWorker cron worker.
#
# The operation will fail if the user is the sole owner of any groups. To
# force the groups to be destroyed, pass `delete_solo_owned_groups: true` in
@@ -24,10 +35,7 @@ module Users
# a hard deletion without destroying solo-owned groups, pass
# `delete_solo_owned_groups: false, hard_delete: true` in +options+.
#
- # To make the service asynchronous, a new behaviour is being introduced
- # behind the user_destroy_with_limited_execution_time_worker feature flag.
- # Migrating the associated user records, and post-migration cleanup is
- # handled by the Users::MigrateRecordsToGhostUserWorker cron worker.
+
def execute(user, options = {})
delete_solo_owned_groups = options.fetch(:delete_solo_owned_groups, options[:hard_delete])
@@ -62,32 +70,43 @@ module Users
yield(user) if block_given?
hard_delete = options.fetch(:hard_delete, false)
+ Users::GhostUserMigration.create!(user: user,
+ initiator_user: current_user,
+ hard_delete: hard_delete)
- if Feature.enabled?(:user_destroy_with_limited_execution_time_worker)
- Users::GhostUserMigration.create!(user: user,
- initiator_user: current_user,
- hard_delete: hard_delete)
+ update_metrics
+ end
- else
- MigrateToGhostUserService.new(user).execute(hard_delete: options[:hard_delete])
+ private
+
+ attr_reader :scheduled_records_gauge, :lag_gauge
- response = Snippets::BulkDestroyService.new(current_user, user.snippets)
- .execute(skip_authorization: hard_delete)
- raise DestroyError, response.message if response.error?
+ def update_metrics
+ update_scheduled_records_gauge
+ update_lag_gauge
+ end
- # Rails attempts to load all related records into memory before
- # destroying: https://github.com/rails/rails/issues/22510
- # This ensures we delete records in batches.
- user.destroy_dependent_associations_in_batches(exclude: [:snippets])
- user.nullify_dependent_associations_in_batches
+ def update_scheduled_records_gauge
+ # We do not want to issue unbounded COUNT() queries, hence we count at most
+ # 1001 records and approximate the result when more than 1000 exist.
+ count = Users::GhostUserMigration.limit(1001).count
- # Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
- user_data = user.destroy
- namespace.destroy
+ if count == 1001
+ # more than 1000 records, approximate count
+ min = Users::GhostUserMigration.minimum(:id) || 0
+ max = Users::GhostUserMigration.maximum(:id) || 0
- user_data
+ scheduled_records_gauge.set({}, max - min)
+ else
+ # 1000 or fewer records, count is accurate
+ scheduled_records_gauge.set({}, count)
end
end
+
+ def update_lag_gauge
+ oldest_job = Users::GhostUserMigration.first
+ lag_gauge.set({}, Time.current - oldest_job.created_at)
+ end
end
end
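The scheduled-records gauge above deliberately avoids an unbounded COUNT() by counting at most 1001 rows and, past that threshold, estimating the backlog from the id range. A hedged sketch of the same approximation on a plain Array of ids follows; approximate_backlog and the sample data are illustrative, not GitLab code.

LIMIT = 1000

# ids stands in for the primary keys of the scheduled records.
def approximate_backlog(ids)
  count = ids.first(LIMIT + 1).size      # bounded "count", never scans everything
  return count if count <= LIMIT         # small backlog: the count is exact

  (ids.max || 0) - (ids.min || 0)        # large backlog: estimate from the id range
end

approximate_backlog((1..50).to_a)        # => 50 (exact)
approximate_backlog((1..5000).to_a)      # => 4999 (approximate)

The id-range estimate is only reasonable when ids are assigned roughly sequentially without large gaps, which is why the exact count is still used for small backlogs.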
diff --git a/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb b/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb
index 7c4a5698ea9..d294312cc30 100644
--- a/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb
+++ b/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb
@@ -2,25 +2,38 @@
module Users
class MigrateRecordsToGhostUserInBatchesService
+ LIMIT_SIZE = 1000
+
def initialize
@execution_tracker = Gitlab::Utils::ExecutionTracker.new
end
def execute
- Users::GhostUserMigration.find_each do |user_to_migrate|
+ ghost_user_migrations.each do |job|
break if execution_tracker.over_limit?
- service = Users::MigrateRecordsToGhostUserService.new(user_to_migrate.user,
- user_to_migrate.initiator_user,
+ service = Users::MigrateRecordsToGhostUserService.new(job.user,
+ job.initiator_user,
execution_tracker)
- service.execute(hard_delete: user_to_migrate.hard_delete)
+ service.execute(hard_delete: job.hard_delete)
+ rescue Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError
+ # no-op
+ rescue StandardError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+ reschedule(job)
end
- rescue Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError
- # no-op
end
private
attr_reader :execution_tracker
+
+ def ghost_user_migrations
+ Users::GhostUserMigration.consume_order.limit(LIMIT_SIZE)
+ end
+
+ def reschedule(job)
+ job.update(consume_after: 30.minutes.from_now)
+ end
end
end
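MigrateRecordsToGhostUserInBatchesService now pulls a bounded batch, stops once its execution-time budget runs out, and pushes jobs that raise back by 30 minutes. A minimal sketch of that time-boxed loop with per-job rescheduling follows; Job, TimeBudget, process and run_batch are names invented for illustration.

Job = Struct.new(:id, :consume_after, keyword_init: true)

# Simple wall-clock budget standing in for Gitlab::Utils::ExecutionTracker.
class TimeBudget
  def initialize(seconds)
    @deadline = Time.now + seconds
  end

  def over_limit?
    Time.now >= @deadline
  end
end

def run_batch(jobs, budget, retry_in: 30 * 60)
  jobs.each do |job|
    break if budget.over_limit?            # stop cleanly when the budget is spent

    begin
      process(job)                         # stand-in for the per-job migration service
    rescue StandardError => e
      warn "job #{job.id} failed: #{e.message}, rescheduling"
      job.consume_after = Time.now + retry_in
    end
  end
end

def process(job)
  raise 'flaky failure' if job.id.odd?     # simulate an occasional error
end

run_batch([Job.new(id: 1), Job.new(id: 2)], TimeBudget.new(5))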
diff --git a/app/services/users/migrate_to_ghost_user_service.rb b/app/services/users/migrate_to_ghost_user_service.rb
deleted file mode 100644
index 3eb220c0e40..00000000000
--- a/app/services/users/migrate_to_ghost_user_service.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-# frozen_string_literal: true
-
-# When a user is destroyed, some of their associated records are
-# moved to a "Ghost User", to prevent these associated records from
-# being destroyed.
-#
-# For example, all the issues/MRs a user has created are _not_ destroyed
-# when the user is destroyed.
-module Users
- class MigrateToGhostUserService
- extend ActiveSupport::Concern
-
- attr_reader :ghost_user, :user, :hard_delete
-
- def initialize(user)
- @user = user
- @ghost_user = User.ghost
- end
-
- # If an admin attempts to hard delete a user, in some cases associated
- # records may have a NOT NULL constraint on the user ID that prevents the
- # record from being destroyed. In such situations we must assign the record
- # to the ghost user. Passing in `hard_delete: true` ensures these records are
- # assigned to the ghost user before the user is destroyed, letting the other
- # associated records be destroyed.
- def execute(hard_delete: false)
- @hard_delete = hard_delete
- transition = user.block_transition
-
- # Block the user before moving records to prevent a data race.
- # For example, if the user creates an issue after `migrate_issues`
- # runs and before the user is destroyed, the destroy will fail with
- # an exception.
- user.block
-
- begin
- user.transaction do
- migrate_records
- end
- rescue Exception # rubocop:disable Lint/RescueException
- # Reverse the user block if record migration fails
- if transition
- transition.rollback
- user.save!
- end
-
- raise
- end
-
- user.reset
- end
-
- private
-
- def migrate_records
- return if hard_delete
-
- migrate_issues
- migrate_merge_requests
- migrate_notes
- migrate_abuse_reports
- migrate_award_emoji
- migrate_snippets
- migrate_reviews
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def migrate_issues
- batched_migrate(Issue, :author_id)
- batched_migrate(Issue, :last_edited_by_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def migrate_merge_requests
- batched_migrate(MergeRequest, :author_id)
- batched_migrate(MergeRequest, :merge_user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def migrate_notes
- batched_migrate(Note, :author_id)
- end
-
- def migrate_abuse_reports
- user.reported_abuse_reports.update_all(reporter_id: ghost_user.id)
- end
-
- def migrate_award_emoji
- user.award_emoji.update_all(user_id: ghost_user.id)
- end
-
- def migrate_snippets
- snippets = user.snippets.only_project_snippets
- snippets.update_all(author_id: ghost_user.id)
- end
-
- def migrate_reviews
- batched_migrate(Review, :author_id)
- end
-
- # rubocop:disable CodeReuse/ActiveRecord
- def batched_migrate(base_scope, column, batch_size: 50)
- loop do
- update_count = base_scope.where(column => user.id).limit(batch_size).update_all(column => ghost_user.id)
- break if update_count == 0
- end
- end
- # rubocop:enable CodeReuse/ActiveRecord
- end
-end
-
-Users::MigrateToGhostUserService.prepend_mod_with('Users::MigrateToGhostUserService')
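Although the service is deleted, its batched_migrate helper shows a reusable pattern: reassign a foreign-key column to the ghost user in fixed-size batches until an update touches zero rows, rather than in one statement over every row. An in-memory sketch of the same loop follows; Record and batched_reassign are illustrative names, and the real code goes through ActiveRecord's update_all.

Record = Struct.new(:author_id)

def batched_reassign(records, from_id:, to_id:, batch_size: 50)
  loop do
    batch = records.select { |r| r.author_id == from_id }.first(batch_size)
    break if batch.empty?                  # mirrors `break if update_count == 0`

    batch.each { |r| r.author_id = to_id }
  end
end

issues = Array.new(120) { Record.new(42) }
batched_reassign(issues, from_id: 42, to_id: 1_000_000)   # runs in batches of 50, 50, 20
issues.count { |i| i.author_id == 42 }                     # => 0

Working in batches keeps each UPDATE's lock footprint and transaction size small, which is the point of the original helper.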
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index e5e5e375198..d32dcd73734 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -57,11 +57,11 @@ class WebHookService
end
def execute
- return { status: :error, message: 'Hook disabled' } if disabled?
+ return ServiceResponse.error(message: 'Hook disabled') if disabled?
if recursion_blocked?
log_recursion_blocked
- return { status: :error, message: 'Recursive webhook blocked' }
+ return ServiceResponse.error(message: 'Recursive webhook blocked')
end
Gitlab::WebHooks::RecursionDetection.register!(hook)
@@ -79,11 +79,7 @@ class WebHookService
execution_duration: Gitlab::Metrics::System.monotonic_time - start_time
)
- {
- status: :success,
- http_status: response.code,
- message: response.body
- }
+ ServiceResponse.success(message: response.body, payload: { http_status: response.code })
rescue *Gitlab::HTTP::HTTP_ERRORS,
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
execution_duration = Gitlab::Metrics::System.monotonic_time - start_time
@@ -97,10 +93,7 @@ class WebHookService
Gitlab::AppLogger.error("WebHook Error after #{execution_duration.to_i.seconds}s => #{e}")
- {
- status: :error,
- message: error_message
- }
+ ServiceResponse.error(message: error_message)
end
def async_execute
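WebHookService#execute now returns a ServiceResponse instead of a bare Hash, with the HTTP status moved into the payload. The sketch below shows how a caller adapts, using a small stand-in with the same read interface; MiniResponse and handle are illustrative, and the success?/message/payload readers are assumed to match what ServiceResponse exposes.

MiniResponse = Struct.new(:status, :message, :payload, keyword_init: true) do
  def success?
    status == :success
  end
end

def handle(response)
  if response.success?
    "delivered (HTTP #{response.payload[:http_status]}): #{response.message}"
  else
    "failed: #{response.message}"
  end
end

handle(MiniResponse.new(status: :success, message: 'ok', payload: { http_status: 200 }))
# => "delivered (HTTP 200): ok"
handle(MiniResponse.new(status: :error, message: 'Hook disabled', payload: {}))
# => "failed: Hook disabled"

Callers that previously read result[:status] and result[:http_status] now go through the response object, which is the main migration cost of this change.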
diff --git a/app/services/work_items/create_service.rb b/app/services/work_items/create_service.rb
index ebda043e873..87cc690d666 100644
--- a/app/services/work_items/create_service.rb
+++ b/app/services/work_items/create_service.rb
@@ -30,6 +30,13 @@ module WorkItems
error(e.message, :unprocessable_entity)
end
+ def before_create(work_item)
+ execute_widgets(work_item: work_item, callback: :before_create_callback,
+ widget_params: @widget_params)
+
+ super
+ end
+
def transaction_create(work_item)
super.tap do |save_result|
if save_result
diff --git a/app/services/work_items/widgets/hierarchy_service/base_service.rb b/app/services/work_items/widgets/hierarchy_service/base_service.rb
index bb681ef0083..236762d6937 100644
--- a/app/services/work_items/widgets/hierarchy_service/base_service.rb
+++ b/app/services/work_items/widgets/hierarchy_service/base_service.rb
@@ -7,7 +7,6 @@ module WorkItems
private
def handle_hierarchy_changes(params)
- return feature_flag_error unless feature_flag_enabled?
return incompatible_args_error if incompatible_args?(params)
if params.key?(:parent)
@@ -48,24 +47,16 @@ module WorkItems
.execute
end
- def feature_flag_enabled?
- Feature.enabled?(:work_items_hierarchy, work_item&.project)
- end
-
def incompatible_args?(params)
params[:children] && params[:parent]
end
- def feature_flag_error
- error(_('`work_items_hierarchy` feature flag disabled for this project'))
- end
-
def incompatible_args_error
error(_('A Work Item can be a parent or a child, but not both.'))
end
def invalid_args_error(params)
- error(_("One or more arguments are invalid: %{args}." % { args: params.keys.to_sentence } ))
+ error(_("One or more arguments are invalid: %{args}." % { args: params.keys.to_sentence }))
end
def service_response!(result)
diff --git a/app/services/work_items/widgets/milestone_service/base_service.rb b/app/services/work_items/widgets/milestone_service/base_service.rb
new file mode 100644
index 00000000000..f373e6daea3
--- /dev/null
+++ b/app/services/work_items/widgets/milestone_service/base_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module MilestoneService
+ class BaseService < WorkItems::Widgets::BaseService
+ private
+
+ def handle_milestone_change(params:)
+ return unless params.present? && params.key?(:milestone_id)
+
+ unless has_permission?(:set_work_item_metadata)
+ params.delete(:milestone_id)
+ return
+ end
+
+ if params[:milestone_id].nil?
+ work_item.milestone = nil
+
+ return
+ end
+
+ project = work_item.project
+ milestone = MilestonesFinder.new({
+ project_ids: [project.id],
+ group_ids: project.group&.self_and_ancestors&.select(:id),
+ ids: [params[:milestone_id]]
+ }).execute.first
+
+ if milestone
+ work_item.milestone = milestone
+ else
+ params.delete(:milestone_id)
+ end
+ end
+ end
+ end
+ end
+end
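The new milestone widget base service applies a single guard sequence: ignore requests that do not carry the key, silently drop the param when the actor may not set metadata, treat an explicit nil as clearing the milestone, and accept only ids a scoped finder can resolve. A self-contained sketch of that sequence follows; resolve_milestone_param, permitted: and visible_ids: are illustrative names, not GitLab code.

# Returns the milestone id to apply, :clear, or nil when the param is ignored.
def resolve_milestone_param(params, permitted:, visible_ids:)
  return nil unless params.key?(:milestone_id)   # nothing requested
  return nil unless permitted                    # silently dropped without permission

  id = params[:milestone_id]
  return :clear if id.nil?                       # explicit nil clears the milestone

  visible_ids.include?(id) ? id : nil            # only accept resolvable ids
end

resolve_milestone_param({ milestone_id: 7 }, permitted: true, visible_ids: [7, 9])    # => 7
resolve_milestone_param({ milestone_id: 8 }, permitted: true, visible_ids: [7, 9])    # => nil
resolve_milestone_param({ milestone_id: nil }, permitted: true, visible_ids: [])      # => :clear
resolve_milestone_param({ milestone_id: 7 }, permitted: false, visible_ids: [7])      # => nil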
diff --git a/app/services/work_items/widgets/milestone_service/create_service.rb b/app/services/work_items/widgets/milestone_service/create_service.rb
new file mode 100644
index 00000000000..e8d6bfe503c
--- /dev/null
+++ b/app/services/work_items/widgets/milestone_service/create_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module MilestoneService
+ class CreateService < WorkItems::Widgets::MilestoneService::BaseService
+ def before_create_callback(params:)
+ handle_milestone_change(params: params)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/milestone_service/update_service.rb b/app/services/work_items/widgets/milestone_service/update_service.rb
new file mode 100644
index 00000000000..7ff0c2a5367
--- /dev/null
+++ b/app/services/work_items/widgets/milestone_service/update_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module MilestoneService
+ class UpdateService < WorkItems::Widgets::MilestoneService::BaseService
+ def before_update_callback(params:)
+ handle_milestone_change(params: params)
+ end
+ end
+ end
+ end
+end