gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2023-02-20 16:49:51 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2023-02-20 16:49:51 +0300
commit     71786ddc8e28fbd3cb3fcc4b3ff15e5962a1c82e (patch)
tree       6a2d93ef3fb2d353bb7739e4b57e6541f51cdd71 /app/workers
parent     a7253423e3403b8c08f8a161e5937e1488f5f407 (diff)
Add latest changes from gitlab-org/gitlab@15-9-stable-ee (tag: v15.9.0-rc42)
Diffstat (limited to 'app/workers')
-rw-r--r--  app/workers/all_queues.yml | 34
-rw-r--r--  app/workers/approve_blocked_pending_approval_users_worker.rb | 2
-rw-r--r--  app/workers/bulk_import_worker.rb | 16
-rw-r--r--  app/workers/bulk_imports/pipeline_worker.rb | 18
-rw-r--r--  app/workers/ci/archive_traces_cron_worker.rb | 10
-rw-r--r--  app/workers/ci/cancel_redundant_pipelines_worker.rb | 21
-rw-r--r--  app/workers/ci/initial_pipeline_process_worker.rb | 2
-rw-r--r--  app/workers/ci/parse_secure_file_metadata_worker.rb | 2
-rw-r--r--  app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb | 2
-rw-r--r--  app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb | 24
-rw-r--r--  app/workers/ci/schedule_delete_objects_cron_worker.rb | 4
-rw-r--r--  app/workers/concerns/application_worker.rb | 5
-rw-r--r--  app/workers/concerns/git_garbage_collect_methods.rb | 4
-rw-r--r--  app/workers/concerns/gitlab/github_import/rescheduling_methods.rb | 4
-rw-r--r--  app/workers/concerns/limited_capacity/worker.rb | 8
-rw-r--r--  app/workers/concerns/waitable_worker.rb | 7
-rw-r--r--  app/workers/database/batched_background_migration/execution_worker.rb | 2
-rw-r--r--  app/workers/gitlab/import/stuck_import_job.rb | 4
-rw-r--r--  app/workers/incident_management/close_incident_worker.rb | 2
-rw-r--r--  app/workers/issues/close_worker.rb | 2
-rw-r--r--  app/workers/merge_requests/close_issue_worker.rb | 2
-rw-r--r--  app/workers/merge_requests/delete_source_branch_worker.rb | 9
-rw-r--r--  app/workers/new_issue_worker.rb | 2
-rw-r--r--  app/workers/new_merge_request_worker.rb | 4
-rw-r--r--  app/workers/object_storage/migrate_uploads_worker.rb | 2
-rw-r--r--  app/workers/packages/debian/generate_distribution_worker.rb | 3
-rw-r--r--  app/workers/packages/debian/process_changes_worker.rb | 5
-rw-r--r--  app/workers/packages/debian/process_package_file_worker.rb | 21
-rw-r--r--  app/workers/pipeline_schedule_worker.rb | 34
-rw-r--r--  app/workers/projects/post_creation_worker.rb | 9
-rw-r--r--  app/workers/projects/record_target_platforms_worker.rb | 2
-rw-r--r--  app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb | 1
-rw-r--r--  app/workers/prune_old_events_worker.rb | 2
-rw-r--r--  app/workers/run_pipeline_schedule_worker.rb | 4
-rw-r--r--  app/workers/tasks_to_be_done/create_worker.rb | 2
-rw-r--r--  app/workers/user_status_cleanup/batch_worker.rb | 2
-rw-r--r--  app/workers/users/create_statistics_worker.rb | 2
-rw-r--r--  app/workers/users/migrate_records_to_ghost_user_in_batches_worker.rb | 2
38 files changed, 166 insertions, 115 deletions
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 693afdea43a..c660243d336 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -255,6 +255,15 @@
:weight: 1
:idempotent: true
:tags: []
+- :name: cronjob:ci_runners_stale_machines_cleanup_cron
+ :worker_name: Ci::Runners::StaleMachinesCleanupCronWorker
+ :feature_category: :runner_fleet
+ :has_external_dependencies: false
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: true
+ :tags: []
- :name: cronjob:ci_schedule_delete_objects_cron
:worker_name: Ci::ScheduleDeleteObjectsCronWorker
:feature_category: :continuous_integration
@@ -653,7 +662,7 @@
:tags: []
- :name: cronjob:prune_old_events
:worker_name: PruneOldEventsWorker
- :feature_category: :users
+ :feature_category: :user_profile
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -806,7 +815,7 @@
:tags: []
- :name: cronjob:user_status_cleanup_batch
:worker_name: UserStatusCleanup::BatchWorker
- :feature_category: :users
+ :feature_category: :user_profile
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -815,7 +824,7 @@
:tags: []
- :name: cronjob:users_create_statistics
:worker_name: Users::CreateStatisticsWorker
- :feature_category: :users
+ :feature_category: :user_profile
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -833,7 +842,7 @@
:tags: []
- :name: cronjob:users_migrate_records_to_ghost_user_in_batches
:worker_name: Users::MigrateRecordsToGhostUserInBatchesWorker
- :feature_category: :users
+ :feature_category: :user_profile
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -2192,7 +2201,7 @@
:tags: []
- :name: approve_blocked_pending_approval_users
:worker_name: ApproveBlockedPendingApprovalUsersWorker
- :feature_category: :users
+ :feature_category: :user_profile
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -2298,6 +2307,15 @@
:weight: 1
:idempotent: true
:tags: []
+- :name: ci_cancel_redundant_pipelines
+ :worker_name: Ci::CancelRedundantPipelinesWorker
+ :feature_category: :continuous_integration
+ :has_external_dependencies: false
+ :urgency: :high
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: true
+ :tags: []
- :name: ci_delete_objects
:worker_name: Ci::DeleteObjectsWorker
:feature_category: :continuous_integration
@@ -2318,7 +2336,7 @@
:tags: []
- :name: ci_parse_secure_file_metadata
:worker_name: Ci::ParseSecureFileMetadataWorker
- :feature_category: :mobile_signing_deployment
+ :feature_category: :mobile_devops
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
@@ -2917,7 +2935,7 @@
:urgency: :high
:resource_boundary: :cpu
:weight: 2
- :idempotent: false
+ :idempotent: true
:tags: []
- :name: new_note
:worker_name: NewNoteWorker
@@ -3146,7 +3164,7 @@
:tags: []
- :name: projects_record_target_platforms
:worker_name: Projects::RecordTargetPlatformsWorker
- :feature_category: :experimentation_activation
+ :feature_category: :projects
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
diff --git a/app/workers/approve_blocked_pending_approval_users_worker.rb b/app/workers/approve_blocked_pending_approval_users_worker.rb
index 661ec87c1dd..13ce2454cf5 100644
--- a/app/workers/approve_blocked_pending_approval_users_worker.rb
+++ b/app/workers/approve_blocked_pending_approval_users_worker.rb
@@ -9,7 +9,7 @@ class ApproveBlockedPendingApprovalUsersWorker
idempotent!
- feature_category :users
+ feature_category :user_profile
def perform(current_user_id)
current_user = User.find(current_user_id)
diff --git a/app/workers/bulk_import_worker.rb b/app/workers/bulk_import_worker.rb
index d5eca86744e..6bce13c5ff0 100644
--- a/app/workers/bulk_import_worker.rb
+++ b/app/workers/bulk_import_worker.rb
@@ -4,6 +4,7 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
PERFORM_DELAY = 5.seconds
+ DEFAULT_BATCH_SIZE = 5
data_consistency :always
feature_category :importers
@@ -16,10 +17,11 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
return if @bulk_import.finished? || @bulk_import.failed?
return @bulk_import.fail_op! if all_entities_failed?
return @bulk_import.finish! if all_entities_processed? && @bulk_import.started?
+ return re_enqueue if max_batch_size_exceeded? # Do not start more jobs if max allowed are already running
@bulk_import.start! if @bulk_import.created?
- created_entities.find_each do |entity|
+ created_entities.first(next_batch_size).each do |entity|
BulkImports::CreatePipelineTrackersService.new(entity).execute!
entity.start!
@@ -58,4 +60,16 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
def re_enqueue
BulkImportWorker.perform_in(PERFORM_DELAY, @bulk_import.id)
end
+
+ def started_entities
+ entities.with_status(:started)
+ end
+
+ def max_batch_size_exceeded?
+ started_entities.count >= DEFAULT_BATCH_SIZE
+ end
+
+ def next_batch_size
+ [DEFAULT_BATCH_SIZE - started_entities.count, 0].max
+ end
end
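
Note (illustrative, not part of the diff): a minimal plain-Ruby sketch of the batching arithmetic behind the new BulkImportWorker helpers, with the started-entity count passed in as a plain integer rather than an ActiveRecord count. The cap of 5 comes from DEFAULT_BATCH_SIZE above; everything else here is hypothetical.

# Sketch only: the clamp performed by next_batch_size and the guard in
# max_batch_size_exceeded?, isolated from the worker.
DEFAULT_BATCH_SIZE = 5

def next_batch_size(started_count)
  # Schedule only the headroom left under the cap, never a negative number.
  [DEFAULT_BATCH_SIZE - started_count, 0].max
end

def max_batch_size_exceeded?(started_count)
  started_count >= DEFAULT_BATCH_SIZE
end

[0, 3, 5, 7].each do |started|
  puts "started=#{started} exceeded=#{max_batch_size_exceeded?(started)} next=#{next_batch_size(started)}"
end
# started=0 exceeded=false next=5
# started=3 exceeded=false next=2
# started=5 exceeded=true next=0
# started=7 exceeded=true next=0

So once five or more entities are already started, the worker re-enqueues itself after PERFORM_DELAY instead of starting more entities.
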
diff --git a/app/workers/bulk_imports/pipeline_worker.rb b/app/workers/bulk_imports/pipeline_worker.rb
index 62e85d38e61..8f03c74e13e 100644
--- a/app/workers/bulk_imports/pipeline_worker.rb
+++ b/app/workers/bulk_imports/pipeline_worker.rb
@@ -103,14 +103,8 @@ module BulkImports
pipeline_tracker.file_extraction_pipeline?
end
- def job_timeout?
- return false unless file_extraction_pipeline?
-
- time_since_entity_created > Pipeline::NDJSON_EXPORT_TIMEOUT
- end
-
def empty_export_timeout?
- export_empty? && time_since_entity_created > Pipeline::EMPTY_EXPORT_STATUS_TIMEOUT
+ export_empty? && time_since_tracker_created > Pipeline::EMPTY_EXPORT_STATUS_TIMEOUT
end
def export_failed?
@@ -167,8 +161,8 @@ module BulkImports
logger.error(structured_payload(payload))
end
- def time_since_entity_created
- Time.zone.now - entity.created_at
+ def time_since_tracker_created
+ Time.zone.now - (pipeline_tracker.created_at || entity.created_at)
end
def lease_timeout
@@ -178,5 +172,11 @@ module BulkImports
def lease_key
"gitlab:bulk_imports:pipeline_worker:#{pipeline_tracker.id}"
end
+
+ def job_timeout?
+ return false unless file_extraction_pipeline?
+
+ time_since_tracker_created > Pipeline::NDJSON_EXPORT_TIMEOUT
+ end
end
end
diff --git a/app/workers/ci/archive_traces_cron_worker.rb b/app/workers/ci/archive_traces_cron_worker.rb
index 12856805243..fe23d10c2ac 100644
--- a/app/workers/ci/archive_traces_cron_worker.rb
+++ b/app/workers/ci/archive_traces_cron_worker.rb
@@ -9,14 +9,20 @@ module Ci
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
feature_category :continuous_integration
+ deduplicate :until_executed, including_scheduled: true
# rubocop: disable CodeReuse/ActiveRecord
def perform
# Archive stale live traces which still resides in redis or database
# This could happen when Ci::ArchiveTraceWorker sidekiq jobs were lost by receiving SIGKILL
# More details in https://gitlab.com/gitlab-org/gitlab-foss/issues/36791
- Ci::Build.with_stale_live_trace.find_each(batch_size: 100) do |build|
- Ci::ArchiveTraceService.new.execute(build, worker_name: self.class.name)
+
+ if Feature.enabled?(:deduplicate_archive_traces_cron_worker)
+ Ci::ArchiveTraceService.new.batch_execute(worker_name: self.class.name)
+ else
+ Ci::Build.with_stale_live_trace.find_each(batch_size: 100) do |build|
+ Ci::ArchiveTraceService.new.execute(build, worker_name: self.class.name)
+ end
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/workers/ci/cancel_redundant_pipelines_worker.rb b/app/workers/ci/cancel_redundant_pipelines_worker.rb
new file mode 100644
index 00000000000..8ae8b2df952
--- /dev/null
+++ b/app/workers/ci/cancel_redundant_pipelines_worker.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Ci
+ class CancelRedundantPipelinesWorker
+ include ApplicationWorker
+
+ data_consistency :always
+ feature_category :continuous_integration
+ idempotent!
+ deduplicate :until_executed
+ urgency :high
+
+ def perform(pipeline_id)
+ Ci::Pipeline.find_by_id(pipeline_id).try do |pipeline|
+ Ci::PipelineCreation::CancelRedundantPipelinesService
+ .new(pipeline)
+ .execute
+ end
+ end
+ end
+end
diff --git a/app/workers/ci/initial_pipeline_process_worker.rb b/app/workers/ci/initial_pipeline_process_worker.rb
index 734755f176a..52a4f075cf0 100644
--- a/app/workers/ci/initial_pipeline_process_worker.rb
+++ b/app/workers/ci/initial_pipeline_process_worker.rb
@@ -17,7 +17,7 @@ module Ci
def perform(pipeline_id)
Ci::Pipeline.find_by_id(pipeline_id).try do |pipeline|
- create_deployments!(pipeline) if Feature.enabled?(:move_create_deployments_to_worker, pipeline.project)
+ create_deployments!(pipeline)
Ci::PipelineCreation::StartPipelineService
.new(pipeline)
diff --git a/app/workers/ci/parse_secure_file_metadata_worker.rb b/app/workers/ci/parse_secure_file_metadata_worker.rb
index 0d2495d3155..8703bb13776 100644
--- a/app/workers/ci/parse_secure_file_metadata_worker.rb
+++ b/app/workers/ci/parse_secure_file_metadata_worker.rb
@@ -4,7 +4,7 @@ module Ci
class ParseSecureFileMetadataWorker
include ::ApplicationWorker
- feature_category :mobile_signing_deployment
+ feature_category :mobile_devops
urgency :low
idempotent!
diff --git a/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb b/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb
index 69ab477c80a..722c513a4bb 100644
--- a/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb
+++ b/app/workers/ci/runners/reconcile_existing_runner_versions_cron_worker.rb
@@ -30,7 +30,7 @@ module Ci
end
result = ::Ci::Runners::ReconcileExistingRunnerVersionsService.new.execute
- result.payload.each { |key, value| log_extra_metadata_on_done(key, value) }
+ log_hash_metadata_on_done(result.payload)
end
end
end
diff --git a/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb b/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb
new file mode 100644
index 00000000000..9a11db33fb6
--- /dev/null
+++ b/app/workers/ci/runners/stale_machines_cleanup_cron_worker.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ class StaleMachinesCleanupCronWorker
+ include ApplicationWorker
+
+ # This worker does not schedule other workers that require context.
+ include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
+
+ data_consistency :sticky
+ feature_category :runner_fleet
+ urgency :low
+
+ idempotent!
+
+ def perform
+ result = ::Ci::Runners::StaleMachinesCleanupService.new.execute
+ log_extra_metadata_on_done(:status, result.status)
+ log_hash_metadata_on_done(result.payload)
+ end
+ end
+ end
+end
diff --git a/app/workers/ci/schedule_delete_objects_cron_worker.rb b/app/workers/ci/schedule_delete_objects_cron_worker.rb
index 55b23bbab62..b8332838b13 100644
--- a/app/workers/ci/schedule_delete_objects_cron_worker.rb
+++ b/app/workers/ci/schedule_delete_objects_cron_worker.rb
@@ -14,8 +14,8 @@ module Ci
feature_category :continuous_integration
idempotent!
- def perform(*args)
- Ci::DeleteObjectsWorker.perform_with_capacity(*args)
+ def perform(...)
+ Ci::DeleteObjectsWorker.perform_with_capacity(...)
end
end
end
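
Note (illustrative, not part of the diff): this hunk, and the ones in rescheduling_methods.rb and limited_capacity/worker.rb below, replace *args splats with Ruby's ... argument forwarding, which passes positional arguments, keyword arguments, and the block through unchanged. A standalone sketch (Ruby 2.7+; the class names are hypothetical, not GitLab APIs):

class Target
  def self.run(a, b = 2, key: nil, &block)
    "a=#{a} b=#{b} key=#{key} block=#{block ? block.call : 'none'}"
  end
end

class Forwarder
  # Roughly equivalent to def run(*args, **kwargs, &block) plus a full re-splat,
  # but immune to the Ruby 3 keyword-argument separation pitfalls of a bare *args delegation.
  def run(...)
    Target.run(...)
  end
end

puts Forwarder.new.run(1)                     # a=1 b=2 key= block=none
puts Forwarder.new.run(1, 3, key: :x) { :hi } # a=1 b=3 key=x block=hi
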
diff --git a/app/workers/concerns/application_worker.rb b/app/workers/concerns/application_worker.rb
index 222d045b0ba..e2e31b0a5bd 100644
--- a/app/workers/concerns/application_worker.rb
+++ b/app/workers/concerns/application_worker.rb
@@ -36,6 +36,11 @@ module ApplicationWorker
@done_log_extra_metadata[key] = value
end
+ def log_hash_metadata_on_done(hash)
+ @done_log_extra_metadata ||= {}
+ hash.each { |key, value| @done_log_extra_metadata[key] = value }
+ end
+
def logging_extras
return {} unless @done_log_extra_metadata
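
Note (illustrative, not part of the diff): the new log_hash_metadata_on_done is just a bulk form of the existing log_extra_metadata_on_done, as used by the reconcile_existing_runner_versions and stale_machines_cleanup workers above. A plain-Ruby stand-in for the accumulation, with GitLab's Sidekiq plumbing stubbed out (FakeWorker and the trimmed logging_extras below are hypothetical):

module DoneMetadata
  # Same bookkeeping as the concern above, isolated from ApplicationWorker.
  def log_extra_metadata_on_done(key, value)
    @done_log_extra_metadata ||= {}
    @done_log_extra_metadata[key] = value
  end

  def log_hash_metadata_on_done(hash)
    @done_log_extra_metadata ||= {}
    hash.each { |key, value| @done_log_extra_metadata[key] = value }
  end

  def logging_extras
    @done_log_extra_metadata || {}
  end
end

class FakeWorker
  include DoneMetadata
end

worker = FakeWorker.new
worker.log_extra_metadata_on_done(:status, :success)
worker.log_hash_metadata_on_done(total: 3, deleted: 1)
p worker.logging_extras # => {:status=>:success, :total=>3, :deleted=>1}
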
diff --git a/app/workers/concerns/git_garbage_collect_methods.rb b/app/workers/concerns/git_garbage_collect_methods.rb
index c5f8c9c8464..718031ec33e 100644
--- a/app/workers/concerns/git_garbage_collect_methods.rb
+++ b/app/workers/concerns/git_garbage_collect_methods.rb
@@ -57,7 +57,7 @@ module GitGarbageCollectMethods
end
def gc?(task)
- task == :gc || task == :prune
+ %i[gc eager prune].include?(task)
end
def try_obtain_lease(key)
@@ -87,7 +87,7 @@ module GitGarbageCollectMethods
if task == :prune
client.prune_unreachable_objects
else
- client.optimize_repository
+ client.optimize_repository(eager: task == :eager)
end
rescue GRPC::NotFound => e
Gitlab::GitLogger.error("#{__method__} failed:\nRepository not found")
diff --git a/app/workers/concerns/gitlab/github_import/rescheduling_methods.rb b/app/workers/concerns/gitlab/github_import/rescheduling_methods.rb
index 0a43a0fc4d2..64fa705329e 100644
--- a/app/workers/concerns/gitlab/github_import/rescheduling_methods.rb
+++ b/app/workers/concerns/gitlab/github_import/rescheduling_methods.rb
@@ -25,8 +25,8 @@ module Gitlab
end
end
- def try_import(*args)
- import(*args)
+ def try_import(...)
+ import(...)
true
rescue RateLimitError
false
diff --git a/app/workers/concerns/limited_capacity/worker.rb b/app/workers/concerns/limited_capacity/worker.rb
index bcedb4efcc0..af66d80b3e9 100644
--- a/app/workers/concerns/limited_capacity/worker.rb
+++ b/app/workers/concerns/limited_capacity/worker.rb
@@ -61,8 +61,8 @@ module LimitedCapacity
end
end
- def perform(*args)
- perform_registered(*args) if job_tracker.register(jid, max_running_jobs)
+ def perform(...)
+ perform_registered(...) if job_tracker.register(jid, max_running_jobs)
end
def perform_work(*args)
@@ -81,9 +81,9 @@ module LimitedCapacity
job_tracker.clean_up
end
- def report_prometheus_metrics(*args)
+ def report_prometheus_metrics(...)
report_running_jobs_metrics
- set_metric(:remaining_work_gauge, remaining_work_count(*args))
+ set_metric(:remaining_work_gauge, remaining_work_count(...))
set_metric(:max_running_jobs_gauge, max_running_jobs)
end
diff --git a/app/workers/concerns/waitable_worker.rb b/app/workers/concerns/waitable_worker.rb
index f23e3fb20c2..1fe950b7570 100644
--- a/app/workers/concerns/waitable_worker.rb
+++ b/app/workers/concerns/waitable_worker.rb
@@ -3,13 +3,6 @@
module WaitableWorker
extend ActiveSupport::Concern
- class_methods do
- # Schedules multiple jobs and waits for them to be completed.
- def bulk_perform_and_wait(args_list)
- bulk_perform_async(args_list)
- end
- end
-
def perform(*args)
notify_key = args.pop if Gitlab::JobWaiter.key?(args.last)
diff --git a/app/workers/database/batched_background_migration/execution_worker.rb b/app/workers/database/batched_background_migration/execution_worker.rb
index b59e4bd1f86..37b40c73ca6 100644
--- a/app/workers/database/batched_background_migration/execution_worker.rb
+++ b/app/workers/database/batched_background_migration/execution_worker.rb
@@ -11,7 +11,7 @@ module Database
INTERVAL_VARIANCE = 5.seconds.freeze
LEASE_TIMEOUT_MULTIPLIER = 3
- MAX_RUNNING_MIGRATIONS = 2
+ MAX_RUNNING_MIGRATIONS = 4
included do
data_consistency :always
diff --git a/app/workers/gitlab/import/stuck_import_job.rb b/app/workers/gitlab/import/stuck_import_job.rb
index efbea7d8133..7278106efb9 100644
--- a/app/workers/gitlab/import/stuck_import_job.rb
+++ b/app/workers/gitlab/import/stuck_import_job.rb
@@ -37,7 +37,7 @@ module Gitlab
def mark_imports_without_jid_as_failed!
enqueued_import_states_without_jid
- .each(&method(:mark_as_failed))
+ .each { |import_state| mark_as_failed(import_state) }
.size
end
@@ -61,7 +61,7 @@ module Gitlab
)
completed_import_states
- .each(&method(:mark_as_failed))
+ .each { |import_state| mark_as_failed(import_state) }
.size
end
diff --git a/app/workers/incident_management/close_incident_worker.rb b/app/workers/incident_management/close_incident_worker.rb
index 7d45a6785ea..6b3e1c5321b 100644
--- a/app/workers/incident_management/close_incident_worker.rb
+++ b/app/workers/incident_management/close_incident_worker.rb
@@ -30,7 +30,7 @@ module IncidentManagement
def close_incident(incident)
::Issues::CloseService
- .new(project: incident.project, current_user: user)
+ .new(container: incident.project, current_user: user)
.execute(incident, system_note: false)
end
diff --git a/app/workers/issues/close_worker.rb b/app/workers/issues/close_worker.rb
index 0d540ee8c4f..343f50cd7cf 100644
--- a/app/workers/issues/close_worker.rb
+++ b/app/workers/issues/close_worker.rb
@@ -42,7 +42,7 @@ module Issues
end
commit = Commit.build_from_sidekiq_hash(project, params["commit_hash"])
- service = Issues::CloseService.new(project: project, current_user: author)
+ service = Issues::CloseService.new(container: project, current_user: author)
service.execute(issue, commit: commit)
end
diff --git a/app/workers/merge_requests/close_issue_worker.rb b/app/workers/merge_requests/close_issue_worker.rb
index 8c3ba1bc5ab..5ac48423d41 100644
--- a/app/workers/merge_requests/close_issue_worker.rb
+++ b/app/workers/merge_requests/close_issue_worker.rb
@@ -45,7 +45,7 @@ module MergeRequests
end
Issues::CloseService
- .new(project: project, current_user: user)
+ .new(container: project, current_user: user)
.execute(issue, commit: merge_request)
end
end
diff --git a/app/workers/merge_requests/delete_source_branch_worker.rb b/app/workers/merge_requests/delete_source_branch_worker.rb
index da1eca067a9..f9dbd85cd44 100644
--- a/app/workers/merge_requests/delete_source_branch_worker.rb
+++ b/app/workers/merge_requests/delete_source_branch_worker.rb
@@ -18,15 +18,10 @@ class MergeRequests::DeleteSourceBranchWorker
# Source branch changed while it's being removed
return if merge_request.source_branch_sha != source_branch_sha
- if Feature.enabled?(:add_delete_branch_worker, merge_request.source_project)
- ::Projects::DeleteBranchWorker.new.perform(merge_request.source_project.id, user_id,
- merge_request.source_branch)
- else
- ::Branches::DeleteService.new(merge_request.source_project, user).execute(merge_request.source_branch)
- end
-
::MergeRequests::RetargetChainService.new(project: merge_request.source_project, current_user: user)
.execute(merge_request)
+
+ ::Projects::DeleteBranchWorker.new.perform(merge_request.source_project.id, user_id, merge_request.source_branch)
rescue ActiveRecord::RecordNotFound
end
end
diff --git a/app/workers/new_issue_worker.rb b/app/workers/new_issue_worker.rb
index e14f0dc7dfe..07699a50e36 100644
--- a/app/workers/new_issue_worker.rb
+++ b/app/workers/new_issue_worker.rb
@@ -26,7 +26,7 @@ class NewIssueWorker # rubocop:disable Scalability/IdempotentWorker
issuable.create_cross_references!(user)
Issues::AfterCreateService
- .new(project: issuable.project, current_user: user)
+ .new(container: issuable.project, current_user: user)
.execute(issuable)
end
end
diff --git a/app/workers/new_merge_request_worker.rb b/app/workers/new_merge_request_worker.rb
index d6e8d517b5a..a32a414c0ba 100644
--- a/app/workers/new_merge_request_worker.rb
+++ b/app/workers/new_merge_request_worker.rb
@@ -8,6 +8,9 @@ class NewMergeRequestWorker # rubocop:disable Scalability/IdempotentWorker
sidekiq_options retry: 3
include NewIssuable
+ idempotent!
+ deduplicate :until_executed
+
feature_category :code_review_workflow
urgency :high
worker_resource_boundary :cpu
@@ -15,6 +18,7 @@ class NewMergeRequestWorker # rubocop:disable Scalability/IdempotentWorker
def perform(merge_request_id, user_id)
return unless objects_found?(merge_request_id, user_id)
+ return if issuable.prepared?
MergeRequests::AfterCreateService
.new(project: issuable.target_project, current_user: user)
diff --git a/app/workers/object_storage/migrate_uploads_worker.rb b/app/workers/object_storage/migrate_uploads_worker.rb
index 3e681c3f111..3fe4231bb55 100644
--- a/app/workers/object_storage/migrate_uploads_worker.rb
+++ b/app/workers/object_storage/migrate_uploads_worker.rb
@@ -99,7 +99,7 @@ module ObjectStorage
end
def migrate(uploads)
- uploads.map(&method(:process_upload))
+ uploads.map { |upload| process_upload(upload) }
end
def process_upload(upload)
diff --git a/app/workers/packages/debian/generate_distribution_worker.rb b/app/workers/packages/debian/generate_distribution_worker.rb
index 822fe5a1517..1eff3ea02dd 100644
--- a/app/workers/packages/debian/generate_distribution_worker.rb
+++ b/app/workers/packages/debian/generate_distribution_worker.rb
@@ -4,7 +4,6 @@ module Packages
module Debian
class GenerateDistributionWorker
include ApplicationWorker
- include ::Packages::FIPS
data_consistency :always
include Gitlab::Utils::StrongMemoize
@@ -21,8 +20,6 @@ module Packages
loggable_arguments 0
def perform(container_type, distribution_id)
- raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
-
@container_type = container_type
@distribution_id = distribution_id
diff --git a/app/workers/packages/debian/process_changes_worker.rb b/app/workers/packages/debian/process_changes_worker.rb
index d477a6f2e1f..0a716c61203 100644
--- a/app/workers/packages/debian/process_changes_worker.rb
+++ b/app/workers/packages/debian/process_changes_worker.rb
@@ -4,7 +4,6 @@ module Packages
module Debian
class ProcessChangesWorker
include ApplicationWorker
- include ::Packages::FIPS
data_consistency :always
include Gitlab::Utils::StrongMemoize
@@ -16,8 +15,6 @@ module Packages
feature_category :package_registry
def perform(package_file_id, user_id)
- raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
-
@package_file_id = package_file_id
@user_id = user_id
@@ -25,8 +22,6 @@ module Packages
::Packages::Debian::ProcessChangesService.new(package_file, user).execute
rescue StandardError => e
- raise if e.instance_of?(DisabledError)
-
Gitlab::ErrorTracking.log_exception(e, package_file_id: @package_file_id, user_id: @user_id)
package_file.destroy!
end
diff --git a/app/workers/packages/debian/process_package_file_worker.rb b/app/workers/packages/debian/process_package_file_worker.rb
index 587c0b78c9c..e9d6ad57749 100644
--- a/app/workers/packages/debian/process_package_file_worker.rb
+++ b/app/workers/packages/debian/process_package_file_worker.rb
@@ -4,7 +4,6 @@ module Packages
module Debian
class ProcessPackageFileWorker
include ApplicationWorker
- include ::Packages::FIPS
include Gitlab::Utils::StrongMemoize
data_consistency :always
@@ -15,25 +14,20 @@ module Packages
queue_namespace :package_repositories
feature_category :package_registry
- def perform(package_file_id, user_id, distribution_name, component_name)
- raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
-
+ def perform(package_file_id, distribution_name, component_name)
@package_file_id = package_file_id
- @user_id = user_id
@distribution_name = distribution_name
@component_name = component_name
- return unless package_file && user && distribution_name && component_name
+ return unless package_file && distribution_name && component_name
# return if file has already been processed
return unless package_file.debian_file_metadatum&.unknown?
- ::Packages::Debian::ProcessPackageFileService.new(package_file, user, distribution_name, component_name).execute
+ ::Packages::Debian::ProcessPackageFileService.new(package_file, distribution_name, component_name).execute
rescue StandardError => e
- raise if e.instance_of?(DisabledError)
-
- Gitlab::ErrorTracking.log_exception(e, package_file_id: @package_file_id, user_id: @user_id,
+ Gitlab::ErrorTracking.log_exception(e, package_file_id: @package_file_id,
distribution_name: @distribution_name, component_name: @component_name)
- package_file.destroy!
+ package_file.package.update_column(:status, :error)
end
private
@@ -42,11 +36,6 @@ module Packages
::Packages::PackageFile.find_by_id(@package_file_id)
end
strong_memoize_attr :package_file
-
- def user
- ::User.find_by_id(@user_id)
- end
- strong_memoize_attr :user
end
end
end
diff --git a/app/workers/pipeline_schedule_worker.rb b/app/workers/pipeline_schedule_worker.rb
index fb843bd421c..ca589acf26c 100644
--- a/app/workers/pipeline_schedule_worker.rb
+++ b/app/workers/pipeline_schedule_worker.rb
@@ -15,30 +15,18 @@ class PipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
worker_resource_boundary :cpu
def perform
- if Feature.enabled?(:ci_use_run_pipeline_schedule_worker)
- in_lock(lock_key, **lock_params) do
- Ci::PipelineSchedule
- .select(:id, :owner_id, :project_id) # Minimize the selected columns
- .runnable_schedules
- .preloaded
- .find_in_batches do |schedules|
- RunPipelineScheduleWorker.bulk_perform_async_with_contexts(
- schedules,
- arguments_proc: ->(schedule) { [schedule.id, schedule.owner_id] },
- context_proc: ->(schedule) { { project: schedule.project, user: schedule.owner } }
- )
- end
- end
- else
- Ci::PipelineSchedule.runnable_schedules.preloaded.find_in_batches do |schedules|
- schedules.each do |schedule|
- next unless schedule.project
-
- with_context(project: schedule.project, user: schedule.owner) do
- Ci::PipelineScheduleService.new(schedule.project, schedule.owner).execute(schedule)
- end
+ in_lock(lock_key, **lock_params) do
+ Ci::PipelineSchedule
+ .select(:id, :owner_id, :project_id) # Minimize the selected columns
+ .runnable_schedules
+ .preloaded
+ .find_in_batches do |schedules|
+ RunPipelineScheduleWorker.bulk_perform_async_with_contexts(
+ schedules,
+ arguments_proc: ->(schedule) { [schedule.id, schedule.owner_id, { scheduling: true }] },
+ context_proc: ->(schedule) { { project: schedule.project, user: schedule.owner } }
+ )
end
- end
end
end
diff --git a/app/workers/projects/post_creation_worker.rb b/app/workers/projects/post_creation_worker.rb
index 886919ecace..5a6c88fca69 100644
--- a/app/workers/projects/post_creation_worker.rb
+++ b/app/workers/projects/post_creation_worker.rb
@@ -38,12 +38,11 @@ module Projects
def create_incident_management_timeline_event_tags(project)
tags = project.incident_management_timeline_event_tags.pluck_names
- start_time_name = ::IncidentManagement::TimelineEventTag::START_TIME_TAG_NAME
- end_time_name = ::IncidentManagement::TimelineEventTag::END_TIME_TAG_NAME
+ predefined_tags = ::IncidentManagement::TimelineEventTag::PREDEFINED_TAGS
- project.incident_management_timeline_event_tags.new(name: start_time_name) unless tags.include?(start_time_name)
-
- project.incident_management_timeline_event_tags.new(name: end_time_name) unless tags.include?(end_time_name)
+ predefined_tags.each do |tag|
+ project.incident_management_timeline_event_tags.new(name: tag) unless tags.include?(tag)
+ end
project.save!
rescue StandardError => e
diff --git a/app/workers/projects/record_target_platforms_worker.rb b/app/workers/projects/record_target_platforms_worker.rb
index 899721492e9..e69450692d9 100644
--- a/app/workers/projects/record_target_platforms_worker.rb
+++ b/app/workers/projects/record_target_platforms_worker.rb
@@ -9,7 +9,7 @@ module Projects
APPLE_PLATFORM_LANGUAGES = %w(swift objective-c).freeze
ANDROID_PLATFORM_LANGUAGES = %w(java kotlin).freeze
- feature_category :experimentation_activation
+ feature_category :projects
data_consistency :always
deduplicate :until_executed
urgency :low
diff --git a/app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb b/app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb
index ec23bde5898..065d103fe07 100644
--- a/app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb
+++ b/app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb
@@ -19,6 +19,7 @@ module Projects
refresh = Projects::RefreshBuildArtifactsSizeStatisticsService.new.execute
return unless refresh
+ log_extra_metadata_on_done(:refresh_id, refresh.id)
log_extra_metadata_on_done(:project_id, refresh.project_id)
log_extra_metadata_on_done(:last_job_artifact_id, refresh.last_job_artifact_id)
log_extra_metadata_on_done(:last_batch, refresh.destroyed?)
diff --git a/app/workers/prune_old_events_worker.rb b/app/workers/prune_old_events_worker.rb
index 2d74c271ddc..c8dfb2ade0a 100644
--- a/app/workers/prune_old_events_worker.rb
+++ b/app/workers/prune_old_events_worker.rb
@@ -10,7 +10,7 @@ class PruneOldEventsWorker # rubocop:disable Scalability/IdempotentWorker
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category :users
+ feature_category :user_profile
DELETE_LIMIT = 10_000
diff --git a/app/workers/run_pipeline_schedule_worker.rb b/app/workers/run_pipeline_schedule_worker.rb
index db82cf3af91..a7037863ef5 100644
--- a/app/workers/run_pipeline_schedule_worker.rb
+++ b/app/workers/run_pipeline_schedule_worker.rb
@@ -19,7 +19,9 @@ class RunPipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
return unless schedule && schedule.project && user
- if Feature.enabled?(:ci_use_run_pipeline_schedule_worker)
+ options.symbolize_keys!
+
+ if options[:scheduling]
return if schedule.next_run_at > Time.current
update_next_run_at_for(schedule)
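
Note (illustrative, not part of the diff): the symbolize_keys! call above is there because Sidekiq round-trips job arguments through JSON, so the options hash that PipelineScheduleWorker passes as the third job argument arrives with string keys. A plain-Ruby sketch, using transform_keys as a core-Ruby analogue of ActiveSupport's symbolize_keys!:

require 'json'

# What Sidekiq serialisation does to a Hash job argument.
options = JSON.parse(JSON.dump({ scheduling: true }))
p options                          # => {"scheduling"=>true}
p options.transform_keys(&:to_sym) # => {:scheduling=>true}
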
diff --git a/app/workers/tasks_to_be_done/create_worker.rb b/app/workers/tasks_to_be_done/create_worker.rb
index 0953f190fd0..d3824ceb4ae 100644
--- a/app/workers/tasks_to_be_done/create_worker.rb
+++ b/app/workers/tasks_to_be_done/create_worker.rb
@@ -17,7 +17,7 @@ module TasksToBeDone
member_task.tasks_to_be_done.each do |task|
service_class(task)
- .new(project: project, current_user: current_user, assignee_ids: assignee_ids)
+ .new(container: project, current_user: current_user, assignee_ids: assignee_ids)
.execute
end
end
diff --git a/app/workers/user_status_cleanup/batch_worker.rb b/app/workers/user_status_cleanup/batch_worker.rb
index f3d73b2e6e9..95a8bf6fe90 100644
--- a/app/workers/user_status_cleanup/batch_worker.rb
+++ b/app/workers/user_status_cleanup/batch_worker.rb
@@ -11,7 +11,7 @@ module UserStatusCleanup
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category :users
+ feature_category :user_profile
idempotent!
diff --git a/app/workers/users/create_statistics_worker.rb b/app/workers/users/create_statistics_worker.rb
index 0c27d165ded..e258fb92907 100644
--- a/app/workers/users/create_statistics_worker.rb
+++ b/app/workers/users/create_statistics_worker.rb
@@ -11,7 +11,7 @@ module Users
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
- feature_category :users
+ feature_category :user_profile
def perform
UsersStatistics.create_current_stats!
diff --git a/app/workers/users/migrate_records_to_ghost_user_in_batches_worker.rb b/app/workers/users/migrate_records_to_ghost_user_in_batches_worker.rb
index d9a80b6e899..cfb463241d3 100644
--- a/app/workers/users/migrate_records_to_ghost_user_in_batches_worker.rb
+++ b/app/workers/users/migrate_records_to_ghost_user_in_batches_worker.rb
@@ -7,7 +7,7 @@ module Users
include CronjobQueue # rubocop: disable Scalability/CronWorkerContext
sidekiq_options retry: false
- feature_category :users
+ feature_category :user_profile
data_consistency :always
idempotent!