# frozen_string_literal: true

module BulkImports
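  # Worker that finalizes a batched pipeline: it waits for all batches to
  # complete, then either fails the tracker (if it has gone stale) or runs
  # the pipeline's on_finish callback and marks the tracker finished.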
  class FinishBatchedPipelineWorker
    include ApplicationWorker
    include ExceptionBacktrace

    REQUEUE_DELAY = 5.seconds

    idempotent!
    deduplicate :until_executing
    data_consistency :always # rubocop:disable SidekiqLoadBalancing/WorkerDataConsistency
    feature_category :importers

    version 2
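
    # +pipeline_tracker_id+ is the id of a BulkImports::Tracker row.
    # No-ops unless the tracker is a started, batched pipeline, and
    # re-enqueues itself while any batch is still running.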
    def perform(pipeline_tracker_id)
      @tracker = Tracker.find(pipeline_tracker_id)
      @context = ::BulkImports::Pipeline::Context.new(tracker)

      return unless tracker.batched?
      return unless tracker.started?
      return re_enqueue if import_in_progress?
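
      # A stale tracker is failed along with all of its batches; otherwise
      # the pipeline's on_finish callback runs and the tracker is finished.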
      if tracker.stale?
        logger.error(log_attributes(message: 'Tracker stale. Failing batches and tracker'))
        tracker.batches.each(&:fail_op!)
        tracker.fail_op!
      else
        tracker.pipeline_class.new(@context).on_finish
        logger.info(log_attributes(message: 'Tracker finished'))
        tracker.finish!
      end
    end

    private

    attr_reader :tracker
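
    # Retry shortly; batches are still being processed.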
    def re_enqueue
      self.class.perform_in(REQUEUE_DELAY, tracker.id)
    end
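
    # True while any batch is still in the created or started state.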
    def import_in_progress?
      tracker.batches.any? { |b| b.started? || b.created? }
    end
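
    # Memoized logger for structured import log entries.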
    def logger
      @logger ||= Logger.build
    end
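
    # Base payload for structured log entries; +extra+ is merged on top.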
    def log_attributes(extra = {})
      structured_payload(
        {
          tracker_id: tracker.id,
          bulk_import_id: tracker.entity.bulk_import_id,
          bulk_import_entity_id: tracker.entity.id,
          pipeline_class: tracker.pipeline_name
        }.merge(extra)
      )
    end
  end
end