# fetch.rb — Sidekiq BasicFetch (80 lines / 69 LOC, 2.11 KB)
# frozen_string_literal: true
require "sidekiq"
module Sidekiq
  # Pulls jobs off Redis lists with a blocking BRPOP, honoring queue
  # weights (or strict priority order when :strict is set).
  class BasicFetch
    # The blocking pop times out every few seconds so the fetcher thread
    # can notice that the process is shutting down.
    TIMEOUT = 2

    # One job pulled from Redis, paired with the Redis key of the queue
    # it came from.
    UnitOfWork = Struct.new(:queue, :job) {
      # Acknowledgement is a no-op: BRPOP already removed the job.
      def acknowledge
        # nothing to do
      end

      # Queue name without the "queue:" Redis key prefix.
      def queue_name
        queue.delete_prefix("queue:")
      end

      # Push the job back onto the tail of its original queue.
      def requeue
        Sidekiq.redis { |conn| conn.rpush(queue, job) }
      end
    }

    # options - Hash, must contain :queues (Array of queue names).
    #           :strict switches from weighted/random ordering to a
    #           fixed priority order.
    # Raises ArgumentError when :queues is missing.
    def initialize(options)
      raise ArgumentError, "missing queue list" unless options[:queues]

      @options = options
      @strictly_ordered_queues = !!@options[:strict]
      @queues = @options[:queues].map { |q| "queue:#{q}" }
      return unless @strictly_ordered_queues

      # Strict mode: dedupe once and bake the BRPOP timeout into the
      # command array, since its order never changes.
      @queues.uniq!
      @queues << TIMEOUT
    end

    # Block on Redis for up to TIMEOUT seconds. Returns a UnitOfWork,
    # or nil when the pop timed out with no job available.
    def retrieve_work
      queue_and_job = Sidekiq.redis { |conn| conn.brpop(*queues_cmd) }
      UnitOfWork.new(*queue_and_job) if queue_and_job
    end

    # Push all in-progress jobs back onto their queues (e.g. during
    # shutdown). Best-effort: any failure is logged and swallowed.
    # NOTE(review): the argument-less `conn.pipelined do` form is
    # deprecated in redis-rb >= 4.6 — confirm the client version before
    # migrating to the block-argument form.
    def bulk_requeue(inprogress, options)
      return if inprogress.empty?

      Sidekiq.logger.debug { "Re-queueing terminated jobs" }
      grouped = inprogress.group_by(&:queue)
      Sidekiq.redis do |conn|
        conn.pipelined do
          grouped.each do |queue, units|
            conn.rpush(queue, units.map(&:job))
          end
        end
      end
      Sidekiq.logger.info("Pushed #{inprogress.size} jobs back to Redis")
    rescue => ex
      Sidekiq.logger.warn("Failed to requeue #{inprogress.size} jobs: #{ex.message}")
    end

    # Creating the Redis#brpop command takes into account any
    # configured queue weights. By default Redis#brpop returns
    # data from the first queue that has pending elements. We
    # recreate the queue command each time we invoke Redis#brpop
    # to honor weights and avoid queue starvation.
    def queues_cmd
      return @queues if @strictly_ordered_queues

      # Shuffle in place, then dedupe into a fresh array and append the
      # timeout — weighted queues appear more often and so win the
      # shuffle more often, which is how weighting is honored.
      @queues.shuffle!.uniq << TIMEOUT
    end
  end
end