fiber_job 0.2.4 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 4b63b1dfdccf756e958f7cce41e450ad36ac82333a4d4a4231fe80e71cb86483
4
- data.tar.gz: '014148d099b5aa2a52d76d0cc2fdd3b26c427c3e978769bd6bdaaa0f9dbd34c9'
3
+ metadata.gz: 2a6d673a0a6c62ea5c97e63e64b19d29934fc363ea2ebbf6e181acbc919e90a7
4
+ data.tar.gz: 77aece8ed8222b313efb850b921d129f3cb515b935f2d61afcb0035118057d40
5
5
  SHA512:
6
- metadata.gz: f0ead6722ddbc4c8ee6d8247f2d68d5ebd06307c1628873c66df7dbffb095f27cd21f2e26bbde00c12949a4ff5f2447cde609f635a13eff5ea225a3c70155c84
7
- data.tar.gz: bcc920d7436747ca15366a9277ebdd76a3e2d29736a4f6529a0f078c6c74165cdd2fae1996a7621f7bdc846fa3cbb8fecc23e30fa5a17fec9cb4a4d448e13427
6
+ metadata.gz: e52875a8ca6776d98ce3c54be8d530613322c8b8f8c2a2a8d3d78ec3347cc7aba06ef45bf25de335931d8f1197059ccf3f0fc2d11300a0330d364a749f8a0f51
7
+ data.tar.gz: 5beb000f04496a73cd33f9ccffde20462f2f74aff2a58ebc3f180bbb526e70635cdc91835335c74ba1f68639ef212d96527b7415a4d05bae4f0510e57e44edf0
@@ -31,7 +31,9 @@ module FiberJob
31
31
  #
32
32
  # @raise [ArgumentError] If job_class is not a valid job class
33
33
  def self.enqueue(job_class, *args)
34
+ jid = JID.generate
34
35
  payload = {
36
+ 'jid' => jid,
35
37
  'class' => job_class.name,
36
38
  'args' => args,
37
39
  'enqueued_at' => Time.now.to_f
@@ -39,6 +41,8 @@ module FiberJob
39
41
 
40
42
  queue_name = job_class.queue
41
43
  Queue.push(queue_name, payload)
44
+
45
+ jid
42
46
  end
43
47
 
44
48
  # Enqueues a job for execution after a specified delay.
@@ -56,8 +60,10 @@ module FiberJob
56
60
  #
57
61
  # @raise [ArgumentError] If delay_seconds is negative or job_class is invalid
58
62
  def self.enqueue_in(delay_seconds, job_class, *args)
63
+ jid = JID.generate
59
64
  scheduled_at = Time.now.to_f + delay_seconds
60
65
  payload = {
66
+ 'jid' => jid,
61
67
  'class' => job_class.name,
62
68
  'args' => args,
63
69
  'enqueued_at' => Time.now.to_f
@@ -66,7 +72,9 @@ module FiberJob
66
72
  queue_name = job_class.queue
67
73
  Queue.schedule(queue_name, payload, scheduled_at)
68
74
 
69
- FiberJob.logger.info "Scheduled #{job_class.name} to run in #{delay_seconds}s"
75
+ FiberJob.logger.info "Scheduled #{job_class.name} (#{jid}) to run in #{delay_seconds}s"
76
+
77
+ jid
70
78
  end
71
79
 
72
80
  # Enqueues a job for execution at a specific time.
@@ -86,7 +94,9 @@ module FiberJob
86
94
  #
87
95
  # @raise [ArgumentError] If timestamp is in the past or job_class is invalid
88
96
  def self.enqueue_at(timestamp, job_class, *args)
97
+ jid = JID.generate
89
98
  payload = {
99
+ 'jid' => jid,
90
100
  'class' => job_class.name,
91
101
  'args' => args,
92
102
  'enqueued_at' => Time.now.to_f
@@ -95,7 +105,9 @@ module FiberJob
95
105
  queue_name = job_class.queue
96
106
  Queue.schedule(queue_name, payload, timestamp.to_f)
97
107
 
98
- FiberJob.logger.info "Scheduled #{job_class.name} to run at #{Time.at(timestamp)}"
108
+ FiberJob.logger.info "Scheduled #{job_class.name} (#{jid}) to run at #{Time.at(timestamp)}"
109
+
110
+ jid
99
111
  end
100
112
  end
101
113
  end
@@ -3,19 +3,19 @@
3
3
  module FiberJob
4
4
  class Cron
5
5
  def self.redis
6
- @redis ||= Redis.new(url: FiberJob.config.redis_url)
6
+ @redis ||= RedisClient.new(url: FiberJob.config.redis_url)
7
7
  end
8
8
 
9
9
  def self.register(cron_job_class)
10
10
  job_name = cron_job_class.name
11
11
  cron_expression = cron_job_class.cron_expression
12
12
 
13
- redis.hset('cron:jobs', job_name, JSON.dump({ 'class' => job_name,
13
+ redis.call('HSET', 'cron:jobs', job_name, JSON.dump({ 'class' => job_name,
14
14
  'cron' => cron_expression,
15
15
  'queue' => cron_job_class.new.queue,
16
16
  'registered_at' => Time.now.to_f }))
17
17
 
18
- unless redis.exists?("cron:next_run:#{job_name}")
18
+ unless redis.call('EXISTS', "cron:next_run:#{job_name}") > 0
19
19
  next_time = cron_job_class.next_run_time
20
20
  schedule_job(cron_job_class, next_time)
21
21
  end
@@ -27,35 +27,39 @@ module FiberJob
27
27
  job_name = cron_job_class.name
28
28
 
29
29
  # Set next run time
30
- redis.set("cron:next_run:#{job_name}", run_time.to_f)
30
+ redis.call('SET', "cron:next_run:#{job_name}", run_time.to_f)
31
31
 
32
32
  # Add to sorted set for efficient scanning
33
- redis.zadd('cron:schedule', run_time.to_f, job_name)
33
+ redis.call('ZADD', 'cron:schedule', run_time.to_f, job_name)
34
34
 
35
35
  FiberJob.logger.debug "Scheduled #{job_name} for #{run_time}"
36
36
  end
37
37
 
38
38
  def self.due_jobs(current_time = Time.now)
39
- job_names = redis.zrangebyscore('cron:schedule', 0, current_time.to_f)
39
+ job_names = redis.call('ZRANGEBYSCORE', 'cron:schedule', 0, current_time.to_f)
40
40
 
41
41
  job_names.map do |job_name|
42
- job_data = JSON.parse(redis.hget('cron:jobs', job_name))
42
+ job_data_raw = redis.call('HGET', 'cron:jobs', job_name)
43
+ job_data = job_data_raw ? JSON.parse(job_data_raw) : nil
43
44
  next unless job_data
44
45
 
45
- redis.zrem('cron:schedule', job_name)
46
+ redis.call('ZREM', 'cron:schedule', job_name)
46
47
 
47
48
  job_data
48
49
  end.compact
49
50
  end
50
51
 
51
52
  def self.registered_jobs
52
- jobs = redis.hgetall('cron:jobs')
53
+ jobs = redis.call('HGETALL', 'cron:jobs')
54
+ # Convert array response to hash
55
+ jobs = Hash[*jobs] if jobs.is_a?(Array)
53
56
  jobs.transform_values { |data| JSON.parse(data) }
54
57
  end
55
58
 
56
59
  def self.clear_all
57
- redis.del('cron:jobs', 'cron:schedule')
58
- redis.keys('cron:next_run:*').each { |key| redis.del(key) }
60
+ redis.call('DEL', 'cron:jobs', 'cron:schedule')
61
+ keys = redis.call('KEYS', 'cron:next_run:*')
62
+ keys.each { |key| redis.call('DEL', key) } unless keys.empty?
59
63
  end
60
64
  end
61
65
  end
@@ -0,0 +1,185 @@
# frozen_string_literal: true

require 'securerandom'

module FiberJob
  # JID (Job ID) generation utilities for unique job identification.
  # Provides methods to generate unique, sortable, and URL-safe job identifiers
  # (ULID-style: 10-char millisecond timestamp + 16-char random suffix) that
  # can be used for tracking, logging, and job management operations.
  #
  # @example Generate a JID
  #   jid = FiberJob::JID.generate
  #   # => "01J4X8K2V3N9QRSTVWXYZ23456"
  #
  # @example Extract timestamp from JID
  #   timestamp = FiberJob::JID.timestamp(jid)
  #   # => 2024-08-17 15:30:25 UTC
  #
  # @author FiberJob Team
  # @since 0.2.6
  class JID
    # Base32 alphabet for ULID encoding (Crockford's Base32).
    # Excludes ambiguous characters: I, L, O, U.
    ENCODING = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'

    # Length of the timestamp component in the JID (10 characters)
    TIMESTAMP_LENGTH = 10

    # Length of the random component in the JID (16 characters)
    RANDOM_LENGTH = 16

    # Total length of a JID
    TOTAL_LENGTH = TIMESTAMP_LENGTH + RANDOM_LENGTH

    # Precompiled pattern for strings composed solely of ENCODING characters.
    # Hoisted to a constant so valid?/validate_format! don't rebuild it per call.
    VALID_CHARS = /\A[#{ENCODING}]+\z/

    class << self
      # Generates a new unique Job ID (JID).
      #
      # JIDs are 26-character strings that combine:
      # - 10-character timestamp component (millisecond precision)
      # - 16-character random component
      #
      # This format ensures:
      # - Lexicographical sortability by creation time
      # - URL-safe characters only
      # - Extremely low collision probability
      # - Easy visual identification
      #
      # @param time [Time, nil] Optional time to use for timestamp (defaults to current time)
      # @return [String] A 26-character unique job identifier
      #
      # @example Generate a JID
      #   jid = FiberJob::JID.generate
      #
      # @example Generate with specific time
      #   jid = FiberJob::JID.generate(Time.utc(2024, 1, 1))
      def generate(time = nil)
        time ||= Time.now
        timestamp_ms = (time.to_f * 1000).to_i

        # Timestamp encoded to TIMESTAMP_LENGTH base32 chars, followed by a
        # RANDOM_LENGTH random suffix (was hard-coded to 16; use the constant
        # so the declared lengths stay authoritative).
        "#{encode_timestamp(timestamp_ms)}#{encode_random(RANDOM_LENGTH)}"
      end

      # Extracts the timestamp from a JID.
      #
      # @param jid [String] The job ID to extract timestamp from
      # @return [Time] The timestamp when the JID was generated
      # @raise [ArgumentError] If JID format is invalid (wrong type, length, or characters)
      #
      # @example Extract timestamp
      #   FiberJob::JID.timestamp("01J4X8K2V3N9QRSTVWXYZ23456")
      def timestamp(jid)
        validate_format!(jid)

        timestamp_part = jid[0, TIMESTAMP_LENGTH]
        Time.at(decode_timestamp(timestamp_part) / 1000.0)
      end

      # Validates JID format.
      #
      # @param jid [String] The job ID to validate
      # @return [Boolean] True if valid, false otherwise
      #
      # @example Validate JID
      #   FiberJob::JID.valid?("01J4X8K2V3N9QRSTVWXYZ23456") # => true
      #   FiberJob::JID.valid?("invalid")                    # => false
      def valid?(jid)
        return false unless jid.is_a?(String)
        return false unless jid.length == TOTAL_LENGTH
        return false unless jid.match?(VALID_CHARS)

        # Additional validation: the embedded timestamp should be plausible —
        # after 2020 and no more than 1 day in the future.
        begin
          ts = timestamp(jid)
          ts > Time.new(2020, 1, 1) && ts < Time.now + 86_400
        rescue ArgumentError
          false
        end
      end

      # Generates a batch of unique JIDs efficiently.
      # Offsets each JID's timestamp by 1 millisecond (the timestamp
      # component's precision) so the timestamp parts differ within the batch.
      #
      # @param count [Integer] Number of JIDs to generate
      # @param time [Time, nil] Base time for generation (defaults to current time)
      # @return [Array<String>] Array of unique JIDs
      #
      # @example Generate batch of JIDs
      #   jids = FiberJob::JID.generate_batch(5)
      def generate_batch(count, time = nil)
        base_time = time || Time.now

        count.times.map do |i|
          # 1 ms offset per JID; comment previously said "microseconds" but
          # the offset is (and must be) milliseconds to move the timestamp part.
          generate(Time.at(base_time.to_f + (i * 0.001)))
        end
      end

      private

      # Encodes a timestamp (in milliseconds) to a base32 string.
      def encode_timestamp(timestamp_ms)
        encode_number(timestamp_ms, TIMESTAMP_LENGTH)
      end

      # Encodes a non-negative integer to base32 with a fixed length
      # (most-significant digit first; high bits beyond `length` digits truncate).
      def encode_number(number, length)
        result = +''
        length.times do
          result.prepend(ENCODING[number % 32])
          number /= 32
        end
        result
      end

      # Generates a random base32 string of the specified length.
      def encode_random(length)
        Array.new(length) { ENCODING[SecureRandom.random_number(32)] }.join
      end

      # Decodes a millisecond timestamp from a base32 string.
      def decode_timestamp(timestamp_str)
        timestamp_str.each_char.reduce(0) { |acc, char| (acc * 32) + ENCODING.index(char) }
      end

      # Validates JID format and raises ArgumentError if invalid.
      # Checks the type explicitly so nil/non-string input raises the
      # documented ArgumentError instead of NoMethodError.
      def validate_format!(jid)
        raise ArgumentError, 'JID must be a String' unless jid.is_a?(String)
        raise ArgumentError, "JID must be #{TOTAL_LENGTH} characters" unless jid.length == TOTAL_LENGTH
        raise ArgumentError, 'JID contains invalid characters' unless jid.match?(VALID_CHARS)
      end
    end
  end
end
data/lib/fiber_job/job.rb CHANGED
@@ -43,9 +43,11 @@ module FiberJob
43
43
  # @return [Integer] Job priority (higher numbers = higher priority)
44
44
  # @!attribute [rw] timeout
45
45
  # @return [Integer] Maximum execution time in seconds before timeout
46
+ # @!attribute [rw] jid
47
+ # @return [String, nil] Unique job identifier
46
48
  # @!attribute [r] config
47
49
  # @return [FiberJob::Config] Configuration object for this job instance
48
- attr_accessor :queue, :retry_count, :max_retries, :priority, :timeout
50
+ attr_accessor :queue, :retry_count, :max_retries, :priority, :timeout, :jid
49
51
  attr_reader :config
50
52
 
51
53
  # Initializes a new job instance with default configuration.
@@ -1,6 +1,6 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'redis'
3
+ require 'redis_client'
4
4
  require 'connection_pool'
5
5
 
6
6
  module FiberJob
@@ -43,7 +43,7 @@ module FiberJob
43
43
  # @return [ConnectionPool] The Redis connection pool
44
44
  def self.redis_pool
45
45
  @redis_pool ||= ConnectionPool.new(size: config.pool_size, timeout: 5) do
46
- Redis.new(url: config.redis_url)
46
+ RedisClient.new(url: config.redis_url)
47
47
  end
48
48
  end
49
49
 
@@ -51,19 +51,19 @@ module FiberJob
51
51
  # @deprecated Use redis_pool.with { |redis| ... } instead for better performance
52
52
  # Creates a new connection if one doesn't exist.
53
53
  #
54
- # @return [Redis] The shared Redis connection
54
+ # @return [RedisClient] The shared Redis connection
55
55
  def self.redis
56
- @redis ||= Redis.new(url: config.redis_url)
56
+ @redis ||= RedisClient.new(url: config.redis_url)
57
57
  end
58
58
 
59
59
  # Creates a new Redis connection for fiber-safe operations.
60
60
  # @deprecated Use redis_pool.with { |redis| ... } instead for better performance
61
61
  # Used when concurrent operations need separate connections.
62
62
  #
63
- # @return [Redis] A new Redis connection instance
63
+ # @return [RedisClient] A new Redis connection instance
64
64
  def self.redis_connection
65
65
  # Create a new Redis connection for fiber-safe operations
66
- Redis.new(url: config.redis_url)
66
+ RedisClient.new(url: config.redis_url)
67
67
  end
68
68
 
69
69
  # Adds a job to the specified queue for immediate processing.
@@ -78,7 +78,7 @@ module FiberJob
78
78
  # payload = { 'class' => 'EmailJob', 'args' => [123, 'message'] }
79
79
  # FiberJob::Queue.push(:default, payload)
80
80
  def self.push(queue_name, payload)
81
- redis_pool.with { |redis| redis.lpush("queue:#{queue_name}", JSON.dump(payload)) }
81
+ redis_pool.with { |redis| redis.call("LPUSH", "queue:#{queue_name}", JSON.dump(payload)) }
82
82
  end
83
83
 
84
84
  # Adds a job to the head of the queue for priority processing.
@@ -92,7 +92,7 @@ module FiberJob
92
92
  # FiberJob::Queue.push_priority(:default, urgent_job_data)
93
93
  def self.push_priority(queue_name, payload)
94
94
  # Add to the head of the queue for priority execution
95
- redis_pool.with { |redis| redis.rpush("queue:#{queue_name}", JSON.dump(payload)) }
95
+ redis_pool.with { |redis| redis.call("RPUSH", "queue:#{queue_name}", JSON.dump(payload)) }
96
96
  end
97
97
 
98
98
  # Removes and returns a job from the specified queue.
@@ -101,7 +101,7 @@ module FiberJob
101
101
  #
102
102
  # @param queue_name [String, Symbol] Name of the source queue
103
103
  # @param timeout [Float] Maximum time to wait for a job (default: 0.1)
104
- # @param redis_conn [Redis, nil] Optional Redis connection to use (bypasses pool)
104
+ # @param redis_conn [RedisClient, nil] Optional Redis connection to use (bypasses pool)
105
105
  # @return [Hash, nil] Job data hash or nil if timeout reached
106
106
  #
107
107
  # @example Pop from queue with timeout
@@ -112,10 +112,10 @@ module FiberJob
112
112
  def self.pop(queue_name, timeout: 0.1, redis_conn: nil)
113
113
  data = if redis_conn
114
114
  # Use provided connection (for legacy compatibility)
115
- redis_conn.brpop("queue:#{queue_name}", timeout: timeout)
115
+ redis_conn.call("BRPOP", "queue:#{queue_name}", timeout)
116
116
  else
117
117
  # Use connection pool for better performance
118
- redis_pool.with { |redis| redis.brpop("queue:#{queue_name}", timeout: timeout) }
118
+ redis_pool.with { |redis| redis.call("BRPOP", "queue:#{queue_name}", timeout) }
119
119
  end
120
120
  data ? JSON.parse(data[1]) : nil
121
121
  end
@@ -133,7 +133,7 @@ module FiberJob
133
133
  # future_time = Time.now.to_f + 3600 # 1 hour from now
134
134
  # FiberJob::Queue.schedule(:default, job_data, future_time)
135
135
  def self.schedule(queue_name, payload, scheduled_at)
136
- redis_pool.with { |redis| redis.zadd("schedule:#{queue_name}", scheduled_at, JSON.dump(payload)) }
136
+ redis_pool.with { |redis| redis.call("ZADD", "schedule:#{queue_name}", scheduled_at, JSON.dump(payload)) }
137
137
  end
138
138
 
139
139
  # Schedules a job with priority for execution at a specific time.
@@ -146,7 +146,7 @@ module FiberJob
146
146
  def self.schedule_priority(queue_name, payload, scheduled_at)
147
147
  # Mark as priority retry for head-of-queue execution
148
148
  priority_payload = payload.merge('priority_retry' => true)
149
- redis_pool.with { |redis| redis.zadd("schedule:#{queue_name}", scheduled_at, JSON.dump(priority_payload)) }
149
+ redis_pool.with { |redis| redis.call("ZADD", "schedule:#{queue_name}", scheduled_at, JSON.dump(priority_payload)) }
150
150
  end
151
151
 
152
152
  # Processes scheduled jobs that are ready for execution.
@@ -162,22 +162,22 @@ module FiberJob
162
162
  def self.scheduled_jobs(queue_name)
163
163
  redis_pool.with do |redis|
164
164
  now = Time.now.to_f
165
- jobs = redis.zrangebyscore("schedule:#{queue_name}", 0, now)
165
+ jobs = redis.call("ZRANGEBYSCORE", "schedule:#{queue_name}", 0, now)
166
166
 
167
167
  return 0 if jobs.empty?
168
168
 
169
169
  # Use pipeline for better performance with multiple operations
170
170
  redis.pipelined do |pipeline|
171
171
  jobs.each do |job_json|
172
- pipeline.zrem("schedule:#{queue_name}", job_json)
172
+ pipeline.call("ZREM", "schedule:#{queue_name}", job_json)
173
173
  job_data = JSON.parse(job_json)
174
174
 
175
175
  # Use priority queue for retries
176
176
  if job_data['priority_retry']
177
177
  job_data.delete('priority_retry') # Clean up the flag
178
- pipeline.rpush("queue:#{queue_name}", JSON.dump(job_data))
178
+ pipeline.call("RPUSH", "queue:#{queue_name}", JSON.dump(job_data))
179
179
  else
180
- pipeline.lpush("queue:#{queue_name}", JSON.dump(job_data))
180
+ pipeline.call("LPUSH", "queue:#{queue_name}", JSON.dump(job_data))
181
181
  end
182
182
  end
183
183
  end
@@ -199,9 +199,9 @@ module FiberJob
199
199
  def self.stats(queue_name)
200
200
  redis_pool.with do |redis|
201
201
  results = redis.pipelined do |pipeline|
202
- pipeline.llen("queue:#{queue_name}")
203
- pipeline.zcard("schedule:#{queue_name}")
204
- pipeline.get("processing:#{queue_name}")
202
+ pipeline.call("LLEN", "queue:#{queue_name}")
203
+ pipeline.call("ZCARD", "schedule:#{queue_name}")
204
+ pipeline.call("GET", "processing:#{queue_name}")
205
205
  end
206
206
 
207
207
  {
@@ -231,7 +231,7 @@ module FiberJob
231
231
  'error' => error.message,
232
232
  'backtrace' => error.backtrace&.first(10)
233
233
  })
234
- redis_pool.with { |redis| redis.lpush('failed', JSON.dump(failed_job_data)) }
234
+ redis_pool.with { |redis| redis.call("LPUSH", 'failed', JSON.dump(failed_job_data)) }
235
235
  end
236
236
 
237
237
  # Retrieves all failed jobs for inspection and debugging.
@@ -243,8 +243,165 @@ module FiberJob
243
243
  # failed.each { |job| puts "Failed: #{job['class']} - #{job['error']}" }
244
244
  def self.failed_jobs
245
245
  redis_pool.with do |redis|
246
- redis.lrange('failed', 0, -1).map { |job_json| JSON.parse(job_json) }
246
+ redis.call("LRANGE", 'failed', 0, -1).map { |job_json| JSON.parse(job_json) }
247
247
  end
248
248
  end
249
+
250
+ # Finds a job by its JID across all queues and scheduled jobs.
251
+ # Searches through active queues, scheduled jobs, and failed jobs.
252
+ #
253
+ # @param jid [String] The job ID to search for
254
+ # @return [Hash, nil] Job data if found, nil otherwise
255
+ #
256
+ # @example Find job by JID
257
+ # job = FiberJob::Queue.find_job("01J4X8K2V3N9QRSTUVWXYZ1234")
258
+ # if job
259
+ # puts "Found job: #{job['class']} in #{job['status']}"
260
+ # end
261
+ def self.find_job(jid)
262
+ return nil unless jid
263
+
264
+ redis_pool.with do |redis|
265
+ # Search in all known queues
266
+ config.queues.each do |queue_name|
267
+ # Check active queue
268
+ queue_jobs = redis.call("LRANGE", "queue:#{queue_name}", 0, -1)
269
+ queue_jobs.each do |job_json|
270
+ job_data = JSON.parse(job_json)
271
+ if job_data['jid'] == jid
272
+ return job_data.merge('status' => 'queued', 'queue' => queue_name)
273
+ end
274
+ end
275
+
276
+ # Check scheduled jobs
277
+ scheduled_jobs = redis.call("ZRANGE", "schedule:#{queue_name}", 0, -1)
278
+ scheduled_jobs.each do |job_json|
279
+ job_data = JSON.parse(job_json)
280
+ if job_data['jid'] == jid
281
+ return job_data.merge('status' => 'scheduled', 'queue' => queue_name)
282
+ end
283
+ end
284
+ end
285
+
286
+ # Check failed jobs
287
+ failed_jobs = redis.call("LRANGE", 'failed', 0, -1)
288
+ failed_jobs.each do |job_json|
289
+ job_data = JSON.parse(job_json)
290
+ if job_data['jid'] == jid
291
+ return job_data.merge('status' => 'failed')
292
+ end
293
+ end
294
+ end
295
+
296
+ nil
297
+ end
298
+
299
+ # Cancels a job by removing it from queues.
300
+ # Can cancel jobs that are queued or scheduled, but not currently processing.
301
+ #
302
+ # @param jid [String] The job ID to cancel
303
+ # @return [Boolean] True if job was found and cancelled, false otherwise
304
+ #
305
+ # @example Cancel a job
306
+ # success = FiberJob::Queue.cancel_job("01J4X8K2V3N9QRSTUVWXYZ1234")
307
+ # puts success ? "Job cancelled" : "Job not found or already processing"
308
+ def self.cancel_job(jid)
309
+ return false unless jid
310
+
311
+ redis_pool.with do |redis|
312
+ # Search and remove from all known queues
313
+ config.queues.each do |queue_name|
314
+ # Check active queue
315
+ queue_jobs = redis.call("LRANGE", "queue:#{queue_name}", 0, -1)
316
+ queue_jobs.each_with_index do |job_json, index|
317
+ job_data = JSON.parse(job_json)
318
+ if job_data['jid'] == jid
319
+ # Remove from queue by index (note: LREM removes by value)
320
+ redis.call("LREM", "queue:#{queue_name}", 1, job_json)
321
+ return true
322
+ end
323
+ end
324
+
325
+ # Check scheduled jobs
326
+ scheduled_jobs = redis.call("ZRANGE", "schedule:#{queue_name}", 0, -1)
327
+ scheduled_jobs.each do |job_json|
328
+ job_data = JSON.parse(job_json)
329
+ if job_data['jid'] == jid
330
+ redis.call("ZREM", "schedule:#{queue_name}", job_json)
331
+ return true
332
+ end
333
+ end
334
+ end
335
+ end
336
+
337
+ false
338
+ end
339
+
340
+ # Gets the status of a job by its JID.
341
+ # Possible statuses: 'queued', 'scheduled', 'failed', 'not_found'
342
+ #
343
+ # @param jid [String] The job ID to check
344
+ # @return [String] Job status
345
+ #
346
+ # @example Check job status
347
+ # status = FiberJob::Queue.job_status("01J4X8K2V3N9QRSTUVWXYZ1234")
348
+ # case status
349
+ # when 'queued'
350
+ # puts "Job is waiting to be processed"
351
+ # when 'scheduled'
352
+ # puts "Job is scheduled for future execution"
353
+ # when 'failed'
354
+ # puts "Job failed permanently"
355
+ # when 'not_found'
356
+ # puts "Job not found (may have completed successfully)"
357
+ # end
358
+ def self.job_status(jid)
359
+ job = find_job(jid)
360
+ job ? job['status'] : 'not_found'
361
+ end
362
+
363
+ # Lists all jobs with their JIDs and statuses.
364
+ # Useful for debugging and monitoring.
365
+ #
366
+ # @param limit [Integer] Maximum number of jobs to return per status (default: 100)
367
+ # @return [Hash] Hash with status as keys and arrays of job data as values
368
+ #
369
+ # @example List all jobs
370
+ # jobs = FiberJob::Queue.list_jobs(limit: 50)
371
+ # jobs['queued'].each { |job| puts "Queued: #{job['class']} [#{job['jid']}]" }
372
+ def self.list_jobs(limit: 100)
373
+ result = {
374
+ 'queued' => [],
375
+ 'scheduled' => [],
376
+ 'failed' => []
377
+ }
378
+
379
+ redis_pool.with do |redis|
380
+ # Get queued jobs
381
+ config.queues.each do |queue_name|
382
+ queue_jobs = redis.call("LRANGE", "queue:#{queue_name}", 0, limit - 1)
383
+ queue_jobs.each do |job_json|
384
+ job_data = JSON.parse(job_json)
385
+ result['queued'] << job_data.merge('queue' => queue_name)
386
+ end
387
+
388
+ # Get scheduled jobs
389
+ scheduled_jobs = redis.call("ZRANGE", "schedule:#{queue_name}", 0, limit - 1)
390
+ scheduled_jobs.each do |job_json|
391
+ job_data = JSON.parse(job_json)
392
+ result['scheduled'] << job_data.merge('queue' => queue_name)
393
+ end
394
+ end
395
+
396
+ # Get failed jobs
397
+ failed_jobs = redis.call("LRANGE", 'failed', 0, limit - 1)
398
+ failed_jobs.each do |job_json|
399
+ job_data = JSON.parse(job_json)
400
+ result['failed'] << job_data
401
+ end
402
+ end
403
+
404
+ result
405
+ end
249
406
  end
250
407
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module FiberJob
4
- VERSION = '0.2.4'
4
+ VERSION = '0.2.6'
5
5
  end
@@ -116,6 +116,10 @@ module FiberJob
116
116
  job = job_class.new
117
117
 
118
118
  job.retry_count = job_data['retry_count'] || 0
119
+ job.jid = job_data['jid']
120
+
121
+ jid_info = job.jid ? " [#{job.jid}]" : ""
122
+ FiberJob.logger.info "Starting #{job_class.name}#{jid_info}"
119
123
 
120
124
  begin
121
125
  Timeout.timeout(job.timeout) do
@@ -123,7 +127,10 @@ module FiberJob
123
127
  args << job_data['enqueued_at'] if job_data['enqueued_at']
124
128
  job.perform(*args)
125
129
  end
130
+
131
+ FiberJob.logger.info "Completed #{job_class.name}#{jid_info}"
126
132
  rescue => e
133
+ FiberJob.logger.error "Failed #{job_class.name}#{jid_info}: #{e.message}"
127
134
  handle_failure(job, job_data, e)
128
135
  end
129
136
  end
@@ -146,6 +153,8 @@ module FiberJob
146
153
  end
147
154
 
148
155
  def handle_failure(job, job_data, error)
156
+ jid_info = job.jid ? " [#{job.jid}]" : ""
157
+
149
158
  if job.retry_count < job.max_retries
150
159
  job.retry_count += 1
151
160
  delay = job.retry_delay(job.retry_count)
@@ -153,7 +162,7 @@ module FiberJob
153
162
  retry_job_data = job_data.dup
154
163
  retry_job_data['retry_count'] = job.retry_count
155
164
 
156
- message = "#{job.class} failed: #{error.message}. "
165
+ message = "#{job.class}#{jid_info} failed: #{error.message}. "
157
166
  message += "Retrying in #{delay.round(1)}s (attempt #{job.retry_count}/#{job.max_retries})"
158
167
  FiberJob.logger.warn message
159
168
 
@@ -163,7 +172,7 @@ module FiberJob
163
172
  Queue.schedule(job.queue, retry_job_data, Time.now.to_f + delay)
164
173
  end
165
174
  else
166
- FiberJob.logger.error "#{job.class} permanently failed after #{job.max_retries} retries: #{error.message}"
175
+ FiberJob.logger.error "#{job.class}#{jid_info} permanently failed after #{job.max_retries} retries: #{error.message}"
167
176
  Queue.store_failed_job(job_data, error)
168
177
  end
169
178
  end
data/lib/fiber_job.rb CHANGED
@@ -3,6 +3,7 @@
3
3
  require_relative 'fiber_job/version'
4
4
  require_relative 'fiber_job/config'
5
5
  require_relative 'fiber_job/logger'
6
+ require_relative 'fiber_job/jid'
6
7
  require_relative 'fiber_job/job'
7
8
  require_relative 'fiber_job/queue'
8
9
  require_relative 'fiber_job/worker'
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fiber_job
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.4
4
+ version: 0.2.6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Caio Mendonca
@@ -14,6 +14,9 @@ dependencies:
14
14
  requirement: !ruby/object:Gem::Requirement
15
15
  requirements:
16
16
  - - "~>"
17
+ - !ruby/object:Gem::Version
18
+ version: '2.26'
19
+ - - ">="
17
20
  - !ruby/object:Gem::Version
18
21
  version: 2.26.0
19
22
  type: :runtime
@@ -21,36 +24,71 @@ dependencies:
21
24
  version_requirements: !ruby/object:Gem::Requirement
22
25
  requirements:
23
26
  - - "~>"
27
+ - !ruby/object:Gem::Version
28
+ version: '2.26'
29
+ - - ">="
24
30
  - !ruby/object:Gem::Version
25
31
  version: 2.26.0
26
32
  - !ruby/object:Gem::Dependency
27
- name: redis
33
+ name: connection_pool
28
34
  requirement: !ruby/object:Gem::Requirement
29
35
  requirements:
30
36
  - - "~>"
31
37
  - !ruby/object:Gem::Version
32
- version: 5.4.1
38
+ version: '2.5'
39
+ - - ">="
40
+ - !ruby/object:Gem::Version
41
+ version: 2.5.0
33
42
  type: :runtime
34
43
  prerelease: false
35
44
  version_requirements: !ruby/object:Gem::Requirement
36
45
  requirements:
37
46
  - - "~>"
38
47
  - !ruby/object:Gem::Version
39
- version: 5.4.1
48
+ version: '2.5'
49
+ - - ">="
50
+ - !ruby/object:Gem::Version
51
+ version: 2.5.0
40
52
  - !ruby/object:Gem::Dependency
41
- name: connection_pool
53
+ name: redis-client
42
54
  requirement: !ruby/object:Gem::Requirement
43
55
  requirements:
44
56
  - - "~>"
45
57
  - !ruby/object:Gem::Version
46
- version: 2.5.3
58
+ version: '0.24'
59
+ - - ">="
60
+ - !ruby/object:Gem::Version
61
+ version: 0.24.0
47
62
  type: :runtime
48
63
  prerelease: false
49
64
  version_requirements: !ruby/object:Gem::Requirement
50
65
  requirements:
51
66
  - - "~>"
52
67
  - !ruby/object:Gem::Version
53
- version: 2.5.3
68
+ version: '0.24'
69
+ - - ">="
70
+ - !ruby/object:Gem::Version
71
+ version: 0.24.0
72
+ - !ruby/object:Gem::Dependency
73
+ name: hiredis-client
74
+ requirement: !ruby/object:Gem::Requirement
75
+ requirements:
76
+ - - "~>"
77
+ - !ruby/object:Gem::Version
78
+ version: '0.24'
79
+ - - ">="
80
+ - !ruby/object:Gem::Version
81
+ version: 0.24.0
82
+ type: :runtime
83
+ prerelease: false
84
+ version_requirements: !ruby/object:Gem::Requirement
85
+ requirements:
86
+ - - "~>"
87
+ - !ruby/object:Gem::Version
88
+ version: '0.24'
89
+ - - ">="
90
+ - !ruby/object:Gem::Version
91
+ version: 0.24.0
54
92
  executables:
55
93
  - fiber_job
56
94
  extensions: []
@@ -66,6 +104,7 @@ files:
66
104
  - lib/fiber_job/cron.rb
67
105
  - lib/fiber_job/cron_job.rb
68
106
  - lib/fiber_job/cron_parser.rb
107
+ - lib/fiber_job/jid.rb
69
108
  - lib/fiber_job/job.rb
70
109
  - lib/fiber_job/logger.rb
71
110
  - lib/fiber_job/process_manager.rb
@@ -92,7 +131,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
92
131
  - !ruby/object:Gem::Version
93
132
  version: '0'
94
133
  requirements: []
95
- rubygems_version: 3.6.7
134
+ rubygems_version: 3.7.1
96
135
  specification_version: 4
97
136
  summary: Experimental High-performance, Redis-based background job processing library
98
137
  for Ruby built on fiber-based concurrency