tina4ruby 3.11.13 → 3.11.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +80 -80
- data/LICENSE.txt +21 -21
- data/README.md +137 -137
- data/exe/tina4ruby +5 -5
- data/lib/tina4/ai.rb +696 -696
- data/lib/tina4/api.rb +189 -189
- data/lib/tina4/auth.rb +305 -305
- data/lib/tina4/auto_crud.rb +244 -244
- data/lib/tina4/cache.rb +154 -154
- data/lib/tina4/cli.rb +1449 -1449
- data/lib/tina4/constants.rb +46 -46
- data/lib/tina4/container.rb +74 -74
- data/lib/tina4/cors.rb +74 -74
- data/lib/tina4/crud.rb +692 -692
- data/lib/tina4/database/sqlite3_adapter.rb +165 -165
- data/lib/tina4/database.rb +625 -625
- data/lib/tina4/database_result.rb +208 -208
- data/lib/tina4/debug.rb +8 -8
- data/lib/tina4/dev.rb +14 -14
- data/lib/tina4/dev_admin.rb +935 -935
- data/lib/tina4/dev_mailbox.rb +191 -191
- data/lib/tina4/drivers/firebird_driver.rb +124 -110
- data/lib/tina4/drivers/mongodb_driver.rb +561 -561
- data/lib/tina4/drivers/mssql_driver.rb +112 -112
- data/lib/tina4/drivers/mysql_driver.rb +90 -90
- data/lib/tina4/drivers/odbc_driver.rb +191 -191
- data/lib/tina4/drivers/postgres_driver.rb +116 -106
- data/lib/tina4/drivers/sqlite_driver.rb +122 -122
- data/lib/tina4/env.rb +95 -95
- data/lib/tina4/error_overlay.rb +252 -252
- data/lib/tina4/events.rb +109 -109
- data/lib/tina4/field_types.rb +154 -154
- data/lib/tina4/frond.rb +2025 -2025
- data/lib/tina4/gallery/auth/meta.json +1 -1
- data/lib/tina4/gallery/auth/src/routes/api/gallery_auth.rb +114 -114
- data/lib/tina4/gallery/database/meta.json +1 -1
- data/lib/tina4/gallery/database/src/routes/api/gallery_db.rb +43 -43
- data/lib/tina4/gallery/error-overlay/meta.json +1 -1
- data/lib/tina4/gallery/error-overlay/src/routes/api/gallery_crash.rb +17 -17
- data/lib/tina4/gallery/orm/meta.json +1 -1
- data/lib/tina4/gallery/orm/src/routes/api/gallery_products.rb +16 -16
- data/lib/tina4/gallery/queue/meta.json +1 -1
- data/lib/tina4/gallery/queue/src/routes/api/gallery_queue.rb +325 -325
- data/lib/tina4/gallery/rest-api/meta.json +1 -1
- data/lib/tina4/gallery/rest-api/src/routes/api/gallery_hello.rb +14 -14
- data/lib/tina4/gallery/templates/meta.json +1 -1
- data/lib/tina4/gallery/templates/src/routes/gallery_page.rb +12 -12
- data/lib/tina4/gallery/templates/src/templates/gallery_page.twig +257 -257
- data/lib/tina4/graphql.rb +966 -966
- data/lib/tina4/health.rb +39 -39
- data/lib/tina4/html_element.rb +170 -170
- data/lib/tina4/job.rb +80 -80
- data/lib/tina4/localization.rb +168 -168
- data/lib/tina4/log.rb +203 -203
- data/lib/tina4/mcp.rb +696 -696
- data/lib/tina4/messenger.rb +587 -587
- data/lib/tina4/metrics.rb +793 -793
- data/lib/tina4/middleware.rb +445 -445
- data/lib/tina4/migration.rb +451 -451
- data/lib/tina4/orm.rb +790 -790
- data/lib/tina4/public/css/tina4.css +2463 -2463
- data/lib/tina4/public/css/tina4.min.css +1 -1
- data/lib/tina4/public/images/logo.svg +5 -5
- data/lib/tina4/public/js/frond.min.js +2 -2
- data/lib/tina4/public/js/tina4-dev-admin.js +565 -565
- data/lib/tina4/public/js/tina4-dev-admin.min.js +480 -480
- data/lib/tina4/public/js/tina4.min.js +92 -92
- data/lib/tina4/public/js/tina4js.min.js +48 -48
- data/lib/tina4/public/swagger/index.html +90 -90
- data/lib/tina4/public/swagger/oauth2-redirect.html +63 -63
- data/lib/tina4/query_builder.rb +380 -380
- data/lib/tina4/queue.rb +366 -366
- data/lib/tina4/queue_backends/kafka_backend.rb +80 -80
- data/lib/tina4/queue_backends/lite_backend.rb +298 -298
- data/lib/tina4/queue_backends/mongo_backend.rb +126 -126
- data/lib/tina4/queue_backends/rabbitmq_backend.rb +73 -73
- data/lib/tina4/rack_app.rb +817 -817
- data/lib/tina4/rate_limiter.rb +130 -130
- data/lib/tina4/request.rb +268 -255
- data/lib/tina4/response.rb +346 -346
- data/lib/tina4/response_cache.rb +551 -551
- data/lib/tina4/router.rb +406 -406
- data/lib/tina4/scss/tina4css/_alerts.scss +34 -34
- data/lib/tina4/scss/tina4css/_badges.scss +22 -22
- data/lib/tina4/scss/tina4css/_buttons.scss +69 -69
- data/lib/tina4/scss/tina4css/_cards.scss +49 -49
- data/lib/tina4/scss/tina4css/_forms.scss +156 -156
- data/lib/tina4/scss/tina4css/_grid.scss +81 -81
- data/lib/tina4/scss/tina4css/_modals.scss +84 -84
- data/lib/tina4/scss/tina4css/_nav.scss +149 -149
- data/lib/tina4/scss/tina4css/_reset.scss +94 -94
- data/lib/tina4/scss/tina4css/_tables.scss +54 -54
- data/lib/tina4/scss/tina4css/_typography.scss +55 -55
- data/lib/tina4/scss/tina4css/_utilities.scss +197 -197
- data/lib/tina4/scss/tina4css/_variables.scss +117 -117
- data/lib/tina4/scss/tina4css/base.scss +1 -1
- data/lib/tina4/scss/tina4css/colors.scss +48 -48
- data/lib/tina4/scss/tina4css/tina4.scss +17 -17
- data/lib/tina4/scss_compiler.rb +178 -178
- data/lib/tina4/seeder.rb +567 -567
- data/lib/tina4/service_runner.rb +303 -303
- data/lib/tina4/session.rb +297 -297
- data/lib/tina4/session_handlers/database_handler.rb +72 -72
- data/lib/tina4/session_handlers/file_handler.rb +67 -67
- data/lib/tina4/session_handlers/mongo_handler.rb +49 -49
- data/lib/tina4/session_handlers/redis_handler.rb +43 -43
- data/lib/tina4/session_handlers/valkey_handler.rb +43 -43
- data/lib/tina4/shutdown.rb +84 -84
- data/lib/tina4/sql_translation.rb +158 -158
- data/lib/tina4/swagger.rb +124 -124
- data/lib/tina4/template.rb +894 -894
- data/lib/tina4/templates/base.twig +26 -26
- data/lib/tina4/templates/errors/302.twig +14 -14
- data/lib/tina4/templates/errors/401.twig +9 -9
- data/lib/tina4/templates/errors/403.twig +29 -29
- data/lib/tina4/templates/errors/404.twig +29 -29
- data/lib/tina4/templates/errors/500.twig +38 -38
- data/lib/tina4/templates/errors/502.twig +9 -9
- data/lib/tina4/templates/errors/503.twig +12 -12
- data/lib/tina4/templates/errors/base.twig +37 -37
- data/lib/tina4/test_client.rb +159 -159
- data/lib/tina4/testing.rb +340 -340
- data/lib/tina4/validator.rb +174 -174
- data/lib/tina4/version.rb +1 -1
- data/lib/tina4/webserver.rb +312 -312
- data/lib/tina4/websocket.rb +343 -343
- data/lib/tina4/websocket_backplane.rb +190 -190
- data/lib/tina4/wsdl.rb +564 -564
- data/lib/tina4.rb +458 -458
- data/lib/tina4ruby.rb +4 -4
- metadata +3 -3
|
@@ -1,298 +1,298 @@
|
|
|
# frozen_string_literal: true

require "json"
require "fileutils"
require "time"

module Tina4
  module QueueBackends
    # File-system backed queue backend ("lite"): every pending message is a
    # single JSON file under <dir>/<topic>/ and dead-lettered messages live
    # under <dir>/dead_letter/. Dequeue order is priority (descending), ties
    # broken by file mtime (oldest first). Enqueue and the two dequeue
    # operations are serialised with a Mutex; the maintenance helpers
    # (size, purge, clear, retry_*) are best-effort and unsynchronised.
    class LiteBackend
      # @param options [Hash] :dir — root queue directory (default: ./.queue)
      def initialize(options = {})
        @dir = options[:dir] || File.join(Dir.pwd, ".queue")
        @dead_letter_dir = File.join(@dir, "dead_letter")
        FileUtils.mkdir_p(@dir)
        FileUtils.mkdir_p(@dead_letter_dir)
        @mutex = Mutex.new
      end

      # Persist a message as <topic_dir>/<id>.json.
      def enqueue(message)
        @mutex.synchronize do
          topic_dir = topic_path(message.topic)
          FileUtils.mkdir_p(topic_dir)
          path = File.join(topic_dir, "#{message.id}.json")
          File.write(path, message.to_json)
        end
      end

      # Pop the best-ranked currently-available message, or nil when none.
      def dequeue(topic)
        @mutex.synchronize do
          dir = topic_path(topic)
          return nil unless Dir.exist?(dir)

          candidates = eligible_candidates(dir)
          return nil if candidates.empty?

          chosen = candidates.first
          File.delete(chosen[:file])
          build_job(chosen[:data], topic)
        end
      end

      # Pop up to `count` messages in priority/mtime order; [] when none.
      def dequeue_batch(topic, count)
        @mutex.synchronize do
          dir = topic_path(topic)
          return [] unless Dir.exist?(dir)

          candidates = eligible_candidates(dir)
          return [] if candidates.empty?

          candidates.first(count).map do |c|
            File.delete(c[:file])
            build_job(c[:data], topic)
          end
        end
      end

      # No-op: the backing file is already deleted on dequeue.
      def acknowledge(message)
      end

      # Put a message back on its topic queue.
      def requeue(message)
        enqueue(message)
      end

      # Persist a message into the dead-letter directory.
      def dead_letter(message)
        path = File.join(@dead_letter_dir, "#{message.id}.json")
        File.write(path, message.to_json)
      end

      # Number of pending message files for a topic.
      def size(topic)
        dir = topic_path(topic)
        return 0 unless Dir.exist?(dir)
        Dir.glob(File.join(dir, "*.json")).length
      end

      # Count dead-letter / failed messages for a topic.
      def dead_letter_count(topic)
        return 0 unless Dir.exist?(@dead_letter_dir)

        count = 0
        Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |file|
          data = JSON.parse(File.read(file))
          count += 1 if data["topic"] == topic.to_s
        rescue JSON::ParserError
          next
        end
        count
      end

      # All topic names: sub-directories of @dir, excluding dead_letter.
      def topics
        return [] unless Dir.exist?(@dir)
        Dir.children(@dir)
           .reject { |d| d == "dead_letter" }
           .select { |d| File.directory?(File.join(@dir, d)) }
      end

      # Get dead letter jobs for a topic — messages that exceeded max retries.
      # FIX: max_retries was previously accepted but ignored, so every
      # dead-letter file for the topic was returned (overlapping with
      # `failed`). Now only messages whose attempts reached max_retries are
      # reported, matching the documented contract and the sibling `failed`
      # filter.
      def dead_letters(topic, max_retries: 3)
        return [] unless Dir.exist?(@dead_letter_dir)

        files = Dir.glob(File.join(@dead_letter_dir, "*.json")).sort_by { |f| File.mtime(f) }
        jobs = []

        files.each do |file|
          data = JSON.parse(File.read(file))
          next unless data["topic"] == topic.to_s
          next if (data["attempts"] || 0) < max_retries # still retryable — see #failed

          data["status"] = "dead"
          jobs << data
        rescue JSON::ParserError
          next
        end

        jobs
      end

      # Delete messages by status (completed, failed, dead, pending).
      # For 'dead', removes from the dead_letter directory.
      # Otherwise removes matching files from the topic directory.
      # Returns the number of jobs purged.
      def purge(topic, status)
        count = 0

        if status.to_s == "dead"
          return 0 unless Dir.exist?(@dead_letter_dir)

          Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |file|
            data = JSON.parse(File.read(file))
            if data["topic"] == topic.to_s
              File.delete(file)
              count += 1
            end
          rescue JSON::ParserError
            next
          end
        elsif %w[failed completed pending].include?(status.to_s)
          dir = topic_path(topic)
          return 0 unless Dir.exist?(dir)

          Dir.glob(File.join(dir, "*.json")).each do |file|
            data = JSON.parse(File.read(file))
            if data["status"] == status.to_s
              File.delete(file)
              count += 1
            end
          rescue JSON::ParserError
            next
          end
        end

        count
      end

      # Re-queue failed messages (under max_retries) back to pending.
      # Returns the number of jobs re-queued.
      def retry_failed(topic, max_retries: 3)
        return 0 unless Dir.exist?(@dead_letter_dir)

        FileUtils.mkdir_p(topic_path(topic))
        count = 0

        # Dead-letter directory contains messages the Consumer moved there.
        # Only retry those whose attempts are under max_retries.
        Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |file|
          data = JSON.parse(File.read(file))
          next unless data["topic"] == topic.to_s
          next if (data["attempts"] || 0) >= max_retries

          msg = Tina4::Job.new(
            topic: data["topic"],
            payload: data["payload"],
            id: data["id"]
          )
          enqueue(msg)
          File.delete(file)
          count += 1
        rescue JSON::ParserError
          next
        end

        count
      end

      # Remove all pending jobs from a topic. Returns count removed.
      def clear(topic)
        dir = topic_path(topic)
        return 0 unless Dir.exist?(dir)

        files = Dir.glob(File.join(dir, "*.json"))
        files.each { |file| File.delete(file) }
        files.length
      end

      # Jobs that failed but are still eligible for retry (under max_retries).
      def failed(topic, max_retries: 3)
        return [] unless Dir.exist?(@dead_letter_dir)

        jobs = []
        Dir.glob(File.join(@dead_letter_dir, "*.json")).sort_by { |f| File.mtime(f) }.each do |file|
          data = JSON.parse(File.read(file))
          next unless data["topic"] == topic.to_s
          next if (data["attempts"] || 0) >= max_retries

          jobs << data
        rescue JSON::ParserError
          next
        end
        jobs
      end

      # Retry dead-letter jobs for this topic (all, or a single one when
      # job_id is given). Each retried job gets attempts incremented and,
      # when delay_seconds > 0, an available_at in the future.
      # Returns true if any were re-queued.
      def retry_job(topic, job_id: nil, delay_seconds: 0)
        return false unless Dir.exist?(@dead_letter_dir)

        available_at = delay_seconds > 0 ? Time.now + delay_seconds : nil
        count = 0

        Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |file|
          data = JSON.parse(File.read(file))
          next unless data["topic"] == topic.to_s
          next if job_id && data["id"] != job_id.to_s

          msg = Tina4::Job.new(
            topic: data["topic"],
            payload: data["payload"],
            id: data["id"],
            attempts: (data["attempts"] || 0) + 1,
            available_at: available_at
          )
          enqueue(msg)
          File.delete(file)
          count += 1
          break if job_id # found the specific job, stop scanning
        rescue JSON::ParserError
          next
        end

        count > 0
      end

      private

      # Per-topic directory with unsafe characters replaced by underscores.
      def topic_path(topic)
        safe_topic = topic.to_s.gsub(/[^a-zA-Z0-9_-]/, "_")
        File.join(@dir, safe_topic)
      end

      # Readable, currently-available messages in dir, sorted by priority
      # (desc) then mtime (asc). Entries: { file:, data:, priority:, mtime: }.
      def eligible_candidates(dir, now = Time.now)
        candidates = []
        Dir.glob(File.join(dir, "*.json")).each do |f|
          data = JSON.parse(File.read(f))
          # Skip messages that are not yet available (delayed)
          if data["available_at"]
            next if Time.parse(data["available_at"]) > now
          end
          candidates << { file: f, data: data, priority: data["priority"] || 0, mtime: File.mtime(f) }
        rescue JSON::ParserError
          next
        end
        candidates.sort_by! { |c| [-c[:priority], c[:mtime]] }
        candidates
      end

      # Rehydrate a Tina4::Job from a parsed message hash.
      def build_job(data, topic)
        Tina4::Job.new(
          topic: data["topic"] || topic.to_s,
          payload: data["payload"],
          id: data["id"],
          priority: data["priority"] || 0,
          available_at: data["available_at"] ? Time.parse(data["available_at"]) : nil,
          attempts: data["attempts"] || 0
        )
      end
    end
  end
end
# frozen_string_literal: true

require "json"
require "fileutils"
require "time"

module Tina4
  module QueueBackends
    # Disk-based queue backend. Every message is one JSON file: pending
    # messages under <root>/<topic>/, dead letters under <root>/dead_letter/.
    # A Mutex guards enqueue and both dequeue operations; dequeue picks by
    # highest priority, then oldest file mtime.
    class LiteBackend
      def initialize(options = {})
        @dir = options[:dir] || File.join(Dir.pwd, ".queue")
        @dead_letter_dir = File.join(@dir, "dead_letter")
        [@dir, @dead_letter_dir].each { |d| FileUtils.mkdir_p(d) }
        @mutex = Mutex.new
      end

      # Write the message to <topic dir>/<id>.json under the lock.
      def enqueue(message)
        @mutex.synchronize do
          destination = topic_path(message.topic)
          FileUtils.mkdir_p(destination)
          File.write(File.join(destination, "#{message.id}.json"), message.to_json)
        end
      end

      # Remove and return the best-ranked ready message, or nil when none.
      def dequeue(topic)
        @mutex.synchronize do
          queue_dir = topic_path(topic)
          return nil unless Dir.exist?(queue_dir)

          ready = scan_ready(queue_dir)
          return nil if ready.empty?

          # Highest priority wins; ties broken by oldest file.
          winner = ready.min_by { |c| [-c[:priority], c[:mtime]] }
          File.delete(winner[:file])
          hydrate(winner[:data], topic)
        end
      end

      # Remove and return up to `count` ready messages, best-ranked first.
      def dequeue_batch(topic, count)
        @mutex.synchronize do
          queue_dir = topic_path(topic)
          return [] unless Dir.exist?(queue_dir)

          ready = scan_ready(queue_dir)
          return [] if ready.empty?

          ranked = ready.sort_by { |c| [-c[:priority], c[:mtime]] }
          ranked.take(count).map do |candidate|
            File.delete(candidate[:file])
            hydrate(candidate[:data], topic)
          end
        end
      end

      # Nothing to do: dequeue already deleted the backing file.
      def acknowledge(message)
      end

      # Push the message back onto its topic queue.
      def requeue(message)
        enqueue(message)
      end

      # Store the message in the shared dead-letter directory.
      def dead_letter(message)
        File.write(File.join(@dead_letter_dir, "#{message.id}.json"), message.to_json)
      end

      # Pending message count for a topic (0 when the directory is absent).
      def size(topic)
        queue_dir = topic_path(topic)
        Dir.exist?(queue_dir) ? Dir.glob(File.join(queue_dir, "*.json")).length : 0
      end

      # Count dead-letter / failed messages belonging to a topic.
      def dead_letter_count(topic)
        return 0 unless Dir.exist?(@dead_letter_dir)

        Dir.glob(File.join(@dead_letter_dir, "*.json")).count do |entry|
          begin
            JSON.parse(File.read(entry))["topic"] == topic.to_s
          rescue JSON::ParserError
            false
          end
        end
      end

      # Known topics: sub-directories of the root, minus dead_letter.
      def topics
        return [] unless Dir.exist?(@dir)

        Dir.children(@dir).select do |child|
          child != "dead_letter" && File.directory?(File.join(@dir, child))
        end
      end

      # Dead-letter jobs for a topic, oldest first, each tagged with
      # status "dead". NOTE(review): max_retries is accepted but not
      # applied here — confirm whether filtering is intended.
      def dead_letters(topic, max_retries: 3)
        return [] unless Dir.exist?(@dead_letter_dir)

        ordered = Dir.glob(File.join(@dead_letter_dir, "*.json")).sort_by { |f| File.mtime(f) }
        ordered.each_with_object([]) do |entry, collected|
          begin
            record = JSON.parse(File.read(entry))
          rescue JSON::ParserError
            next
          end
          next unless record["topic"] == topic.to_s

          record["status"] = "dead"
          collected << record
        end
      end

      # Delete messages by status. 'dead' purges the dead-letter directory
      # (matched by topic); failed/completed/pending purge the topic
      # directory (matched by the stored status). Returns how many were
      # removed.
      def purge(topic, status)
        status_name = status.to_s
        removed = 0

        if status_name == "dead"
          return 0 unless Dir.exist?(@dead_letter_dir)

          Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |entry|
            begin
              record = JSON.parse(File.read(entry))
            rescue JSON::ParserError
              next
            end
            next unless record["topic"] == topic.to_s

            File.delete(entry)
            removed += 1
          end
        elsif %w[failed completed pending].include?(status_name)
          queue_dir = topic_path(topic)
          return 0 unless Dir.exist?(queue_dir)

          Dir.glob(File.join(queue_dir, "*.json")).each do |entry|
            begin
              record = JSON.parse(File.read(entry))
            rescue JSON::ParserError
              next
            end
            next unless record["status"] == status_name

            File.delete(entry)
            removed += 1
          end
        end

        removed
      end

      # Move retryable dead letters (attempts under max_retries) back onto
      # the topic queue. Returns the number re-queued.
      def retry_failed(topic, max_retries: 3)
        return 0 unless Dir.exist?(@dead_letter_dir)

        FileUtils.mkdir_p(topic_path(topic))
        retried = 0

        Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |entry|
          begin
            record = JSON.parse(File.read(entry))
          rescue JSON::ParserError
            next
          end
          next unless record["topic"] == topic.to_s
          next unless (record["attempts"] || 0) < max_retries

          record["status"] = "pending"
          enqueue(Tina4::Job.new(topic: record["topic"], payload: record["payload"], id: record["id"]))
          File.delete(entry)
          retried += 1
        end

        retried
      end

      # Drop every pending job file for a topic; returns how many were removed.
      def clear(topic)
        queue_dir = topic_path(topic)
        return 0 unless Dir.exist?(queue_dir)

        entries = Dir.glob(File.join(queue_dir, "*.json"))
        entries.each { |entry| File.delete(entry) }
        entries.size
      end

      # Failed jobs still eligible for retry (attempts under max_retries),
      # oldest first.
      def failed(topic, max_retries: 3)
        return [] unless Dir.exist?(@dead_letter_dir)

        ordered = Dir.glob(File.join(@dead_letter_dir, "*.json")).sort_by { |f| File.mtime(f) }
        ordered.each_with_object([]) do |entry, collected|
          begin
            record = JSON.parse(File.read(entry))
          rescue JSON::ParserError
            next
          end
          next unless record["topic"] == topic.to_s
          next unless (record["attempts"] || 0) < max_retries

          collected << record
        end
      end

      # Re-queue dead-letter jobs for a topic: all of them, or a single one
      # when job_id is given. Attempts are incremented; delay_seconds > 0
      # schedules the retry via available_at. Returns true when anything
      # was re-queued.
      def retry_job(topic, job_id: nil, delay_seconds: 0)
        return false unless Dir.exist?(@dead_letter_dir)

        run_at = delay_seconds > 0 ? Time.now + delay_seconds : nil
        retried = 0

        Dir.glob(File.join(@dead_letter_dir, "*.json")).each do |entry|
          begin
            record = JSON.parse(File.read(entry))
          rescue JSON::ParserError
            next
          end
          next unless record["topic"] == topic.to_s
          next if job_id && record["id"] != job_id.to_s

          enqueue(
            Tina4::Job.new(
              topic: record["topic"],
              payload: record["payload"],
              id: record["id"],
              attempts: (record["attempts"] || 0) + 1,
              available_at: run_at
            )
          )
          File.delete(entry)
          retried += 1
          break if job_id # the requested job was found, stop scanning
        end

        retried.positive?
      end

      private

      # Sanitised per-topic directory path.
      def topic_path(topic)
        File.join(@dir, topic.to_s.gsub(/[^a-zA-Z0-9_-]/, "_"))
      end

      # Parseable, currently-available message files in queue_dir as
      # { file:, data:, priority:, mtime: } entries (unsorted).
      def scan_ready(queue_dir, current_time = Time.now)
        ready = []
        Dir.glob(File.join(queue_dir, "*.json")).each do |entry|
          begin
            parsed = JSON.parse(File.read(entry))
          rescue JSON::ParserError
            next
          end
          # Delayed messages stay invisible until available_at passes.
          if parsed["available_at"]
            next if Time.parse(parsed["available_at"]) > current_time
          end
          ready << { file: entry, data: parsed, priority: parsed["priority"] || 0, mtime: File.mtime(entry) }
        end
        ready
      end

      # Rebuild a Tina4::Job from a parsed message hash.
      def hydrate(payload, topic)
        Tina4::Job.new(
          topic: payload["topic"] || topic.to_s,
          payload: payload["payload"],
          id: payload["id"],
          priority: payload["priority"] || 0,
          available_at: payload["available_at"] ? Time.parse(payload["available_at"]) : nil,
          attempts: payload["attempts"] || 0
        )
      end
    end
  end
end