pgbus 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. checksums.yaml +7 -0
  2. data/.bun-version +1 -0
  3. data/.claude/commands/architect.md +100 -0
  4. data/.claude/commands/github-review-comments.md +237 -0
  5. data/.claude/commands/lfg.md +271 -0
  6. data/.claude/commands/review-pr.md +69 -0
  7. data/.claude/commands/security.md +122 -0
  8. data/.claude/commands/tdd.md +148 -0
  9. data/.claude/rules/agents.md +49 -0
  10. data/.claude/rules/coding-style.md +91 -0
  11. data/.claude/rules/git-workflow.md +56 -0
  12. data/.claude/rules/performance.md +73 -0
  13. data/.claude/rules/testing.md +67 -0
  14. data/CHANGELOG.md +5 -0
  15. data/CLAUDE.md +80 -0
  16. data/CODE_OF_CONDUCT.md +10 -0
  17. data/LICENSE.txt +21 -0
  18. data/README.md +417 -0
  19. data/Rakefile +14 -0
  20. data/app/controllers/pgbus/api/stats_controller.rb +11 -0
  21. data/app/controllers/pgbus/application_controller.rb +35 -0
  22. data/app/controllers/pgbus/dashboard_controller.rb +27 -0
  23. data/app/controllers/pgbus/dead_letter_controller.rb +50 -0
  24. data/app/controllers/pgbus/events_controller.rb +23 -0
  25. data/app/controllers/pgbus/jobs_controller.rb +48 -0
  26. data/app/controllers/pgbus/processes_controller.rb +10 -0
  27. data/app/controllers/pgbus/queues_controller.rb +21 -0
  28. data/app/helpers/pgbus/application_helper.rb +69 -0
  29. data/app/views/layouts/pgbus/application.html.erb +76 -0
  30. data/app/views/pgbus/dashboard/_processes_table.html.erb +30 -0
  31. data/app/views/pgbus/dashboard/_queues_table.html.erb +39 -0
  32. data/app/views/pgbus/dashboard/_recent_failures.html.erb +33 -0
  33. data/app/views/pgbus/dashboard/_stats_cards.html.erb +28 -0
  34. data/app/views/pgbus/dashboard/show.html.erb +10 -0
  35. data/app/views/pgbus/dead_letter/_messages_table.html.erb +40 -0
  36. data/app/views/pgbus/dead_letter/index.html.erb +15 -0
  37. data/app/views/pgbus/dead_letter/show.html.erb +52 -0
  38. data/app/views/pgbus/events/index.html.erb +57 -0
  39. data/app/views/pgbus/events/show.html.erb +28 -0
  40. data/app/views/pgbus/jobs/_enqueued_table.html.erb +34 -0
  41. data/app/views/pgbus/jobs/_failed_table.html.erb +45 -0
  42. data/app/views/pgbus/jobs/index.html.erb +16 -0
  43. data/app/views/pgbus/jobs/show.html.erb +57 -0
  44. data/app/views/pgbus/processes/_processes_table.html.erb +37 -0
  45. data/app/views/pgbus/processes/index.html.erb +3 -0
  46. data/app/views/pgbus/queues/_queues_list.html.erb +41 -0
  47. data/app/views/pgbus/queues/index.html.erb +3 -0
  48. data/app/views/pgbus/queues/show.html.erb +49 -0
  49. data/bun.lock +18 -0
  50. data/config/routes.rb +45 -0
  51. data/docs/README.md +28 -0
  52. data/docs/switch_from_good_job.md +279 -0
  53. data/docs/switch_from_sidekiq.md +226 -0
  54. data/docs/switch_from_solid_queue.md +247 -0
  55. data/exe/pgbus +7 -0
  56. data/lib/generators/pgbus/install_generator.rb +56 -0
  57. data/lib/generators/pgbus/templates/migration.rb.erb +114 -0
  58. data/lib/generators/pgbus/templates/pgbus.yml.erb +74 -0
  59. data/lib/generators/pgbus/templates/pgbus_binstub.erb +7 -0
  60. data/lib/pgbus/active_job/adapter.rb +109 -0
  61. data/lib/pgbus/active_job/executor.rb +107 -0
  62. data/lib/pgbus/batch.rb +153 -0
  63. data/lib/pgbus/cli.rb +84 -0
  64. data/lib/pgbus/client.rb +162 -0
  65. data/lib/pgbus/concurrency/blocked_execution.rb +74 -0
  66. data/lib/pgbus/concurrency/semaphore.rb +66 -0
  67. data/lib/pgbus/concurrency.rb +65 -0
  68. data/lib/pgbus/config_loader.rb +27 -0
  69. data/lib/pgbus/configuration.rb +99 -0
  70. data/lib/pgbus/engine.rb +31 -0
  71. data/lib/pgbus/event.rb +31 -0
  72. data/lib/pgbus/event_bus/handler.rb +76 -0
  73. data/lib/pgbus/event_bus/publisher.rb +42 -0
  74. data/lib/pgbus/event_bus/registry.rb +54 -0
  75. data/lib/pgbus/event_bus/subscriber.rb +30 -0
  76. data/lib/pgbus/process/consumer.rb +113 -0
  77. data/lib/pgbus/process/dispatcher.rb +154 -0
  78. data/lib/pgbus/process/heartbeat.rb +71 -0
  79. data/lib/pgbus/process/signal_handler.rb +49 -0
  80. data/lib/pgbus/process/supervisor.rb +198 -0
  81. data/lib/pgbus/process/worker.rb +153 -0
  82. data/lib/pgbus/serializer.rb +43 -0
  83. data/lib/pgbus/version.rb +5 -0
  84. data/lib/pgbus/web/authentication.rb +24 -0
  85. data/lib/pgbus/web/data_source.rb +406 -0
  86. data/lib/pgbus.rb +49 -0
  87. data/package.json +9 -0
  88. data/sig/pgbus.rbs +4 -0
  89. metadata +198 -0
@@ -0,0 +1,153 @@
1
# frozen_string_literal: true

require "concurrent"

module Pgbus
  module Process
    # Worker process: claims messages from one or more queues and runs them
    # on a fixed-size thread pool via the ActiveJob executor. Supports
    # graceful and immediate shutdown (see SignalHandler) and self-recycles
    # when configured job-count / memory / lifetime limits are exceeded.
    class Worker
      include SignalHandler

      attr_reader :queues, :threads, :config

      # queues:  a queue name or an array of queue names to consume.
      # threads: thread-pool size (default 5).
      # config:  configuration providing polling_interval and recycle limits.
      def initialize(queues:, threads: 5, config: Pgbus.configuration)
        @queues = Array(queues)
        @threads = threads
        @config = config
        @shutting_down = false
        @jobs_processed = Concurrent::AtomicFixnum.new(0)
        @jobs_failed = Concurrent::AtomicFixnum.new(0)
        @started_at = Time.now
        @executor = Pgbus::ActiveJob::Executor.new
        @pool = Concurrent::FixedThreadPool.new(threads)
      end

      # Counters exposed for heartbeats / monitoring.
      def stats
        { jobs_processed: @jobs_processed.value, jobs_failed: @jobs_failed.value, started_at: @started_at }
      end

      # Main loop: poll, claim, dispatch — until shutdown or recycle.
      def run
        setup_signals
        start_heartbeat
        Pgbus.logger.info { "[Pgbus] Worker started: queues=#{queues.join(",")} threads=#{threads} pid=#{::Process.pid}" }

        loop do
          break if @shutting_down
          break if recycle_needed?

          process_signals
          claim_and_execute
        end

        shutdown
      end

      # Finish in-flight jobs, then exit the run loop.
      def graceful_shutdown
        Pgbus.logger.info { "[Pgbus] Worker shutting down gracefully..." }
        @shutting_down = true
      end

      # Kill the pool outright; in-flight jobs are abandoned (their messages
      # become visible again once their visibility timeout elapses).
      def immediate_shutdown
        Pgbus.logger.warn { "[Pgbus] Worker shutting down immediately!" }
        @shutting_down = true
        @pool.kill
      end

      private

      # Fetch up to `idle` messages and post them to the pool.
      #
      # BUGFIX: idle capacity must account for tasks currently RUNNING as
      # well as tasks waiting in the pool's queue. The previous calculation
      # (max_length - queue_length) ignored busy threads, so a fully busy
      # pool with an empty internal queue still claimed `threads` more
      # messages, which then sat invisible and risked exceeding their
      # visibility timeout. (scheduled - completed) = running + queued.
      def claim_and_execute
        in_flight = @pool.scheduled_task_count - @pool.completed_task_count
        idle = @pool.max_length - in_flight
        return sleep(config.polling_interval) if idle <= 0

        messages = fetch_messages(idle)

        if messages.empty?
          sleep(config.polling_interval)
          return
        end

        messages.each do |message|
          queue_name = message.respond_to?(:queue_name) ? message.queue_name : queues.first
          @pool.post { process_message(message, queue_name) }
        end
      end

      # Reads up to qty messages. For multiple queues, reads a proportional
      # share from each queue and truncates to qty. Returns [] on any
      # client error so the run loop keeps going.
      def fetch_messages(qty)
        if queues.size == 1
          Pgbus.client.read_batch(queues.first, qty: qty) || []
        else
          # Multi-queue read: read from each queue proportionally
          per_queue = [(qty / queues.size.to_f).ceil, 1].max
          queues.flat_map do |q|
            Pgbus.client.read_batch(q, qty: per_queue) || []
          end.first(qty)
        end
      rescue StandardError => e
        Pgbus.logger.error { "[Pgbus] Error fetching messages: #{e.message}" }
        []
      end

      # Runs one message through the executor and updates counters.
      # Executor-level failures are signalled via a :failed return value;
      # anything raised out of execute is unexpected and counted as failed.
      def process_message(message, queue_name)
        result = @executor.execute(message, queue_name)
        @jobs_processed.increment
        @jobs_failed.increment if result == :failed
      rescue StandardError => e
        @jobs_failed.increment
        Pgbus.logger.error { "[Pgbus] Unhandled error processing message: #{e.message}" }
      end

      # True when any configured recycle limit has been reached.
      def recycle_needed?
        exceeded_max_jobs? || exceeded_max_memory? || exceeded_max_lifetime?
      end

      def exceeded_max_jobs?
        return false unless config.max_jobs_per_worker && @jobs_processed.value >= config.max_jobs_per_worker

        Pgbus.logger.info { "[Pgbus] Worker recycling: max_jobs reached (#{@jobs_processed.value})" }
        true
      end

      def exceeded_max_memory?
        return false unless config.max_memory_mb && current_memory_mb > config.max_memory_mb

        Pgbus.logger.info { "[Pgbus] Worker recycling: memory limit (#{current_memory_mb}MB > #{config.max_memory_mb}MB)" }
        true
      end

      def exceeded_max_lifetime?
        return false unless config.max_worker_lifetime && (Time.now - @started_at) > config.max_worker_lifetime

        Pgbus.logger.info { "[Pgbus] Worker recycling: lifetime exceeded" }
        true
      end

      # Resident set size in MB: `ps` on macOS, /proc/<pid>/statm elsewhere.
      # NOTE(review): statm is scaled by a hard-coded 4096-byte page size —
      # inaccurate on kernels with larger pages; confirm this is acceptable.
      def current_memory_mb
        if RUBY_PLATFORM.include?("darwin")
          `ps -o rss= -p #{::Process.pid}`.to_i / 1024
        else
          begin
            File.read("/proc/#{::Process.pid}/statm").split[1].to_i * 4096 / (1024 * 1024)
          rescue Errno::ENOENT
            0
          end
        end
      end

      # Registers this process via the heartbeat table and keeps it fresh.
      def start_heartbeat
        @heartbeat = Heartbeat.new(
          kind: "worker",
          metadata: { queues: queues, threads: threads, pid: ::Process.pid }
        )
        @heartbeat.start
      end

      # Drain the pool (30s cap), stop the heartbeat, restore signal traps.
      def shutdown
        Pgbus.logger.info { "[Pgbus] Worker draining thread pool..." }
        @pool.shutdown
        @pool.wait_for_termination(30)
        @heartbeat&.stop
        restore_signals
        Pgbus.logger.info { "[Pgbus] Worker stopped. Processed: #{@jobs_processed.value}, Failed: #{@jobs_failed.value}" }
      end
    end
  end
end
@@ -0,0 +1,43 @@
1
# frozen_string_literal: true

require "json"
# BUGFIX: this file previously required only "json", yet serialize_event
# calls SecureRandom.uuid and Time#iso8601, and deserialize_event calls
# Time.parse — none of which are available without their stdlib requires.
require "securerandom"
require "time"

module Pgbus
  # JSON (de)serialization for ActiveJob payloads and event envelopes.
  module Serializer
    module_function

    # Serializes an ActiveJob instance to a JSON string.
    def serialize_job(active_job)
      data = active_job.serialize
      # GlobalID is handled by ActiveJob's serialize — it converts AR objects
      # to GlobalID URIs automatically. We just JSON-encode the result.
      JSON.generate(data)
    end

    # Inverse of serialize_job; returns a re-hydrated ActiveJob instance.
    def deserialize_job(json_string)
      data = JSON.parse(json_string)
      ActiveJob::Base.deserialize(data)
    end

    # Wraps an event payload in an envelope with an id and a
    # microsecond-precision UTC timestamp. Objects supporting GlobalID are
    # stored by reference ({"_global_id" => uri}) rather than by value.
    def serialize_event(event)
      payload = event.respond_to?(:to_global_id) ? { "_global_id" => event.to_global_id.to_s } : event
      JSON.generate({
        "event_id" => event.respond_to?(:event_id) ? event.event_id : SecureRandom.uuid,
        "payload" => payload,
        "published_at" => Time.now.utc.iso8601(6)
      })
    end

    # Inverse of serialize_event. GlobalID references are located (which may
    # hit the database) before the Event struct is rebuilt.
    def deserialize_event(json_string)
      data = JSON.parse(json_string)
      payload = data["payload"]

      data["payload"] = GlobalID::Locator.locate(payload["_global_id"]) if payload.is_a?(Hash) && payload["_global_id"]

      Event.new(
        event_id: data["event_id"],
        payload: data["payload"],
        published_at: Time.parse(data["published_at"])
      )
    end
  end
end
@@ -0,0 +1,5 @@
1
# frozen_string_literal: true

module Pgbus
  # Gem version, following Semantic Versioning (https://semver.org).
  VERSION = "0.0.1"
end
@@ -0,0 +1,24 @@
1
# frozen_string_literal: true

module Pgbus
  module Web
    # Controller concern guarding the Pgbus web UI.
    #
    # When no auth callable is configured (web_auth is nil), access is open.
    # When one is configured, it is invoked with the current request and must
    # return truthy; otherwise the request is rejected with a bodiless 401.
    module Authentication
      extend ActiveSupport::Concern

      included do
        before_action :authenticate_pgbus!
      end

      private

      def authenticate_pgbus!
        guard = Pgbus.configuration.web_auth

        # Open access when no guard is configured.
        return if guard.nil?
        # Authorized when the guard is callable and approves the request;
        # a configured-but-uncallable guard denies everything (fail closed).
        return if guard.respond_to?(:call) && guard.call(request)

        head :unauthorized
      end
    end
  end
end
@@ -0,0 +1,406 @@
1
# frozen_string_literal: true

require "time"
require "json" # JSON.parse is used throughout; required explicitly

module Pgbus
  module Web
    # Read/maintenance layer backing the Pgbus web dashboard. Wraps the
    # queue client plus direct SQL against the pgbus_* bookkeeping tables
    # and the pgmq.q_* queue tables. Read paths rescue StandardError and
    # return empty/zero values so a broken queue never breaks the UI.
    class DataSource
      def initialize(client: Pgbus.client)
        @client = client
      end

      # Dashboard summary: aggregate queue depths, process and failure counts.
      def summary_stats
        queues = queues_with_metrics
        total_depth = queues.sum { |q| q[:queue_length] }
        total_visible = queues.sum { |q| q[:queue_visible_length] }
        dlq_suffix = Pgbus.configuration.dead_letter_queue_suffix
        dlq_depth = queues.select { |q| q[:name].end_with?(dlq_suffix) }.sum { |q| q[:queue_length] }

        {
          total_queues: queues.size,
          total_depth: total_depth,
          total_visible: total_visible,
          active_processes: processes.count,
          failed_count: failed_events_count,
          dlq_depth: dlq_depth
        }
      end

      # Queues: per-queue metric hashes (see format_metrics). [] on error.
      def queues_with_metrics
        metrics = @client.metrics || []
        Array(metrics).map { |m| format_metrics(m) }
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching queue metrics: #{e.message}" }
        []
      end

      # Metrics for one queue, or nil when missing / on error.
      def queue_detail(name)
        m = @client.metrics(name)
        m ? format_metrics(m) : nil
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching queue detail for #{name}: #{e.message}" }
        nil
      end

      def purge_queue(name)
        @client.purge_queue(name)
      end

      # Jobs (messages in queue tables), newest first, paginated.
      def jobs(queue_name: nil, page: 1, per_page: 25)
        offset = (page - 1) * per_page

        if queue_name
          query_queue_messages(queue_name, per_page, offset)
        else
          all_queue_messages(per_page, offset)
        end
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error reading jobs: #{e.message}" }
        []
      end

      # Single message from a (short) queue name, or nil.
      def job_detail(queue_name, msg_id)
        full_name = Pgbus.configuration.queue_name(queue_name)
        row = connection.select_one(
          "SELECT * FROM pgmq.q_#{sanitize_name(full_name)} WHERE msg_id = $1",
          "Pgbus Job Detail",
          [msg_id.to_i]
        )
        row ? format_message(row, queue_name) : nil
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching job detail: #{e.message}" }
        nil
      end

      # Makes a message immediately visible again (vt = 0) so a worker
      # can re-claim it.
      def retry_job(queue_name, msg_id)
        full_name = Pgbus.configuration.queue_name(queue_name)
        @client.set_visibility_timeout(full_name, msg_id.to_i, vt: 0)
      end

      # Archives (removes from the live queue) a message.
      def discard_job(queue_name, msg_id)
        @client.archive_message(queue_name, msg_id.to_i)
      end

      # Failed events, newest first, paginated. [] on error.
      def failed_events(page: 1, per_page: 25)
        offset = (page - 1) * per_page
        rows = connection.select_all(
          "SELECT * FROM pgbus_failed_events ORDER BY failed_at DESC LIMIT $1 OFFSET $2",
          "Pgbus Failed Events",
          [per_page, offset]
        )
        rows.to_a
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching failed events: #{e.message}" }
        []
      end

      def failed_events_count
        result = connection.select_value("SELECT COUNT(*) FROM pgbus_failed_events")
        result.to_i
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error counting failed events: #{e.message}" }
        0
      end

      def failed_event(id)
        connection.select_one(
          "SELECT * FROM pgbus_failed_events WHERE id = $1",
          "Pgbus Failed Event",
          [id.to_i]
        )
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching failed event #{id}: #{e.message}" }
        nil
      end

      # Re-enqueues one failed event and removes its row. Returns true on
      # success, false when missing or on error.
      def retry_failed_event(id)
        event = failed_event(id)
        return false unless event

        requeue_failed_event(event)
        true
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error retrying failed event #{id}: #{e.message}" }
        false
      end

      def discard_failed_event(id)
        connection.execute("DELETE FROM pgbus_failed_events WHERE id = #{id.to_i}")
        true
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error discarding failed event #{id}: #{e.message}" }
        false
      end

      # Retries every failed event; failures are logged and skipped.
      # Returns the number successfully re-enqueued.
      def retry_all_failed
        count = 0
        connection.select_all("SELECT * FROM pgbus_failed_events").each do |event|
          requeue_failed_event(event)
          count += 1
        rescue StandardError => e
          Pgbus.logger.error { "[Pgbus::Web] Failed to retry event #{event["id"]}: #{e.message}" }
        end
        count
      end

      # Deletes all failed events; returns the number deleted (0 on error).
      def discard_all_failed
        result = connection.execute("DELETE FROM pgbus_failed_events")
        result.cmd_tuples
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error discarding all failed events: #{e.message}" }
        0
      end

      # Dead letter queue
      # Note: DLQ queue names from queues_with_metrics are already fully qualified
      # (e.g., "pgbus_default_dlq"), so we use them directly without re-prefixing.
      def dlq_messages(page: 1, per_page: 25)
        dlq_suffix = Pgbus.configuration.dead_letter_queue_suffix
        queues = queues_with_metrics.select { |q| q[:name].end_with?(dlq_suffix) }
        offset = (page - 1) * per_page

        # Over-fetch from each DLQ, then merge-sort and slice one page.
        messages = queues.flat_map do |q|
          query_queue_messages_raw(q[:name], per_page + offset, 0)
        end

        messages.sort_by { |m| -m[:msg_id].to_i }.slice(offset, per_page) || []
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching DLQ messages: #{e.message}" }
        []
      end

      # Scans every DLQ for a msg_id (ids are unique per pgmq instance).
      def dlq_message_detail(msg_id)
        dlq_suffix = Pgbus.configuration.dead_letter_queue_suffix
        queues = queues_with_metrics.select { |q| q[:name].end_with?(dlq_suffix) }
        queues.each do |q|
          row = connection.select_one(
            "SELECT * FROM pgmq.q_#{sanitize_name(q[:name])} WHERE msg_id = $1",
            "Pgbus DLQ Detail",
            [msg_id.to_i]
          )
          return format_message(row, q[:name]) if row
        end
        nil
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching DLQ message #{msg_id}: #{e.message}" }
        nil
      end

      # Moves a DLQ message back to its original queue transactionally.
      def retry_dlq_message(queue_name, msg_id)
        # queue_name here is the full DLQ name (already prefixed)
        dlq_suffix = Pgbus.configuration.dead_letter_queue_suffix
        original_queue = queue_name.delete_suffix(dlq_suffix)

        row = connection.select_one(
          "SELECT * FROM pgmq.q_#{sanitize_name(queue_name)} WHERE msg_id = $1",
          "Pgbus DLQ Read",
          [msg_id.to_i]
        )
        return false unless row

        @client.transaction do |txn|
          txn.produce(original_queue, row["message"], headers: row["headers"])
          txn.delete(queue_name, msg_id.to_i)
        end
        true
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error retrying DLQ message #{msg_id}: #{e.message}" }
        false
      end

      def discard_dlq_message(queue_name, msg_id)
        # queue_name here is the full DLQ name (already prefixed)
        @client.delete_from_queue(queue_name, msg_id.to_i)
        true
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error discarding DLQ message #{msg_id}: #{e.message}" }
        false
      end

      # Retries up to 1000 DLQ messages; returns the success count.
      def retry_all_dlq
        messages = dlq_messages(page: 1, per_page: 1000)
        count = 0
        messages.each do |m|
          retry_dlq_message(m[:queue_name], m[:msg_id]) && count += 1
        rescue StandardError => e
          Pgbus.logger.debug { "[Pgbus::Web] Error retrying DLQ message #{m[:msg_id]}: #{e.message}" }
          next
        end
        count
      end

      # Discards up to 1000 DLQ messages; returns the success count.
      def discard_all_dlq
        messages = dlq_messages(page: 1, per_page: 1000)
        count = 0
        messages.each do |m|
          discard_dlq_message(m[:queue_name], m[:msg_id]) && count += 1
        rescue StandardError => e
          Pgbus.logger.debug { "[Pgbus::Web] Error discarding DLQ message #{m[:msg_id]}: #{e.message}" }
          next
        end
        count
      end

      # Processes registered via heartbeats; [] on error.
      def processes
        rows = connection.select_all(
          "SELECT * FROM pgbus_processes ORDER BY kind, created_at"
        )
        rows.to_a.map { |r| format_process(r) }
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching processes: #{e.message}" }
        []
      end

      # Processed events (audit trail), newest first, paginated.
      def processed_events(page: 1, per_page: 25)
        offset = (page - 1) * per_page
        rows = connection.select_all(
          "SELECT * FROM pgbus_processed_events ORDER BY processed_at DESC LIMIT $1 OFFSET $2",
          "Pgbus Processed Events",
          [per_page, offset]
        )
        rows.to_a
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching processed events: #{e.message}" }
        []
      end

      def processed_event(id)
        connection.select_one(
          "SELECT * FROM pgbus_processed_events WHERE id = $1",
          "Pgbus Processed Event",
          [id.to_i]
        )
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching processed event #{id}: #{e.message}" }
        nil
      end

      def processed_events_count
        result = connection.select_value("SELECT COUNT(*) FROM pgbus_processed_events")
        result.to_i
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error counting processed events: #{e.message}" }
        0
      end

      # Re-publishes an event's payload to all matching subscribers.
      def replay_event(event)
        routing_key = event["routing_key"] || event["handler_class"]
        return false unless routing_key

        @client.publish_to_topic(routing_key, event["payload"] || "{}")
        true
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error replaying event: #{e.message}" }
        false
      end

      # Subscriber registry snapshot for the UI; [] on error.
      def registered_subscribers
        EventBus::Registry.instance.subscribers.map do |s|
          { pattern: s.pattern, handler_class: s.handler_class.name, queue_name: s.queue_name }
        end
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error fetching subscribers: #{e.message}" }
        []
      end

      private

      def connection
        ActiveRecord::Base.connection
      end

      # Atomically re-enqueues one pgbus_failed_events row and deletes it.
      # Extracted so retry_failed_event and retry_all_failed share one
      # implementation instead of duplicating the parse/send/delete logic.
      # Raises on failure; callers decide how to report it.
      def requeue_failed_event(event)
        payload = JSON.parse(event["payload"])
        headers = event["headers"]
        headers = JSON.parse(headers) if headers.is_a?(String)

        connection.transaction do
          @client.send_message(event["queue_name"], payload, headers: headers)
          connection.execute("DELETE FROM pgbus_failed_events WHERE id = #{event["id"].to_i}")
        end
      end

      # Messages for a short queue name; tags each hash with queue: name.
      def query_queue_messages(name, limit, offset)
        full_name = Pgbus.configuration.queue_name(name)
        query_queue_messages_raw(full_name, limit, offset).map { |m| m.merge(queue: name) }
      end

      # Raw page of messages from a fully-qualified queue table; [] on error.
      def query_queue_messages_raw(full_name, limit, offset)
        rows = connection.select_all(
          "SELECT * FROM pgmq.q_#{sanitize_name(full_name)} ORDER BY msg_id DESC LIMIT $1 OFFSET $2",
          "Pgbus Queue Messages",
          [limit, offset]
        )
        rows.to_a.map { |r| format_message(r, full_name) }
      rescue StandardError => e
        Pgbus.logger.debug { "[Pgbus::Web] Error querying messages from #{full_name}: #{e.message}" }
        []
      end

      # Cross-queue page (non-DLQ queues), merged newest-first.
      def all_queue_messages(limit, offset)
        dlq_suffix = Pgbus.configuration.dead_letter_queue_suffix
        queues = queues_with_metrics.reject { |q| q[:name].end_with?(dlq_suffix) }
        messages = queues.flat_map do |q|
          query_queue_messages_raw(q[:name], limit + offset, 0)
        end
        messages.sort_by { |m| -m[:msg_id].to_i }.slice(offset, limit) || []
      end

      def format_metrics(m)
        {
          name: m.queue_name.to_s,
          queue_length: m.queue_length.to_i,
          queue_visible_length: m.queue_visible_length.to_i,
          oldest_msg_age_sec: m.oldest_msg_age_sec&.to_i,
          newest_msg_age_sec: m.newest_msg_age_sec&.to_i,
          total_messages: m.total_messages.to_i
        }
      end

      def format_message(row, queue_name)
        {
          msg_id: row["msg_id"].to_i,
          read_ct: row["read_ct"].to_i,
          enqueued_at: row["enqueued_at"],
          last_read_at: row["last_read_at"],
          vt: row["vt"],
          message: row["message"],
          headers: row["headers"],
          queue_name: queue_name
        }
      end

      # Marks a process unhealthy when its heartbeat is older than the
      # worker heartbeat liveness threshold.
      def format_process(row)
        heartbeat = row["last_heartbeat_at"]
        heartbeat_time = heartbeat.is_a?(String) ? Time.parse(heartbeat) : heartbeat
        stale = heartbeat_time && (Time.now - heartbeat_time) > Process::Heartbeat::ALIVE_THRESHOLD

        {
          id: row["id"].to_i,
          kind: row["kind"],
          hostname: row["hostname"],
          pid: row["pid"].to_i,
          metadata: row["metadata"].is_a?(String) ? JSON.parse(row["metadata"]) : row["metadata"],
          last_heartbeat_at: heartbeat_time,
          healthy: !stale,
          created_at: row["created_at"]
        }
      end

      # Defense-in-depth: queue names are interpolated into table names, so
      # strip everything except [a-zA-Z0-9_] to prevent SQL injection.
      def sanitize_name(name)
        name.gsub(/[^a-zA-Z0-9_]/, "")
      end
    end
  end
end
data/lib/pgbus.rb ADDED
@@ -0,0 +1,49 @@
1
# frozen_string_literal: true

require "zeitwerk"

module Pgbus
  # Base error class; all Pgbus-specific errors subclass this.
  class Error < StandardError; end
  class ConfigurationError < Error; end
  class SerializationError < Error; end
  class QueueNotFoundError < Error; end
  class DeadLetterError < Error; end
  class ConcurrencyLimitExceeded < Error; end

  class << self
    # Memoized Zeitwerk loader for the gem's lib/ tree. The generators
    # directory is excluded (Rails loads generators itself).
    def loader
      @loader ||= Zeitwerk::Loader.for_gem.tap do |zw|
        zw.inflector.inflect("pgbus" => "Pgbus", "cli" => "CLI", "dsl" => "DSL")
        zw.ignore("#{__dir__}/generators")
      end
    end

    # Lazily built, process-wide configuration.
    def configuration
      @configuration ||= Configuration.new
    end

    # Yields the configuration for block-style setup.
    def configure
      yield configuration
    end

    # Lazily built, process-wide queue client.
    def client
      @client ||= Client.new(configuration)
    end

    # Closes the client and drops memoized state; the next access to
    # configuration/client rebuilds from scratch.
    def reset!
      @client&.close
      @client = nil
      @configuration = nil
    end

    def logger
      configuration.logger
    end
  end

  loader.setup
end

require "pgbus/engine" if defined?(Rails::Engine)
data/package.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "private": true,
3
+ "engines": {
4
+ "bun": ">=1.1.0"
5
+ },
6
+ "devDependencies": {
7
+ "playwright": "^1.50.0"
8
+ }
9
+ }
data/sig/pgbus.rbs ADDED
@@ -0,0 +1,4 @@
1
+ module Pgbus
2
+ VERSION: String
3
+ # See the writing guide of rbs: https://github.com/ruby/rbs#guides
4
+ end