claude-agent-framework 1.0.0
- package/README.md +128 -0
- package/bin/claude-framework +3 -0
- package/framework/agents/design-lead.md +240 -0
- package/framework/agents/product-owner.md +179 -0
- package/framework/agents/tech-lead.md +226 -0
- package/framework/commands/ayuda.md +127 -0
- package/framework/commands/añadir.md +98 -0
- package/framework/commands/backup.md +397 -0
- package/framework/commands/cambiar.md +110 -0
- package/framework/commands/cloud.md +457 -0
- package/framework/commands/code.md +142 -0
- package/framework/commands/debug.md +334 -0
- package/framework/commands/deploy.md +383 -0
- package/framework/commands/deshacer.md +120 -0
- package/framework/commands/estado.md +218 -0
- package/framework/commands/explica.md +227 -0
- package/framework/commands/feature.md +120 -0
- package/framework/commands/git.md +427 -0
- package/framework/commands/historial.md +202 -0
- package/framework/commands/learn.md +408 -0
- package/framework/commands/movil.md +245 -0
- package/framework/commands/nuevo.md +118 -0
- package/framework/commands/plan.md +134 -0
- package/framework/commands/prd.md +113 -0
- package/framework/commands/probar.md +148 -0
- package/framework/commands/revisar.md +208 -0
- package/framework/commands/seeds.md +230 -0
- package/framework/commands/seguridad.md +226 -0
- package/framework/commands/tasks.md +157 -0
- package/framework/skills/architecture/algorithms.md +970 -0
- package/framework/skills/architecture/clean-code.md +1080 -0
- package/framework/skills/architecture/design-patterns.md +1984 -0
- package/framework/skills/architecture/functional-programming.md +972 -0
- package/framework/skills/architecture/solid.md +991 -0
- package/framework/skills/cloud/cloud-aws.md +848 -0
- package/framework/skills/cloud/cloud-azure.md +931 -0
- package/framework/skills/cloud/cloud-gcp.md +848 -0
- package/framework/skills/cloud/message-queues.md +1229 -0
- package/framework/skills/core/accessibility.md +401 -0
- package/framework/skills/core/api.md +474 -0
- package/framework/skills/core/authentication.md +306 -0
- package/framework/skills/core/authorization.md +388 -0
- package/framework/skills/core/background-jobs.md +341 -0
- package/framework/skills/core/caching.md +473 -0
- package/framework/skills/core/code-review.md +341 -0
- package/framework/skills/core/controllers.md +290 -0
- package/framework/skills/core/cua.md +285 -0
- package/framework/skills/core/documentation.md +472 -0
- package/framework/skills/core/file-uploads.md +351 -0
- package/framework/skills/core/hotwire-native.md +296 -0
- package/framework/skills/core/hotwire.md +278 -0
- package/framework/skills/core/i18n.md +334 -0
- package/framework/skills/core/imports-exports.md +750 -0
- package/framework/skills/core/infrastructure.md +337 -0
- package/framework/skills/core/models.md +228 -0
- package/framework/skills/core/notifications.md +672 -0
- package/framework/skills/core/payments.md +581 -0
- package/framework/skills/core/performance.md +361 -0
- package/framework/skills/core/rails-scaffold.md +131 -0
- package/framework/skills/core/search.md +518 -0
- package/framework/skills/core/security.md +565 -0
- package/framework/skills/core/seeds.md +307 -0
- package/framework/skills/core/seo.md +542 -0
- package/framework/skills/core/testing.md +393 -0
- package/framework/skills/core/views.md +260 -0
- package/framework/skills/core/websockets.md +564 -0
- package/framework/skills/data/advanced-sql.md +1204 -0
- package/framework/skills/data/nosql.md +1141 -0
- package/framework/skills/devops/containers-advanced.md +1237 -0
- package/framework/skills/devops/debugging.md +834 -0
- package/framework/skills/devops/git-workflow.md +752 -0
- package/framework/skills/devops/networking.md +932 -0
- package/framework/skills/devops/shell-scripting.md +1132 -0
- package/framework/sub-agents/architecture-patterns-agent.md +1450 -0
- package/framework/sub-agents/cloud-agent.md +677 -0
- package/framework/sub-agents/data.md +504 -0
- package/framework/sub-agents/debugging-agent.md +554 -0
- package/framework/sub-agents/devops.md +483 -0
- package/framework/sub-agents/docs.md +176 -0
- package/framework/sub-agents/frontend-dev.md +349 -0
- package/framework/sub-agents/git-workflow-agent.md +697 -0
- package/framework/sub-agents/integrations.md +630 -0
- package/framework/sub-agents/native-dev.md +434 -0
- package/framework/sub-agents/qa.md +138 -0
- package/framework/sub-agents/rails-dev.md +375 -0
- package/framework/sub-agents/security.md +526 -0
- package/framework/sub-agents/ui.md +437 -0
- package/framework/sub-agents/ux.md +284 -0
- package/framework/templates/api-spec.md +500 -0
- package/framework/templates/component-spec.md +248 -0
- package/framework/templates/feature.json +13 -0
- package/framework/templates/model-spec.md +318 -0
- package/framework/templates/prd-template.md +80 -0
- package/framework/templates/task-plan.md +122 -0
- package/framework/templates/task-user-story.md +52 -0
- package/framework/templates/technical-spec.md +260 -0
- package/framework/templates/user-story.md +95 -0
- package/package.json +42 -0
- package/project-templates/CLAUDE.md +42 -0
- package/project-templates/contexts/architecture.md +25 -0
- package/project-templates/contexts/conventions.md +46 -0
- package/project-templates/contexts/design-system.md +47 -0
- package/project-templates/contexts/requirements.md +38 -0
- package/project-templates/contexts/stack.md +30 -0
- package/project-templates/history/active/models.md +11 -0
- package/project-templates/history/changelog.md +15 -0
- package/project-templates/workspace/.gitkeep +0 -0
- package/src/cli.js +52 -0
- package/src/init.js +104 -0
- package/src/status.js +75 -0
- package/src/update.js +88 -0

@@ -0,0 +1,1229 @@

# Skill: Message Queues and Messaging Systems

## Purpose

Implement queueing and messaging systems for asynchronous processing, service decoupling, and event-driven architectures in Rails applications.

## Core Concepts

### Terminology

```markdown
## Main components

| Concept | Description |
|----------|-------------|
| Producer | Service that sends messages to the queue |
| Consumer | Service that processes messages from the queue |
| Queue | FIFO queue where messages are stored |
| Topic | Channel for distributing messages to multiple subscribers |
| Exchange | Router that directs messages to queues (RabbitMQ) |
| Partition | Subdivision of a topic for parallelism (Kafka) |
| Offset | A consumer's position within a partition |
| Acknowledgment | Confirmation that a message was processed |
| Dead Letter Queue | Queue for messages that failed processing |

## Messaging patterns

| Pattern | Description |
|--------|-------------|
| Point-to-Point | A message is processed by exactly one consumer |
| Pub/Sub | A message is distributed to multiple subscribers |
| Request-Reply | Synchronous communication on top of messaging |
| Fan-out | One message produces multiple derived messages |
| Saga | Distributed transactions with compensation |
```

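The two core delivery semantics can be seen without any broker. A minimal, broker-free sketch in plain Ruby (all names are illustrative; `Queue` is Ruby's standard thread-safe FIFO):

```ruby
# Point-to-point: each message is consumed by exactly one worker.
queue = Queue.new # Ruby's built-in thread-safe FIFO
queue << { event: "order.created", order_id: 1 }

worker = Thread.new do
  message = queue.pop # only one consumer ever receives this message
  puts "processed #{message[:event]}"
end
worker.join

# Pub/sub: every subscriber receives its own copy of each message.
subscribers = []
subscribers << ->(msg) { puts "mailer got #{msg[:event]}" }
subscribers << ->(msg) { puts "analytics got #{msg[:event]}" }

publish = ->(msg) { subscribers.each { |sub| sub.call(msg) } }
publish.call(event: "order.created", order_id: 1)
```
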
## Solid Queue (Rails 8)

### Basic configuration

```ruby
# Gemfile (included in Rails 8)
gem "solid_queue"

# Install
bin/rails solid_queue:install
bin/rails db:migrate

# config/application.rb
config.active_job.queue_adapter = :solid_queue
```

### Worker configuration

```yaml
# config/queue.yml
default: &default
  dispatchers:
    - polling_interval: 1
      batch_size: 500
  workers:
    - queues: "*"
      threads: 3
      processes: 1
      polling_interval: 0.1

development:
  <<: *default

production:
  <<: *default
  dispatchers:
    - polling_interval: 0.5
      batch_size: 1000
  workers:
    # Worker for critical tasks
    - queues: [critical]
      threads: 5
      processes: 2
      polling_interval: 0.1

    # Worker for normal tasks
    - queues: [default, mailers]
      threads: 5
      processes: 2
      polling_interval: 0.5

    # Worker for low-priority tasks
    - queues: [low, reports]
      threads: 3
      processes: 1
      polling_interval: 2
```

### Priority jobs

```ruby
# app/jobs/critical_payment_job.rb
class CriticalPaymentJob < ApplicationJob
  queue_as :critical

  retry_on PaymentGatewayError, wait: 5.seconds, attempts: 5
  discard_on PaymentDeclinedError

  def perform(payment_id)
    payment = Payment.find(payment_id)
    PaymentProcessor.new(payment).process!
  end
end

# app/jobs/send_report_job.rb
class SendReportJob < ApplicationJob
  queue_as :low

  def perform(user_id, report_type)
    user = User.find(user_id)
    report = ReportGenerator.new(user, report_type).generate
    ReportMailer.send_report(user, report).deliver_now
  end
end
```

### Recurring jobs

```yaml
# config/recurring.yml
production:
  cleanup_sessions:
    class: CleanupSessionsJob
    schedule: every day at 3am

  daily_digest:
    class: DailyDigestJob
    schedule: every day at 8am

  sync_inventory:
    class: SyncInventoryJob
    schedule: every 15 minutes

  generate_reports:
    class: GenerateReportsJob
    schedule: every monday at 6am
    args: [weekly]
```

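The classes referenced above are plain Active Job classes. As a minimal sketch, `CleanupSessionsJob` might look like this (the `Session` model and its `created_at` column are assumptions for illustration):

```ruby
# app/jobs/cleanup_sessions_job.rb
# Hypothetical sketch: assumes a Session model with a created_at column.
class CleanupSessionsJob < ApplicationJob
  queue_as :low

  def perform
    # delete_all is idempotent: re-running removes nothing new, which
    # matters because recurring jobs can overlap or be retried.
    Session.where(created_at: ...30.days.ago).delete_all
  end
end
```
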
## Sidekiq

### Configuration

```ruby
# Gemfile
gem "sidekiq"
gem "sidekiq-scheduler" # For recurring jobs

# config/application.rb
config.active_job.queue_adapter = :sidekiq

# config/initializers/sidekiq.rb
Sidekiq.configure_server do |config|
  config.redis = {
    url: ENV.fetch("REDIS_URL") { "redis://localhost:6379/0" },
    pool_timeout: 5
  }

  # Load the scheduler
  config.on(:startup) do
    Sidekiq.schedule = YAML.load_file(Rails.root.join("config/sidekiq_schedule.yml"))
    Sidekiq::Scheduler.reload_schedule!
  end
end

Sidekiq.configure_client do |config|
  config.redis = {
    url: ENV.fetch("REDIS_URL") { "redis://localhost:6379/0" },
    pool_timeout: 5
  }
end
```

```yaml
# config/sidekiq.yml
:concurrency: 10
:queues:
  - [critical, 3]
  - [default, 2]
  - [low, 1]

production:
  :concurrency: 25
```

### Native Sidekiq workers

```ruby
# app/workers/heavy_processing_worker.rb
class HeavyProcessingWorker
  include Sidekiq::Worker

  sidekiq_options queue: :low,
                  retry: 5,
                  backtrace: true,
                  dead: true

  sidekiq_retry_in do |count, exception|
    case exception
    when RateLimitError
      60 * (count + 1) # 1min, 2min, 3min...
    else
      (count**4) + 15 # Exponential backoff
    end
  end

  def perform(record_id, options = {})
    record = Record.find(record_id)
    # Heavy processing...
  end
end

# Enqueue
HeavyProcessingWorker.perform_async(record.id)
HeavyProcessingWorker.perform_in(1.hour, record.id)
HeavyProcessingWorker.perform_at(Date.tomorrow.noon, record.id)
```

### Batches (Sidekiq Pro)

```ruby
# Process multiple items as one batch
batch = Sidekiq::Batch.new
batch.description = "Import users from CSV"
batch.on(:complete, ImportCallbacks, file_id: file.id)
batch.on(:success, ImportCallbacks, file_id: file.id)

batch.jobs do
  users_data.each do |user_data|
    ImportUserWorker.perform_async(user_data)
  end
end

# Callbacks
class ImportCallbacks
  def on_complete(status, options)
    file = ImportFile.find(options["file_id"])
    file.update!(status: "completed", processed: status.total)
  end

  def on_success(status, options)
    AdminNotifier.import_success(options["file_id"]).deliver_later
  end
end
```

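Worker behavior is straightforward to cover in tests with Sidekiq's bundled fake mode, where enqueued jobs accumulate in memory instead of Redis. A minimal sketch:

```ruby
# test/workers/heavy_processing_worker_test.rb
require "test_helper"
require "sidekiq/testing"

class HeavyProcessingWorkerTest < ActiveSupport::TestCase
  test "enqueues into the low queue" do
    Sidekiq::Testing.fake! do
      HeavyProcessingWorker.perform_async(123)

      # Jobs accumulate in an in-memory array instead of Redis
      assert_equal 1, HeavyProcessingWorker.jobs.size
      assert_equal "low", HeavyProcessingWorker.jobs.first["queue"]

      # HeavyProcessingWorker.drain would execute the queued jobs inline
      # (left commented here because Record is not defined in this sketch)
    end
  end
end
```
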
## RabbitMQ

### Configuration with Bunny

```ruby
# Gemfile
gem "bunny"
gem "sneakers" # For workers

# config/initializers/rabbitmq.rb
require "bunny"

RABBITMQ = Bunny.new(
  host: ENV.fetch("RABBITMQ_HOST") { "localhost" },
  port: ENV.fetch("RABBITMQ_PORT") { 5672 },
  user: ENV.fetch("RABBITMQ_USER") { "guest" },
  password: ENV.fetch("RABBITMQ_PASSWORD") { "guest" },
  vhost: ENV.fetch("RABBITMQ_VHOST") { "/" },
  automatically_recover: true,
  recovery_attempts: 10
)
RABBITMQ.start

# Create a channel for publishing
RABBITMQ_CHANNEL = RABBITMQ.create_channel
```

### Exchanges and Queues

```ruby
# app/services/rabbitmq_publisher.rb
class RabbitmqPublisher
  def initialize
    @channel = RABBITMQ.create_channel
  end

  # Direct exchange - messages to a specific queue
  def publish_direct(queue_name, message, options = {})
    @channel.queue(queue_name, durable: true) # declare the queue so it exists
    @channel.default_exchange.publish(
      message.to_json,
      routing_key: queue_name,
      persistent: true,
      content_type: "application/json",
      **options
    )
  end

  # Fanout exchange - messages to every bound queue
  def publish_fanout(exchange_name, message)
    exchange = @channel.fanout(exchange_name, durable: true)
    exchange.publish(
      message.to_json,
      persistent: true,
      content_type: "application/json"
    )
  end

  # Topic exchange - messages routed by routing-key pattern
  def publish_topic(exchange_name, routing_key, message)
    exchange = @channel.topic(exchange_name, durable: true)
    exchange.publish(
      message.to_json,
      routing_key: routing_key,
      persistent: true,
      content_type: "application/json"
    )
  end

  # Headers exchange - messages routed by headers
  def publish_headers(exchange_name, headers, message)
    exchange = @channel.headers(exchange_name, durable: true)
    exchange.publish(
      message.to_json,
      headers: headers,
      persistent: true,
      content_type: "application/json"
    )
  end
end

# Usage
publisher = RabbitmqPublisher.new
publisher.publish_topic("orders", "order.created.premium", { order_id: 123 })
```

### Workers with Sneakers

```ruby
# config/initializers/sneakers.rb
Sneakers.configure(
  connection: RABBITMQ,
  workers: 4,
  threads: 10,
  prefetch: 10,
  timeout_job_after: 60,
  ack: true,
  heartbeat: 30,
  hooks: {
    before_fork: -> { ActiveRecord::Base.connection_pool.disconnect! },
    after_fork: -> { ActiveRecord::Base.establish_connection }
  }
)

# app/workers/order_worker.rb
class OrderWorker
  include Sneakers::Worker

  from_queue "orders",
             exchange: "orders_exchange",
             exchange_type: :topic,
             routing_key: "order.#",
             durable: true

  def work(raw_message)
    message = JSON.parse(raw_message)

    case message["event"]
    when "order.created"
      handle_order_created(message["data"])
    when "order.paid"
      handle_order_paid(message["data"])
    end

    ack!
  rescue StandardError => e
    Rails.logger.error "OrderWorker error: #{e.message}"
    requeue! # Put the message back on the queue (reject! would discard it)
  end

  private

  def handle_order_created(data)
    order = Order.find(data["order_id"])
    OrderConfirmationMailer.notify(order).deliver_now
  end

  def handle_order_paid(data)
    order = Order.find(data["order_id"])
    FulfillmentService.new(order).process
  end
end
```

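Sneakers workers run in dedicated processes, separate from the Rails server. The gem ships a rake task for this; a minimal sketch of wiring it up (the worker name is the one defined above):

```ruby
# Rakefile
require "sneakers/tasks"

# Then start the worker processes from the shell:
#   WORKERS=OrderWorker bundle exec rake sneakers:run
```
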
## Apache Kafka

### Configuration

```ruby
# Gemfile
gem "ruby-kafka"
# Or, for Rails
gem "karafka"

# config/initializers/kafka.rb
require "kafka"

KAFKA = Kafka.new(
  seed_brokers: ENV.fetch("KAFKA_BROKERS") { "localhost:9092" }.split(","),
  client_id: "rails-app",
  logger: Rails.logger,
  ssl_ca_cert: ENV["KAFKA_SSL_CA_CERT"],
  ssl_client_cert: ENV["KAFKA_SSL_CLIENT_CERT"],
  ssl_client_cert_key: ENV["KAFKA_SSL_CLIENT_KEY"]
)
```

### Producer

```ruby
# app/services/kafka_producer.rb
class KafkaProducer
  def initialize
    @producer = KAFKA.producer(
      ack_timeout: 5,
      required_acks: :all,
      max_retries: 3,
      retry_backoff: 1
    )
  end

  def publish(topic, message, key: nil, partition_key: nil, headers: {})
    @producer.produce(
      message.to_json,
      topic: topic,
      key: key,
      partition_key: partition_key,
      headers: headers.merge(
        "produced_at" => Time.current.iso8601,
        "producer" => "rails-app"
      )
    )
  end

  def deliver
    @producer.deliver_messages
  rescue Kafka::DeliveryFailed => e
    Rails.logger.error "Kafka delivery failed: #{e.message}"
    raise
  end

  def publish_sync(topic, message, **options)
    publish(topic, message, **options)
    deliver
  end

  def shutdown
    @producer.shutdown
  end
end

# Usage
producer = KafkaProducer.new
producer.publish("user-events", { event: "user.created", user_id: user.id })
producer.publish("user-events", { event: "user.updated", user_id: user.id })
producer.deliver # Send the batch
```

### Consumer

```ruby
# app/consumers/kafka_consumer.rb
class KafkaConsumer
  def initialize(topics, group_id: "rails-app-consumer")
    @consumer = KAFKA.consumer(
      group_id: group_id,
      offset_commit_interval: 10,
      offset_commit_threshold: 100,
      heartbeat_interval: 10
    )

    topics.each do |topic|
      @consumer.subscribe(topic, start_from_beginning: false)
    end
  end

  def consume
    @consumer.each_message do |message|
      process_message(message)
    rescue StandardError => e
      handle_error(e, message)
    end
  end

  def consume_batch
    @consumer.each_batch(max_wait_time: 5) do |batch|
      batch.messages.each do |message|
        process_message(message)
      end
    end
  end

  private

  def process_message(message)
    payload = JSON.parse(message.value)

    Rails.logger.info "Processing: topic=#{message.topic} " \
                      "partition=#{message.partition} " \
                      "offset=#{message.offset}"

    handler = find_handler(message.topic, payload["event"])
    handler.call(payload)
  end

  def find_handler(topic, event)
    handlers = {
      "user-events" => {
        "user.created" => ->(data) { UserCreatedHandler.new(data).call },
        "user.updated" => ->(data) { UserUpdatedHandler.new(data).call }
      },
      "order-events" => {
        "order.placed" => ->(data) { OrderPlacedHandler.new(data).call }
      }
    }

    handlers.dig(topic, event) || ->(_data) { Rails.logger.warn "Unknown event: #{event}" }
  end

  def handle_error(error, message)
    Rails.logger.error "Consumer error: #{error.message}"
    Rails.logger.error "Failed message: #{message.value}"

    # Send to the dead letter topic
    dead_letter_producer = KafkaProducer.new
    dead_letter_producer.publish_sync(
      "#{message.topic}-dlq",
      {
        original_message: message.value,
        error: error.message,
        failed_at: Time.current.iso8601
      }
    )
  end
end

# Start the consumer (in a separate process)
# bin/kafka_consumer
consumer = KafkaConsumer.new(["user-events", "order-events"])
consumer.consume
```

### Karafka (Kafka framework)

```ruby
# Gemfile
gem "karafka"

# karafka.rb
class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka = {
      "bootstrap.servers": ENV.fetch("KAFKA_BROKERS") { "localhost:9092" }
    }
    config.client_id = "rails-app"
    config.consumer_persistence = true
  end

  routes.draw do
    topic :user_events do
      consumer UserEventsConsumer
    end

    topic :order_events do
      consumer OrderEventsConsumer
      dead_letter_queue(topic: :order_events_dlq, max_retries: 3)
    end

    topic :notifications do
      consumer NotificationsConsumer
      max_messages 100
    end
  end
end

# app/consumers/user_events_consumer.rb
class UserEventsConsumer < Karafka::BaseConsumer
  def consume
    messages.each do |message|
      payload = message.payload

      case payload["event"]
      when "user.created"
        handle_user_created(payload)
      when "user.deleted"
        handle_user_deleted(payload)
      end
    end
  end

  private

  def handle_user_created(payload)
    WelcomeEmailJob.perform_later(payload["user_id"])
  end

  def handle_user_deleted(payload)
    DataCleanupJob.perform_later(payload["user_id"])
  end
end
```

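Both the plain ruby-kafka consumer and Karafka run as long-lived processes outside the web server. A sketch of the corresponding Procfile entries (process names are illustrative; `karafka server` is Karafka's standard run command):

```yaml
# Procfile (illustrative process names)
kafka_consumer: bundle exec ruby bin/kafka_consumer
karafka: bundle exec karafka server
```
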
## AWS SQS/SNS

### SQS (Simple Queue Service)

```ruby
# Gemfile
gem "aws-sdk-sqs"

# config/initializers/sqs.rb
require "aws-sdk-sqs"

SQS = Aws::SQS::Client.new(
  region: ENV.fetch("AWS_REGION") { "us-east-1" },
  credentials: Aws::Credentials.new(
    ENV["AWS_ACCESS_KEY_ID"],
    ENV["AWS_SECRET_ACCESS_KEY"]
  )
)

# app/services/sqs_publisher.rb
class SqsPublisher
  def initialize(queue_url)
    @queue_url = queue_url
    @client = SQS
  end

  def publish(message, delay_seconds: 0, message_attributes: {})
    @client.send_message(
      queue_url: @queue_url,
      message_body: message.to_json,
      delay_seconds: delay_seconds,
      message_attributes: build_attributes(message_attributes)
    )
  end

  def publish_batch(messages)
    entries = messages.map.with_index do |msg, idx|
      {
        id: idx.to_s,
        message_body: msg.to_json
      }
    end

    @client.send_message_batch(
      queue_url: @queue_url,
      entries: entries
    )
  end

  private

  def build_attributes(attrs)
    attrs.transform_values do |value|
      { string_value: value.to_s, data_type: "String" }
    end
  end
end

# app/services/sqs_consumer.rb
class SqsConsumer
  def initialize(queue_url, visibility_timeout: 30)
    @queue_url = queue_url
    @visibility_timeout = visibility_timeout
    @client = SQS
  end

  def poll(max_messages: 10, wait_time: 20, &block)
    loop do
      response = @client.receive_message(
        queue_url: @queue_url,
        max_number_of_messages: max_messages,
        wait_time_seconds: wait_time,
        visibility_timeout: @visibility_timeout
      )

      response.messages.each do |message|
        process_message(message, &block)
      end
    end
  end

  private

  def process_message(message, &block)
    payload = JSON.parse(message.body)
    block&.call(payload)

    @client.delete_message(
      queue_url: @queue_url,
      receipt_handle: message.receipt_handle
    )
  rescue StandardError => e
    Rails.logger.error "SQS processing error: #{e.message}"
    # The message becomes visible again after the visibility timeout
  end
end
```

### SNS (Simple Notification Service)

```ruby
# Gemfile
gem "aws-sdk-sns"

# app/services/sns_publisher.rb
class SnsPublisher
  def initialize
    @client = Aws::SNS::Client.new(
      region: ENV.fetch("AWS_REGION") { "us-east-1" }
    )
  end

  def publish(topic_arn, message, subject: nil, attributes: {})
    @client.publish(
      topic_arn: topic_arn,
      message: message.to_json,
      subject: subject,
      message_attributes: build_attributes(attributes)
    )
  end

  # Publish to multiple subscribers with a filter attribute
  def publish_with_filter(topic_arn, message, filter_key:, filter_value:)
    @client.publish(
      topic_arn: topic_arn,
      message: message.to_json,
      message_attributes: {
        filter_key => {
          data_type: "String",
          string_value: filter_value
        }
      }
    )
  end

  private

  def build_attributes(attrs)
    attrs.transform_values do |value|
      { data_type: "String", string_value: value.to_s }
    end
  end
end
```

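SNS and SQS are typically combined: a topic fans out to one queue per consuming service, and filter policies decide which subscriber sees which message (this is what makes `publish_with_filter` above selective). A minimal one-time setup sketch; the ARNs are placeholders:

```ruby
# One-time setup sketch: subscribe an SQS queue to an SNS topic.
sns = Aws::SNS::Client.new(region: ENV.fetch("AWS_REGION") { "us-east-1" })

sns.subscribe(
  topic_arn: "arn:aws:sns:us-east-1:123456789012:orders",
  protocol: "sqs",
  endpoint: "arn:aws:sqs:us-east-1:123456789012:orders-mailer",
  attributes: {
    # Only deliver messages whose "tier" attribute is "premium"
    "FilterPolicy" => { tier: ["premium"] }.to_json,
    # Deliver the raw message body instead of the SNS envelope
    "RawMessageDelivery" => "true"
  }
)
```
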
### Shoryuken (Sidekiq-like for SQS)

```ruby
# Gemfile
gem "shoryuken"
```

```yaml
# config/shoryuken.yml
concurrency: 25
queues:
  - [critical, 3]
  - [default, 2]
  - [low, 1]
```

```ruby
# app/workers/sqs_worker.rb
class SqsWorker
  include Shoryuken::Worker

  shoryuken_options queue: "default",
                    auto_delete: true,
                    body_parser: :json

  def perform(sqs_msg, body)
    case body["event"]
    when "order.created"
      process_order(body["data"])
    when "user.registered"
      send_welcome_email(body["data"])
    end
  end

  private

  def process_order(data)
    order = Order.find(data["order_id"])
    OrderProcessor.new(order).process
  end

  def send_welcome_email(data)
    user = User.find(data["user_id"])
    WelcomeMailer.welcome(user).deliver_now
  end
end
```

## Redis Pub/Sub

### Publisher

```ruby
# app/services/redis_publisher.rb
class RedisPublisher
  def initialize
    @redis = Redis.new(url: ENV["REDIS_URL"])
  end

  def publish(channel, message)
    @redis.publish(channel, message.to_json)
  end

  def publish_event(event_type, payload)
    message = {
      event: event_type,
      payload: payload,
      timestamp: Time.current.iso8601,
      producer: "rails-app"
    }
    publish("events:#{event_type.split('.').first}", message)
  end
end

# Usage
publisher = RedisPublisher.new
publisher.publish_event("order.created", { order_id: 123, total: 99.99 })
```

### Subscriber

```ruby
# app/services/redis_subscriber.rb
class RedisSubscriber
  def initialize(channels)
    @redis = Redis.new(url: ENV["REDIS_URL"])
    @channels = channels
  end

  def subscribe
    @redis.subscribe(*@channels) do |on|
      on.message do |channel, message|
        process_message(channel, JSON.parse(message))
      end

      on.subscribe do |channel, subscriptions|
        Rails.logger.info "Subscribed to #{channel} (#{subscriptions} subscriptions)"
      end
    end
  end

  def psubscribe(patterns)
    @redis.psubscribe(*patterns) do |on|
      on.pmessage do |pattern, channel, message|
        process_message(channel, JSON.parse(message), pattern: pattern)
      end
    end
  end

  private

  def process_message(channel, message, pattern: nil)
    Rails.logger.info "Received on #{channel}: #{message['event']}"

    handler_class = find_handler(message["event"])
    handler_class.new(message["payload"]).call if handler_class
  rescue StandardError => e
    Rails.logger.error "Redis subscriber error: #{e.message}"
  end

  def find_handler(event_type)
    {
      "order.created" => OrderCreatedHandler,
      "user.registered" => UserRegisteredHandler
    }[event_type]
  end
end

# Start the subscriber (separate process)
# bin/redis_subscriber
subscriber = RedisSubscriber.new(["events:order", "events:user"])
subscriber.subscribe
```

## Advanced Patterns

### Saga Pattern

```ruby
# app/sagas/order_saga.rb
class SagaFailedError < StandardError; end

class OrderSaga
  include Wisper::Publisher # requires the wisper gem

  STEPS = [
    :reserve_inventory,
    :process_payment,
    :create_shipment,
    :send_confirmation
  ].freeze

  def initialize(order)
    @order = order
    @completed_steps = []
  end

  def execute
    STEPS.each do |step|
      begin
        send(step)
        @completed_steps << step
      rescue StandardError => e
        Rails.logger.error "Saga step #{step} failed: #{e.message}"
        compensate
        raise SagaFailedError, "Failed at #{step}: #{e.message}"
      end
    end

    broadcast(:order_saga_completed, @order)
  end

  private

  def reserve_inventory
    InventoryService.reserve(@order.line_items)
  end

  def process_payment
    PaymentService.charge(@order.payment_method, @order.total)
  end

  def create_shipment
    ShipmentService.create(@order)
  end

  def send_confirmation
    OrderMailer.confirmation(@order).deliver_now
  end

  # Compensations (rollback), applied in reverse order
  def compensate
    @completed_steps.reverse.each do |step|
      compensation_method = "compensate_#{step}"
      send(compensation_method) if respond_to?(compensation_method, true)
    end
  end

  def compensate_reserve_inventory
    InventoryService.release(@order.line_items)
  end

  def compensate_process_payment
    PaymentService.refund(@order.payment_id)
  end

  def compensate_create_shipment
    ShipmentService.cancel(@order.shipment_id)
  end
end
```

### Outbox Pattern

```ruby
# db/migrate/xxx_create_outbox_messages.rb
class CreateOutboxMessages < ActiveRecord::Migration[8.0]
  def change
    create_table :outbox_messages do |t|
      t.string :topic, null: false
      t.string :key
      t.jsonb :payload, null: false
      t.string :status, default: "pending"
      t.datetime :processed_at
      t.integer :retry_count, default: 0
      t.timestamps

      t.index [:status, :created_at]
    end
  end
end

# app/models/outbox_message.rb
class OutboxMessage < ApplicationRecord
  scope :pending, -> { where(status: "pending") }
  scope :failed, -> { where(status: "failed") }
  scope :processable, -> { pending.order(created_at: :asc) }

  def mark_processed!
    update!(status: "processed", processed_at: Time.current)
  end

  def mark_failed!
    update!(status: "failed", retry_count: retry_count + 1)
  end
end

# app/services/outbox_publisher.rb
class OutboxPublisher
  def self.publish(topic:, payload:, key: nil)
    OutboxMessage.create!(
      topic: topic,
      key: key,
      payload: payload
    )
  end
end

# app/jobs/outbox_processor_job.rb
class OutboxProcessorJob < ApplicationJob
  queue_as :critical

  def perform
    OutboxMessage.processable.find_each do |message|
      begin
        publish_to_kafka(message)
        message.mark_processed!
      rescue StandardError => e
        Rails.logger.error "Outbox publish failed: #{e.message}"
        message.mark_failed!
      end
    end
  end

  private

  def publish_to_kafka(message)
    producer = KafkaProducer.new
    producer.publish_sync(
      message.topic,
      message.payload,
      key: message.key
    )
  end
end

# Usage - atomic transaction: the order and its outbox message
# commit (or roll back) together
ActiveRecord::Base.transaction do
  order = Order.create!(params)
  OutboxPublisher.publish(
    topic: "orders",
    payload: { event: "order.created", order_id: order.id },
    key: order.id.to_s
  )
end
```

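For the pattern to work, the processor job must run continuously. One option is a schedule in the recurring.yml format shown earlier; the one-minute cadence here is an illustrative choice:

```yaml
# config/recurring.yml
production:
  process_outbox:
    class: OutboxProcessorJob
    schedule: every minute
```
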
### Dead Letter Queue

```ruby
# app/services/dead_letter_handler.rb
class DeadLetterHandler
  MAX_RETRIES = 3
  RETRY_DELAYS = [60, 300, 900].freeze # 1min, 5min, 15min

  def initialize(original_queue:, dlq:)
    @original_queue = original_queue
    @dlq = dlq
  end

  def process_with_retry(message)
    retry_count = message.dig("metadata", "retry_count") || 0

    begin
      yield message["payload"]
    rescue StandardError => e
      if retry_count < MAX_RETRIES
        schedule_retry(message, retry_count, e)
      else
        send_to_dlq(message, e)
      end
    end
  end

  private

  def schedule_retry(message, retry_count, error)
    delay = RETRY_DELAYS[retry_count]
    message["metadata"] ||= {}
    message["metadata"]["retry_count"] = retry_count + 1
    message["metadata"]["last_error"] = error.message

    RetryMessageJob.set(wait: delay.seconds).perform_later(
      @original_queue,
      message
    )
  end

  def send_to_dlq(message, error)
    message["metadata"] ||= {}
    message["metadata"]["final_error"] = error.message
    message["metadata"]["failed_at"] = Time.current.iso8601

    @dlq.publish(message)

    # Notify admins
    AdminNotifier.dlq_message(@dlq.name, message).deliver_later
  end
end
```

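`RetryMessageJob` is referenced above but not defined in this skill. A hypothetical sketch, assuming the first argument is a queue identifier that can be resolved to one of the publishers from this document (Active Job arguments must be serializable, so the queue object itself cannot be passed):

```ruby
# app/jobs/retry_message_job.rb
# Hypothetical sketch of the job referenced by DeadLetterHandler.
class RetryMessageJob < ApplicationJob
  queue_as :default

  def perform(queue_url, message)
    # Re-publish the annotated message to its original queue; the
    # consumer routes it back through process_with_retry. SqsPublisher
    # stands in for any publisher from this skill.
    SqsPublisher.new(queue_url).publish(message)
  end
end
```
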
## Monitoring

### Queue metrics

```ruby
# app/services/queue_metrics.rb
class QueueMetrics
  def self.record_job_execution(job_class, duration_ms, success:)
    labels = { job: job_class, success: success }

    # Increment the counter
    Rails.cache.increment("metrics:jobs:#{job_class}:count")

    # Record the duration
    Rails.cache.write(
      "metrics:jobs:#{job_class}:last_duration",
      duration_ms,
      expires_in: 1.hour
    )

    # For Prometheus/Grafana
    # StatsD.timing("job.duration", duration_ms, tags: labels)
    # StatsD.increment("job.count", tags: labels)
  end

  def self.record_queue_depth(queue_name, depth)
    Rails.cache.write(
      "metrics:queue:#{queue_name}:depth",
      depth,
      expires_in: 1.minute
    )
  end

  def self.dashboard_data
    {
      solid_queue: solid_queue_stats,
      sidekiq: sidekiq_stats,
      dead_jobs: dead_jobs_count
    }
  end

  def self.solid_queue_stats
    {
      ready: SolidQueue::ReadyExecution.count,
      scheduled: SolidQueue::ScheduledExecution.count,
      failed: SolidQueue::FailedExecution.count
    }
  end

  def self.sidekiq_stats
    return {} unless defined?(Sidekiq)

    stats = Sidekiq::Stats.new
    {
      processed: stats.processed,
      failed: stats.failed,
      enqueued: stats.enqueued,
      retry_size: stats.retry_size
    }
  end

  def self.dead_jobs_count
    defined?(Sidekiq) ? Sidekiq::DeadSet.new.size : 0
  end

  # A bare `private` does not apply to `def self.` methods, so mark
  # the helpers private explicitly
  private_class_method :solid_queue_stats, :sidekiq_stats, :dead_jobs_count
end
```

### Alerts

```ruby
# app/jobs/queue_health_check_job.rb
class QueueHealthCheckJob < ApplicationJob
  queue_as :critical

  THRESHOLDS = {
    queue_depth: 1000,
    failed_jobs: 50,
    oldest_job_age: 30.minutes
  }.freeze

  def perform
    check_queue_depth
    check_failed_jobs
    check_oldest_job
  end

  private

  def check_queue_depth
    depth = SolidQueue::ReadyExecution.count
    if depth > THRESHOLDS[:queue_depth]
      AdminNotifier.queue_depth_alert(depth).deliver_now
    end
  end

  def check_failed_jobs
    failed = SolidQueue::FailedExecution.count
    if failed > THRESHOLDS[:failed_jobs]
      AdminNotifier.failed_jobs_alert(failed).deliver_now
    end
  end

  def check_oldest_job
    oldest = SolidQueue::ReadyExecution.order(:created_at).first
    return unless oldest

    age = Time.current - oldest.created_at
    if age > THRESHOLDS[:oldest_job_age]
      AdminNotifier.stale_jobs_alert(age).deliver_now
    end
  end
end
```

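The health check only helps if it runs on a schedule; following the recurring.yml format used above (the five-minute cadence is an illustrative choice):

```yaml
# config/recurring.yml
production:
  queue_health_check:
    class: QueueHealthCheckJob
    schedule: every 5 minutes
```
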
## Checklist

- [ ] Queue adapter configured (Solid Queue/Sidekiq)
- [ ] Queues separated by priority
- [ ] Retry strategy defined
- [ ] Dead letter queue configured
- [ ] Idempotent jobs (see the sketch below)
- [ ] Queue depth monitoring
- [ ] Alerts for failed jobs
- [ ] Backpressure handling
- [ ] Graceful shutdown
- [ ] Tests for jobs
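
On the idempotent-jobs item: queues deliver at least once, so any job may run twice. A minimal sketch of an idempotency guard, assuming a `processed_at` column on the record (the column and the job name are illustrative):

```ruby
# Idempotent job sketch: safe to run more than once for the same payment.
class SettlePaymentJob < ApplicationJob
  queue_as :critical

  def perform(payment_id)
    payment = Payment.find(payment_id)

    # Guard: a redelivered message finds the work already done and exits.
    return if payment.processed_at.present?

    payment.with_lock do
      # Re-check inside the row lock in case a concurrent worker won the race.
      next if payment.processed_at.present?

      PaymentProcessor.new(payment).process!
      payment.update!(processed_at: Time.current)
    end
  end
end
```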