rrq 0.5.0-py3-none-any.whl → 0.7.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
- rrq/cli.py +39 -64
- rrq/cli_commands/__init__.py +1 -0
- rrq/cli_commands/base.py +102 -0
- rrq/cli_commands/commands/__init__.py +1 -0
- rrq/cli_commands/commands/debug.py +551 -0
- rrq/cli_commands/commands/dlq.py +853 -0
- rrq/cli_commands/commands/jobs.py +516 -0
- rrq/cli_commands/commands/monitor.py +776 -0
- rrq/cli_commands/commands/queues.py +539 -0
- rrq/cli_commands/utils.py +161 -0
- rrq/client.py +39 -35
- rrq/constants.py +10 -0
- rrq/cron.py +67 -8
- rrq/hooks.py +217 -0
- rrq/job.py +5 -5
- rrq/registry.py +0 -3
- rrq/settings.py +13 -1
- rrq/store.py +211 -53
- rrq/worker.py +6 -6
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/METADATA +208 -25
- rrq-0.7.0.dist-info/RECORD +26 -0
- rrq-0.5.0.dist-info/RECORD +0 -16
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/WHEEL +0 -0
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/entry_points.txt +0 -0
- {rrq-0.5.0.dist-info → rrq-0.7.0.dist-info}/licenses/LICENSE +0 -0
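The headline change is a CLI refactor: `rrq/cli.py` slims down (+39 −64) and command implementations move into the new `rrq/cli_commands/` package, alongside a new `rrq/hooks.py` module and a substantially extended `rrq/store.py`. Judging from the imports in the `debug.py` module shown below, `rrq/cli_commands/base.py` provides an `AsyncCommand` base class plus `load_app_settings` and `get_job_store` helpers. A minimal sketch of a command group built on that base might look like the following; this is a hypothetical illustration, not code from the wheel, and the helper signatures are inferred only from how `debug.py` uses them:

```python
# Hypothetical sketch of a command module in the style of rrq 0.7.0's CLI base.
# AsyncCommand, load_app_settings, and get_job_store are the names that
# rrq/cli_commands/commands/debug.py (below) imports; their exact signatures
# are assumptions based on how that module calls them.
import click

from rrq.cli_commands.base import AsyncCommand, load_app_settings, get_job_store


class PingCommands(AsyncCommand):
    """Minimal example: one command that round-trips a PING through Redis."""

    def register(self, cli_group: click.Group) -> None:
        @cli_group.command("ping")
        @click.option("--settings", "settings_object_path", type=str)
        def ping(settings_object_path: str):
            """Check that the job store's Redis connection is alive."""
            # make_async wraps the coroutine so click can call it synchronously,
            # mirroring the pattern used throughout debug.py below.
            self.make_async(self._ping)(settings_object_path)

    async def _ping(self, settings_object_path: str) -> None:
        settings = load_app_settings(settings_object_path)
        job_store = await get_job_store(settings)
        try:
            pong = await job_store.redis.ping()
            click.echo(f"Redis ping: {pong}")
        finally:
            await job_store.aclose()
```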
rrq/cli_commands/commands/debug.py (new file)

--- /dev/null
+++ rrq/cli_commands/commands/debug.py
@@ -0,0 +1,551 @@
+"""Debug and testing commands for RRQ"""
+
+import asyncio
+import json
+import random
+import time
+from datetime import datetime, timedelta
+from typing import List, Optional
+
+import click
+
+from rrq.constants import JOB_KEY_PREFIX, QUEUE_KEY_PREFIX
+from rrq.store import JobStore
+from rrq.cli_commands.base import AsyncCommand, load_app_settings, get_job_store
+from ..utils import (
+    console,
+    create_progress,
+    print_info,
+    print_success,
+    print_warning,
+)
+
+
+class DebugCommands(AsyncCommand):
+    """Debug and testing commands"""
+
+    def register(self, cli_group: click.Group) -> None:
+        """Register debug commands"""
+
+        @cli_group.group("debug")
+        def debug_group():
+            """Debug and testing tools"""
+            pass
+
+        # Generate fake jobs
+        @debug_group.command("generate-jobs")
+        @click.option(
+            "--settings",
+            "settings_object_path",
+            type=str,
+            help="Python settings path (e.g., myapp.settings.rrq_settings)",
+        )
+        @click.option(
+            "--count",
+            type=int,
+            default=100,
+            help="Number of jobs to generate",
+        )
+        @click.option(
+            "--queue",
+            "queue_names",
+            multiple=True,
+            help="Queue names to use (default: test, urgent, low_priority)",
+        )
+        @click.option(
+            "--status",
+            "statuses",
+            multiple=True,
+            type=click.Choice(["pending", "active", "completed", "failed", "retrying"]),
+            help="Job statuses to create (default: mix of all)",
+        )
+        @click.option(
+            "--age-hours",
+            type=int,
+            default=24,
+            help="Maximum age of jobs in hours",
+        )
+        @click.option(
+            "--batch-size",
+            type=int,
+            default=10,
+            help="Batch size for bulk operations",
+        )
+        def generate_jobs(
+            settings_object_path: str,
+            count: int,
+            queue_names: tuple,
+            statuses: tuple,
+            age_hours: int,
+            batch_size: int,
+        ):
+            """Generate fake jobs for testing"""
+            self.make_async(self._generate_jobs)(
+                settings_object_path,
+                count,
+                queue_names,
+                statuses,
+                age_hours,
+                batch_size,
+            )
+
+        # Generate fake workers
+        @debug_group.command("generate-workers")
+        @click.option(
+            "--settings",
+            "settings_object_path",
+            type=str,
+            help="Python settings path (e.g., myapp.settings.rrq_settings)",
+        )
+        @click.option(
+            "--count",
+            type=int,
+            default=5,
+            help="Number of workers to simulate",
+        )
+        @click.option(
+            "--duration",
+            type=int,
+            default=60,
+            help="Duration to simulate workers in seconds",
+        )
+        def generate_workers(settings_object_path: str, count: int, duration: int):
+            """Generate fake worker heartbeats for testing"""
+            self.make_async(self._generate_workers)(
+                settings_object_path, count, duration
+            )
+
+        # Submit test job
+        @debug_group.command("submit")
+        @click.argument("function_name")
+        @click.option(
+            "--settings",
+            "settings_object_path",
+            type=str,
+            help="Python settings path (e.g., myapp.settings.rrq_settings)",
+        )
+        @click.option(
+            "--args",
+            help="JSON string of positional arguments",
+        )
+        @click.option(
+            "--kwargs",
+            help="JSON string of keyword arguments",
+        )
+        @click.option(
+            "--queue",
+            help="Queue name",
+        )
+        @click.option(
+            "--delay",
+            type=int,
+            help="Delay in seconds",
+        )
+        def submit_job(
+            function_name: str,
+            settings_object_path: str,
+            args: Optional[str],
+            kwargs: Optional[str],
+            queue: Optional[str],
+            delay: Optional[int],
+        ):
+            """Submit a test job"""
+            self.make_async(self._submit_job)(
+                function_name, settings_object_path, args, kwargs, queue, delay
+            )
+
+        # Clear test data
+        @debug_group.command("clear")
+        @click.option(
+            "--settings",
+            "settings_object_path",
+            type=str,
+            help="Python settings path (e.g., myapp.settings.rrq_settings)",
+        )
+        @click.option(
+            "--confirm",
+            is_flag=True,
+            help="Confirm deletion without prompt",
+        )
+        @click.option(
+            "--pattern",
+            default="test_*",
+            help="Pattern to match for deletion (default: test_*)",
+        )
+        def clear_data(settings_object_path: str, confirm: bool, pattern: str):
+            """Clear test data from Redis"""
+            self.make_async(self._clear_data)(settings_object_path, confirm, pattern)
+
+        # Stress test
+        @debug_group.command("stress-test")
+        @click.option(
+            "--settings",
+            "settings_object_path",
+            type=str,
+            help="Python settings path (e.g., myapp.settings.rrq_settings)",
+        )
+        @click.option(
+            "--jobs-per-second",
+            type=int,
+            default=10,
+            help="Jobs to create per second",
+        )
+        @click.option(
+            "--duration",
+            type=int,
+            default=60,
+            help="Duration in seconds",
+        )
+        @click.option(
+            "--queues",
+            multiple=True,
+            help="Queue names to use",
+        )
+        def stress_test(
+            settings_object_path: str,
+            jobs_per_second: int,
+            duration: int,
+            queues: tuple,
+        ):
+            """Run stress test by creating jobs continuously"""
+            self.make_async(self._stress_test)(
+                settings_object_path, jobs_per_second, duration, queues
+            )
+
+    async def _generate_jobs(
+        self,
+        settings_object_path: str,
+        count: int,
+        queue_names: tuple,
+        statuses: tuple,
+        age_hours: int,
+        batch_size: int,
+    ) -> None:
+        """Generate fake jobs"""
+        settings = load_app_settings(settings_object_path)
+        job_store = await get_job_store(settings)
+
+        try:
+            # Default values
+            if not queue_names:
+                queue_names = ("test", "urgent", "low_priority", "default")
+
+            if not statuses:
+                statuses = ("pending", "completed", "failed", "retrying")
+
+            # Sample function names
+            function_names = [
+                "process_data",
+                "send_email",
+                "generate_report",
+                "cleanup_files",
+                "sync_database",
+                "resize_image",
+                "calculate_metrics",
+                "export_csv",
+                "backup_data",
+                "validate_input",
+            ]
+
+            # Generate timestamps
+            now = datetime.now()
+            start_time = now - timedelta(hours=age_hours)
+
+            with create_progress() as progress:
+                task = progress.add_task(
+                    f"Generating {count} fake jobs...", total=count
+                )
+
+                created_jobs = []
+                for i in range(count):
+                    # Random attributes
+                    job_id = f"test_job_{int(time.time() * 1000000)}_{i}"
+                    function_name = random.choice(function_names)
+                    queue_name = random.choice(queue_names)
+                    status = random.choice(statuses)
+
+                    # Random timestamps
+                    created_at = start_time + timedelta(
+                        seconds=random.randint(0, int(age_hours * 3600))
+                    )
+
+                    # Create job data
+                    job_data = {
+                        "id": job_id,
+                        "function_name": function_name,
+                        "queue_name": queue_name,
+                        "status": status,
+                        "args": json.dumps([f"arg_{i}", random.randint(1, 100)]),
+                        "kwargs": json.dumps(
+                            {
+                                "user_id": random.randint(1, 1000),
+                                "priority": random.choice(["high", "medium", "low"]),
+                            }
+                        ),
+                        "created_at": created_at.timestamp(),
+                        "retries": random.randint(0, 3),
+                        "max_retries": 3,
+                    }
+
+                    # Add status-specific fields
+                    if status in ("completed", "failed"):
+                        started_at = created_at + timedelta(
+                            seconds=random.randint(1, 60)
+                        )
+                        completed_at = started_at + timedelta(
+                            seconds=random.randint(1, 300)
+                        )
+                        job_data.update(
+                            {
+                                "started_at": started_at.timestamp(),
+                                "completed_at": completed_at.timestamp(),
+                                "worker_id": f"worker_{random.randint(1, 10)}",
+                            }
+                        )
+
+                        if status == "completed":
+                            job_data["result"] = json.dumps(
+                                {
+                                    "success": True,
+                                    "processed_items": random.randint(1, 100),
+                                }
+                            )
+                        else:
+                            job_data["error"] = random.choice(
+                                [
+                                    "Connection timeout",
+                                    "Invalid input data",
+                                    "Database error",
+                                    "File not found",
+                                    "Permission denied",
+                                ]
+                            )
+
+                    elif status == "active":
+                        started_at = created_at + timedelta(
+                            seconds=random.randint(1, 60)
+                        )
+                        job_data.update(
+                            {
+                                "started_at": started_at.timestamp(),
+                                "worker_id": f"worker_{random.randint(1, 10)}",
+                            }
+                        )
+
+                    created_jobs.append((job_id, job_data, queue_name, status))
+
+                    # Batch insert
+                    if len(created_jobs) >= batch_size:
+                        await self._insert_job_batch(job_store, created_jobs)
+                        created_jobs = []
+                        progress.update(task, advance=batch_size)
+
+                # Insert remaining jobs
+                if created_jobs:
+                    await self._insert_job_batch(job_store, created_jobs)
+                    progress.update(task, advance=len(created_jobs))
+
+            print_success(
+                f"Generated {count} fake jobs across {len(queue_names)} queues"
+            )
+
+        finally:
+            await job_store.aclose()
+
+    async def _insert_job_batch(self, job_store: JobStore, jobs: List[tuple]) -> None:
+        """Insert a batch of jobs into Redis"""
+        async with job_store.redis.pipeline() as pipe:
+            for job_id, job_data, queue_name, status in jobs:
+                # Insert job data
+                job_key = f"{JOB_KEY_PREFIX}{job_id}"
+                pipe.hset(job_key, mapping=job_data)
+
+                # Add to queue if pending
+                if status == "pending":
+                    queue_key = f"{QUEUE_KEY_PREFIX}{queue_name}"
+                    priority = job_data.get("created_at", time.time())
+                    pipe.zadd(queue_key, {job_id: priority})
+
+            await pipe.execute()
+
+    async def _generate_workers(
+        self, settings_object_path: str, count: int, duration: int
+    ) -> None:
+        """Generate fake worker heartbeats"""
+        settings = load_app_settings(settings_object_path)
+        job_store = await get_job_store(settings)
+
+        try:
+            worker_ids = [f"test_worker_{i}" for i in range(count)]
+
+            print_info(f"Simulating {count} workers for {duration} seconds...")
+
+            start_time = time.time()
+            while time.time() - start_time < duration:
+                # Update each worker
+                for worker_id in worker_ids:
+                    health_data = {
+                        "worker_id": worker_id,
+                        "status": random.choice(["running", "idle", "polling"]),
+                        "active_jobs": random.randint(0, 5),
+                        "concurrency_limit": random.randint(5, 20),
+                        "queues": random.sample(
+                            ["test", "urgent", "low_priority", "default"], 2
+                        ),
+                        "timestamp": time.time(),
+                    }
+
+                    await job_store.set_worker_health(worker_id, health_data, 60)
+
+                # Wait before next update
+                await asyncio.sleep(5)
+
+                # Show progress
+                elapsed = time.time() - start_time
+                remaining = duration - elapsed
+                console.print(
+                    f"\rSimulating workers... {remaining:.0f}s remaining", end=""
+                )
+
+            console.print("\nWorker simulation complete")
+
+        finally:
+            await job_store.aclose()
+
+    async def _submit_job(
+        self,
+        function_name: str,
+        settings_object_path: str,
+        args: Optional[str],
+        kwargs: Optional[str],
+        queue: Optional[str],
+        delay: Optional[int],
+    ) -> None:
+        """Submit a test job"""
+        settings = load_app_settings(settings_object_path)
+
+        # Parse arguments
+        parsed_args = json.loads(args) if args else []
+        parsed_kwargs = json.loads(kwargs) if kwargs else {}
+
+        # Create client
+        from rrq.client import RRQClient
+
+        client = RRQClient(settings=settings)
+
+        try:
+            # Submit job
+            job_id = await client.enqueue(
+                function_name=function_name,
+                args=parsed_args,
+                kwargs=parsed_kwargs,
+                queue_name=queue or settings.default_queue_name,
+                delay=delay,
+            )
+
+            print_success(f"Job submitted: {job_id}")
+            console.print(f"Function: {function_name}")
+            console.print(f"Args: {parsed_args}")
+            console.print(f"Kwargs: {parsed_kwargs}")
+            console.print(f"Queue: {queue or settings.default_queue_name}")
+            if delay:
+                console.print(f"Delay: {delay}s")
+
+        finally:
+            await client.aclose()
+
+    async def _clear_data(
+        self, settings_object_path: str, confirm: bool, pattern: str
+    ) -> None:
+        """Clear test data from Redis"""
+        settings = load_app_settings(settings_object_path)
+        job_store = await get_job_store(settings)
+
+        try:
+            # Find matching keys
+            keys_to_delete = []
+            async for key in job_store.redis.scan_iter(match=pattern):
+                keys_to_delete.append(key.decode())
+
+            if not keys_to_delete:
+                print_info(f"No keys found matching pattern: {pattern}")
+                return
+
+            print_warning(
+                f"Found {len(keys_to_delete)} keys matching pattern: {pattern}"
+            )
+
+            # Confirm deletion
+            if not confirm:
+                if not click.confirm(f"Delete {len(keys_to_delete)} keys?"):
+                    print_info("Deletion cancelled")
+                    return
+
+            # Delete keys
+            if keys_to_delete:
+                deleted = await job_store.redis.delete(*keys_to_delete)
+                print_success(f"Deleted {deleted} keys")
+
+        finally:
+            await job_store.aclose()
+
+    async def _stress_test(
+        self,
+        settings_object_path: str,
+        jobs_per_second: int,
+        duration: int,
+        queues: tuple,
+    ) -> None:
+        """Run stress test"""
+        settings = load_app_settings(settings_object_path)
+
+        # Default queues
+        if not queues:
+            queues = ("stress_test",)
+
+        # Create client
+        from rrq.client import RRQClient
+
+        client = RRQClient(settings=settings)
+
+        try:
+            print_info(
+                f"Starting stress test: {jobs_per_second} jobs/sec for {duration}s"
+            )
+
+            total_jobs = 0
+            start_time = time.time()
+
+            while time.time() - start_time < duration:
+                batch_start = time.time()
+
+                # Submit jobs for this second
+                for i in range(jobs_per_second):
+                    queue_name = random.choice(queues)
+
+                    await client.enqueue(
+                        function_name="stress_test_job",
+                        args=[total_jobs + i],
+                        kwargs={"batch": int(time.time())},
+                        queue_name=queue_name,
+                    )
+
+                total_jobs += jobs_per_second
+
+                # Wait for next second
+                elapsed = time.time() - batch_start
+                if elapsed < 1.0:
+                    await asyncio.sleep(1.0 - elapsed)
+
+                # Show progress
+                test_elapsed = time.time() - start_time
+                remaining = duration - test_elapsed
+                console.print(
+                    f"\rStress test: {total_jobs} jobs submitted, {remaining:.0f}s remaining",
+                    end="",
+                )
+
+            console.print(f"\nStress test complete: {total_jobs} jobs submitted")
+
+        finally:
+            await client.aclose()
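Taken together, `register` wires these commands under a `debug` group. Assuming the wheel's console script is named `rrq` (the package ships an `entry_points.txt`, though its contents are not shown in this diff), typical invocations would look something like the following; the flags are taken from the click options above, but the script name is an assumption:

```console
$ rrq debug generate-jobs --settings myapp.settings.rrq_settings --count 500 --queue test --status pending
$ rrq debug submit my_task --args '[1, 2]' --kwargs '{"user_id": 7}' --queue urgent --delay 30
$ rrq debug stress-test --jobs-per-second 25 --duration 120
$ rrq debug clear --confirm
```

Note that `generate-jobs` writes job hashes and queue entries directly through the store's Redis pipeline rather than going through `RRQClient`, which is what lets it fabricate historical timestamps and terminal statuses, while `submit` and `stress-test` exercise the real enqueue path.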