aegis-stack 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aegis-stack might be problematic.

Files changed (103)
  1. aegis/__init__.py +5 -0
  2. aegis/__main__.py +374 -0
  3. aegis/core/CLAUDE.md +365 -0
  4. aegis/core/__init__.py +6 -0
  5. aegis/core/components.py +115 -0
  6. aegis/core/dependency_resolver.py +119 -0
  7. aegis/core/template_generator.py +163 -0
  8. aegis/templates/CLAUDE.md +306 -0
  9. aegis/templates/cookiecutter-aegis-project/cookiecutter.json +27 -0
  10. aegis/templates/cookiecutter-aegis-project/hooks/post_gen_project.py +172 -0
  11. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.dockerignore +71 -0
  12. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.env.example.j2 +70 -0
  13. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.gitignore +127 -0
  14. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/Dockerfile +53 -0
  15. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/Makefile +211 -0
  16. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/README.md.j2 +196 -0
  17. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/__init__.py +5 -0
  18. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/__init__.py +6 -0
  19. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/health.py +321 -0
  20. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/load_test.py +638 -0
  21. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/main.py +41 -0
  22. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/__init__.py +0 -0
  23. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/__init__.py +0 -0
  24. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/health.py +134 -0
  25. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/models.py.j2 +247 -0
  26. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/routing.py.j2 +14 -0
  27. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/tasks.py.j2 +596 -0
  28. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/hooks.py +133 -0
  29. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/main.py +16 -0
  30. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/middleware/__init__.py +1 -0
  31. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/middleware/cors.py +20 -0
  32. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/shutdown/__init__.py +1 -0
  33. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/shutdown/cleanup.py +14 -0
  34. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/startup/__init__.py +1 -0
  35. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/startup/component_health.py.j2 +190 -0
  36. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/__init__.py +0 -0
  37. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/core/__init__.py +1 -0
  38. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/core/theme.py +46 -0
  39. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/main.py +687 -0
  40. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/scheduler/__init__.py +1 -0
  41. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/scheduler/main.py +138 -0
  42. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/CLAUDE.md +213 -0
  43. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/__init__.py +6 -0
  44. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/constants.py.j2 +30 -0
  45. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/pools.py +78 -0
  46. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/__init__.py +1 -0
  47. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/load_test.py +48 -0
  48. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/media.py +41 -0
  49. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/system.py +36 -0
  50. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/registry.py +139 -0
  51. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/__init__.py +119 -0
  52. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/load_tasks.py +526 -0
  53. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/simple_system_tasks.py +32 -0
  54. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/system_tasks.py +279 -0
  55. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/config.py.j2 +119 -0
  56. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/constants.py +60 -0
  57. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/db.py +67 -0
  58. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/log.py +85 -0
  59. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/__init__.py +1 -0
  60. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/webserver.py +40 -0
  61. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/{% if cookiecutter.include_scheduler == "yes" %}scheduler.py{% endif %} +21 -0
  62. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/integrations/__init__.py +0 -0
  63. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/integrations/main.py +61 -0
  64. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/py.typed +0 -0
  65. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/__init__.py +1 -0
  66. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/load_test.py +661 -0
  67. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/load_test_models.py +269 -0
  68. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/shared/__init__.py +15 -0
  69. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/shared/models.py +26 -0
  70. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/__init__.py +52 -0
  71. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/alerts.py +94 -0
  72. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/health.py.j2 +1105 -0
  73. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/models.py +169 -0
  74. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/ui.py +52 -0
  75. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docker-compose.yml.j2 +195 -0
  76. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/api.md +191 -0
  77. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/components/scheduler.md +414 -0
  78. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/development.md +215 -0
  79. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/health.md +240 -0
  80. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/javascripts/mermaid-config.js +62 -0
  81. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/stylesheets/mermaid.css +95 -0
  82. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/mkdocs.yml.j2 +62 -0
  83. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/pyproject.toml.j2 +156 -0
  84. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/entrypoint.sh +87 -0
  85. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/entrypoint.sh.j2 +104 -0
  86. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/gen_docs.py +16 -0
  87. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/api/__init__.py +1 -0
  88. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/api/test_health_endpoints.py.j2 +239 -0
  89. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/components/test_scheduler.py +76 -0
  90. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/conftest.py.j2 +81 -0
  91. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/__init__.py +1 -0
  92. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_component_integration.py.j2 +376 -0
  93. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_health_logic.py.j2 +633 -0
  94. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_load_test_models.py +665 -0
  95. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_load_test_service.py +602 -0
  96. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_system_service.py +96 -0
  97. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_worker_health_registration.py.j2 +224 -0
  98. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/test_core.py +50 -0
  99. aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/uv.lock +1673 -0
  100. aegis_stack-0.1.0.dist-info/METADATA +114 -0
  101. aegis_stack-0.1.0.dist-info/RECORD +103 -0
  102. aegis_stack-0.1.0.dist-info/WHEEL +4 -0
  103. aegis_stack-0.1.0.dist-info/entry_points.txt +2 -0
aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/load_tasks.py
@@ -0,0 +1,526 @@
+ """
+ Worker tasks for load testing.
+
+ These tasks are designed to be spawned in large numbers to test queue throughput
+ and worker performance. Most tasks keep their own work small so the test exercises
+ the queue and worker infrastructure, while cpu_intensive_task deliberately performs
+ heavy computation to measure CPU-bound behavior.
+ """
+
+ import asyncio
+ import hashlib
+ import random
+ from datetime import datetime
+ from typing import Any, cast
+
+ from app.core.log import logger
+
+
+ async def cpu_intensive_task(ctx: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
+     """
+     CPU-intensive load testing task.
+
+     Performs realistic computational work including hash calculations, sorting,
+     and mathematical operations to test worker CPU processing capabilities.
+     Designed to actually use CPU cycles and test computational throughput.
+
+     Returns:
+         Task completion data with CPU-specific metrics and verification data
+     """
+     start_time = datetime.now()
+     task_id = ctx.get("job_id", "unknown")
+
+     # CPU work 1: Fibonacci calculation (larger numbers for real work)
+     n = random.randint(500, 1500)  # Much larger range for actual CPU work
+     a, b = 0, 1
+     operations_count = 0
+
+     for _ in range(n):
+         a, b = b, a + b
+         operations_count += 1
+
+     # CPU work 2: Sorting random data (CPU intensive)
+     data_size = random.randint(1000, 3000)
+     random_data = [random.randint(1, 10000) for _ in range(data_size)]
+     sorted_data = sorted(random_data)
+     operations_count += data_size  # Approximate operations for sorting
+
+     # CPU work 3: Hash calculations (CPU intensive)
+     hash_operations = random.randint(50, 150)
+     hash_results = []
+     for i in range(hash_operations):
+         data_to_hash = f"load_test_data_{i}_{random.randint(1, 100000)}"
+         hash_result = hashlib.sha256(data_to_hash.encode()).hexdigest()
+         hash_results.append(hash_result[:8])  # Store first 8 chars
+         operations_count += 1
+
+     # CPU work 4: Mathematical computations
+     math_iterations = random.randint(5000, 15000)
+     math_result = 0.0
+     for i in range(math_iterations):
+         # More complex math operations
+         math_result += (i**2 + i**0.5) * 0.1
+         operations_count += 1
+
+     # CPU work 5: Prime number generation (CPU bound)
+     prime_start = random.randint(1000, 2000)
+     primes_found: list[int] = []
+     num = prime_start
+     while len(primes_found) < 20:  # Find 20 primes
+         is_prime = True
+         if num > 1:
+             for i in range(2, int(num**0.5) + 1):
+                 operations_count += 1
+                 if num % i == 0:
+                     is_prime = False
+                     break
+         else:
+             is_prime = False
+
+         if is_prime:
+             primes_found.append(num)
+         num += 1
+
+     # CPU work 6: Simulate encoding/image processing (realistic CPU-heavy work)
+     encoding_operations = 0
+     # Simulate image/video encoding by doing intensive matrix operations
+     for _frame in range(random.randint(10, 30)):  # Process "frames"
+         # Simulate encoding a frame with matrix multiplication
+         matrix_size = random.randint(50, 100)
+         matrix_a = [
+             [random.random() for _ in range(matrix_size)] for _ in range(matrix_size)
+         ]
+         matrix_b = [
+             [random.random() for _ in range(matrix_size)] for _ in range(matrix_size)
+         ]
+
+         # Matrix multiplication (very CPU intensive)
+         result_matrix = [[0.0 for _ in range(matrix_size)] for _ in range(matrix_size)]
+         for i in range(matrix_size):
+             for j in range(matrix_size):
+                 for k in range(matrix_size):
+                     result_matrix[i][j] += matrix_a[i][k] * matrix_b[k][j]
+                     encoding_operations += 1
+
+     # WARNING: This blocks the entire async event loop!
+     # In real systems, this would prevent other async tasks from running
+     event_loop_warning = "CPU work blocks async event loop - other tasks must wait!"
+
+     end_time = datetime.now()
+     duration_ms = (end_time - start_time).total_seconds() * 1000
+
+     return {
+         "task_type": "cpu_intensive",
+         "task_id": task_id,
+         # CPU work proof
+         "fibonacci_n": n,
+         "fibonacci_result": (
+             str(b)[:10] + "..." if len(str(b)) > 10 else str(b)
+         ),
+         "sorted_data_size": len(sorted_data),
+         "hash_operations": hash_operations,
+         "hash_sample": hash_results[:3],  # First 3 hashes as proof
+         "math_iterations": math_iterations,
+         "math_result": round(math_result, 2),
+         "primes_found": primes_found,
+         "encoding_operations": encoding_operations,
+         "total_cpu_operations": operations_count + encoding_operations,
+         # Performance metrics
+         "duration_ms": round(duration_ms, 2),
+         "operations_per_ms": round(
+             (operations_count + encoding_operations) / max(duration_ms, 0.001), 2
+         ),
+         "cpu_intensive_score": round(
+             (operations_count + encoding_operations) / max(duration_ms / 1000, 0.001), 0
+         ),
+         "start_time": start_time.isoformat(),
+         "end_time": end_time.isoformat(),
+         # Async event loop impact
+         "async_event_loop_blocked_ms": round(duration_ms, 2),
+         "event_loop_warning": event_loop_warning,
+         "concurrency_impact": "HIGH - Blocks entire event loop during execution",
+         # Verification signature
+         "work_type": "CPU_COMPUTATION_WITH_ENCODING",
+         "verification": (
+             f"fib({n}), sort({data_size}), hash({hash_operations}), "
+             f"math({math_iterations}), primes({len(primes_found)}), "
+             f"encoding({encoding_operations}ops)"
+         ),
+         "status": "completed",
+     }
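
Note on the WARNING above: because cpu_intensive_task is a coroutine that never awaits, it holds the worker's event loop for the full duration of the computation. A minimal sketch of the usual remedy, assuming only the Python standard library (the helper names are illustrative, not part of aegis-stack):

    # Hypothetical sketch: pushing CPU-bound work off the event loop.
    import asyncio
    from concurrent.futures import ProcessPoolExecutor

    def fib_length(n: int) -> int:
        """Pure-CPU helper; runs in a separate process, not on the event loop."""
        a, b = 0, 1
        for _ in range(n):
            a, b = b, a + b
        return len(str(b))

    async def cpu_work_nonblocking(n: int = 1500) -> int:
        loop = asyncio.get_running_loop()
        with ProcessPoolExecutor() as pool:
            # The event loop stays free to run other tasks while this executes
            return await loop.run_in_executor(pool, fib_length, n)

A worker written this way keeps serving its other queues while the computation runs; the blocking behavior in the task above appears intentional, so that the event-loop impact is itself measurable.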
+
+
+ async def io_simulation_task(ctx: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
+     """
+     I/O-intensive load testing task optimized for high throughput.
+
+     Simulates realistic async I/O patterns with minimal delays to avoid queue
+     saturation while still demonstrating async concurrency benefits. Focuses on
+     async pattern demonstration rather than realistic I/O timing.
+
+     Returns:
+         Task completion data with I/O-specific metrics and concurrency analysis
+     """
+     start_time = datetime.now()
+     task_id = ctx.get("job_id", "unknown")
+
+     # Ultra-lightweight I/O simulation to prevent queue saturation;
+     # focus on async patterns with minimal actual delays
+
+     async def simulate_database_read(read_id: int) -> dict[str, Any]:
+         """Simulate a database read with ultra-minimal delay."""
+         op_start = datetime.now()
+         # Extremely short delay to show async pattern without queue buildup
+         await asyncio.sleep(0.001)  # 1ms max
+         op_end = datetime.now()
+         return {
+             "operation_id": read_id,
+             "operation_type": "database_read",
+             "table": f"users_{read_id}",
+             "simulated_delay_ms": 1,
+             "actual_duration_ms": round((op_end - op_start).total_seconds() * 1000, 3),
+             "rows_returned": random.randint(10, 100),
+         }
+
+     async def simulate_api_call(api_id: int) -> dict[str, Any]:
+         """Simulate an external API call with ultra-minimal delay."""
+         op_start = datetime.now()
+         # Minimal async yield to demonstrate concurrency
+         await asyncio.sleep(0.001)  # 1ms max
+         op_end = datetime.now()
+         return {
+             "operation_id": api_id,
+             "operation_type": "api_call",
+             "endpoint": f"/api/data/{api_id}",
+             "simulated_delay_ms": 1,
+             "actual_duration_ms": round((op_end - op_start).total_seconds() * 1000, 3),
+             "status_code": random.choice([200, 200, 200, 201, 404]),
+         }
+
+     async def simulate_cache_read(cache_id: int) -> dict[str, Any]:
+         """Simulate a cache read operation (very fast)."""
+         op_start = datetime.now()
+         # Cache should be fastest - just yield control
+         await asyncio.sleep(0.0005)  # 0.5ms max
+         op_end = datetime.now()
+         return {
+             "operation_id": cache_id,
+             "operation_type": "cache_read",
+             "cache_key": f"user_session_{cache_id}",
+             "simulated_delay_ms": 0.5,
+             "actual_duration_ms": round((op_end - op_start).total_seconds() * 1000, 3),
+             "cache_hit": random.choice([True, True, True, False]),
+         }
+
+     # Fewer concurrent operations to reduce total time
+     db_tasks = [simulate_database_read(i) for i in range(2)]
+     api_tasks = [simulate_api_call(i) for i in range(2)]
+     cache_tasks = [simulate_cache_read(i) for i in range(3)]
+
+     # Execute all I/O operations concurrently (the key async benefit)
+     concurrent_ops_start = datetime.now()
+     all_operations = await asyncio.gather(*db_tasks, *api_tasks, *cache_tasks)
+     concurrent_ops_end = datetime.now()
+     concurrent_duration = (
+         concurrent_ops_end - concurrent_ops_start
+     ).total_seconds() * 1000
+
+     # Light data processing (realistic for I/O workloads)
+     processing_start = datetime.now()
+
+     # Aggregate results (typical I/O task post-processing)
+     success_count = 0
+     error_count = 0
+     total_rows = 0
+     cache_hits = 0
+
+     for op in all_operations:
+         if op["operation_type"] == "database_read":
+             total_rows += op["rows_returned"]
+             success_count += 1
+         elif op["operation_type"] == "api_call":
+             if op["status_code"] in [200, 201]:
+                 success_count += 1
+             else:
+                 error_count += 1
+         elif op["operation_type"] == "cache_read":
+             if op["cache_hit"]:
+                 cache_hits += 1
+             success_count += 1
+
+     processing_end = datetime.now()
+     processing_duration = (processing_end - processing_start).total_seconds() * 1000
+
+     end_time = datetime.now()
+     total_duration_ms = (end_time - start_time).total_seconds() * 1000
+
+     # Calculate async concurrency benefits
+     total_simulated_delay = sum(op["simulated_delay_ms"] for op in all_operations)
+     actual_concurrent_time = concurrent_duration
+     concurrency_factor = total_simulated_delay / max(actual_concurrent_time, 0.001)
+     time_saved_ms = total_simulated_delay - actual_concurrent_time
+
+     # Categorize operations by type
+     db_operations = [
+         op for op in all_operations if op["operation_type"] == "database_read"
+     ]
+     api_operations = [op for op in all_operations if op["operation_type"] == "api_call"]
+     cache_operations = [
+         op for op in all_operations if op["operation_type"] == "cache_read"
+     ]
+
+     return {
+         "task_type": "io_simulation",
+         "task_id": task_id,
+         # I/O work proof and results
+         "database_operations": len(db_operations),
+         "api_operations": len(api_operations),
+         "cache_operations": len(cache_operations),
+         "total_concurrent_operations": len(all_operations),
+         "processing_results": {
+             "total_rows_processed": total_rows,
+             "success_operations": success_count,
+             "error_operations": error_count,
+             "cache_hits": cache_hits,
+         },
+         # Performance and concurrency metrics
+         "total_duration_ms": round(total_duration_ms, 3),
+         "concurrent_io_duration_ms": round(concurrent_duration, 3),
+         "processing_duration_ms": round(processing_duration, 3),
+         "total_simulated_delay_ms": total_simulated_delay,
+         "concurrency_time_saved_ms": round(time_saved_ms, 3),
+         "concurrency_factor": round(concurrency_factor, 2),
+         "io_throughput_ops_per_sec": round(
+             len(all_operations) / max(total_duration_ms / 1000, 0.001), 1
+         ),
+         "start_time": start_time.isoformat(),
+         "end_time": end_time.isoformat(),
+         # Async pattern demonstration
+         "async_pattern_demo": {
+             "sequential_would_take_ms": total_simulated_delay,
+             "concurrent_actual_ms": round(concurrent_duration, 3),
+             "efficiency_improvement": (
+                 f"{round((time_saved_ms / total_simulated_delay) * 100, 1)}%"
+                 if total_simulated_delay > 0
+                 else "0%"
+             ),
+         },
+         # Verification signature
+         "work_type": "OPTIMIZED_ASYNC_IO",
+         "verification": (
+             f"concurrent({len(all_operations)}ops, "
+             f"{total_simulated_delay}ms→{round(concurrent_duration, 1)}ms, "
+             f"{round(concurrency_factor, 1)}x speedup)"
+         ),
+         "status": "completed",
+     }
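
The concurrency_factor reported above is simple arithmetic: the seven simulated operations carry 2×1 + 2×1 + 3×0.5 = 5.5 ms of combined delay, yet asyncio.gather completes them in roughly the time of the slowest single operation. A self-contained sketch of the same measurement, assuming only the standard library:

    # Illustrative sketch: sequential vs. concurrent awaiting of the same delays.
    import asyncio
    import time

    async def fake_io(delay_s: float) -> None:
        await asyncio.sleep(delay_s)

    async def demo() -> None:
        delays = [0.001, 0.001, 0.001, 0.001, 0.0005, 0.0005, 0.0005]

        t0 = time.perf_counter()
        for d in delays:
            await fake_io(d)  # sequential: total is roughly the sum (5.5 ms)
        sequential_ms = (time.perf_counter() - t0) * 1000

        t0 = time.perf_counter()
        await asyncio.gather(*(fake_io(d) for d in delays))  # concurrent: roughly the max
        concurrent_ms = (time.perf_counter() - t0) * 1000

        print(f"{sequential_ms:.2f} ms -> {concurrent_ms:.2f} ms, "
              f"{sequential_ms / concurrent_ms:.1f}x speedup")

    asyncio.run(demo())

In practice timer granularity and scheduler overhead keep the measured factor somewhat below the theoretical ratio.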
+
+
+ async def memory_operations_task(ctx: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
+     """
+     Memory-intensive load testing task with allocation patterns.
+
+     Performs realistic memory allocation, manipulation, and deallocation
+     patterns to test worker memory handling, garbage collection impact,
+     and memory bandwidth utilization.
+
+     Returns:
+         Task completion data with memory-specific metrics and allocation details
+     """
+     start_time = datetime.now()
+     task_id = ctx.get("job_id", "unknown")
+
+     # Memory allocation pattern 1: Sequential data structures
+     list_size = random.randint(500, 2000)  # Larger for better measurement
+     data_list = list(range(list_size))
+
+     # Memory allocation pattern 2: Dictionary with string values
+     dict_size = list_size // 4
+     data_dict = {i: f"memory_test_value_{i}_{'x' * 10}" for i in range(dict_size)}
+
+     # Memory allocation pattern 3: Nested structures
+     nested_data: list[dict[str, Any]] = []
+     for i in range(50):
+         nested_item = {
+             "id": i,
+             "data": [j * 2 for j in range(20)],
+             "metadata": {"created": datetime.now().isoformat(), "size": 20},
+         }
+         nested_data.append(nested_item)
+
+     # Memory operations: processing that causes memory access patterns
+     # List operations
+     list_sum = sum(data_list)
+     list_squares = [x**2 for x in data_list[:100]]
+     max_value = max(data_list) if data_list else 0
+     min_value = min(data_list) if data_list else 0
+
+     # Dictionary operations
+     dict_keys_count = len(data_dict)
+     dict_values_total_len = sum(len(v) for v in data_dict.values())
+
+     # Nested data processing
+     nested_sum = sum(sum(cast(list[int], item["data"])) for item in nested_data)
+     nested_items_count = len(nested_data)
+
+     # Memory churn: create and destroy temporary objects
+     temp_objects = []
+     for i in range(100):
+         temp_obj = {"temp_id": i, "temp_data": list(range(10))}
+         temp_objects.append(temp_obj)
+
+     # Process temp objects then clean up
+     temp_sum = sum(sum(cast(list[int], obj["temp_data"])) for obj in temp_objects)
+     del temp_objects  # Explicit cleanup
+
+     # Calculate memory usage estimates (approximate)
+     estimated_list_bytes = list_size * 8  # Rough estimate for integers
+     estimated_dict_bytes = dict_size * (8 + 30)
+     estimated_nested_bytes = len(nested_data) * 200
+     total_estimated_bytes = (
+         estimated_list_bytes + estimated_dict_bytes + estimated_nested_bytes
+     )
+
+     # Clean up main data structures
+     del data_list
+     del data_dict
+     del nested_data
+
+     end_time = datetime.now()
+     duration_ms = (end_time - start_time).total_seconds() * 1000
+
+     return {
+         "task_type": "memory_operations",
+         "task_id": task_id,
+         # Memory work proof
+         "list_allocation_size": list_size,
+         "dict_allocation_size": dict_size,
+         "nested_structures_count": nested_items_count,
+         "temp_objects_processed": 100,
+         # Memory operation results
+         "list_sum": list_sum,
+         "list_squares_sample": len(list_squares),
+         "max_value": max_value,
+         "min_value": min_value,
+         "dict_keys_count": dict_keys_count,
+         "dict_values_total_length": dict_values_total_len,
+         "nested_sum": nested_sum,
+         "temp_sum": temp_sum,
+         # Memory metrics
+         "estimated_peak_memory_bytes": total_estimated_bytes,
+         "estimated_peak_memory_mb": round(total_estimated_bytes / (1024 * 1024), 2),
+         "memory_operations_count": 8,  # Major allocation/processing operations
+         "duration_ms": round(duration_ms, 2),
+         "memory_throughput_mb_per_sec": round(
+             (total_estimated_bytes / (1024 * 1024)) / max(duration_ms / 1000, 0.001),
+             2,
+         ),
+         "start_time": start_time.isoformat(),
+         "end_time": end_time.isoformat(),
+         # Verification signature
+         "work_type": "MEMORY_ALLOCATION",
+         "verification": (
+             f"list({list_size}) + dict({dict_size}) + "
+             f"nested({nested_items_count}) + temp(100) = {total_estimated_bytes}bytes"
+         ),
+         "status": "completed",
+     }
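
The byte counts above are deliberately rough (8 bytes per integer, a flat overhead per dictionary value); CPython's real per-object overhead is substantially higher. If accurate numbers mattered, the standard-library tracemalloc module could replace the estimates. A sketch of that alternative (not something this package does):

    # Hypothetical sketch: measuring actual allocations with tracemalloc.
    import tracemalloc

    tracemalloc.start()
    data_list = list(range(2000))
    data_dict = {i: f"memory_test_value_{i}_{'x' * 10}" for i in range(500)}
    current_bytes, peak_bytes = tracemalloc.get_traced_memory()
    tracemalloc.stop()
    print(f"current={current_bytes / 1024:.1f} KiB, peak={peak_bytes / 1024:.1f} KiB")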
+
+
+ async def failure_testing_task(ctx: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
+     """
+     Failure testing task for error handling validation.
+
+     Randomly fails ~20% of the time to test worker resilience,
+     error handling, and failure recovery patterns. When successful,
+     performs minimal work so the focus stays on failure testing.
+
+     Returns:
+         Task completion data with failure testing metrics
+     """
+     start_time = datetime.now()
+     task_id = ctx.get("job_id", "unknown")
+
+     # Random failure probability (20%)
+     failure_roll = random.random()
+     failure_threshold = 0.2
+
+     # Generate different types of failures for testing
+     if failure_roll < failure_threshold:
+         failure_types = [
+             "simulated_network_timeout",
+             "simulated_database_error",
+             "simulated_validation_error",
+             "simulated_resource_exhaustion",
+             "simulated_permission_error",
+         ]
+
+         failure_type = random.choice(failure_types)
+         error_message = (
+             f"Simulated {failure_type} for resilience testing "
+             f"(task {task_id}, roll={failure_roll:.3f})"
+         )
+
+         # Log failure details before raising
+         logger.warning(f"🧪 Intentional test failure: {error_message}")
+
+         raise Exception(error_message)
+
+     # Success path: minimal work with timing
+     work_type = random.choice(["quick_sleep", "light_calculation", "simple_operation"])
+
+     if work_type == "quick_sleep":
+         delay_ms = random.randint(5, 15)
+         await asyncio.sleep(delay_ms / 1000)
+         work_detail = f"sleep({delay_ms}ms)"
+
+     elif work_type == "light_calculation":
+         n = random.randint(5, 15)
+         result = sum(i**2 for i in range(n))
+         work_detail = f"sum_squares({n})={result}"
+
+     else:  # simple_operation
+         data = list(range(10))
+         result = len(data) + sum(data)
+         work_detail = f"list_ops({len(data)})={result}"
+
+     end_time = datetime.now()
+     duration_ms = (end_time - start_time).total_seconds() * 1000
+
+     return {
+         "task_type": "failure_testing",
+         "task_id": task_id,
+         # Failure testing proof
+         "failure_roll": round(failure_roll, 3),
+         "failure_threshold": failure_threshold,
+         "would_have_failed": failure_roll < failure_threshold,
+         "success_work_type": work_type,
+         "work_detail": work_detail,
+         # Performance metrics
+         "duration_ms": round(duration_ms, 2),
+         "start_time": start_time.isoformat(),
+         "end_time": end_time.isoformat(),
+         # Verification signature
+         "work_type": "FAILURE_TESTING",
+         "verification": (
+             f"roll={failure_roll:.3f} < {failure_threshold} = "
+             f"{failure_roll < failure_threshold}, work={work_detail}"
+         ),
+         "status": "completed",
+     }
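
A note on the ctx argument used throughout this file: a dict-style context carrying a "job_id" key matches the conventions of the arq task queue, which the worker component appears to build on (the file list above includes worker pools, queues, and a registry). Assuming arq, enqueueing a burst of these tasks might look like the following sketch; the Redis settings and job count are placeholders, not values taken from the package:

    # Hypothetical sketch: spawning load-test tasks through arq.
    import asyncio

    from arq import create_pool
    from arq.connections import RedisSettings

    async def main() -> None:
        redis = await create_pool(RedisSettings())  # localhost:6379 by default
        jobs = [await redis.enqueue_job("cpu_intensive_task") for _ in range(50)]
        # job.result() waits for the worker to finish (or re-raises its exception)
        results = [await job.result() for job in jobs if job is not None]
        print(f"{len(results)} tasks completed")

    asyncio.run(main())

Under that assumption, the ~20% failure rate in failure_testing_task would exercise the queue's retry and failure-handling behavior rather than just the happy path.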
aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/simple_system_tasks.py
@@ -0,0 +1,32 @@
+ """Simple system maintenance tasks for the system worker."""
+
+ import asyncio
+ from datetime import UTC, datetime
+
+ from app.core.log import logger
+
+
+ async def system_health_check() -> dict[str, str]:
+     """Simple system health check task."""
+     logger.info("🩺 Running system health check task")
+
+     # Simple health check - just return the current timestamp
+     return {
+         "status": "healthy",
+         "timestamp": datetime.now(UTC).isoformat(),
+         "task": "system_health_check",
+     }
+
+
+ async def cleanup_temp_files() -> dict[str, str]:
+     """Simple temp file cleanup task placeholder."""
+     logger.info("🧹 Running temp file cleanup task")
+
+     # Placeholder for actual cleanup logic
+     await asyncio.sleep(0.1)  # Simulate some work
+
+     return {
+         "status": "completed",
+         "timestamp": datetime.now(UTC).isoformat(),
+         "task": "cleanup_temp_files",
+     }
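
Both coroutines above take no arguments, so any scheduler can drive them. As a rough illustration of periodic execution (the runner and intervals are invented here; the project's real wiring lives in its scheduler component, which this diff does not show):

    # Hypothetical periodic runner for the maintenance tasks above.
    import asyncio
    from collections.abc import Awaitable, Callable

    from app.components.worker.tasks.simple_system_tasks import (
        cleanup_temp_files,
        system_health_check,
    )

    async def run_every(
        seconds: float, task: Callable[[], Awaitable[dict[str, str]]]
    ) -> None:
        while True:
            print(await task())
            await asyncio.sleep(seconds)

    async def main() -> None:
        await asyncio.gather(
            run_every(60, system_health_check),
            run_every(3600, cleanup_temp_files),
        )

    asyncio.run(main())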