truthound-dashboard 1.3.1__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169) hide show
  1. truthound_dashboard/api/alerts.py +258 -0
  2. truthound_dashboard/api/anomaly.py +1302 -0
  3. truthound_dashboard/api/cross_alerts.py +352 -0
  4. truthound_dashboard/api/deps.py +143 -0
  5. truthound_dashboard/api/drift_monitor.py +540 -0
  6. truthound_dashboard/api/lineage.py +1151 -0
  7. truthound_dashboard/api/maintenance.py +363 -0
  8. truthound_dashboard/api/middleware.py +373 -1
  9. truthound_dashboard/api/model_monitoring.py +805 -0
  10. truthound_dashboard/api/notifications_advanced.py +2452 -0
  11. truthound_dashboard/api/plugins.py +2096 -0
  12. truthound_dashboard/api/profile.py +211 -14
  13. truthound_dashboard/api/reports.py +853 -0
  14. truthound_dashboard/api/router.py +147 -0
  15. truthound_dashboard/api/rule_suggestions.py +310 -0
  16. truthound_dashboard/api/schema_evolution.py +231 -0
  17. truthound_dashboard/api/sources.py +47 -3
  18. truthound_dashboard/api/triggers.py +190 -0
  19. truthound_dashboard/api/validations.py +13 -0
  20. truthound_dashboard/api/validators.py +333 -4
  21. truthound_dashboard/api/versioning.py +309 -0
  22. truthound_dashboard/api/websocket.py +301 -0
  23. truthound_dashboard/core/__init__.py +27 -0
  24. truthound_dashboard/core/anomaly.py +1395 -0
  25. truthound_dashboard/core/anomaly_explainer.py +633 -0
  26. truthound_dashboard/core/cache.py +206 -0
  27. truthound_dashboard/core/cached_services.py +422 -0
  28. truthound_dashboard/core/charts.py +352 -0
  29. truthound_dashboard/core/connections.py +1069 -42
  30. truthound_dashboard/core/cross_alerts.py +837 -0
  31. truthound_dashboard/core/drift_monitor.py +1477 -0
  32. truthound_dashboard/core/drift_sampling.py +669 -0
  33. truthound_dashboard/core/i18n/__init__.py +42 -0
  34. truthound_dashboard/core/i18n/detector.py +173 -0
  35. truthound_dashboard/core/i18n/messages.py +564 -0
  36. truthound_dashboard/core/lineage.py +971 -0
  37. truthound_dashboard/core/maintenance.py +443 -5
  38. truthound_dashboard/core/model_monitoring.py +1043 -0
  39. truthound_dashboard/core/notifications/channels.py +1020 -1
  40. truthound_dashboard/core/notifications/deduplication/__init__.py +143 -0
  41. truthound_dashboard/core/notifications/deduplication/policies.py +274 -0
  42. truthound_dashboard/core/notifications/deduplication/service.py +400 -0
  43. truthound_dashboard/core/notifications/deduplication/stores.py +2365 -0
  44. truthound_dashboard/core/notifications/deduplication/strategies.py +422 -0
  45. truthound_dashboard/core/notifications/dispatcher.py +43 -0
  46. truthound_dashboard/core/notifications/escalation/__init__.py +149 -0
  47. truthound_dashboard/core/notifications/escalation/backends.py +1384 -0
  48. truthound_dashboard/core/notifications/escalation/engine.py +429 -0
  49. truthound_dashboard/core/notifications/escalation/models.py +336 -0
  50. truthound_dashboard/core/notifications/escalation/scheduler.py +1187 -0
  51. truthound_dashboard/core/notifications/escalation/state_machine.py +330 -0
  52. truthound_dashboard/core/notifications/escalation/stores.py +2896 -0
  53. truthound_dashboard/core/notifications/events.py +49 -0
  54. truthound_dashboard/core/notifications/metrics/__init__.py +115 -0
  55. truthound_dashboard/core/notifications/metrics/base.py +528 -0
  56. truthound_dashboard/core/notifications/metrics/collectors.py +583 -0
  57. truthound_dashboard/core/notifications/routing/__init__.py +169 -0
  58. truthound_dashboard/core/notifications/routing/combinators.py +184 -0
  59. truthound_dashboard/core/notifications/routing/config.py +375 -0
  60. truthound_dashboard/core/notifications/routing/config_parser.py +867 -0
  61. truthound_dashboard/core/notifications/routing/engine.py +382 -0
  62. truthound_dashboard/core/notifications/routing/expression_engine.py +1269 -0
  63. truthound_dashboard/core/notifications/routing/jinja2_engine.py +774 -0
  64. truthound_dashboard/core/notifications/routing/rules.py +625 -0
  65. truthound_dashboard/core/notifications/routing/validator.py +678 -0
  66. truthound_dashboard/core/notifications/service.py +2 -0
  67. truthound_dashboard/core/notifications/stats_aggregator.py +850 -0
  68. truthound_dashboard/core/notifications/throttling/__init__.py +83 -0
  69. truthound_dashboard/core/notifications/throttling/builder.py +311 -0
  70. truthound_dashboard/core/notifications/throttling/stores.py +1859 -0
  71. truthound_dashboard/core/notifications/throttling/throttlers.py +633 -0
  72. truthound_dashboard/core/openlineage.py +1028 -0
  73. truthound_dashboard/core/plugins/__init__.py +39 -0
  74. truthound_dashboard/core/plugins/docs/__init__.py +39 -0
  75. truthound_dashboard/core/plugins/docs/extractor.py +703 -0
  76. truthound_dashboard/core/plugins/docs/renderers.py +804 -0
  77. truthound_dashboard/core/plugins/hooks/__init__.py +63 -0
  78. truthound_dashboard/core/plugins/hooks/decorators.py +367 -0
  79. truthound_dashboard/core/plugins/hooks/manager.py +403 -0
  80. truthound_dashboard/core/plugins/hooks/protocols.py +265 -0
  81. truthound_dashboard/core/plugins/lifecycle/__init__.py +41 -0
  82. truthound_dashboard/core/plugins/lifecycle/hot_reload.py +584 -0
  83. truthound_dashboard/core/plugins/lifecycle/machine.py +419 -0
  84. truthound_dashboard/core/plugins/lifecycle/states.py +266 -0
  85. truthound_dashboard/core/plugins/loader.py +504 -0
  86. truthound_dashboard/core/plugins/registry.py +810 -0
  87. truthound_dashboard/core/plugins/reporter_executor.py +588 -0
  88. truthound_dashboard/core/plugins/sandbox/__init__.py +59 -0
  89. truthound_dashboard/core/plugins/sandbox/code_validator.py +243 -0
  90. truthound_dashboard/core/plugins/sandbox/engines.py +770 -0
  91. truthound_dashboard/core/plugins/sandbox/protocols.py +194 -0
  92. truthound_dashboard/core/plugins/sandbox.py +617 -0
  93. truthound_dashboard/core/plugins/security/__init__.py +68 -0
  94. truthound_dashboard/core/plugins/security/analyzer.py +535 -0
  95. truthound_dashboard/core/plugins/security/policies.py +311 -0
  96. truthound_dashboard/core/plugins/security/protocols.py +296 -0
  97. truthound_dashboard/core/plugins/security/signing.py +842 -0
  98. truthound_dashboard/core/plugins/security.py +446 -0
  99. truthound_dashboard/core/plugins/validator_executor.py +401 -0
  100. truthound_dashboard/core/plugins/versioning/__init__.py +51 -0
  101. truthound_dashboard/core/plugins/versioning/constraints.py +377 -0
  102. truthound_dashboard/core/plugins/versioning/dependencies.py +541 -0
  103. truthound_dashboard/core/plugins/versioning/semver.py +266 -0
  104. truthound_dashboard/core/profile_comparison.py +601 -0
  105. truthound_dashboard/core/report_history.py +570 -0
  106. truthound_dashboard/core/reporters/__init__.py +57 -0
  107. truthound_dashboard/core/reporters/base.py +296 -0
  108. truthound_dashboard/core/reporters/csv_reporter.py +155 -0
  109. truthound_dashboard/core/reporters/html_reporter.py +598 -0
  110. truthound_dashboard/core/reporters/i18n/__init__.py +65 -0
  111. truthound_dashboard/core/reporters/i18n/base.py +494 -0
  112. truthound_dashboard/core/reporters/i18n/catalogs.py +930 -0
  113. truthound_dashboard/core/reporters/json_reporter.py +160 -0
  114. truthound_dashboard/core/reporters/junit_reporter.py +233 -0
  115. truthound_dashboard/core/reporters/markdown_reporter.py +207 -0
  116. truthound_dashboard/core/reporters/pdf_reporter.py +209 -0
  117. truthound_dashboard/core/reporters/registry.py +272 -0
  118. truthound_dashboard/core/rule_generator.py +2088 -0
  119. truthound_dashboard/core/scheduler.py +822 -12
  120. truthound_dashboard/core/schema_evolution.py +858 -0
  121. truthound_dashboard/core/services.py +152 -9
  122. truthound_dashboard/core/statistics.py +718 -0
  123. truthound_dashboard/core/streaming_anomaly.py +883 -0
  124. truthound_dashboard/core/triggers/__init__.py +45 -0
  125. truthound_dashboard/core/triggers/base.py +226 -0
  126. truthound_dashboard/core/triggers/evaluators.py +609 -0
  127. truthound_dashboard/core/triggers/factory.py +363 -0
  128. truthound_dashboard/core/unified_alerts.py +870 -0
  129. truthound_dashboard/core/validation_limits.py +509 -0
  130. truthound_dashboard/core/versioning.py +709 -0
  131. truthound_dashboard/core/websocket/__init__.py +59 -0
  132. truthound_dashboard/core/websocket/manager.py +512 -0
  133. truthound_dashboard/core/websocket/messages.py +130 -0
  134. truthound_dashboard/db/__init__.py +30 -0
  135. truthound_dashboard/db/models.py +3375 -3
  136. truthound_dashboard/main.py +22 -0
  137. truthound_dashboard/schemas/__init__.py +396 -1
  138. truthound_dashboard/schemas/anomaly.py +1258 -0
  139. truthound_dashboard/schemas/base.py +4 -0
  140. truthound_dashboard/schemas/cross_alerts.py +334 -0
  141. truthound_dashboard/schemas/drift_monitor.py +890 -0
  142. truthound_dashboard/schemas/lineage.py +428 -0
  143. truthound_dashboard/schemas/maintenance.py +154 -0
  144. truthound_dashboard/schemas/model_monitoring.py +374 -0
  145. truthound_dashboard/schemas/notifications_advanced.py +1363 -0
  146. truthound_dashboard/schemas/openlineage.py +704 -0
  147. truthound_dashboard/schemas/plugins.py +1293 -0
  148. truthound_dashboard/schemas/profile.py +420 -34
  149. truthound_dashboard/schemas/profile_comparison.py +242 -0
  150. truthound_dashboard/schemas/reports.py +285 -0
  151. truthound_dashboard/schemas/rule_suggestion.py +434 -0
  152. truthound_dashboard/schemas/schema_evolution.py +164 -0
  153. truthound_dashboard/schemas/source.py +117 -2
  154. truthound_dashboard/schemas/triggers.py +511 -0
  155. truthound_dashboard/schemas/unified_alerts.py +223 -0
  156. truthound_dashboard/schemas/validation.py +25 -1
  157. truthound_dashboard/schemas/validators/__init__.py +11 -0
  158. truthound_dashboard/schemas/validators/base.py +151 -0
  159. truthound_dashboard/schemas/versioning.py +152 -0
  160. truthound_dashboard/static/index.html +2 -2
  161. {truthound_dashboard-1.3.1.dist-info → truthound_dashboard-1.4.0.dist-info}/METADATA +142 -22
  162. truthound_dashboard-1.4.0.dist-info/RECORD +239 -0
  163. truthound_dashboard/static/assets/index-BZG20KuF.js +0 -586
  164. truthound_dashboard/static/assets/index-D_HyZ3pb.css +0 -1
  165. truthound_dashboard/static/assets/unmerged_dictionaries-CtpqQBm0.js +0 -1
  166. truthound_dashboard-1.3.1.dist-info/RECORD +0 -110
  167. {truthound_dashboard-1.3.1.dist-info → truthound_dashboard-1.4.0.dist-info}/WHEEL +0 -0
  168. {truthound_dashboard-1.3.1.dist-info → truthound_dashboard-1.4.0.dist-info}/entry_points.txt +0 -0
  169. {truthound_dashboard-1.3.1.dist-info → truthound_dashboard-1.4.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,1302 @@
1
+ """Anomaly detection API endpoints.
2
+
3
+ This module provides API endpoints for ML-based anomaly detection,
4
+ including streaming real-time detection.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from datetime import datetime
10
+ from typing import Annotated
11
+
12
+ from fastapi import APIRouter, HTTPException, Path, Query, WebSocket, WebSocketDisconnect
13
+
14
+ from truthound_dashboard.schemas.anomaly import (
15
+ AnomalyDetectionRequest,
16
+ AnomalyDetectionResponse,
17
+ AnomalyDetectionListResponse,
18
+ AnomalyStatus,
19
+ AlgorithmListResponse,
20
+ AlgorithmInfo,
21
+ BatchDetectionRequest,
22
+ BatchDetectionResponse,
23
+ BatchDetectionStatus,
24
+ BatchDetectionListResponse,
25
+ BatchSourceResult,
26
+ AlgorithmComparisonRequest,
27
+ AlgorithmComparisonResult,
28
+ AlgorithmComparisonResultItem,
29
+ AgreementSummary,
30
+ AgreementRecord,
31
+ AgreementLevel,
32
+ ExplainabilityRequest,
33
+ ExplainabilityResponse,
34
+ CachedExplanationsListResponse,
35
+ CachedExplanationResponse,
36
+ FeatureContribution,
37
+ AnomalyExplanationResult,
38
+ # Streaming schemas
39
+ StreamingSessionCreate,
40
+ StreamingSessionResponse,
41
+ StreamingSessionListResponse,
42
+ StreamingDataPoint,
43
+ StreamingDataBatch,
44
+ StreamingAlert as StreamingAlertSchema,
45
+ StreamingAlertListResponse,
46
+ StreamingStatusResponse,
47
+ StreamingRecentDataResponse,
48
+ StreamingAlgorithmListResponse,
49
+ StreamingStatistics as StreamingStatisticsSchema,
50
+ StreamingAlgorithm as StreamingAlgorithmSchema,
51
+ StreamingSessionStatus as StreamingStatusSchema,
52
+ get_streaming_algorithm_info_list,
53
+ )
54
+ from truthound_dashboard.core.streaming_anomaly import (
55
+ StreamingAnomalyDetector,
56
+ StreamingAlgorithm,
57
+ StreamingSessionStatus,
58
+ get_streaming_detector,
59
+ )
60
+
61
+ from .deps import AnomalyDetectionServiceDep, AnomalyExplainerServiceDep
62
+
63
+ router = APIRouter()
64
+
65
+
66
+ # =============================================================================
67
+ # Detection Endpoints
68
+ # =============================================================================
69
+
70
+
71
@router.post(
    "/sources/{source_id}/anomaly/detect",
    response_model=AnomalyDetectionResponse,
    status_code=201,
    summary="Run anomaly detection",
    description="Run ML-based anomaly detection on a data source",
)
async def run_anomaly_detection(
    service: AnomalyDetectionServiceDep,
    source_id: Annotated[str, Path(description="Source ID")],
    request: AnomalyDetectionRequest,
) -> AnomalyDetectionResponse:
    """Run anomaly detection on a source.

    This creates a detection record and immediately runs the detection.

    Args:
        service: Injected anomaly detection service.
        source_id: Source ID to analyze.
        request: Detection request with algorithm and config.

    Returns:
        Detection results.

    Raises:
        HTTPException: 404 if source not found.
    """
    try:
        # Create the detection record first, then run it; a failure during
        # the run still leaves an auditable record behind.
        detection = await service.create_detection(
            source_id=source_id,
            algorithm=request.algorithm.value,
            columns=request.columns,
            config=request.config,
            sample_size=request.sample_size,
        )

        # Run the detection synchronously and return its final state.
        detection = await service.run_detection(detection.id)

        return _detection_to_response(detection)

    except ValueError as e:
        # Chain the original error so tracebacks keep the root cause (B904).
        raise HTTPException(status_code=404, detail=str(e)) from e
115
+
116
+
117
@router.get(
    "/anomaly/{detection_id}",
    response_model=AnomalyDetectionResponse,
    summary="Get detection result",
    description="Get a specific anomaly detection result by ID",
)
async def get_detection(
    service: AnomalyDetectionServiceDep,
    detection_id: Annotated[str, Path(description="Detection ID")],
) -> AnomalyDetectionResponse:
    """Fetch a single anomaly detection result by its identifier.

    Args:
        service: Injected anomaly detection service.
        detection_id: Detection unique identifier.

    Returns:
        Detection details.

    Raises:
        HTTPException: 404 if detection not found.
    """
    # NOTE(review): this parameterized route is registered before the static
    # "/anomaly/algorithms" and "/anomaly/batch" GET routes below; Starlette
    # matches routes in registration order, so requests to those static paths
    # appear to be captured here (detection_id="algorithms"/"batch") — confirm
    # and consider registering static routes first.
    if (detection := await service.get_detection(detection_id)) is None:
        raise HTTPException(status_code=404, detail="Detection not found")
    return _detection_to_response(detection)
143
+
144
+
145
@router.get(
    "/sources/{source_id}/anomaly/detections",
    response_model=AnomalyDetectionListResponse,
    summary="List detections",
    description="Get detection history for a source",
)
async def list_detections(
    service: AnomalyDetectionServiceDep,
    source_id: Annotated[str, Path(description="Source ID")],
    offset: Annotated[int, Query(ge=0, description="Offset for pagination")] = 0,
    limit: Annotated[
        int, Query(ge=1, le=100, description="Maximum items to return")
    ] = 50,
) -> AnomalyDetectionListResponse:
    """Return one page of the detection history for a source.

    Args:
        service: Injected anomaly detection service.
        source_id: Source ID.
        offset: Number of items to skip.
        limit: Maximum items to return.

    Returns:
        Paginated list of detections.
    """
    records = await service.get_detections_by_source(
        source_id,
        offset=offset,
        limit=limit,
    )
    items = [_detection_to_response(record) for record in records]
    return AnomalyDetectionListResponse(
        data=items,
        total=len(items),  # TODO: Get actual total count
        offset=offset,
        limit=limit,
    )
181
+
182
+
183
@router.get(
    "/sources/{source_id}/anomaly/latest",
    response_model=AnomalyDetectionResponse,
    summary="Get latest detection",
    description="Get the latest anomaly detection result for a source",
)
async def get_latest_detection(
    service: AnomalyDetectionServiceDep,
    source_id: Annotated[str, Path(description="Source ID")],
) -> AnomalyDetectionResponse:
    """Return the most recent detection recorded for a source.

    Args:
        service: Injected anomaly detection service.
        source_id: Source ID.

    Returns:
        Latest detection result.

    Raises:
        HTTPException: 404 if no detections found.
    """
    # A missing result means the source has never been analyzed.
    if (latest := await service.get_latest_detection(source_id)) is None:
        raise HTTPException(
            status_code=404,
            detail="No detections found for this source",
        )
    return _detection_to_response(latest)
212
+
213
+
214
+ # =============================================================================
215
+ # Algorithm Information Endpoints
216
+ # =============================================================================
217
+
218
+
219
@router.get(
    "/anomaly/algorithms",
    response_model=AlgorithmListResponse,
    summary="List algorithms",
    description="Get information about available anomaly detection algorithms",
)
async def list_algorithms(
    service: AnomalyDetectionServiceDep,
) -> AlgorithmListResponse:
    """Describe every anomaly detection algorithm the service offers.

    Args:
        service: Injected anomaly detection service.

    Returns:
        List of algorithm information.
    """
    infos = service.get_algorithm_info()
    return AlgorithmListResponse(
        algorithms=[AlgorithmInfo(**info) for info in infos],
        total=len(infos),
    )
241
+
242
+
243
+ # =============================================================================
244
+ # Explainability Endpoints
245
+ # =============================================================================
246
+
247
+
248
@router.post(
    "/anomaly/{detection_id}/explain",
    response_model=ExplainabilityResponse,
    summary="Generate anomaly explanations",
    description="Generate SHAP/LIME explanations for specific anomaly rows",
)
async def explain_anomaly(
    explainer_service: AnomalyExplainerServiceDep,
    detection_id: Annotated[str, Path(description="Detection ID")],
    request: ExplainabilityRequest,
) -> ExplainabilityResponse:
    """Generate SHAP/LIME explanations for anomaly rows.

    This uses SHAP (SHapley Additive exPlanations) to provide
    interpretability for ML-based anomaly detection results.

    For tree-based models (Isolation Forest), uses TreeExplainer.
    For other models, uses KernelExplainer as a fallback.

    Args:
        explainer_service: Injected explainer service.
        detection_id: Anomaly detection ID to explain.
        request: Explanation request with row indices and options.

    Returns:
        Explanations with feature contributions for each row.

    Raises:
        HTTPException: 404 if detection not found, 400 if invalid request.
    """
    try:
        result = await explainer_service.explain_anomaly(
            detection_id=detection_id,
            row_indices=request.row_indices,
            max_features=request.max_features,
            sample_background=request.sample_background,
        )

        # An "error" key alongside an empty explanations list means the whole
        # request failed rather than a partial result — surface it as a 400.
        # (HTTPException is not a ValueError, so it passes through the handler
        # below.)
        if "error" in result and result.get("explanations") == []:
            raise HTTPException(status_code=400, detail=result["error"])

        return ExplainabilityResponse(
            detection_id=result.get("detection_id", detection_id),
            algorithm=result.get("algorithm", "unknown"),
            row_indices=result.get("row_indices", request.row_indices),
            feature_names=result.get("feature_names", []),
            explanations=[
                AnomalyExplanationResult(
                    row_index=exp["row_index"],
                    anomaly_score=exp["anomaly_score"],
                    feature_contributions=[
                        FeatureContribution(**fc)
                        for fc in exp["feature_contributions"]
                    ],
                    total_shap=exp["total_shap"],
                    summary=exp["summary"],
                )
                for exp in result.get("explanations", [])
            ],
            generated_at=result.get("generated_at", ""),
            error=result.get("error"),
        )

    except ValueError as e:
        # Chain the original error so tracebacks keep the root cause (B904).
        raise HTTPException(status_code=404, detail=str(e)) from e
314
+
315
+
316
@router.get(
    "/anomaly/{detection_id}/explanations",
    response_model=CachedExplanationsListResponse,
    summary="Get cached explanations",
    description="Get cached SHAP/LIME explanations for a detection",
)
async def get_cached_explanations(
    explainer_service: AnomalyExplainerServiceDep,
    detection_id: Annotated[str, Path(description="Detection ID")],
    row_indices: Annotated[
        str | None,
        Query(description="Comma-separated row indices to filter (optional)")
    ] = None,
) -> CachedExplanationsListResponse:
    """Get cached explanations for a detection.

    Retrieves previously generated explanations from the database.
    Use this to avoid re-computing explanations for the same rows.

    Args:
        explainer_service: Injected explainer service.
        detection_id: Anomaly detection ID.
        row_indices: Optional comma-separated list of row indices to filter.

    Returns:
        List of cached explanations.

    Raises:
        HTTPException: 400 if ``row_indices`` is not comma-separated integers.
    """
    # Parse the optional comma-separated filter into integers.
    indices_list: list[int] | None = None
    if row_indices:
        try:
            indices_list = [int(i.strip()) for i in row_indices.split(",")]
        except ValueError:
            # The friendly message fully replaces the parse error, so suppress
            # the implicit exception context deliberately (B904).
            raise HTTPException(
                status_code=400,
                detail="Invalid row_indices format. Use comma-separated integers."
            ) from None

    explanations = await explainer_service.get_cached_explanations(
        detection_id=detection_id,
        row_indices=indices_list,
    )

    return CachedExplanationsListResponse(
        detection_id=detection_id,
        explanations=[
            CachedExplanationResponse(
                id=exp["id"],
                detection_id=exp["detection_id"],
                row_index=exp["row_index"],
                anomaly_score=exp["anomaly_score"],
                feature_contributions=[
                    FeatureContribution(**fc)
                    for fc in exp["feature_contributions"]
                ],
                total_shap=exp["total_shap"],
                summary=exp["summary"],
                generated_at=exp.get("generated_at"),
            )
            for exp in explanations
        ],
        total=len(explanations),
    )
379
+
380
+
381
+ # =============================================================================
382
+ # Batch Detection Endpoints
383
+ # =============================================================================
384
+
385
+
386
@router.post(
    "/anomaly/batch",
    response_model=BatchDetectionResponse,
    status_code=201,
    summary="Create batch detection job",
    description="Create a batch anomaly detection job for multiple sources",
)
async def create_batch_detection(
    service: AnomalyDetectionServiceDep,
    request: BatchDetectionRequest,
) -> BatchDetectionResponse:
    """Create a batch anomaly detection job.

    This creates a batch job and immediately starts execution.

    Args:
        service: Injected anomaly detection service.
        request: Batch detection request with source IDs and config.

    Returns:
        Created batch job with initial status.

    Raises:
        HTTPException: 400 if no valid sources.
    """
    try:
        # Create the batch job record.
        batch_job = await service.create_batch_detection(
            source_ids=request.source_ids,
            name=request.name,
            algorithm=request.algorithm.value,
            config=request.config,
            sample_size=request.sample_size,
        )

        # Start execution in background (for now, run synchronously).
        batch_job = await service.run_batch_detection(batch_job.id)

        return await _batch_job_to_response(service, batch_job)

    except ValueError as e:
        # Chain the original error so tracebacks keep the root cause (B904).
        raise HTTPException(status_code=400, detail=str(e)) from e
428
+
429
+
430
@router.get(
    "/anomaly/batch/{batch_id}",
    response_model=BatchDetectionResponse,
    summary="Get batch job status",
    description="Get the status and progress of a batch detection job",
)
async def get_batch_job(
    service: AnomalyDetectionServiceDep,
    batch_id: Annotated[str, Path(description="Batch job ID")],
) -> BatchDetectionResponse:
    """Look up a batch detection job and report its progress.

    Args:
        service: Injected anomaly detection service.
        batch_id: Batch job unique identifier.

    Returns:
        Batch job details and progress.

    Raises:
        HTTPException: 404 if batch job not found.
    """
    if (job := await service.get_batch_job(batch_id)) is None:
        raise HTTPException(status_code=404, detail="Batch job not found")
    return await _batch_job_to_response(service, job)
456
+
457
+
458
@router.get(
    "/anomaly/batch/{batch_id}/results",
    response_model=list[BatchSourceResult],
    summary="Get batch results",
    description="Get detailed results for each source in a batch job",
)
async def get_batch_results(
    service: AnomalyDetectionServiceDep,
    batch_id: Annotated[str, Path(description="Batch job ID")],
) -> list[BatchSourceResult]:
    """Get detailed results for a batch job.

    Args:
        service: Injected anomaly detection service.
        batch_id: Batch job unique identifier.

    Returns:
        List of results for each source.

    Raises:
        HTTPException: 404 if batch job not found.
    """
    try:
        results = await service.get_batch_results(batch_id)
        return [BatchSourceResult(**r) for r in results]
    except ValueError as e:
        # Chain the original error so tracebacks keep the root cause (B904).
        raise HTTPException(status_code=404, detail=str(e)) from e
485
+
486
+
487
@router.get(
    "/anomaly/batch",
    response_model=BatchDetectionListResponse,
    summary="List batch jobs",
    description="List all batch detection jobs",
)
async def list_batch_jobs(
    service: AnomalyDetectionServiceDep,
    offset: Annotated[int, Query(ge=0, description="Offset for pagination")] = 0,
    limit: Annotated[
        int, Query(ge=1, le=100, description="Maximum items to return")
    ] = 50,
) -> BatchDetectionListResponse:
    """List all batch detection jobs.

    Args:
        service: Injected anomaly detection service.
        offset: Number of items to skip.
        limit: Maximum items to return.

    Returns:
        Paginated list of batch jobs.
    """
    batch_jobs = await service.list_batch_jobs(offset=offset, limit=limit)
    # Async comprehension (PEP 530) replaces the manual append loop; the
    # awaits still run sequentially, exactly as before.
    responses = [
        await _batch_job_to_response(service, job) for job in batch_jobs
    ]

    return BatchDetectionListResponse(
        data=responses,
        total=len(responses),  # TODO: Get actual total count
        offset=offset,
        limit=limit,
    )
521
+
522
+
523
@router.post(
    "/anomaly/batch/{batch_id}/cancel",
    response_model=BatchDetectionResponse,
    summary="Cancel batch job",
    description="Cancel a running batch detection job",
)
async def cancel_batch_job(
    service: AnomalyDetectionServiceDep,
    batch_id: Annotated[str, Path(description="Batch job ID")],
) -> BatchDetectionResponse:
    """Request cancellation of a running batch job.

    Args:
        service: Injected anomaly detection service.
        batch_id: Batch job unique identifier.

    Returns:
        Updated batch job with cancelled status.

    Raises:
        HTTPException: 404 if batch job not found.
    """
    if (job := await service.cancel_batch_job(batch_id)) is None:
        raise HTTPException(status_code=404, detail="Batch job not found")
    return await _batch_job_to_response(service, job)
549
+
550
+
551
@router.delete(
    "/anomaly/batch/{batch_id}",
    status_code=204,
    summary="Delete batch job",
    description="Delete a batch detection job",
)
async def delete_batch_job(
    service: AnomalyDetectionServiceDep,
    batch_id: Annotated[str, Path(description="Batch job ID")],
) -> None:
    """Remove a batch detection job.

    Args:
        service: Injected anomaly detection service.
        batch_id: Batch job unique identifier.

    Raises:
        HTTPException: 404 if batch job not found.
    """
    # The service reports whether anything was actually removed; translate
    # "nothing deleted" into a 404 for the client.
    if not await service.delete_batch_job(batch_id):
        raise HTTPException(status_code=404, detail="Batch job not found")
573
+
574
+
575
+ # =============================================================================
576
+ # Algorithm Comparison Endpoints
577
+ # =============================================================================
578
+
579
+
580
@router.post(
    "/anomaly/compare",
    response_model=AlgorithmComparisonResult,
    status_code=201,
    summary="Compare algorithms",
    description="Run multiple anomaly detection algorithms and compare results",
)
async def compare_algorithms(
    service: AnomalyDetectionServiceDep,
    request: AlgorithmComparisonRequest,
    source_id: Annotated[str, Query(description="Source ID to analyze")],
) -> AlgorithmComparisonResult:
    """Compare multiple anomaly detection algorithms.

    Runs all specified algorithms on the same data and returns
    a comparison with agreement analysis.

    Args:
        service: Injected anomaly detection service.
        request: Comparison request with algorithms to compare.
        source_id: Source ID to analyze.

    Returns:
        Comparison results with agreement analysis.

    Raises:
        HTTPException: 404 if source not found, 400 if invalid request.
    """
    try:
        # Run every requested algorithm against the same sampled data.
        result = await service.run_comparison(
            source_id=source_id,
            algorithms=[algo.value for algo in request.algorithms],
            columns=request.columns,
            config=request.config,
            sample_size=request.sample_size,
        )

        return _comparison_to_response(result)

    except ValueError as e:
        # Chain the original error so tracebacks keep the root cause (B904).
        raise HTTPException(status_code=404, detail=str(e)) from e
622
+
623
+
624
@router.get(
    "/anomaly/compare/{comparison_id}",
    response_model=AlgorithmComparisonResult,
    summary="Get comparison result",
    description="Get a specific algorithm comparison result by ID",
)
async def get_comparison(
    service: AnomalyDetectionServiceDep,
    comparison_id: Annotated[str, Path(description="Comparison ID")],
) -> AlgorithmComparisonResult:
    """Get a specific algorithm comparison result.

    Note: Comparison results are computed on-the-fly and not persisted.
    This endpoint would require storing comparison results to be functional.

    Args:
        service: Injected anomaly detection service.
        comparison_id: Comparison unique identifier.

    Returns:
        Comparison details.

    Raises:
        HTTPException: 404 - comparisons are not persisted.
    """
    # Comparisons are produced on demand by POST /anomaly/compare and never
    # stored, so every lookup by ID is answered with 404.
    detail = (
        "Comparison results are computed on-the-fly and not persisted. "
        "Please run a new comparison using POST /anomaly/compare"
    )
    raise HTTPException(status_code=404, detail=detail)
655
+
656
+
657
+ # =============================================================================
658
+ # Helper Functions
659
+ # =============================================================================
660
+
661
+
662
def _comparison_to_response(result: dict) -> AlgorithmComparisonResult:
    """Convert a comparison result dict to the response schema.

    Args:
        result: Raw comparison payload produced by the anomaly service.

    Returns:
        Fully populated ``AlgorithmComparisonResult``.
    """
    from truthound_dashboard.schemas.anomaly import AnomalyAlgorithm

    # Per-algorithm outcomes: comprehension instead of a manual append loop
    # (ruff PERF401).
    algorithm_results = [
        AlgorithmComparisonResultItem(
            algorithm=AnomalyAlgorithm(ar["algorithm"]),
            display_name=ar["display_name"],
            status=AnomalyStatus(ar["status"]),
            anomaly_count=ar.get("anomaly_count"),
            anomaly_rate=ar.get("anomaly_rate"),
            duration_ms=ar.get("duration_ms"),
            error_message=ar.get("error_message"),
            anomaly_indices=ar.get("anomaly_indices", []),
        )
        for ar in result.get("algorithm_results", [])
    ]

    # Cross-algorithm agreement summary is optional in the payload.
    agreement_summary = None
    if result.get("agreement_summary"):
        summary_data = result["agreement_summary"]
        agreement_summary = AgreementSummary(
            total_algorithms=summary_data["total_algorithms"],
            total_unique_anomalies=summary_data["total_unique_anomalies"],
            all_agree_count=summary_data["all_agree_count"],
            majority_agree_count=summary_data["majority_agree_count"],
            some_agree_count=summary_data["some_agree_count"],
            one_only_count=summary_data["one_only_count"],
            agreement_matrix=summary_data.get("agreement_matrix", []),
        )

    # Row-level agreement records are likewise optional.
    agreement_records = None
    if result.get("agreement_records"):
        agreement_records = [
            AgreementRecord(
                row_index=rec["row_index"],
                detected_by=[AnomalyAlgorithm(a) for a in rec["detected_by"]],
                detection_count=rec["detection_count"],
                agreement_level=AgreementLevel(rec["agreement_level"]),
                confidence_score=rec["confidence_score"],
                column_values=rec.get("column_values", {}),
            )
            for rec in result["agreement_records"]
        ]

    return AlgorithmComparisonResult(
        id=result["id"],
        source_id=result["source_id"],
        status=AnomalyStatus(result["status"]),
        total_rows=result.get("total_rows"),
        columns_analyzed=result.get("columns_analyzed"),
        algorithm_results=algorithm_results,
        agreement_summary=agreement_summary,
        agreement_records=agreement_records,
        total_duration_ms=result.get("total_duration_ms"),
        error_message=result.get("error_message"),
        created_at=result["created_at"],
        completed_at=result.get("completed_at"),
    )
725
+
726
+
727
async def _batch_job_to_response(service, batch_job) -> BatchDetectionResponse:
    """Map a batch job model onto the BatchDetectionResponse schema."""
    from truthound_dashboard.schemas.anomaly import (
        AnomalyAlgorithm,
        BatchDetectionStatus,
        BatchSourceResult,
    )

    # Per-source results are fetched only when the job has serialized results
    # or at least one source; a ValueError from the service (or from schema
    # validation) means "no detailed results available".
    source_results = None
    if batch_job.results_json or batch_job.total_sources > 0:
        try:
            raw_results = await service.get_batch_results(batch_job.id)
            source_results = [BatchSourceResult(**item) for item in raw_results]
        except ValueError:
            source_results = None

    return BatchDetectionResponse(
        id=batch_job.id,
        name=batch_job.name,
        status=BatchDetectionStatus(batch_job.status),
        algorithm=AnomalyAlgorithm(batch_job.algorithm),
        config=batch_job.config,
        total_sources=batch_job.total_sources,
        completed_sources=batch_job.completed_sources,
        failed_sources=batch_job.failed_sources,
        progress_percent=batch_job.progress_percent,
        current_source_id=batch_job.current_source_id,
        total_anomalies=batch_job.total_anomalies,
        total_rows_analyzed=batch_job.total_rows_analyzed,
        average_anomaly_rate=batch_job.average_anomaly_rate,
        results=source_results,
        duration_ms=batch_job.duration_ms,
        error_message=batch_job.error_message,
        created_at=batch_job.created_at.isoformat() if batch_job.created_at else "",
        started_at=batch_job.started_at.isoformat() if batch_job.started_at else None,
        completed_at=batch_job.completed_at.isoformat() if batch_job.completed_at else None,
    )
765
+
766
+
767
def _detection_to_response(detection) -> AnomalyDetectionResponse:
    """Map a detection model onto the AnomalyDetectionResponse schema."""
    from truthound_dashboard.schemas.anomaly import (
        AnomalyAlgorithm,
        AnomalyStatus,
        AnomalyRecord,
        ColumnAnomalySummary,
    )

    payload = detection.result_json or {}

    # Only the first 100 anomaly records are embedded in the response.
    anomalies = None
    if "anomalies" in payload:
        anomalies = [AnomalyRecord(**rec) for rec in payload["anomalies"][:100]]

    column_summaries = None
    if "column_summaries" in payload:
        column_summaries = [
            ColumnAnomalySummary(**summary)
            for summary in payload["column_summaries"]
        ]

    return AnomalyDetectionResponse(
        id=detection.id,
        source_id=detection.source_id,
        status=AnomalyStatus(detection.status),
        algorithm=AnomalyAlgorithm(detection.algorithm),
        config=detection.config,
        total_rows=detection.total_rows,
        anomaly_count=detection.anomaly_count,
        anomaly_rate=detection.anomaly_rate,
        columns_analyzed=detection.columns_analyzed,
        column_summaries=column_summaries,
        anomalies=anomalies,
        duration_ms=detection.duration_ms,
        error_message=detection.error_message,
        created_at=detection.created_at.isoformat() if detection.created_at else "",
        started_at=detection.started_at.isoformat() if detection.started_at else None,
        completed_at=detection.completed_at.isoformat() if detection.completed_at else None,
    )
809
+
810
+
811
+ # =============================================================================
812
+ # Streaming Anomaly Detection Endpoints
813
+ # =============================================================================
814
+
815
+
816
@router.post(
    "/anomaly/streaming/start",
    response_model=StreamingSessionResponse,
    status_code=201,
    summary="Start streaming session",
    description="Create and start a new streaming anomaly detection session",
)
async def start_streaming_session(
    request: StreamingSessionCreate,
) -> StreamingSessionResponse:
    """Create a streaming anomaly detection session and immediately start it.

    Args:
        request: Session configuration.

    Returns:
        The newly created session in its started state.
    """
    detector = get_streaming_detector()

    # Translate the API-level algorithm enum into the core streaming enum.
    core_algorithm = StreamingAlgorithm(request.algorithm.value)

    created = await detector.create_session(
        source_id=request.source_id,
        algorithm=core_algorithm,
        window_size=request.window_size,
        threshold=request.threshold,
        columns=request.columns or [],
        config=request.config,
    )

    started = await detector.start_session(created.id)
    return _session_to_response(started)
853
+
854
+
855
@router.post(
    "/anomaly/streaming/{session_id}/data",
    response_model=StreamingAlertSchema | None,
    summary="Push data point",
    description="Push a data point to a streaming session for anomaly detection",
)
async def push_streaming_data(
    session_id: Annotated[str, Path(description="Session ID")],
    data_point: StreamingDataPoint,
) -> StreamingAlertSchema | None:
    """Push a single data point into a streaming session.

    Args:
        session_id: Session ID.
        data_point: Data point to push.

    Returns:
        Alert if an anomaly was detected, None otherwise.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    # Best-effort timestamp parsing: an unparseable client timestamp is
    # ignored (server-side timestamping applies) rather than rejecting the
    # whole data point.
    timestamp = None
    if data_point.timestamp:
        try:
            timestamp = datetime.fromisoformat(data_point.timestamp)
        except ValueError:
            pass

    try:
        alert = await detector.push_data_point(
            session_id=session_id,
            data=data_point.data,
            timestamp=timestamp,
        )
    except ValueError as e:
        # Chain the original error so logs keep the root cause (ruff B904).
        raise HTTPException(status_code=404, detail=str(e)) from e

    return _alert_to_response(alert) if alert is not None else None
900
+
901
+
902
@router.post(
    "/anomaly/streaming/{session_id}/batch",
    response_model=list[StreamingAlertSchema],
    summary="Push data batch",
    description="Push a batch of data points to a streaming session",
)
async def push_streaming_batch(
    session_id: Annotated[str, Path(description="Session ID")],
    batch: StreamingDataBatch,
) -> list[StreamingAlertSchema]:
    """Push a batch of data points to a streaming session.

    Args:
        session_id: Session ID.
        batch: Batch of data points.

    Returns:
        List of alerts for detected anomalies.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    def _parse_timestamp(raw):
        """Parse an ISO-8601 timestamp, defaulting to 'now' when missing or invalid."""
        if raw:
            try:
                return datetime.fromisoformat(raw)
            except ValueError:
                pass
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
        # datetime.now(timezone.utc) would yield timezone-aware values -
        # confirm downstream naive/aware comparisons before switching.
        return datetime.utcnow()

    data_points = [dp.data for dp in batch.data_points]
    timestamps = [_parse_timestamp(dp.timestamp) for dp in batch.data_points]

    try:
        alerts = await detector.push_batch(
            session_id=session_id,
            data_points=data_points,
            timestamps=timestamps,
        )
    except ValueError as e:
        # Chain the original error so logs keep the root cause (ruff B904).
        raise HTTPException(status_code=404, detail=str(e)) from e

    return [_alert_to_response(alert) for alert in alerts]
949
+
950
+
951
@router.get(
    "/anomaly/streaming/{session_id}/status",
    response_model=StreamingStatusResponse,
    summary="Get session status",
    description="Get the current status and statistics of a streaming session",
)
async def get_streaming_status(
    session_id: Annotated[str, Path(description="Session ID")],
) -> StreamingStatusResponse:
    """Return the current status and rolling statistics for a session.

    Args:
        session_id: Session ID.

    Returns:
        Session status with per-column statistics and recent alerts.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    session = await detector.get_session(session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")

    raw_stats = await detector.get_statistics(session_id)
    latest_alerts = await detector.get_alerts(session_id, limit=10)

    # Per-column statistics keyed by column name.
    per_column = {
        name: StreamingStatisticsSchema(**values)
        for name, values in raw_stats.get("columns", {}).items()
    }

    return StreamingStatusResponse(
        session_id=session.id,
        status=StreamingStatusSchema(session.status.value),
        total_points=raw_stats.get("total_points", 0),
        total_alerts=raw_stats.get("total_alerts", 0),
        buffer_utilization=raw_stats.get("buffer_utilization", 0),
        statistics=per_column,
        recent_alerts=[_alert_to_response(a) for a in latest_alerts],
    )
992
+
993
+
994
@router.post(
    "/anomaly/streaming/{session_id}/stop",
    response_model=StreamingSessionResponse,
    summary="Stop streaming session",
    description="Stop a running streaming session",
)
async def stop_streaming_session(
    session_id: Annotated[str, Path(description="Session ID")],
) -> StreamingSessionResponse:
    """Stop a running streaming session and return its final state.

    Args:
        session_id: Session ID to stop.

    Returns:
        The session after the stop transition.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    try:
        stopped = await detector.stop_session(session_id)
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e))

    return _session_to_response(stopped)
1021
+
1022
+
1023
@router.delete(
    "/anomaly/streaming/{session_id}",
    status_code=204,
    summary="Delete streaming session",
    description="Delete a streaming session",
)
async def delete_streaming_session(
    session_id: Annotated[str, Path(description="Session ID")],
) -> None:
    """Remove a streaming session entirely.

    Args:
        session_id: Session ID to delete.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    # delete_session reports success as a boolean rather than raising.
    if not await detector.delete_session(session_id):
        raise HTTPException(status_code=404, detail="Session not found")
1045
+
1046
+
1047
@router.get(
    "/anomaly/streaming/{session_id}/alerts",
    response_model=StreamingAlertListResponse,
    summary="List session alerts",
    description="Get alerts from a streaming session",
)
async def list_streaming_alerts(
    session_id: Annotated[str, Path(description="Session ID")],
    offset: Annotated[int, Query(ge=0, description="Offset for pagination")] = 0,
    limit: Annotated[
        int, Query(ge=1, le=100, description="Maximum items to return")
    ] = 50,
) -> StreamingAlertListResponse:
    """List alerts from a streaming session.

    Args:
        session_id: Session ID.
        offset: Pagination offset.
        limit: Maximum items.

    Returns:
        Paginated list of alerts.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    # Existence check first so a missing session yields 404 rather than an
    # empty page.
    session = await detector.get_session(session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")

    alerts = await detector.get_alerts(session_id, limit=limit, offset=offset)

    # NOTE(review): reaches into the session's private `_alerts` list for the
    # total count; a public count accessor on the detector would be cleaner.
    return StreamingAlertListResponse(
        data=[_alert_to_response(alert) for alert in alerts],
        total=len(session._alerts),
        offset=offset,
        limit=limit,
    )
1087
+
1088
+
1089
@router.get(
    "/anomaly/streaming/{session_id}/data",
    response_model=StreamingRecentDataResponse,
    summary="Get recent data",
    description="Get recent data points from a streaming session",
)
async def get_streaming_recent_data(
    session_id: Annotated[str, Path(description="Session ID")],
    limit: Annotated[
        int, Query(ge=1, le=1000, description="Maximum items to return")
    ] = 100,
) -> StreamingRecentDataResponse:
    """Return the most recent data points buffered by a streaming session.

    Args:
        session_id: Session ID.
        limit: Maximum points to return.

    Returns:
        Recent data points with their count.

    Raises:
        HTTPException: 404 if session not found.
    """
    detector = get_streaming_detector()

    session = await detector.get_session(session_id)
    if session is None:
        raise HTTPException(status_code=404, detail="Session not found")

    recent = await detector.get_recent_data(session_id, limit=limit)

    return StreamingRecentDataResponse(
        session_id=session_id,
        data_points=recent,
        total=len(recent),
    )
1126
+
1127
+
1128
@router.get(
    "/anomaly/streaming/sessions",
    response_model=StreamingSessionListResponse,
    summary="List streaming sessions",
    description="List all active streaming sessions",
)
async def list_streaming_sessions(
    offset: Annotated[int, Query(ge=0, description="Offset for pagination")] = 0,
    limit: Annotated[
        int, Query(ge=1, le=100, description="Maximum items to return")
    ] = 50,
) -> StreamingSessionListResponse:
    """List every streaming session, paginated.

    Args:
        offset: Pagination offset.
        limit: Maximum items.

    Returns:
        Paginated list of sessions.
    """
    detector = get_streaming_detector()

    all_sessions = await detector.list_sessions()
    page = all_sessions[offset : offset + limit]

    return StreamingSessionListResponse(
        data=[_session_to_response(item) for item in page],
        total=len(all_sessions),
        offset=offset,
        limit=limit,
    )
1160
+
1161
+
1162
@router.get(
    "/anomaly/streaming/algorithms",
    response_model=StreamingAlgorithmListResponse,
    summary="List streaming algorithms",
    description="Get information about available streaming algorithms",
)
async def list_streaming_algorithms() -> StreamingAlgorithmListResponse:
    """Describe the streaming anomaly-detection algorithms on offer.

    Returns:
        List of streaming algorithm information with its count.
    """
    available = get_streaming_algorithm_info_list()
    return StreamingAlgorithmListResponse(
        algorithms=available,
        total=len(available),
    )
1179
+
1180
+
1181
@router.websocket("/anomaly/streaming/{session_id}/ws")
async def streaming_websocket(
    websocket: WebSocket,
    session_id: str,
) -> None:
    """WebSocket endpoint for real-time streaming alerts.

    Clients can connect to receive alerts in real-time.
    They can also push data points through the WebSocket.

    Protocol:
        - Send JSON: {"type": "data", "data": {...}} to push data
        - Send JSON: {"type": "ping"} to receive {"type": "pong"}
        - Receive JSON: {"type": "alert", "alert": {...}} on anomaly
        - Receive JSON: {"type": "ack", "has_alert": bool} after each data push

    Args:
        websocket: WebSocket connection.
        session_id: Session ID.
    """
    detector = get_streaming_detector()

    # Verify session exists before accepting the handshake; 4004 is an
    # application-chosen close code meaning "not found".
    session = await detector.get_session(session_id)
    if session is None:
        await websocket.close(code=4004, reason="Session not found")
        return

    await websocket.accept()

    # Register callback for alerts
    async def on_alert(alert):
        """Send alert to WebSocket client."""
        try:
            await websocket.send_json({
                "type": "alert",
                "alert": alert.to_dict(),
            })
        except Exception:
            # Best-effort delivery: a send to a closing/closed socket must
            # not take down the detector's alert dispatch.
            pass

    detector.register_alert_callback(session_id, on_alert)

    try:
        while True:
            # Receive data from client
            message = await websocket.receive_json()

            if message.get("type") == "data":
                # Push data point
                data = message.get("data", {})
                timestamp_str = message.get("timestamp")
                timestamp = None
                if timestamp_str:
                    try:
                        timestamp = datetime.fromisoformat(timestamp_str)
                    except ValueError:
                        # Unparseable client timestamp: fall back to
                        # server-side timestamping inside the detector.
                        pass

                # NOTE(review): push_data_point can raise ValueError if the
                # session is deleted mid-connection; that would surface as a
                # connection error rather than a protocol message - confirm
                # whether that is the intended behavior.
                alert = await detector.push_data_point(
                    session_id=session_id,
                    data=data,
                    timestamp=timestamp,
                )

                # Send acknowledgment
                await websocket.send_json({
                    "type": "ack",
                    "has_alert": alert is not None,
                })

            elif message.get("type") == "ping":
                await websocket.send_json({"type": "pong"})

    except WebSocketDisconnect:
        pass
    finally:
        # Always detach the callback so a closed socket cannot linger in the
        # detector's callback registry.
        detector.unregister_alert_callback(session_id, on_alert)
1257
+
1258
+
1259
+ # =============================================================================
1260
+ # Streaming Helper Functions
1261
+ # =============================================================================
1262
+
1263
+
1264
def _session_to_response(session) -> StreamingSessionResponse:
    """Map a streaming session object onto the response schema.

    Note: reads the session's private buffers (`_column_stats`, `_buffer`,
    `_alerts`) for counts and statistics, mirroring the service internals.
    """
    per_column_stats = None
    if session._column_stats:
        per_column_stats = {
            name: StreamingStatisticsSchema(**tracker.to_dict())
            for name, tracker in session._column_stats.items()
        }

    def _iso(moment):
        """Render an optional datetime as ISO-8601, or None."""
        return moment.isoformat() if moment else None

    return StreamingSessionResponse(
        id=session.id,
        source_id=session.source_id,
        algorithm=StreamingAlgorithmSchema(session.algorithm.value),
        window_size=session.window_size,
        threshold=session.threshold,
        columns=session.columns,
        status=StreamingStatusSchema(session.status.value),
        config=session.config,
        statistics=per_column_stats,
        total_points=len(session._buffer),
        total_alerts=len(session._alerts),
        created_at=session.created_at.isoformat(),
        started_at=_iso(session.started_at),
        stopped_at=_iso(session.stopped_at),
    )
1289
+
1290
+
1291
def _alert_to_response(alert) -> StreamingAlertSchema:
    """Map a streaming alert object onto the response schema."""
    fields = {
        "id": alert.id,
        "session_id": alert.session_id,
        "timestamp": alert.timestamp.isoformat(),
        "data_point": alert.data_point,
        "anomaly_score": alert.anomaly_score,
        "is_anomaly": alert.is_anomaly,
        "algorithm": StreamingAlgorithmSchema(alert.algorithm.value),
        "details": alert.details,
    }
    return StreamingAlertSchema(**fields)