odibi-2.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. odibi/__init__.py +32 -0
  2. odibi/__main__.py +8 -0
  3. odibi/catalog.py +3011 -0
  4. odibi/cli/__init__.py +11 -0
  5. odibi/cli/__main__.py +6 -0
  6. odibi/cli/catalog.py +553 -0
  7. odibi/cli/deploy.py +69 -0
  8. odibi/cli/doctor.py +161 -0
  9. odibi/cli/export.py +66 -0
  10. odibi/cli/graph.py +150 -0
  11. odibi/cli/init_pipeline.py +242 -0
  12. odibi/cli/lineage.py +259 -0
  13. odibi/cli/main.py +215 -0
  14. odibi/cli/run.py +98 -0
  15. odibi/cli/schema.py +208 -0
  16. odibi/cli/secrets.py +232 -0
  17. odibi/cli/story.py +379 -0
  18. odibi/cli/system.py +132 -0
  19. odibi/cli/test.py +286 -0
  20. odibi/cli/ui.py +31 -0
  21. odibi/cli/validate.py +39 -0
  22. odibi/config.py +3541 -0
  23. odibi/connections/__init__.py +9 -0
  24. odibi/connections/azure_adls.py +499 -0
  25. odibi/connections/azure_sql.py +709 -0
  26. odibi/connections/base.py +28 -0
  27. odibi/connections/factory.py +322 -0
  28. odibi/connections/http.py +78 -0
  29. odibi/connections/local.py +119 -0
  30. odibi/connections/local_dbfs.py +61 -0
  31. odibi/constants.py +17 -0
  32. odibi/context.py +528 -0
  33. odibi/diagnostics/__init__.py +12 -0
  34. odibi/diagnostics/delta.py +520 -0
  35. odibi/diagnostics/diff.py +169 -0
  36. odibi/diagnostics/manager.py +171 -0
  37. odibi/engine/__init__.py +20 -0
  38. odibi/engine/base.py +334 -0
  39. odibi/engine/pandas_engine.py +2178 -0
  40. odibi/engine/polars_engine.py +1114 -0
  41. odibi/engine/registry.py +54 -0
  42. odibi/engine/spark_engine.py +2362 -0
  43. odibi/enums.py +7 -0
  44. odibi/exceptions.py +297 -0
  45. odibi/graph.py +426 -0
  46. odibi/introspect.py +1214 -0
  47. odibi/lineage.py +511 -0
  48. odibi/node.py +3341 -0
  49. odibi/orchestration/__init__.py +0 -0
  50. odibi/orchestration/airflow.py +90 -0
  51. odibi/orchestration/dagster.py +77 -0
  52. odibi/patterns/__init__.py +24 -0
  53. odibi/patterns/aggregation.py +599 -0
  54. odibi/patterns/base.py +94 -0
  55. odibi/patterns/date_dimension.py +423 -0
  56. odibi/patterns/dimension.py +696 -0
  57. odibi/patterns/fact.py +748 -0
  58. odibi/patterns/merge.py +128 -0
  59. odibi/patterns/scd2.py +148 -0
  60. odibi/pipeline.py +2382 -0
  61. odibi/plugins.py +80 -0
  62. odibi/project.py +581 -0
  63. odibi/references.py +151 -0
  64. odibi/registry.py +246 -0
  65. odibi/semantics/__init__.py +71 -0
  66. odibi/semantics/materialize.py +392 -0
  67. odibi/semantics/metrics.py +361 -0
  68. odibi/semantics/query.py +743 -0
  69. odibi/semantics/runner.py +430 -0
  70. odibi/semantics/story.py +507 -0
  71. odibi/semantics/views.py +432 -0
  72. odibi/state/__init__.py +1203 -0
  73. odibi/story/__init__.py +55 -0
  74. odibi/story/doc_story.py +554 -0
  75. odibi/story/generator.py +1431 -0
  76. odibi/story/lineage.py +1043 -0
  77. odibi/story/lineage_utils.py +324 -0
  78. odibi/story/metadata.py +608 -0
  79. odibi/story/renderers.py +453 -0
  80. odibi/story/templates/run_story.html +2520 -0
  81. odibi/story/themes.py +216 -0
  82. odibi/testing/__init__.py +13 -0
  83. odibi/testing/assertions.py +75 -0
  84. odibi/testing/fixtures.py +85 -0
  85. odibi/testing/source_pool.py +277 -0
  86. odibi/transformers/__init__.py +122 -0
  87. odibi/transformers/advanced.py +1472 -0
  88. odibi/transformers/delete_detection.py +610 -0
  89. odibi/transformers/manufacturing.py +1029 -0
  90. odibi/transformers/merge_transformer.py +778 -0
  91. odibi/transformers/relational.py +675 -0
  92. odibi/transformers/scd.py +579 -0
  93. odibi/transformers/sql_core.py +1356 -0
  94. odibi/transformers/validation.py +165 -0
  95. odibi/ui/__init__.py +0 -0
  96. odibi/ui/app.py +195 -0
  97. odibi/utils/__init__.py +66 -0
  98. odibi/utils/alerting.py +667 -0
  99. odibi/utils/config_loader.py +343 -0
  100. odibi/utils/console.py +231 -0
  101. odibi/utils/content_hash.py +202 -0
  102. odibi/utils/duration.py +43 -0
  103. odibi/utils/encoding.py +102 -0
  104. odibi/utils/extensions.py +28 -0
  105. odibi/utils/hashing.py +61 -0
  106. odibi/utils/logging.py +203 -0
  107. odibi/utils/logging_context.py +740 -0
  108. odibi/utils/progress.py +429 -0
  109. odibi/utils/setup_helpers.py +302 -0
  110. odibi/utils/telemetry.py +140 -0
  111. odibi/validation/__init__.py +62 -0
  112. odibi/validation/engine.py +765 -0
  113. odibi/validation/explanation_linter.py +155 -0
  114. odibi/validation/fk.py +547 -0
  115. odibi/validation/gate.py +252 -0
  116. odibi/validation/quarantine.py +605 -0
  117. odibi/writers/__init__.py +15 -0
  118. odibi/writers/sql_server_writer.py +2081 -0
  119. odibi-2.5.0.dist-info/METADATA +255 -0
  120. odibi-2.5.0.dist-info/RECORD +124 -0
  121. odibi-2.5.0.dist-info/WHEEL +5 -0
  122. odibi-2.5.0.dist-info/entry_points.txt +2 -0
  123. odibi-2.5.0.dist-info/licenses/LICENSE +190 -0
  124. odibi-2.5.0.dist-info/top_level.txt +1 -0
odibi/utils/alerting.py
@@ -0,0 +1,667 @@
+ """Alerting utilities for notifications."""
+
+ import json
+ import logging
+ import urllib.request
+ from datetime import datetime, timezone
+ from typing import Any, Dict, Optional
+
+ from odibi.config import AlertConfig, AlertEvent, AlertType
+
+ logger = logging.getLogger(__name__)
+
+
+ class AlertThrottler:
+     """Prevent alert spam by throttling repeated alerts."""
+
+     def __init__(self):
+         self._last_alerts: Dict[str, datetime] = {}
+         self._alert_counts: Dict[str, int] = {}
+
+     def should_send(
+         self,
+         alert_key: str,
+         throttle_minutes: int = 15,
+         max_per_hour: int = 10,
+     ) -> bool:
+         """Check if alert should be sent based on throttling rules.
+
+         Args:
+             alert_key: Unique key for this alert type
+             throttle_minutes: Minimum minutes between same alerts
+             max_per_hour: Maximum alerts of same type per hour
+
+         Returns:
+             True if alert should be sent, False if throttled
+         """
+         now = datetime.now(timezone.utc)
+         last = self._last_alerts.get(alert_key)
+
+         if last and (now - last).total_seconds() < throttle_minutes * 60:
+             logger.debug(f"Alert throttled: {alert_key} (within {throttle_minutes}m)")
+             return False
+
+         hour_key = f"{alert_key}:{now.strftime('%Y%m%d%H')}"
+         count = self._alert_counts.get(hour_key, 0)
+         if count >= max_per_hour:
+             logger.debug(f"Alert rate-limited: {alert_key} ({count}/{max_per_hour} per hour)")
+             return False
+
+         self._last_alerts[alert_key] = now
+         self._alert_counts[hour_key] = count + 1
+
+         return True
+
+     def reset(self) -> None:
+         """Reset throttler state (useful for testing)."""
+         self._last_alerts.clear()
+         self._alert_counts.clear()
+
+
+ _throttler = AlertThrottler()
+
+
+ def get_throttler() -> AlertThrottler:
+     """Get the global throttler instance."""
+     return _throttler
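The throttler keys alerts by an arbitrary string and enforces both a minimum gap between identical alerts and a per-hour cap. A minimal sketch of that behaviour using only the class above (the key strings are illustrative):

    throttler = AlertThrottler()

    # First alert for a key is allowed and recorded.
    throttler.should_send("orders_pipeline:on_failure", throttle_minutes=15)  # True

    # The same key inside the 15-minute window is suppressed.
    throttler.should_send("orders_pipeline:on_failure", throttle_minutes=15)  # False

    # A different key is tracked independently, subject to its own hourly cap.
    throttler.should_send("orders_pipeline:on_quarantine", max_per_hour=10)   # True

    throttler.reset()  # clears both the last-sent times and the hourly counters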
+
+
+ def send_alert(
+     config: AlertConfig,
+     message: str,
+     context: Dict[str, Any],
+     throttle: bool = True,
+ ) -> bool:
+     """Send alert to configured channel with throttling support.
+
+     Args:
+         config: Alert configuration
+         message: Alert message
+         context: Context dictionary (pipeline name, status, event_type, etc.)
+         throttle: Whether to apply throttling (default: True)
+
+     Returns:
+         True if alert was sent, False if throttled or failed
+     """
+     if throttle:
+         pipeline = context.get("pipeline", "unknown")
+         event = context.get("event_type", "unknown")
+         throttle_key = f"{pipeline}:{event}"
+
+         throttle_minutes = config.metadata.get("throttle_minutes", 15)
+         max_per_hour = config.metadata.get("max_per_hour", 10)
+
+         if not _throttler.should_send(throttle_key, throttle_minutes, max_per_hour):
+             return False
+
+     payload = _build_payload(config, message, context)
+
+     try:
+         headers = {"Content-Type": "application/json"}
+         data = json.dumps(payload).encode("utf-8")
+         req = urllib.request.Request(config.url, data=data, headers=headers)
+
+         with urllib.request.urlopen(req) as response:
+             if response.status >= 400:
+                 logger.error(f"Alert failed: HTTP {response.status}")
+                 return False
+             return True
+     except Exception as e:
+         logger.error(f"Failed to send alert: {e}")
+         return False
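A sketch of calling this directly, assuming AlertConfig (defined in odibi/config.py, not shown in this hunk) accepts type, url, and metadata keyword arguments matching the attributes read above; the webhook URL and metadata values are placeholders:

    from odibi.config import AlertConfig, AlertEvent, AlertType
    from odibi.utils.alerting import send_alert

    config = AlertConfig(
        type=AlertType.SLACK,
        url="https://hooks.slack.com/services/T000/B000/XXXX",  # placeholder webhook
        metadata={"throttle_minutes": 30, "max_per_hour": 5},
    )

    sent = send_alert(
        config,
        message="Pipeline failed",
        context={
            "pipeline": "orders_pipeline",
            "status": "FAILED",
            "event_type": AlertEvent.ON_FAILURE.value,
            "duration": 12.4,
        },
    )
    # sent is False when the alert is throttled or the HTTP POST fails; errors are logged, not raised.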
+
+
+ def _get_event_color(event_type: str, status: str) -> Dict[str, str]:
+     """Get color scheme based on event type or status.
+
+     Returns:
+         Dict with 'hex' (for Slack), 'style' (for Teams Adaptive Card)
+     """
+     if event_type == AlertEvent.ON_QUARANTINE.value:
+         return {"hex": "#FFA500", "style": "Warning"}
+     elif event_type == AlertEvent.ON_GATE_BLOCK.value:
+         return {"hex": "#FF0000", "style": "Attention"}
+     elif event_type == AlertEvent.ON_THRESHOLD_BREACH.value:
+         return {"hex": "#FF6600", "style": "Warning"}
+     elif status == "SUCCESS":
+         return {"hex": "#36a64f", "style": "Good"}
+     elif status == "STARTED":
+         return {"hex": "#0078D4", "style": "Accent"}
+     else:
+         return {"hex": "#FF0000", "style": "Attention"}
+
+
+ def _get_event_icon(event_type: str, status: str) -> str:
+     """Get icon based on event type or status."""
+     icons = {
+         AlertEvent.ON_QUARANTINE.value: "🔶",
+         AlertEvent.ON_GATE_BLOCK.value: "🚫",
+         AlertEvent.ON_THRESHOLD_BREACH.value: "⚠️",
+         "SUCCESS": "✅",
+         "STARTED": "🚀",
+     }
+     return icons.get(event_type, icons.get(status, "❌"))
+
+
+ def _build_payload(
+     config: AlertConfig,
+     message: str,
+     context: Dict[str, Any],
+ ) -> Dict[str, Any]:
+     """Build payload based on alert type and event."""
+     pipeline = context.get("pipeline", "Unknown Pipeline")
+     status = context.get("status", "UNKNOWN")
+     duration = context.get("duration", 0.0)
+     project_config = context.get("project_config")
+     event_type = context.get("event_type", "")
+     timestamp = context.get("timestamp", datetime.now(timezone.utc).isoformat())
+
+     # Row count summary from story
+     total_rows = context.get("total_rows_processed", 0)
+     rows_dropped = context.get("rows_dropped", 0)
+     final_rows = context.get("final_output_rows")
+
+     project_name = "Odibi Project"
+     owner = None
+
+     if project_config:
+         project_name = getattr(project_config, "project", project_name)
+         owner = getattr(project_config, "owner", None)
+
+     color = _get_event_color(event_type, status)
+     icon = _get_event_icon(event_type, status)
+
+     if config.type == AlertType.SLACK:
+         return _build_slack_payload(
+             pipeline=pipeline,
+             project_name=project_name,
+             status=status,
+             duration=duration,
+             message=message,
+             owner=owner,
+             event_type=event_type,
+             timestamp=timestamp,
+             context=context,
+             config=config,
+             color=color["hex"],
+             icon=icon,
+             total_rows=total_rows,
+             rows_dropped=rows_dropped,
+             final_rows=final_rows,
+         )
+
+     elif config.type in (AlertType.TEAMS, AlertType.TEAMS_WORKFLOW):
+         return _build_teams_workflow_payload(
+             pipeline=pipeline,
+             project_name=project_name,
+             status=status,
+             duration=duration,
+             message=message,
+             owner=owner,
+             event_type=event_type,
+             timestamp=timestamp,
+             context=context,
+             config=config,
+             style=color["style"],
+             icon=icon,
+             total_rows=total_rows,
+             rows_dropped=rows_dropped,
+             final_rows=final_rows,
+         )
+
+     else:
+         return _build_generic_payload(
+             pipeline=pipeline,
+             status=status,
+             duration=duration,
+             message=message,
+             timestamp=timestamp,
+             context=context,
+             config=config,
+         )
+
+
+ def _build_slack_payload(
+     pipeline: str,
+     project_name: str,
+     status: str,
+     duration: float,
+     message: str,
+     owner: Optional[str],
+     event_type: str,
+     timestamp: str,
+     context: Dict[str, Any],
+     config: AlertConfig,
+     color: str,
+     icon: str,
+     total_rows: int = 0,
+     rows_dropped: int = 0,
+     final_rows: Optional[int] = None,
+ ) -> Dict[str, Any]:
+     """Build Slack Block Kit payload with event-specific content."""
+     blocks = [
+         {
+             "type": "header",
+             "text": {"type": "plain_text", "text": f"{icon} ODIBI: {pipeline} - {status}"},
+         }
+     ]
+
+     fields = [
+         {"type": "mrkdwn", "text": f"*Project:*\n{project_name}"},
+         {"type": "mrkdwn", "text": f"*Status:*\n{status}"},
+         {"type": "mrkdwn", "text": f"*Duration:*\n{duration:.2f}s"},
+     ]
+
+     # Add row summary for success/failure events (not start)
+     if total_rows > 0 or final_rows is not None:
+         row_text = f"{final_rows:,}" if final_rows else f"{total_rows:,}"
+         fields.append({"type": "mrkdwn", "text": f"*Rows Processed:*\n{row_text}"})
+         if rows_dropped > 0:
+             fields.append({"type": "mrkdwn", "text": f"*Rows Filtered:*\n{rows_dropped:,}"})
+
+     if timestamp:
+         fields.append({"type": "mrkdwn", "text": f"*Timestamp:*\n{timestamp}"})
+
+     if owner:
+         fields.append({"type": "mrkdwn", "text": f"*Owner:*\n{owner}"})
+
+     if event_type == AlertEvent.ON_QUARANTINE.value:
+         qd = context.get("quarantine_details", {})
+         fields.extend(
+             [
+                 {
+                     "type": "mrkdwn",
+                     "text": f"*Rows Quarantined:*\n{qd.get('rows_quarantined', 0):,}",
+                 },
+                 {
+                     "type": "mrkdwn",
+                     "text": f"*Quarantine Table:*\n{qd.get('quarantine_path', 'N/A')}",
+                 },
+             ]
+         )
+         failed_tests = qd.get("failed_tests", [])
+         if failed_tests:
+             fields.append(
+                 {
+                     "type": "mrkdwn",
+                     "text": f"*Failed Tests:*\n{', '.join(failed_tests[:5])}",
+                 }
+             )
+
+     elif event_type == AlertEvent.ON_GATE_BLOCK.value:
+         gd = context.get("gate_details", {})
+         fields.extend(
+             [
+                 {"type": "mrkdwn", "text": f"*Pass Rate:*\n{gd.get('pass_rate', 0):.1%}"},
+                 {"type": "mrkdwn", "text": f"*Required:*\n{gd.get('required_rate', 0.95):.1%}"},
+                 {"type": "mrkdwn", "text": f"*Rows Failed:*\n{gd.get('failed_rows', 0):,}"},
+             ]
+         )
+         failure_reasons = gd.get("failure_reasons", [])
+         if failure_reasons:
+             fields.append(
+                 {
+                     "type": "mrkdwn",
+                     "text": f"*Reasons:*\n{'; '.join(failure_reasons[:3])}",
+                 }
+             )
+
+     elif event_type == AlertEvent.ON_THRESHOLD_BREACH.value:
+         td = context.get("threshold_details", {})
+         fields.extend(
+             [
+                 {"type": "mrkdwn", "text": f"*Threshold:*\n{td.get('threshold', 'N/A')}"},
+                 {"type": "mrkdwn", "text": f"*Actual Value:*\n{td.get('actual_value', 'N/A')}"},
+                 {"type": "mrkdwn", "text": f"*Metric:*\n{td.get('metric', 'N/A')}"},
+             ]
+         )
+
+     blocks.append({"type": "section", "fields": fields})
+
+     story_path = context.get("story_path")
+     if story_path:
+         blocks.append(
+             {
+                 "type": "context",
+                 "elements": [{"type": "mrkdwn", "text": f"📂 Story: `{story_path}`"}],
+             }
+         )
+
+     payload = {"blocks": blocks}
+
+     if color:
+         payload["attachments"] = [{"color": color, "blocks": []}]
+
+     payload.update(config.metadata)
+     return payload
+
+
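For a plain success run (no quarantine, gate, or threshold details), the builder above yields roughly this structure; the pipeline name and timestamp are illustrative, and any keys in config.metadata are merged into the top level by payload.update():

    {
        "blocks": [
            {
                "type": "header",
                "text": {"type": "plain_text", "text": "✅ ODIBI: orders_pipeline - SUCCESS"},
            },
            {
                "type": "section",
                "fields": [
                    {"type": "mrkdwn", "text": "*Project:*\nOdibi Project"},
                    {"type": "mrkdwn", "text": "*Status:*\nSUCCESS"},
                    {"type": "mrkdwn", "text": "*Duration:*\n42.10s"},
                    {"type": "mrkdwn", "text": "*Timestamp:*\n2025-01-01T00:00:00+00:00"},
                ],
            },
        ],
        "attachments": [{"color": "#36a64f", "blocks": []}],
    }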
+ def _build_teams_workflow_payload(
+     pipeline: str,
+     project_name: str,
+     status: str,
+     duration: float,
+     message: str,
+     owner: Optional[str],
+     event_type: str,
+     timestamp: str,
+     context: Dict[str, Any],
+     config: AlertConfig,
+     style: str,
+     icon: str,
+     total_rows: int = 0,
+     rows_dropped: int = 0,
+     final_rows: Optional[int] = None,
+ ) -> Dict[str, Any]:
+     """Build payload for Power Automate Teams Workflow trigger.
+
+     Power Automate's 'When a Teams webhook request is received' expects
+     just the Adaptive Card content wrapped in an 'attachments' array,
+     not the full message envelope used by classic webhooks.
+     """
+     facts = [
+         {"title": "⏱ Duration", "value": f"{duration:.2f}s"},
+         {"title": "📅 Time", "value": timestamp},
+     ]
+
+     if total_rows > 0 or final_rows is not None:
+         row_text = f"{final_rows:,}" if final_rows else f"{total_rows:,}"
+         facts.append({"title": "📊 Rows Processed", "value": row_text})
+         if rows_dropped > 0:
+             facts.append({"title": "🔻 Rows Filtered", "value": f"{rows_dropped:,}"})
+
+     if owner:
+         facts.insert(0, {"title": "👤 Owner", "value": owner})
+
+     if event_type == AlertEvent.ON_QUARANTINE.value:
+         qd = context.get("quarantine_details", {})
+         facts.extend(
+             [
+                 {"title": "🔶 Rows Quarantined", "value": f"{qd.get('rows_quarantined', 0):,}"},
+                 {"title": "📍 Quarantine Table", "value": qd.get("quarantine_path", "N/A")},
+             ]
+         )
+         failed_tests = qd.get("failed_tests", [])
+         if failed_tests:
+             facts.append({"title": "❌ Failed Tests", "value": ", ".join(failed_tests[:5])})
+
+     elif event_type == AlertEvent.ON_GATE_BLOCK.value:
+         gd = context.get("gate_details", {})
+         facts.extend(
+             [
+                 {"title": "📊 Pass Rate", "value": f"{gd.get('pass_rate', 0):.1%}"},
+                 {"title": "🎯 Required", "value": f"{gd.get('required_rate', 0.95):.1%}"},
+                 {"title": "❌ Rows Failed", "value": f"{gd.get('failed_rows', 0):,}"},
+             ]
+         )
+
+     elif event_type == AlertEvent.ON_THRESHOLD_BREACH.value:
+         td = context.get("threshold_details", {})
+         facts.extend(
+             [
+                 {"title": "📏 Threshold", "value": str(td.get("threshold", "N/A"))},
+                 {"title": "📈 Actual Value", "value": str(td.get("actual_value", "N/A"))},
+                 {"title": "📊 Metric", "value": td.get("metric", "N/A")},
+             ]
+         )
+
+     body_items = [
+         {
+             "type": "Container",
+             "style": style,
+             "items": [
+                 {
+                     "type": "TextBlock",
+                     "text": f"{icon} Pipeline: {pipeline}",
+                     "weight": "Bolder",
+                     "size": "Medium",
+                     "color": "Light",
+                 },
+                 {
+                     "type": "TextBlock",
+                     "text": f"Project: {project_name} | Status: {status}",
+                     "isSubtle": True,
+                     "spacing": "None",
+                     "color": "Light",
+                     "size": "Small",
+                 },
+             ],
+         },
+         {"type": "Container", "items": [{"type": "FactSet", "facts": facts}]},
+     ]
+
+     story_path = context.get("story_path")
+     if story_path:
+         body_items.append(
+             {
+                 "type": "TextBlock",
+                 "text": f"📂 Story: {story_path}",
+                 "size": "Small",
+                 "isSubtle": True,
+                 "wrap": True,
+             }
+         )
+
+     # Handle @mentions
+     # 'mention' applies to all events, 'mention_on_failure' only to failure events
+     mention_users = config.metadata.get("mention", [])
+     if isinstance(mention_users, str):
+         mention_users = [mention_users]
+
+     # Add failure-specific mentions for failure events
+     is_failure_event = event_type in (
+         AlertEvent.ON_FAILURE.value,
+         AlertEvent.ON_GATE_BLOCK.value,
+         AlertEvent.ON_QUARANTINE.value,
+     )
+     if is_failure_event:
+         failure_mentions = config.metadata.get("mention_on_failure", [])
+         if isinstance(failure_mentions, str):
+             failure_mentions = [failure_mentions]
+         mention_users = list(set(mention_users + failure_mentions))
+
+     entities = []
+     mention_text = ""
+
+     if mention_users:
+         mentions = []
+         for i, user_email in enumerate(mention_users):
+             mention_id = f"mention{i}"
+             mentions.append(f"<at>{mention_id}</at>")
+             entities.append(
+                 {
+                     "type": "mention",
+                     "text": f"<at>{mention_id}</at>",
+                     "mentioned": {"id": user_email, "name": user_email},
+                 }
+             )
+         mention_text = " ".join(mentions)
+         body_items.append(
+             {
+                 "type": "TextBlock",
+                 "text": f"🔔 {mention_text}",
+                 "wrap": True,
+             }
+         )
+
+     adaptive_card = {
+         "type": "AdaptiveCard",
+         "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
+         "version": "1.4",
+         "body": body_items,
+     }
+
+     if entities:
+         adaptive_card["msteams"] = {"entities": entities}
+
+     # Power Automate workflow expects 'attachments' array with the card
+     return {
+         "attachments": [
+             {
+                 "contentType": "application/vnd.microsoft.card.adaptive",
+                 "content": adaptive_card,
+             }
+         ]
+     }
+
+
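The envelope posted to the Power Automate trigger is therefore just the Adaptive Card wrapped in an attachments array, roughly:

    {
        "attachments": [
            {
                "contentType": "application/vnd.microsoft.card.adaptive",
                "content": {
                    "type": "AdaptiveCard",
                    "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                    "version": "1.4",
                    # "body" holds the styled header container, the FactSet built above,
                    # and the optional story-path / @mention TextBlocks;
                    # "msteams": {"entities": [...]} appears only when mentions are configured.
                },
            }
        ]
    }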
+ def _build_generic_payload(
+     pipeline: str,
+     status: str,
+     duration: float,
+     message: str,
+     timestamp: str,
+     context: Dict[str, Any],
+     config: AlertConfig,
+ ) -> Dict[str, Any]:
+     """Build generic webhook payload."""
+     payload = {
+         "pipeline": pipeline,
+         "status": status,
+         "duration": duration,
+         "message": message,
+         "timestamp": timestamp,
+         "event_type": context.get("event_type"),
+         "metadata": config.metadata,
+     }
+
+     if context.get("event_type") == AlertEvent.ON_QUARANTINE.value:
+         payload["quarantine_details"] = context.get("quarantine_details", {})
+     elif context.get("event_type") == AlertEvent.ON_GATE_BLOCK.value:
+         payload["gate_details"] = context.get("gate_details", {})
+     elif context.get("event_type") == AlertEvent.ON_THRESHOLD_BREACH.value:
+         payload["threshold_details"] = context.get("threshold_details", {})
+
+     return payload
+
+
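For any other alert type, the receiver gets a flat JSON object; a gate-block event (all values illustrative) would serialize roughly as:

    {
        "pipeline": "orders_pipeline",
        "status": "GATE_BLOCKED",
        "duration": 42.1,
        "message": "Quality gate failed in clean_orders: 91.0% < 95.0%",
        "timestamp": "2025-01-01T00:00:00+00:00",
        "event_type": "...",   # the string value of AlertEvent.ON_GATE_BLOCK
        "metadata": {},
        "gate_details": {
            "pass_rate": 0.91,
            "required_rate": 0.95,
            "failed_rows": 1200,
            "total_rows": 13300,
            "failure_reasons": ["null customer_id"],
            "node_name": "clean_orders",
        },
    }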
+ def send_quarantine_alert(
+     config: AlertConfig,
+     pipeline: str,
+     node_name: str,
+     rows_quarantined: int,
+     quarantine_path: str,
+     failed_tests: list,
+     context: Optional[Dict[str, Any]] = None,
+ ) -> bool:
+     """Convenience function to send a quarantine alert.
+
+     Args:
+         config: Alert configuration
+         pipeline: Pipeline name
+         node_name: Node that quarantined rows
+         rows_quarantined: Number of rows quarantined
+         quarantine_path: Path/table where quarantined rows are stored
+         failed_tests: List of test names that failed
+         context: Optional additional context
+
+     Returns:
+         True if alert sent, False otherwise
+     """
+     ctx = context.copy() if context else {}
+     ctx.update(
+         {
+             "pipeline": pipeline,
+             "status": "QUARANTINE",
+             "event_type": AlertEvent.ON_QUARANTINE.value,
+             "quarantine_details": {
+                 "rows_quarantined": rows_quarantined,
+                 "quarantine_path": quarantine_path,
+                 "failed_tests": failed_tests,
+                 "node_name": node_name,
+             },
+         }
+     )
+
+     message = f"{rows_quarantined} rows quarantined in {node_name}"
+     return send_alert(config, message, ctx)
+
+
+ def send_gate_block_alert(
+     config: AlertConfig,
+     pipeline: str,
+     node_name: str,
+     pass_rate: float,
+     required_rate: float,
+     failed_rows: int,
+     total_rows: int,
+     failure_reasons: list,
+     context: Optional[Dict[str, Any]] = None,
+ ) -> bool:
+     """Convenience function to send a gate block alert.
+
+     Args:
+         config: Alert configuration
+         pipeline: Pipeline name
+         node_name: Node where gate failed
+         pass_rate: Actual pass rate
+         required_rate: Required pass rate
+         failed_rows: Number of failed rows
+         total_rows: Total rows processed
+         failure_reasons: List of failure reasons
+         context: Optional additional context
+
+     Returns:
+         True if alert sent, False otherwise
+     """
+     ctx = context.copy() if context else {}
+     ctx.update(
+         {
+             "pipeline": pipeline,
+             "status": "GATE_BLOCKED",
+             "event_type": AlertEvent.ON_GATE_BLOCK.value,
+             "gate_details": {
+                 "pass_rate": pass_rate,
+                 "required_rate": required_rate,
+                 "failed_rows": failed_rows,
+                 "total_rows": total_rows,
+                 "failure_reasons": failure_reasons,
+                 "node_name": node_name,
+             },
+         }
+     )
+
+     message = f"Quality gate failed in {node_name}: {pass_rate:.1%} < {required_rate:.1%}"
+     return send_alert(config, message, ctx)
+
+
+ def send_threshold_breach_alert(
+     config: AlertConfig,
+     pipeline: str,
+     node_name: str,
+     metric: str,
+     threshold: Any,
+     actual_value: Any,
+     context: Optional[Dict[str, Any]] = None,
+ ) -> bool:
+     """Convenience function to send a threshold breach alert.
+
+     Args:
+         config: Alert configuration
+         pipeline: Pipeline name
+         node_name: Node where threshold was breached
+         metric: Name of the metric that breached
+         threshold: Expected threshold value
+         actual_value: Actual value that breached
+         context: Optional additional context
+
+     Returns:
+         True if alert sent, False otherwise
+     """
+     ctx = context.copy() if context else {}
+     ctx.update(
+         {
+             "pipeline": pipeline,
+             "status": "THRESHOLD_BREACH",
+             "event_type": AlertEvent.ON_THRESHOLD_BREACH.value,
+             "threshold_details": {
+                 "metric": metric,
+                 "threshold": threshold,
+                 "actual_value": actual_value,
+                 "node_name": node_name,
+             },
+         }
+     )
+
+     message = f"Threshold breach in {node_name}: {metric} = {actual_value} (threshold: {threshold})"
+     return send_alert(config, message, ctx)
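A node that quarantines rows could raise the matching alert through the helper above; a sketch with illustrative pipeline, node, and table names, reusing an AlertConfig like the one in the earlier send_alert example:

    send_quarantine_alert(
        config=config,
        pipeline="orders_pipeline",
        node_name="validate_orders",
        rows_quarantined=42,
        quarantine_path="warehouse.quarantine.orders",
        failed_tests=["not_null_customer_id", "valid_order_date"],
    )
    # send_gate_block_alert and send_threshold_breach_alert work the same way; all three
    # route through send_alert(), so the same throttling rules and return semantics apply.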