delimit-cli 4.1.43 → 4.1.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/README.md +46 -5
- package/bin/delimit-cli.js +1523 -208
- package/bin/delimit-setup.js +8 -2
- package/gateway/ai/agent_dispatch.py +34 -2
- package/gateway/ai/backends/deploy_bridge.py +167 -12
- package/gateway/ai/content_engine.py +1276 -2
- package/gateway/ai/github_scanner.py +1 -1
- package/gateway/ai/governance.py +58 -0
- package/gateway/ai/key_resolver.py +95 -2
- package/gateway/ai/ledger_manager.py +13 -3
- package/gateway/ai/loop_engine.py +220 -349
- package/gateway/ai/notify.py +1786 -2
- package/gateway/ai/reddit_scanner.py +45 -1
- package/gateway/ai/screen_record.py +1 -1
- package/gateway/ai/secrets_broker.py +5 -1
- package/gateway/ai/social_cache.py +341 -0
- package/gateway/ai/social_daemon.py +41 -10
- package/gateway/ai/supabase_sync.py +190 -2
- package/gateway/ai/tui.py +594 -36
- package/gateway/core/zero_spec/express_extractor.py +2 -2
- package/gateway/core/zero_spec/nestjs_extractor.py +40 -9
- package/gateway/requirements.txt +3 -6
- package/package.json +4 -3
- package/scripts/demo-v420-clean.sh +267 -0
- package/scripts/demo-v420-deliberation.sh +217 -0
- package/scripts/demo-v420.sh +55 -0
- package/scripts/postinstall.js +4 -3
- package/scripts/publish-ci-guard.sh +30 -0
- package/scripts/record-and-upload.sh +132 -0
- package/scripts/release.sh +126 -0
- package/scripts/sync-gateway.sh +100 -0
- package/scripts/youtube-upload.py +141 -0
package/gateway/ai/notify.py
CHANGED
|
@@ -1,2 +1,1786 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
"""
|
|
2
|
+
Notification helper for delimit_notify and delimit_notify_inbox tools.
|
|
3
|
+
|
|
4
|
+
Supports webhook, Slack, and email channels (outbound).
|
|
5
|
+
Supports impact-based notification routing (LED-233).
|
|
6
|
+
Supports IMAP inbox polling with classification and forwarding (inbound).
|
|
7
|
+
Stores notification history in ~/.delimit/notifications.jsonl.
|
|
8
|
+
Stores inbox routing log in ~/.delimit/inbox_routing.jsonl.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import email
|
|
12
|
+
import email.header
|
|
13
|
+
import email.utils
|
|
14
|
+
import imaplib
|
|
15
|
+
import json
|
|
16
|
+
import logging
|
|
17
|
+
import os
|
|
18
|
+
import smtplib
|
|
19
|
+
import urllib.request
|
|
20
|
+
import urllib.error
|
|
21
|
+
from datetime import datetime, timezone
|
|
22
|
+
from email.mime.text import MIMEText
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
from typing import Any, Dict, List, Optional
|
|
25
|
+
|
|
26
|
+
import threading
|
|
27
|
+
import time as _time
|
|
28
|
+
|
|
29
|
+
try:
|
|
30
|
+
import yaml as _yaml
|
|
31
|
+
except ImportError:
|
|
32
|
+
_yaml = None # type: ignore[assignment]
|
|
33
|
+
|
|
34
|
+
# ── Email Throttle (Storm Prevention) ────────────────────────────────
# Immediate sends are capped at _EMAIL_MAX_PER_HOUR per rolling hour;
# overflow and known-spammy patterns (daemon auto-emails, scan digests)
# are queued and flushed as a single digest every _EMAIL_DIGEST_INTERVAL
# seconds. Founder-initiated, approval, alert, and social draft emails
# are intended to send immediately.
_email_throttle_lock = threading.Lock()  # guards the two lists below
_email_send_times: list = []  # epoch timestamps of recent immediate sends
_email_digest_queue: list = []  # batched non-urgent emails awaiting flush
# NOTE(review): an earlier comment here claimed a limit of 30/hour but the
# value is 10 — confirm which number is actually intended.
_EMAIL_MAX_PER_HOUR = 10
_EMAIL_DIGEST_INTERVAL = 3600  # seconds between digest flushes (60 minutes)
_last_digest_flush = 0.0  # epoch time of the most recent digest flush

# Module-level logger for this notification helper.
logger = logging.getLogger("delimit.ai.notify")

# Append-only JSONL stores under ~/.delimit/.
HISTORY_FILE = Path.home() / ".delimit" / "notifications.jsonl"  # outbound notification history
INBOX_ROUTING_FILE = Path.home() / ".delimit" / "inbox_routing.jsonl"  # inbound routing log
OWNER_ACTIONS_FILE = Path.home() / ".delimit" / "owner_actions.jsonl"  # owner-action queue
|
|
50
|
+
|
|
51
|
+
def _load_json_file(path: Path) -> Dict[str, Any]:
|
|
52
|
+
try:
|
|
53
|
+
if not path.exists():
|
|
54
|
+
return {}
|
|
55
|
+
with open(path, "r", encoding="utf-8") as f:
|
|
56
|
+
data = json.load(f)
|
|
57
|
+
return data if isinstance(data, dict) else {}
|
|
58
|
+
except (OSError, json.JSONDecodeError):
|
|
59
|
+
return {}
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _load_secret_value(*names: str) -> str:
    """Return the first secret found in ~/.delimit/secrets/<NAME>.json.

    Each candidate file is probed for a "value", "token", or
    "access_token" key, in that order; the first non-empty hit is
    returned as a string. Returns "" when nothing matches.
    """
    base = Path.home() / ".delimit" / "secrets"
    for candidate in names:
        payload = _load_json_file(base / f"{candidate}.json")
        for key in ("value", "token", "access_token"):
            found = payload.get(key)
            if found:
                return str(found)
    return ""
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _load_inbound_email_config() -> Dict[str, str]:
    """Assemble IMAP and forwarding settings from ~/.delimit/secrets.

    Reads smtp-all.json to pick the default account (falling back to
    'pro@delimit.ai'), and forward-to.json / DELIMIT_FORWARD_TO for the
    forwarding target. Every value comes back as a string; missing data
    yields empty strings, except the IMAP port which defaults to "993".
    """
    secrets_dir = Path.home() / ".delimit" / "secrets"
    accounts = _load_json_file(secrets_dir / "smtp-all.json")

    def _dict_entry(key: str) -> Dict[str, Any]:
        # Entries may be absent or malformed; only accept dicts.
        entry = accounts.get(key)
        return entry if isinstance(entry, dict) else {}

    defaults = _dict_entry("_defaults")
    account_name = str(defaults.get("from_account") or "pro@delimit.ai")
    account = _dict_entry(account_name)
    forward_cfg = _load_json_file(secrets_dir / "forward-to.json")

    # Env var wins, then forward-to.json ("value" before "to"),
    # then the _defaults recipient.
    forward_to = (
        os.environ.get("DELIMIT_FORWARD_TO", "")
        or forward_cfg.get("value")
        or forward_cfg.get("to")
        or defaults.get("to")
        or ""
    )

    return {
        "imap_host": str(account.get("host") or ""),
        "imap_port": str(account.get("imap_port") or "993"),
        "imap_user": str(account.get("user") or account_name or ""),
        "forward_to": str(forward_to),
    }
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# ── Inbound email configuration ──────────────────────────────────────
# Resolved once at import time; DELIMIT_IMAP_* environment variables
# take precedence over values derived from the secrets files.
_INBOUND_CFG = _load_inbound_email_config()
IMAP_HOST = os.environ.get("DELIMIT_IMAP_HOST", "") or _INBOUND_CFG.get("imap_host", "")
IMAP_PORT = int(os.environ.get("DELIMIT_IMAP_PORT", "") or _INBOUND_CFG.get("imap_port", "993"))
IMAP_USER = os.environ.get("DELIMIT_IMAP_USER", "") or _INBOUND_CFG.get("imap_user", "")
FORWARD_TO = _INBOUND_CFG.get("forward_to", "")

# Domains/senders whose emails require owner action
OWNER_ACTION_DOMAINS = {
    "cooperpress.com",
    "github.com",
    "lemon.com",
    "lemonsqueezy.com",
    "namecheap.com",
    "stripe.com",
    "google.com",
    "youtube.com",
    "x.com",
    "twitter.com",
    "npmjs.com",
    "vercel.com",
    "supabase.io",
    "supabase.com",
    "glama.ai",
    "vultr.com",
    "digitalocean.com",
}

# Extra owner-action sender addresses, configurable via DELIMIT_OWNER_EMAIL
# (filter(None, ...) drops the entry when the env var is unset/empty).
OWNER_ACTION_SENDERS = set(
    filter(None, [os.environ.get("DELIMIT_OWNER_EMAIL", "")])
)

# Subject patterns that indicate owner-action (compiled once).
# Aliased import avoids clashing with the function-local `import re`
# used elsewhere in this module.
import re as _re
OWNER_ACTION_SUBJECT_PATTERNS = [
    _re.compile(r"social\s+draft", _re.IGNORECASE),
    _re.compile(r"show\s+hn", _re.IGNORECASE),
    _re.compile(r"approval", _re.IGNORECASE),
    _re.compile(r"action\s+required", _re.IGNORECASE),
    # NOTE(review): this matches ANY subject containing "reply" or
    # "respond" (e.g. "auto-reply") — confirm the breadth is intended.
    _re.compile(r"reply|respond", _re.IGNORECASE),
    _re.compile(r"invoice", _re.IGNORECASE),
    _re.compile(r"payment", _re.IGNORECASE),
    _re.compile(r"subscription", _re.IGNORECASE),
]

# Sender local-part prefixes that are definitely non-owner (automated/bot).
NON_OWNER_SENDERS = {
    "noreply@",
    "no-reply@",
    "notifications@",
    "mailer-daemon@",
    "postmaster@",
    "donotreply@",
}
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def _record_notification(entry: Dict[str, Any]) -> None:
    """Best-effort append of *entry* as one JSONL line to HISTORY_FILE.

    Filesystem failures are logged and swallowed so a broken history
    file never blocks a notification send.
    """
    line = json.dumps(entry) + "\n"
    try:
        HISTORY_FILE.parent.mkdir(parents=True, exist_ok=True)
        with open(HISTORY_FILE, "a", encoding="utf-8") as sink:
            sink.write(line)
    except OSError as exc:
        logger.warning("Failed to record notification: %s", exc)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def record_owner_action(entry: Dict[str, Any]) -> None:
    """Append an owner-action record for dashboard and async fanout.

    A UTC timestamp and an initial status of "open" are filled in
    first; matching keys in *entry* take precedence. Write failures
    are logged, never raised.
    """
    record = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "status": "open",
    }
    record.update(entry)
    try:
        OWNER_ACTIONS_FILE.parent.mkdir(parents=True, exist_ok=True)
        with open(OWNER_ACTIONS_FILE, "a", encoding="utf-8") as sink:
            sink.write(json.dumps(record) + "\n")
    except OSError as exc:
        logger.warning("Failed to record owner action: %s", exc)
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def _post_json(url: str, payload: Dict[str, Any], timeout: int = 10) -> Dict[str, Any]:
|
|
177
|
+
"""POST a JSON payload to a URL. Returns status dict."""
|
|
178
|
+
data = json.dumps(payload).encode("utf-8")
|
|
179
|
+
req = urllib.request.Request(
|
|
180
|
+
url,
|
|
181
|
+
data=data,
|
|
182
|
+
headers={"Content-Type": "application/json"},
|
|
183
|
+
method="POST",
|
|
184
|
+
)
|
|
185
|
+
try:
|
|
186
|
+
with urllib.request.urlopen(req, timeout=timeout) as resp:
|
|
187
|
+
return {
|
|
188
|
+
"status_code": resp.status,
|
|
189
|
+
"success": 200 <= resp.status < 300,
|
|
190
|
+
}
|
|
191
|
+
except urllib.error.HTTPError as e:
|
|
192
|
+
return {"status_code": e.code, "success": False, "error": str(e)}
|
|
193
|
+
except urllib.error.URLError as e:
|
|
194
|
+
return {"status_code": 0, "success": False, "error": str(e)}
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def send_webhook(
    webhook_url: str,
    message: str,
    event_type: str = "",
) -> Dict[str, Any]:
    """Send a generic webhook notification (JSON POST).

    The payload carries the event type (defaulting to
    "delimit_notification"), the message, and a UTC timestamp. The
    attempt is appended to the local notification history before the
    result summary is returned.
    """
    if not webhook_url:
        return {"error": "webhook_url is required for webhook channel"}

    sent_at = datetime.now(timezone.utc).isoformat()
    outcome = _post_json(
        webhook_url,
        {
            "event_type": event_type or "delimit_notification",
            "message": message,
            "timestamp": sent_at,
        },
    )
    delivered = outcome.get("success", False)

    _record_notification({
        "channel": "webhook",
        "event_type": event_type,
        "message": message,
        "webhook_url": webhook_url,
        "timestamp": sent_at,
        "success": delivered,
    })

    return {
        "channel": "webhook",
        "delivered": delivered,
        "status_code": outcome.get("status_code"),
        "timestamp": sent_at,
        "error": outcome.get("error"),
    }
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def send_slack(
    webhook_url: str,
    message: str,
    event_type: str = "",
) -> Dict[str, Any]:
    """Post *message* to a Slack incoming webhook.

    When *event_type* is given it is prepended as a "[type] " prefix.
    Every attempt is appended to the local notification history.
    """
    if not webhook_url:
        return {"error": "webhook_url is required for slack channel"}

    sent_at = datetime.now(timezone.utc).isoformat()
    text = f"[{event_type}] {message}" if event_type else message
    outcome = _post_json(webhook_url, {"text": text})
    delivered = outcome.get("success", False)

    _record_notification({
        "channel": "slack",
        "event_type": event_type,
        "message": message,
        "webhook_url": webhook_url,
        "timestamp": sent_at,
        "success": delivered,
    })

    return {
        "channel": "slack",
        "delivered": delivered,
        "status_code": outcome.get("status_code"),
        "timestamp": sent_at,
        "error": outcome.get("error"),
    }
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def send_telegram(
    message: str,
    event_type: str = "",
    bot_token: str = "",
    chat_id: str = "",
) -> Dict[str, Any]:
    """Send a Telegram message via the bot sendMessage API.

    Credentials fall back from the arguments to the
    DELIMIT_TELEGRAM_* environment variables and then to the local
    secrets store. The attempt is recorded in the notification history.
    """
    token = (
        bot_token
        or os.environ.get("DELIMIT_TELEGRAM_BOT_TOKEN", "")
        or _load_secret_value("DELIMIT_TELEGRAM_BOT_TOKEN", "TELEGRAM_MONITOR_BOT_TOKEN")
    )
    chat = (
        chat_id
        or os.environ.get("DELIMIT_TELEGRAM_CHAT_ID", "")
        or _load_secret_value("DELIMIT_TELEGRAM_CHAT_ID", "TELEGRAM_MONITOR_CHAT_ID")
    )
    if not token or not chat:
        return {"error": "telegram bot token and chat id are required"}

    sent_at = datetime.now(timezone.utc).isoformat()
    text = f"[{event_type}] {message}" if event_type else message
    outcome = _post_json(
        f"https://api.telegram.org/bot{token}/sendMessage",
        {
            "chat_id": chat,
            "text": text,
            "disable_web_page_preview": False,
        },
    )
    _record_notification({
        "channel": "telegram",
        "event_type": event_type,
        "message": message,
        "timestamp": sent_at,
        "success": outcome.get("success", False),
    })
    return {
        "channel": "telegram",
        "delivered": outcome.get("success", False),
        "status_code": outcome.get("status_code"),
        "timestamp": sent_at,
        "error": outcome.get("error"),
    }
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def _load_smtp_account(from_account: str) -> Optional[Dict[str, str]]:
    """Look up SMTP credentials for *from_account* in smtp-all.json.

    Args:
        from_account: Email address key in
            ~/.delimit/secrets/smtp-all.json (e.g. 'pro@delimit.ai').

    Returns:
        The account's credential dict, or None when the file is
        missing/unreadable or the key is absent.
    """
    config_path = Path.home() / ".delimit" / "secrets" / "smtp-all.json"
    try:
        if not config_path.exists():
            return None
        with open(config_path, "r", encoding="utf-8") as handle:
            accounts = json.load(handle)
    except (OSError, json.JSONDecodeError) as exc:
        logger.warning("Failed to load smtp-all.json: %s", exc)
        return None
    if from_account in accounts:
        return accounts[from_account]
    return None
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
def _flush_email_digest():
    """Send all queued non-urgent emails as a single HTML digest.

    Drains _email_digest_queue, drops zero-action noise entries, ranks
    the remainder by event-type priority, renders one HTML digest, and
    sends it via SMTP using credentials from smtp-all.json (falling
    back to DELIMIT_SMTP_* environment variables). All failures are
    logged, never raised.

    NOTE(review): this mutates _email_digest_queue and
    _email_send_times without taking _email_throttle_lock —
    presumably the caller holds it; confirm.
    """
    global _email_digest_queue
    if not _email_digest_queue:
        return

    # Snapshot and clear up front so items queued during the flush
    # land in the NEXT digest instead of being lost or doubled.
    items = list(_email_digest_queue)
    _email_digest_queue.clear()

    # Drop pure noise items — zero-action social scans, heartbeats
    noise_types = {"heartbeat", "scan_summary", "daemon_status", "digest_suppressed"}
    items = [i for i in items if i.get("event_type", "") not in noise_types]
    if not items:
        return

    # Rank items by priority: security/alert first, then actions, then
    # drafts. Unknown event types default to 4 (same tier as "info").
    PRIORITY_ORDER = {
        "security": 0, "alert": 0, "gate_failure": 0,
        "deploy": 1, "action": 1, "approve": 1,
        "founder_directive": 2,
        "social_draft": 3, "github_outreach_queued": 3,
        "info": 4, "digest": 4,
        "daemon_status": 5, "scan_summary": 5, "heartbeat": 5,
    }
    items.sort(key=lambda x: PRIORITY_ORDER.get(x.get("event_type", ""), 4))

    # Build digest body — each item gets its own section.
    # Social drafts get the full draft text in a copy block;
    # everything else gets a 500-character summary.
    digest_text = f"{len(items)} notification{'s' if len(items) != 1 else ''} batched in this digest.\n\n"
    for i, item in enumerate(items, 1):
        subj = item.get('subject', 'No subject')
        body_raw = item.get('body', '')
        event = item.get('event_type', '')

        digest_text += f"{i}. {subj}\n\n"

        if event in ('social_draft', 'github_outreach_queued'):
            # Extract just the draft text from the full email body:
            # the copyable content sits between the COPY/END COPY markers.
            if '--- COPY BELOW THIS LINE ---' in body_raw:
                parts = body_raw.split('--- COPY BELOW THIS LINE ---', 1)
                before_copy = parts[0].strip()
                after_marker = parts[1] if len(parts) > 1 else ''
                # Get text before END COPY
                draft_only = after_marker.split('--- END COPY ---')[0].strip() if '--- END COPY ---' in after_marker else after_marker.strip()
                # Show context (WHERE/LINK/WHY) then copy block
                for line in before_copy.split('\n'):
                    line = line.strip()
                    if line.startswith(('WHERE:', 'LINK:', 'WHY:')):
                        digest_text += f"{line}\n"
                digest_text += f"\n--- COPY BELOW THIS LINE ---\n{draft_only}\n--- END COPY ---\n\n"
            else:
                digest_text += f"{body_raw[:500]}\n\n"
        else:
            digest_text += f"{body_raw[:500]}\n\n"

    digest_subject = f"[DIGEST] {len(items)} Delimit notifications"
    digest_html = _render_html_email(digest_subject, digest_text, "digest")

    # Send digest as a single email (bypasses throttle since it IS the flush).
    # NOTE(review): the send timestamp is recorded even when the SMTP send
    # below fails — confirm that is intended.
    _email_send_times.append(_time.time())
    timestamp = datetime.now(timezone.utc).isoformat()
    try:
        # Addressing: prefer the first queued item's account/recipient,
        # then _defaults.to, then DELIMIT_SMTP_TO.
        from_acct = items[0].get("from_account", "") if items else ""
        smtp_to = items[0].get("to", "") if items else ""
        if not smtp_to:
            defaults = _load_smtp_account("_defaults") or {}
            smtp_to = str(defaults.get("to") or os.environ.get("DELIMIT_SMTP_TO", ""))

        # Resolve SMTP account: from_acct → _defaults.from_account → env vars
        acct = _load_smtp_account(from_acct) if from_acct else None
        if not acct or not acct.get("pass"):
            defaults = _load_smtp_account("_defaults") or {}
            fallback_acct = defaults.get("from_account", "")
            if fallback_acct:
                acct = _load_smtp_account(fallback_acct) or {}
            else:
                acct = defaults
        smtp_host = acct.get("host", os.environ.get("DELIMIT_SMTP_HOST", "smtp.gmail.com"))
        smtp_port = int(acct.get("port", os.environ.get("DELIMIT_SMTP_PORT", "587")))
        smtp_user = acct.get("user", os.environ.get("DELIMIT_SMTP_USER", ""))
        smtp_pass = acct.get("pass", os.environ.get("DELIMIT_SMTP_PASS", ""))
        smtp_from = acct.get("from", smtp_user)

        if not smtp_pass:
            logger.warning("Digest flush skipped: no SMTP password")
            return

        msg = MIMEText(digest_html, "html", "utf-8")
        msg["Subject"] = digest_subject
        msg["From"] = smtp_from
        msg["To"] = smtp_to

        # Plain SMTP connection upgraded via STARTTLS (port-587 style).
        with smtplib.SMTP(smtp_host, smtp_port, timeout=15) as server:
            server.starttls()
            server.login(smtp_user, smtp_pass)
            server.sendmail(smtp_from, [smtp_to], msg.as_string())
        delivered = True
        _record_notification({
            "channel": "email",
            "event_type": "digest",
            "to": smtp_to,
            "from": from_acct or "pro@delimit.ai",
            "subject": digest_subject,
            "message": digest_text,
            "timestamp": timestamp,
            "success": delivered,
            "items": len(items),
        })
        logger.info("Flushed email digest: %d items", len(items))
    except Exception as e:
        logger.warning("Digest flush failed: %s", e)
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
def _send_smtp_direct(to: str, subject: str, body: str, from_account: str = "") -> bool:
    """Low-level SMTP send — used by digest flush and direct sends.

    Args:
        to: Recipient email address.
        subject: Subject line.
        body: Plain-text or HTML body. HTML is detected by a leading
            "<html" or "<!DOCTYPE" tag (case-insensitive).
        from_account: Key into ~/.delimit/secrets/smtp-all.json; when
            empty, the _defaults.from_account entry is used.

    Returns:
        True when the message was handed to the SMTP server, False on
        missing credentials or any send failure (logged, not raised).
    """
    if not from_account:
        defaults = _load_smtp_account("_defaults")
        if defaults and defaults.get("from_account"):
            from_account = str(defaults["from_account"])

    # Credential resolution: named account first, env vars as fallback.
    acct = _load_smtp_account(from_account) if from_account else None
    smtp_host = (acct or {}).get("host", os.environ.get("DELIMIT_SMTP_HOST", "smtp.gmail.com"))
    smtp_port = int((acct or {}).get("port", os.environ.get("DELIMIT_SMTP_PORT", "587")))
    smtp_user = (acct or {}).get("user", os.environ.get("DELIMIT_SMTP_USER", ""))
    smtp_pass = (acct or {}).get("pass", os.environ.get("DELIMIT_SMTP_PASS", ""))
    smtp_from = (acct or {}).get("from", smtp_user)

    if not smtp_pass:
        return False

    # Fix: _render_html_email output starts with "<!DOCTYPE html>", which the
    # previous "<html"-only check missed, sending rendered HTML as text/plain.
    lowered = body.strip().lower()
    content_type = "html" if lowered.startswith(("<html", "<!doctype")) else "plain"
    msg = MIMEText(body, content_type, "utf-8")
    msg["Subject"] = subject
    msg["From"] = smtp_from
    msg["To"] = to

    try:
        # Plain SMTP connection upgraded via STARTTLS (port-587 style).
        with smtplib.SMTP(smtp_host, smtp_port, timeout=15) as server:
            server.starttls()
            server.login(smtp_user, smtp_pass)
            server.sendmail(smtp_from, [to], msg.as_string())
        return True
    except Exception as e:
        logger.error("SMTP send failed: %s", e)
        return False
|
|
473
|
+
|
|
474
|
+
|
|
475
|
+
def _render_html_email(subject: str, body: str, event_type: str) -> str:
|
|
476
|
+
"""Render a professional HTML email from a plain-text body.
|
|
477
|
+
|
|
478
|
+
Converts markdown-like patterns to HTML:
|
|
479
|
+
- Lines starting with "---" become <hr>
|
|
480
|
+
- Lines with ALL CAPS become section headers
|
|
481
|
+
- Lines starting with "- " become list items
|
|
482
|
+
- URLs become clickable links
|
|
483
|
+
- Draft text in quotes gets styled as blockquotes
|
|
484
|
+
- "approve/reject" instructions get styled as action buttons
|
|
485
|
+
"""
|
|
486
|
+
import re
|
|
487
|
+
import html as _html
|
|
488
|
+
|
|
489
|
+
# Determine accent color from event type
|
|
490
|
+
color_map = {
|
|
491
|
+
"social_draft": "#7C3AED", # purple — approval needed
|
|
492
|
+
"outreach": "#7C3AED",
|
|
493
|
+
"deploy": "#059669", # green — deploy/success
|
|
494
|
+
"gate_failure": "#DC2626", # red — failure/alert
|
|
495
|
+
"digest": "#2563EB", # blue — informational
|
|
496
|
+
"info": "#2563EB",
|
|
497
|
+
}
|
|
498
|
+
accent = color_map.get(event_type, "#7C3AED")
|
|
499
|
+
|
|
500
|
+
# Parse subject for badge
|
|
501
|
+
badge = ""
|
|
502
|
+
badge_match = re.match(r'\[([A-Z]+)\]', subject)
|
|
503
|
+
if badge_match:
|
|
504
|
+
badge = badge_match.group(1)
|
|
505
|
+
|
|
506
|
+
def _render_copy_block(label: str, text: str) -> str:
|
|
507
|
+
escaped_text = _html.escape(text.strip("\n"))
|
|
508
|
+
escaped_label = _html.escape(label)
|
|
509
|
+
return (
|
|
510
|
+
f'<div style="margin:14px 0">'
|
|
511
|
+
f'<div style="background:{accent};color:white;padding:8px 12px;'
|
|
512
|
+
f'border-radius:8px 8px 0 0;font-size:12px;font-weight:700;letter-spacing:0.3px">'
|
|
513
|
+
f'{escaped_label}</div>'
|
|
514
|
+
f'<div style="border:1px solid #D1D5DB;border-top:none;border-radius:0 0 8px 8px;'
|
|
515
|
+
f'background:#F9FAFB;padding:12px">'
|
|
516
|
+
f'<div style="font-size:11px;color:#6B7280;margin-bottom:8px">'
|
|
517
|
+
f'Tap and hold inside this block to copy.</div>'
|
|
518
|
+
f'<pre style="margin:0;white-space:pre-wrap;word-break:break-word;'
|
|
519
|
+
f'font:13px/1.55 SFMono-Regular,Consolas,Monaco,monospace;color:#111827">'
|
|
520
|
+
f'{escaped_text}</pre>'
|
|
521
|
+
f'</div>'
|
|
522
|
+
f'</div>'
|
|
523
|
+
)
|
|
524
|
+
|
|
525
|
+
# Convert body lines to HTML
|
|
526
|
+
lines = body.split("\n")
|
|
527
|
+
html_lines = []
|
|
528
|
+
in_list = False
|
|
529
|
+
active_copy_label = None
|
|
530
|
+
active_copy_lines = []
|
|
531
|
+
|
|
532
|
+
for line in lines:
|
|
533
|
+
stripped = line.strip()
|
|
534
|
+
|
|
535
|
+
if stripped.startswith("--- COPY BELOW THIS LINE ---"):
|
|
536
|
+
if in_list:
|
|
537
|
+
html_lines.append("</ul>")
|
|
538
|
+
in_list = False
|
|
539
|
+
active_copy_label = "Manual Post Text"
|
|
540
|
+
active_copy_lines = []
|
|
541
|
+
continue
|
|
542
|
+
|
|
543
|
+
if stripped.startswith("--- TITLE (paste in title field) ---"):
|
|
544
|
+
if in_list:
|
|
545
|
+
html_lines.append("</ul>")
|
|
546
|
+
in_list = False
|
|
547
|
+
active_copy_label = "Post Title"
|
|
548
|
+
active_copy_lines = []
|
|
549
|
+
continue
|
|
550
|
+
|
|
551
|
+
if stripped.startswith("--- BODY (paste in body field) ---"):
|
|
552
|
+
if active_copy_label and active_copy_lines:
|
|
553
|
+
html_lines.append(_render_copy_block(active_copy_label, "\n".join(active_copy_lines)))
|
|
554
|
+
if in_list:
|
|
555
|
+
html_lines.append("</ul>")
|
|
556
|
+
in_list = False
|
|
557
|
+
active_copy_label = "Post Body"
|
|
558
|
+
active_copy_lines = []
|
|
559
|
+
continue
|
|
560
|
+
|
|
561
|
+
if stripped.startswith("--- SOURCE POST TITLE ---"):
|
|
562
|
+
if active_copy_label and active_copy_lines:
|
|
563
|
+
html_lines.append(_render_copy_block(active_copy_label, "\n".join(active_copy_lines)))
|
|
564
|
+
if in_list:
|
|
565
|
+
html_lines.append("</ul>")
|
|
566
|
+
in_list = False
|
|
567
|
+
active_copy_label = "Source Post Title"
|
|
568
|
+
active_copy_lines = []
|
|
569
|
+
continue
|
|
570
|
+
|
|
571
|
+
if stripped.startswith("--- SOURCE POST BODY ---"):
|
|
572
|
+
if active_copy_label and active_copy_lines:
|
|
573
|
+
html_lines.append(_render_copy_block(active_copy_label, "\n".join(active_copy_lines)))
|
|
574
|
+
if in_list:
|
|
575
|
+
html_lines.append("</ul>")
|
|
576
|
+
in_list = False
|
|
577
|
+
active_copy_label = "Source Post Body"
|
|
578
|
+
active_copy_lines = []
|
|
579
|
+
continue
|
|
580
|
+
|
|
581
|
+
if stripped in ("--- END ---", "--- END COPY ---") and active_copy_label is not None:
|
|
582
|
+
html_lines.append(_render_copy_block(active_copy_label, "\n".join(active_copy_lines)))
|
|
583
|
+
active_copy_label = None
|
|
584
|
+
active_copy_lines = []
|
|
585
|
+
continue
|
|
586
|
+
|
|
587
|
+
if active_copy_label is not None:
|
|
588
|
+
active_copy_lines.append(line)
|
|
589
|
+
continue
|
|
590
|
+
|
|
591
|
+
if not stripped:
|
|
592
|
+
if in_list:
|
|
593
|
+
html_lines.append("</ul>")
|
|
594
|
+
in_list = False
|
|
595
|
+
html_lines.append("<br>")
|
|
596
|
+
continue
|
|
597
|
+
|
|
598
|
+
# Draft block headers: "--- Draft <id> (platform) ---"
|
|
599
|
+
if stripped.startswith("--- Draft ") and stripped.endswith("---"):
|
|
600
|
+
if in_list:
|
|
601
|
+
html_lines.append("</ul>")
|
|
602
|
+
in_list = False
|
|
603
|
+
draft_label = _html.escape(stripped.strip("- ").strip())
|
|
604
|
+
html_lines.append(
|
|
605
|
+
f'<div style="background:{accent};color:white;padding:8px 14px;'
|
|
606
|
+
f'border-radius:6px 6px 0 0;margin-top:16px;font-size:13px;font-weight:700">'
|
|
607
|
+
f'{draft_label}</div>'
|
|
608
|
+
f'<div style="background:#F9FAFB;border:1px solid #E5E7EB;border-top:none;'
|
|
609
|
+
f'border-radius:0 0 6px 6px;padding:12px 14px;margin-bottom:4px">'
|
|
610
|
+
)
|
|
611
|
+
# The next lines until the next "---" or "To approve" will be inside this box
|
|
612
|
+
# We'll close it when we hit the next separator
|
|
613
|
+
continue
|
|
614
|
+
|
|
615
|
+
# Horizontal rules / separators
|
|
616
|
+
if stripped.startswith("---") or stripped.startswith("==="):
|
|
617
|
+
if in_list:
|
|
618
|
+
html_lines.append("</ul>")
|
|
619
|
+
in_list = False
|
|
620
|
+
# Close any open draft box
|
|
621
|
+
html_lines.append('</div>')
|
|
622
|
+
html_lines.append(f'<hr style="border:none;border-top:1px solid #E5E7EB;margin:16px 0">')
|
|
623
|
+
continue
|
|
624
|
+
|
|
625
|
+
# Section headers (ALL CAPS lines or lines ending with colon that are short)
|
|
626
|
+
if (stripped.isupper() and len(stripped) > 3 and len(stripped) < 60) or \
|
|
627
|
+
(stripped.endswith(":") and len(stripped) < 50 and stripped[:-1].replace(" ", "").replace("-", "").isalpha()):
|
|
628
|
+
if in_list:
|
|
629
|
+
html_lines.append("</ul>")
|
|
630
|
+
in_list = False
|
|
631
|
+
html_lines.append(
|
|
632
|
+
f'<h3 style="color:{accent};font-size:13px;font-weight:700;'
|
|
633
|
+
f'text-transform:uppercase;letter-spacing:0.5px;margin:20px 0 8px 0;'
|
|
634
|
+
f'border-bottom:2px solid {accent};padding-bottom:4px">'
|
|
635
|
+
f'{_html.escape(stripped)}</h3>'
|
|
636
|
+
)
|
|
637
|
+
continue
|
|
638
|
+
|
|
639
|
+
# List items
|
|
640
|
+
if stripped.startswith("- ") or stripped.startswith("* "):
|
|
641
|
+
if not in_list:
|
|
642
|
+
html_lines.append('<ul style="margin:4px 0;padding-left:20px">')
|
|
643
|
+
in_list = True
|
|
644
|
+
item = _html.escape(stripped[2:])
|
|
645
|
+
# Bold draft IDs
|
|
646
|
+
item = re.sub(r'([0-9a-f]{12})', r'<code style="background:#F3F4F6;padding:1px 4px;border-radius:3px;font-size:12px">\1</code>', item)
|
|
647
|
+
html_lines.append(f'<li style="margin:4px 0;color:#374151">{item}</li>')
|
|
648
|
+
continue
|
|
649
|
+
|
|
650
|
+
if in_list:
|
|
651
|
+
html_lines.append("</ul>")
|
|
652
|
+
in_list = False
|
|
653
|
+
|
|
654
|
+
escaped = _html.escape(stripped)
|
|
655
|
+
|
|
656
|
+
# Convert URLs to clickable links
|
|
657
|
+
escaped = re.sub(
|
|
658
|
+
r'(https?://[^\s<>&"]+)',
|
|
659
|
+
r'<a href="\1" style="color:{};text-decoration:underline">\1</a>'.format(accent),
|
|
660
|
+
escaped,
|
|
661
|
+
)
|
|
662
|
+
|
|
663
|
+
# Style quoted draft text
|
|
664
|
+
if escaped.startswith('"') and escaped.endswith('"'):
|
|
665
|
+
html_lines.append(
|
|
666
|
+
f'<blockquote style="border-left:3px solid {accent};margin:8px 0;'
|
|
667
|
+
f'padding:8px 12px;background:#F9FAFB;color:#374151;font-style:italic">'
|
|
668
|
+
f'{escaped}</blockquote>'
|
|
669
|
+
)
|
|
670
|
+
continue
|
|
671
|
+
|
|
672
|
+
# Style approve/reject instructions as action callouts
|
|
673
|
+
if any(kw in stripped.lower() for kw in ("to approve", "reply with", "reply \"approve")):
|
|
674
|
+
html_lines.append(
|
|
675
|
+
f'<div style="background:#F0FDF4;border:1px solid #BBF7D0;border-radius:6px;'
|
|
676
|
+
f'padding:10px 14px;margin:12px 0;font-weight:600;color:#166534">'
|
|
677
|
+
f'{escaped}</div>'
|
|
678
|
+
)
|
|
679
|
+
continue
|
|
680
|
+
|
|
681
|
+
if any(kw in stripped.lower() for kw in ("to reject", "reply \"reject")):
|
|
682
|
+
html_lines.append(
|
|
683
|
+
f'<div style="background:#FEF2F2;border:1px solid #FECACA;border-radius:6px;'
|
|
684
|
+
f'padding:10px 14px;margin:12px 0;color:#991B1B">'
|
|
685
|
+
f'{escaped}</div>'
|
|
686
|
+
)
|
|
687
|
+
continue
|
|
688
|
+
|
|
689
|
+
# Protocol warnings
|
|
690
|
+
if "PROTOCOL WARNING" in stripped:
|
|
691
|
+
html_lines.append(
|
|
692
|
+
f'<div style="background:#FEF3C7;border:1px solid #FDE68A;border-radius:6px;'
|
|
693
|
+
f'padding:10px 14px;margin:12px 0;color:#92400E;font-size:12px">'
|
|
694
|
+
f'{escaped}</div>'
|
|
695
|
+
)
|
|
696
|
+
continue
|
|
697
|
+
|
|
698
|
+
# Regular paragraph
|
|
699
|
+
html_lines.append(f'<p style="margin:6px 0;color:#374151;line-height:1.5">{escaped}</p>')
|
|
700
|
+
|
|
701
|
+
if active_copy_label and active_copy_lines:
|
|
702
|
+
html_lines.append(_render_copy_block(active_copy_label, "\n".join(active_copy_lines)))
|
|
703
|
+
if in_list:
|
|
704
|
+
html_lines.append("</ul>")
|
|
705
|
+
|
|
706
|
+
body_html = "\n".join(html_lines)
|
|
707
|
+
|
|
708
|
+
# Build full HTML email
|
|
709
|
+
badge_html = ""
|
|
710
|
+
if badge:
|
|
711
|
+
badge_colors = {
|
|
712
|
+
"APPROVE": ("#7C3AED", "#EDE9FE"),
|
|
713
|
+
"ACTION": ("#D97706", "#FEF3C7"),
|
|
714
|
+
"ALERT": ("#DC2626", "#FEE2E2"),
|
|
715
|
+
"URGENT": ("#DC2626", "#FEE2E2"),
|
|
716
|
+
"GATE": ("#DC2626", "#FEE2E2"),
|
|
717
|
+
"DIGEST": ("#2563EB", "#DBEAFE"),
|
|
718
|
+
"INFO": ("#6B7280", "#F3F4F6"),
|
|
719
|
+
"OUTREACH": ("#7C3AED", "#EDE9FE"),
|
|
720
|
+
"DEPLOY": ("#059669", "#D1FAE5"),
|
|
721
|
+
}
|
|
722
|
+
fg, bg = badge_colors.get(badge, ("#6B7280", "#F3F4F6"))
|
|
723
|
+
badge_html = (
|
|
724
|
+
f'<span style="display:inline-block;background:{bg};color:{fg};'
|
|
725
|
+
f'font-size:11px;font-weight:700;padding:2px 8px;border-radius:4px;'
|
|
726
|
+
f'letter-spacing:0.5px;margin-bottom:8px">{badge}</span><br>'
|
|
727
|
+
)
|
|
728
|
+
|
|
729
|
+
# Clean subject for display (remove bracket prefix)
|
|
730
|
+
display_subject = re.sub(r'^\[[A-Z]+\]\s*', '', subject)
|
|
731
|
+
|
|
732
|
+
return f"""<!DOCTYPE html>
|
|
733
|
+
<html>
|
|
734
|
+
<head><meta charset="utf-8"><meta name="viewport" content="width=device-width,initial-scale=1"></head>
|
|
735
|
+
<body style="margin:0;padding:0;background:#F9FAFB;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,sans-serif">
|
|
736
|
+
<table width="100%" cellpadding="0" cellspacing="0" style="background:#F9FAFB;padding:20px 0">
|
|
737
|
+
<tr><td align="center">
|
|
738
|
+
<table width="600" cellpadding="0" cellspacing="0" style="background:#FFFFFF;border-radius:8px;box-shadow:0 1px 3px rgba(0,0,0,0.1);overflow:hidden">
|
|
739
|
+
|
|
740
|
+
<!-- Header bar -->
|
|
741
|
+
<tr><td style="background:{accent};padding:16px 24px">
|
|
742
|
+
<table width="100%" cellpadding="0" cellspacing="0"><tr>
|
|
743
|
+
<td><span style="color:white;font-size:14px;font-weight:700;letter-spacing:0.5px">DELIMIT</span></td>
|
|
744
|
+
<td align="right"><span style="color:rgba(255,255,255,0.8);font-size:11px">{event_type or 'notification'}</span></td>
|
|
745
|
+
</tr></table>
|
|
746
|
+
</td></tr>
|
|
747
|
+
|
|
748
|
+
<!-- Body -->
|
|
749
|
+
<tr><td style="padding:24px">
|
|
750
|
+
{badge_html}
|
|
751
|
+
<h2 style="margin:0 0 16px 0;color:#111827;font-size:18px;font-weight:600;line-height:1.3">{_html.escape(display_subject)}</h2>
|
|
752
|
+
{body_html}
|
|
753
|
+
</td></tr>
|
|
754
|
+
|
|
755
|
+
<!-- Footer -->
|
|
756
|
+
<tr><td style="background:#F9FAFB;padding:12px 24px;border-top:1px solid #E5E7EB">
|
|
757
|
+
<table width="100%" cellpadding="0" cellspacing="0"><tr>
|
|
758
|
+
<td><span style="color:#9CA3AF;font-size:11px">Sent by Delimit governance layer</span></td>
|
|
759
|
+
<td align="right"><a href="https://delimit.ai" style="color:#9CA3AF;font-size:11px;text-decoration:none">delimit.ai</a></td>
|
|
760
|
+
</tr></table>
|
|
761
|
+
</td></tr>
|
|
762
|
+
|
|
763
|
+
</table>
|
|
764
|
+
</td></tr>
|
|
765
|
+
</table>
|
|
766
|
+
</body>
|
|
767
|
+
</html>"""
|
|
768
|
+
|
|
769
|
+
|
|
770
|
+
def send_email(
    to: str = "",
    subject: str = "",
    body: str = "",
    from_account: str = "",
    message: str = "",
    event_type: str = "",
    attachments: Optional[list] = None,
) -> Dict[str, Any]:
    """Send an email notification via SMTP.

    Args:
        to: Recipient email address. Falls back to DELIMIT_SMTP_TO or
            owner@example.com.
        subject: Email subject line.
        body: Email body text (preferred). Falls back to 'message' for
            backward compatibility.
        from_account: Sender account key in ~/.delimit/secrets/smtp-all.json
            (e.g. 'pro@delimit.ai', 'admin@wire.report'). If provided, SMTP
            credentials are loaded from that file instead of env vars.
        message: Email body text (legacy parameter, use 'body' instead).
        event_type: Event category for filtering/logging.
        attachments: List of file paths to attach to the email.

    Credential resolution order:
        1. from_account lookup in ~/.delimit/secrets/smtp-all.json
        2. DELIMIT_SMTP_* environment variables

    Returns:
        Dict describing the outcome: delivered/queued flags, timestamp,
        and (on success) the generated Message-ID, or an 'error' key.
    """
    # body takes precedence, fall back to message for backward compat
    email_body = body or message

    timestamp = datetime.now(timezone.utc).isoformat()
    event_key = (event_type or "").lower()
    subject_lower = (subject or "").lower()

    # Batch automated scan output — daemon heartbeats, scan summaries
    # NOTE: social_draft sends IMMEDIATELY — those are the actionable emails
    # with copy text + links that the founder needs to post from.
    force_digest = event_key in (
        "daemon_status", "scan_summary", "heartbeat",
        "github_outreach_queued",
        "social_digest", "digest_suppressed",
    )

    # Only these event types send immediately (founder needs to see them now).
    # The tag match is a substring test against event_type + subject combined.
    is_urgent = (not force_digest) and any(tag in event_key + subject_lower
                for tag in ("p0", "urgent", "alert", "critical", "approve",
                            "founder_directive", "gate_failure",
                            "security", "deploy", "action",
                            "completed", "social_draft"))

    # Throttle bookkeeping is shared module state; guard with the lock.
    global _last_digest_flush
    with _email_throttle_lock:
        now = _time.time()
        # Prune sends older than 1 hour (sliding-window rate limit)
        _email_send_times[:] = [t for t in _email_send_times if now - t < 3600]

        if force_digest or (not is_urgent and len(_email_send_times) >= _EMAIL_MAX_PER_HOUR):
            # Queue for digest instead of sending immediately
            _email_digest_queue.append({
                "to": to, "subject": subject, "body": email_body,
                "from_account": from_account, "event_type": event_type,
                "timestamp": timestamp,
            })
            # Flush digest when: 10+ items queued, OR interval elapsed with any items
            queue_size = len(_email_digest_queue)
            interval_elapsed = now - _last_digest_flush >= _EMAIL_DIGEST_INTERVAL
            if (queue_size >= 10) or (interval_elapsed and queue_size > 0):
                _flush_email_digest()
                _last_digest_flush = now
            return {
                "channel": "email",
                "delivered": False,
                "queued": True,
                "reason": "Batched for digest." if force_digest else f"Throttled ({len(_email_send_times)}/{_EMAIL_MAX_PER_HOUR} per hour). Batched for digest.",
                "queue_size": len(_email_digest_queue),
                "timestamp": timestamp,
            }
        # Count this immediate send against the hourly window.
        _email_send_times.append(now)

    # Try from_account first, then _defaults, then fall back to env vars
    if not from_account:
        defaults = _load_smtp_account("_defaults")
        if defaults and defaults.get("from_account"):
            from_account = defaults["from_account"]
    account_creds = _load_smtp_account(from_account) if from_account else None

    if account_creds:
        smtp_host = account_creds.get("host", "")
        smtp_port = int(account_creds.get("port", 587))
        smtp_user = account_creds.get("user", "")
        smtp_pass = account_creds.get("pass", "")
        smtp_from = account_creds.get("from_alias", from_account)
    else:
        smtp_host = os.environ.get("DELIMIT_SMTP_HOST", "")
        smtp_port = int(os.environ.get("DELIMIT_SMTP_PORT", "587"))
        smtp_user = os.environ.get("DELIMIT_SMTP_USER", "")
        smtp_pass = os.environ.get("DELIMIT_SMTP_PASS", "")
        smtp_from = os.environ.get("DELIMIT_SMTP_FROM", "")

    # Resolve recipient: explicit > env var > smtp-all.json _defaults
    smtp_to = to or os.environ.get("DELIMIT_SMTP_TO", "")
    if not smtp_to:
        defaults = _load_smtp_account("_defaults")
        if defaults:
            smtp_to = defaults.get("to", "")

    # Missing config is logged as an intent record, never raised.
    if not all([smtp_host, smtp_from, smtp_to]):
        record = {
            "channel": "email",
            "event_type": event_type,
            "to": smtp_to,
            "from": smtp_from,
            "message": email_body,
            "subject": subject,
            "timestamp": timestamp,
            "success": False,
            "reason": "smtp_not_configured",
        }
        _record_notification(record)
        return {
            "channel": "email",
            "delivered": False,
            "timestamp": timestamp,
            "error": "SMTP not configured. Set DELIMIT_SMTP_HOST, DELIMIT_SMTP_FROM, DELIMIT_SMTP_TO environment variables, or use from_account with smtp-all.json.",
            "intent_logged": True,
        }

    subj = subject or f"Delimit: {event_type or 'Notification'}"
    html_body = _render_html_email(subj, email_body, event_type)

    # Attachments force a multipart message; otherwise send plain HTML.
    if attachments:
        from email.mime.multipart import MIMEMultipart
        from email.mime.base import MIMEBase
        from email import encoders
        msg = MIMEMultipart()
        msg.attach(MIMEText(html_body, "html"))
        for fpath in attachments:
            fp = Path(fpath)
            # Missing attachment paths are silently skipped (best-effort).
            if fp.exists():
                part = MIMEBase("application", "octet-stream")
                part.set_payload(fp.read_bytes())
                encoders.encode_base64(part)
                part.add_header("Content-Disposition", f"attachment; filename={fp.name}")
                msg.attach(part)
    else:
        msg = MIMEText(html_body, "html")
    msg["Subject"] = subj
    msg["From"] = smtp_from
    msg["To"] = smtp_to

    # Generate a unique Message-ID for threading support (Consensus 116)
    import uuid as _uuid
    domain = smtp_from.split("@", 1)[1] if "@" in smtp_from else "delimit.ai"
    message_id = f"<{_uuid.uuid4().hex}@{domain}>"
    msg["Message-ID"] = message_id

    try:
        with smtplib.SMTP(smtp_host, smtp_port, timeout=10) as server:
            # STARTTLS + login only when credentials exist; an open relay
            # (e.g. localhost) is used unauthenticated and unencrypted.
            if smtp_user and smtp_pass:
                server.starttls()
                server.login(smtp_user, smtp_pass)
            server.sendmail(smtp_from, [smtp_to], msg.as_string())

        record = {
            "channel": "email",
            "event_type": event_type,
            "to": smtp_to,
            "from": smtp_from,
            "subject": subj,
            "message": email_body,
            "timestamp": timestamp,
            "success": True,
            "message_id": message_id,
        }
        _record_notification(record)

        return {
            "channel": "email",
            "delivered": True,
            "timestamp": timestamp,
            "subject": subj,
            "to": smtp_to,
            "from": smtp_from,
            "message_id": message_id,
        }
    except Exception as e:
        record = {
            "channel": "email",
            "event_type": event_type,
            "to": smtp_to,
            "from": smtp_from,
            "message": email_body,
            "timestamp": timestamp,
            "success": False,
            "error": str(e),
        }
        _record_notification(record)
        return {
            "channel": "email",
            "delivered": False,
            "timestamp": timestamp,
            "error": str(e),
        }
|
|
974
|
+
|
|
975
|
+
|
|
976
|
+
# ═════════════════════════════════════════════════════════════════════
# Email Protocol — enforced server-side, model-agnostic
# ═════════════════════════════════════════════════════════════════════
# Every email must be self-contained and actionable on mobile.
# The protocol validates required sections per event_type and rejects
# or fixes emails that don't meet the standard.
# Consumed by _enforce_email_protocol() below.

# Subject line MUST start with one of these brackets:
_VALID_SUBJECT_PREFIXES = (
    "[APPROVE]", "[ACTION]", "[INFO]", "[ALERT]", "[DIGEST]",
    "[URGENT]", "[OUTREACH]", "[DEPLOY]", "[GATE]",
)

# Required sections per event_type. Each is a (header, description) tuple.
# Headers are matched case-insensitively as substrings of the email body;
# the description is only shown in the "missing sections" warning text.
_EMAIL_PROTOCOL: Dict[str, List[tuple]] = {
    "social_draft": [
        ("THREAD CONTEXT", "subreddit/platform, post topic, engagement stats"),
        ("DRAFT", "the full draft text, not just an ID"),
        ("TO APPROVE", "reply instructions with draft_id"),
    ],
    "outreach": [
        ("TARGETS FOUND", "list with platform, title/snippet, URL"),
        ("DRAFTS", "full draft text for each, with draft_id"),
        ("TO APPROVE", "reply instructions"),
    ],
    "deploy": [
        ("WHAT CHANGED", "summary of changes being deployed"),
        ("GATES PASSED", "test, security, lint results"),
        ("TO APPROVE", "reply instructions or auto-proceed note"),
    ],
    "gate_failure": [
        ("WHAT FAILED", "which gate and why"),
        ("IMPACT", "what is blocked"),
        ("TO FIX", "next steps"),
    ],
    "digest": [
        ("COMPLETED", "what was done since last digest"),
        ("PENDING YOUR ACTION", "items needing founder response"),
    ],
}
|
|
1016
|
+
|
|
1017
|
+
|
|
1018
|
+
def _load_drafts_by_ids(draft_ids: list) -> list:
|
|
1019
|
+
"""Load draft entries from social_drafts.jsonl matching the given IDs."""
|
|
1020
|
+
drafts_file = Path.home() / ".delimit" / "social_drafts.jsonl"
|
|
1021
|
+
if not drafts_file.exists():
|
|
1022
|
+
return []
|
|
1023
|
+
results = []
|
|
1024
|
+
id_set = set(draft_ids)
|
|
1025
|
+
try:
|
|
1026
|
+
for line in drafts_file.read_text().splitlines():
|
|
1027
|
+
if not line.strip():
|
|
1028
|
+
continue
|
|
1029
|
+
try:
|
|
1030
|
+
d = json.loads(line)
|
|
1031
|
+
if d.get("draft_id") in id_set and d.get("status") == "pending":
|
|
1032
|
+
results.append(d)
|
|
1033
|
+
except (json.JSONDecodeError, ValueError):
|
|
1034
|
+
continue
|
|
1035
|
+
except Exception:
|
|
1036
|
+
pass
|
|
1037
|
+
return results
|
|
1038
|
+
|
|
1039
|
+
|
|
1040
|
+
def _enforce_email_protocol(subject: str, message: str, event_type: str) -> tuple:
    """Validate and fix email against the protocol. Returns (subject, message, warnings).

    Enforcement steps:
        1. Ensure the subject starts with a bracket from _VALID_SUBJECT_PREFIXES,
           inferring one from event_type when absent.
        2. Append a visible PROTOCOL WARNING when required sections for this
           event_type (per _EMAIL_PROTOCOL) are missing from the body.
        3. For social_draft/outreach emails, auto-inject full draft texts for
           any 12-hex draft IDs mentioned in the body.
        4. Append the standard Delimit footer when absent.

    Args:
        subject: Proposed subject line (possibly missing its prefix).
        message: Plain-text email body.
        event_type: Event category used to pick protocol rules.

    Returns:
        (subject, message, warnings) — the possibly-amended subject and body,
        plus a list of human-readable notes describing what was changed.
    """
    warnings = []

    # 1. Subject must have a valid prefix bracket
    if not any(subject.startswith(p) for p in _VALID_SUBJECT_PREFIXES):
        # Try to infer from event_type
        prefix_map = {
            "social_draft": "[APPROVE]",
            "outreach": "[OUTREACH]",
            "deploy": "[DEPLOY]",
            "gate_failure": "[ALERT]",
            "digest": "[DIGEST]",
            "info": "[INFO]",
        }
        prefix = prefix_map.get(event_type, "[INFO]")
        subject = f"{prefix} {subject}"
        warnings.append(f"Subject prefix added: {prefix}")

    # 2. Check required sections for this event_type
    required = _EMAIL_PROTOCOL.get(event_type, [])
    msg_upper = message.upper()
    missing = []
    for header, desc in required:
        # Check if the section header appears (case-insensitive, with or without colon)
        if header.upper() not in msg_upper:
            missing.append(f"{header} ({desc})")

    if missing:
        # Append a protocol warning to the email body so the founder sees what's missing
        message += "\n\n" + "=" * 40
        message += "\nPROTOCOL WARNING — Missing required sections:"
        for m in missing:
            message += f"\n - {m}"
        message += "\n\nThis email may not be fully actionable. The sending model"
        message += "\nskipped required context. Check drafts via delimit_social_approve."
        warnings.append(f"Missing sections: {', '.join(m.split(' (')[0] for m in missing)}")

    # 3. Outreach/social_draft emails — auto-inject full draft text from social_drafts.jsonl
    if event_type in ("social_draft", "outreach"):
        import re
        # BUGFIX: anchor with word boundaries so a 12-hex window inside a
        # longer hex token (commit SHA, UUID) is not mistaken for a draft ID.
        draft_ids = re.findall(r'\b[0-9a-f]{12}\b', message)
        if draft_ids:
            drafts = _load_drafts_by_ids(draft_ids)
            if drafts:
                message += "\n\n" + "=" * 40
                message += "\nCOPY-READY DRAFTS"
                message += "\n" + "=" * 40
                for d in drafts:
                    did = d.get("draft_id", "")
                    text = d.get("text", "")
                    platform = d.get("platform", "")
                    ctx = d.get("context", "")
                    thread_url = d.get("thread_url", "")
                    reply_to_id = d.get("reply_to_id", "")
                    # Try to extract URL from context if thread_url is empty
                    if not thread_url and ctx:
                        url_match = re.search(r'https?://[^\s]+', ctx)
                        if url_match:
                            thread_url = url_match.group(0)
                    message += f"\n\n--- Draft {did} ({platform}) ---"
                    message += f"\nWHERE: {platform}"
                    where_link = thread_url or (f"https://x.com/i/status/{reply_to_id}" if reply_to_id else "")
                    if where_link:
                        message += f"\nLINK: {where_link}"
                    if ctx:
                        message += f"\nWHY: {ctx}"
                    message += f"\nWHAT:\n--- COPY BELOW THIS LINE ---\n{text}\n--- END COPY ---"
                    message += f"\n\nTo approve: reply \"approve {did}\""
                message += "\n\n" + "=" * 40
                warnings.append(f"Auto-injected {len(drafts)} draft texts from social_drafts.jsonl")

    # 4. Always append the standard footer
    if "delimit.ai" not in message.lower() and "Delimit" not in message:
        message += "\n\n---\nSent by Delimit governance layer"

    return subject, message, warnings
|
|
1117
|
+
|
|
1118
|
+
|
|
1119
|
+
def send_notification(
    channel: str = "webhook",
    message: str = "",
    webhook_url: str = "",
    subject: str = "",
    event_type: str = "",
    to: str = "",
    from_account: str = "",
) -> Dict[str, Any]:
    """Dispatch a notification to one outbound channel.

    Supported channels: webhook, slack, email, telegram. Email bodies are
    passed through the server-side email protocol enforcer first, and any
    protocol warnings it produced are attached to the delivery result.
    """
    if not message:
        return {"error": "message is required"}

    if channel == "email":
        # Email is the only channel with a protocol-enforcement pass.
        subject, message, protocol_warnings = _enforce_email_protocol(subject, message, event_type)
        result = send_email(
            to=to,
            subject=subject,
            message=message,
            from_account=from_account,
            event_type=event_type,
        )
        if protocol_warnings:
            result["protocol_warnings"] = protocol_warnings
        return result

    if channel == "webhook":
        return send_webhook(webhook_url, message, event_type)
    if channel == "slack":
        return send_slack(webhook_url, message, event_type)
    if channel == "telegram":
        return send_telegram(message=message, event_type=event_type)

    return {"error": f"Unknown channel: {channel}. Supported: webhook, slack, email, telegram"}
|
|
1156
|
+
|
|
1157
|
+
|
|
1158
|
+
# ═════════════════════════════════════════════════════════════════════
# LED-233: Impact-Based Notification Routing
# ═════════════════════════════════════════════════════════════════════

# On-disk override for the routing table below (YAML, or JSON fallback
# when PyYAML is unavailable — see load_routing_config()).
ROUTING_CONFIG_FILE = Path.home() / ".delimit" / "notify_routing.yaml"

# Severity aliases — map various input labels to canonical levels
# (critical / warning / info). Used by _classify_severity().
_SEVERITY_ALIASES: Dict[str, str] = {
    "critical": "critical",
    "breaking": "critical",
    "error": "critical",
    "major": "critical",
    "warning": "warning",
    "non-breaking": "warning",
    "minor": "warning",
    "info": "info",
    "cosmetic": "info",
    "docs": "info",
    "patch": "info",
    "none": "info",
}

# Fallback routing table used when ROUTING_CONFIG_FILE is absent or invalid.
# Per severity: target channels, optional email subject prefix, webhook
# priority, and whether suppressed items should be rolled into a digest.
DEFAULT_ROUTING_CONFIG: Dict[str, Any] = {
    "routing": {
        "critical": {
            "channels": ["email", "webhook"],
            "email_subject_prefix": "[URGENT]",
            "webhook_priority": "high",
        },
        "warning": {
            "channels": ["webhook"],
            "webhook_priority": "normal",
        },
        "info": {
            "channels": [],
            "digest": True,
        },
    },
}
|
|
1197
|
+
|
|
1198
|
+
|
|
1199
|
+
def load_routing_config() -> Dict[str, Any]:
    """Load routing config from ~/.delimit/notify_routing.yaml or return defaults.

    The file is parsed with PyYAML when available, otherwise as JSON.
    Any parse failure or a document without a top-level 'routing' key
    falls back to DEFAULT_ROUTING_CONFIG.

    Returns:
        The routing configuration dict with a 'routing' key.
    """
    if not ROUTING_CONFIG_FILE.exists():
        return DEFAULT_ROUTING_CONFIG
    try:
        with open(ROUTING_CONFIG_FILE, "r", encoding="utf-8") as fh:
            if _yaml is not None:
                loaded = _yaml.safe_load(fh)
            else:
                # Fallback: try JSON (yaml not installed)
                loaded = json.load(fh)
        if isinstance(loaded, dict) and "routing" in loaded:
            return loaded
    except Exception as exc:
        logger.warning("Failed to load routing config from %s: %s", ROUTING_CONFIG_FILE, exc)
    return DEFAULT_ROUTING_CONFIG
|
|
1221
|
+
|
|
1222
|
+
|
|
1223
|
+
def save_routing_config(config: Dict[str, Any]) -> Dict[str, Any]:
    """Save routing config to ~/.delimit/notify_routing.yaml.

    Written as YAML when PyYAML is installed, JSON otherwise, so that
    load_routing_config() can read it back either way.

    Args:
        config: Full routing config dict (must contain 'routing' key).

    Returns:
        Status dict with success/error.
    """
    if "routing" not in config:
        return {"error": "Config must contain a 'routing' key."}
    try:
        ROUTING_CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
        with open(ROUTING_CONFIG_FILE, "w", encoding="utf-8") as fh:
            if _yaml is not None:
                _yaml.dump(config, fh, default_flow_style=False)
            else:
                json.dump(config, fh, indent=2)
        return {"success": True, "path": str(ROUTING_CONFIG_FILE)}
    except Exception as exc:
        return {"error": f"Failed to save routing config: {exc}"}
|
|
1245
|
+
|
|
1246
|
+
|
|
1247
|
+
def _classify_severity(change: Dict[str, Any]) -> str:
    """Map a single change dict to a canonical severity level (critical/warning/info).

    Inspects these keys in order:
        - 'severity' (from lint violations)
        - 'is_breaking' (from diff changes)
        - 'type' (change type string)
    Anything unrecognized defaults to 'info'.
    """
    # Direct severity label wins when it is a known alias.
    label = str(change.get("severity", "")).lower()
    canonical = _SEVERITY_ALIASES.get(label)
    if canonical is not None:
        return canonical

    # Breaking flag from the diff engine.
    if change.get("is_breaking"):
        return "critical"

    # Heuristic on the change-type string.
    kind = str(change.get("type", "")).lower()
    if "removed" in kind or "breaking" in kind:
        return "critical"
    if "added" in kind or "changed" in kind:
        return "warning"

    return "info"
|
|
1272
|
+
|
|
1273
|
+
|
|
1274
|
+
def route_by_impact(
    changes: List[Dict[str, Any]],
    routing_config: Optional[Dict[str, Any]] = None,
    webhook_url: str = "",
    email_to: str = "",
    from_account: str = "",
    dry_run: bool = False,
) -> Dict[str, Any]:
    """Route notifications based on change severity.

    Takes a list of changes (from lint/diff output) and sends notifications
    to the appropriate channels based on severity classification.

    Args:
        changes: List of change dicts (from lint violations or diff changes).
        routing_config: Custom routing config. Uses saved/default if None.
        webhook_url: Webhook URL for webhook channel delivery.
            NOTE: shared by both the 'webhook' and 'slack' channels.
        email_to: Email recipient for email channel delivery.
        from_account: Sender account key for email delivery.
        dry_run: If True, classify and plan routing but do not send.

    Returns:
        Dict with routing decisions and delivery results.
    """
    if not changes:
        return {
            "routed": 0,
            "suppressed": 0,
            "decisions": [],
            "notifications_sent": [],
        }

    config = routing_config or load_routing_config()
    routing_rules = config.get("routing", {})
    timestamp = datetime.now(timezone.utc).isoformat()

    # Classify all changes by severity into fixed canonical buckets.
    buckets: Dict[str, List[Dict[str, Any]]] = {
        "critical": [],
        "warning": [],
        "info": [],
    }
    for change in changes:
        severity = _classify_severity(change)
        buckets[severity].append(change)

    decisions: List[Dict[str, Any]] = []
    notifications_sent: List[Dict[str, Any]] = []
    suppressed_count = 0

    for severity, items in buckets.items():
        if not items:
            continue

        rule = routing_rules.get(severity, {})
        channels = rule.get("channels", [])
        is_digest = rule.get("digest", False)

        if not channels:
            # Suppressed (or digest-only): recorded in decisions, nothing sent.
            suppressed_count += len(items)
            decisions.append({
                "severity": severity,
                "count": len(items),
                "action": "digest" if is_digest else "suppressed",
                "channels": [],
            })
            continue

        # Build notification message for this severity bucket
        subject_prefix = rule.get("email_subject_prefix", "")
        webhook_priority = rule.get("webhook_priority", "normal")

        summary_lines = [f"{len(items)} {severity} change(s) detected:"]
        for item in items[:10]:  # Cap detail lines at 10
            msg = item.get("message", item.get("name", item.get("type", "change")))
            path = item.get("path", "")
            summary_lines.append(f" - {msg}" + (f" ({path})" if path else ""))
        if len(items) > 10:
            summary_lines.append(f" ... and {len(items) - 10} more")
        message_body = "\n".join(summary_lines)

        decision = {
            "severity": severity,
            "count": len(items),
            "action": "notify",
            "channels": list(channels),
        }
        decisions.append(decision)

        # Dry runs record the decision but skip all delivery below.
        if dry_run:
            continue

        # Send to each configured channel
        for channel in channels:
            if channel == "email":
                subject = f"{subject_prefix} Delimit: {severity} API changes".strip()
                result = send_email(
                    to=email_to,
                    subject=subject,
                    body=message_body,
                    from_account=from_account,
                    event_type=f"impact_routing_{severity}",
                )
                notifications_sent.append({
                    "channel": "email",
                    "severity": severity,
                    "delivered": result.get("delivered", False),
                    "error": result.get("error"),
                })
            elif channel == "webhook" and webhook_url:
                # Inject priority into the webhook payload
                payload = {
                    "event_type": f"delimit_impact_{severity}",
                    "message": message_body,
                    "priority": webhook_priority,
                    "severity": severity,
                    "change_count": len(items),
                    "timestamp": timestamp,
                }
                post_result = _post_json(webhook_url, payload)
                # Webhook deliveries are logged directly (send_webhook is
                # bypassed here so the priority field can be injected).
                _record_notification({
                    "channel": "webhook",
                    "event_type": f"impact_routing_{severity}",
                    "message": message_body,
                    "webhook_url": webhook_url,
                    "priority": webhook_priority,
                    "timestamp": timestamp,
                    "success": post_result.get("success", False),
                })
                notifications_sent.append({
                    "channel": "webhook",
                    "severity": severity,
                    "priority": webhook_priority,
                    "delivered": post_result.get("success", False),
                    "error": post_result.get("error"),
                })
            elif channel == "slack" and webhook_url:
                result = send_slack(webhook_url, message_body, f"impact_{severity}")
                notifications_sent.append({
                    "channel": "slack",
                    "severity": severity,
                    "delivered": result.get("delivered", False),
                    "error": result.get("error"),
                })

    return {
        "routed": sum(d["count"] for d in decisions if d["action"] == "notify"),
        "suppressed": suppressed_count,
        "decisions": decisions,
        "notifications_sent": notifications_sent,
        "timestamp": timestamp,
        "dry_run": dry_run,
    }
|
|
1428
|
+
|
|
1429
|
+
|
|
1430
|
+
# ═════════════════════════════════════════════════════════════════════
|
|
1431
|
+
# INBOUND EMAIL: IMAP polling, classification, and forwarding
|
|
1432
|
+
# ═════════════════════════════════════════════════════════════════════
|
|
1433
|
+
|
|
1434
|
+
def _record_inbox_routing(entry: Dict[str, Any]) -> None:
    """Append a routing record to the inbox routing log (JSONL, best-effort)."""
    try:
        INBOX_ROUTING_FILE.parent.mkdir(parents=True, exist_ok=True)
        with open(INBOX_ROUTING_FILE, "a", encoding="utf-8") as log:
            log.write(json.dumps(entry) + "\n")
    except OSError as err:
        # Logging failures are non-fatal; routing itself already happened.
        logger.warning("Failed to record inbox routing: %s", err)
|
|
1442
|
+
|
|
1443
|
+
|
|
1444
|
+
def _decode_header(raw: str) -> str:
|
|
1445
|
+
"""Decode an RFC 2047 encoded email header into a plain string."""
|
|
1446
|
+
if not raw:
|
|
1447
|
+
return ""
|
|
1448
|
+
parts = email.header.decode_header(raw)
|
|
1449
|
+
decoded = []
|
|
1450
|
+
for data, charset in parts:
|
|
1451
|
+
if isinstance(data, bytes):
|
|
1452
|
+
decoded.append(data.decode(charset or "utf-8", errors="replace"))
|
|
1453
|
+
else:
|
|
1454
|
+
decoded.append(data)
|
|
1455
|
+
return " ".join(decoded)
|
|
1456
|
+
|
|
1457
|
+
|
|
1458
|
+
def _extract_sender_email(from_header: str) -> str:
|
|
1459
|
+
"""Extract the bare email address from a From header."""
|
|
1460
|
+
_, addr = email.utils.parseaddr(from_header)
|
|
1461
|
+
return addr.lower()
|
|
1462
|
+
|
|
1463
|
+
|
|
1464
|
+
def _extract_sender_domain(sender_email: str) -> str:
|
|
1465
|
+
"""Extract domain from an email address."""
|
|
1466
|
+
if "@" in sender_email:
|
|
1467
|
+
return sender_email.split("@", 1)[1]
|
|
1468
|
+
return ""
|
|
1469
|
+
|
|
1470
|
+
|
|
1471
|
+
def classify_email(sender: str, subject: str, from_header: str = "") -> str:
    """Classify an email as 'owner-action' or 'non-owner'.

    Args:
        sender: Sender email address (matched case-insensitively).
        subject: Decoded subject line.
        from_header: Raw From header. NOTE(review): currently unused —
            Rule 4's comment mentions checking the display name, but the
            code never reads this parameter; confirm before relying on it.

    Returns:
        'owner-action' if the email needs owner attention.
        'non-owner' if it can stay in the Delimit inbox.
    """
    sender_lower = sender.lower()
    sender_domain = _extract_sender_domain(sender_lower)

    # Rule 1: from the owner directly
    if sender_lower in OWNER_ACTION_SENDERS:
        return "owner-action"

    # Rule 2: from a known vendor/partner domain
    if sender_domain in OWNER_ACTION_DOMAINS:
        return "owner-action"

    # Rule 3: subject matches owner-action patterns
    for pattern in OWNER_ACTION_SUBJECT_PATTERNS:
        if pattern.search(subject):
            return "owner-action"

    # Rule 4: if sender looks like a real person (not noreply), lean owner-action
    # Only if from_header has a display name that looks personal
    # (see NOTE in docstring — this check is not implemented yet).
    is_noreply = any(sender_lower.startswith(prefix) for prefix in NON_OWNER_SENDERS)
    if not is_noreply and sender_domain and sender_domain not in ("pypi.org",):
        # Check if subject indicates automated content
        automated_keywords = ["unsubscribe", "newsletter", "digest", "weekly roundup",
                              "notification", "alert", "automated", "receipt"]
        subject_lower = subject.lower()
        if any(kw in subject_lower for kw in automated_keywords):
            return "non-owner"
        # Personal email from unknown domain - forward to be safe
        return "owner-action"

    return "non-owner"
|
|
1508
|
+
|
|
1509
|
+
|
|
1510
|
+
def _forward_email(original_msg: email.message.Message, smtp_pass: str) -> bool:
    """Forward an email to the owner via SMTP.

    Collects the text/plain payload(s) of *original_msg*, wraps them in a
    forwarding banner, and sends the result from IMAP_USER to FORWARD_TO
    over STARTTLS on port 587.

    Returns:
        True when the message was handed to the SMTP server, False on any
        failure (the error is logged, never raised).
    """
    subject = _decode_header(original_msg.get("Subject", ""))
    from_header = original_msg.get("From", "")

    # Gather decodable text/plain payloads; multipart messages may carry
    # several, a simple message carries exactly one.
    if original_msg.is_multipart():
        raw_payloads = [
            part.get_payload(decode=True)
            for part in original_msg.walk()
            if part.get_content_type() == "text/plain"
        ]
    else:
        raw_payloads = [original_msg.get_payload(decode=True)]
    body_parts = [p.decode("utf-8", errors="replace") for p in raw_payloads if p]

    body = "\n".join(body_parts) if body_parts else "(no text content)"

    fwd_text = (
        f"--- Forwarded from pro@delimit.ai ---\n"
        f"From: {from_header}\n"
        f"Subject: {subject}\n"
        f"Date: {original_msg.get('Date', 'unknown')}\n"
        f"---\n\n"
        f"{body}"
    )

    fwd_msg = MIMEText(fwd_text)
    fwd_msg["Subject"] = f"[Fwd] {subject}"
    fwd_msg["From"] = IMAP_USER
    fwd_msg["To"] = FORWARD_TO

    try:
        with smtplib.SMTP(IMAP_HOST, 587, timeout=10) as server:
            server.starttls()
            server.login(IMAP_USER, smtp_pass)
            server.sendmail(IMAP_USER, [FORWARD_TO], fwd_msg.as_string())
        return True
    except Exception as e:
        logger.error("Failed to forward email: %s", e)
        return False
|
|
1553
|
+
|
|
1554
|
+
|
|
1555
|
+
def poll_inbox(
    smtp_pass: str = "",
    limit: int = 20,
    process: bool = True,
) -> Dict[str, Any]:
    """Poll the IMAP inbox, classify emails, and optionally forward owner-action items.

    Args:
        smtp_pass: SMTP/IMAP password for pro@delimit.ai.
        limit: Max number of recent messages to check.
        process: If True, forward owner-action emails and mark as read.
            If False, just report classification (dry run).

    Returns:
        Summary of inbox state and routing decisions.
    """
    if not smtp_pass:
        smtp_pass = os.environ.get("DELIMIT_SMTP_PASS", "")
    if not smtp_pass and IMAP_USER:
        account = _load_smtp_account(IMAP_USER)
        smtp_pass = str((account or {}).get("pass") or (account or {}).get("password") or "")
    if not smtp_pass:
        return {"error": "IMAP password required. Set DELIMIT_SMTP_PASS or pass smtp_pass."}

    timestamp = datetime.now(timezone.utc).isoformat()

    try:
        imap = imaplib.IMAP4_SSL(IMAP_HOST, IMAP_PORT)
        imap.login(IMAP_USER, smtp_pass)
    except Exception as e:
        return {"error": f"IMAP connection failed: {e}"}

    try:
        imap.select("INBOX")

        # Get UNSEEN messages first; ALL is fetched only for summary counts.
        _status, unseen_data = imap.search(None, "UNSEEN")
        unseen_ids = unseen_data[0].split() if unseen_data[0] else []

        _status, all_data = imap.search(None, "ALL")
        all_ids = all_data[0].split() if all_data[0] else []

        # Process unseen messages (up to limit)
        target_ids = unseen_ids[-limit:] if unseen_ids else []

        results: List[Dict[str, Any]] = []
        forwarded = 0
        skipped = 0

        # Dedup: track processed Message-IDs to avoid re-forwarding across
        # polls. Kept as an insertion-ordered list on disk (so trimming
        # evicts the *oldest* entries, not arbitrary set members) plus a
        # set for O(1) membership checks.
        _processed_cache_path = Path(os.path.expanduser("~/.delimit/inbox_processed.json"))
        _processed_list: List[str] = []
        try:
            if _processed_cache_path.exists():
                _processed_list = list(json.loads(_processed_cache_path.read_text()))
        except Exception:
            _processed_list = []
        _processed_ids: set = set(_processed_list)

        for msg_id in target_ids:
            # Fetch without marking as seen (use BODY.PEEK)
            status, data = imap.fetch(msg_id, "(BODY.PEEK[])")
            if status != "OK" or not data or not data[0]:
                continue

            raw_email = data[0][1]
            msg = email.message_from_bytes(raw_email)

            from_header = _decode_header(msg.get("From", ""))
            subject = _decode_header(msg.get("Subject", ""))
            date_str = msg.get("Date", "")
            sender_addr = _extract_sender_email(from_header)

            # Dedup: skip if we already processed this Message-ID
            message_id_header = msg.get("Message-ID", msg_id.decode())
            if message_id_header in _processed_ids:
                # Bug fix: a dry run must not mutate mailbox state, so only
                # mark duplicates as seen when actually processing.
                if process:
                    imap.store(msg_id, "+FLAGS", "\\Seen")
                continue

            classification = classify_email(sender_addr, subject, from_header)

            entry = {
                "msg_id": msg_id.decode(),
                "from": from_header,
                "sender": sender_addr,
                "subject": subject,
                "date": date_str,
                "classification": classification,
                "forwarded": False,
            }

            forward_failed = False
            if process and classification == "owner-action":
                success = _forward_email(msg, smtp_pass)
                entry["forwarded"] = success
                if success:
                    # Mark as seen after successful forward
                    imap.store(msg_id, "+FLAGS", "\\Seen")
                    forwarded += 1
                else:
                    entry["forward_error"] = True
                    forward_failed = True
            elif process and classification == "non-owner":
                # Mark as seen (processed, stays in inbox)
                imap.store(msg_id, "+FLAGS", "\\Seen")
                skipped += 1

            results.append(entry)
            # Bug fixes versus the previous version:
            #  - a FAILED forward is not recorded as processed, so it will
            #    be retried on the next poll instead of being lost forever;
            #  - dry runs do not update the dedup cache, so a later real
            #    run still forwards what the dry run merely previewed.
            if process and not forward_failed:
                _processed_ids.add(message_id_header)
                _processed_list.append(message_id_header)
            _record_inbox_routing({**entry, "timestamp": timestamp, "process_mode": process})

        # Persist processed IDs (keep last 500 to avoid unbounded growth).
        # Skipped entirely in dry-run mode — see bug-fix note above.
        if process:
            try:
                _processed_cache_path.parent.mkdir(parents=True, exist_ok=True)
                _processed_cache_path.write_text(json.dumps(_processed_list[-500:]))
            except Exception:
                pass

        imap.logout()

        return {
            "timestamp": timestamp,
            "total_messages": len(all_ids),
            "unseen_count": len(unseen_ids),
            "processed": len(results),
            "forwarded_to_owner": forwarded,
            "kept_in_inbox": skipped,
            "dry_run": not process,
            "messages": results,
        }

    except Exception as e:
        # Best-effort cleanup so a processing failure does not leak the
        # IMAP connection.
        try:
            imap.logout()
        except Exception:
            pass
        return {"error": f"Inbox processing failed: {e}"}
|
|
1691
|
+
|
|
1692
|
+
|
|
1693
|
+
def get_inbox_status(
    smtp_pass: str = "",
    limit: int = 10,
) -> Dict[str, Any]:
    """Get inbox status and recent routing history without processing.

    Args:
        smtp_pass: IMAP password.
        limit: Number of recent messages to show.

    Returns:
        Inbox summary and recent routing log entries.
    """
    # Get recent routing history from log
    routing_history: List[Dict[str, Any]] = []
    try:
        if INBOX_ROUTING_FILE.exists():
            with open(INBOX_ROUTING_FILE, "r", encoding="utf-8") as f:
                lines = f.readlines()
            for line in lines[-limit:]:
                try:
                    routing_history.append(json.loads(line.strip()))
                except json.JSONDecodeError:
                    continue
    except OSError:
        pass

    # Get live inbox state
    if not smtp_pass:
        smtp_pass = os.environ.get("DELIMIT_SMTP_PASS", "")
    if not smtp_pass:
        return {
            "routing_history": routing_history,
            "error": "IMAP password required for live inbox status. Set DELIMIT_SMTP_PASS.",
        }

    try:
        imap = imaplib.IMAP4_SSL(IMAP_HOST, IMAP_PORT)
        imap.login(IMAP_USER, smtp_pass)
    except Exception as e:
        return {
            "routing_history": routing_history,
            "error": f"IMAP connection failed: {e}",
        }

    try:
        imap.select("INBOX")

        _, all_data = imap.search(None, "ALL")
        all_ids = all_data[0].split() if all_data[0] else []

        _, unseen_data = imap.search(None, "UNSEEN")
        unseen_ids = unseen_data[0].split() if unseen_data[0] else []

        # Preview recent messages (headers only; PEEK avoids setting \Seen)
        recent_ids = all_ids[-limit:]
        recent_msgs: List[Dict[str, str]] = []
        for msg_id in recent_ids:
            _, data = imap.fetch(msg_id, "(BODY.PEEK[HEADER.FIELDS (FROM SUBJECT DATE)] FLAGS)")
            if data and data[0]:
                # Parse flags and headers out of the (possibly multi-part)
                # fetch response; the header payload is the tuple's second
                # element, flags appear in the tuple's first element.
                flags_str = ""
                header_bytes = b""
                for part in data:
                    if isinstance(part, tuple):
                        if b"FLAGS" in part[0]:
                            flags_str = part[0].decode(errors="replace")
                        header_bytes = part[1]

                header_text = header_bytes.decode("utf-8", errors="replace")
                tmp_msg = email.message_from_string(header_text)
                from_h = _decode_header(tmp_msg.get("From", ""))
                subj_h = _decode_header(tmp_msg.get("Subject", ""))
                date_h = tmp_msg.get("Date", "")
                sender = _extract_sender_email(from_h)
                cls = classify_email(sender, subj_h, from_h)
                seen = "\\Seen" in flags_str

                recent_msgs.append({
                    "from": from_h,
                    "subject": subj_h,
                    "date": date_h,
                    "classification": cls,
                    "seen": seen,
                })

        imap.logout()

        return {
            "total_messages": len(all_ids),
            "unseen_count": len(unseen_ids),
            "recent_messages": recent_msgs,
            "routing_history_count": len(routing_history),
            "routing_history": routing_history[-5:],
        }

    except Exception as e:
        # Bug fix: previously a failure after login returned without
        # logging out, leaking the IMAP connection. Attempt cleanup here
        # (mirrors poll_inbox's failure path).
        try:
            imap.logout()
        except Exception:
            pass
        return {
            "routing_history": routing_history,
            "error": f"IMAP connection failed: {e}",
        }
|