delimit-cli 4.1.43 → 4.1.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/CHANGELOG.md +33 -0
  2. package/README.md +46 -5
  3. package/bin/delimit-cli.js +1987 -337
  4. package/bin/delimit-setup.js +108 -66
  5. package/gateway/ai/activate_helpers.py +253 -7
  6. package/gateway/ai/agent_dispatch.py +34 -2
  7. package/gateway/ai/backends/deploy_bridge.py +167 -12
  8. package/gateway/ai/backends/gateway_core.py +236 -13
  9. package/gateway/ai/backends/repo_bridge.py +80 -16
  10. package/gateway/ai/backends/tools_infra.py +49 -32
  11. package/gateway/ai/checksums.sha256 +6 -0
  12. package/gateway/ai/content_engine.py +1276 -2
  13. package/gateway/ai/continuity.py +462 -0
  14. package/gateway/ai/deliberation.pyi +53 -0
  15. package/gateway/ai/github_scanner.py +1 -1
  16. package/gateway/ai/governance.py +58 -0
  17. package/gateway/ai/governance.pyi +32 -0
  18. package/gateway/ai/governance_hardening.py +569 -0
  19. package/gateway/ai/inbox_daemon_runner.py +217 -0
  20. package/gateway/ai/key_resolver.py +95 -2
  21. package/gateway/ai/ledger_manager.py +53 -3
  22. package/gateway/ai/license.py +104 -3
  23. package/gateway/ai/license_core.py +177 -36
  24. package/gateway/ai/license_core.pyi +50 -0
  25. package/gateway/ai/loop_engine.py +929 -294
  26. package/gateway/ai/notify.py +1786 -2
  27. package/gateway/ai/reddit_scanner.py +190 -1
  28. package/gateway/ai/screen_record.py +1 -1
  29. package/gateway/ai/secrets_broker.py +5 -1
  30. package/gateway/ai/server.py +254 -19
  31. package/gateway/ai/social_cache.py +341 -0
  32. package/gateway/ai/social_daemon.py +41 -10
  33. package/gateway/ai/supabase_sync.py +190 -2
  34. package/gateway/ai/swarm.py +86 -0
  35. package/gateway/ai/swarm_infra.py +656 -0
  36. package/gateway/ai/tui.py +594 -36
  37. package/gateway/ai/tweet_corpus_schema.sql +76 -0
  38. package/gateway/core/diff_engine_v2.py +6 -2
  39. package/gateway/core/generator_drift.py +242 -0
  40. package/gateway/core/json_schema_diff.py +375 -0
  41. package/gateway/core/openapi_version.py +124 -0
  42. package/gateway/core/spec_detector.py +47 -7
  43. package/gateway/core/spec_health.py +5 -2
  44. package/gateway/core/zero_spec/express_extractor.py +2 -2
  45. package/gateway/core/zero_spec/nestjs_extractor.py +40 -9
  46. package/gateway/requirements.txt +3 -6
  47. package/lib/cross-model-hooks.js +4 -12
  48. package/package.json +11 -3
  49. package/scripts/demo-v420-clean.sh +267 -0
  50. package/scripts/demo-v420-deliberation.sh +217 -0
  51. package/scripts/demo-v420.sh +55 -0
  52. package/scripts/postinstall.js +4 -3
  53. package/scripts/publish-ci-guard.sh +30 -0
  54. package/scripts/record-and-upload.sh +132 -0
  55. package/scripts/release.sh +126 -0
  56. package/scripts/sync-gateway.sh +112 -0
  57. package/scripts/youtube-upload.py +141 -0
@@ -0,0 +1,217 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Standalone runner for the Delimit inbox polling daemon.
4
+
5
+ Designed for use with systemd or manual invocation. Adds:
6
+ - Structured logging with timestamps
7
+ - Graceful SIGTERM handling for clean systemd stop
8
+ - PID file to prevent duplicate instances
9
+ - Startup validation of required configuration
10
+
11
+ Usage:
12
+ # Via systemd (see deploy/inbox-daemon.service)
13
+ systemctl start delimit-inbox-daemon
14
+
15
+ # Manual foreground run
16
+ python3 ai/inbox_daemon_runner.py
17
+
18
+ # Single poll cycle (for testing)
19
+ python3 ai/inbox_daemon_runner.py --once
20
+
21
+ Environment variables:
22
+ DELIMIT_SMTP_PASS Required. IMAP/SMTP password.
23
+ DELIMIT_INBOX_POLL_INTERVAL Poll interval in seconds (default: 300).
24
+ DELIMIT_HOME Delimit config directory (default: ~/.delimit).
25
+ PYTHONPATH Must include the gateway root for ai.* imports.
26
+ """
27
+
28
+ import logging
29
+ import os
30
+ import signal
31
+ import sys
32
+ import time
33
+ from datetime import datetime, timezone
34
+ from pathlib import Path
35
+
36
+ # Ensure the gateway root is on sys.path so ai.* imports work
37
+ _gateway_root = Path(__file__).resolve().parent.parent
38
+ if str(_gateway_root) not in sys.path:
39
+ sys.path.insert(0, str(_gateway_root))
40
+
41
+ # PID file to prevent duplicate instances
42
+ PID_DIR = Path(os.environ.get("DELIMIT_HOME", Path.home() / ".delimit"))
43
+ PID_FILE = PID_DIR / "inbox-daemon.pid"
44
+
45
+
46
+ def _setup_logging() -> logging.Logger:
47
+ """Configure structured logging for journald and console."""
48
+ log_format = "%(asctime)s [%(name)s] %(levelname)s: %(message)s"
49
+ logging.basicConfig(
50
+ level=logging.INFO,
51
+ format=log_format,
52
+ stream=sys.stdout,
53
+ )
54
+ # Suppress noisy libraries
55
+ logging.getLogger("urllib3").setLevel(logging.WARNING)
56
+ logging.getLogger("imaplib").setLevel(logging.WARNING)
57
+ return logging.getLogger("delimit.inbox_daemon_runner")
58
+
59
+
60
def _write_pid() -> None:
    """Write this process's PID file, refusing to start if a daemon is already running.

    BUG FIX: ``os.kill(pid, 0)`` raising ``PermissionError`` means the process
    EXISTS but belongs to another user; the original lumped it with the stale
    cases and overwrote the PID file, defeating the duplicate-instance guard.
    It is now treated as "still running".
    """
    PID_DIR.mkdir(parents=True, exist_ok=True)

    if PID_FILE.exists():
        old_pid = None
        stale = True
        try:
            old_pid = int(PID_FILE.read_text().strip())
            # Signal 0 probes for existence without delivering a signal.
            os.kill(old_pid, 0)
            stale = False  # probe succeeded: the process is alive
        except (ValueError, ProcessLookupError):
            # Unparseable contents or dead PID -- safe to overwrite.
            pass
        except PermissionError:
            # EPERM: process exists but is owned by someone else -- running.
            stale = False
        except OSError:
            # Any other OS error: assume stale rather than refuse to start.
            pass

        if not stale:
            print(
                f"ERROR: Another inbox daemon is running (PID {old_pid}). "
                f"Remove {PID_FILE} if stale.",
                file=sys.stderr,
            )
            sys.exit(1)

    PID_FILE.write_text(str(os.getpid()))
83
+
84
+
85
def _remove_pid() -> None:
    """Delete the PID file at shutdown, but only if this process wrote it."""
    try:
        if PID_FILE.exists() and PID_FILE.read_text().strip() == str(os.getpid()):
            PID_FILE.unlink()
    except OSError:
        # Best-effort cleanup; a leftover file is detected as stale on restart.
        pass
94
+
95
+
96
+ def _validate_config(logger: logging.Logger) -> bool:
97
+ """Validate required configuration before starting the daemon."""
98
+ ok = True
99
+
100
+ if not os.environ.get("DELIMIT_SMTP_PASS"):
101
+ # Check if the notify module can load credentials from config
102
+ try:
103
+ from ai.notify import _load_smtp_account, IMAP_USER
104
+ if IMAP_USER:
105
+ account = _load_smtp_account(IMAP_USER)
106
+ if account and (account.get("pass") or account.get("password")):
107
+ logger.info("SMTP credentials loaded from config for %s", IMAP_USER)
108
+ else:
109
+ logger.error(
110
+ "DELIMIT_SMTP_PASS not set and no credentials found in config for %s",
111
+ IMAP_USER,
112
+ )
113
+ ok = False
114
+ else:
115
+ logger.error("DELIMIT_SMTP_PASS not set and IMAP_USER not configured")
116
+ ok = False
117
+ except ImportError:
118
+ logger.error("DELIMIT_SMTP_PASS not set and ai.notify module not importable")
119
+ ok = False
120
+ else:
121
+ logger.info("SMTP credentials provided via environment")
122
+
123
+ return ok
124
+
125
+
126
def main() -> None:
    """Entry point: parse args, validate config, then run the inbox daemon.

    Modes:
      --once     run a single poll cycle and exit (no PID file, no handlers)
      (default)  write a PID file, install SIGTERM/SIGINT handlers, and block
                 in the daemon loop until the stop event is set
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Delimit inbox daemon runner -- persistent email governance polling",
    )
    parser.add_argument(
        "--once",
        action="store_true",
        help="Run a single poll cycle and exit",
    )
    parser.add_argument(
        "--interval",
        type=int,
        default=None,
        help="Override poll interval in seconds",
    )
    args = parser.parse_args()

    logger = _setup_logging()
    logger.info(
        "Delimit inbox daemon runner starting (PID %d, Python %s)",
        os.getpid(),
        sys.version.split()[0],
    )

    # Validate config before doing anything else.
    if not _validate_config(logger):
        logger.error("Configuration validation failed. Exiting.")
        sys.exit(1)

    # Import the daemon module (after PYTHONPATH is set up).
    # BUG FIX: the module must be imported unconditionally -- the main-loop
    # logging below reads ai.inbox_daemon.POLL_INTERVAL, and the original
    # only bound the name "ai" inside the --interval branch ("from ai.inbox_daemon
    # import ..." does not bind "ai"), raising NameError on a plain run.
    import ai.inbox_daemon
    from ai.inbox_daemon import _daemon_state, _daemon_loop, poll_once

    # Override poll interval if requested.
    if args.interval is not None:
        ai.inbox_daemon.POLL_INTERVAL = args.interval
        logger.info("Poll interval overridden to %d seconds", args.interval)

    # Single-shot mode (for testing): one cycle, no PID file, no handlers.
    if args.once:
        logger.info("Running single poll cycle (--once mode)")
        result = poll_once()
        if "error" in result:
            logger.error("Poll failed: %s", result["error"])
            sys.exit(1)
        logger.info(
            "Poll complete: %d processed, %d forwarded",
            result.get("processed", 0),
            result.get("forwarded", 0),
        )
        return

    # Write PID file (only for long-running mode) to block duplicate daemons.
    _write_pid()

    def _handle_signal(signum, frame):
        # Translate SIGTERM/SIGINT into the daemon's stop event for a clean exit.
        sig_name = signal.Signals(signum).name
        logger.info("Received %s -- initiating graceful shutdown", sig_name)
        _daemon_state._stop_event.set()

    signal.signal(signal.SIGTERM, _handle_signal)
    signal.signal(signal.SIGINT, _handle_signal)

    # Start the daemon loop (blocks until stop event).
    logger.info(
        "Inbox daemon entering main loop (poll interval: %ds)",
        ai.inbox_daemon.POLL_INTERVAL,
    )
    _daemon_state.running = True
    _daemon_state._stop_event.clear()

    try:
        _daemon_loop()
    except Exception as e:
        logger.critical("Daemon loop crashed: %s", e, exc_info=True)
        sys.exit(1)
    finally:
        _daemon_state.running = False
        _remove_pid()
        logger.info("Inbox daemon runner exiting cleanly")
214
+
215
+
216
+ if __name__ == "__main__":
217
+ main()
@@ -1,2 +1,95 @@
1
- # key_resolver Pro module (stubbed in npm package)
2
- # Full implementation available on delimit.ai server
1
+ """Auto-resolve API keys from multiple sources.
2
+
3
+ Priority: env var -> secrets broker -> return None (free fallback).
4
+
5
+ Every MCP tool that depends on an external service should use this module
6
+ so it works out of the box without API keys, with enhanced functionality
7
+ unlocked when keys are available.
8
+ """
9
+
10
+ import json
11
+ import logging
12
+ import os
13
+ import shutil
14
+ import subprocess
15
+ from pathlib import Path
16
+ from typing import Optional, Tuple
17
+
18
+ logger = logging.getLogger("delimit.ai.key_resolver")
19
+
20
+ SECRETS_DIR = Path.home() / ".delimit" / "secrets"
21
+
22
+
23
def get_key(name: str, env_var: str = "", _secrets_dir: Optional[Path] = None) -> Tuple[Optional[str], str]:
    """Resolve an API key for *name*. Returns (key, source) or (None, "not_found").

    Lookup order:
      1. Environment -- the explicit *env_var* first, then the conventional
         NAME_TOKEN / NAME_API_KEY / NAME_KEY variables.
      2. The secrets-broker file ~/.delimit/secrets/{name}.json, first of the
         "value" / "api_key" / "token" / "key" fields that is truthy.
      3. (None, "not_found") so callers can fall back to key-free behavior.
    """
    upper = name.upper()
    env_candidates = ([env_var] if env_var else []) + [
        f"{upper}_TOKEN",
        f"{upper}_API_KEY",
        f"{upper}_KEY",
    ]
    for candidate in env_candidates:
        if not candidate:
            continue
        value = os.environ.get(candidate)
        if value:
            return value, "env"

    # Secrets broker fallback.
    broker_dir = SECRETS_DIR if _secrets_dir is None else _secrets_dir
    broker_file = broker_dir / f"{name.lower()}.json"
    if broker_file.exists():
        try:
            payload = json.loads(broker_file.read_text())
            for field in ("value", "api_key", "token", "key"):
                if payload.get(field):
                    return payload[field], "secrets_broker"
        except Exception:
            # Unreadable/malformed file: log at debug and fall through.
            logger.debug("Failed to read secrets file %s", broker_file)

    return None, "not_found"
59
+
60
+
61
+ # ---------------------------------------------------------------------------
62
+ # Convenience wrappers
63
+ # ---------------------------------------------------------------------------
64
+
65
def get_figma_token() -> Tuple[Optional[str], str]:
    """Resolve the Figma API token via the standard key-resolution chain."""
    return get_key("figma", env_var="FIGMA_TOKEN")
68
+
69
+
70
def get_trivy_path() -> Tuple[Optional[str], str]:
    """Locate the Trivy scanner binary on PATH, if installed."""
    location = shutil.which("trivy")
    if location:
        return location, "installed"
    return None, "not_found"
74
+
75
+
76
def get_playwright() -> Tuple[bool, str]:
    """Report whether the Playwright Python package can be imported."""
    try:
        import playwright  # noqa: F401
    except ImportError:
        return False, "not_found"
    return True, "installed"
83
+
84
+
85
def get_puppeteer() -> Tuple[bool, str]:
    """Probe for puppeteer via ``npx`` (used as a screenshot fallback)."""
    try:
        proc = subprocess.run(
            ["npx", "puppeteer", "--version"],
            capture_output=True,
            timeout=15,
        )
    except Exception:
        # npx missing, timeout, or any other spawn failure.
        return False, "not_found"
    if proc.returncode == 0:
        return True, "installed"
    return False, "not_found"
@@ -91,10 +91,54 @@ def _register_venture(info: Dict[str, str]):
91
91
  VENTURES_FILE.write_text(json.dumps(ventures, indent=2))
92
92
 
93
93
 
94
+ CENTRAL_LEDGER_DIR = Path.home() / ".delimit" / "ledger"
95
+
96
+
97
+ def _detect_model() -> str:
98
+ """Auto-detect which AI model is running this session.
99
+
100
+ Checks environment variables set by various AI coding assistants:
101
+ - CLAUDE_MODEL / CLAUDE_CODE_MODEL: Claude Code
102
+ - CODEX_MODEL: OpenAI Codex CLI
103
+ - GEMINI_MODEL: Gemini CLI
104
+ - MCP_CLIENT_NAME: Generic MCP client identifier
105
+ Falls back to "unknown" if none are set.
106
+ """
107
+ # Claude Code
108
+ for var in ("CLAUDE_MODEL", "CLAUDE_CODE_MODEL"):
109
+ val = os.environ.get(var)
110
+ if val:
111
+ return val
112
+
113
+ # OpenAI Codex
114
+ val = os.environ.get("CODEX_MODEL")
115
+ if val:
116
+ return val
117
+
118
+ # Gemini
119
+ val = os.environ.get("GEMINI_MODEL")
120
+ if val:
121
+ return val
122
+
123
+ # Generic MCP client
124
+ val = os.environ.get("MCP_CLIENT_NAME")
125
+ if val:
126
+ return val
127
+
128
+ return "unknown"
129
+
130
+
94
131
def _project_ledger_dir(project_path: str = ".") -> Path:
    """Return the ledger directory -- always the central ``~/.delimit/ledger/``.

    Cross-model handoff fix: Codex and Gemini previously wrote to
    ``$PWD/.delimit/ledger/``, fragmenting the ledger. All models now share
    the central location so Claude, Codex, and Gemini see the same items.

    *project_path* is retained for interface compatibility but intentionally
    ignored: per-project ``.delimit/`` dirs hold policies and config only,
    never ledger state. The central ledger is the source of truth.
    """
    return CENTRAL_LEDGER_DIR
98
142
 
99
143
 
100
144
  def _ensure(project_path: str = "."):
@@ -151,6 +195,7 @@ def add_item(
151
195
  context: str = "",
152
196
  tools_needed: Optional[List[str]] = None,
153
197
  estimated_complexity: str = "",
198
+ worked_by: str = "",
154
199
  ) -> Dict[str, Any]:
155
200
  """Add a new item to the project's strategy or operational ledger.
156
201
 
@@ -181,6 +226,7 @@ def add_item(
181
226
  "venture": venture["name"],
182
227
  "status": "open",
183
228
  "tags": tags or [],
229
+ "worked_by": worked_by or _detect_model(),
184
230
  }
185
231
  # LED-189: Optional acceptance criteria
186
232
  if acceptance_criteria:
@@ -234,6 +280,7 @@ def update_item(
234
280
  blocked_by: Optional[str] = None,
235
281
  blocks: Optional[str] = None,
236
282
  project_path: str = ".",
283
+ worked_by: str = "",
237
284
  ) -> Dict[str, Any]:
238
285
  """Update an existing ledger item's fields."""
239
286
  _ensure(project_path)
@@ -271,6 +318,7 @@ def update_item(
271
318
  "id": item_id,
272
319
  "type": "update",
273
320
  "updated_at": time.strftime("%Y-%m-%dT%H:%M:%SZ"),
321
+ "worked_by": worked_by or _detect_model(),
274
322
  }
275
323
  if status:
276
324
  update["status"] = status
@@ -338,6 +386,8 @@ def list_items(
338
386
  state[item_id]["last_note"] = item["note"]
339
387
  if "priority" in item:
340
388
  state[item_id]["priority"] = item["priority"]
389
+ if "worked_by" in item:
390
+ state[item_id]["last_worked_by"] = item["worked_by"]
341
391
  state[item_id]["updated_at"] = item.get("updated_at")
342
392
  else:
343
393
  state[item_id] = {**item}
@@ -14,6 +14,9 @@ try:
14
14
  check_premium as is_premium,
15
15
  gate_tool as require_premium,
16
16
  activate as activate_license,
17
+ needs_revalidation,
18
+ revalidate_license,
19
+ is_license_valid,
17
20
  PRO_TOOLS as _CORE_PRO_TOOLS,
18
21
  FREE_TRIAL_LIMITS,
19
22
  )
@@ -78,17 +81,114 @@ except ImportError:
78
81
  })
79
82
  FREE_TRIAL_LIMITS = {"delimit_deliberate": 3}
80
83
 
84
+ REVALIDATION_INTERVAL = 30 * 86400 # 30 days
85
+ GRACE_PERIOD = 7 * 86400
86
+ HARD_BLOCK = 14 * 86400
87
+
81
88
def get_license() -> dict:
    """Return the current license record, downgrading to free when invalid.

    Reads LICENSE_FILE and layers two checks on top of the stored data:
      * an ``expires_at`` timestamp in the past -> free tier, flagged expired
      * a valid pro/enterprise license past its re-validation window is pushed
        through revalidate_license(); an "expired" result -> free tier, revoked
    Any read/parse failure falls open to the free tier.
    """
    if not LICENSE_FILE.exists():
        # No license file at all -> free tier.
        return {"tier": "free", "valid": True}
    try:
        data = json.loads(LICENSE_FILE.read_text())
        if data.get("expires_at") and data["expires_at"] < time.time():
            # Hard expiry timestamp has passed.
            return {"tier": "free", "valid": True, "expired": True}
        if data.get("tier") in ("pro", "enterprise") and data.get("valid"):
            if needs_revalidation(data):
                # Periodic online re-check; may rewrite the license file.
                result = revalidate_license(data)
                data = result["updated_data"]
                if result["status"] == "expired":
                    return {"tier": "free", "valid": True, "revoked": True,
                            "reason": result.get("reason", "License expired.")}
        return data
    except Exception:
        # Corrupt/unreadable license file -> fail open to the free tier.
        return {"tier": "free", "valid": True}
88
105
 
106
def needs_revalidation(data: dict) -> bool:
    """True when a paid license is due for its periodic online re-check.

    Free-tier records never revalidate. A paid record with no recorded
    validation (or activation) timestamp is always due; otherwise it is due
    once more than REVALIDATION_INTERVAL seconds have elapsed.
    """
    if data.get("tier") not in ("pro", "enterprise"):
        return False
    checked_at = data.get("last_validated_at", data.get("activated_at", 0))
    if checked_at == 0:
        return True
    age = time.time() - checked_at
    return age > REVALIDATION_INTERVAL
113
+
114
def revalidate_license(data: dict) -> dict:
    """Re-validate a paid license against the Lemon Squeezy licenses API.

    Returns ``{"status": "valid"|"grace"|"expired", "updated_data": ...}`` and
    persists each outcome via _write_license(). Offline failures are tolerated
    in stages: silent "revalidation_pending" within GRACE_PERIOD past the
    window, a counted-down "grace_period" after that, and a hard "expired"
    block once HARD_BLOCK past the window.
    """
    import hashlib
    import urllib.request
    key = data.get("key", "")
    # Keys with the JAMSONS prefix are internal keys: always valid, no
    # network check performed.
    if not key or key.startswith("JAMSONS"):
        data["last_validated_at"] = time.time()
        data["validation_status"] = "current"
        _write_license(data)
        return {"status": "valid", "updated_data": data}

    last_validated = data.get("last_validated_at", data.get("activated_at", 0))
    elapsed = time.time() - last_validated
    # Machine fingerprint: stored hash if present, else derived from $HOME.
    machine_hash = data.get("machine_hash", hashlib.sha256(str(Path.home()).encode()).hexdigest()[:16])

    api_valid = None  # None = network check inconclusive (offline/error)
    try:
        req_data = json.dumps({"license_key": key, "instance_name": machine_hash}).encode()
        req = urllib.request.Request(
            "https://api.lemonsqueezy.com/v1/licenses/validate",
            data=req_data,
            headers={"Content-Type": "application/json", "Accept": "application/json"},
            method="POST",
        )
        with urllib.request.urlopen(req, timeout=10) as resp:
            result = json.loads(resp.read())
            api_valid = result.get("valid", False)
    except Exception:
        # Offline or API failure: fall through to the grace-period logic.
        api_valid = None

    if api_valid is True:
        # Successful check: reset the clock and clear any grace countdown.
        data["last_validated_at"] = time.time()
        data["validation_status"] = "current"
        data.pop("grace_days_remaining", None)
        _write_license(data)
        return {"status": "valid", "updated_data": data}

    # NOTE(review): an explicit API "invalid" response (api_valid is False) is
    # handled identically to being offline below -- confirm that is intended.

    # More than HARD_BLOCK past the window with no successful check -> expire.
    if elapsed > REVALIDATION_INTERVAL + HARD_BLOCK:
        data["validation_status"] = "expired"
        data["valid"] = False
        _write_license(data)
        return {"status": "expired", "updated_data": data,
                "reason": "License expired — no successful re-validation in 44 days."}

    # More than GRACE_PERIOD past the window -> user-visible grace countdown.
    if elapsed > REVALIDATION_INTERVAL + GRACE_PERIOD:
        days_left = max(0, int((REVALIDATION_INTERVAL + HARD_BLOCK - elapsed) / 86400))
        data["validation_status"] = "grace_period"
        data["grace_days_remaining"] = days_left
        _write_license(data)
        return {"status": "grace", "updated_data": data, "grace_days_remaining": days_left}

    # Within the silent window: keep working, mark the check as pending.
    data["validation_status"] = "revalidation_pending"
    _write_license(data)
    return {"status": "grace", "updated_data": data}
167
+
168
def is_license_valid(data: dict) -> bool:
    """Offline validity check for a stored license record.

    A record is valid when it is a paid tier, flagged valid, and one of:
    an internal JAMSONS key, never validated (trusted until first check), or
    last validated within REVALIDATION_INTERVAL plus the HARD_BLOCK window.
    """
    paid = data.get("tier") in ("pro", "enterprise")
    if not (paid and data.get("valid", False)):
        return False
    if data.get("key", "").startswith("JAMSONS"):
        return True
    checked_at = data.get("last_validated_at", data.get("activated_at", 0))
    if checked_at == 0:
        return True
    return (time.time() - checked_at) <= (REVALIDATION_INTERVAL + HARD_BLOCK)
181
+
182
def _write_license(data: dict) -> None:
    """Persist the license record to LICENSE_FILE, ignoring write failures."""
    try:
        target = LICENSE_FILE
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(json.dumps(data, indent=2))
    except Exception:
        # Best-effort: a read-only home dir must not break license checks.
        pass
188
+
89
189
  def is_premium() -> bool:
90
190
  lic = get_license()
91
- return lic.get("tier") in ("pro", "enterprise") and lic.get("valid", False)
191
+ return is_license_valid(lic)
92
192
 
93
193
  def require_premium(tool_name: str) -> dict | None:
94
194
  full_name = tool_name if tool_name.startswith("delimit_") else f"delimit_{tool_name}"
@@ -121,7 +221,8 @@ except ImportError:
121
221
  # Store key for offline validation
122
222
  license_data = {
123
223
  "key": key, "tier": "pro", "valid": True,
124
- "activated_at": time.time(), "validated_via": "offline_fallback",
224
+ "activated_at": time.time(), "last_validated_at": time.time(),
225
+ "validated_via": "offline_fallback",
125
226
  }
126
227
  LICENSE_FILE.parent.mkdir(parents=True, exist_ok=True)
127
228
  LICENSE_FILE.write_text(json.dumps(license_data, indent=2))