vallignus 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vallignus/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """Vallignus - Agent Firewall Proxy with Authority"""
2
+
3
+ __version__ = "0.3.0"
vallignus/auth.py ADDED
@@ -0,0 +1,699 @@
1
+ """Vallignus Authority - Identity, tokens, and policy management
2
+
3
+ Sprint 2 P0 Features:
4
+ - Policy versioning (v0001, v0002, etc.)
5
+ - Token revocation (jti)
6
+ - Key rotation (kid)
7
+ """
8
+
9
+ import base64
10
+ import hashlib
11
+ import hmac
12
+ import json
13
+ import os
14
+ import secrets
15
+ import shutil
16
+ import time
17
+ import uuid
18
+ from dataclasses import dataclass
19
+ from datetime import datetime
20
+ from pathlib import Path
21
+ from typing import Optional, Set, Tuple, Dict, Any
22
+
23
+
24
# Storage paths (all state lives under the user's home directory)
VALLIGNUS_DIR = Path.home() / ".vallignus"   # root state directory
AGENTS_DIR = VALLIGNUS_DIR / "agents"        # one JSON file per agent
POLICIES_DIR = VALLIGNUS_DIR / "policies"    # one directory per policy (versioned vNNNN.json + latest.json)
KEYS_DIR = VALLIGNUS_DIR / "keys"            # HMAC keys kNNNN.key plus an "active" pointer file
REVOKED_DIR = VALLIGNUS_DIR / "revoked"      # one empty marker file per revoked token jti

# Legacy paths (for migration)
LEGACY_SECRET_KEY_FILE = VALLIGNUS_DIR / "secret.key"  # pre-keyring single signing key
33
+
34
+
35
@dataclass
class Agent:
    """Agent identity record, persisted as JSON under the agents directory."""
    agent_id: str          # unique agent name (sanitized for filesystem use when stored)
    owner: str             # human owner of the agent
    created_at: float      # unix timestamp (time.time()) at creation
    description: str = ""  # optional free-form description
42
+
43
+
44
@dataclass
class Policy:
    """Permission policy. Versions are immutable; updates create a new version."""
    policy_id: str
    version: int                    # 1-based, monotonically increasing
    max_spend_usd: Optional[float]  # None = unlimited budget
    allowed_domains: Set[str]       # lowercased domain allow-list
    created_at: str  # ISO format
    updated_at: str  # ISO format
    description: str = ""
54
+
55
+
56
@dataclass
class TokenPayload:
    """Decoded token payload (returned only after successful verification)."""
    agent_id: str
    owner: str
    policy_id: str
    policy_version: int    # policy version the token was bound to at issue time
    issued_at: float       # unix timestamp
    expires_at: float      # unix timestamp
    permissions_hash: str  # SHA-256 of the canonical JSON of that policy version
    jti: str  # Token ID for revocation ("" for legacy tokens lacking one)
67
+
68
+
69
class AuthError(Exception):
    """Authentication/authorization error raised by all operations in this module."""
    pass
72
+
73
+
74
+ # =============================================================================
75
+ # BASE64 URL ENCODING
76
+ # =============================================================================
77
+
78
+ def _b64url_encode(data: bytes) -> str:
79
+ """Base64url encode without padding"""
80
+ return base64.urlsafe_b64encode(data).rstrip(b'=').decode('ascii')
81
+
82
+
83
+ def _b64url_decode(s: str) -> bytes:
84
+ """Base64url decode with padding restoration"""
85
+ padding = 4 - len(s) % 4
86
+ if padding != 4:
87
+ s += '=' * padding
88
+ return base64.urlsafe_b64decode(s)
89
+
90
+
91
+ def _sanitize_id(id_str: str) -> str:
92
+ """Sanitize ID for filesystem use"""
93
+ return "".join(c if c.isalnum() or c in '-_' else '_' for c in id_str)
94
+
95
+
96
+ def _iso_now() -> str:
97
+ """Get current time in ISO format"""
98
+ return datetime.utcnow().isoformat()
99
+
100
+
101
+ # =============================================================================
102
+ # KEY MANAGEMENT (kid rotation)
103
+ # =============================================================================
104
+
105
def _get_active_kid() -> str:
    """Return the key ID recorded in the keyring's "active" pointer file.

    Raises AuthError when the keyring has not been initialized.
    """
    pointer = KEYS_DIR / "active"
    if not pointer.exists():
        raise AuthError("No active key. Run 'vallignus auth init' first.")
    return pointer.read_text().strip()
111
+
112
+
113
def _get_key_by_kid(kid: str) -> bytes:
    """Return the raw key bytes stored for *kid*; raise AuthError if absent."""
    path = KEYS_DIR / f"{kid}.key"
    if not path.exists():
        raise AuthError(f"Unknown key ID: {kid}")
    return path.read_bytes()
119
+
120
+
121
def _get_active_key() -> Tuple[str, bytes]:
    """Resolve the active signing key; returns (kid, key_bytes).

    Raises AuthError if the keyring is uninitialized or the pointed-to
    key file is missing.
    """
    kid = _get_active_kid()
    return kid, _get_key_by_kid(kid)
125
+
126
+
127
def _get_next_kid() -> str:
    """Generate the next sequential key ID ("k0001", "k0002", ...).

    Stray files in the keys directory whose stem is not of the form
    k<digits> are ignored; previously they crashed ``int()`` and broke
    key rotation.
    """
    if not KEYS_DIR.exists():
        return "k0001"

    numbers = [
        int(f.stem[1:])
        for f in KEYS_DIR.glob("k*.key")
        if f.stem[1:].isdigit()
    ]
    if not numbers:
        return "k0001"

    return f"k{max(numbers) + 1:04d}"
138
+
139
+
140
def rotate_key() -> Tuple[bool, str]:
    """Mint a fresh 256-bit key, store it, and point "active" at it.

    Returns (ok, message); never raises.
    """
    try:
        if not KEYS_DIR.exists():
            return False, "Keys directory not initialized. Run 'vallignus auth init' first."

        kid = _get_next_kid()
        fresh_key = secrets.token_bytes(32)

        path = KEYS_DIR / f"{kid}.key"
        path.write_bytes(fresh_key)
        path.chmod(0o600)  # owner-only: this is secret material

        (KEYS_DIR / "active").write_text(kid)

        return True, f"Rotated to new key: {kid}"
    except Exception as exc:
        return False, f"Failed to rotate key: {exc}"
159
+
160
+
161
+ # =============================================================================
162
+ # INITIALIZATION
163
+ # =============================================================================
164
+
165
def init_auth() -> Tuple[bool, str]:
    """
    Initialize Vallignus auth directories and keys.
    Handles migration from legacy single secret.key to keyring.

    Returns (ok, message); never raises — failures are reported in the
    message.
    """
    try:
        # All state is owner-only (0o700) under ~/.vallignus.
        VALLIGNUS_DIR.mkdir(mode=0o700, exist_ok=True)
        AGENTS_DIR.mkdir(mode=0o700, exist_ok=True)
        POLICIES_DIR.mkdir(mode=0o700, exist_ok=True)
        KEYS_DIR.mkdir(mode=0o700, exist_ok=True)
        REVOKED_DIR.mkdir(mode=0o700, exist_ok=True)

        active_file = KEYS_DIR / "active"

        # Check for legacy secret.key and migrate
        if LEGACY_SECRET_KEY_FILE.exists() and not active_file.exists():
            # Migrate legacy key to keyring: it becomes k0001 and stays
            # active, so tokens signed with the old key still verify.
            legacy_key = LEGACY_SECRET_KEY_FILE.read_bytes()
            first_key_file = KEYS_DIR / "k0001.key"
            first_key_file.write_bytes(legacy_key)
            first_key_file.chmod(0o600)
            active_file.write_text("k0001")
            # Keep legacy file for now (user can delete manually)
            return True, f"Migrated legacy key to keyring at {KEYS_DIR}"

        if not active_file.exists():
            # Fresh install: generate the first 256-bit HMAC key.
            key = secrets.token_bytes(32)
            first_key_file = KEYS_DIR / "k0001.key"
            first_key_file.write_bytes(key)
            first_key_file.chmod(0o600)
            active_file.write_text("k0001")
            return True, f"Initialized Vallignus auth at {VALLIGNUS_DIR}"
        else:
            return True, f"Vallignus auth already initialized at {VALLIGNUS_DIR}"
    except Exception as e:
        return False, f"Failed to initialize: {e}"
202
+
203
+
204
+ # =============================================================================
205
+ # AGENT MANAGEMENT
206
+ # =============================================================================
207
+
208
def _get_agent_path(agent_id: str) -> Path:
    """Return the JSON file path that stores *agent_id*."""
    filename = f"{_sanitize_id(agent_id)}.json"
    return AGENTS_DIR / filename
211
+
212
+
213
def create_agent(agent_id: str, owner: str, description: str = "") -> Tuple[bool, str]:
    """Persist a new agent identity as JSON.

    Refuses to overwrite an existing agent. Returns (ok, message);
    never raises.
    """
    path = _get_agent_path(agent_id)

    if path.exists():
        return False, f"Agent '{agent_id}' already exists"

    record = {
        "agent_id": agent_id,
        "owner": owner,
        "description": description,
        "created_at": time.time()
    }

    try:
        path.write_text(json.dumps(record, indent=2))
    except Exception as e:
        return False, f"Failed to create agent: {e}"
    return True, f"Created agent '{agent_id}' (owner: {owner})"
232
+
233
+
234
def load_agent(agent_id: str) -> Agent:
    """Read an agent record from disk.

    Raises AuthError when the agent does not exist or its file is
    unreadable/corrupt.
    """
    path = _get_agent_path(agent_id)

    if not path.exists():
        raise AuthError(f"Agent '{agent_id}' not found")

    try:
        raw = json.loads(path.read_text())
        return Agent(
            agent_id=raw["agent_id"],
            owner=raw["owner"],
            created_at=raw["created_at"],
            description=raw.get("description", "")
        )
    except Exception as e:
        raise AuthError(f"Failed to load agent: {e}")
251
+
252
+
253
def list_agents() -> list:
    """Return the parsed JSON record of every agent on disk.

    Listing is best-effort: corrupt/unreadable files are skipped. The
    previous bare ``except:`` also swallowed KeyboardInterrupt and
    SystemExit; only ordinary exceptions are ignored now.
    """
    if not AGENTS_DIR.exists():
        return []

    agents = []
    for path in AGENTS_DIR.glob("*.json"):
        try:
            agents.append(json.loads(path.read_text()))
        except Exception:
            # Skip corrupt entries rather than failing the whole listing.
            continue
    return agents
266
+
267
+
268
+ # =============================================================================
269
+ # POLICY MANAGEMENT (Versioned)
270
+ # =============================================================================
271
+
272
def _get_policy_dir(policy_id: str) -> Path:
    """Return the directory that holds all versions of *policy_id*."""
    safe_name = _sanitize_id(policy_id)
    return POLICIES_DIR / safe_name
275
+
276
+
277
def _get_policy_version_path(policy_id: str, version: int) -> Path:
    """Return the file path of one immutable policy version (vNNNN.json)."""
    filename = f"v{version:04d}.json"
    return _get_policy_dir(policy_id) / filename
280
+
281
+
282
def _get_policy_latest_path(policy_id: str) -> Path:
    """Return the path of the mutable latest.json snapshot for *policy_id*."""
    policy_dir = _get_policy_dir(policy_id)
    return policy_dir / "latest.json"
285
+
286
+
287
def _get_legacy_policy_path(policy_id: str) -> Path:
    """Return the old flat-file path for *policy_id* (pre-versioning layout)."""
    filename = f"{_sanitize_id(policy_id)}.json"
    return POLICIES_DIR / filename
290
+
291
+
292
def _migrate_legacy_policy(policy_id: str) -> Optional[int]:
    """
    Migrate a legacy flat policy file to versioned storage.
    Returns the version number (1) if migrated, None if no legacy file.

    NOTE(review): a corrupt legacy JSON file raises here, and callers
    invoke this outside their try blocks — confirm that propagating is
    intended.
    """
    legacy_path = _get_legacy_policy_path(policy_id)
    if not legacy_path.exists():
        return None

    # Read legacy data
    legacy_data = json.loads(legacy_path.read_text())

    # Create versioned directory
    policy_dir = _get_policy_dir(policy_id)
    policy_dir.mkdir(mode=0o700, exist_ok=True)

    # Convert to versioned format; the migrated content becomes v1.
    now = _iso_now()
    versioned_data = {
        "policy_id": legacy_data["policy_id"],
        "version": 1,
        "max_spend_usd": legacy_data.get("max_spend_usd"),
        "allowed_domains": legacy_data["allowed_domains"],
        "description": legacy_data.get("description", ""),
        "created_at": legacy_data.get("created_at", now),
        "updated_at": now
    }

    # Handle old created_at format (float timestamp) -> ISO string
    if isinstance(versioned_data["created_at"], (int, float)):
        versioned_data["created_at"] = datetime.utcfromtimestamp(versioned_data["created_at"]).isoformat()

    # Write v0001 and latest (identical content at migration time)
    v1_path = _get_policy_version_path(policy_id, 1)
    latest_path = _get_policy_latest_path(policy_id)

    policy_json = json.dumps(versioned_data, indent=2)
    v1_path.write_text(policy_json)
    latest_path.write_text(policy_json)

    # Remove legacy file
    legacy_path.unlink()

    return 1
336
+
337
+
338
def _get_latest_version(policy_id: str) -> int:
    """Return the highest stored version number for *policy_id* (0 if none).

    Files whose stem is not of the form v<digits> are ignored; previously
    a stray file made ``int()`` raise.
    """
    policy_dir = _get_policy_dir(policy_id)
    if not policy_dir.exists():
        return 0

    versions = [
        int(f.stem[1:])
        for f in policy_dir.glob("v*.json")
        if f.stem[1:].isdigit()
    ]
    return max(versions) if versions else 0
346
+
347
+
348
def create_policy(
    policy_id: str,
    max_spend_usd: Optional[float],
    allowed_domains: str,
    description: str = ""
) -> Tuple[bool, str]:
    """Create a new permission policy (v0001).

    allowed_domains is a comma-separated string; entries are stripped,
    lowercased and stored sorted. max_spend_usd of None means unlimited.
    Returns (ok, message); never raises.
    """
    policy_dir = _get_policy_dir(policy_id)
    legacy_path = _get_legacy_policy_path(policy_id)

    # Check if policy already exists (versioned or legacy)
    if policy_dir.exists() or legacy_path.exists():
        return False, f"Policy '{policy_id}' already exists"

    # Parse domains
    domains = [d.strip().lower() for d in allowed_domains.split(',') if d.strip()]
    if not domains:
        return False, "At least one domain must be specified"

    now = _iso_now()
    policy_data = {
        "policy_id": policy_id,
        "version": 1,
        "max_spend_usd": max_spend_usd,
        "allowed_domains": sorted(domains),
        "description": description,
        "created_at": now,
        "updated_at": now
    }

    try:
        policy_dir.mkdir(mode=0o700, parents=True)

        # v0001 and latest.json start out identical.
        policy_json = json.dumps(policy_data, indent=2)
        _get_policy_version_path(policy_id, 1).write_text(policy_json)
        _get_policy_latest_path(policy_id).write_text(policy_json)

        return True, f"Created policy '{policy_id}' v1 (budget: ${max_spend_usd or 'unlimited'}, domains: {len(domains)})"
    except Exception as e:
        return False, f"Failed to create policy: {e}"
388
+
389
+
390
def update_policy(
    policy_id: str,
    max_spend_usd: Optional[float] = None,
    allowed_domains: Optional[str] = None,
    description: Optional[str] = None
) -> Tuple[bool, str]:
    """Update a policy, creating a new version.

    Any keyword left as None keeps the current value. NOTE(review): this
    means a budget can never be reset back to unlimited (None) through
    this function — confirm whether that is intended.
    Returns (ok, message); never raises for storage errors.
    """
    # Check for legacy and migrate if needed
    _migrate_legacy_policy(policy_id)

    policy_dir = _get_policy_dir(policy_id)
    if not policy_dir.exists():
        return False, f"Policy '{policy_id}' not found"

    # Load current latest
    latest_path = _get_policy_latest_path(policy_id)
    if not latest_path.exists():
        return False, f"Policy '{policy_id}' has no versions"

    current = json.loads(latest_path.read_text())
    current_version = current["version"]
    new_version = current_version + 1

    # Build new version (inherit unchanged fields)
    now = _iso_now()
    new_data = {
        "policy_id": policy_id,
        "version": new_version,
        "max_spend_usd": max_spend_usd if max_spend_usd is not None else current.get("max_spend_usd"),
        "allowed_domains": current["allowed_domains"],
        "description": description if description is not None else current.get("description", ""),
        "created_at": current["created_at"],
        "updated_at": now
    }

    # Update domains if provided (same parse rules as create_policy)
    if allowed_domains is not None:
        domains = [d.strip().lower() for d in allowed_domains.split(',') if d.strip()]
        if not domains:
            return False, "At least one domain must be specified"
        new_data["allowed_domains"] = sorted(domains)

    try:
        # Write the new immutable version, then refresh the latest snapshot.
        policy_json = json.dumps(new_data, indent=2)
        _get_policy_version_path(policy_id, new_version).write_text(policy_json)
        _get_policy_latest_path(policy_id).write_text(policy_json)

        return True, f"Updated policy '{policy_id}' to v{new_version}"
    except Exception as e:
        return False, f"Failed to update policy: {e}"
440
+
441
+
442
def load_policy(policy_id: str, version: Optional[int] = None) -> Policy:
    """
    Load a policy from storage.
    If version is None, loads latest.

    Raises AuthError when the policy (or the requested version) does not
    exist or its file cannot be parsed.
    """
    # Transparently migrate a legacy flat-file policy first. (The return
    # value was previously bound to an unused local; it is not needed —
    # paths are re-resolved below either way.)
    _migrate_legacy_policy(policy_id)

    policy_dir = _get_policy_dir(policy_id)
    if not policy_dir.exists():
        raise AuthError(f"Policy '{policy_id}' not found")

    if version is None:
        path = _get_policy_latest_path(policy_id)
    else:
        path = _get_policy_version_path(policy_id, version)

    if not path.exists():
        raise AuthError(f"Policy '{policy_id}' version {version} not found")

    try:
        data = json.loads(path.read_text())
        return Policy(
            policy_id=data["policy_id"],
            version=data["version"],
            max_spend_usd=data.get("max_spend_usd"),
            allowed_domains=set(data["allowed_domains"]),
            created_at=data["created_at"],
            updated_at=data["updated_at"],
            description=data.get("description", "")
        )
    except Exception as e:
        raise AuthError(f"Failed to load policy: {e}")
475
+
476
+
477
def _get_policy_version_hash(policy_id: str, version: int) -> str:
    """Return the SHA-256 hex digest of the canonical JSON of one policy version.

    Canonical form: keys sorted, no whitespace — so the hash is stable
    regardless of on-disk formatting. Raises AuthError if the version
    file is missing.
    """
    path = _get_policy_version_path(policy_id, version)
    if not path.exists():
        raise AuthError(f"Policy '{policy_id}' version {version} not found")

    parsed = json.loads(path.read_text())
    canonical = json.dumps(parsed, sort_keys=True, separators=(',', ':')).encode()
    return hashlib.sha256(canonical).hexdigest()
486
+
487
+
488
def list_policies() -> list:
    """List all policies: latest snapshot of versioned ones plus legacy flat files.

    Listing is best-effort — corrupt files are skipped. The previous bare
    ``except:`` clauses also swallowed KeyboardInterrupt/SystemExit; only
    ordinary exceptions are ignored now.
    """
    if not POLICIES_DIR.exists():
        return []

    policies = []

    # Versioned policies: one directory per policy, snapshot in latest.json.
    for entry in POLICIES_DIR.iterdir():
        if entry.is_dir():
            latest = entry / "latest.json"
            if latest.exists():
                try:
                    policies.append(json.loads(latest.read_text()))
                except Exception:
                    pass  # skip corrupt entries

    # Legacy flat-file policies — not migrated here, just listed.
    for f in POLICIES_DIR.glob("*.json"):
        if f.is_file():
            try:
                data = json.loads(f.read_text())
                # Add version field for display if missing.
                data.setdefault("version", 1)
                policies.append(data)
            except Exception:
                pass

    return policies
519
+
520
+
521
+ # =============================================================================
522
+ # TOKEN REVOCATION (jti)
523
+ # =============================================================================
524
+
525
def revoke_token(jti: str) -> Tuple[bool, str]:
    """Mark *jti* revoked by creating an empty marker file.

    The jti is sanitized before being used as a filename, so a malicious
    value (e.g. one containing "../") cannot create files outside
    REVOKED_DIR. Legitimate jtis are uuid4 strings, which sanitization
    leaves unchanged. Returns (ok, message); never raises for I/O errors
    on the marker itself.
    """
    if not REVOKED_DIR.exists():
        # parents=True so revocation works even before 'auth init' ran.
        REVOKED_DIR.mkdir(mode=0o700, parents=True, exist_ok=True)

    revoked_file = REVOKED_DIR / _sanitize_id(jti)
    if revoked_file.exists():
        return False, f"Token {jti} is already revoked"

    try:
        revoked_file.touch()
        return True, f"Revoked token {jti}"
    except Exception as e:
        return False, f"Failed to revoke token: {e}"
539
+
540
+
541
def is_token_revoked(jti: str) -> bool:
    """Return True if a revocation marker exists for *jti*.

    Sanitizes the jti the same way revocation does before touching the
    filesystem, so an attacker-supplied jti cannot probe paths outside
    REVOKED_DIR (legitimate uuid4 jtis are unchanged by sanitization).
    """
    return (REVOKED_DIR / _sanitize_id(jti)).exists()
544
+
545
+
546
+ # =============================================================================
547
+ # TOKEN MINTING AND VERIFICATION
548
+ # =============================================================================
549
+
550
def issue_token(agent_id: str, policy_id: str, ttl_seconds: int = 3600) -> str:
    """
    Issue a signed token for an agent with a specific policy.
    Returns the token string.

    The token is a JWT-like three-part string:
    b64url(header).b64url(payload).b64url(HMAC-SHA256 signature),
    signed with the active key (whose kid is embedded in the header).
    Raises AuthError if the agent, policy, or active key is missing.
    """
    # Verify agent exists
    agent = load_agent(agent_id)

    # Load latest policy (and migrate if needed)
    policy = load_policy(policy_id)

    # Get active key
    kid, secret_key = _get_active_key()

    now = time.time()
    jti = str(uuid.uuid4())  # unique token id; enables later revocation

    # Header (includes kid so verification can select the right key)
    header = {"alg": "HS256", "typ": "VALLIGNUS", "kid": kid}
    header_b64 = _b64url_encode(json.dumps(header, separators=(',', ':')).encode())

    # Payload (includes policy_version and jti). permissions_hash pins the
    # exact policy-version content so later tampering is detectable.
    payload = {
        "agent_id": agent.agent_id,
        "owner": agent.owner,
        "policy_id": policy_id,
        "policy_version": policy.version,
        "issued_at": now,
        "expires_at": now + ttl_seconds,
        "permissions_hash": _get_policy_version_hash(policy_id, policy.version),
        "jti": jti
    }
    payload_b64 = _b64url_encode(json.dumps(payload, separators=(',', ':')).encode())

    # Signature over "header.payload", exactly what verify_token re-signs
    message = f"{header_b64}.{payload_b64}".encode()
    signature = hmac.new(secret_key, message, hashlib.sha256).digest()
    signature_b64 = _b64url_encode(signature)

    return f"{header_b64}.{payload_b64}.{signature_b64}"
590
+
591
+
592
def decode_token_payload(token: str) -> Dict[str, Any]:
    """Decode a token's header and payload WITHOUT verifying the signature.

    For inspection/debugging only — never trust the result for
    authorization. Raises AuthError on any malformed input.
    """
    try:
        pieces = token.split('.')
        if len(pieces) != 3:
            raise AuthError("Invalid token format")

        raw_header, raw_payload = pieces[0], pieces[1]
        return {
            "header": json.loads(_b64url_decode(raw_header).decode()),
            "payload": json.loads(_b64url_decode(raw_payload).decode()),
        }
    except Exception as e:
        # Note: the format-check AuthError above is intentionally re-wrapped
        # here too, matching the original behavior.
        raise AuthError(f"Failed to decode token: {e}")
607
+
608
+
609
def verify_token(token: str) -> TokenPayload:
    """
    Verify a token and return its payload.
    Raises AuthError if invalid.

    Checks in order: structure, signature (constant-time compare),
    expiry, then revocation. A revoked token raises
    AuthError("TOKEN_REVOKED") so callers can distinguish that case.
    """
    try:
        parts = token.split('.')
        if len(parts) != 3:
            raise AuthError("Invalid token format")

        header_b64, payload_b64, signature_b64 = parts

        # Decode header to get kid (header is attacker-controlled until
        # the signature check below passes)
        header = json.loads(_b64url_decode(header_b64).decode())
        kid = header.get("kid")

        # Get the appropriate key
        if kid:
            try:
                secret_key = _get_key_by_kid(kid)
            except AuthError:
                raise AuthError(f"Unknown signing key: {kid}")
        else:
            # Legacy token without kid - try active key or legacy key
            try:
                _, secret_key = _get_active_key()
            except AuthError:
                if LEGACY_SECRET_KEY_FILE.exists():
                    secret_key = LEGACY_SECRET_KEY_FILE.read_bytes()
                else:
                    raise

        # Verify signature (compare_digest avoids timing side channels)
        message = f"{header_b64}.{payload_b64}".encode()
        expected_sig = hmac.new(secret_key, message, hashlib.sha256).digest()
        actual_sig = _b64url_decode(signature_b64)

        if not hmac.compare_digest(expected_sig, actual_sig):
            raise AuthError("Invalid token signature")

        # Decode payload (only trusted once the signature checks out)
        payload = json.loads(_b64url_decode(payload_b64).decode())

        # Check expiry
        if time.time() > payload["expires_at"]:
            raise AuthError("Token expired")

        # Check revocation
        jti = payload.get("jti")
        if jti and is_token_revoked(jti):
            raise AuthError("TOKEN_REVOKED")

        # Handle legacy tokens without policy_version
        policy_version = payload.get("policy_version", 1)

        return TokenPayload(
            agent_id=payload["agent_id"],
            owner=payload["owner"],
            policy_id=payload["policy_id"],
            policy_version=policy_version,
            issued_at=payload["issued_at"],
            expires_at=payload["expires_at"],
            permissions_hash=payload["permissions_hash"],
            jti=jti or ""
        )

    except AuthError:
        # Preserve specific auth errors verbatim
        raise
    except Exception as e:
        raise AuthError(f"Token verification failed: {e}")
679
+
680
+
681
def verify_token_with_policy(token: str) -> Tuple[TokenPayload, Policy]:
    """
    Verify token and load + validate the associated policy version.
    Returns (payload, policy) or raises AuthError.

    Loads the policy version the token was issued against (not the
    latest) and requires its on-disk hash to still match the token's
    permissions_hash (tamper check).
    """
    payload = verify_token(token)

    # Load the specific policy version the token was issued for
    policy = load_policy(payload.policy_id, payload.policy_version)

    # Verify permissions_hash matches that version
    current_hash = _get_policy_version_hash(payload.policy_id, payload.policy_version)
    if current_hash != payload.permissions_hash:
        raise AuthError(
            f"Policy '{payload.policy_id}' v{payload.policy_version} integrity check failed. "
            "The policy file may have been tampered with."
        )

    return payload, policy