glacis 0.1.3-py3-none-any.whl → 0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- glacis/__init__.py +62 -1
- glacis/__main__.py +1 -80
- glacis/client.py +60 -31
- glacis/config.py +141 -0
- glacis/controls/__init__.py +232 -0
- glacis/controls/base.py +104 -0
- glacis/controls/jailbreak.py +224 -0
- glacis/controls/pii.py +855 -0
- glacis/crypto.py +70 -1
- glacis/integrations/__init__.py +53 -3
- glacis/integrations/anthropic.py +207 -142
- glacis/integrations/base.py +476 -0
- glacis/integrations/openai.py +156 -121
- glacis/models.py +277 -24
- glacis/storage.py +324 -8
- glacis/verify.py +154 -0
- glacis-0.2.0.dist-info/METADATA +275 -0
- glacis-0.2.0.dist-info/RECORD +21 -0
- glacis/wasm/s3p_core_wasi.wasm +0 -0
- glacis/wasm_runtime.py +0 -533
- glacis-0.1.3.dist-info/METADATA +0 -324
- glacis-0.1.3.dist-info/RECORD +0 -16
- {glacis-0.1.3.dist-info → glacis-0.2.0.dist-info}/WHEEL +0 -0
- {glacis-0.1.3.dist-info → glacis-0.2.0.dist-info}/licenses/LICENSE +0 -0
glacis/storage.py
CHANGED
@@ -1,8 +1,11 @@
 """
-SQLite storage for
+SQLite storage for attestation receipts and evidence.
 
-Stores local attestation receipts in ~/.glacis/
-and later verification.
+Stores local attestation receipts and full evidence in ~/.glacis/glacis.db
+for persistence, audit trails, and later verification.
+
+Evidence (input, output, control_plane_results) is stored locally for zero-egress
+compliance - only hashes are sent to the GLACIS server.
 """
 
 from __future__ import annotations
@@ -14,11 +17,11 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional
 
 if TYPE_CHECKING:
-    from glacis.models import OfflineAttestReceipt
+    from glacis.models import ControlPlaneAttestation, OfflineAttestReceipt
 
-DEFAULT_DB_PATH = Path.home() / ".glacis" / "
+DEFAULT_DB_PATH = Path.home() / ".glacis" / "glacis.db"
 
-SCHEMA_VERSION =
+SCHEMA_VERSION = 2
 
 SCHEMA = """
 CREATE TABLE IF NOT EXISTS offline_receipts (
@@ -40,11 +43,57 @@ CREATE INDEX IF NOT EXISTS idx_timestamp ON offline_receipts(timestamp);
 CREATE INDEX IF NOT EXISTS idx_payload_hash ON offline_receipts(payload_hash);
 CREATE INDEX IF NOT EXISTS idx_created_at ON offline_receipts(created_at);
 
+-- Evidence table for full audit trail (both online and offline modes)
+CREATE TABLE IF NOT EXISTS evidence (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    attestation_id TEXT NOT NULL,
+    attestation_hash TEXT NOT NULL,
+    mode TEXT NOT NULL, -- 'online' or 'offline'
+    service_id TEXT NOT NULL,
+    operation_type TEXT NOT NULL,
+    timestamp TEXT NOT NULL,
+    created_at TEXT NOT NULL,
+    input_json TEXT NOT NULL,
+    output_json TEXT NOT NULL,
+    control_plane_json TEXT,
+    metadata_json TEXT,
+    UNIQUE(attestation_id)
+);
+
+CREATE INDEX IF NOT EXISTS idx_evidence_attestation_id ON evidence(attestation_id);
+CREATE INDEX IF NOT EXISTS idx_evidence_attestation_hash ON evidence(attestation_hash);
+CREATE INDEX IF NOT EXISTS idx_evidence_service_id ON evidence(service_id);
+CREATE INDEX IF NOT EXISTS idx_evidence_timestamp ON evidence(timestamp);
+
 CREATE TABLE IF NOT EXISTS schema_version (
     version INTEGER PRIMARY KEY
 );
 """
 
+# Migration from v1 to v2: Add evidence table
+MIGRATION_V1_TO_V2 = """
+CREATE TABLE IF NOT EXISTS evidence (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    attestation_id TEXT NOT NULL,
+    attestation_hash TEXT NOT NULL,
+    mode TEXT NOT NULL,
+    service_id TEXT NOT NULL,
+    operation_type TEXT NOT NULL,
+    timestamp TEXT NOT NULL,
+    created_at TEXT NOT NULL,
+    input_json TEXT NOT NULL,
+    output_json TEXT NOT NULL,
+    control_plane_json TEXT,
+    metadata_json TEXT,
+    UNIQUE(attestation_id)
+);
+
+CREATE INDEX IF NOT EXISTS idx_evidence_attestation_id ON evidence(attestation_id);
+CREATE INDEX IF NOT EXISTS idx_evidence_attestation_hash ON evidence(attestation_hash);
+CREATE INDEX IF NOT EXISTS idx_evidence_service_id ON evidence(service_id);
+CREATE INDEX IF NOT EXISTS idx_evidence_timestamp ON evidence(timestamp);
+"""
+
 
 class ReceiptStorage:
     """
@@ -67,7 +116,10 @@ class ReceiptStorage:
     def _get_connection(self) -> sqlite3.Connection:
         """Get or create database connection."""
         if self._conn is None:
-
+            # check_same_thread=False allows the connection to be used across threads
+            # This is safe because we're only doing simple CRUD operations
+            # and SQLite handles locking internally
+            self._conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
             self._conn.row_factory = sqlite3.Row
             self._init_schema()
         return self._conn
@@ -102,12 +154,16 @@ class ReceiptStorage:
 
     def _run_migrations(self, from_version: int) -> None:
         """Run schema migrations."""
-        # No migrations needed yet since this is v1
         conn = self._conn
         if conn is None:
             return
 
         cursor = conn.cursor()
+
+        # Migration from v1 to v2: Add evidence table
+        if from_version < 2:
+            cursor.executescript(MIGRATION_V1_TO_V2)
+
         cursor.execute(
             "INSERT OR REPLACE INTO schema_version (version) VALUES (?)",
             (SCHEMA_VERSION,),
@@ -334,3 +390,263 @@ class ReceiptStorage:
     ) -> None:
         """Context manager exit."""
         self.close()
+
+    # =========================================================================
+    # Evidence Storage (for full audit trail)
+    # =========================================================================
+
+    def store_evidence(
+        self,
+        attestation_id: str,
+        attestation_hash: str,
+        mode: str,
+        service_id: str,
+        operation_type: str,
+        timestamp: str,
+        input_data: Any,
+        output_data: Any,
+        control_plane_results: Optional["ControlPlaneAttestation"] = None,
+        metadata: Optional[dict[str, Any]] = None,
+    ) -> None:
+        """
+        Store full evidence for an attestation.
+
+        This stores the complete input, output, and control plane results locally
+        for audit trails and dispute resolution. Only the hash was sent to GLACIS.
+
+        Args:
+            attestation_id: The attestation ID (att_xxx or oatt_xxx)
+            attestation_hash: The hash that was attested (payload_hash)
+            mode: 'online' or 'offline'
+            service_id: Service identifier
+            operation_type: Type of operation
+            timestamp: ISO 8601 timestamp
+            input_data: Full input data (will be JSON serialized)
+            output_data: Full output data (will be JSON serialized)
+            control_plane_results: Optional control plane attestation
+            metadata: Optional metadata dict
+        """
+        conn = self._get_connection()
+        cursor = conn.cursor()
+
+        control_plane_json = None
+        if control_plane_results:
+            control_plane_json = json.dumps(
+                control_plane_results.model_dump(by_alias=True)
+            )
+
+        cursor.execute(
+            """
+            INSERT OR REPLACE INTO evidence
+            (attestation_id, attestation_hash, mode, service_id, operation_type,
+             timestamp, created_at, input_json, output_json, control_plane_json, metadata_json)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            """,
+            (
+                attestation_id,
+                attestation_hash,
+                mode,
+                service_id,
+                operation_type,
+                timestamp,
+                datetime.utcnow().isoformat() + "Z",
+                json.dumps(input_data),
+                json.dumps(output_data),
+                control_plane_json,
+                json.dumps(metadata) if metadata else None,
+            ),
+        )
+        conn.commit()
+
+    def get_evidence(self, attestation_id: str) -> Optional[dict[str, Any]]:
+        """
+        Retrieve full evidence by attestation ID.
+
+        Args:
+            attestation_id: The attestation ID (att_xxx or oatt_xxx)
+
+        Returns:
+            Dict with input, output, control_plane_results, and metadata,
+            or None if not found
+        """
+        conn = self._get_connection()
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT * FROM evidence WHERE attestation_id = ?",
+            (attestation_id,),
+        )
+        row = cursor.fetchone()
+        if row is None:
+            return None
+
+        result: dict[str, Any] = {
+            "attestation_id": row["attestation_id"],
+            "attestation_hash": row["attestation_hash"],
+            "mode": row["mode"],
+            "service_id": row["service_id"],
+            "operation_type": row["operation_type"],
+            "timestamp": row["timestamp"],
+            "created_at": row["created_at"],
+            "input": json.loads(row["input_json"]),
+            "output": json.loads(row["output_json"]),
+            "control_plane_results": (
+                json.loads(row["control_plane_json"])
+                if row["control_plane_json"]
+                else None
+            ),
+            "metadata": (
+                json.loads(row["metadata_json"]) if row["metadata_json"] else None
+            ),
+        }
+        return result
+
+    def get_evidence_by_hash(self, attestation_hash: str) -> Optional[dict[str, Any]]:
+        """
+        Retrieve full evidence by attestation hash.
+
+        Useful for verifying that stored evidence matches the attested hash.
+
+        Args:
+            attestation_hash: The payload hash that was attested
+
+        Returns:
+            Dict with input, output, control_plane_results, and metadata,
+            or None if not found
+        """
+        conn = self._get_connection()
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT * FROM evidence WHERE attestation_hash = ?",
+            (attestation_hash,),
+        )
+        row = cursor.fetchone()
+        if row is None:
+            return None
+
+        result: dict[str, Any] = {
+            "attestation_id": row["attestation_id"],
+            "attestation_hash": row["attestation_hash"],
+            "mode": row["mode"],
+            "service_id": row["service_id"],
+            "operation_type": row["operation_type"],
+            "timestamp": row["timestamp"],
+            "created_at": row["created_at"],
+            "input": json.loads(row["input_json"]),
+            "output": json.loads(row["output_json"]),
+            "control_plane_results": (
+                json.loads(row["control_plane_json"])
+                if row["control_plane_json"]
+                else None
+            ),
+            "metadata": (
+                json.loads(row["metadata_json"]) if row["metadata_json"] else None
+            ),
+        }
+        return result
+
+    def query_evidence(
+        self,
+        service_id: Optional[str] = None,
+        mode: Optional[str] = None,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        limit: int = 50,
+    ) -> list[dict[str, Any]]:
+        """
+        Query evidence with optional filters.
+
+        Args:
+            service_id: Filter by service ID
+            mode: Filter by mode ('online' or 'offline')
+            start: Filter by timestamp >= start (ISO 8601)
+            end: Filter by timestamp <= end (ISO 8601)
+            limit: Maximum number of results (default 50)
+
+        Returns:
+            List of evidence records
+        """
+        conn = self._get_connection()
+        cursor = conn.cursor()
+
+        query = "SELECT * FROM evidence WHERE 1=1"
+        params: list[Any] = []
+
+        if service_id:
+            query += " AND service_id = ?"
+            params.append(service_id)
+        if mode:
+            query += " AND mode = ?"
+            params.append(mode)
+        if start:
+            query += " AND timestamp >= ?"
+            params.append(start)
+        if end:
+            query += " AND timestamp <= ?"
+            params.append(end)
+
+        query += " ORDER BY created_at DESC LIMIT ?"
+        params.append(limit)
+
+        cursor.execute(query, params)
+        rows = cursor.fetchall()
+
+        results = []
+        for row in rows:
+            results.append({
+                "attestation_id": row["attestation_id"],
+                "attestation_hash": row["attestation_hash"],
+                "mode": row["mode"],
+                "service_id": row["service_id"],
+                "operation_type": row["operation_type"],
+                "timestamp": row["timestamp"],
+                "created_at": row["created_at"],
+                "input": json.loads(row["input_json"]),
+                "output": json.loads(row["output_json"]),
+                "control_plane_results": (
+                    json.loads(row["control_plane_json"])
+                    if row["control_plane_json"]
+                    else None
+                ),
+                "metadata": (
+                    json.loads(row["metadata_json"]) if row["metadata_json"] else None
+                ),
+            })
+        return results
+
+    def count_evidence(
+        self, service_id: Optional[str] = None, mode: Optional[str] = None
+    ) -> int:
+        """
+        Count evidence records.
+
+        Args:
+            service_id: Optional service ID filter
+            mode: Optional mode filter ('online' or 'offline')
+
+        Returns:
+            Number of matching evidence records
+        """
+        conn = self._get_connection()
+        cursor = conn.cursor()
+
+        query = "SELECT COUNT(*) FROM evidence WHERE 1=1"
+        params: list[Any] = []
+
+        if service_id:
+            query += " AND service_id = ?"
+            params.append(service_id)
+        if mode:
+            query += " AND mode = ?"
+            params.append(mode)
+
+        cursor.execute(query, params)
+        row = cursor.fetchone()
+        return row[0] if row else 0
+
+    # NOTE: Evidence is intentionally append-only (no delete method).
+    # For a compliant audit trail, evidence must be immutable.
+    # Evidence deletion should only happen through:
+    # 1. Data retention policies (automated, time-based)
+    # 2. Explicit admin/compliance operations
+    # 3. Legal requirements (GDPR right to be forgotten)
+    # These operations should be logged and audited separately.
glacis/verify.py
ADDED
@@ -0,0 +1,154 @@
+"""
+Glacis CLI - Receipt Verification
+
+Usage:
+    python -m glacis verify <receipt.json>
+    python -m glacis verify <receipt.json> --base-url https://api.glacis.io
+"""
+
+import argparse
+import json
+import sys
+from pathlib import Path
+from typing import Any, Union
+
+import httpx
+
+from glacis.models import (
+    AttestReceipt,
+    OfflineAttestReceipt,
+    OfflineVerifyResult,
+    VerifyResult,
+)
+
+DEFAULT_BASE_URL = "https://api.glacis.io"
+
+
+def verify_online(attestation_hash: str, base_url: str) -> VerifyResult:
+    """Verify an online attestation via direct HTTP call."""
+    url = f"{base_url}/v1/verify/{attestation_hash}"
+
+    try:
+        response = httpx.get(url, timeout=30.0)
+        response.raise_for_status()
+        return VerifyResult.model_validate(response.json())
+    except httpx.HTTPStatusError as e:
+        return VerifyResult(
+            valid=False,
+            error=f"HTTP {e.response.status_code}: {e.response.text}",
+        )
+    except httpx.RequestError as e:
+        return VerifyResult(
+            valid=False,
+            error=f"Request failed: {e}",
+        )
+
+
+def verify_offline(receipt: OfflineAttestReceipt) -> OfflineVerifyResult:
+    """
+    Verify an offline attestation.
+
+    Note: Full cryptographic signature verification of offline receipts requires
+    the original signing seed or the complete signed payload (which includes
+    a timestamp). Without these, we validate the receipt structure and format.
+
+    For full cryptographic verification, use the Glacis client with the
+    original signing_seed that created the receipt.
+    """
+    try:
+        # Validate receipt structure and format
+        valid = (
+            receipt.attestation_id.startswith("oatt_")
+            and len(receipt.payload_hash) == 64
+            and len(receipt.public_key) == 64
+            and len(receipt.signature) > 0
+            and receipt.witness_status == "UNVERIFIED"
+        )
+
+        return OfflineVerifyResult(
+            valid=valid,
+            witness_status="UNVERIFIED",
+            signature_valid=valid,  # Structure valid (not full crypto verification)
+            attestation=receipt,
+        )
+    except Exception as e:
+        return OfflineVerifyResult(
+            valid=False,
+            witness_status="UNVERIFIED",
+            signature_valid=False,
+            attestation=receipt,
+            error=str(e),
+        )
+
+
+def verify_command(args: argparse.Namespace) -> None:
+    """Verify a receipt file."""
+    receipt_path = Path(args.receipt)
+
+    if not receipt_path.exists():
+        print(f"Error: File not found: {receipt_path}", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(receipt_path) as f:
+            data: dict[str, Any] = json.load(f)
+    except json.JSONDecodeError as e:
+        print(f"Error: Invalid JSON: {e}", file=sys.stderr)
+        sys.exit(1)
+
+    # Determine receipt type and verify
+    receipt: Union[AttestReceipt, OfflineAttestReceipt]
+    result: Union[VerifyResult, OfflineVerifyResult]
+
+    # Check for offline receipt - supports both camelCase and snake_case
+    att_id = data.get("attestationId") or data.get("attestation_id") or ""
+    if att_id.startswith("oatt_"):
+        receipt = OfflineAttestReceipt(**data)
+        result = verify_offline(receipt)
+    else:
+        receipt = AttestReceipt(**data)
+        result = verify_online(receipt.attestation_hash, args.base_url)
+
+    # Output
+    print(f"Receipt: {receipt.attestation_id}")
+    print(f"Type: {'Offline' if isinstance(receipt, OfflineAttestReceipt) else 'Online'}")
+    print()
+
+    if result.valid:
+        print("Status: VALID")
+        if isinstance(result, OfflineVerifyResult):
+            sig_valid = result.signature_valid
+        else:
+            sig_valid = result.verification.signature_valid if result.verification else False
+        print(f"  Signature: {'PASS' if sig_valid else 'FAIL'}")
+        if isinstance(result, VerifyResult) and result.verification:
+            print(f"  Merkle proof: {'PASS' if result.verification.proof_valid else 'FAIL'}")
+    else:
+        print("Status: INVALID")
+        if result.error:
+            print(f"  Error: {result.error}")
+        sys.exit(1)
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(
+        prog="glacis", description="Glacis CLI - Cryptographic attestation for AI systems"
+    )
+    subparsers = parser.add_subparsers(dest="command", required=True)
+
+    # verify command
+    verify_parser = subparsers.add_parser("verify", help="Verify a receipt")
+    verify_parser.add_argument("receipt", help="Path to receipt JSON file")
+    verify_parser.add_argument(
+        "--base-url",
+        default=DEFAULT_BASE_URL,
+        help=f"API base URL (default: {DEFAULT_BASE_URL})",
+    )
+    verify_parser.set_defaults(func=verify_command)
+
+    args = parser.parse_args()
+    args.func(args)
+
+
+if __name__ == "__main__":
+    main()
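
Besides the CLI entry point shown in the module docstring (python -m glacis verify receipt.json), the same checks can be driven programmatically. A minimal sketch, mirroring verify_command above and assuming a local receipt.json that holds an offline receipt (oatt_ prefix) in the camelCase or snake_case form the models accept:

import json

from glacis.models import OfflineAttestReceipt
from glacis.verify import verify_offline

# Load a receipt file (path is illustrative) and run the structural check.
with open("receipt.json") as f:
    receipt = OfflineAttestReceipt(**json.load(f))

result = verify_offline(receipt)
# Structural validation only; full cryptographic verification needs the
# original signing seed, as noted in the verify_offline docstring.
print(result.valid, result.witness_status)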