proxilion 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- proxilion/__init__.py +136 -0
- proxilion/audit/__init__.py +133 -0
- proxilion/audit/base_exporters.py +527 -0
- proxilion/audit/compliance/__init__.py +130 -0
- proxilion/audit/compliance/base.py +457 -0
- proxilion/audit/compliance/eu_ai_act.py +603 -0
- proxilion/audit/compliance/iso27001.py +544 -0
- proxilion/audit/compliance/soc2.py +491 -0
- proxilion/audit/events.py +493 -0
- proxilion/audit/explainability.py +1173 -0
- proxilion/audit/exporters/__init__.py +58 -0
- proxilion/audit/exporters/aws_s3.py +636 -0
- proxilion/audit/exporters/azure_storage.py +608 -0
- proxilion/audit/exporters/cloud_base.py +468 -0
- proxilion/audit/exporters/gcp_storage.py +570 -0
- proxilion/audit/exporters/multi_exporter.py +498 -0
- proxilion/audit/hash_chain.py +652 -0
- proxilion/audit/logger.py +543 -0
- proxilion/caching/__init__.py +49 -0
- proxilion/caching/tool_cache.py +633 -0
- proxilion/context/__init__.py +73 -0
- proxilion/context/context_window.py +556 -0
- proxilion/context/message_history.py +505 -0
- proxilion/context/session.py +735 -0
- proxilion/contrib/__init__.py +51 -0
- proxilion/contrib/anthropic.py +609 -0
- proxilion/contrib/google.py +1012 -0
- proxilion/contrib/langchain.py +641 -0
- proxilion/contrib/mcp.py +893 -0
- proxilion/contrib/openai.py +646 -0
- proxilion/core.py +3058 -0
- proxilion/decorators.py +966 -0
- proxilion/engines/__init__.py +287 -0
- proxilion/engines/base.py +266 -0
- proxilion/engines/casbin_engine.py +412 -0
- proxilion/engines/opa_engine.py +493 -0
- proxilion/engines/simple.py +437 -0
- proxilion/exceptions.py +887 -0
- proxilion/guards/__init__.py +54 -0
- proxilion/guards/input_guard.py +522 -0
- proxilion/guards/output_guard.py +634 -0
- proxilion/observability/__init__.py +198 -0
- proxilion/observability/cost_tracker.py +866 -0
- proxilion/observability/hooks.py +683 -0
- proxilion/observability/metrics.py +798 -0
- proxilion/observability/session_cost_tracker.py +1063 -0
- proxilion/policies/__init__.py +67 -0
- proxilion/policies/base.py +304 -0
- proxilion/policies/builtin.py +486 -0
- proxilion/policies/registry.py +376 -0
- proxilion/providers/__init__.py +201 -0
- proxilion/providers/adapter.py +468 -0
- proxilion/providers/anthropic_adapter.py +330 -0
- proxilion/providers/gemini_adapter.py +391 -0
- proxilion/providers/openai_adapter.py +294 -0
- proxilion/py.typed +0 -0
- proxilion/resilience/__init__.py +81 -0
- proxilion/resilience/degradation.py +615 -0
- proxilion/resilience/fallback.py +555 -0
- proxilion/resilience/retry.py +554 -0
- proxilion/scheduling/__init__.py +57 -0
- proxilion/scheduling/priority_queue.py +419 -0
- proxilion/scheduling/scheduler.py +459 -0
- proxilion/security/__init__.py +244 -0
- proxilion/security/agent_trust.py +968 -0
- proxilion/security/behavioral_drift.py +794 -0
- proxilion/security/cascade_protection.py +869 -0
- proxilion/security/circuit_breaker.py +428 -0
- proxilion/security/cost_limiter.py +690 -0
- proxilion/security/idor_protection.py +460 -0
- proxilion/security/intent_capsule.py +849 -0
- proxilion/security/intent_validator.py +495 -0
- proxilion/security/memory_integrity.py +767 -0
- proxilion/security/rate_limiter.py +509 -0
- proxilion/security/scope_enforcer.py +680 -0
- proxilion/security/sequence_validator.py +636 -0
- proxilion/security/trust_boundaries.py +784 -0
- proxilion/streaming/__init__.py +70 -0
- proxilion/streaming/detector.py +761 -0
- proxilion/streaming/transformer.py +674 -0
- proxilion/timeouts/__init__.py +55 -0
- proxilion/timeouts/decorators.py +477 -0
- proxilion/timeouts/manager.py +545 -0
- proxilion/tools/__init__.py +69 -0
- proxilion/tools/decorators.py +493 -0
- proxilion/tools/registry.py +732 -0
- proxilion/types.py +339 -0
- proxilion/validation/__init__.py +93 -0
- proxilion/validation/pydantic_schema.py +351 -0
- proxilion/validation/schema.py +651 -0
- proxilion-0.0.1.dist-info/METADATA +872 -0
- proxilion-0.0.1.dist-info/RECORD +94 -0
- proxilion-0.0.1.dist-info/WHEEL +4 -0
- proxilion-0.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,527 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Audit log exporters for Proxilion.
|
|
3
|
+
|
|
4
|
+
This module provides different export formats for audit logs:
|
|
5
|
+
|
|
6
|
+
- FileExporter: Write to JSON Lines files
|
|
7
|
+
- ConsoleExporter: Pretty-print for development
|
|
8
|
+
- StreamExporter: Generic stream output
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import sys
|
|
15
|
+
import threading
|
|
16
|
+
from abc import ABC, abstractmethod
|
|
17
|
+
from collections.abc import Callable, Iterator
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, TextIO
|
|
20
|
+
|
|
21
|
+
from proxilion.audit.events import AuditEventV2
|
|
22
|
+
from proxilion.audit.hash_chain import HashChain, MerkleBatch
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Exporter(ABC):
    """Abstract base class for audit log exporters.

    Concrete exporters implement ``export_event``/``export_batch`` plus
    ``flush``/``close``; this base class supplies whole-chain export and
    context-manager support on top of those primitives.
    """

    @abstractmethod
    def export_event(self, event: AuditEventV2) -> None:
        """Export a single event."""

    @abstractmethod
    def export_batch(self, batch: MerkleBatch) -> None:
        """Export a batch marker."""

    def export_chain(self, chain: HashChain) -> None:
        """Export every event of a hash chain, in iteration order."""
        for chained_event in chain:
            self.export_event(chained_event)

    @abstractmethod
    def flush(self) -> None:
        """Flush any buffered output."""

    @abstractmethod
    def close(self) -> None:
        """Close the exporter."""

    def __enter__(self) -> Exporter:
        """Enter a ``with`` block; the exporter itself is the target."""
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Exit a ``with`` block, closing the exporter."""
        self.close()
|
61
|
+
|
|
62
|
+
class FileExporter(Exporter):
    """
    Export audit events to JSON Lines files.

    One JSON document is written per line (unless ``pretty`` is set, in
    which case documents span multiple lines), which keeps the output
    easy to stream-process or inspect with tools like jq.

    Example:
        >>> with FileExporter("audit.jsonl") as exporter:
        ...     for event in chain:
        ...         exporter.export_event(event)
    """

    def __init__(
        self,
        path: str | Path,
        append: bool = True,
        sync_writes: bool = False,
        pretty: bool = False,
    ) -> None:
        """
        Initialize the file exporter.

        Args:
            path: Path to the output file.
            append: If True, append to existing file.
            sync_writes: If True, flush after each write.
            pretty: If True, use indented JSON (multi-line).
        """
        self.path = Path(path)
        self.append = append
        self.sync_writes = sync_writes
        self.pretty = pretty
        self._lock = threading.RLock()

        # Create any missing parent directories, then open the target file.
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self._file: TextIO | None = open(
            self.path, "a" if append else "w", encoding="utf-8"
        )

    def export_event(self, event: AuditEventV2) -> None:
        """Write a single event to the file as one JSON document."""
        with self._lock:
            if self._file is None:
                raise RuntimeError("Exporter is closed")

            if self.pretty:
                serialized = json.dumps(
                    event.to_dict(), indent=2, sort_keys=True, default=str
                )
            else:
                serialized = event.to_json(pretty=False)
            self._file.write(serialized + "\n")

            if self.sync_writes:
                self._file.flush()

    def export_batch(self, batch: MerkleBatch) -> None:
        """Write a batch marker record to the file."""
        with self._lock:
            if self._file is None:
                raise RuntimeError("Exporter is closed")

            record = {
                "_type": "batch_marker",
                "batch": batch.to_dict(),
            }
            # indent=None yields the same compact output as omitting it.
            serialized = json.dumps(
                record, indent=2 if self.pretty else None, sort_keys=True
            )
            self._file.write(serialized + "\n")

            if self.sync_writes:
                self._file.flush()

    def flush(self) -> None:
        """Flush buffered writes to disk."""
        with self._lock:
            if self._file is not None:
                self._file.flush()

    def close(self) -> None:
        """Close the file; subsequent exports raise RuntimeError."""
        with self._lock:
            if self._file is not None:
                self._file.close()
                self._file = None
|
|
155
|
+
|
|
156
|
+
class ConsoleExporter(Exporter):
    """
    Pretty-print audit events to the console.

    Useful for development and debugging. Formats events
    with colors (if supported) and readable structure.

    Example:
        >>> exporter = ConsoleExporter()
        >>> exporter.export_event(event)
        # Prints formatted event to stdout
    """

    # ANSI color codes
    COLORS = {
        "reset": "\033[0m",
        "bold": "\033[1m",
        "dim": "\033[2m",
        "red": "\033[31m",
        "green": "\033[32m",
        "yellow": "\033[33m",
        "blue": "\033[34m",
        "magenta": "\033[35m",
        "cyan": "\033[36m",
    }

    def __init__(
        self,
        output: TextIO | None = None,
        use_colors: bool = True,
        verbose: bool = False,
    ) -> None:
        """
        Initialize the console exporter.

        Args:
            output: Output stream (default: stdout).
            use_colors: Whether to use ANSI colors.
            verbose: If True, show all fields including hashes.
        """
        self.output = output or sys.stdout
        # Colors are only used when requested AND the stream is a TTY.
        self.use_colors = use_colors and self._supports_colors()
        self.verbose = verbose
        self._lock = threading.RLock()

    def _supports_colors(self) -> bool:
        """Check if the output supports ANSI colors."""
        # Streams without isatty (e.g. StringIO subclasses) are treated
        # as non-interactive, so no escape codes leak into captured text.
        if not hasattr(self.output, "isatty"):
            return False
        return self.output.isatty()

    def _color(self, text: str, color: str) -> str:
        """Apply color to text if colors are enabled."""
        if not self.use_colors:
            return text
        return f"{self.COLORS.get(color, '')}{text}{self.COLORS['reset']}"

    def export_event(self, event: AuditEventV2) -> None:
        """Pretty-print an event to the console."""
        with self._lock:
            lines = []

            # Header with timestamp and event type; color/symbol chosen by
            # substring match on the event type name.
            event_type = event.data.event_type.value
            if "granted" in event_type:
                type_color = "green"
                status_symbol = "✓"
            elif "denied" in event_type or "violation" in event_type:
                type_color = "red"
                status_symbol = "✗"
            else:
                type_color = "cyan"
                status_symbol = "•"

            header = f"{status_symbol} {event_type.upper()}"
            lines.append(self._color(header, type_color))

            # Timestamp
            # NOTE(review): the "UTC" label is hardcoded — assumes
            # event.timestamp is already UTC; confirm upstream.
            timestamp = event.timestamp.strftime("%Y-%m-%d %H:%M:%S UTC")
            lines.append(f"  {self._color('Time:', 'dim')} {timestamp}")

            # User info
            user_info = f"{event.data.user_id}"
            if event.data.user_roles:
                roles = ", ".join(event.data.user_roles)
                user_info += f" [{roles}]"
            lines.append(f"  {self._color('User:', 'dim')} {user_info}")

            # Tool call
            tool_info = f"{event.data.tool_name}"
            lines.append(f"  {self._color('Tool:', 'dim')} {tool_info}")

            # Arguments (truncated for readability)
            if event.data.tool_arguments:
                args_str = json.dumps(event.data.tool_arguments, default=str)
                if len(args_str) > 80:
                    args_str = args_str[:77] + "..."
                lines.append(f"  {self._color('Args:', 'dim')} {args_str}")

            # Authorization result
            if event.data.authorization_allowed:
                result = self._color("ALLOWED", "green")
            else:
                result = self._color("DENIED", "red")
            lines.append(f"  {self._color('Result:', 'dim')} {result}")

            # Reason
            if event.data.authorization_reason:
                lines.append(f"  {self._color('Reason:', 'dim')} {event.data.authorization_reason}")

            # Verbose output: identifiers and (truncated) hash for debugging.
            if self.verbose:
                lines.append(f"  {self._color('Event ID:', 'dim')} {event.event_id}")
                lines.append(f"  {self._color('Sequence:', 'dim')} {event.sequence_number}")
                if event.event_hash:
                    hash_short = event.event_hash[:30] + "..."
                    lines.append(f"  {self._color('Hash:', 'dim')} {hash_short}")

            # Empty line for separation
            lines.append("")

            self.output.write("\n".join(lines) + "\n")

    def export_batch(self, batch: MerkleBatch) -> None:
        """Pretty-print a batch marker."""
        with self._lock:
            lines = []

            header = self._color("═══ BATCH FINALIZED ═══", "magenta")
            lines.append(header)
            lines.append(f"  {self._color('Batch ID:', 'dim')} {batch.batch_id}")
            seq_range = f"{batch.start_sequence}-{batch.end_sequence}"
            lines.append(f"  {self._color('Events:', 'dim')} {batch.event_count} (seq {seq_range})")

            # Truncated Merkle root, only shown in verbose mode.
            if self.verbose:
                root_short = batch.merkle_root[:30] + "..."
                lines.append(f"  {self._color('Merkle Root:', 'dim')} {root_short}")

            lines.append("")

            self.output.write("\n".join(lines) + "\n")

    def flush(self) -> None:
        """Flush the output stream."""
        with self._lock:
            self.output.flush()

    def close(self) -> None:
        """No-op for console exporter (stdout is never closed here)."""
        pass
|
306
|
+
|
|
307
|
+
|
|
308
|
+
class StreamExporter(Exporter):
    """
    Generic stream exporter for custom output destinations.

    Writes audit events to any TextIO stream in JSON Lines format
    (one JSON document per line).

    Example:
        >>> import io
        >>> buffer = io.StringIO()
        >>> with StreamExporter(buffer) as exporter:
        ...     exporter.export_event(event)
        >>> print(buffer.getvalue())
    """

    def __init__(
        self,
        stream: TextIO,
        close_on_exit: bool = False,
    ) -> None:
        """
        Initialize the stream exporter.

        Args:
            stream: The output stream.
            close_on_exit: If True, close stream on exit.
        """
        self.stream = stream
        self.close_on_exit = close_on_exit
        self._lock = threading.RLock()

    def export_event(self, event: AuditEventV2) -> None:
        """Serialize one event as compact JSON and write it as a line."""
        with self._lock:
            self.stream.write(event.to_json(pretty=False) + "\n")

    def export_batch(self, batch: MerkleBatch) -> None:
        """Write a batch marker record as a single JSON line."""
        with self._lock:
            payload = json.dumps(
                {"_type": "batch_marker", "batch": batch.to_dict()},
                sort_keys=True,
            )
            self.stream.write(payload + "\n")

    def flush(self) -> None:
        """Flush the stream."""
        with self._lock:
            self.stream.flush()

    def close(self) -> None:
        """Close the stream, but only if close_on_exit was requested."""
        with self._lock:
            if self.close_on_exit:
                self.stream.close()
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
class CallbackExporter(Exporter):
    """
    Exporter that invokes user-supplied callbacks.

    Every exported event is handed to ``event_callback``; batch markers
    go to ``batch_callback`` when one was provided. Handy for wiring
    audit output into metrics, SIEM pipelines, or other custom
    processing.

    Example:
        >>> def handle_event(event):
        ...     send_to_siem(event.to_dict())
        >>>
        >>> exporter = CallbackExporter(handle_event)
        >>> exporter.export_event(event)
    """

    def __init__(
        self,
        event_callback: Callable[[AuditEventV2], None],
        batch_callback: Callable[[MerkleBatch], None] | None = None,
    ) -> None:
        """
        Initialize the callback exporter.

        Args:
            event_callback: Function to call for each event.
            batch_callback: Optional function for batch markers.
        """
        self.event_callback = event_callback
        self.batch_callback = batch_callback

    def export_event(self, event: AuditEventV2) -> None:
        """Forward the event to the event callback."""
        self.event_callback(event)

    def export_batch(self, batch: MerkleBatch) -> None:
        """Forward the batch marker to the batch callback, if configured."""
        if self.batch_callback:
            self.batch_callback(batch)

    def flush(self) -> None:
        """Nothing is buffered; no-op."""

    def close(self) -> None:
        """Nothing to release; no-op."""
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
class MultiExporter(Exporter):
    """
    Exporter that writes to multiple destinations.

    Useful for simultaneously logging to file and console,
    or sending to multiple external systems.

    Events are forwarded to each exporter in list order; if one raises,
    the error propagates immediately (fail-fast), so later exporters may
    not receive that event. ``flush`` and ``close`` instead always
    attempt every exporter and re-raise the first failure afterwards, so
    a single broken destination cannot leak the resources (open files,
    sockets) held by the others.

    Example:
        >>> file_exp = FileExporter("audit.jsonl")
        >>> console_exp = ConsoleExporter()
        >>> multi = MultiExporter([file_exp, console_exp])
        >>> multi.export_event(event)  # Writes to both
    """

    def __init__(self, exporters: list[Exporter]) -> None:
        """
        Initialize with multiple exporters.

        Args:
            exporters: List of exporters to write to.
        """
        self.exporters = exporters

    def export_event(self, event: AuditEventV2) -> None:
        """Export the event to all exporters (fail-fast on error)."""
        for exporter in self.exporters:
            exporter.export_event(event)

    def export_batch(self, batch: MerkleBatch) -> None:
        """Export the batch marker to all exporters (fail-fast on error)."""
        for exporter in self.exporters:
            exporter.export_batch(batch)

    def flush(self) -> None:
        """Flush every exporter; re-raise the first error at the end."""
        self._for_each("flush")

    def close(self) -> None:
        """Close every exporter; re-raise the first error at the end."""
        self._for_each("close")

    def _for_each(self, method_name: str) -> None:
        """Call *method_name* on every exporter, deferring the first exception.

        Ensures all exporters get a chance to flush/close even when an
        earlier one fails; the first exception is re-raised afterwards so
        callers still see the failure.
        """
        first_error: Exception | None = None
        for exporter in self.exporters:
            try:
                getattr(exporter, method_name)()
            except Exception as exc:  # must still reach remaining exporters
                if first_error is None:
                    first_error = exc
        if first_error is not None:
            raise first_error
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def read_jsonl_events(path: str | Path) -> Iterator[AuditEventV2]:
    """
    Read audit events from a JSON Lines file.

    Blank lines are ignored, and batch-marker records (objects whose
    ``_type`` field is ``"batch_marker"``) are skipped; every remaining
    line is parsed into an event.

    Args:
        path: Path to the JSON Lines file.

    Yields:
        AuditEventV2 instances.

    Example:
        >>> for event in read_jsonl_events("audit.jsonl"):
        ...     print(event.event_id)
    """
    with open(path, encoding="utf-8") as handle:
        for raw_line in handle:
            stripped = raw_line.strip()
            if not stripped:
                continue

            record = json.loads(stripped)

            # Batch markers are metadata, not events — skip them.
            if record.get("_type") == "batch_marker":
                continue

            yield AuditEventV2.from_dict(record)
|
|
483
|
+
|
|
484
|
+
|
|
485
|
+
def verify_jsonl_chain(path: str | Path) -> tuple[bool, str | None]:
    """
    Verify the hash chain in a JSON Lines audit log.

    Events are read in file order and checked for (a) correct linkage:
    each event's ``previous_hash`` must equal the preceding event's hash,
    with the first event linking to ``GENESIS_HASH``; and (b) integrity:
    ``event.verify_hash()`` must succeed for every event.

    Args:
        path: Path to the JSON Lines file.

    Returns:
        Tuple of (is_valid, error_message). ``error_message`` is None on
        success. Positions in error messages are 1-based *event* indexes
        — blank lines and batch markers are skipped by the reader, so a
        file line number would be misleading.

    Example:
        >>> valid, error = verify_jsonl_chain("audit.jsonl")
        >>> if not valid:
        ...     print(f"Chain verification failed: {error}")
    """
    # Local import keeps module import light and avoids cycles.
    from proxilion.audit.hash_chain import GENESIS_HASH

    expected_previous = GENESIS_HASH
    event_index = 0  # 1-based index of the current event (not a file line)

    try:
        for event in read_jsonl_events(path):
            event_index += 1

            # Each event must link back to the hash of its predecessor.
            if event.previous_hash != expected_previous:
                return False, (
                    f"Chain broken at event {event_index}: "
                    f"expected {expected_previous}, got {event.previous_hash}"
                )

            # Detect tampering with the event payload itself.
            if not event.verify_hash():
                return False, f"Invalid hash at event {event_index}: event tampered"

            expected_previous = event.event_hash

    except json.JSONDecodeError as e:
        # JSONDecodeError already carries the offending file line/column.
        return False, f"JSON parse error after event {event_index}: {e}"
    except Exception as e:
        return False, f"Verification error: {e}"

    return True, None
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Compliance-ready audit export formats.
|
|
3
|
+
|
|
4
|
+
This module provides exporters that transform audit logs into
|
|
5
|
+
compliance-ready formats for various regulatory frameworks:
|
|
6
|
+
|
|
7
|
+
- EU AI Act (Article 14, 15, 17 requirements)
|
|
8
|
+
- SOC 2 Type II (Trust Service Criteria)
|
|
9
|
+
- ISO 27001 (Annex A controls)
|
|
10
|
+
|
|
11
|
+
Each exporter provides framework-specific evidence gathering,
|
|
12
|
+
report generation, and export capabilities.
|
|
13
|
+
|
|
14
|
+
Quick Start:
|
|
15
|
+
>>> from proxilion.audit import InMemoryAuditLogger
|
|
16
|
+
>>> from proxilion.audit.compliance import (
|
|
17
|
+
... EUAIActExporter,
|
|
18
|
+
... SOC2Exporter,
|
|
19
|
+
... ISO27001Exporter,
|
|
20
|
+
... )
|
|
21
|
+
>>> from datetime import datetime, timedelta, timezone
|
|
22
|
+
>>>
|
|
23
|
+
>>> # Create logger and log some events
|
|
24
|
+
>>> logger = InMemoryAuditLogger()
|
|
25
|
+
>>> # ... log events ...
|
|
26
|
+
>>>
|
|
27
|
+
>>> # Define reporting period
|
|
28
|
+
>>> end = datetime.now(timezone.utc)
|
|
29
|
+
>>> start = end - timedelta(days=90)
|
|
30
|
+
>>>
|
|
31
|
+
>>> # EU AI Act compliance
|
|
32
|
+
>>> eu_exporter = EUAIActExporter(
|
|
33
|
+
... logger,
|
|
34
|
+
... organization="Acme Corp",
|
|
35
|
+
... system_name="Customer AI",
|
|
36
|
+
... responsible_party="AI Team",
|
|
37
|
+
... )
|
|
38
|
+
>>> eu_report = eu_exporter.generate_report(start, end)
|
|
39
|
+
>>>
|
|
40
|
+
>>> # SOC 2 compliance
|
|
41
|
+
>>> soc2_exporter = SOC2Exporter(logger, organization="Acme Corp")
|
|
42
|
+
>>> soc2_report = soc2_exporter.generate_report(start, end)
|
|
43
|
+
>>>
|
|
44
|
+
>>> # ISO 27001 compliance
|
|
45
|
+
>>> iso_exporter = ISO27001Exporter(logger, organization="Acme Corp")
|
|
46
|
+
>>> iso_report = iso_exporter.generate_report(start, end)
|
|
47
|
+
|
|
48
|
+
Export Formats:
|
|
49
|
+
Each exporter supports multiple export formats:
|
|
50
|
+
|
|
51
|
+
- JSON: Machine-readable format
|
|
52
|
+
>>> json_output = exporter.export_json(start, end)
|
|
53
|
+
|
|
54
|
+
- Markdown: Human-readable report
|
|
55
|
+
>>> markdown_output = exporter.export_markdown(start, end)
|
|
56
|
+
|
|
57
|
+
- Report object: Structured Python object
|
|
58
|
+
>>> report = exporter.generate_report(start, end)
|
|
59
|
+
>>> print(report.summary)
|
|
60
|
+
|
|
61
|
+
Framework-Specific Methods:
|
|
62
|
+
Each exporter also provides framework-specific evidence methods:
|
|
63
|
+
|
|
64
|
+
EU AI Act:
|
|
65
|
+
>>> oversight = eu_exporter.export_human_oversight_evidence(start, end)
|
|
66
|
+
>>> trail = eu_exporter.export_decision_audit_trail(start, end)
|
|
67
|
+
>>> risks = eu_exporter.export_risk_assessment_log(start, end)
|
|
68
|
+
|
|
69
|
+
SOC 2:
|
|
70
|
+
>>> access = soc2_exporter.export_access_control_evidence(start, end)
|
|
71
|
+
>>> monitoring = soc2_exporter.export_monitoring_evidence(start, end)
|
|
72
|
+
>>> changes = soc2_exporter.export_change_management_evidence(start, end)
|
|
73
|
+
|
|
74
|
+
ISO 27001:
|
|
75
|
+
>>> a9 = iso_exporter.export_access_control_a9(start, end)
|
|
76
|
+
>>> a12 = iso_exporter.export_operations_security_a12(start, end)
|
|
77
|
+
>>> a16 = iso_exporter.export_incident_management_a16(start, end)
|
|
78
|
+
"""
|
|
79
|
+
|
|
80
|
+
from proxilion.audit.compliance.base import (
|
|
81
|
+
BaseComplianceExporter,
|
|
82
|
+
ComplianceEvidence,
|
|
83
|
+
ComplianceFramework,
|
|
84
|
+
ComplianceMetadata,
|
|
85
|
+
ComplianceReport,
|
|
86
|
+
EventSource,
|
|
87
|
+
)
|
|
88
|
+
from proxilion.audit.compliance.eu_ai_act import (
|
|
89
|
+
DecisionAuditTrailEntry,
|
|
90
|
+
EUAIActExporter,
|
|
91
|
+
HumanOversightEvidence,
|
|
92
|
+
RiskAssessmentEntry,
|
|
93
|
+
)
|
|
94
|
+
from proxilion.audit.compliance.iso27001 import (
|
|
95
|
+
AccessControlA9Evidence,
|
|
96
|
+
IncidentManagementA16Evidence,
|
|
97
|
+
ISO27001Exporter,
|
|
98
|
+
OperationsSecurityA12Evidence,
|
|
99
|
+
)
|
|
100
|
+
from proxilion.audit.compliance.soc2 import (
|
|
101
|
+
AccessControlEvidence,
|
|
102
|
+
ChangeManagementEvidence,
|
|
103
|
+
MonitoringEvidence,
|
|
104
|
+
SOC2Exporter,
|
|
105
|
+
)
|
|
106
|
+
|
|
107
|
+
# Public re-export surface of proxilion.audit.compliance, grouped by framework.
__all__ = [
    # Base classes
    "BaseComplianceExporter",
    "ComplianceEvidence",
    "ComplianceFramework",
    "ComplianceMetadata",
    "ComplianceReport",
    "EventSource",
    # EU AI Act
    "EUAIActExporter",
    "DecisionAuditTrailEntry",
    "HumanOversightEvidence",
    "RiskAssessmentEntry",
    # SOC 2
    "SOC2Exporter",
    "AccessControlEvidence",
    "MonitoringEvidence",
    "ChangeManagementEvidence",
    # ISO 27001
    "ISO27001Exporter",
    "AccessControlA9Evidence",
    "OperationsSecurityA12Evidence",
    "IncidentManagementA16Evidence",
]
|