proxilion 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- proxilion/__init__.py +136 -0
- proxilion/audit/__init__.py +133 -0
- proxilion/audit/base_exporters.py +527 -0
- proxilion/audit/compliance/__init__.py +130 -0
- proxilion/audit/compliance/base.py +457 -0
- proxilion/audit/compliance/eu_ai_act.py +603 -0
- proxilion/audit/compliance/iso27001.py +544 -0
- proxilion/audit/compliance/soc2.py +491 -0
- proxilion/audit/events.py +493 -0
- proxilion/audit/explainability.py +1173 -0
- proxilion/audit/exporters/__init__.py +58 -0
- proxilion/audit/exporters/aws_s3.py +636 -0
- proxilion/audit/exporters/azure_storage.py +608 -0
- proxilion/audit/exporters/cloud_base.py +468 -0
- proxilion/audit/exporters/gcp_storage.py +570 -0
- proxilion/audit/exporters/multi_exporter.py +498 -0
- proxilion/audit/hash_chain.py +652 -0
- proxilion/audit/logger.py +543 -0
- proxilion/caching/__init__.py +49 -0
- proxilion/caching/tool_cache.py +633 -0
- proxilion/context/__init__.py +73 -0
- proxilion/context/context_window.py +556 -0
- proxilion/context/message_history.py +505 -0
- proxilion/context/session.py +735 -0
- proxilion/contrib/__init__.py +51 -0
- proxilion/contrib/anthropic.py +609 -0
- proxilion/contrib/google.py +1012 -0
- proxilion/contrib/langchain.py +641 -0
- proxilion/contrib/mcp.py +893 -0
- proxilion/contrib/openai.py +646 -0
- proxilion/core.py +3058 -0
- proxilion/decorators.py +966 -0
- proxilion/engines/__init__.py +287 -0
- proxilion/engines/base.py +266 -0
- proxilion/engines/casbin_engine.py +412 -0
- proxilion/engines/opa_engine.py +493 -0
- proxilion/engines/simple.py +437 -0
- proxilion/exceptions.py +887 -0
- proxilion/guards/__init__.py +54 -0
- proxilion/guards/input_guard.py +522 -0
- proxilion/guards/output_guard.py +634 -0
- proxilion/observability/__init__.py +198 -0
- proxilion/observability/cost_tracker.py +866 -0
- proxilion/observability/hooks.py +683 -0
- proxilion/observability/metrics.py +798 -0
- proxilion/observability/session_cost_tracker.py +1063 -0
- proxilion/policies/__init__.py +67 -0
- proxilion/policies/base.py +304 -0
- proxilion/policies/builtin.py +486 -0
- proxilion/policies/registry.py +376 -0
- proxilion/providers/__init__.py +201 -0
- proxilion/providers/adapter.py +468 -0
- proxilion/providers/anthropic_adapter.py +330 -0
- proxilion/providers/gemini_adapter.py +391 -0
- proxilion/providers/openai_adapter.py +294 -0
- proxilion/py.typed +0 -0
- proxilion/resilience/__init__.py +81 -0
- proxilion/resilience/degradation.py +615 -0
- proxilion/resilience/fallback.py +555 -0
- proxilion/resilience/retry.py +554 -0
- proxilion/scheduling/__init__.py +57 -0
- proxilion/scheduling/priority_queue.py +419 -0
- proxilion/scheduling/scheduler.py +459 -0
- proxilion/security/__init__.py +244 -0
- proxilion/security/agent_trust.py +968 -0
- proxilion/security/behavioral_drift.py +794 -0
- proxilion/security/cascade_protection.py +869 -0
- proxilion/security/circuit_breaker.py +428 -0
- proxilion/security/cost_limiter.py +690 -0
- proxilion/security/idor_protection.py +460 -0
- proxilion/security/intent_capsule.py +849 -0
- proxilion/security/intent_validator.py +495 -0
- proxilion/security/memory_integrity.py +767 -0
- proxilion/security/rate_limiter.py +509 -0
- proxilion/security/scope_enforcer.py +680 -0
- proxilion/security/sequence_validator.py +636 -0
- proxilion/security/trust_boundaries.py +784 -0
- proxilion/streaming/__init__.py +70 -0
- proxilion/streaming/detector.py +761 -0
- proxilion/streaming/transformer.py +674 -0
- proxilion/timeouts/__init__.py +55 -0
- proxilion/timeouts/decorators.py +477 -0
- proxilion/timeouts/manager.py +545 -0
- proxilion/tools/__init__.py +69 -0
- proxilion/tools/decorators.py +493 -0
- proxilion/tools/registry.py +732 -0
- proxilion/types.py +339 -0
- proxilion/validation/__init__.py +93 -0
- proxilion/validation/pydantic_schema.py +351 -0
- proxilion/validation/schema.py +651 -0
- proxilion-0.0.1.dist-info/METADATA +872 -0
- proxilion-0.0.1.dist-info/RECORD +94 -0
- proxilion-0.0.1.dist-info/WHEEL +4 -0
- proxilion-0.0.1.dist-info/licenses/LICENSE +21 -0
proxilion/audit/events.py
@@ -0,0 +1,493 @@
+"""
+Audit event definitions for Proxilion.
+
+This module provides enhanced audit event types with support for
+UUID v7-style time-ordering, sensitive data redaction, and
+canonical JSON serialization for hash chain integrity.
+"""
+
+from __future__ import annotations
+
+import hashlib
+import json
+import os
+import re
+import time
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from enum import Enum
+from re import Pattern
+from typing import Any
+
+
+class EventType(Enum):
+    """Types of audit events."""
+    AUTHORIZATION_REQUEST = "authorization_request"
+    AUTHORIZATION_GRANTED = "authorization_granted"
+    AUTHORIZATION_DENIED = "authorization_denied"
+    TOOL_EXECUTION_START = "tool_execution_start"
+    TOOL_EXECUTION_SUCCESS = "tool_execution_success"
+    TOOL_EXECUTION_FAILURE = "tool_execution_failure"
+    RATE_LIMIT_EXCEEDED = "rate_limit_exceeded"
+    CIRCUIT_BREAKER_OPEN = "circuit_breaker_open"
+    IDOR_VIOLATION = "idor_violation"
+    SCHEMA_VALIDATION_FAILURE = "schema_validation_failure"
+
+
+def _generate_uuid_v7() -> str:
+    """
+    Generate a UUID v7-style identifier for time-ordering.
+
+    UUID v7 embeds a Unix timestamp for time-ordering while maintaining
+    uniqueness. Since Python 3.10 doesn't have native UUID v7, we
+    construct a compatible format manually.
+
+    Format: xxxxxxxx-xxxx-7xxx-yxxx-xxxxxxxxxxxx
+    Where x is derived from timestamp + random, 7 indicates version 7,
+    and y is 8, 9, a, or b for variant 1.
+    """
+    # Get current timestamp in milliseconds
+    timestamp_ms = int(time.time() * 1000)
+
+    # Convert to 48-bit value (6 bytes)
+    timestamp_bytes = timestamp_ms.to_bytes(6, byteorder='big')
+
+    # Generate 10 random bytes
+    random_bytes = os.urandom(10)
+
+    # Combine: 6 bytes timestamp + 10 bytes random
+    uuid_bytes = bytearray(16)
+    uuid_bytes[0:6] = timestamp_bytes
+    uuid_bytes[6:16] = random_bytes
+
+    # Set version (4 bits) to 7: 0111xxxx at position 6
+    uuid_bytes[6] = (uuid_bytes[6] & 0x0F) | 0x70
+
+    # Set variant (2 bits) to RFC 4122: 10xxxxxx at position 8
+    uuid_bytes[8] = (uuid_bytes[8] & 0x3F) | 0x80
+
+    # Format as UUID string
+    hex_str = uuid_bytes.hex()
+    return f"{hex_str[:8]}-{hex_str[8:12]}-{hex_str[12:16]}-{hex_str[16:20]}-{hex_str[20:]}"
+
+
+def _utc_now() -> datetime:
+    """Get current UTC time with timezone info."""
+    return datetime.now(timezone.utc)
+
+
+# Global sequence counter (thread-safe via GIL for simple increments)
+_sequence_counter = 0
+_sequence_lock = None
+
+def _next_sequence() -> int:
+    """Get next sequence number (monotonically increasing)."""
+    global _sequence_counter
+    _sequence_counter += 1
+    return _sequence_counter
+
+
+def reset_sequence(value: int = 0) -> None:
+    """Reset the sequence counter (for testing)."""
+    global _sequence_counter
+    _sequence_counter = value
+
+
+@dataclass
+class RedactionConfig:
+    """
+    Configuration for sensitive data redaction in audit logs.
+
+    Attributes:
+        patterns: Regex patterns to match sensitive data.
+        field_names: Field names that should always be redacted.
+        hash_pii: If True, hash PII instead of replacing with placeholder.
+        placeholder: Replacement text for redacted values.
+    """
+    patterns: list[Pattern[str]] = field(default_factory=list)
+    field_names: set[str] = field(default_factory=set)
+    hash_pii: bool = False
+    placeholder: str = "[REDACTED]"
+
+    @classmethod
+    def default(cls) -> RedactionConfig:
+        """Create default redaction config with common patterns."""
+        return cls(
+            patterns=[
+                re.compile(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'),  # Email
+                re.compile(r'\b\d{3}-\d{2}-\d{4}\b'),  # SSN
+                re.compile(r'\b\d{16}\b'),  # Credit card (simple)
+                re.compile(
+                    r'\b(?:password|passwd|pwd|secret|api_key|apikey|token)\s*[:=]\s*\S+',
+                    re.IGNORECASE,
+                ),
+            ],
+            field_names={
+                "password", "passwd", "secret", "api_key", "apikey",
+                "token", "access_token", "refresh_token", "private_key",
+                "credit_card", "ssn", "social_security",
+            },
+            hash_pii=True,
+        )
+
+
+@dataclass
+class AuditEventData:
+    """
+    Core data for an audit event, separate from hash chain fields.
+
+    This separation allows for cleaner event creation before
+    the event is added to a hash chain.
+    """
+    event_type: EventType
+    user_id: str
+    user_roles: list[str]
+    session_id: str | None
+    user_attributes: dict[str, Any]
+    agent_id: str | None
+    agent_capabilities: list[str]
+    agent_trust_score: float | None
+    tool_name: str
+    tool_arguments: dict[str, Any]
+    tool_timestamp: datetime
+    authorization_allowed: bool
+    authorization_reason: str | None
+    policies_evaluated: list[str]
+    authorization_metadata: dict[str, Any]
+    execution_result: dict[str, Any] | None = None
+    error_message: str | None = None
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "event_type": self.event_type.value,
+            "user": {
+                "user_id": self.user_id,
+                "roles": self.user_roles,
+                "session_id": self.session_id,
+                "attributes": self.user_attributes,
+            },
+            "agent": {
+                "agent_id": self.agent_id,
+                "capabilities": self.agent_capabilities,
+                "trust_score": self.agent_trust_score,
+            } if self.agent_id else None,
+            "tool_call": {
+                "tool_name": self.tool_name,
+                "arguments": self.tool_arguments,
+                "timestamp": self.tool_timestamp.isoformat(),
+            },
+            "authorization": {
+                "allowed": self.authorization_allowed,
+                "reason": self.authorization_reason,
+                "policies_evaluated": self.policies_evaluated,
+                "metadata": self.authorization_metadata,
+            },
+            "execution_result": self.execution_result,
+            "error_message": self.error_message,
+        }
+
+
+@dataclass
+class AuditEventV2:
+    """
+    Enhanced tamper-evident audit log entry.
+
+    Improvements over the base AuditEvent:
+    - UUID v7-style IDs for time-ordering
+    - Event type categorization
+    - Sensitive data redaction support
+    - Improved canonical JSON serialization
+    - Merkle tree integration support
+
+    Attributes:
+        event_id: UUID v7-style identifier for time-ordering.
+        timestamp: When the event occurred (UTC, ISO format).
+        sequence_number: Monotonically increasing counter.
+        event_type: Categorization of the event.
+        data: The core event data.
+        previous_hash: Hash of the previous event in the chain.
+        event_hash: SHA-256 hash of this event.
+        merkle_index: Index in the current Merkle tree batch.
+    """
+    data: AuditEventData
+    previous_hash: str
+    event_id: str = field(default_factory=_generate_uuid_v7)
+    timestamp: datetime = field(default_factory=_utc_now)
+    sequence_number: int = field(default_factory=_next_sequence)
+    event_hash: str = ""
+    merkle_index: int | None = None
+
+    def _canonical_dict(self) -> dict[str, Any]:
+        """
+        Create canonical dictionary for hashing.
+
+        This ensures consistent ordering and formatting
+        for reproducible hash computation.
+        """
+        return {
+            "event_id": self.event_id,
+            "timestamp": self.timestamp.isoformat(),
+            "sequence_number": self.sequence_number,
+            "data": self.data.to_dict(),
+            "previous_hash": self.previous_hash,
+        }
+
+    def _canonical_json(self) -> str:
+        """
+        Generate canonical JSON representation for hashing.
+
+        Uses sorted keys and minimal separators to ensure
+        deterministic serialization.
+        """
+        return json.dumps(
+            self._canonical_dict(),
+            sort_keys=True,
+            separators=(",", ":"),
+            default=str,
+        )
+
+    def compute_hash(self) -> str:
+        """
+        Compute and set the event hash using SHA-256.
+
+        Returns:
+            The computed hash as a hex string prefixed with 'sha256:'.
+        """
+        canonical = self._canonical_json()
+        hash_bytes = hashlib.sha256(canonical.encode("utf-8")).hexdigest()
+        self.event_hash = f"sha256:{hash_bytes}"
+        return self.event_hash
+
+    def verify_hash(self) -> bool:
+        """
+        Verify that the stored hash matches the computed hash.
+
+        Returns:
+            True if the hash is valid, False if tampered.
+        """
+        if not self.event_hash:
+            return False
+
+        # Store original hash
+        stored_hash = self.event_hash
+
+        # Compute expected hash (this sets self.event_hash)
+        canonical = self._canonical_json()
+        expected = f"sha256:{hashlib.sha256(canonical.encode('utf-8')).hexdigest()}"
+
+        # Restore original
+        self.event_hash = stored_hash
+
+        return stored_hash == expected
+
+    def to_dict(self, include_hash: bool = True) -> dict[str, Any]:
+        """
+        Convert to dictionary for serialization.
+
+        Args:
+            include_hash: Whether to include the event_hash field.
+        """
+        result = self._canonical_dict()
+        if include_hash:
+            result["event_hash"] = self.event_hash
+        if self.merkle_index is not None:
+            result["merkle_index"] = self.merkle_index
+        return result
+
+    def to_json(self, pretty: bool = False) -> str:
+        """
+        Convert to JSON string.
+
+        Args:
+            pretty: If True, use indented formatting.
+        """
+        if pretty:
+            return json.dumps(self.to_dict(), sort_keys=True, indent=2, default=str)
+        return json.dumps(self.to_dict(), sort_keys=True, separators=(",", ":"), default=str)
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> AuditEventV2:
+        """
+        Create an AuditEventV2 from a dictionary.
+
+        Args:
+            data: Dictionary representation of the event.
+
+        Returns:
+            Reconstructed AuditEventV2 instance.
+        """
+        event_data = AuditEventData(
+            event_type=EventType(data["data"]["event_type"]),
+            user_id=data["data"]["user"]["user_id"],
+            user_roles=data["data"]["user"]["roles"],
+            session_id=data["data"]["user"]["session_id"],
+            user_attributes=data["data"]["user"]["attributes"],
+            agent_id=(
+                data["data"]["agent"]["agent_id"] if data["data"]["agent"] else None
+            ),
+            agent_capabilities=(
+                data["data"]["agent"]["capabilities"] if data["data"]["agent"] else []
+            ),
+            agent_trust_score=(
+                data["data"]["agent"]["trust_score"] if data["data"]["agent"] else None
+            ),
+            tool_name=data["data"]["tool_call"]["tool_name"],
+            tool_arguments=data["data"]["tool_call"]["arguments"],
+            tool_timestamp=datetime.fromisoformat(data["data"]["tool_call"]["timestamp"]),
+            authorization_allowed=data["data"]["authorization"]["allowed"],
+            authorization_reason=data["data"]["authorization"]["reason"],
+            policies_evaluated=data["data"]["authorization"]["policies_evaluated"],
+            authorization_metadata=data["data"]["authorization"]["metadata"],
+            execution_result=data["data"]["execution_result"],
+            error_message=data["data"]["error_message"],
+        )
+
+        event = cls(
+            data=event_data,
+            previous_hash=data["previous_hash"],
+            event_id=data["event_id"],
+            timestamp=datetime.fromisoformat(data["timestamp"]),
+            sequence_number=data["sequence_number"],
+            event_hash=data.get("event_hash", ""),
+            merkle_index=data.get("merkle_index"),
+        )
+
+        return event
+
+
+def redact_sensitive_data(
+    data: dict[str, Any],
+    config: RedactionConfig,
+) -> dict[str, Any]:
+    """
+    Redact sensitive data from a dictionary.
+
+    Args:
+        data: Dictionary to redact.
+        config: Redaction configuration.
+
+    Returns:
+        New dictionary with sensitive data redacted.
+    """
+    result = {}
+
+    for key, value in data.items():
+        # Check if field name should be redacted
+        if key.lower() in config.field_names:
+            if config.hash_pii and isinstance(value, str):
+                result[key] = f"[HASH:{hashlib.sha256(value.encode()).hexdigest()[:16]}]"
+            else:
+                result[key] = config.placeholder
+            continue
+
+        # Recursively handle nested dicts
+        if isinstance(value, dict):
+            result[key] = redact_sensitive_data(value, config)
+            continue
+
+        # Recursively handle lists
+        if isinstance(value, list):
+            result[key] = [
+                redact_sensitive_data(item, config) if isinstance(item, dict)
+                else _redact_string(item, config) if isinstance(item, str)
+                else item
+                for item in value
+            ]
+            continue
+
+        # Check string values against patterns
+        if isinstance(value, str):
+            result[key] = _redact_string(value, config)
+            continue
+
+        result[key] = value
+
+    return result
+
+
+def _redact_string(value: str, config: RedactionConfig) -> str:
+    """Redact sensitive patterns from a string."""
+    result = value
+    for pattern in config.patterns:
+        if config.hash_pii:
+            # Replace each match with a hash of the matched value
+            def hash_replace(match: re.Match[str]) -> str:
+                return f"[HASH:{hashlib.sha256(match.group().encode()).hexdigest()[:16]}]"
+            result = pattern.sub(hash_replace, result)
+        else:
+            result = pattern.sub(config.placeholder, result)
+    return result
+
+
+def create_authorization_event(
+    user_id: str,
+    user_roles: list[str],
+    tool_name: str,
+    tool_arguments: dict[str, Any],
+    allowed: bool,
+    reason: str | None = None,
+    policies_evaluated: list[str] | None = None,
+    session_id: str | None = None,
+    user_attributes: dict[str, Any] | None = None,
+    agent_id: str | None = None,
+    agent_capabilities: list[str] | None = None,
+    agent_trust_score: float | None = None,
+    previous_hash: str = "GENESIS",
+    redaction_config: RedactionConfig | None = None,
+) -> AuditEventV2:
+    """
+    Factory function to create an authorization audit event.
+
+    This provides a convenient way to create events with minimal
+    boilerplate while ensuring all required fields are set.
+
+    Args:
+        user_id: The user's identifier.
+        user_roles: List of user roles.
+        tool_name: Name of the tool being called.
+        tool_arguments: Arguments passed to the tool.
+        allowed: Whether authorization was granted.
+        reason: Explanation for the decision.
+        policies_evaluated: List of policies that were checked.
+        session_id: Optional session identifier.
+        user_attributes: Optional user attributes.
+        agent_id: Optional agent identifier.
+        agent_capabilities: Optional agent capabilities.
+        agent_trust_score: Optional agent trust score.
+        previous_hash: Hash of previous event in chain.
+        redaction_config: Optional config for sensitive data redaction.
+
+    Returns:
+        A new AuditEventV2 instance.
+    """
+    # Apply redaction if configured
+    if redaction_config:
+        tool_arguments = redact_sensitive_data(tool_arguments, redaction_config)
+        if user_attributes:
+            user_attributes = redact_sensitive_data(user_attributes, redaction_config)
+
+    event_type = (
+        EventType.AUTHORIZATION_GRANTED if allowed
+        else EventType.AUTHORIZATION_DENIED
+    )
+
+    data = AuditEventData(
+        event_type=event_type,
+        user_id=user_id,
+        user_roles=user_roles,
+        session_id=session_id,
+        user_attributes=user_attributes or {},
+        agent_id=agent_id,
+        agent_capabilities=agent_capabilities or [],
+        agent_trust_score=agent_trust_score,
+        tool_name=tool_name,
+        tool_arguments=tool_arguments,
+        tool_timestamp=_utc_now(),
+        authorization_allowed=allowed,
+        authorization_reason=reason,
+        policies_evaluated=policies_evaluated or [],
+        authorization_metadata={},
+    )
+
+    return AuditEventV2(data=data, previous_hash=previous_hash)