aofire_python_agent-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aofire_python_agent-0.1.0.dist-info/METADATA +405 -0
- aofire_python_agent-0.1.0.dist-info/RECORD +23 -0
- aofire_python_agent-0.1.0.dist-info/WHEEL +5 -0
- aofire_python_agent-0.1.0.dist-info/entry_points.txt +7 -0
- aofire_python_agent-0.1.0.dist-info/licenses/LICENSE +28 -0
- aofire_python_agent-0.1.0.dist-info/top_level.txt +1 -0
- python_agent/CLAUDE.md +105 -0
- python_agent/__init__.py +3 -0
- python_agent/agent_utils.py +61 -0
- python_agent/call_graph.py +694 -0
- python_agent/coding_agent.py +193 -0
- python_agent/convergence_agent.py +362 -0
- python_agent/dag_integrity.py +198 -0
- python_agent/dag_utils.py +181 -0
- python_agent/discovery_agent.py +348 -0
- python_agent/divergence_agent.py +302 -0
- python_agent/ontology.py +270 -0
- python_agent/planning_agent.py +83 -0
- python_agent/py.typed +0 -0
- python_agent/rules.py +383 -0
- python_agent/tool_guard.py +164 -0
- python_agent/tools/__init__.py +0 -0
- python_agent/types.py +38 -0
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
"""HMAC integrity verification and injection scanning."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import hmac
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import re
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
_INJECTION_PATTERNS: list[re.Pattern[str]] = [
|
|
13
|
+
re.compile(p, re.IGNORECASE) for p in [
|
|
14
|
+
r"ignore\s+(all\s+)?previous\s+instructions",
|
|
15
|
+
r"disregard\s+(all\s+)?previous",
|
|
16
|
+
r"you\s+are\s+now\s+a",
|
|
17
|
+
r"new\s+instructions:",
|
|
18
|
+
r"system\s+prompt:",
|
|
19
|
+
r"</ontology-data>",
|
|
20
|
+
r"</strategy-data>",
|
|
21
|
+
r"</candidate-summaries>",
|
|
22
|
+
r"</context-data>",
|
|
23
|
+
r"</user-input>",
|
|
24
|
+
]
|
|
25
|
+
]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def generate_key() -> str:
    """Return a fresh random key: 32 CSPRNG bytes as 64 hex chars."""
    raw = os.urandom(32)
    return raw.hex()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def load_or_create_key(path: str) -> str:
    """Load the hex HMAC key from *path*, creating it if absent.

    A newly created key file is opened with ``O_CREAT | O_EXCL`` and
    mode ``0o600`` so the secret is never world-readable and a
    concurrent creator is never silently clobbered: if another
    process wins the creation race, its key is read back instead.

    Returns:
        The hex-encoded key string (surrounding whitespace stripped
        when read from disk).
    """
    try:
        with open(path) as f:
            return f.read().strip()
    except FileNotFoundError:
        pass
    key = generate_key()
    try:
        # O_EXCL: fail rather than overwrite a concurrently created key.
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o600)
    except FileExistsError:
        # Lost the race: use the key the other process wrote.
        with open(path) as f:
            return f.read().strip()
    with os.fdopen(fd, "w") as f:
        f.write(key)
    return key
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def compute_hash(
    ontology_dict: dict[str, Any], key: str,
) -> str:
    """Return the HMAC-SHA256 hex digest of *ontology_dict*.

    The dict is serialized as JSON with sorted keys so equal content
    always hashes identically; *key* is a hex-encoded secret.
    """
    serialized = json.dumps(ontology_dict, sort_keys=True).encode()
    mac = hmac.new(bytes.fromhex(key), serialized, hashlib.sha256)
    return mac.hexdigest()
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def sign_node(node: Any, key: str) -> None:
    """Stamp ``node.integrity_hash`` with an HMAC over its ontology."""
    content = node.ontology.model_dump()
    node.integrity_hash = compute_hash(content, key)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def verify_node(node: Any, key: str) -> bool:
    """Check ``node.integrity_hash`` against its ontology content.

    Unsigned nodes (empty hash) always fail verification.
    """
    stored = node.integrity_hash
    if not stored:
        return False
    recomputed = compute_hash(node.ontology.model_dump(), key)
    # Constant-time comparison avoids timing side channels.
    return hmac.compare_digest(stored, recomputed)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def verify_dag(dag: Any, key: str) -> list[str]:
    """Return IDs of signed nodes whose hash no longer verifies.

    Nodes with an empty integrity hash are considered unsigned and
    are skipped rather than reported.
    """
    return [
        node.id
        for node in dag.nodes
        if node.integrity_hash and not verify_node(node, key)
    ]
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def scan_text_for_injection(text: str) -> list[str]:
    """Scan *text* against the known injection regexes.

    Returns the source pattern of every regex that matched, in the
    order the patterns are defined; empty list when clean.
    """
    return [p.pattern for p in _INJECTION_PATTERNS if p.search(text)]
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def _collect_entity_texts(
|
|
106
|
+
ontology_dict: dict[str, Any],
|
|
107
|
+
) -> list[str]:
|
|
108
|
+
"""Extract text fields from entities."""
|
|
109
|
+
texts: list[str] = []
|
|
110
|
+
for entity in ontology_dict.get("entities", []):
|
|
111
|
+
texts.append(entity.get("description", ""))
|
|
112
|
+
return texts
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _collect_relationship_texts(
|
|
116
|
+
ontology_dict: dict[str, Any],
|
|
117
|
+
) -> list[str]:
|
|
118
|
+
"""Extract text fields from relationships."""
|
|
119
|
+
texts: list[str] = []
|
|
120
|
+
for r in ontology_dict.get("relationships", []):
|
|
121
|
+
texts.append(r.get("description", ""))
|
|
122
|
+
return texts
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _collect_constraint_texts(
|
|
126
|
+
ontology_dict: dict[str, Any],
|
|
127
|
+
) -> list[str]:
|
|
128
|
+
"""Extract text fields from domain constraints."""
|
|
129
|
+
texts: list[str] = []
|
|
130
|
+
for c in ontology_dict.get("domain_constraints", []):
|
|
131
|
+
texts.append(c.get("description", ""))
|
|
132
|
+
texts.append(c.get("expression", ""))
|
|
133
|
+
return texts
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _collect_module_texts(
|
|
137
|
+
ontology_dict: dict[str, Any],
|
|
138
|
+
) -> list[str]:
|
|
139
|
+
"""Extract text fields from modules, classes, functions."""
|
|
140
|
+
texts: list[str] = []
|
|
141
|
+
for m in ontology_dict.get("modules", []):
|
|
142
|
+
texts.append(m.get("responsibility", ""))
|
|
143
|
+
texts.append(m.get("test_strategy", ""))
|
|
144
|
+
for cls in m.get("classes", []):
|
|
145
|
+
texts.append(cls.get("description", ""))
|
|
146
|
+
for fn in cls.get("methods", []):
|
|
147
|
+
texts.append(fn.get("docstring", ""))
|
|
148
|
+
for fn in m.get("functions", []):
|
|
149
|
+
texts.append(fn.get("docstring", ""))
|
|
150
|
+
return texts
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def _collect_misc_texts(
|
|
154
|
+
ontology_dict: dict[str, Any],
|
|
155
|
+
) -> list[str]:
|
|
156
|
+
"""Extract text from data_models, dependencies, questions."""
|
|
157
|
+
texts: list[str] = []
|
|
158
|
+
for dm in ontology_dict.get("data_models", []):
|
|
159
|
+
texts.append(dm.get("notes", ""))
|
|
160
|
+
for dep in ontology_dict.get("external_dependencies", []):
|
|
161
|
+
texts.append(dep.get("reason", ""))
|
|
162
|
+
for q in ontology_dict.get("open_questions", []):
|
|
163
|
+
texts.append(q.get("text", ""))
|
|
164
|
+
texts.append(q.get("context", ""))
|
|
165
|
+
texts.append(q.get("resolution", ""))
|
|
166
|
+
return texts
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _collect_text_fields(
    ontology_dict: dict[str, Any],
) -> list[str]:
    """Return every non-empty free-text field found in the ontology."""
    collectors = (
        _collect_entity_texts,
        _collect_relationship_texts,
        _collect_constraint_texts,
        _collect_module_texts,
        _collect_misc_texts,
    )
    gathered: list[str] = []
    for collect in collectors:
        gathered.extend(collect(ontology_dict))
    # Drop empty strings so callers only see real text.
    return [text for text in gathered if text]
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def scan_ontology_for_injection(
    ontology_dict: dict[str, Any],
) -> list[str]:
    """Scan every free-text field of an ontology for injection.

    Produces one warning string per (pattern, field) hit, each quoting
    the matched pattern and the first 80 chars of the offending text.
    Empty list when clean.
    """
    found: list[str] = []
    for field_text in _collect_text_fields(ontology_dict):
        for matched in scan_text_for_injection(field_text):
            snippet = field_text[:80]
            found.append(
                f"Suspicious pattern {matched!r} in: {snippet!r}"
            )
    return found
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"""Shared DAG persistence and snapshot utilities."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import tempfile
|
|
7
|
+
import uuid
|
|
8
|
+
import warnings
|
|
9
|
+
from datetime import datetime, timezone
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from python_agent.dag_integrity import (
|
|
13
|
+
load_or_create_key,
|
|
14
|
+
scan_ontology_for_injection,
|
|
15
|
+
sign_node,
|
|
16
|
+
verify_dag,
|
|
17
|
+
)
|
|
18
|
+
from python_agent.ontology import (
|
|
19
|
+
DAGEdge,
|
|
20
|
+
DAGNode,
|
|
21
|
+
Decision,
|
|
22
|
+
Ontology,
|
|
23
|
+
OntologyDAG,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _default_key_path(dag_path: str) -> str:
|
|
28
|
+
"""Derive key file path from DAG file path."""
|
|
29
|
+
return str(Path(dag_path).parent / ".dag-key")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _sign_unsigned_nodes(
    dag: OntologyDAG, key: str,
) -> None:
    """Attach an integrity hash to every node that lacks one."""
    unsigned = (n for n in dag.nodes if not n.integrity_hash)
    for node in unsigned:
        sign_node(node, key)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _verify_loaded_dag(
    dag: OntologyDAG, key_path: str,
) -> None:
    """Warn (never raise) when any signed node fails verification."""
    try:
        key = load_or_create_key(key_path)
    except (OSError, ValueError):
        # No usable key: verification is silently skipped.
        return
    bad_ids = verify_dag(dag, key)
    if not bad_ids:
        return
    warnings.warn(
        "DAG integrity check failed for nodes: " + ", ".join(bad_ids),
        stacklevel=3,
    )
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _scan_loaded_dag(dag: OntologyDAG) -> None:
    """Emit one warning per injection hit found across all DAG nodes."""
    for node in dag.nodes:
        dumped = node.ontology.model_dump()
        for hit in scan_ontology_for_injection(dumped):
            warnings.warn(f"Node {node.id}: {hit}", stacklevel=3)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _read_file(path: str) -> str | None:
|
|
72
|
+
"""Read file contents, or return None if not found."""
|
|
73
|
+
try:
|
|
74
|
+
with open(path) as f:
|
|
75
|
+
return f.read()
|
|
76
|
+
except FileNotFoundError:
|
|
77
|
+
return None
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _parse_dag(text: str) -> OntologyDAG | None:
    """Deserialize DAG JSON; warn and return None on any failure."""
    try:
        dag = OntologyDAG.from_json(text)
    except Exception as exc:  # boundary: surface any parse/validation error
        warnings.warn(
            f"DAG validation error: {exc}",
            stacklevel=3,
        )
        return None
    return dag
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def load_dag(
    path: str, project_name: str,
    key_path: str | None = None,
) -> OntologyDAG:
    """Load an OntologyDAG from a JSON file at *path*.

    A missing or invalid file yields a fresh empty DAG for
    *project_name*. Successfully loaded DAGs are integrity-verified
    against the key at *key_path* (defaulting to ".dag-key" beside
    the DAG file) and scanned for injection patterns; both checks
    only emit warnings, never raise.
    """
    raw = _read_file(path)
    dag = _parse_dag(raw) if raw is not None else None
    if dag is None:
        return OntologyDAG(project_name=project_name)
    if key_path is None:
        key_path = _default_key_path(path)
    _verify_loaded_dag(dag, key_path)
    _scan_loaded_dag(dag)
    return dag
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def save_dag(
    dag: OntologyDAG, path: str,
    key_path: str | None = None,
) -> None:
    """Save an OntologyDAG to *path*, signing unsigned nodes first.

    Writes atomically: the JSON is written to a temp file in the same
    directory, flushed and fsync'd, then moved over *path* with
    os.replace. os.replace (unlike os.rename) atomically overwrites
    an existing destination on both POSIX and Windows; the fsync
    ensures the data is on disk before the rename, so a crash cannot
    leave a truncated DAG file behind.

    Args:
        dag: The DAG to persist.
        path: Destination JSON file path.
        key_path: Signing-key file; defaults to ".dag-key" next to *path*.

    Raises:
        OSError: If the key or DAG file cannot be written.
    """
    if key_path is None:
        key_path = _default_key_path(path)
    key = load_or_create_key(key_path)
    _sign_unsigned_nodes(dag, key)
    parent_dir = os.path.dirname(os.path.abspath(path))
    fd = tempfile.NamedTemporaryFile(
        mode="w", dir=parent_dir,
        suffix=".tmp", delete=False,
    )
    try:
        fd.write(dag.to_json())
        fd.flush()
        os.fsync(fd.fileno())  # data on disk before the rename commits it
        fd.close()
        os.replace(fd.name, path)  # atomic overwrite, portable to Windows
    except BaseException:
        fd.close()
        try:
            os.unlink(fd.name)
        except OSError:
            pass  # best-effort cleanup; don't mask the original error
        raise
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def make_node_id() -> str:
    """Return a fresh random node identifier (stringified UUID4)."""
    fresh = uuid.uuid4()
    return f"{fresh}"
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def save_snapshot(
    dag: OntologyDAG, ontology: Ontology,
    label: str, decision: Decision | None = None,
) -> str:
    """Snapshot *ontology* as a new DAG node and advance the cursor.

    The ontology is deep-copied into a new node. When a current node
    exists, an edge from it to the new node is recorded, carrying
    *decision* (or a default "save"/"continue" decision with *label*
    as rationale when none is given). The new node becomes current.

    Returns:
        The id of the node that was created.
    """
    timestamp = datetime.now(timezone.utc).isoformat()
    new_id = make_node_id()
    snapshot = DAGNode(
        id=new_id,
        ontology=ontology.model_copy(deep=True),
        created_at=timestamp,
        label=label,
    )
    dag.nodes.append(snapshot)
    parent = dag.current_node_id
    if parent:
        if decision is None:
            decision = Decision(
                question="save",
                options=["continue"],
                chosen="continue",
                rationale=label,
            )
        dag.edges.append(
            DAGEdge(
                parent_id=parent,
                child_id=new_id,
                decision=decision,
                created_at=timestamp,
            )
        )
    dag.current_node_id = new_id
    return new_id
|