tri-star-symbolic-assembly-lang 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. crawler/__init__.py +0 -0
  2. crawler/madmonkey_crawler.py +15 -0
  3. madmonkey/__init__.py +0 -0
  4. madmonkey/intake.py +9 -0
  5. tri_star_symbolic_assembly_lang-0.1.0.dist-info/METADATA +423 -0
  6. tri_star_symbolic_assembly_lang-0.1.0.dist-info/RECORD +89 -0
  7. tri_star_symbolic_assembly_lang-0.1.0.dist-info/WHEEL +5 -0
  8. tri_star_symbolic_assembly_lang-0.1.0.dist-info/entry_points.txt +11 -0
  9. tri_star_symbolic_assembly_lang-0.1.0.dist-info/licenses/LICENSE +63 -0
  10. tri_star_symbolic_assembly_lang-0.1.0.dist-info/top_level.txt +3 -0
  11. tsal/__init__.py +95 -0
  12. tsal/audit/__init__.py +11 -0
  13. tsal/audit/brian_self_audit.py +114 -0
  14. tsal/cli/__init__.py +1 -0
  15. tsal/cli/beast.py +4 -0
  16. tsal/cli/brian.py +4 -0
  17. tsal/cli/brian_optimize.py +6 -0
  18. tsal/cli/meshkeeper.py +4 -0
  19. tsal/cli/party.py +4 -0
  20. tsal/cli/reflect.py +4 -0
  21. tsal/cli/watchdog.py +4 -0
  22. tsal/core/__init__.py +60 -0
  23. tsal/core/connectivity.py +32 -0
  24. tsal/core/constants.py +18 -0
  25. tsal/core/ethics_engine.py +48 -0
  26. tsal/core/executor.py +58 -0
  27. tsal/core/intent_metric.py +17 -0
  28. tsal/core/json_dsl.py +51 -0
  29. tsal/core/logic_gate.py +52 -0
  30. tsal/core/madmonkey_handler.py +10 -0
  31. tsal/core/mesh_logger.py +30 -0
  32. tsal/core/module_registry.py +108 -0
  33. tsal/core/optimizer_utils.py +78 -0
  34. tsal/core/opwords.py +126 -0
  35. tsal/core/phase_math.py +256 -0
  36. tsal/core/phi_math.py +44 -0
  37. tsal/core/reflection.py +104 -0
  38. tsal/core/rev_eng.py +185 -0
  39. tsal/core/spark_translator.py +57 -0
  40. tsal/core/spiral_fusion.py +45 -0
  41. tsal/core/spiral_memory.py +22 -0
  42. tsal/core/spiral_vector.py +39 -0
  43. tsal/core/stack_vm.py +49 -0
  44. tsal/core/state_vector.py +38 -0
  45. tsal/core/symbols.py +70 -0
  46. tsal/core/tokenize_flowchart.py +24 -0
  47. tsal/core/tsal_executor.py +533 -0
  48. tsal/core/voxel.py +16 -0
  49. tsal/renderer/__init__.py +0 -0
  50. tsal/renderer/code_render.py +13 -0
  51. tsal/rl/__init__.py +0 -0
  52. tsal/rl/madmonkey.py +56 -0
  53. tsal/schemas/__init__.py +1 -0
  54. tsal/schemas/python.json +13 -0
  55. tsal/singer/__init__.py +13 -0
  56. tsal/tools/__init__.py +43 -0
  57. tsal/tools/aletheia_checker.py +54 -0
  58. tsal/tools/alignment_guard.py +20 -0
  59. tsal/tools/archetype_fetcher.py +44 -0
  60. tsal/tools/brian/__init__.py +5 -0
  61. tsal/tools/brian/optimizer.py +205 -0
  62. tsal/tools/codec.py +31 -0
  63. tsal/tools/feedback_ingest.py +25 -0
  64. tsal/tools/goal_selector.py +26 -0
  65. tsal/tools/issue_agent.py +67 -0
  66. tsal/tools/kintsugi/__init__.py +1 -0
  67. tsal/tools/kintsugi/kintsugi.py +15 -0
  68. tsal/tools/meshkeeper.py +81 -0
  69. tsal/tools/module_draft.py +54 -0
  70. tsal/tools/party_tricks.py +128 -0
  71. tsal/tools/reflect.py +43 -0
  72. tsal/tools/spiral_audit.py +68 -0
  73. tsal/tools/state_tracker.py +66 -0
  74. tsal/tools/watchdog.py +40 -0
  75. tsal/tristar/__init__.py +4 -0
  76. tsal/tristar/governor.py +56 -0
  77. tsal/tristar/handshake.py +31 -0
  78. tsal/utils/__init__.py +26 -0
  79. tsal/utils/error_dignity.py +20 -0
  80. tsal/utils/fuzzy_spellcheck.py +7 -0
  81. tsal/utils/github_api.py +82 -0
  82. tsal/utils/grammar_db.py +155 -0
  83. tsal/utils/groundnews_api.py +9 -0
  84. tsal/utils/humour_db.py +75 -0
  85. tsal/utils/intent_metrics.py +44 -0
  86. tsal/utils/language_db.py +55 -0
  87. tsal/utils/octopus_api.py +46 -0
  88. tsal/utils/system_status.py +34 -0
  89. tsal/utils/wikipedia_api.py +46 -0
tsal/tools/__init__.py ADDED
@@ -0,0 +1,43 @@
1
+ from .codec import real_time_codec
2
+ from .brian import SymbolicOptimizer, analyze_and_repair
3
+ from .aletheia_checker import find_typos
4
+ from .meshkeeper import scan, render_voxels
5
+ from .watchdog import watch
6
+ from .feedback_ingest import categorize, Feedback
7
+ from .alignment_guard import is_aligned, Change
8
+ from .goal_selector import Goal, score_goals
9
+ from .spiral_audit import audit_path, audit_paths
10
+ from .reflect import reflect
11
+ from .kintsugi.kintsugi import kintsugi_repair
12
+ from .module_draft import generate_template, draft_directory
13
+ from .state_tracker import update_entry, show_entry
14
+ from .archetype_fetcher import fetch_online_mesh, merge_mesh
15
+ from .issue_agent import create_issue, handle_http_error
16
+
17
+ __all__ = [
18
+ "real_time_codec",
19
+ "SymbolicOptimizer",
20
+ "analyze_and_repair",
21
+ "find_typos",
22
+ "scan",
23
+ "render_voxels",
24
+ "watch",
25
+ "categorize",
26
+ "Feedback",
27
+ "is_aligned",
28
+ "Change",
29
+ "Goal",
30
+ "score_goals",
31
+ "audit_path",
32
+ "audit_paths",
33
+ "reflect",
34
+ "kintsugi_repair",
35
+ "generate_template",
36
+ "draft_directory",
37
+ "update_entry",
38
+ "show_entry",
39
+ "fetch_online_mesh",
40
+ "merge_mesh",
41
+ "create_issue",
42
+ "handle_http_error",
43
+ ]
@@ -0,0 +1,54 @@
1
+ import difflib
2
+ import re
3
+ from pathlib import Path
4
+
5
TARGET = "aletheia"
# Known common misspellings that should be flagged immediately
COMMON_TYPOS = {
    "athalaya",  # seen in various docs
    "athaleia",
    "alethei",  # truncated
}

def is_typo(word: str) -> bool:
    """Return True when ``word`` looks like a misspelling of 'aletheia'.

    Exact matches (case-insensitive) are never typos; known misspellings
    are always typos; anything else is judged by fuzzy similarity.
    """
    candidate = word.lower()
    if candidate == TARGET:
        return False
    if candidate in COMMON_TYPOS:
        return True
    # Fuzzy catch-all: flag words that are at least 70% similar to the target.
    similarity = difflib.SequenceMatcher(None, candidate, TARGET).ratio()
    return similarity >= 0.7
21
+
22
def scan_file(path: Path) -> list[tuple[int, str]]:
    """Return (line_number, stripped_line) pairs for lines containing a typo.

    Each offending line is reported once, even if it holds several typos.
    Undecodable bytes are ignored so binary-ish files do not abort the scan.
    """
    word_re = re.compile(r"[A-Za-z_-]+")
    hits: list[tuple[int, str]] = []
    with open(path, "r", encoding="utf-8", errors="ignore") as handle:
        for number, text in enumerate(handle, 1):
            if any(is_typo(token) for token in word_re.findall(text)):
                hits.append((number, text.rstrip()))
    return hits
32
+
33
def find_typos(root: Path) -> dict[str, list[tuple[int, str]]]:
    """Map file path -> typo hits for every text-like file under ``root``.

    Only files with source/doc/config suffixes are scanned; files with no
    hits are omitted from the result.
    """
    allowed = {".py", ".md", ".txt", ".json", ".yaml", ".yml"}
    report: dict[str, list[tuple[int, str]]] = {}
    for candidate in root.rglob("*"):
        if not candidate.is_file() or candidate.suffix.lower() not in allowed:
            continue
        hits = scan_file(candidate)
        if hits:
            report[str(candidate)] = hits
    return report
48
+
49
if __name__ == "__main__":
    # Walk up from .../src/tsal/tools/aletheia_checker.py to the repo root.
    # NOTE(review): parents[3] assumes this exact nesting depth — confirm if
    # the file ever moves.
    repo_root = Path(__file__).resolve().parents[3]
    hits = find_typos(repo_root)
    # Emit grep-style "path:line: text" lines for easy editor navigation.
    for filepath, items in hits.items():
        for lineno, line in items:
            print(f"{filepath}:{lineno}: {line}")
@@ -0,0 +1,20 @@
1
+ """Verify spiral alignment of proposed changes."""
2
+
3
+ from dataclasses import dataclass
4
+
5
+ from tsal.core.spiral_vector import phi_alignment
6
+
7
@dataclass
class Change:
    # Human-readable summary of the proposed change; scanned for banned words.
    description: str
    # Complexity estimate fed into phi_alignment.
    complexity: float
    # Coherence estimate fed into phi_alignment.
    coherence: float
12
+
13
def is_aligned(change: Change, threshold: float = 0.76) -> bool:
    """Return True if change clears φ score and keyword filter.

    A change passes when its phi_alignment score reaches ``threshold`` and
    its description contains no banned keyword.
    """
    if phi_alignment(change.complexity, change.coherence) < threshold:
        return False
    text = change.description.lower()
    return all(word not in text for word in ("coerce", "exploit"))
@@ -0,0 +1,44 @@
1
+ import json
2
+ from pathlib import Path
3
+ from typing import List, Dict
4
+
5
try:
    import requests
except ModuleNotFoundError:  # pragma: no cover - fallback
    # Minimal stand-in covering the only slice of the ``requests`` API used
    # in this module: ``get`` returning an object with ``text``,
    # ``status_code`` and ``raise_for_status``.
    import urllib.request as _u

    class _Resp:
        def __init__(self, text: str, status: int = 200) -> None:
            self.text = text
            self.status_code = status

        def raise_for_status(self) -> None:
            # Same contract as requests: raise on 4xx/5xx statuses.
            if self.status_code >= 400:
                raise RuntimeError(f"status {self.status_code}")

    class requests:
        @staticmethod
        def get(url: str) -> _Resp:
            # NOTE(review): urlopen raises on HTTP error statuses instead of
            # returning a response, unlike real requests — confirm callers
            # only rely on the success path.
            with _u.urlopen(url) as f:
                return _Resp(f.read().decode(), f.getcode())
24
+
25
+
26
def fetch_online_mesh(url: str) -> List[Dict]:
    """Fetch archetype mesh entries from ``url``.

    Raises on HTTP error statuses; the response body must be JSON.
    """
    response = requests.get(url)
    response.raise_for_status()
    return json.loads(response.text)
31
+
32
+
33
def merge_mesh(mesh_path: Path, entries: List[Dict]) -> None:
    """Merge ``entries`` into JSON mesh at ``mesh_path`` by archetype name.

    Entries whose ``name`` already appears in the mesh are skipped; the
    merged list is written back as pretty-printed JSON.
    """
    existing: List[Dict] = (
        json.loads(mesh_path.read_text()) if mesh_path.exists() else []
    )
    seen = {item.get("name") for item in existing}
    for item in entries:
        name = item.get("name")
        if name in seen:
            continue
        existing.append(item)
        seen.add(name)
    mesh_path.write_text(json.dumps(existing, indent=2))
@@ -0,0 +1,5 @@
1
+ """Brian Spiral Code Healer tools."""
2
+
3
+ from .optimizer import SymbolicOptimizer, analyze_and_repair, spiral_optimize
4
+
5
+ __all__ = ["SymbolicOptimizer", "analyze_and_repair", "spiral_optimize"]
@@ -0,0 +1,205 @@
1
+ """Symbolic diff, repair, and spiral resequencer engine."""
2
+
3
+ import ast
4
+ from pathlib import Path
5
+ from typing import List, Dict, Optional, Tuple
6
+
7
+ from tsal.core.rev_eng import Rev_Eng
8
+ from tsal.core.phase_math import phase_match_enhanced
9
+ from tsal.core.optimizer_utils import (
10
+ SymbolicSignature,
11
+ extract_signature,
12
+ )
13
+ from tsal.core.spiral_vector import SpiralVector, phi_alignment
14
+
15
class SymbolicOptimizer:
    """Walks Python AST, computes signatures, and suggests repairs."""

    def __init__(
        self,
        target_signatures: Optional[Dict[str, List[float]]] = None,
        rev_eng: Optional[Rev_Eng] = None,
    ):
        # Desired per-name signature vectors; names absent from this map are
        # compared against their own vector (zero delta by construction).
        self.target_signatures = target_signatures or {}
        self.rev = rev_eng or Rev_Eng(origin="SymbolicOptimizer")

    def analyze(self, code: str) -> List[Tuple[SymbolicSignature, Dict]]:
        """Parse ``code`` and return (signature, phase-metrics) per definition.

        Logs an ANTISPIRAL event and re-raises when the buffer does not parse.
        """
        try:
            tree = ast.parse(code)
        except SyntaxError:
            self.rev.log_event("ANTISPIRAL", file="<buffer>")
            raise
        results = []
        for node in ast.walk(tree):
            if isinstance(
                node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)
            ):
                sig = extract_signature(node, node.name)
                # Fall back to the node's own vector for unknown names.
                target_vec = self.target_signatures.get(sig.name, sig.vector)
                local_state = sig.magnitude()
                target_state = sum(target_vec)
                aligned_state, energy, metrics = phase_match_enhanced(
                    local_state, target_state
                )
                delta = metrics.get("delta", 0)
                self.rev.log_event(
                    "ANALYZE", name=sig.name, delta=delta, energy=energy
                )
                results.append((sig, metrics))
        return results

    def suggest_order(self, signatures: List[SymbolicSignature]) -> List[str]:
        """Return definition names sorted by ascending phase-match energy."""
        scored = []
        for sig in signatures:
            target_vec = self.target_signatures.get(sig.name, sig.vector)
            local_state = sig.magnitude()
            target_state = sum(target_vec)
            _, energy, _ = phase_match_enhanced(local_state, target_state)
            scored.append((sig.name, energy))
        scored.sort(key=lambda x: x[1])
        return [name for name, _ in scored]

    def annotate_code(self, code: str) -> str:
        """Return ``code`` with an OPTENERGY constant injected into each
        definition and a suggested-order header comment prepended.

        NOTE(review): the constant is inserted at position 0 of each body,
        so a definition that already has a docstring has it displaced to
        second place (it stops being the docstring) — confirm intended.
        """
        tree = ast.parse(code)
        signatures = []
        for node in ast.walk(tree):
            if isinstance(
                node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)
            ):
                sig = extract_signature(node, node.name)
                signatures.append(sig)
                target_vec = self.target_signatures.get(sig.name, sig.vector)
                local_state = sig.magnitude()
                target_state = sum(target_vec)
                _, energy, metrics = phase_match_enhanced(
                    local_state, target_state
                )
                # Injected as a bare string constant so it survives unparse.
                comment = ast.Expr(
                    value=ast.Constant(
                        value=f"OPTENERGY {energy:.3f} Δ{metrics['delta']:.3f}"
                    )
                )
                node.body.insert(0, comment)
        annotated = ast.unparse(tree)
        ordered_names = self.suggest_order(signatures)
        header = f"# Suggested order: {', '.join(ordered_names)}\n"
        return header + annotated

    def repair_file(self, file_path: str) -> List[str]:
        """Rewrites the file when reordering is required and returns suggestions.

        The function analyzes the order of functions and classes in ``file_path``.
        If the current ordering differs from the ideal, the file is rewritten with
        the reordered definitions. A list of string suggestions describing the
        deltas is returned regardless of whether rewriting occurred.

        NOTE(review): when rewriting, all non-definition statements (imports,
        module docstring, ``__main__`` guard) are appended AFTER the reordered
        definitions, so decorators or base classes evaluated at definition
        time would reference not-yet-imported names — confirm this is safe
        for the intended inputs. Async functions are excluded here although
        ``analyze`` includes them — confirm the asymmetry is deliberate.
        """
        code = Path(file_path).read_text()
        try:
            tree = ast.parse(code)
        except SyntaxError:
            self.rev.log_event("ANTISPIRAL", file=file_path)
            raise
        items = []
        for node in tree.body:
            if isinstance(node, (ast.FunctionDef, ast.ClassDef)):
                items.append(node.name)
        ideal = self.suggest_order(
            [
                extract_signature(node, node.name)
                for node in tree.body
                if isinstance(node, (ast.FunctionDef, ast.ClassDef))
            ]
        )
        suggestions = []
        for idx, name in enumerate(items):
            ideal_idx = ideal.index(name)
            delta = idx - ideal_idx
            _, energy, metrics = phase_match_enhanced(
                float(idx), float(ideal_idx)
            )
            suggestion = f"{name}: Δ={delta} energy={energy:.3f} φ={metrics['phase_signature']}"
            suggestions.append(suggestion)
        if items != ideal:
            # Rebuild the module body: definitions in ideal order first,
            # then every remaining (non-definition) statement.
            new_body = []
            name_map = {
                node.name: node
                for node in tree.body
                if isinstance(node, (ast.FunctionDef, ast.ClassDef))
            }
            for name in ideal:
                new_body.append(name_map[name])
            for node in tree.body:
                if not isinstance(node, (ast.FunctionDef, ast.ClassDef)):
                    new_body.append(node)
            tree.body = new_body
            Path(file_path).write_text(ast.unparse(tree))
        return suggestions
137
+
138
def analyze_and_repair(
    file_path: str | Path, repair: bool = False
) -> list:
    """Analyze or repair ``file_path``. Directories are processed recursively.

    Returns a list of human-readable suggestion strings. Files that fail to
    parse are reported as ``ANTISPIRAL <path>`` entries instead of raising.

    Fix: the stray mid-module ``from typing import Union`` (PEP 8 violation)
    is removed; the annotation uses the ``str | Path`` union syntax already
    used elsewhere in this package.
    """
    path = Path(file_path)
    if path.is_dir():
        # Recurse over every Python file below the directory.
        results = []
        for file in path.rglob("*.py"):
            results.extend(analyze_and_repair(file, repair=repair))
        return results

    opt = SymbolicOptimizer()
    if repair:
        try:
            return opt.repair_file(str(path))
        except SyntaxError:
            return [f"ANTISPIRAL {path}"]

    try:
        code = path.read_text()
    except IsADirectoryError:
        # Defensive: path changed between is_dir() and read_text().
        return []

    try:
        results = opt.analyze(code)
    except SyntaxError:
        return [f"ANTISPIRAL {path}"]
    return [
        f"{sig.name}: energy={metrics['energy_required']:.3f} Δ={metrics.get('delta',0)}"
        for (sig, metrics) in results
    ]
171
+
172
def spiral_optimize(functions: List[SpiralVector]) -> List[SpiralVector]:
    """Return ``functions`` sorted by φ-alignment score, best first."""

    def alignment(vector: SpiralVector) -> float:
        # Score each vector once; sorted() calls the key once per element.
        return phi_alignment(vector.complexity, vector.coherence)

    return sorted(functions, key=alignment, reverse=True)
180
+
181
def main():
    """CLI entry point: analyze a Python file or rewrite it in spiral order."""
    import argparse

    parser = argparse.ArgumentParser(description="Brian spiral optimizer")
    parser.add_argument("path", help="Python file to analyze")
    parser.add_argument(
        "--repair", action="store_true", help="Rewrite file in spiral order"
    )
    args = parser.parse_args()

    opt = SymbolicOptimizer()
    if args.repair:
        # Repair mode: rewrite the file in-place and report ordering deltas.
        res = opt.repair_file(args.path)
    else:
        # Analyze-only mode: report per-definition energy metrics.
        code = Path(args.path).read_text()
        results = opt.analyze(code)
        res = [
            f"{sig.name}: energy={metrics['energy_required']:.3f} Δ={metrics.get('delta',0)}"
            for (sig, metrics) in results
        ]
    for line in res:
        print(line)

if __name__ == "__main__":
    main()
tsal/tools/codec.py ADDED
@@ -0,0 +1,31 @@
1
+ """Real-time decode/encode helper."""
2
+
3
+ from typing import Iterable, Callable, Optional
4
+ from importlib import resources
5
+
6
+ from tsal.core.json_dsl import LanguageMap, SymbolicProcessor
7
+ from tsal.core.rev_eng import Rev_Eng
8
+
9
def real_time_codec(
    lines: Iterable[str],
    schema: str | None = None,
    transform: Optional[Callable[[list[dict]], list[dict]]] = None,
    rev: Optional[Rev_Eng] = None,
) -> str:
    """Decode lines, run an optional token transform, encode result.

    ``lines`` is any iterable yielding code lines. If ``schema`` is not
    provided, the built-in Python schema is used. ``transform`` receives the
    token list before encoding. ``rev`` logs byte counts for in/out data.
    """
    schema_path = schema or str(
        resources.files("tsal.schemas").joinpath("python.json")
    )
    language = LanguageMap.load(schema_path)
    logger = rev or Rev_Eng(origin="real_time_codec")
    processor = SymbolicProcessor(language, rev_eng=logger)

    tokens = processor.decode(list(lines))
    if transform:
        tokens = transform(tokens)
    return processor.encode(tokens)
@@ -0,0 +1,25 @@
1
+ """Feedback ingestion and scoring for Rev_Eng logs."""
2
+ # TODO: expand scoring rules (experimental)
3
+
4
+ from dataclasses import dataclass
5
+ from typing import Iterable, List
6
+
7
+ from tsal.core.spiral_vector import phi_alignment
8
+
9
@dataclass
class Feedback:
    # Where the feedback came from (``categorize`` always uses "user").
    source: str
    # Raw feedback text.
    content: str
    # φ-alignment score assigned by ``categorize``; 0.0 until scored.
    score: float = 0.0
14
+
15
def _score(line: str) -> float:
    """Score one feedback line: length drives complexity, sentiment coherence.

    Lines mentioning "error" or "bad" are treated as low-coherence.
    """
    complexity = float(len(line)) * 0.1
    lowered = line.lower()
    negative = "error" in lowered or "bad" in lowered
    coherence = 0.1 if negative else 1.0
    return phi_alignment(complexity, coherence)
22
+
23
def categorize(feedback: Iterable[str]) -> List[Feedback]:
    """Return feedback objects with φ-resonance scores."""
    return [
        Feedback(source="user", content=line, score=_score(line))
        for line in feedback
    ]
@@ -0,0 +1,26 @@
1
+ """Score goals for priority based on mesh and alignment.""" # [!INTERNAL STUB]
2
+ # TODO: refine scoring with RL signals (experimental)
3
+
4
from dataclasses import dataclass
from typing import Iterable, List

@dataclass
class Goal:
    # Candidate goal name.
    name: str
    # Estimated benefit to the mesh if pursued.
    mesh_benefit: float
    # Alignment factor multiplied into the benefit.
    alignment: float
    # Estimated cost (subtracted from priority).
    cost: float
    # Novelty bonus (added to priority).
    novelty: float

def _priority(goal: Goal) -> float:
    """Composite priority: aligned benefit plus 10% raw benefit, minus cost, plus novelty."""
    weighted = goal.mesh_benefit * goal.alignment + 0.1 * goal.mesh_benefit
    return weighted - goal.cost + goal.novelty

def score_goals(goals: Iterable[Goal]) -> List[Goal]:
    """Return goals ordered by priority."""
    return sorted(goals, key=_priority, reverse=True)
@@ -0,0 +1,67 @@
1
+ """Autonomous issue creator for mesh agents."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json as _json
6
+ from typing import Dict, Optional
7
+
8
try:  # optional dependency
    import requests  # type: ignore
except ModuleNotFoundError:  # pragma: no cover - fallback used in CI
    import urllib.request

    # Minimal drop-in covering the slice of the requests API used below:
    # ``post`` returning an object with ``text``, ``status_code``,
    # ``json()`` and ``raise_for_status()``.
    class _Resp:
        def __init__(self, text: str, status: int = 200) -> None:
            self.text = text
            self.status_code = status

        def json(self) -> Dict:
            return _json.loads(self.text)

        def raise_for_status(self) -> None:
            # Same contract as requests: raise on 4xx/5xx statuses.
            if self.status_code >= 400:
                raise RuntimeError(f"status {self.status_code}")

    class requests:
        @staticmethod
        def post(url: str, json: Dict, headers: Optional[Dict[str, str]] = None) -> _Resp:
            # NOTE(review): urlopen raises on HTTP error statuses instead of
            # returning a response, unlike real requests — confirm callers
            # rely only on the success path.
            data = _json.dumps(json or {}).encode()
            req = urllib.request.Request(url, data=data, headers=headers or {}, method="POST")
            with urllib.request.urlopen(req) as resp:
                text = resp.read().decode()
                return _Resp(text, resp.getcode())
33
+
34
+
35
def create_issue(repo: str, title: str, body: str, token: str) -> int:
    """Open an issue on ``repo`` and return the issue number.

    Raises on HTTP error statuses; returns 0 when the response carries
    no ``number`` field.
    """
    endpoint = f"https://api.github.com/repos/{repo}/issues"
    resp = requests.post(
        endpoint,
        json={"title": title, "body": body},
        headers={
            "Authorization": f"token {token}",
            "Accept": "application/vnd.github+json",
        },
    )
    resp.raise_for_status()
    return int(resp.json().get("number", 0))
47
+
48
+
49
def sandbox_diagnostics(log: str) -> None:
    """Placeholder diagnostics when auth fails."""
    for line in ("🐒 Mad monkey diagnostics engaged", log):
        print(line)
53
+
54
+
55
def handle_http_error(repo: str, err: Exception, log: str, token: Optional[str] = None) -> None:
    """Create a GitHub issue for auth errors and trigger diagnostics.

    Only 403/404 errors are handled. Without a token we fall back to local
    diagnostics; with one we attempt to file an issue, degrading to
    diagnostics if that also fails.
    """
    message = str(err)
    if "403" not in message and "404" not in message:
        return
    if not token:
        sandbox_diagnostics(log)
        return
    body = f"Error: {message}\n\nLogs:\n```\n{log}\n```\nCheck PAT permissions."
    try:
        create_issue(repo, "Auth failure detected", body, token)
    except Exception as ex:  # pragma: no cover - network faults
        sandbox_diagnostics(f"Issue creation failed: {ex}\n{log}")
+
@@ -0,0 +1 @@
1
+ from .kintsugi import kintsugi_repair
@@ -0,0 +1,15 @@
1
+ from pathlib import Path
2
+ from tsal.tools.brian.optimizer import analyze_and_repair
3
+
4
def kintsugi_repair(file_path: str) -> list[str]:
    """Run repair, and label damage as unique evolution.

    Failures are reported as "Kintsugi vectors" rather than raised.
    """
    try:
        output = analyze_and_repair(file_path, repair=True)
        if not output:
            return ["✅ Already coherent."]
        return [f"✨ [r]evolution vector: {line}" for line in output]
    except Exception as e:
        return [
            f"💀 Damage recognized: {e}",
            "⚡ Marking as potential Kintsugi Vector.",
        ]
@@ -0,0 +1,81 @@
1
+ """Minimal mesh log scanner and voxel viewer."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+ import json
7
+ from pathlib import Path
8
+ from typing import List, Dict, Any
9
+
10
+ import numpy as np
11
+ import matplotlib.pyplot as plt
12
+
13
def scan(log_path: str) -> List[Dict[str, Any]]:
    """Return list of DATA payloads from a mesh log file.

    Malformed JSON lines are skipped; a missing file yields an empty list.
    Only entries with event_type == "DATA" and a dict payload are kept.
    """
    source = Path(log_path)
    payloads: List[Dict[str, Any]] = []
    if not source.exists():
        return payloads
    for raw in source.read_text().splitlines():
        try:
            record = json.loads(raw)
        except json.JSONDecodeError:
            continue  # tolerate partial/corrupt log lines
        is_data = record.get("event_type") == "DATA"
        if is_data and isinstance(record.get("payload"), dict):
            payloads.append(record["payload"])
    return payloads
29
+
30
def render_voxels(voxels: List[Dict[str, Any]]) -> None:
    """Render voxels as a 3D scatter plot (pace × rate × log index).

    No-op when ``voxels`` is empty. Blocks until the plot window is closed.
    """
    if not voxels:
        return
    # Missing fields default to 0 rather than failing.
    xs = np.array([v.get("pace", 0) for v in voxels])
    ys = np.array([v.get("rate", 0) for v in voxels])
    # Z axis is simply the voxel's position in the log.
    zs = np.arange(len(voxels))
    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")
    ax.scatter(xs, ys, zs)
    ax.set_xlabel("pace")
    ax.set_ylabel("rate")
    ax.set_zlabel("index")
    plt.show()
44
+
45
def summarize(voxels: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Return simple stats from the voxel list.

    An empty list yields ``{"voxels": 0}``; otherwise min/max/avg of the
    ``pace`` and ``rate`` fields (missing values default to 0.0).
    """
    if not voxels:
        return {"voxels": 0}

    def field_stats(field: str) -> Dict[str, float]:
        values = np.array([v.get(field, 0.0) for v in voxels], dtype=float)
        return {
            "min": float(values.min()),
            "max": float(values.max()),
            "avg": float(values.mean()),
        }

    return {
        "voxels": len(voxels),
        "pace": field_stats("pace"),
        "rate": field_stats("rate"),
    }
64
+
65
def main() -> None:
    """CLI: scan a mesh log and dump, render, or summarize its voxels."""
    parser = argparse.ArgumentParser(description="TSAL Meshkeeper")
    # Default log location produced by the mesh logger.
    parser.add_argument("log", nargs="?", default="data/mesh_log.jsonl")
    parser.add_argument("--render", action="store_true")
    parser.add_argument("--dump", metavar="PATH", help="write raw voxels to file")
    args = parser.parse_args()
    voxels = scan(args.log)
    if args.dump:
        # --dump takes precedence over --render: write raw payloads and exit.
        Path(args.dump).write_text(json.dumps(voxels))
        return
    if args.render:
        render_voxels(voxels)
    else:
        print(json.dumps(summarize(voxels)))

if __name__ == "__main__":
    main()
@@ -0,0 +1,54 @@
1
+ import ast
2
+ from pathlib import Path
3
+ from typing import List
4
+ # TODO: rewrite using AST pattern matching (experimental)
5
+
6
+
7
def generate_template(file_path: str) -> str:
    """Return a stripped template of ``file_path`` preserving public interface.

    Keeps the module docstring, imports, and every top-level function and
    class. Function bodies become ``pass``. Class bodies keep their
    docstring, attribute assignments, nested classes, and method signatures
    (method bodies become ``pass``) — previously whole class bodies were
    collapsed to a single ``pass``, destroying every method signature,
    contradicting the "preserving public interface" contract.
    """
    path = Path(file_path)
    tree = ast.parse(path.read_text())

    new_body: List[ast.stmt] = []
    body = list(tree.body)

    # Preserve a leading module docstring verbatim.
    if body and _is_docstring(body[0]):
        new_body.append(body.pop(0))

    for node in body:
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            new_body.append(node)
        elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            node.body = [ast.Pass()]
            new_body.append(node)
        elif isinstance(node, ast.ClassDef):
            _strip_class(node)
            new_body.append(node)

    tree.body = new_body
    return ast.unparse(tree)


def _is_docstring(node: ast.stmt) -> bool:
    """True when ``node`` is a bare string-literal expression statement."""
    return (
        isinstance(node, ast.Expr)
        and isinstance(node.value, ast.Constant)
        and isinstance(node.value.value, str)
    )


def _strip_class(node: ast.ClassDef) -> None:
    """Reduce a class body to docstring + attributes + method stubs, in place."""
    kept: List[ast.stmt] = []
    body = list(node.body)
    if body and _is_docstring(body[0]):
        kept.append(body.pop(0))
    for item in body:
        if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
            item.body = [ast.Pass()]
            kept.append(item)
        elif isinstance(item, ast.ClassDef):
            _strip_class(item)  # nested classes keep their interface too
            kept.append(item)
        elif isinstance(item, (ast.Assign, ast.AnnAssign)):
            kept.append(item)  # class attributes are part of the interface
    # An emptied class still needs a syntactically valid body.
    node.body = kept or [ast.Pass()]
27
+
28
+
29
def draft_directory(base: Path, dest: Path) -> List[Path]:
    """Generate templates for ``base`` under ``dest`` directory.

    Mirrors the relative layout of ``base`` and returns the written paths.
    """
    written: List[Path] = []
    for source in base.rglob("*.py"):
        out_path = dest / source.relative_to(base)
        out_path.parent.mkdir(parents=True, exist_ok=True)
        out_path.write_text(generate_template(str(source)))
        written.append(out_path)
    return written
39
+
40
+
41
def main() -> None:
    """CLI: write interface templates for a package tree."""
    import argparse

    parser = argparse.ArgumentParser(description="Generate interface templates")
    # Defaults assume execution from the repository root.
    parser.add_argument("base", nargs="?", default="src/tsal")
    parser.add_argument("--dest", default="drafts")
    args = parser.parse_args()

    paths = draft_directory(Path(args.base), Path(args.dest))
    print(f"Generated {len(paths)} templates under {args.dest}")


if __name__ == "__main__":
    main()