cortex_loop-0.1.0a1-py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (52)
  1. cortex/__init__.py +7 -0
  2. cortex/adapters.py +339 -0
  3. cortex/blocklist.py +51 -0
  4. cortex/challenges.py +210 -0
  5. cortex/cli.py +7 -0
  6. cortex/core.py +601 -0
  7. cortex/core_helpers.py +190 -0
  8. cortex/data/identity_preamble.md +5 -0
  9. cortex/data/layer1_part_a.md +65 -0
  10. cortex/data/layer1_part_b.md +17 -0
  11. cortex/executive.py +295 -0
  12. cortex/foundation.py +185 -0
  13. cortex/genome.py +348 -0
  14. cortex/graveyard.py +226 -0
  15. cortex/hooks/__init__.py +27 -0
  16. cortex/hooks/_shared.py +167 -0
  17. cortex/hooks/post_tool_use.py +13 -0
  18. cortex/hooks/pre_tool_use.py +13 -0
  19. cortex/hooks/session_start.py +13 -0
  20. cortex/hooks/stop.py +13 -0
  21. cortex/invariants.py +258 -0
  22. cortex/packs.py +118 -0
  23. cortex/repomap.py +6 -0
  24. cortex/requirements.py +497 -0
  25. cortex/retry.py +312 -0
  26. cortex/stop_contract.py +217 -0
  27. cortex/stop_payload.py +122 -0
  28. cortex/stop_policy.py +100 -0
  29. cortex/stop_runtime.py +400 -0
  30. cortex/stop_signals.py +75 -0
  31. cortex/store.py +793 -0
  32. cortex/templates/__init__.py +10 -0
  33. cortex/utils.py +58 -0
  34. cortex_loop-0.1.0a1.dist-info/METADATA +121 -0
  35. cortex_loop-0.1.0a1.dist-info/RECORD +52 -0
  36. cortex_loop-0.1.0a1.dist-info/WHEEL +5 -0
  37. cortex_loop-0.1.0a1.dist-info/entry_points.txt +3 -0
  38. cortex_loop-0.1.0a1.dist-info/licenses/LICENSE +21 -0
  39. cortex_loop-0.1.0a1.dist-info/top_level.txt +3 -0
  40. cortex_ops_cli/__init__.py +3 -0
  41. cortex_ops_cli/_adapter_validation.py +119 -0
  42. cortex_ops_cli/_check_report.py +454 -0
  43. cortex_ops_cli/_check_report_output.py +270 -0
  44. cortex_ops_cli/_openai_bridge_probe.py +241 -0
  45. cortex_ops_cli/_openai_bridge_protocol.py +469 -0
  46. cortex_ops_cli/_runtime_profile_templates.py +341 -0
  47. cortex_ops_cli/_runtime_profiles.py +445 -0
  48. cortex_ops_cli/gemini_hooks.py +301 -0
  49. cortex_ops_cli/main.py +911 -0
  50. cortex_ops_cli/openai_app_server_bridge.py +375 -0
  51. cortex_repomap/__init__.py +1 -0
  52. cortex_repomap/engine.py +1201 -0
cortex/core.py ADDED
@@ -0,0 +1,601 @@
+from __future__ import annotations
+
+import os
+from collections.abc import Mapping
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+from uuid import uuid4
+
+from .adapters import EventAdapter, load_adapter
+from .blocklist import DEFAULT_BLOCKED_TOOLS, evaluate_blocklist
+from .challenges import ChallengeEnforcer
+from .core_helpers import (
+    extract_required_requirement_ids,
+    foundation_warnings_from_snapshot,
+    session_changed_files_since_baseline,
+    session_foundation_snapshot,
+    session_git_snapshot,
+    session_metadata,
+    session_required_requirement_ids,
+    session_witness_context,
+)
+from .executive import (
+    get_base_executive_function,
+    get_identity_preamble,
+    get_learned_executive_function,
+    record_stop_failure_event,
+    run_decay,
+)
+from .foundation import FoundationAnalyzer
+from .genome import CortexGenome, load_genome
+from .graveyard import Graveyard, explainability_warnings
+from .invariants import InvariantRunner
+from .retry import compute_retry_verdict
+from .stop_contract import resolve_stop_contract
+from .stop_runtime import StopPathRunner
+from .store import SQLiteStore
+from .utils import _as_string_list
+
+
+@dataclass(slots=True)
+class KernelContext:
+    root: Path
+    genome_path: Path
+    db_path: Path
+    genome: CortexGenome
+    store: SQLiteStore
+
+
+class CortexKernel:
+    """Hook-driven orchestration kernel for Cortex subsystems."""
+
+    def __init__(
+        self,
+        root: str | Path | None = None,
+        *,
+        config_path: str | Path | None = None,
+        db_path: str | Path | None = None,
+        adapter_name: str | None = None,
+        adapter: EventAdapter | None = None,
+    ) -> None:
+        if adapter_name is not None:
+            raise ValueError(
+                "adapter_name is no longer supported. Configure [runtime].adapter in cortex.toml."
+            )
+        repo_root = Path(root or os.getcwd()).resolve()
+        genome_path = Path(config_path).resolve() if config_path else repo_root / "cortex.toml"
+        store = SQLiteStore(Path(db_path).resolve() if db_path else repo_root / ".cortex" / "cortex.db")
+        store.initialize()
+        genome = load_genome(genome_path)
+        self.ctx = KernelContext(
+            root=repo_root,
+            genome_path=genome_path,
+            db_path=store.db_path,
+            genome=genome,
+            store=store,
+        )
+        self.foundation = FoundationAnalyzer(repo_root, genome.foundation)
+        self.graveyard = Graveyard(store, genome.graveyard)
+        self.challenges = ChallengeEnforcer(store, genome.challenges)
+        self.invariants = InvariantRunner(
+            repo_root,
+            store,
+            genome.invariants,
+            genome.hooks,
+            trust_profile=genome.project.trust_profile,
+        )
+        self.adapter = adapter or load_adapter(genome.runtime.adapter)
+        self.stop_path = StopPathRunner(
+            root=repo_root,
+            store=store,
+            genome=genome,
+            challenges=self.challenges,
+            invariants=self.invariants,
+            graveyard=self.graveyard,
+            session_metadata_loader=lambda active_store, active_session_id: session_metadata(
+                active_store, active_session_id
+            ),
+            session_git_snapshotter=lambda active_root: session_git_snapshot(active_root),
+            session_changed_files_since_baseline_fn=(
+                lambda **kwargs: session_changed_files_since_baseline(**kwargs)
+            ),
+            session_required_requirement_ids_loader=(
+                lambda active_store, active_session_id: session_required_requirement_ids(
+                    active_store, active_session_id
+                )
+            ),
+            session_witness_context_loader=(
+                lambda active_store, active_session_id: session_witness_context(
+                    active_store, active_session_id
+                )
+            ),
+        )
+        self._known_sessions: set[str] = set()
+
+    def on_session_start(self, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        payload = self.adapter.normalize("session_start", payload).payload
+        session_id = self._session_id(payload)
+        part_a_full, _part_b = get_base_executive_function()
+        executive_cfg = self.ctx.genome.executive
+        part_a = part_a_full if executive_cfg.part_a_mode != "once_per_project" else ""
+        identity_preamble = ""
+        required_requirement_ids = extract_required_requirement_ids(payload)
+        session_meta: dict[str, Any] = {"hook": "SessionStart"}
+        session_counter = self.ctx.store.allocate_session_counter(session_id)
+        session_meta["session_counter"] = session_counter
+        if required_requirement_ids:
+            session_meta["required_requirement_ids"] = required_requirement_ids
+        session_meta["git_snapshot"] = session_git_snapshot(self.ctx.root)
+        self._record_event(session_id, "SessionStart", payload)
+
+        learned_patterns = ""
+        if executive_cfg.enabled:
+            if executive_cfg.inject_identity_preamble:
+                identity_preamble = get_identity_preamble()
+            pruned = run_decay(
+                self.ctx.store,
+                halflife_sessions=executive_cfg.halflife_sessions,
+                threshold=executive_cfg.decay_threshold,
+                min_hold_sessions=executive_cfg.min_hold_sessions,
+            )
+            session_meta["executive_decay_pruned"] = pruned
+            learned_patterns = get_learned_executive_function(
+                self.ctx.store,
+                halflife_sessions=executive_cfg.halflife_sessions,
+                inject_threshold=executive_cfg.inject_threshold,
+                decay_threshold=executive_cfg.decay_threshold,
+                max_entries=executive_cfg.max_entries,
+                max_tokens=executive_cfg.max_tokens,
+                min_hold_sessions=executive_cfg.min_hold_sessions,
+            )
+
+        foundation_report = self.foundation.analyze()
+        session_meta["foundation"] = {
+            "warnings": list(foundation_report.warnings),
+            "findings": [finding.to_dict() for finding in foundation_report.findings],
+        }
+        self.ctx.store.upsert_session_start(
+            session_id=session_id,
+            status="running",
+            genome_path=self.ctx.genome.source_path,
+            metadata=session_meta,
+        )
+        self._known_sessions.add(session_id)
+        task_summary = str(payload.get("task") or payload.get("objective") or "")
+        target_files = _as_string_list(payload.get("target_files"))
+        graveyard_matches = [m.to_dict() for m in self.graveyard.find_similar(task_summary, target_files)]
+        repomap_summary = self._session_start_repomap(session_id=session_id, payload=payload)
+
+        warnings = list(foundation_report.warnings)
+        if self.ctx.genome.parse_error:
+            warnings.append(f"Config parse error in {self.ctx.genome.source_path}: {self.ctx.genome.parse_error}")
+        if self.ctx.genome.load_warnings:
+            warnings.extend(f"Config warning: {warning}" for warning in self.ctx.genome.load_warnings)
+        if graveyard_matches:
+            warnings.append(f"Found {len(graveyard_matches)} graveyard match(es) relevant to this session.")
+            warnings.extend(explainability_warnings(graveyard_matches))
+        if repomap_summary and repomap_summary.get("warning"):
+            warnings.append(str(repomap_summary["warning"]))
+        graveyard_context = _graveyard_context_block(graveyard_matches, self.ctx.genome.graveyard)
+        if executive_cfg.part_a_mode == "once_per_project":
+            part_a = part_a_full if self.ctx.store.claim_meta_once("executive.part_a_injected") else ""
+        context_blocks: list[str] = []
+        if identity_preamble:
+            context_blocks.append(identity_preamble)
+        if part_a:
+            context_blocks.append(part_a)
+        if learned_patterns:
+            context_blocks.append(learned_patterns)
+        if graveyard_context:
+            context_blocks.append(graveyard_context)
+
+        return self._response(
+            hook="SessionStart",
+            session_id=session_id,
+            warnings=warnings,
+            foundation=foundation_report.to_dict(),
+            graveyard_matches=graveyard_matches,
+            repomap=repomap_summary,
+            required_requirement_ids=required_requirement_ids,
+            executive_context={
+                "identity_preamble": identity_preamble,
+                "part_a": part_a,
+                "learned": learned_patterns,
+            },
+            context_blocks=context_blocks,
+        )
+
+    def on_pre_tool_use(self, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        payload = self.adapter.normalize("pre_tool_use", payload).payload
+        session_id = self._session_id(payload)
+        tool_name = str(payload.get("tool_name") or "").strip() or None
+        self._record_event(
+            session_id,
+            "PreToolUse",
+            payload,
+            tool_name=tool_name,
+            status=str(payload.get("status")) if payload.get("status") is not None else None,
+        )
+
+        warnings: list[str] = []
+        proceed = True
+
+        # --- Blocklist gate ---------------------------------------------------
+        bl_cfg = self.ctx.genome.blocklist
+        if bl_cfg.enabled:
+            cfg_blocked = frozenset(t.strip().lower() for t in bl_cfg.blocked_tools if t.strip())
+            effective_blocked = (cfg_blocked | DEFAULT_BLOCKED_TOOLS) if cfg_blocked else DEFAULT_BLOCKED_TOOLS
+            effective_allowed = frozenset(t.strip().lower() for t in bl_cfg.allowed_tools if t.strip())
+            verdict = evaluate_blocklist(
+                tool_name,
+                blocked_tools=effective_blocked,
+                allowed_tools=effective_allowed,
+                fail_closed=bl_cfg.fail_closed,
+            )
+            if verdict.blocked:
+                proceed = False
+                warnings.append(
+                    f"Tool '{tool_name or 'unknown'}' blocked by denylist ({verdict.reason})."
+                )
+
+        target_files = _as_string_list(payload.get("target_files")) + _as_string_list(payload.get("planned_files"))
+        if target_files:
+            warnings.extend(self._foundation_warnings(session_id=session_id, target_files=target_files))
+
+        return self._response(
+            hook="PreToolUse",
+            session_id=session_id,
+            warnings=warnings,
+            proceed=proceed,
+        )
+
+    def on_session_marker(self, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        payload = self.adapter.normalize("session_marker", payload).payload
+        label = str(payload.get("label") or "").strip()
+        if not label:
+            raise ValueError("session_marker requires non-empty 'label'.")
+        session_id = str(payload.get("session_id") or "").strip()
+        if not session_id:
+            raise ValueError("session_marker requires session_id.")
+        self._record_event(session_id, "SessionMarker", {"label": label})
+        return self._response(hook="SessionMarker", session_id=session_id, warnings=[], label=label)
+
+    def on_post_tool_use(self, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        payload = self.adapter.normalize("post_tool_use", payload).payload
+        session_id = self._session_id(payload)
+        tool_name = str(payload.get("tool_name") or "").strip() or None
+        self._record_event(
+            session_id,
+            "PostToolUse",
+            payload,
+            tool_name=tool_name,
+            status=str(payload.get("status")) if payload.get("status") is not None else None,
+        )
+
+        warnings: list[str] = []
+        retry_verdict = None
+        if str(payload.get("status", "")).lower() in {"error", "failed", "fail"}:
+            summary = str(payload.get("error") or payload.get("message") or "")
+            target_files = _as_string_list(payload.get("target_files"))
+            matches = self.graveyard.find_similar(summary, target_files, max_matches=3)
+            if matches:
+                warnings.append(
+                    f"Tool failure resembles {len(matches)} graveyard entry/entries; review before retrying."
+                )
+                warnings.extend(explainability_warnings([m.to_dict() for m in matches]))
+
+        if self.ctx.genome.retry.enabled:
+            retry_verdict = compute_retry_verdict(
+                store=self.ctx.store,
+                session_id=session_id,
+                payload=payload,
+                max_retries=self.ctx.genome.retry.max_retries,
+            )
+            if retry_verdict:
+                self._record_event(
+                    session_id,
+                    "RetryConsume",
+                    {
+                        "should_retry": retry_verdict.should_retry,
+                        "reason": retry_verdict.reason,
+                        "failure_class": retry_verdict.failure_class,
+                        "status": str(payload.get("status") or ""),
+                        "tool_name": tool_name,
+                        "budget_remaining": retry_verdict.budget_remaining,
+                        "budget_exhausted": retry_verdict.budget_exhausted,
+                        "decision_code": retry_verdict.decision_code,
+                        "failure_signature": retry_verdict.failure_signature,
+                    },
+                    tool_name=tool_name,
+                    status="consumed" if retry_verdict.should_retry else "rejected",
+                )
+
+        retry_info: dict[str, Any] | None = None
+        if retry_verdict is not None:
+            retry_info = {
+                "should_retry": retry_verdict.should_retry,
+                "hard_stop": retry_verdict.hard_stop,
+                "failure_class": retry_verdict.failure_class,
+                "reason": retry_verdict.reason,
+                "budget_remaining": retry_verdict.budget_remaining,
+                "budget_exhausted": retry_verdict.budget_exhausted,
+                "decision_code": retry_verdict.decision_code,
+            }
+            if retry_verdict.hard_stop:
+                warnings.append(f"Hard stop: non-retryable failure ({retry_verdict.reason}).")
+            elif retry_verdict.decision_code == "no_delta":
+                warnings.append("Retry suppressed: no delta detected for repeated failure signature.")
+            elif retry_verdict.decision_code == "retry_contention":
+                warnings.append("Retry contention detected; retry slot was not acquired. Re-evaluate and retry after state settles.")
+            elif retry_verdict.decision_code == "reason_budget_exhausted":
+                warnings.append("Reason-specific retry budget exhausted; no further retries allowed for this failure reason.")
+            elif retry_verdict.budget_exhausted:
+                warnings.append("Retry budget exhausted; no further retries allowed.")
+
+        proceed = not (retry_verdict is not None and retry_verdict.hard_stop)
+
+        return self._response(
+            hook="PostToolUse",
+            session_id=session_id,
+            warnings=warnings,
+            proceed=proceed,
+            retry=retry_info,
+        )
+
+    def on_stop(self, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        payload = self.adapter.normalize("stop", payload).payload
+        session_id = self._session_id(payload)
+        stop_contract = resolve_stop_contract(
+            payload,
+            allow_message_fallback=self.ctx.genome.hooks.allow_message_stop_fallback,
+            require_structured_stop_payload=self.ctx.genome.hooks.require_structured_stop_payload,
+        )
+        self._record_event(
+            session_id,
+            "Stop",
+            payload,
+            capture_git_snapshot=True,
+        )
+        stop_outcome = self.stop_path.run(
+            session_id=session_id,
+            payload=payload,
+            stop_contract=stop_contract,
+        )
+        requirement_audit = stop_outcome.requirement_audit
+        truth_claims = stop_outcome.truth_claims
+        challenge_report = stop_outcome.challenge_report
+        invariant_report = stop_outcome.invariant_report
+        requirement_audit_gap = requirement_audit.gap
+        truth_claims_gap = truth_claims.gap
+        requirements_gate_gap = requirement_audit_gap or truth_claims_gap
+        executive_record, executive_signature = (None, None)
+        if self.ctx.genome.executive.enabled:
+            executive_record, executive_signature = record_stop_failure_event(
+                self.ctx.store,
+                session_id=session_id,
+                structured_stop_violation=stop_outcome.structured_stop_violation,
+                challenge_coverage_missing=stop_outcome.missing_challenge_coverage,
+                challenge_report=None if challenge_report is None else challenge_report.to_dict(),
+                requirements_gate_gap=requirements_gate_gap,
+                requirement_audit_report=requirement_audit.report,
+                truth_claims_report=truth_claims.report,
+                invariant_report=None if invariant_report is None else invariant_report.to_dict(),
+                signature_claim=lambda sig: self.ctx.store.claim_executive_stop_signature(session_id, sig),
+            )
+
+        self.ctx.store.close_session(
+            session_id=session_id,
+            status=stop_outcome.session_status,
+            metadata=self.stop_path.close_session_metadata(
+                outcome=stop_outcome,
+                stop_contract=stop_contract,
+                executive_signature=executive_signature,
+                executive_record=executive_record,
+            ),
+        )
+
+        return self._response(
+            hook="Stop",
+            session_id=session_id,
+            warnings=stop_outcome.warnings,
+            **self.stop_path.response_payload(
+                outcome=stop_outcome,
+                stop_contract=stop_contract,
+            ),
+        )
+
+    def dispatch(self, event_name: str, payload: Mapping[str, Any] | None = None) -> dict[str, Any]:
+        event_name = self.adapter.normalize(event_name, None).name
+        if event_name == "session_start":
+            return self.on_session_start(payload)
+        if event_name == "session_marker":
+            return self.on_session_marker(payload)
+        if event_name == "pre_tool_use":
+            return self.on_pre_tool_use(payload)
+        if event_name == "post_tool_use":
+            return self.on_post_tool_use(payload)
+        if event_name == "stop":
+            return self.on_stop(payload)
+        raise ValueError(f"Unknown hook event: {event_name}")
+
+    def _record_event(
+        self,
+        session_id: str,
+        hook: str,
+        payload: Mapping[str, Any],
+        *,
+        tool_name: str | None = None,
+        status: str | None = None,
+        capture_git_snapshot: bool = True,
+    ) -> None:
+        self._ensure_session_started(session_id=session_id, hook=hook, capture_git_snapshot=capture_git_snapshot)
+        self.ctx.store.record_event(
+            session_id=session_id,
+            hook=hook,
+            payload=dict(payload),
+            tool_name=tool_name,
+            status=status,
+        )
+
+    def _response(self, *, hook: str, session_id: str, warnings: list[str], **extra: Any) -> dict[str, Any]:
+        response = {
+            "ok": True,
+            "hook": hook,
+            "session_id": session_id,
+            "mode": self.ctx.genome.hooks.mode,
+            "warnings": warnings,
+            **extra,
+        }
+        if not self.ctx.genome.hooks.minimal_response:
+            response["config"] = {
+                "genome_path": self._response_path(self.ctx.genome_path),
+                "db_path": self._response_path(self.ctx.db_path),
+            }
+        return response
+
+    def _ensure_session_started(self, *, session_id: str, hook: str, capture_git_snapshot: bool = True) -> None:
+        if session_id in self._known_sessions:
+            return
+        metadata: dict[str, Any] = {"hook": hook, "auto_started": hook != "SessionStart"}
+        if not session_metadata(self.ctx.store, session_id) and hook != "SessionStart" and capture_git_snapshot:
+            metadata["git_snapshot"] = session_git_snapshot(self.ctx.root)
+        self.ctx.store.ensure_session_start(
+            session_id=session_id,
+            status="running",
+            genome_path=self.ctx.genome.source_path,
+            metadata=metadata,
+        )
+        self._known_sessions.add(session_id)
+
+    def _session_start_repomap(
+        self,
+        *,
+        session_id: str,
+        payload: Mapping[str, Any],
+    ) -> dict[str, Any] | None:
+        repomap_cfg = self.ctx.genome.repomap
+        if not (repomap_cfg.enabled and repomap_cfg.run_on_session_start):
+            return None
+
+        focus_files = _as_string_list(payload.get("target_files"))
+        try:
+            from .repomap import run_repomap
+
+            result = run_repomap(
+                root=self.ctx.root,
+                repomap_config=repomap_cfg,
+                focus_files=focus_files or None,
+                session_id=session_id,
+                timeout_ms=repomap_cfg.session_start_timeout_ms,
+            )
+        except Exception as exc:  # noqa: BLE001
+            summary = {
+                "ok": False,
+                "artifact_path": None,
+                "method": "none",
+                "scope": list(repomap_cfg.watch_paths),
+                "stats": {
+                    "files_parsed": 0,
+                    "symbols_found": 0,
+                    "graph_edges": 0,
+                    "byte_count": 0,
+                },
+                "top_ranked_files": [],
+                "error": {"code": "internal_error", "message": str(exc)},
+                "warning": "Repo-map generation failed during session start (non-blocking).",
+            }
+            self._record_event(
+                session_id,
+                "RepoMap",
+                {
+                    "trigger": "SessionStart",
+                    "ok": False,
+                    "error": summary["error"],
+                    "scope": summary["scope"],
+                },
+                status="error",
+            )
+            return summary
+
+        artifact = result.artifact
+        top_ranked_files = [entry.path for entry in artifact.ranking[:5]]
+        summary = {
+            "ok": bool(result.ok),
+            "artifact_path": result.artifact_path,
+            "method": str(artifact.provenance.get("method", "none")),
+            "scope": list(artifact.provenance.get("scope", [])),
+            "stats": dict(artifact.stats),
+            "top_ranked_files": top_ranked_files,
+        }
+        event_payload: dict[str, Any] = {
+            "trigger": "SessionStart",
+            "ok": bool(result.ok),
+            "artifact_path": result.artifact_path,
+            "method": summary["method"],
+            "scope": summary["scope"],
+            "stats": summary["stats"],
+            "top_ranked_files": top_ranked_files,
+        }
+        if not result.ok and artifact.error:
+            error = {
+                "code": str(artifact.error.get("code", "internal_error")),
+                "message": str(artifact.error.get("message", "Repo-map generation failed")),
+            }
+            summary["error"] = error
+            summary["warning"] = f"Repo-map warning: {error['message']}"
+            event_payload["error"] = error
+
+        self._record_event(
+            session_id,
+            "RepoMap",
+            event_payload,
+            status="ok" if result.ok else "error",
+        )
+        return summary
+
+    def _response_path(self, path: Path) -> str:
+        resolved = path.resolve()
+        return str(resolved.relative_to(self.ctx.root)) if resolved.is_relative_to(self.ctx.root) else str(resolved)
+
+    @staticmethod
+    def _session_id(payload: Mapping[str, Any]) -> str:
+        raw = payload.get("session_id")
+        if isinstance(raw, str) and raw.strip():
+            return raw.strip()
+        return f"sess-{uuid4().hex[:12]}"
+
+    def _foundation_warnings(self, *, session_id: str, target_files: list[str]) -> list[str]:
+        return foundation_warnings_from_snapshot(
+            foundation=self.foundation,
+            snapshot=session_foundation_snapshot(self.ctx.store, session_id),
+            target_files=target_files,
+        )
+
+
+def _graveyard_context_block(matches: list[dict[str, Any]], config: Any) -> str:
+    if not matches:
+        return ""
+    max_matches = max(1, int(getattr(config, "context_max_matches", 2)))
+    summary_chars = max(40, int(getattr(config, "context_summary_chars", 140)))
+    reason_chars = max(40, int(getattr(config, "context_reason_chars", 200)))
+    token_budget = max(50, int(getattr(config, "context_max_tokens", 300)))
+
+    lines = ["## Graveyard context"]
+    used_tokens = _approx_tokens(lines[0])
+    for idx, match in enumerate(matches[:max_matches], start=1):
+        summary = _truncate_context_text(str(match.get("summary") or "").strip(), summary_chars)
+        reason = _truncate_context_text(str(match.get("reason") or "").strip(), reason_chars)
+        files = _as_string_list(match.get("files"))
+        entry_lines = [f"{idx}. {summary}" if summary else f"{idx}. Prior failure pattern"]
+        if reason:
+            entry_lines.append(f"Reason: {reason}")
+        if files:
+            entry_lines.append("Files: " + ", ".join(files[:5]))
+        entry_tokens = _approx_tokens("\n".join(entry_lines))
+        if used_tokens + entry_tokens > token_budget:
+            break
+        lines.extend(entry_lines)
+        used_tokens += entry_tokens
+    return "\n".join(lines).strip() if len(lines) > 1 else ""
+
+
+def _truncate_context_text(text: str, max_chars: int) -> str:
+    if len(text) <= max_chars:
+        return text
+    return text[: max(1, max_chars - 3)].rstrip() + "..."
+
+
+def _approx_tokens(text: str) -> int:
+    return max(1, len(text) // 4)
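
For reviewers who want to exercise the published API, here is a minimal sketch of a host harness driving the kernel above. The event names, payload keys, and response keys ("warnings", "proceed") come from cortex/core.py as shipped; the session id, task, and tool name are hypothetical, and the sketch assumes a cortex.toml at the repository root.

# Hypothetical harness; values are illustrative only.
from cortex.core import CortexKernel

kernel = CortexKernel(root=".")  # resolves cortex.toml and .cortex/cortex.db under root

start = kernel.dispatch("session_start", {"session_id": "sess-demo", "task": "triage parser bug"})
for warning in start["warnings"]:
    print("session warning:", warning)

pre = kernel.dispatch("pre_tool_use", {"session_id": "sess-demo", "tool_name": "Bash"})
if pre["proceed"]:
    # ... run the tool, then report its outcome back to the kernel
    kernel.dispatch("post_tool_use", {"session_id": "sess-demo", "tool_name": "Bash", "status": "ok"})

stop = kernel.dispatch("stop", {"session_id": "sess-demo"})
print("stop warnings:", stop["warnings"])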