loki-mode 6.71.1 → 6.72.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. package/README.md +9 -1
  2. package/SKILL.md +2 -2
  3. package/VERSION +1 -1
  4. package/autonomy/hooks/migration-hooks.sh +26 -0
  5. package/autonomy/loki +429 -92
  6. package/autonomy/run.sh +219 -38
  7. package/dashboard/__init__.py +1 -1
  8. package/dashboard/server.py +101 -19
  9. package/docs/INSTALLATION.md +20 -11
  10. package/docs/bug-fixes/agent-01-cli-fixes.md +101 -0
  11. package/docs/bug-fixes/agent-02-purplelab-fixes.md +88 -0
  12. package/docs/bug-fixes/agent-03-dashboard-fixes.md +119 -0
  13. package/docs/bug-fixes/agent-04-memory-fixes.md +105 -0
  14. package/docs/bug-fixes/agent-05-provider-fixes.md +86 -0
  15. package/docs/bug-fixes/agent-06-integration-fixes.md +101 -0
  16. package/docs/bug-fixes/agent-07-dash-run-fixes.md +101 -0
  17. package/docs/bug-fixes/agent-08-docker-fixes.md +164 -0
  18. package/docs/bug-fixes/agent-09-e2e-build-fixes.md +69 -0
  19. package/docs/bug-fixes/agent-10-e2e-fullstack-fixes.md +102 -0
  20. package/docs/bug-fixes/agent-11-e2e-session-fixes.md +70 -0
  21. package/docs/bug-fixes/agent-12-scenario-fixes.md +120 -0
  22. package/docs/bug-fixes/agent-13-enterprise-fixes.md +143 -0
  23. package/docs/bug-fixes/agent-14-uat-newuser-fixes.md +88 -0
  24. package/docs/bug-fixes/agent-15-uat-poweruser-fixes.md +132 -0
  25. package/docs/bug-fixes/agent-19-code-review.md +316 -0
  26. package/docs/bug-fixes/agent-20-architecture-review.md +331 -0
  27. package/docs/competitive/bolt-new-analysis.md +579 -0
  28. package/docs/competitive/emergence-others-analysis.md +605 -0
  29. package/docs/competitive/replit-lovable-analysis.md +622 -0
  30. package/docs/test-scenarios/edge-cases.md +813 -0
  31. package/docs/test-scenarios/enterprise-scenarios.md +732 -0
  32. package/mcp/__init__.py +1 -1
  33. package/mcp/server.py +49 -5
  34. package/memory/consolidation.py +33 -0
  35. package/memory/embeddings.py +10 -1
  36. package/memory/engine.py +83 -38
  37. package/memory/retrieval.py +36 -0
  38. package/memory/storage.py +56 -4
  39. package/memory/token_economics.py +14 -2
  40. package/memory/vector_index.py +36 -7
  41. package/package.json +1 -1
  42. package/providers/gemini.sh +89 -2
  43. package/templates/README.md +1 -1
  44. package/templates/cli-tool.md +30 -0
  45. package/templates/dashboard.md +4 -0
  46. package/templates/data-pipeline.md +4 -0
  47. package/templates/discord-bot.md +47 -0
  48. package/templates/game.md +4 -0
  49. package/templates/microservice.md +4 -0
  50. package/templates/npm-library.md +4 -0
  51. package/templates/rest-api-auth.md +50 -20
  52. package/templates/rest-api.md +15 -0
  53. package/templates/saas-starter.md +1 -1
  54. package/templates/slack-bot.md +36 -0
  55. package/templates/static-landing-page.md +9 -1
  56. package/templates/web-scraper.md +4 -0
  57. package/web-app/dist/assets/Badge-CeBkFjo6.js +1 -0
  58. package/web-app/dist/assets/Button-yuhqo8Fq.js +1 -0
  59. package/web-app/dist/assets/{Card-B1bV4syB.js → Card-BG17vsX0.js} +1 -1
  60. package/web-app/dist/assets/{HomePage-CZTV6Nea.js → HomePage-BMSQ7Apj.js} +3 -3
  61. package/web-app/dist/assets/{LoginPage-D4UdURJc.js → LoginPage-aH_6iolg.js} +1 -1
  62. package/web-app/dist/assets/{NotFoundPage-CCLSeL6j.js → NotFoundPage-Di8cNtB1.js} +1 -1
  63. package/web-app/dist/assets/ProjectPage-BtRssmw9.js +285 -0
  64. package/web-app/dist/assets/ProjectsPage-B-FTFagc.js +6 -0
  65. package/web-app/dist/assets/{SettingsPage-Xuv8EfAg.js → SettingsPage-DIJPBla4.js} +1 -1
  66. package/web-app/dist/assets/TeamsPage--19fNX7w.js +36 -0
  67. package/web-app/dist/assets/TemplatesPage-ChUQNOOv.js +11 -0
  68. package/web-app/dist/assets/TerminalOutput-Dwrzecyl.js +31 -0
  69. package/web-app/dist/assets/activity-BNRWeu9N.js +6 -0
  70. package/web-app/dist/assets/{arrow-left-CaGtolHc.js → arrow-left-Ce6g1_YE.js} +1 -1
  71. package/web-app/dist/assets/circle-alert-LIndawHL.js +11 -0
  72. package/web-app/dist/assets/clock-Bpj4VPlP.js +6 -0
  73. package/web-app/dist/assets/{external-link-CazyUyav.js → external-link-BhhdF0iQ.js} +1 -1
  74. package/web-app/dist/assets/folder-open-CM2LgfxI.js +11 -0
  75. package/web-app/dist/assets/index-8-KpWWq7.css +1 -0
  76. package/web-app/dist/assets/index-kPDW4e_b.js +236 -0
  77. package/web-app/dist/assets/lock-sAk3Xe54.js +16 -0
  78. package/web-app/dist/assets/search-CR-2i9by.js +6 -0
  79. package/web-app/dist/assets/server-DuFh4ymA.js +26 -0
  80. package/web-app/dist/assets/trash-2-BmkkT8V_.js +11 -0
  81. package/web-app/dist/index.html +2 -2
  82. package/web-app/server.py +1321 -53
  83. package/web-app/dist/assets/Badge-CBUx2PjL.js +0 -6
  84. package/web-app/dist/assets/Button-DsRiznlh.js +0 -21
  85. package/web-app/dist/assets/ProjectPage-D0w_X9tG.js +0 -237
  86. package/web-app/dist/assets/ProjectsPage-ByYxDlKC.js +0 -16
  87. package/web-app/dist/assets/TemplatesPage-BKWN07mc.js +0 -1
  88. package/web-app/dist/assets/TerminalOutput-Dj98V8Z-.js +0 -51
  89. package/web-app/dist/assets/clock-C_CDmobx.js +0 -11
  90. package/web-app/dist/assets/index-D452pFGl.css +0 -1
  91. package/web-app/dist/assets/index-Df4_kgLY.js +0 -196
package/web-app/server.py CHANGED
@@ -313,11 +313,29 @@ class StartRequest(BaseModel):
313
313
  projectDir: Optional[str] = None
314
314
  mode: Optional[str] = None # "quick" for quick mode
315
315
 
316
+ @field_validator("provider")
317
+ @classmethod
318
+ def validate_provider(cls, v: str) -> str:
319
+ # BUG-E2E-006: Validate provider to prevent injection via unknown providers
320
+ allowed = {"claude", "codex", "gemini", "cline", "aider"}
321
+ if v not in allowed:
322
+ raise ValueError(f"Unknown provider '{v}'. Allowed: {', '.join(sorted(allowed))}")
323
+ return v
324
+
316
325
 
317
326
  class QuickStartRequest(BaseModel):
318
327
  prompt: str
319
328
  provider: str = "claude"
320
329
 
330
+ @field_validator("provider")
331
+ @classmethod
332
+ def validate_provider(cls, v: str) -> str:
333
+ # BUG-E2E-006: Validate provider to prevent injection via unknown providers
334
+ allowed = {"claude", "codex", "gemini", "cline", "aider"}
335
+ if v not in allowed:
336
+ raise ValueError(f"Unknown provider '{v}'. Allowed: {', '.join(sorted(allowed))}")
337
+ return v
338
+
321
339
 
322
340
  class StopResponse(BaseModel):
323
341
  stopped: bool
@@ -360,6 +378,18 @@ class FileDeleteRequest(BaseModel):
360
378
  class ChatRequest(BaseModel):
361
379
  message: str
362
380
  mode: str = "quick" # "quick" or "standard"
381
+ # BUG-E2E-004: Accept recent chat history so AI has conversation context
382
+ history: Optional[list[dict]] = None # [{role: "user"|"assistant", content: str}]
383
+
384
+ @field_validator("message")
385
+ @classmethod
386
+ def validate_message(cls, v: str) -> str:
387
+ stripped = v.strip()
388
+ if not stripped:
389
+ raise ValueError("Message cannot be empty")
390
+ if len(stripped.encode("utf-8")) > 100_000: # 100KB max
391
+ raise ValueError("Message exceeds 100KB limit")
392
+ return stripped
363
393
 
364
394
 
365
395
  class SecretRequest(BaseModel):
@@ -412,8 +442,19 @@ class FileChangeHandler(FileSystemEventHandler): # type: ignore[misc]
412
442
  self._debounce_handle: Optional[asyncio.TimerHandle] = None
413
443
 
414
444
  def _should_ignore(self, path: str) -> bool:
415
- """Return True if this path should be ignored."""
416
- parts = Path(path).parts
445
+ """Return True if this path should be ignored.
446
+
447
+ BUG-INT-004 fix: only check path components relative to the project
448
+ directory, not the full absolute path. Previously, a project stored
449
+ at e.g. /home/user/build/project/ would have all events ignored
450
+ because 'build' appeared in the absolute path parts.
451
+ """
452
+ # Compute relative path from project root
453
+ try:
454
+ rel = os.path.relpath(path, self.project_dir)
455
+ except ValueError:
456
+ rel = path # different drives on Windows
457
+ parts = Path(rel).parts
417
458
  for part in parts:
418
459
  if part in _WATCH_IGNORE_DIRS:
419
460
  return True
@@ -426,7 +467,7 @@ class FileChangeHandler(FileSystemEventHandler): # type: ignore[misc]
426
467
  return False
427
468
 
428
469
  def on_any_event(self, event) -> None: # type: ignore[override]
429
- if event.is_directory and event.event_type not in ("created", "deleted"):
470
+ if event.is_directory and event.event_type not in ("created", "deleted", "moved"):
430
471
  return
431
472
  src = getattr(event, "src_path", "")
432
473
  if not src or self._should_ignore(src):
@@ -1543,6 +1584,15 @@ class DevServerManager:
1543
1584
  fix_env = {**os.environ, **_load_secrets()}
1544
1585
  fix_env["LOKI_MAX_ITERATIONS"] = "5"
1545
1586
  fix_env["LOKI_AUTO_FIX"] = "true"
1587
+ # Pass provider via env
1588
+ _mf_prov = Path(project_dir) / ".loki" / "state" / "provider"
1589
+ if _mf_prov.exists():
1590
+ try:
1591
+ _mfp = _mf_prov.read_text().strip()
1592
+ if _mfp:
1593
+ fix_env["LOKI_PROVIDER"] = _mfp
1594
+ except OSError:
1595
+ pass
1546
1596
 
1547
1597
  await asyncio.to_thread(
1548
1598
  subprocess.run,
@@ -1633,6 +1683,15 @@ class DevServerManager:
1633
1683
  try:
1634
1684
  auto_fix_env = {**os.environ}
1635
1685
  auto_fix_env.update(_load_secrets())
1686
+ # Pass provider via env for auto-fix commands
1687
+ _af_prov_file = target / ".loki" / "state" / "provider"
1688
+ if _af_prov_file.exists():
1689
+ try:
1690
+ _afp = _af_prov_file.read_text().strip()
1691
+ if _afp:
1692
+ auto_fix_env["LOKI_PROVIDER"] = _afp
1693
+ except OSError:
1694
+ pass
1636
1695
  result = await asyncio.get_running_loop().run_in_executor(
1637
1696
  None,
1638
1697
  lambda: subprocess.run(
@@ -2023,6 +2082,15 @@ INSTRUCTIONS:
2023
2082
  try:
2024
2083
  fix_env = {**os.environ, **_load_secrets()}
2025
2084
  fix_env["LOKI_MAX_ITERATIONS"] = "5" # More iterations for complex Docker fixes
2085
+ # Pass provider via env
2086
+ _sf_prov = Path(project_dir) / ".loki" / "state" / "provider"
2087
+ if _sf_prov.exists():
2088
+ try:
2089
+ _sfp = _sf_prov.read_text().strip()
2090
+ if _sfp:
2091
+ fix_env["LOKI_PROVIDER"] = _sfp
2092
+ except OSError:
2093
+ pass
2026
2094
 
2027
2095
  proc = await asyncio.to_thread(
2028
2096
  subprocess.run,
@@ -2350,9 +2418,15 @@ async def _read_process_output() -> None:
2350
2418
  # Keep last 5000 lines
2351
2419
  if len(session.log_lines) > 5000:
2352
2420
  session.log_lines = session.log_lines[-5000:]
2421
+ # BUG-E2E-002: Include sequence number so frontend can detect
2422
+ # gaps and maintain correct ordering even under async pressure
2353
2423
  await _broadcast({
2354
2424
  "type": "log",
2355
- "data": {"line": text, "timestamp": time.strftime("%H:%M:%S")},
2425
+ "data": {
2426
+ "line": text,
2427
+ "timestamp": time.strftime("%H:%M:%S"),
2428
+ "seq": session.log_lines_total,
2429
+ },
2356
2430
  })
2357
2431
  except Exception:
2358
2432
  logger.error("Process output reader failed", exc_info=True)
@@ -2363,8 +2437,12 @@ async def _read_process_output() -> None:
2363
2437
  await _broadcast({"type": "session_end", "data": {"message": "Session ended"}})
2364
2438
 
2365
2439
 
2366
- def _build_file_tree(root: Path, max_depth: int = 4, _depth: int = 0) -> list[dict]:
2367
- """Recursively build a file tree from a directory."""
2440
+ def _build_file_tree(root: Path, max_depth: int = 8, _depth: int = 0) -> list[dict]:
2441
+ """Recursively build a file tree from a directory.
2442
+
2443
+ max_depth is set to 8 to support monorepo structures like
2444
+ packages/frontend/src/components/ui/Button.tsx (6 levels deep).
2445
+ """
2368
2446
  if _depth >= max_depth or not root.is_dir():
2369
2447
  return []
2370
2448
 
@@ -2374,13 +2452,28 @@ def _build_file_tree(root: Path, max_depth: int = 4, _depth: int = 0) -> list[di
2374
2452
  except PermissionError:
2375
2453
  return []
2376
2454
 
2455
+ # Limit children per directory to prevent memory issues on very large projects
2456
+ MAX_CHILDREN = 500
2457
+ child_count = 0
2458
+
2377
2459
  for item in items:
2378
2460
  # Skip hidden dirs and common noise
2379
2461
  if item.name.startswith("."):
2380
2462
  continue
2381
- if item.name in ("node_modules", "__pycache__", ".git", "venv", ".venv"):
2463
+ if item.name in ("node_modules", "__pycache__", ".git", "venv", ".venv",
2464
+ "vendor", ".turbo", ".nx", "coverage", ".parcel-cache"):
2382
2465
  continue
2383
2466
 
2467
+ child_count += 1
2468
+ if child_count > MAX_CHILDREN:
2469
+ entries.append({
2470
+ "name": f"... ({len(list(root.iterdir())) - MAX_CHILDREN} more items)",
2471
+ "path": str(root.relative_to(root)) + "/...",
2472
+ "type": "file",
2473
+ "size": 0,
2474
+ })
2475
+ break
2476
+
2384
2477
  node: dict = {"name": item.name, "path": str(item.relative_to(root))}
2385
2478
  if item.is_dir():
2386
2479
  node["type"] = "directory"
@@ -2430,13 +2523,17 @@ def _load_secrets() -> dict[str, str]:
2430
2523
 
2431
2524
  def _save_secrets(secrets: dict[str, str]) -> None:
2432
2525
  """Save secrets to disk, encrypting values if PURPLE_LAB_SECRET_KEY is set."""
2433
- from crypto import encrypt_value, encryption_available
2434
2526
  _SECRETS_FILE.parent.mkdir(parents=True, exist_ok=True)
2435
- if encryption_available():
2436
- encrypted = {k: encrypt_value(v) for k, v in secrets.items()}
2437
- _SECRETS_FILE.write_text(json.dumps(encrypted, indent=2))
2438
- else:
2439
- _SECRETS_FILE.write_text(json.dumps(secrets, indent=2))
2527
+ try:
2528
+ from crypto import encrypt_value, encryption_available
2529
+ if encryption_available():
2530
+ encrypted = {k: encrypt_value(v) for k, v in secrets.items()}
2531
+ _SECRETS_FILE.write_text(json.dumps(encrypted, indent=2))
2532
+ return
2533
+ except ImportError:
2534
+ pass
2535
+ # Fallback: save as plaintext when crypto module is not available
2536
+ _SECRETS_FILE.write_text(json.dumps(secrets, indent=2))
2440
2537
 
2441
2538
 
2442
2539
  # ---------------------------------------------------------------------------
@@ -2506,6 +2603,16 @@ async def start_session(req: StartRequest) -> JSONResponse:
2506
2603
  project_dir = req.projectDir
2507
2604
  if not project_dir:
2508
2605
  project_dir = os.path.join(Path.home(), "purple-lab-projects", f"project-{int(time.time() * 1000)}")
2606
+ else:
2607
+ # Validate user-supplied path stays within home directory
2608
+ try:
2609
+ resolved_dir = Path(project_dir).resolve()
2610
+ resolved_dir.relative_to(Path.home().resolve())
2611
+ except (ValueError, OSError):
2612
+ return JSONResponse(
2613
+ status_code=400,
2614
+ content={"error": "Project directory must be within your home directory"},
2615
+ )
2509
2616
  os.makedirs(project_dir, exist_ok=True)
2510
2617
 
2511
2618
  # Write PRD to a temp file in the project dir
@@ -2535,6 +2642,11 @@ async def start_session(req: StartRequest) -> JSONResponse:
2535
2642
  build_env = {**os.environ, "LOKI_DIR": os.path.join(project_dir, ".loki")}
2536
2643
  build_env.update(_load_secrets())
2537
2644
 
2645
+ # BUG-INT-001 fix: pass provider selection via LOKI_PROVIDER env var
2646
+ # for quick mode (loki quick doesn't accept --provider flag)
2647
+ if req.provider:
2648
+ build_env["LOKI_PROVIDER"] = req.provider
2649
+
2538
2650
  proc = subprocess.Popen(
2539
2651
  cmd,
2540
2652
  stdout=subprocess.PIPE,
@@ -2600,6 +2712,9 @@ async def quick_start_session(req: QuickStartRequest) -> JSONResponse:
2600
2712
  prompt = req.prompt.strip()
2601
2713
  if not prompt:
2602
2714
  return JSONResponse(status_code=400, content={"error": "Prompt required"})
2715
+ # BUG-E2E-001: Validate minimum prompt length to prevent degenerate builds
2716
+ if len(prompt) < 3:
2717
+ return JSONResponse(status_code=400, content={"error": "Prompt too short (minimum 3 characters)"})
2603
2718
  if len(prompt.encode()) > _MAX_PRD_BYTES:
2604
2719
  return JSONResponse(status_code=400, content={"error": "Prompt exceeds size limit"})
2605
2720
 
@@ -2750,22 +2865,39 @@ async def get_status() -> JSONResponse:
2750
2865
  max_iterations = 0
2751
2866
  cost_usd = 0.0
2752
2867
 
2753
- state_file = loki_dir / "state" / "session.json"
2754
- if state_file.exists():
2868
+ # BUG-INT-002 fix: CLI writes dashboard-state.json, not state/session.json.
2869
+ # Read from dashboard-state.json (primary) with orchestrator.json fallback.
2870
+ dash_state_file = loki_dir / "dashboard-state.json"
2871
+ if dash_state_file.exists():
2755
2872
  try:
2756
- with open(state_file) as f:
2873
+ with open(dash_state_file) as f:
2757
2874
  state = json.load(f)
2758
2875
  phase = state.get("phase", phase)
2759
2876
  iteration = state.get("iteration", iteration)
2760
2877
  complexity = state.get("complexity", complexity)
2761
- current_task = state.get("current_task", current_task)
2762
- pending_tasks = state.get("pending_tasks", pending_tasks)
2878
+ # Tasks are nested in dashboard-state.json
2879
+ tasks = state.get("tasks")
2880
+ if isinstance(tasks, dict):
2881
+ pending_tasks = int(tasks.get("pending", 0) or 0)
2882
+ in_progress = int(tasks.get("inProgress", 0) or 0)
2883
+ if in_progress > 0:
2884
+ current_task = f"{in_progress} task(s) in progress"
2763
2885
  # Extract cost from tokens object if present
2764
2886
  tokens = state.get("tokens")
2765
2887
  if isinstance(tokens, dict):
2766
2888
  cost_usd = float(tokens.get("cost_usd", 0) or 0)
2767
2889
  except (json.JSONDecodeError, OSError):
2768
2890
  pass
2891
+ else:
2892
+ # Fallback: try orchestrator.json for phase/iteration
2893
+ orch_file = loki_dir / "state" / "orchestrator.json"
2894
+ if orch_file.exists():
2895
+ try:
2896
+ with open(orch_file) as f:
2897
+ orch = json.load(f)
2898
+ phase = orch.get("currentPhase", phase)
2899
+ except (json.JSONDecodeError, OSError):
2900
+ pass
2769
2901
 
2770
2902
  # Read max_iterations from autonomy state
2771
2903
  autonomy_state = loki_dir / "autonomy-state.json"
@@ -2958,15 +3090,16 @@ async def get_prd_prefill() -> JSONResponse:
2958
3090
  @app.post("/api/session/pause")
2959
3091
  async def pause_session() -> JSONResponse:
2960
3092
  """Pause the current loki session by sending SIGUSR1."""
2961
- if not session.running or session.process is None:
2962
- return JSONResponse(content={"paused": False, "message": "No session running"})
2963
- try:
2964
- os.kill(session.process.pid, signal.SIGUSR1)
2965
- except ProcessLookupError:
2966
- return JSONResponse(content={"paused": False, "message": "Process not found"})
2967
- except Exception as e:
2968
- return JSONResponse(content={"paused": False, "message": str(e)})
2969
- session.paused = True
3093
+ async with session._lock:
3094
+ if not session.running or session.process is None:
3095
+ return JSONResponse(content={"paused": False, "message": "No session running"})
3096
+ try:
3097
+ os.kill(session.process.pid, signal.SIGUSR1)
3098
+ except ProcessLookupError:
3099
+ return JSONResponse(content={"paused": False, "message": "Process not found"})
3100
+ except Exception as e:
3101
+ return JSONResponse(content={"paused": False, "message": str(e)})
3102
+ session.paused = True
2970
3103
  await _broadcast({"type": "session_paused", "data": {}})
2971
3104
  return JSONResponse(content={"paused": True})
2972
3105
 
@@ -2974,22 +3107,23 @@ async def pause_session() -> JSONResponse:
2974
3107
  @app.post("/api/session/resume")
2975
3108
  async def resume_session() -> JSONResponse:
2976
3109
  """Resume the current loki session by sending SIGUSR2."""
2977
- if not session.running or session.process is None:
2978
- return JSONResponse(content={"resumed": False, "message": "No session running"})
2979
- try:
2980
- os.kill(session.process.pid, signal.SIGUSR2)
2981
- except ProcessLookupError:
2982
- return JSONResponse(content={"resumed": False, "message": "Process not found"})
2983
- except Exception as e:
2984
- return JSONResponse(content={"resumed": False, "message": str(e)})
2985
- session.paused = False
3110
+ async with session._lock:
3111
+ if not session.running or session.process is None:
3112
+ return JSONResponse(content={"resumed": False, "message": "No session running"})
3113
+ try:
3114
+ os.kill(session.process.pid, signal.SIGUSR2)
3115
+ except ProcessLookupError:
3116
+ return JSONResponse(content={"resumed": False, "message": "Process not found"})
3117
+ except Exception as e:
3118
+ return JSONResponse(content={"resumed": False, "message": str(e)})
3119
+ session.paused = False
2986
3120
  await _broadcast({"type": "session_resumed", "data": {}})
2987
3121
  return JSONResponse(content={"resumed": True})
2988
3122
 
2989
3123
 
2990
3124
  @app.get("/api/templates")
2991
3125
  async def get_templates() -> JSONResponse:
2992
- """List available PRD templates with description and category."""
3126
+ """List available PRD templates with description, category, tech stack, difficulty, and build time."""
2993
3127
  templates_dir = PROJECT_ROOT / "templates"
2994
3128
  if not templates_dir.is_dir():
2995
3129
  return JSONResponse(content=[])
@@ -3002,29 +3136,102 @@ async def get_templates() -> JSONResponse:
3002
3136
  'cli-tool': 'CLI', 'npm-library': 'CLI',
3003
3137
  'discord-bot': 'Bot', 'slack-bot': 'Bot', 'ai-chatbot': 'Bot',
3004
3138
  'data-pipeline': 'Data', 'web-scraper': 'Data',
3139
+ 'saas-starter': 'Website', 'simple-todo-app': 'Website',
3140
+ 'game': 'Other', 'chrome-extension': 'Other', 'mobile-app': 'Other',
3141
+ }
3142
+
3143
+ # Tech stack detection from template content
3144
+ _tech_keywords = {
3145
+ 'React': ['react', 'jsx', 'tsx'],
3146
+ 'Node.js': ['node.js', 'node 20', 'node 18', 'express'],
3147
+ 'Python': ['python', 'flask', 'django', 'fastapi'],
3148
+ 'TypeScript': ['typescript', '.ts'],
3149
+ 'PostgreSQL': ['postgresql', 'postgres', 'pg'],
3150
+ 'MongoDB': ['mongodb', 'mongoose'],
3151
+ 'Docker': ['docker', 'dockerfile', 'docker-compose'],
3152
+ 'Redis': ['redis', 'valkey'],
3153
+ 'Express': ['express.js', 'expressjs'],
3154
+ 'FastAPI': ['fastapi'],
3155
+ 'SQLite': ['sqlite'],
3156
+ 'Tailwind': ['tailwind', 'tailwindcss'],
3157
+ 'Discord': ['discord.js', 'discord bot'],
3158
+ 'Slack': ['slack api', 'slack bot'],
3159
+ 'Vite': ['vite'],
3160
+ 'Playwright': ['playwright'],
3161
+ }
3162
+
3163
+ # Difficulty mapping based on template complexity
3164
+ _difficulty_map = {
3165
+ 'simple-todo-app': 'beginner', 'static-landing-page': 'beginner',
3166
+ 'cli-tool': 'beginner', 'rest-api': 'beginner',
3167
+ 'blog-platform': 'intermediate', 'full-stack-demo': 'intermediate',
3168
+ 'rest-api-auth': 'intermediate', 'discord-bot': 'intermediate',
3169
+ 'slack-bot': 'intermediate', 'ai-chatbot': 'intermediate',
3170
+ 'npm-library': 'intermediate', 'web-scraper': 'intermediate',
3171
+ 'api-only': 'intermediate', 'chrome-extension': 'intermediate',
3172
+ 'game': 'intermediate', 'dashboard': 'intermediate',
3173
+ 'e-commerce': 'advanced', 'data-pipeline': 'advanced',
3174
+ 'microservice': 'advanced', 'saas-starter': 'advanced',
3175
+ 'mobile-app': 'advanced',
3176
+ }
3177
+
3178
+ # Build time estimates
3179
+ _build_time_map = {
3180
+ 'simple-todo-app': '3-5 min', 'static-landing-page': '2-4 min',
3181
+ 'cli-tool': '3-5 min', 'rest-api': '5-8 min',
3182
+ 'blog-platform': '8-12 min', 'full-stack-demo': '8-12 min',
3183
+ 'rest-api-auth': '8-12 min', 'discord-bot': '5-8 min',
3184
+ 'slack-bot': '5-8 min', 'ai-chatbot': '10-15 min',
3185
+ 'npm-library': '5-8 min', 'web-scraper': '5-8 min',
3186
+ 'api-only': '5-8 min', 'chrome-extension': '8-12 min',
3187
+ 'game': '10-15 min', 'dashboard': '10-15 min',
3188
+ 'e-commerce': '15-20 min', 'data-pipeline': '10-15 min',
3189
+ 'microservice': '10-15 min', 'saas-starter': '15-20 min',
3190
+ 'mobile-app': '15-20 min',
3191
+ }
3192
+
3193
+ # Category gradients for visual preview
3194
+ _gradient_map = {
3195
+ 'Website': 'from-violet-500/20 via-purple-500/10 to-indigo-500/20',
3196
+ 'API': 'from-emerald-500/20 via-teal-500/10 to-cyan-500/20',
3197
+ 'CLI': 'from-amber-500/20 via-orange-500/10 to-yellow-500/20',
3198
+ 'Bot': 'from-blue-500/20 via-sky-500/10 to-cyan-500/20',
3199
+ 'Data': 'from-rose-500/20 via-pink-500/10 to-fuchsia-500/20',
3200
+ 'Other': 'from-slate-500/20 via-gray-500/10 to-zinc-500/20',
3005
3201
  }
3006
3202
 
3007
3203
  templates = []
3008
3204
  for f in sorted(templates_dir.glob("*.md")):
3009
3205
  name = f.stem.replace("-", " ").replace("_", " ").title()
3010
- # Extract description: first non-heading, non-blank paragraph
3011
3206
  description = ""
3207
+ tech_stack = []
3012
3208
  try:
3013
3209
  text = f.read_text(errors="replace")
3210
+ text_lower = text.lower()
3211
+ # Extract description: first non-heading, non-blank paragraph
3014
3212
  for line in text.splitlines():
3015
3213
  stripped = line.strip()
3016
3214
  if not stripped or stripped.startswith("#"):
3017
3215
  continue
3018
3216
  description = stripped[:200]
3019
3217
  break
3218
+ # Detect tech stack from content
3219
+ for tech_name, keywords in _tech_keywords.items():
3220
+ if any(kw in text_lower for kw in keywords):
3221
+ tech_stack.append(tech_name)
3020
3222
  except OSError:
3021
3223
  pass
3224
+
3022
3225
  category = _category_map.get(f.stem, "Other")
3023
3226
  templates.append({
3024
3227
  "name": name,
3025
3228
  "filename": f.name,
3026
3229
  "description": description,
3027
3230
  "category": category,
3231
+ "tech_stack": tech_stack[:6], # Limit to 6 techs
3232
+ "difficulty": _difficulty_map.get(f.stem, "intermediate"),
3233
+ "build_time": _build_time_map.get(f.stem, "5-10 min"),
3234
+ "gradient": _gradient_map.get(category, _gradient_map["Other"]),
3028
3235
  })
3029
3236
  return JSONResponse(content=templates)
3030
3237
 
@@ -3310,8 +3517,9 @@ async def get_metrics() -> JSONResponse:
3310
3517
 
3311
3518
  def _infer_session_status(entry: Path) -> str:
3312
3519
  """Infer session status from project directory contents."""
3313
- # 1. Check .loki/state/session.json for explicit phase
3314
- state_file = entry / ".loki" / "state" / "session.json"
3520
+ # 1. Check .loki/dashboard-state.json for explicit phase
3521
+ # BUG-INT-002 fix: CLI writes dashboard-state.json, not state/session.json
3522
+ state_file = entry / ".loki" / "dashboard-state.json"
3315
3523
  if state_file.exists():
3316
3524
  try:
3317
3525
  with open(state_file) as f:
@@ -3319,7 +3527,7 @@ def _infer_session_status(entry: Path) -> str:
3319
3527
  phase = st.get("phase", "")
3320
3528
  if phase and phase != "idle":
3321
3529
  # Verify the session is actually still running by checking
3322
- # if session.json was modified recently (within last 5 min)
3530
+ # if dashboard-state.json was modified recently (within last 5 min)
3323
3531
  try:
3324
3532
  mtime = state_file.stat().st_mtime
3325
3533
  if time.time() - mtime > 300: # 5 minutes stale
@@ -3395,7 +3603,7 @@ async def get_sessions_history() -> JSONResponse:
3395
3603
  continue
3396
3604
  session_info: dict = {
3397
3605
  "id": entry.name,
3398
- "path": str(entry),
3606
+ "path": f"~/purple-lab-projects/{entry.name}" if "purple-lab" in str(base_dir) else f"~/.loki/sessions/{entry.name}",
3399
3607
  "date": "",
3400
3608
  "prd_snippet": "",
3401
3609
  "status": _infer_session_status(entry),
@@ -3479,7 +3687,7 @@ async def delete_session(session_id: str) -> JSONResponse:
3479
3687
  except Exception as e:
3480
3688
  return JSONResponse(status_code=500, content={"error": f"Failed to delete: {e}"})
3481
3689
 
3482
- return JSONResponse(content={"deleted": True, "path": str(target)})
3690
+ return JSONResponse(content={"deleted": True, "session_id": session_id})
3483
3691
 
3484
3692
 
3485
3693
  @app.get("/api/sessions/{session_id}")
@@ -3675,6 +3883,8 @@ async def create_session_file(session_id: str, req: FileWriteRequest) -> JSONRes
3675
3883
  return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
3676
3884
  if not req.path:
3677
3885
  return JSONResponse(status_code=400, content={"error": "Path is required"})
3886
+ if len(req.content.encode("utf-8", errors="replace")) > 1_048_576:
3887
+ return JSONResponse(status_code=413, content={"error": "Content exceeds 1 MB limit"})
3678
3888
 
3679
3889
  search_dirs = [
3680
3890
  Path.home() / "purple-lab-projects",
@@ -3860,7 +4070,28 @@ async def chat_session(session_id: str, req: ChatRequest) -> JSONResponse:
3860
4070
  # active sessions and post-completion for iterative development.
3861
4071
 
3862
4072
  # -- Phase 1: Inject project context (dev server errors, gate failures) --
3863
- context_parts = [req.message]
4073
+ context_parts = []
4074
+
4075
+ # BUG-E2E-004: Inject recent chat history so AI has conversation context
4076
+ if req.history:
4077
+ # Include last 5 exchanges max to avoid token bloat
4078
+ recent = req.history[-10:] # last 10 items = ~5 exchanges
4079
+ history_lines = []
4080
+ for entry in recent:
4081
+ role = entry.get("role", "user")
4082
+ content = entry.get("content", "")
4083
+ if content and role in ("user", "assistant"):
4084
+ # Truncate long assistant responses to keep context manageable
4085
+ if role == "assistant" and len(content) > 500:
4086
+ content = content[:500] + "..."
4087
+ history_lines.append(f"[{role.upper()}]: {content}")
4088
+ if history_lines:
4089
+ context_parts.append(
4090
+ "PREVIOUS CONVERSATION CONTEXT:\n" + "\n".join(history_lines)
4091
+ + "\n\nNow handle the following new request:"
4092
+ )
4093
+
4094
+ context_parts.append(req.message)
3864
4095
 
3865
4096
  # Inject dev server errors if the server has crashed
3866
4097
  ds_info = dev_server_manager.servers.get(session_id)
@@ -3914,19 +4145,34 @@ async def chat_session(session_id: str, req: ChatRequest) -> JSONResponse:
3914
4145
  docker_extra = "\n\n" + docker_instructions_file.read_text()
3915
4146
  except OSError:
3916
4147
  pass
4148
+ # Determine the active provider for this session
4149
+ chat_provider = session.provider or "claude"
4150
+ # Also check .loki/state/provider file in the session directory
4151
+ provider_file = target / ".loki" / "state" / "provider"
4152
+ if provider_file.exists():
4153
+ try:
4154
+ saved_prov = provider_file.read_text().strip()
4155
+ if saved_prov:
4156
+ chat_provider = saved_prov
4157
+ except OSError:
4158
+ pass
4159
+
3917
4160
  if req.mode == "max":
3918
4161
  # Max mode: full loki start with the message as a PRD
3919
4162
  prd_path = target / ".loki" / "chat-prd.md"
3920
4163
  prd_path.parent.mkdir(parents=True, exist_ok=True)
3921
4164
  prd_content = full_message + "\n\n## Deployment\nMUST include Dockerfile and docker-compose.yml for containerized execution." + docker_extra
3922
4165
  prd_path.write_text(prd_content)
3923
- cmd_args = [loki, "start", "--provider", "claude", str(prd_path)]
4166
+ # BUG-INT-005 fix: use session's configured provider, not hardcoded "claude"
4167
+ cmd_args = [loki, "start", "--provider", chat_provider, str(prd_path)]
3924
4168
  else:
3925
4169
  # Quick and Standard both use 'loki quick' -- fast, focused changes
3926
4170
  cmd_args = [loki, "quick", full_message + docker_note]
3927
4171
  try:
3928
4172
  chat_env = {**os.environ}
3929
4173
  chat_env.update(_load_secrets())
4174
+ # Pass provider via env for quick mode (loki quick uses LOKI_PROVIDER env)
4175
+ chat_env["LOKI_PROVIDER"] = chat_provider
3930
4176
  proc = subprocess.Popen(
3931
4177
  cmd_args,
3932
4178
  stdout=subprocess.PIPE,
@@ -4174,6 +4420,17 @@ async def fix_session(session_id: str) -> JSONResponse:
4174
4420
  try:
4175
4421
  fix_env = {**os.environ}
4176
4422
  fix_env.update(_load_secrets())
4423
+ # Pass provider via env for fix commands (same as chat)
4424
+ fix_provider = session.provider or "claude"
4425
+ _fix_prov_file = target / ".loki" / "state" / "provider"
4426
+ if _fix_prov_file.exists():
4427
+ try:
4428
+ _fp = _fix_prov_file.read_text().strip()
4429
+ if _fp:
4430
+ fix_provider = _fp
4431
+ except OSError:
4432
+ pass
4433
+ fix_env["LOKI_PROVIDER"] = fix_provider
4177
4434
  proc = subprocess.Popen(
4178
4435
  [loki, "quick", fix_message],
4179
4436
  stdout=subprocess.PIPE,
@@ -4185,6 +4442,7 @@ async def fix_session(session_id: str) -> JSONResponse:
4185
4442
  start_new_session=True,
4186
4443
  )
4187
4444
  task.process = proc
4445
+ _track_child_pid(proc.pid)
4188
4446
  loop = asyncio.get_running_loop()
4189
4447
 
4190
4448
  def _read() -> None:
@@ -4224,6 +4482,9 @@ async def fix_session(session_id: str) -> JSONResponse:
4224
4482
  except (ProcessLookupError, OSError):
4225
4483
  proc.kill()
4226
4484
  proc.wait()
4485
+ # Untrack the child PID now that the fix process is done
4486
+ if proc is not None:
4487
+ _untrack_child_pid(proc.pid)
4227
4488
  task.complete = True
4228
4489
 
4229
4490
  asyncio.create_task(run_fix())
@@ -4318,6 +4579,8 @@ async def set_secret(req: SecretRequest) -> JSONResponse:
4318
4579
  @app.delete("/api/secrets/{key}")
4319
4580
  async def delete_secret(key: str) -> JSONResponse:
4320
4581
  """Delete a secret."""
4582
+ if not re.match(r'^[A-Za-z_][A-Za-z0-9_]*$', key):
4583
+ return JSONResponse(status_code=400, content={"error": "Invalid key format"})
4321
4584
  secrets = _load_secrets()
4322
4585
  if key not in secrets:
4323
4586
  return JSONResponse(status_code=404, content={"error": "Secret not found"})
@@ -4806,6 +5069,582 @@ async def restart_service(session_id: str, req: dict = Body(...)) -> JSONRespons
4806
5069
  return JSONResponse(status_code=500, content={"error": f"Restart failed: {exc}"})
4807
5070
 
4808
5071
 
5072
+ # ---------------------------------------------------------------------------
5073
+ # Deploy endpoints
5074
+ # ---------------------------------------------------------------------------
5075
+
5076
+
5077
+ @app.post("/api/sessions/{session_id}/deploy")
5078
+ async def deploy_session(session_id: str, req: dict = Body(...)) -> JSONResponse:
5079
+ """Deploy a project to a hosting platform (Vercel, Netlify, GitHub Pages)."""
5080
+ if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
5081
+ return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
5082
+ target = _find_session_dir(session_id)
5083
+ if target is None:
5084
+ return JSONResponse(status_code=404, content={"error": "Session not found"})
5085
+
5086
+ platform = req.get("platform", "")
5087
+ if platform not in ("vercel", "netlify", "github-pages"):
5088
+ return JSONResponse(status_code=400, content={"error": f"Unsupported platform: {platform}"})
5089
+
5090
+ project_dir = str(target)
5091
+ loop = asyncio.get_running_loop()
5092
+
5093
+ try:
5094
+ if platform == "vercel":
5095
+ # Try to deploy via Vercel CLI
5096
+ result = await loop.run_in_executor(None, lambda: subprocess.run(
5097
+ ["npx", "vercel", "--yes", "--prod"],
5098
+ capture_output=True, text=True, cwd=project_dir, timeout=120,
5099
+ env={**os.environ, "CI": "1"},
5100
+ ))
5101
+ output = (result.stdout or "") + (result.stderr or "")
5102
+ if result.returncode == 0:
5103
+ # Extract URL from Vercel output (last line is typically the URL)
5104
+ lines = [l.strip() for l in output.strip().split("\n") if l.strip()]
5105
+ url = ""
5106
+ for line in reversed(lines):
5107
+ if line.startswith("http"):
5108
+ url = line
5109
+ break
5110
+ return JSONResponse(content={"url": url or "https://vercel.app", "output": output})
5111
+ else:
5112
+ return JSONResponse(content={"error": output or "Vercel deploy failed", "output": output})
5113
+
5114
+ elif platform == "netlify":
5115
+ # Build first, then deploy
5116
+ build_result = await loop.run_in_executor(None, lambda: subprocess.run(
5117
+ ["npm", "run", "build"],
5118
+ capture_output=True, text=True, cwd=project_dir, timeout=120,
5119
+ ))
5120
+ # Deploy via Netlify CLI
5121
+ deploy_dir = "dist"
5122
+ for d in ["build", "out", "dist", ".next"]:
5123
+ if (Path(project_dir) / d).is_dir():
5124
+ deploy_dir = d
5125
+ break
5126
+ result = await loop.run_in_executor(None, lambda: subprocess.run(
5127
+ ["npx", "netlify", "deploy", "--prod", "--dir", deploy_dir],
5128
+ capture_output=True, text=True, cwd=project_dir, timeout=120,
5129
+ ))
5130
+ output = (result.stdout or "") + (result.stderr or "")
5131
+ if result.returncode == 0:
5132
+ # Extract URL from netlify output
5133
+ url = ""
5134
+ for line in output.split("\n"):
5135
+ if "Website URL:" in line or "Unique Deploy URL:" in line:
5136
+ parts = line.split("http")
5137
+ if len(parts) > 1:
5138
+ url = "http" + parts[-1].strip()
5139
+ break
5140
+ return JSONResponse(content={"url": url or "https://netlify.app", "output": output})
5141
+ else:
5142
+ return JSONResponse(content={"error": output or "Netlify deploy failed", "output": output})
5143
+
5144
+ elif platform == "github-pages":
5145
+ # Use gh-pages or manual git push to gh-pages branch
5146
+ build_result = await loop.run_in_executor(None, lambda: subprocess.run(
5147
+ ["npm", "run", "build"],
5148
+ capture_output=True, text=True, cwd=project_dir, timeout=120,
5149
+ ))
5150
+ result = await loop.run_in_executor(None, lambda: subprocess.run(
5151
+ ["npx", "gh-pages", "-d", "dist"],
5152
+ capture_output=True, text=True, cwd=project_dir, timeout=120,
5153
+ ))
5154
+ output = (result.stdout or "") + (result.stderr or "")
5155
+ if result.returncode == 0:
5156
+ # Try to infer the pages URL from git remote
5157
+ remote_result = await loop.run_in_executor(None, lambda: subprocess.run(
5158
+ ["git", "remote", "get-url", "origin"],
5159
+ capture_output=True, text=True, cwd=project_dir, timeout=10,
5160
+ ))
5161
+ url = ""
5162
+ if remote_result.returncode == 0:
5163
+ remote = remote_result.stdout.strip()
5164
+ # Parse github.com/user/repo
5165
+ m = re.search(r"github\.com[:/]([^/]+)/([^/.]+)", remote)
5166
+ if m:
5167
+ url = f"https://{m.group(1)}.github.io/{m.group(2)}/"
5168
+ return JSONResponse(content={"url": url or "https://github.io", "output": output})
5169
+ else:
5170
+ return JSONResponse(content={"error": output or "GitHub Pages deploy failed", "output": output})
5171
+
5172
+ except subprocess.TimeoutExpired:
5173
+ return JSONResponse(status_code=500, content={"error": "Deploy timed out after 120 seconds"})
5174
+ except FileNotFoundError as exc:
5175
+ return JSONResponse(status_code=500, content={"error": f"CLI tool not found: {exc}"})
5176
+ except Exception as exc:
5177
+ return JSONResponse(status_code=500, content={"error": f"Deploy failed: {exc}"})
5178
+
5179
+
5180
+ @app.post("/api/sessions/{session_id}/github/push")
5181
+ async def github_push_session(session_id: str) -> JSONResponse:
5182
+ """Create a GitHub repo and push the project code."""
5183
+ if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
5184
+ return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
5185
+ target = _find_session_dir(session_id)
5186
+ if target is None:
5187
+ return JSONResponse(status_code=404, content={"error": "Session not found"})
5188
+
5189
+ project_dir = str(target)
5190
+ loop = asyncio.get_running_loop()
5191
+
5192
+ try:
5193
+ # Ensure git repo is initialized
5194
+ git_dir = Path(project_dir) / ".git"
5195
+ if not git_dir.is_dir():
5196
+ await loop.run_in_executor(None, lambda: subprocess.run(
5197
+ ["git", "init"],
5198
+ capture_output=True, text=True, cwd=project_dir, timeout=10,
5199
+ ))
5200
+ await loop.run_in_executor(None, lambda: subprocess.run(
5201
+ ["git", "add", "-A"],
5202
+ capture_output=True, text=True, cwd=project_dir, timeout=30,
5203
+ ))
5204
+ await loop.run_in_executor(None, lambda: subprocess.run(
5205
+ ["git", "commit", "-m", "Initial commit from Purple Lab"],
5206
+ capture_output=True, text=True, cwd=project_dir, timeout=30,
5207
+ ))
5208
+
5209
+ # Use gh CLI to create repo and push
5210
+ repo_name = Path(project_dir).name
5211
+ result = await loop.run_in_executor(None, lambda: subprocess.run(
5212
+ ["gh", "repo", "create", repo_name, "--public", "--source", ".", "--push"],
5213
+ capture_output=True, text=True, cwd=project_dir, timeout=60,
5214
+ ))
5215
+ output = (result.stdout or "") + (result.stderr or "")
5216
+
5217
+ if result.returncode == 0:
5218
+ # Extract repo URL from output
5219
+ repo_url = ""
5220
+ for line in output.split("\n"):
5221
+ if "github.com" in line:
5222
+ # Extract the URL
5223
+ m = re.search(r"https://github\.com/[^\s]+", line)
5224
+ if m:
5225
+ repo_url = m.group(0)
5226
+ break
5227
+ if not repo_url:
5228
+ # Fallback: get from git remote
5229
+ remote_result = await loop.run_in_executor(None, lambda: subprocess.run(
5230
+ ["git", "remote", "get-url", "origin"],
5231
+ capture_output=True, text=True, cwd=project_dir, timeout=10,
5232
+ ))
5233
+ if remote_result.returncode == 0:
5234
+ repo_url = remote_result.stdout.strip()
5235
+
5236
+ return JSONResponse(content={"repo_url": repo_url, "output": output})
5237
+ else:
5238
+ return JSONResponse(content={"error": output or "GitHub push failed", "output": output})
5239
+
5240
+ except subprocess.TimeoutExpired:
5241
+ return JSONResponse(status_code=500, content={"error": "GitHub push timed out"})
5242
+ except FileNotFoundError as exc:
5243
+ return JSONResponse(status_code=500, content={"error": f"CLI tool not found (gh or git): {exc}"})
5244
+ except Exception as exc:
5245
+ return JSONResponse(status_code=500, content={"error": f"GitHub push failed: {exc}"})
5246
+
5247
+
5248
+ # ---------------------------------------------------------------------------
5249
+ # Git integration endpoints
5250
+ # ---------------------------------------------------------------------------
5251
+
5252
+
5253
+ def _validate_session_and_find_dir(session_id: str) -> tuple[Optional[Path], Optional[JSONResponse]]:
5254
+ """Validate session ID and find directory. Returns (dir, None) or (None, error_response)."""
5255
+ if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
5256
+ return None, JSONResponse(status_code=400, content={"error": "Invalid session ID"})
5257
+ target = _find_session_dir(session_id)
5258
+ if target is None:
5259
+ return None, JSONResponse(status_code=404, content={"error": "Session not found"})
5260
+ return target, None
5261
+
5262
+
5263
+ def _run_git(cwd: Path, *args: str, timeout: int = 15) -> subprocess.CompletedProcess:
5264
+ """Run a git command in the given directory."""
5265
+ return subprocess.run(
5266
+ ["git"] + list(args),
5267
+ capture_output=True, text=True, cwd=str(cwd), timeout=timeout,
5268
+ )
5269
+
5270
+
5271
+ @app.get("/api/sessions/{session_id}/git/status")
5272
+ async def git_status(session_id: str) -> JSONResponse:
5273
+ """Get git status for a session's project."""
5274
+ target, err = _validate_session_and_find_dir(session_id)
5275
+ if err:
5276
+ return err
5277
+
5278
+ loop = asyncio.get_running_loop()
5279
+ try:
5280
+ # Get branch name
5281
+ branch_result = await loop.run_in_executor(
5282
+ None, lambda: _run_git(target, "rev-parse", "--abbrev-ref", "HEAD")
5283
+ )
5284
+ branch = branch_result.stdout.strip() if branch_result.returncode == 0 else "unknown"
5285
+
5286
+ # Get ahead/behind counts
5287
+ ahead, behind = 0, 0
5288
+ try:
5289
+ ab_result = await loop.run_in_executor(
5290
+ None, lambda: _run_git(target, "rev-list", "--left-right", "--count", f"{branch}...origin/{branch}")
5291
+ )
5292
+ if ab_result.returncode == 0:
5293
+ parts = ab_result.stdout.strip().split("\t")
5294
+ if len(parts) == 2:
5295
+ ahead, behind = int(parts[0]), int(parts[1])
5296
+ except (ValueError, subprocess.TimeoutExpired):
5297
+ pass
5298
+
5299
+ # Get porcelain status
5300
+ status_result = await loop.run_in_executor(
5301
+ None, lambda: _run_git(target, "status", "--porcelain=v1")
5302
+ )
5303
+ files = []
5304
+ if status_result.returncode == 0:
5305
+ for line in status_result.stdout.splitlines():
5306
+ if len(line) < 4:
5307
+ continue
5308
+ index_status = line[0]
5309
+ worktree_status = line[1]
5310
+ filepath = line[3:].strip()
5311
+ # Remove quotes from paths with special characters
5312
+ if filepath.startswith('"') and filepath.endswith('"'):
5313
+ filepath = filepath[1:-1]
5314
+
5315
+ # Determine status and staged
5316
+ if index_status == '?' and worktree_status == '?':
5317
+ files.append({"path": filepath, "status": "untracked", "staged": False})
5318
+ else:
5319
+ # Index status (staged)
5320
+ if index_status in ('M', 'A', 'D', 'R'):
5321
+ status_map = {'M': 'modified', 'A': 'added', 'D': 'deleted', 'R': 'renamed'}
5322
+ files.append({"path": filepath, "status": status_map.get(index_status, 'modified'), "staged": True})
5323
+ # Worktree status (unstaged)
5324
+ if worktree_status in ('M', 'D'):
5325
+ status_map = {'M': 'modified', 'D': 'deleted'}
5326
+ files.append({"path": filepath, "status": status_map.get(worktree_status, 'modified'), "staged": False})
5327
+
5328
+ return JSONResponse(content={
5329
+ "branch": branch,
5330
+ "clean": len(files) == 0,
5331
+ "ahead": ahead,
5332
+ "behind": behind,
5333
+ "files": files,
5334
+ })
5335
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5336
+ return JSONResponse(status_code=500, content={"error": f"Git status failed: {exc}"})
5337
+
5338
+
5339
+ @app.get("/api/sessions/{session_id}/git/log")
5340
+ async def git_log(session_id: str, limit: int = 20) -> JSONResponse:
5341
+ """Get commit history for a session's project."""
5342
+ target, err = _validate_session_and_find_dir(session_id)
5343
+ if err:
5344
+ return err
5345
+
5346
+ limit = min(limit, 100) # Cap at 100
5347
+ loop = asyncio.get_running_loop()
5348
+ try:
5349
+ # Use a format that's easy to parse: hash|short_hash|author|date|refs|message
5350
+ fmt = "%H|%h|%an|%ar|%D|%s"
5351
+ result = await loop.run_in_executor(
5352
+ None, lambda: _run_git(target, "log", f"--format={fmt}", f"-{limit}")
5353
+ )
5354
+ if result.returncode != 0:
5355
+ return JSONResponse(content=[])
5356
+
5357
+ commits = []
5358
+ for line in result.stdout.strip().splitlines():
5359
+ if not line:
5360
+ continue
5361
+ parts = line.split("|", 5)
5362
+ if len(parts) < 6:
5363
+ continue
5364
+ refs = [r.strip() for r in parts[4].split(",") if r.strip()] if parts[4] else []
5365
+ # Clean up ref names (remove HEAD -> prefix, etc.)
5366
+ clean_refs = []
5367
+ for ref in refs:
5368
+ ref = ref.replace("HEAD -> ", "").strip()
5369
+ if ref and ref != "HEAD":
5370
+ clean_refs.append(ref)
5371
+ commits.append({
5372
+ "hash": parts[0],
5373
+ "short_hash": parts[1],
5374
+ "author": parts[2],
5375
+ "date": parts[3],
5376
+ "refs": clean_refs,
5377
+ "message": parts[5],
5378
+ })
5379
+ return JSONResponse(content=commits)
5380
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5381
+ return JSONResponse(status_code=500, content={"error": f"Git log failed: {exc}"})
5382
+
5383
+
5384
+ @app.get("/api/sessions/{session_id}/git/branches")
5385
+ async def git_branches(session_id: str) -> JSONResponse:
5386
+ """List git branches for a session's project."""
5387
+ target, err = _validate_session_and_find_dir(session_id)
5388
+ if err:
5389
+ return err
5390
+
5391
+ loop = asyncio.get_running_loop()
5392
+ try:
5393
+ result = await loop.run_in_executor(
5394
+ None, lambda: _run_git(target, "branch", "-a", "--format=%(refname:short)|%(HEAD)")
5395
+ )
5396
+ if result.returncode != 0:
5397
+ return JSONResponse(content=[])
5398
+
5399
+ branches = []
5400
+ for line in result.stdout.strip().splitlines():
5401
+ if not line:
5402
+ continue
5403
+ parts = line.split("|", 1)
5404
+ name = parts[0].strip()
5405
+ is_current = len(parts) > 1 and parts[1].strip() == "*"
5406
+ is_remote = name.startswith("origin/") or name.startswith("remotes/")
5407
+ # Skip HEAD pointers
5408
+ if "HEAD" in name:
5409
+ continue
5410
+ branches.append({
5411
+ "name": name,
5412
+ "current": is_current,
5413
+ "remote": is_remote,
5414
+ })
5415
+ return JSONResponse(content=branches)
5416
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5417
+ return JSONResponse(status_code=500, content={"error": f"Git branches failed: {exc}"})
5418
+
5419
+
5420
+ @app.post("/api/sessions/{session_id}/git/commit")
5421
+ async def git_commit(session_id: str, req: dict = Body(...)) -> JSONResponse:
5422
+ """Create a git commit in the session's project."""
5423
+ target, err = _validate_session_and_find_dir(session_id)
5424
+ if err:
5425
+ return err
5426
+
5427
+ message = req.get("message", "").strip()
5428
+ if not message:
5429
+ return JSONResponse(status_code=400, content={"error": "Commit message is required"})
5430
+
5431
+ files = req.get("files") # Optional: specific files to stage
5432
+
5433
+ loop = asyncio.get_running_loop()
5434
+ try:
5435
+ # Stage files
5436
+ if files and isinstance(files, list):
5437
+ for f in files:
5438
+ if not isinstance(f, str) or ".." in f:
5439
+ continue
5440
+ await loop.run_in_executor(None, lambda f=f: _run_git(target, "add", f))
5441
+ else:
5442
+ # Stage all changes
5443
+ await loop.run_in_executor(None, lambda: _run_git(target, "add", "-A"))
5444
+
5445
+ # Commit
5446
+ result = await loop.run_in_executor(
5447
+ None, lambda: _run_git(target, "commit", "-m", message)
5448
+ )
5449
+ if result.returncode != 0:
5450
+ error_msg = result.stderr.strip() or result.stdout.strip() or "Commit failed"
5451
+ return JSONResponse(status_code=400, content={"error": error_msg})
5452
+
5453
+ # Get the commit hash
5454
+ hash_result = await loop.run_in_executor(
5455
+ None, lambda: _run_git(target, "rev-parse", "--short", "HEAD")
5456
+ )
5457
+ commit_hash = hash_result.stdout.strip() if hash_result.returncode == 0 else "unknown"
5458
+
5459
+ return JSONResponse(content={"hash": commit_hash, "message": message})
5460
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5461
+ return JSONResponse(status_code=500, content={"error": f"Commit failed: {exc}"})
5462
+
5463
+
5464
+ @app.post("/api/sessions/{session_id}/git/branch")
5465
+ async def git_create_branch(session_id: str, req: dict = Body(...)) -> JSONResponse:
5466
+ """Create a new git branch."""
5467
+ target, err = _validate_session_and_find_dir(session_id)
5468
+ if err:
5469
+ return err
5470
+
5471
+ name = req.get("name", "").strip()
5472
+ checkout = req.get("checkout", True)
5473
+ if not name or not re.match(r"^[a-zA-Z0-9._/-]+$", name):
5474
+ return JSONResponse(status_code=400, content={"error": "Invalid branch name"})
5475
+
5476
+ loop = asyncio.get_running_loop()
5477
+ try:
5478
+ if checkout:
5479
+ result = await loop.run_in_executor(
5480
+ None, lambda: _run_git(target, "checkout", "-b", name)
5481
+ )
5482
+ else:
5483
+ result = await loop.run_in_executor(
5484
+ None, lambda: _run_git(target, "branch", name)
5485
+ )
5486
+ if result.returncode != 0:
5487
+ return JSONResponse(status_code=400, content={"error": result.stderr.strip() or "Branch creation failed"})
5488
+
5489
+ return JSONResponse(content={"branch": name, "created": True})
5490
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5491
+ return JSONResponse(status_code=500, content={"error": f"Branch creation failed: {exc}"})
5492
+
5493
+
5494
+ @app.post("/api/sessions/{session_id}/git/checkout")
5495
+ async def git_checkout_branch(session_id: str, req: dict = Body(...)) -> JSONResponse:
5496
+ """Switch to an existing git branch."""
5497
+ target, err = _validate_session_and_find_dir(session_id)
5498
+ if err:
5499
+ return err
5500
+
5501
+ name = req.get("name", "").strip()
5502
+ if not name or not re.match(r"^[a-zA-Z0-9._/-]+$", name):
5503
+ return JSONResponse(status_code=400, content={"error": "Invalid branch name"})
5504
+
5505
+ loop = asyncio.get_running_loop()
5506
+ try:
5507
+ result = await loop.run_in_executor(
5508
+ None, lambda: _run_git(target, "checkout", name)
5509
+ )
5510
+ if result.returncode != 0:
5511
+ return JSONResponse(status_code=400, content={"error": result.stderr.strip() or "Checkout failed"})
5512
+
5513
+ return JSONResponse(content={"branch": name, "switched": True})
5514
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5515
+ return JSONResponse(status_code=500, content={"error": f"Checkout failed: {exc}"})
5516
+
5517
+
5518
+ @app.post("/api/sessions/{session_id}/git/push")
5519
+ async def git_push(session_id: str) -> JSONResponse:
5520
+ """Push the current branch to remote."""
5521
+ target, err = _validate_session_and_find_dir(session_id)
5522
+ if err:
5523
+ return err
5524
+
5525
+ loop = asyncio.get_running_loop()
5526
+ try:
5527
+ # Get current branch
5528
+ branch_result = await loop.run_in_executor(
5529
+ None, lambda: _run_git(target, "rev-parse", "--abbrev-ref", "HEAD")
5530
+ )
5531
+ branch = branch_result.stdout.strip() if branch_result.returncode == 0 else "main"
5532
+
5533
+ # Push with upstream tracking
5534
+ result = await loop.run_in_executor(
5535
+ None, lambda: _run_git(target, "push", "-u", "origin", branch, timeout=30)
5536
+ )
5537
+ if result.returncode != 0:
5538
+ return JSONResponse(status_code=400, content={
5539
+ "error": result.stderr.strip() or "Push failed",
5540
+ "pushed": False,
5541
+ })
5542
+
5543
+ return JSONResponse(content={"pushed": True, "message": f"Pushed {branch} to origin"})
5544
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5545
+ return JSONResponse(status_code=500, content={"error": f"Push failed: {exc}"})
5546
+
5547
+
5548
+ @app.post("/api/sessions/{session_id}/git/pr")
5549
+ async def git_create_pr(session_id: str, req: dict = Body(...)) -> JSONResponse:
5550
+ """Create a pull request using gh CLI."""
5551
+ target, err = _validate_session_and_find_dir(session_id)
5552
+ if err:
5553
+ return err
5554
+
5555
+ title = req.get("title", "").strip()
5556
+ body = req.get("body", "").strip()
5557
+ if not title:
5558
+ return JSONResponse(status_code=400, content={"error": "PR title is required"})
5559
+
5560
+ loop = asyncio.get_running_loop()
5561
+ try:
5562
+ import shutil
5563
+ gh = shutil.which("gh")
5564
+ if not gh:
5565
+ return JSONResponse(status_code=400, content={"error": "gh CLI not found. Install it: https://cli.github.com/"})
5566
+
5567
+ cmd = [gh, "pr", "create", "--title", title]
5568
+ if body:
5569
+ cmd.extend(["--body", body])
5570
+ else:
5571
+ cmd.extend(["--body", ""])
5572
+
5573
+ result = await loop.run_in_executor(
5574
+ None, lambda: subprocess.run(
5575
+ cmd, capture_output=True, text=True, cwd=str(target), timeout=30,
5576
+ )
5577
+ )
5578
+ if result.returncode != 0:
5579
+ return JSONResponse(status_code=400, content={"error": result.stderr.strip() or "PR creation failed"})
5580
+
5581
+ # gh pr create outputs the PR URL
5582
+ pr_url = result.stdout.strip()
5583
+ # Try to extract PR number from URL
5584
+ pr_number = 0
5585
+ if pr_url:
5586
+ parts = pr_url.rstrip("/").split("/")
5587
+ try:
5588
+ pr_number = int(parts[-1])
5589
+ except (ValueError, IndexError):
5590
+ pass
5591
+
5592
+ return JSONResponse(content={"url": pr_url, "number": pr_number})
5593
+ except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as exc:
5594
+ return JSONResponse(status_code=500, content={"error": f"PR creation failed: {exc}"})
5595
+
5596
+
5597
+ # ---------------------------------------------------------------------------
5598
+ # Image upload for AI chat (screenshot-to-change)
5599
+ # ---------------------------------------------------------------------------
5600
+
5601
+
5602
+ @app.post("/api/sessions/{session_id}/chat/image")
5603
+ async def chat_image_upload(session_id: str, request: Request) -> JSONResponse:
5604
+ """Upload an image for screenshot-to-change in AI chat."""
5605
+ if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
5606
+ return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
5607
+ target = _find_session_dir(session_id)
5608
+ if target is None:
5609
+ return JSONResponse(status_code=404, content={"error": "Session not found"})
5610
+
5611
+ # Parse multipart form data
5612
+ try:
5613
+ form = await request.form()
5614
+ image_file = form.get("image")
5615
+ if image_file is None:
5616
+ return JSONResponse(status_code=400, content={"error": "No image file provided"})
5617
+
5618
+ # Read file content
5619
+ content = await image_file.read()
5620
+ if len(content) > 10 * 1024 * 1024: # 10MB limit
5621
+ return JSONResponse(status_code=400, content={"error": "Image too large (max 10MB)"})
5622
+
5623
+ # Save to session's .loki/images/ directory
5624
+ images_dir = target / ".loki" / "images"
5625
+ images_dir.mkdir(parents=True, exist_ok=True)
5626
+
5627
+ image_id = str(uuid.uuid4())[:8]
5628
+ # Sanitize filename
5629
+ original_name = getattr(image_file, 'filename', 'image.png') or 'image.png'
5630
+ safe_name = re.sub(r'[^a-zA-Z0-9._-]', '_', original_name)
5631
+ saved_name = f"{image_id}_{safe_name}"
5632
+ saved_path = images_dir / saved_name
5633
+
5634
+ with open(saved_path, "wb") as f:
5635
+ f.write(content)
5636
+
5637
+ return JSONResponse(content={
5638
+ "image_id": image_id,
5639
+ "filename": safe_name,
5640
+ "path": str(saved_path.relative_to(target)),
5641
+ "size": len(content),
5642
+ })
5643
+ except Exception as exc:
5644
+ logger.error("Image upload failed: %s", exc, exc_info=True)
5645
+ return JSONResponse(status_code=500, content={"error": f"Upload failed: {exc}"})
5646
+
5647
+
4809
5648
  # ---------------------------------------------------------------------------
4810
5649
  # HTTP Proxy for dev server preview
4811
5650
  # ---------------------------------------------------------------------------
@@ -5238,22 +6077,41 @@ async def _push_state_to_client(ws: WebSocket) -> None:
5238
6077
  _cost_usd = 0.0
5239
6078
  _max_iterations = 0
5240
6079
 
5241
- state_file = loki_dir / "state" / "session.json"
5242
- if state_file.exists():
6080
+ # BUG-INT-002 fix: CLI writes dashboard-state.json, not state/session.json
6081
+ dash_file = loki_dir / "dashboard-state.json"
6082
+ if dash_file.exists():
5243
6083
  try:
5244
- with open(state_file) as f:
6084
+ with open(dash_file) as f:
5245
6085
  state_data = json.load(f)
5246
6086
  _phase = state_data.get("phase", _phase)
5247
6087
  _iteration = state_data.get("iteration", _iteration)
5248
6088
  _complexity = state_data.get("complexity", _complexity)
5249
- _current_task = state_data.get("current_task", _current_task)
5250
- _pending_tasks = state_data.get("pending_tasks", _pending_tasks)
6089
+ _tasks = state_data.get("tasks")
6090
+ if isinstance(_tasks, dict):
6091
+ _pending_tasks = int(_tasks.get("pending", 0) or 0)
6092
+ _in_progress = int(_tasks.get("inProgress", 0) or 0)
6093
+ if _in_progress > 0:
6094
+ _current_task = f"{_in_progress} task(s) in progress"
5251
6095
  # Extract cost from tokens object if present
5252
6096
  _tokens = state_data.get("tokens")
5253
6097
  if isinstance(_tokens, dict):
5254
6098
  _cost_usd = float(_tokens.get("cost_usd", 0) or 0)
6099
+ # Agents are included in dashboard-state.json
6100
+ _dash_agents = state_data.get("agents")
6101
+ if isinstance(_dash_agents, list):
6102
+ _agents = _dash_agents
5255
6103
  except (json.JSONDecodeError, OSError):
5256
6104
  pass
6105
+ else:
6106
+ # Fallback: try orchestrator.json for phase
6107
+ _orch_file = loki_dir / "state" / "orchestrator.json"
6108
+ if _orch_file.exists():
6109
+ try:
6110
+ with open(_orch_file) as f:
6111
+ _orch = json.load(f)
6112
+ _phase = _orch.get("currentPhase", _phase)
6113
+ except (json.JSONDecodeError, OSError):
6114
+ pass
5257
6115
 
5258
6116
  agents_file = loki_dir / "state" / "agents.json"
5259
6117
  if agents_file.exists():
@@ -5367,11 +6225,14 @@ async def websocket_endpoint(ws: WebSocket) -> None:
5367
6225
  if session.running and session.project_dir and "session" not in file_watcher._observers:
5368
6226
  file_watcher.start("session", session.project_dir, _broadcast, asyncio.get_running_loop())
5369
6227
 
5370
- # Send recent log lines as backfill
5371
- for line in session.log_lines[-100:]:
6228
+ # Send recent log lines as backfill (with sequence numbers for ordering)
6229
+ # BUG-E2E-002: Include seq so frontend can maintain correct order
6230
+ backfill_lines = session.log_lines[-100:]
6231
+ backfill_start_seq = max(1, session.log_lines_total - len(backfill_lines) + 1)
6232
+ for i, line in enumerate(backfill_lines):
5372
6233
  await ws.send_text(json.dumps({
5373
6234
  "type": "log",
5374
- "data": {"line": line, "timestamp": ""},
6235
+ "data": {"line": line, "timestamp": "", "seq": backfill_start_seq + i},
5375
6236
  }))
5376
6237
 
5377
6238
  # Start server-push state task for this connection
@@ -5638,6 +6499,413 @@ def _pty_read(pty: "pexpect.spawn") -> Optional[str]:
5638
6499
 
5639
6500
 
5640
6501
 
6502
+ # ---------------------------------------------------------------------------
6503
+ # Checkpoint Timeline (Sprint 3.1)
6504
+ # ---------------------------------------------------------------------------
6505
+
6506
+
6507
+ def _get_checkpoints_dir(session_dir: Path) -> Path:
6508
+ """Return the checkpoints directory for a session."""
6509
+ return session_dir / ".loki" / "checkpoints"
6510
+
6511
+
6512
+ def _list_checkpoints(session_dir: Path) -> list[dict]:
6513
+ """List all checkpoints for a session, sorted by timestamp."""
6514
+ cp_dir = _get_checkpoints_dir(session_dir)
6515
+ checkpoints: list[dict] = []
6516
+
6517
+ if not cp_dir.is_dir():
6518
+ return checkpoints
6519
+
6520
+ for entry in sorted(cp_dir.iterdir()):
6521
+ if not entry.is_dir():
6522
+ continue
6523
+ meta_file = entry / "meta.json"
6524
+ if meta_file.exists():
6525
+ try:
6526
+ meta = json.loads(meta_file.read_text())
6527
+ checkpoints.append({
6528
+ "id": entry.name,
6529
+ "timestamp": meta.get("timestamp", ""),
6530
+ "description": meta.get("description", f"Checkpoint {entry.name}"),
6531
+ "iteration": meta.get("iteration", 0),
6532
+ "files_changed": meta.get("files_changed", 0),
6533
+ "is_current": False,
6534
+ })
6535
+ except (json.JSONDecodeError, OSError):
6536
+ checkpoints.append({
6537
+ "id": entry.name,
6538
+ "timestamp": "",
6539
+ "description": f"Checkpoint {entry.name}",
6540
+ "iteration": 0,
6541
+ "files_changed": 0,
6542
+ "is_current": False,
6543
+ })
6544
+ else:
6545
+ # Create entry from directory name
6546
+ checkpoints.append({
6547
+ "id": entry.name,
6548
+ "timestamp": time.strftime(
6549
+ "%Y-%m-%dT%H:%M:%SZ", time.localtime(entry.stat().st_mtime)
6550
+ ),
6551
+ "description": f"Checkpoint {entry.name}",
6552
+ "iteration": 0,
6553
+ "files_changed": 0,
6554
+ "is_current": False,
6555
+ })
6556
+
6557
+ # Mark the latest as current
6558
+ if checkpoints:
6559
+ checkpoints[-1]["is_current"] = True
6560
+
6561
+ return checkpoints
6562
+
6563
+
6564
@app.get("/api/sessions/{session_id}/checkpoints")
async def get_checkpoints(session_id: str) -> JSONResponse:
    """List all checkpoints for a session."""
    # Validate the id before touching the filesystem.
    if re.match(r"^[a-zA-Z0-9._-]+$", session_id) is None:
        return JSONResponse(status_code=400, content={"error": "Invalid session ID"})

    session_dir = _find_session_dir(session_id)
    if session_dir is None:
        return JSONResponse(status_code=404, content={"error": "Session not found"})

    return JSONResponse(content=_list_checkpoints(session_dir))
+ return JSONResponse(content=checkpoints)
6575
+
6576
+
6577
@app.post("/api/sessions/{session_id}/checkpoints/{cp_id}/restore")
async def restore_checkpoint(session_id: str, cp_id: str) -> JSONResponse:
    """Restore a session to a specific checkpoint.

    Copies every file under the checkpoint's ``snapshot/`` directory back over
    the project root (the session's ``.loki`` directory is untouched unless the
    snapshot itself contains ``.loki`` files).

    Returns:
        400 for malformed ids, 404 for unknown session/checkpoint, otherwise
        ``{"restored": true, "checkpoint_id": ..., "description": ...}``.
    """
    if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
        return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
    # BUG FIX (path traversal): the character class admits "." and "..",
    # which would resolve outside the checkpoints directory. Reject them.
    if cp_id in (".", "..") or not re.match(r"^[a-zA-Z0-9._-]+$", cp_id):
        return JSONResponse(status_code=400, content={"error": "Invalid checkpoint ID"})

    target = _find_session_dir(session_id)
    if target is None:
        return JSONResponse(status_code=404, content={"error": "Session not found"})

    cp_dir = _get_checkpoints_dir(target) / cp_id
    if not cp_dir.is_dir():
        return JSONResponse(status_code=404, content={"error": "Checkpoint not found"})

    # Restore files from the checkpoint snapshot, if one exists. A checkpoint
    # without a snapshot still "restores" successfully (metadata-only).
    snapshot_dir = cp_dir / "snapshot"
    if snapshot_dir.is_dir():
        import shutil
        # Copy snapshot files back to project root, creating directories as
        # needed; copy2 preserves file metadata (mtime, mode).
        for item in snapshot_dir.rglob("*"):
            if item.is_file():
                rel = item.relative_to(snapshot_dir)
                dest = target / rel
                dest.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(str(item), str(dest))

    # Prefer the human-written description from meta.json when available.
    description = f"Restored to checkpoint {cp_id}"
    meta_file = cp_dir / "meta.json"
    if meta_file.exists():
        try:
            meta = json.loads(meta_file.read_text())
            description = meta.get("description", description)
        except (json.JSONDecodeError, OSError):
            pass  # best-effort: fall back to the generic description

    return JSONResponse(content={
        "restored": True,
        "checkpoint_id": cp_id,
        "description": description,
    })
6619
+
6620
+
6621
+ # ---------------------------------------------------------------------------
6622
+ # File Search (Sprint 3.3)
6623
+ # ---------------------------------------------------------------------------
6624
+
6625
+
6626
+ def _flatten_file_tree(nodes: list[dict], results: list[dict]) -> None:
6627
+ """Recursively flatten a file tree into a list of file entries."""
6628
+ for node in nodes:
6629
+ results.append({
6630
+ "path": node["path"],
6631
+ "name": node["name"],
6632
+ "type": node["type"],
6633
+ "size": node.get("size"),
6634
+ })
6635
+ if node.get("children"):
6636
+ _flatten_file_tree(node["children"], results)
6637
+
6638
+
6639
@app.get("/api/sessions/{session_id}/files/search")
async def search_session_files(session_id: str, q: str = "") -> JSONResponse:
    """Search files in a session project by name/path."""
    if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
        return JSONResponse(status_code=400, content={"error": "Invalid session ID"})

    # An empty/whitespace query short-circuits to no results.
    needle = q.strip().lower()
    if not needle:
        return JSONResponse(content=[])

    project_dir = _find_session_dir(session_id)
    if project_dir is None:
        return JSONResponse(status_code=404, content={"error": "Session not found"})

    # Flatten the whole tree, then filter by case-insensitive substring match
    # on either the full path or the basename.
    flat: list[dict] = []
    _flatten_file_tree(_build_file_tree(project_dir), flat)
    matches = [
        entry for entry in flat
        if needle in entry["path"].lower() or needle in entry["name"].lower()
    ]

    # Files before directories, then alphabetical by path; cap at 50 results.
    matches.sort(key=lambda entry: (entry["type"] != "file", entry["path"].lower()))
    return JSONResponse(content=matches[:50])
+ return JSONResponse(content=results[:50])
6663
+
6664
+
6665
+ # ---------------------------------------------------------------------------
6666
+ # Change Preview (Sprint 3.2)
6667
+ # ---------------------------------------------------------------------------
6668
+
6669
+
6670
@app.post("/api/sessions/{session_id}/chat/preview")
async def preview_chat_changes(session_id: str, req: ChatRequest) -> JSONResponse:
    """Preview what changes a chat message would make (dry-run diff).

    Uses git diff to show what would change without actually applying.
    Returns structured diff data for the ChangePreview modal.

    Response shape: {session_id, message, files: [{path, action, additions,
    deletions, hunks}], total_additions, total_deletions}. Git failures and
    timeouts degrade gracefully to an empty file list rather than erroring.
    """
    if not re.match(r"^[a-zA-Z0-9._-]+$", session_id):
        return JSONResponse(status_code=400, content={"error": "Invalid session ID"})
    target = _find_session_dir(session_id)
    if target is None:
        return JSONResponse(status_code=404, content={"error": "Session not found"})

    # For the preview, we parse the current git status to generate mock diffs
    # showing the pending unstaged/staged changes. In production this would
    # invoke the LLM in dry-run mode, but for now we show actual pending changes.
    files: list[dict] = []
    total_add = 0
    total_del = 0

    try:
        # git calls run in the default executor so the event loop isn't
        # blocked; each call has a 10s timeout.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(None, lambda: subprocess.run(
            ["git", "diff", "--numstat", "HEAD"],
            capture_output=True, text=True, cwd=str(target), timeout=10
        ))
        if result.returncode == 0 and result.stdout.strip():
            for line in result.stdout.strip().splitlines():
                # numstat format: "<added>\t<deleted>\t<path>"; binary files
                # report "-" for both counts, which we treat as 0.
                parts = line.split("\t")
                if len(parts) == 3:
                    additions = int(parts[0]) if parts[0] != "-" else 0
                    deletions = int(parts[1]) if parts[1] != "-" else 0
                    filepath = parts[2]
                    total_add += additions
                    total_del += deletions

                    # Get the actual diff hunks for this file
                    # (fp=filepath binds the loop variable at definition time).
                    diff_result = await loop.run_in_executor(None, lambda fp=filepath: subprocess.run(
                        ["git", "diff", "HEAD", "--", fp],
                        capture_output=True, text=True, cwd=str(target), timeout=10
                    ))
                    hunks = _parse_diff_hunks(diff_result.stdout if diff_result.returncode == 0 else "")

                    action = "modify"
                    files.append({
                        "path": filepath,
                        "action": action,
                        "additions": additions,
                        "deletions": deletions,
                        "hunks": hunks,
                    })

        # Also check for untracked files
        untracked_result = await loop.run_in_executor(None, lambda: subprocess.run(
            ["git", "ls-files", "--others", "--exclude-standard"],
            capture_output=True, text=True, cwd=str(target), timeout=10
        ))
        if untracked_result.returncode == 0 and untracked_result.stdout.strip():
            # Cap at 20 untracked files to bound the response size.
            for filepath in untracked_result.stdout.strip().splitlines()[:20]:
                try:
                    fpath = target / filepath
                    if fpath.is_file():
                        content = fpath.read_text(errors="replace")
                        line_count = len(content.splitlines())
                        total_add += line_count
                        # Synthesize a single all-add hunk; only the first 100
                        # lines are included, though new_count reports the
                        # file's full line count.
                        lines = [
                            {"type": "add", "content": l, "new_line": i + 1}
                            for i, l in enumerate(content.splitlines()[:100])
                        ]
                        files.append({
                            "path": filepath,
                            "action": "add",
                            "additions": line_count,
                            "deletions": 0,
                            "hunks": [{"old_start": 0, "old_count": 0, "new_start": 1, "new_count": line_count, "lines": lines}],
                        })
                except (OSError, UnicodeDecodeError):
                    # Unreadable untracked file: skip it silently.
                    pass
    except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
        # git missing, not a repo, or too slow — return whatever we gathered.
        pass

    return JSONResponse(content={
        "session_id": session_id,
        "message": req.message,
        "files": files,
        "total_additions": total_add,
        "total_deletions": total_del,
    })
6758
+
6759
+
6760
+ def _parse_diff_hunks(diff_text: str) -> list[dict]:
6761
+ """Parse unified diff output into structured hunks."""
6762
+ hunks: list[dict] = []
6763
+ if not diff_text:
6764
+ return hunks
6765
+
6766
+ current_hunk: Optional[dict] = None
6767
+ old_line = 0
6768
+ new_line = 0
6769
+
6770
+ for line in diff_text.splitlines():
6771
+ if line.startswith("@@"):
6772
+ # Parse hunk header: @@ -old_start,old_count +new_start,new_count @@
6773
+ if current_hunk:
6774
+ hunks.append(current_hunk)
6775
+ match = re.match(r"@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@", line)
6776
+ if match:
6777
+ old_line = int(match.group(1))
6778
+ new_line = int(match.group(3))
6779
+ current_hunk = {
6780
+ "old_start": old_line,
6781
+ "old_count": int(match.group(2) or 1),
6782
+ "new_start": new_line,
6783
+ "new_count": int(match.group(4) or 1),
6784
+ "lines": [],
6785
+ }
6786
+ continue
6787
+
6788
+ if current_hunk is None:
6789
+ continue
6790
+
6791
+ if line.startswith("+"):
6792
+ current_hunk["lines"].append({
6793
+ "type": "add",
6794
+ "content": line[1:],
6795
+ "new_line": new_line,
6796
+ })
6797
+ new_line += 1
6798
+ elif line.startswith("-"):
6799
+ current_hunk["lines"].append({
6800
+ "type": "delete",
6801
+ "content": line[1:],
6802
+ "old_line": old_line,
6803
+ })
6804
+ old_line += 1
6805
+ elif line.startswith(" "):
6806
+ current_hunk["lines"].append({
6807
+ "type": "context",
6808
+ "content": line[1:],
6809
+ "old_line": old_line,
6810
+ "new_line": new_line,
6811
+ })
6812
+ old_line += 1
6813
+ new_line += 1
6814
+
6815
+ if current_hunk:
6816
+ hunks.append(current_hunk)
6817
+
6818
+ # Limit to 50 hunks max to prevent huge responses
6819
+ return hunks[:50]
6820
# ---------------------------------------------------------------------------
# Teams & RBAC endpoints
# ---------------------------------------------------------------------------

# In-memory store for teams (production would use database)
# NOTE(review): module-level state is per-process only — lost on restart and
# not shared between worker processes.
_teams_store: Dict[str, dict] = {}
# Newest-first audit entries; bounded to 500 by _audit().
_audit_log: list = []
6826
+
6827
+
6828
class CreateTeamRequest(BaseModel):
    """Request body for POST /api/teams."""
    # Display name for the new team.
    name: str
6830
+
6831
+
6832
class AddMemberRequest(BaseModel):
    """Request body for POST /api/teams/{team_id}/members."""
    # Member email; the local part is used as the display name.
    email: str
    # Role assigned to the member; defaults to the least-privileged "viewer".
    role: str = "viewer"
6835
+
6836
+
6837
def _audit(action: str, user: str = "system", target: str = "", details: str = ""):
    """Prepend an entry to the in-memory audit log (newest first).

    The log is capped at 500 entries; older entries are dropped.
    """
    _audit_log.insert(0, {
        "id": str(uuid.uuid4()),
        "action": action,
        "user": user,
        "target": target,
        "timestamp": datetime.now().isoformat(),
        "details": details,
    })
    # Keep last 500 entries
    del _audit_log[500:]
6851
+
6852
+
6853
@app.get("/api/teams")
async def list_teams() -> JSONResponse:
    """List all teams."""
    return JSONResponse(content=list(_teams_store.values()))
6858
+
6859
+
6860
@app.post("/api/teams")
async def create_team(req: CreateTeamRequest) -> JSONResponse:
    """Create a new team with a random short id and an empty member list."""
    team_id = f"team-{uuid.uuid4().hex[:8]}"
    _teams_store[team_id] = {
        "id": team_id,
        "name": req.name,
        "members": [],
        "created_at": datetime.now().isoformat(),
    }
    # Record the creation in the audit trail.
    _audit("team.created", target=req.name)
    return JSONResponse(content={"id": team_id, "name": req.name, "created": True})
6873
+
6874
+
6875
@app.get("/api/teams/{team_id}/members")
async def list_team_members(team_id: str) -> JSONResponse:
    """List members of a team; 404 if the team does not exist."""
    team = _teams_store.get(team_id)
    if not team:
        return JSONResponse(status_code=404, content={"error": "Team not found"})
    members = team.get("members", [])
    return JSONResponse(content=members)
6882
+
6883
+
6884
@app.post("/api/teams/{team_id}/members")
async def add_team_member(team_id: str, req: AddMemberRequest) -> JSONResponse:
    """Add a member to a team; 404 if the team does not exist."""
    team = _teams_store.get(team_id)
    if not team:
        return JSONResponse(status_code=404, content={"error": "Team not found"})

    member_id = f"m-{uuid.uuid4().hex[:8]}"
    # Display name defaults to the local part of the email address.
    team.setdefault("members", []).append({
        "id": member_id,
        "email": req.email,
        "name": req.email.split("@")[0],
        "role": req.role,
        "joined_at": datetime.now().isoformat(),
    })
    _audit("member.added", target=req.email, details=f"Role: {req.role}")
    return JSONResponse(content={"added": True, "member_id": member_id})
6901
+
6902
+
6903
@app.get("/api/audit-log")
async def get_audit_log(limit: Optional[int] = None) -> JSONResponse:
    """Get audit log entries, newest first.

    Args:
        limit: Optional query parameter capping the number of entries
            returned. Defaults to None (return everything), which preserves
            the previous behavior of this endpoint.
    """
    entries = _audit_log if limit is None else _audit_log[:max(0, limit)]
    return JSONResponse(content=entries)
6907
+
6908
+
5641
6909
  # ---------------------------------------------------------------------------
5642
6910
  # Static file serving (built React app)
5643
6911
  # ---------------------------------------------------------------------------