tarsk 0.4.14 → 0.4.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/dist/index.js +1489 -654
  2. package/dist/public/assets/account-view-Bt1vm-un.js +1 -0
  3. package/dist/public/assets/api-Btei2JXW.js +1 -0
  4. package/dist/public/assets/browser-tab-Bk9dMftL.js +1 -0
  5. package/dist/public/assets/{context-menu-CmaVJQOJ.js → context-menu-DY5pvqON.js} +1 -1
  6. package/dist/public/assets/conversation-history-view-CPKV-CXm.js +1 -0
  7. package/dist/public/assets/{dialogs-config-bLEt-2lj.js → dialogs-config-D99R5wP6.js} +2 -2
  8. package/dist/public/assets/diff-view-CWfe4I9a.js +3 -0
  9. package/dist/public/assets/explorer-tab-view-CJHiNnyd.js +2 -0
  10. package/dist/public/assets/explorer-tree-Ci5Pco28.js +1 -0
  11. package/dist/public/assets/explorer-view-D8ig-_Hu.js +1 -0
  12. package/dist/public/assets/history-view-DZfk4xde.js +1 -0
  13. package/dist/public/assets/index-C-nnqxjL.css +1 -0
  14. package/dist/public/assets/index-DGwlzVgh.js +45 -0
  15. package/dist/public/assets/onboarding-3whdnEsT.js +1 -0
  16. package/dist/public/assets/onboarding-dialog-BXf_-UW9.js +1 -0
  17. package/dist/public/assets/project-settings-view-CxsaclWH.js +1 -0
  18. package/dist/public/assets/provider-details-view-C4UIQaoL.js +1 -0
  19. package/dist/public/assets/providers-sidebar-C8fgu0N8.js +1 -0
  20. package/dist/public/assets/react-vendor-C4zejA65.js +17 -0
  21. package/dist/public/assets/settings-view-D9sBUF6w.js +2 -0
  22. package/dist/public/assets/{store-CDLJd-q_.js → store-C9HlyZYU.js} +1 -1
  23. package/dist/public/assets/tab-context-DqKm6Tr0.js +1 -0
  24. package/dist/public/assets/{terminal-panel-BRdTXLkg.js → terminal-panel-DY8RKsBt.js} +2 -2
  25. package/dist/public/assets/textarea-CQijFV9Y.js +1 -0
  26. package/dist/public/assets/todos-view-Bx3AdgZs.js +1 -0
  27. package/dist/public/assets/use-toast-CIR5dCsV.js +1 -0
  28. package/dist/public/assets/{utils-BxHvPw5k.js → utils-nugtDe3s.js} +1 -1
  29. package/dist/public/index.html +12 -10
  30. package/package.json +2 -2
  31. package/dist/public/assets/account-view-bgfCh9fD.js +0 -1
  32. package/dist/public/assets/api-tXifOYo2.js +0 -1
  33. package/dist/public/assets/browser-tab-BqBW_AK-.js +0 -1
  34. package/dist/public/assets/conversation-history-view-BU-YAZS_.js +0 -1
  35. package/dist/public/assets/diff-view-DjfyQW_j.js +0 -3
  36. package/dist/public/assets/explorer-tab-view-DRpNcRXM.js +0 -2
  37. package/dist/public/assets/explorer-tree-z3yBekT5.js +0 -1
  38. package/dist/public/assets/explorer-view-CjevBGrc.js +0 -1
  39. package/dist/public/assets/history-view-ETKpXvpZ.js +0 -1
  40. package/dist/public/assets/index-CQrq9F6C.js +0 -65
  41. package/dist/public/assets/index-Dwos0qd5.css +0 -1
  42. package/dist/public/assets/onboarding-Dvaoo4rL.js +0 -1
  43. package/dist/public/assets/onboarding-dialog-D7LOFzso.js +0 -1
  44. package/dist/public/assets/project-settings-view-CBW-nuLi.js +0 -1
  45. package/dist/public/assets/provider-details-view-DJXhGdNI.js +0 -1
  46. package/dist/public/assets/providers-sidebar-Cn7XvmQ-.js +0 -1
  47. package/dist/public/assets/react-vendor-CUw-HIQD.js +0 -17
  48. package/dist/public/assets/settings-view-TH1bX0FR.js +0 -2
  49. package/dist/public/assets/textarea-DtvDY2H_.js +0 -1
  50. package/dist/public/assets/todos-view-BHuGeR5Y.js +0 -1
  51. /package/dist/public/assets/{monaco-CgzMkQ7t.js → monaco-DvsnxTfD.js} +0 -0
package/dist/index.js CHANGED
@@ -99,18 +99,18 @@ __export(database_exports, {
99
99
  initializeSchema: () => initializeSchema
100
100
  });
101
101
  import { createClient } from "@libsql/client";
102
- import { join as join4 } from "path";
102
+ import { join as join2 } from "path";
103
103
  import { homedir as homedir2 } from "os";
104
104
  import { mkdirSync } from "fs";
105
105
  function getDatabasePath() {
106
- const appSupportDir = join4(homedir2(), "Library", "Application Support", "Tarsk");
107
- const dataDir = join4(appSupportDir, "data");
108
- return join4(dataDir, "tarsk.db");
106
+ const appSupportDir = join2(homedir2(), "Library", "Application Support", "Tarsk");
107
+ const dataDir = join2(appSupportDir, "data");
108
+ return join2(dataDir, "tarsk.db");
109
109
  }
110
110
  async function initializeDatabase() {
111
111
  const dbPath = getDatabasePath();
112
112
  try {
113
- const dataDir = join4(homedir2(), "Library", "Application Support", "Tarsk", "data");
113
+ const dataDir = join2(homedir2(), "Library", "Application Support", "Tarsk", "data");
114
114
  mkdirSync(dataDir, { recursive: true });
115
115
  const db = createClient({
116
116
  url: `file:${dbPath}`
@@ -214,6 +214,46 @@ async function initializeSchema(db) {
214
214
  cachedAt TEXT NOT NULL
215
215
  )
216
216
  `);
217
+ await db.execute(`
218
+ CREATE TABLE IF NOT EXISTS code_files (
219
+ id INTEGER PRIMARY KEY,
220
+ thread_id TEXT NOT NULL,
221
+ file_path TEXT NOT NULL,
222
+ content TEXT NOT NULL,
223
+ indexed_at TEXT NOT NULL,
224
+ UNIQUE(thread_id, file_path)
225
+ )
226
+ `);
227
+ await db.execute(`
228
+ CREATE VIRTUAL TABLE IF NOT EXISTS code_index USING fts5(
229
+ content,
230
+ content='code_files',
231
+ content_rowid='id'
232
+ )
233
+ `);
234
+ await db.execute(`
235
+ CREATE TABLE IF NOT EXISTS code_index_meta (
236
+ thread_id TEXT PRIMARY KEY,
237
+ indexed_at TEXT NOT NULL,
238
+ file_count INTEGER NOT NULL
239
+ )
240
+ `);
241
+ await db.execute(`
242
+ CREATE TABLE IF NOT EXISTS project_scripts (
243
+ id TEXT PRIMARY KEY,
244
+ projectId TEXT NOT NULL,
245
+ workspace TEXT NOT NULL,
246
+ name TEXT NOT NULL,
247
+ command TEXT NOT NULL,
248
+ friendlyName TEXT NOT NULL,
249
+ updatedAt TEXT NOT NULL,
250
+ FOREIGN KEY (projectId) REFERENCES projects(id) ON DELETE CASCADE,
251
+ UNIQUE(projectId, name)
252
+ )
253
+ `);
254
+ await db.execute(`
255
+ CREATE INDEX IF NOT EXISTS idx_project_scripts_projectId ON project_scripts(projectId)
256
+ `);
217
257
  await db.execute(`
218
258
  CREATE INDEX IF NOT EXISTS idx_threads_projectId ON threads(projectId)
219
259
  `);
@@ -558,6 +598,68 @@ async function runMigrations(db) {
558
598
  )
559
599
  `);
560
600
  }
601
+ const codeFilesExists = await db.execute(
602
+ `SELECT name FROM sqlite_master WHERE type='table' AND name='code_files'`
603
+ );
604
+ if (codeFilesExists.rows.length === 0) {
605
+ console.log("[db] Running migration: Creating code search tables");
606
+ await db.execute(`
607
+ CREATE TABLE code_files (
608
+ id INTEGER PRIMARY KEY,
609
+ thread_id TEXT NOT NULL,
610
+ file_path TEXT NOT NULL,
611
+ content TEXT NOT NULL,
612
+ indexed_at TEXT NOT NULL,
613
+ UNIQUE(thread_id, file_path)
614
+ )
615
+ `);
616
+ await db.execute(`
617
+ CREATE VIRTUAL TABLE code_index USING fts5(
618
+ content,
619
+ content='code_files',
620
+ content_rowid='id'
621
+ )
622
+ `);
623
+ await db.execute(`
624
+ CREATE TABLE code_index_meta (
625
+ thread_id TEXT PRIMARY KEY,
626
+ indexed_at TEXT NOT NULL,
627
+ file_count INTEGER NOT NULL
628
+ )
629
+ `);
630
+ }
631
+ const projectsInfo = await db.execute(`PRAGMA table_info(projects)`);
632
+ const hasPlanPrompt = projectsInfo.rows.some(
633
+ (col) => col.name === "planPrompt"
634
+ );
635
+ if (!hasPlanPrompt) {
636
+ console.log("[db] Running migration: Adding AI prompt columns to projects");
637
+ await db.execute(`ALTER TABLE projects ADD COLUMN planPrompt TEXT`);
638
+ await db.execute(`ALTER TABLE projects ADD COLUMN testPrompt TEXT`);
639
+ await db.execute(`ALTER TABLE projects ADD COLUMN reviewPrompt TEXT`);
640
+ }
641
+ const projectScriptsExists = await db.execute(
642
+ `SELECT name FROM sqlite_master WHERE type='table' AND name='project_scripts'`
643
+ );
644
+ if (projectScriptsExists.rows.length === 0) {
645
+ console.log("[db] Running migration: Creating project_scripts table");
646
+ await db.execute(`
647
+ CREATE TABLE project_scripts (
648
+ id TEXT PRIMARY KEY,
649
+ projectId TEXT NOT NULL,
650
+ workspace TEXT NOT NULL,
651
+ name TEXT NOT NULL,
652
+ command TEXT NOT NULL,
653
+ friendlyName TEXT NOT NULL,
654
+ updatedAt TEXT NOT NULL,
655
+ FOREIGN KEY (projectId) REFERENCES projects(id) ON DELETE CASCADE,
656
+ UNIQUE(projectId, name)
657
+ )
658
+ `);
659
+ await db.execute(`
660
+ CREATE INDEX idx_project_scripts_projectId ON project_scripts(projectId)
661
+ `);
662
+ }
561
663
  } catch (error) {
562
664
  console.error("Failed to run migrations:", error);
563
665
  throw error;
@@ -666,8 +768,8 @@ import fs3 from "fs";
666
768
  import { Hono as Hono21 } from "hono";
667
769
  import { cors } from "hono/cors";
668
770
  import open3 from "open";
669
- import path5 from "path";
670
- import { fileURLToPath as fileURLToPath2 } from "url";
771
+ import path4 from "path";
772
+ import { fileURLToPath } from "url";
671
773
 
672
774
  // src/agent/agent.executor.ts
673
775
  import { Agent as Agent2 } from "@mariozechner/pi-agent-core";
@@ -1382,9 +1484,9 @@ var editSchema = Type2.Object({
1382
1484
  newText: Type2.String({ description: "New text to replace the old text with" })
1383
1485
  });
1384
1486
  var defaultEditOperations = {
1385
- readFile: (path7) => fsReadFile(path7),
1386
- writeFile: (path7, content) => fsWriteFile(path7, content, "utf-8"),
1387
- access: (path7) => fsAccess(path7, constants2.R_OK | constants2.W_OK)
1487
+ readFile: (path6) => fsReadFile(path6),
1488
+ writeFile: (path6, content) => fsWriteFile(path6, content, "utf-8"),
1489
+ access: (path6) => fsAccess(path6, constants2.R_OK | constants2.W_OK)
1388
1490
  };
1389
1491
  function createEditTool(cwd, options) {
1390
1492
  const ops = options?.operations ?? defaultEditOperations;
@@ -1393,14 +1495,14 @@ function createEditTool(cwd, options) {
1393
1495
  label: "edit",
1394
1496
  description: "Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
1395
1497
  parameters: editSchema,
1396
- execute: async (_toolCallId, { path: path7, oldText, newText }, signal) => {
1397
- const absolutePath = resolveToCwd(path7, cwd);
1498
+ execute: async (_toolCallId, { path: path6, oldText, newText }, signal) => {
1499
+ const absolutePath = resolveToCwd(path6, cwd);
1398
1500
  validatePathWithinCwd(absolutePath, cwd);
1399
1501
  return withAbortSignal(signal, async (isAborted) => {
1400
1502
  try {
1401
1503
  await ops.access(absolutePath);
1402
1504
  } catch {
1403
- throw new Error(`File not found: ${path7}`);
1505
+ throw new Error(`File not found: ${path6}`);
1404
1506
  }
1405
1507
  if (isAborted()) return { content: [], details: void 0 };
1406
1508
  const buffer = await ops.readFile(absolutePath);
@@ -1414,7 +1516,7 @@ function createEditTool(cwd, options) {
1414
1516
  const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);
1415
1517
  if (!matchResult.found) {
1416
1518
  throw new Error(
1417
- `Could not find the exact text in ${path7}. The old text must match exactly including all whitespace and newlines.`
1519
+ `Could not find the exact text in ${path6}. The old text must match exactly including all whitespace and newlines.`
1418
1520
  );
1419
1521
  }
1420
1522
  const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
@@ -1422,7 +1524,7 @@ function createEditTool(cwd, options) {
1422
1524
  const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;
1423
1525
  if (occurrences > 1) {
1424
1526
  throw new Error(
1425
- `Found ${occurrences} occurrences of the text in ${path7}. The text must be unique. Please provide more context to make it unique.`
1527
+ `Found ${occurrences} occurrences of the text in ${path6}. The text must be unique. Please provide more context to make it unique.`
1426
1528
  );
1427
1529
  }
1428
1530
  if (isAborted()) return { content: [], details: void 0 };
@@ -1430,7 +1532,7 @@ function createEditTool(cwd, options) {
1430
1532
  const newContent = baseContent.substring(0, matchResult.index) + normalizedNewText + baseContent.substring(matchResult.index + matchResult.matchLength);
1431
1533
  if (baseContent === newContent) {
1432
1534
  throw new Error(
1433
- `No changes made to ${path7}. The replacement produced identical content.`
1535
+ `No changes made to ${path6}. The replacement produced identical content.`
1434
1536
  );
1435
1537
  }
1436
1538
  const finalContent = bom + restoreLineEndings(newContent, originalEnding);
@@ -1438,7 +1540,7 @@ function createEditTool(cwd, options) {
1438
1540
  if (isAborted()) return { content: [], details: void 0 };
1439
1541
  const diffResult = generateDiffString(baseContent, newContent);
1440
1542
  return {
1441
- content: [{ type: "text", text: `Successfully replaced text in ${path7}.` }],
1543
+ content: [{ type: "text", text: `Successfully replaced text in ${path6}.` }],
1442
1544
  details: { diff: diffResult.diff, firstChangedLine: diffResult.firstChangedLine }
1443
1545
  };
1444
1546
  });
@@ -1465,13 +1567,13 @@ var findSchema = Type3.Object({
1465
1567
  var DEFAULT_LIMIT = 1e3;
1466
1568
  var defaultFindOperations = {
1467
1569
  exists: existsSync3,
1468
- glob: async (pattern, searchCwd, { ignore, limit }) => {
1570
+ glob: async (pattern, searchCwd, { ignore: ignore2, limit }) => {
1469
1571
  const results = [];
1470
1572
  try {
1471
1573
  const found = globSync(pattern, {
1472
1574
  cwd: searchCwd,
1473
1575
  dot: true,
1474
- ignore: ["**/node_modules/**", "**/.git/**", ...ignore],
1576
+ ignore: ["**/node_modules/**", "**/.git/**", ...ignore2],
1475
1577
  mark: false
1476
1578
  });
1477
1579
  for (let i = 0; i < Math.min(found.length, limit); i++) {
@@ -1667,9 +1769,9 @@ function createGrepTool(cwd, options) {
1667
1769
  const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT2);
1668
1770
  const formatPath = (filePath) => {
1669
1771
  if (isDirectory) {
1670
- const relative6 = path2.relative(searchPath, filePath);
1671
- if (relative6 && !relative6.startsWith("..")) {
1672
- return relative6.replace(/\\/g, "/");
1772
+ const relative7 = path2.relative(searchPath, filePath);
1773
+ if (relative7 && !relative7.startsWith("..")) {
1774
+ return relative7.replace(/\\/g, "/");
1673
1775
  }
1674
1776
  }
1675
1777
  return path2.basename(filePath);
@@ -1833,287 +1935,625 @@ function createGrepTool(cwd, options) {
1833
1935
  }
1834
1936
  var grepTool = createGrepTool(process.cwd());
1835
1937
 
1836
- // src/tools/ast-grep.ts
1837
- init_utils();
1938
+ // src/tools/code-search.ts
1939
+ init_database();
1838
1940
  import { Type as Type5 } from "@sinclair/typebox";
1839
- import path3 from "path";
1840
- import { existsSync as existsSync4, readdirSync } from "fs";
1841
- import { fileURLToPath } from "url";
1842
- var astGrepSchema = Type5.Object({
1843
- command: Type5.Union([Type5.Literal("run"), Type5.Literal("scan")], {
1844
- description: "Mode: 'run' for simple pattern search, 'scan' for complex rule-based search"
1845
- }),
1941
+
1942
+ // src/tools/code-search-indexer.ts
1943
+ init_database();
1944
+ import { readFileSync as readFileSync2 } from "fs";
1945
+ import { readFile, opendir } from "fs/promises";
1946
+ import { join as join3, relative as relative2, extname } from "path";
1947
+ import ignore from "ignore";
1948
+ var SKIP_DIRS = /* @__PURE__ */ new Set([
1949
+ "node_modules",
1950
+ ".git",
1951
+ "dist",
1952
+ "build",
1953
+ "out",
1954
+ ".next",
1955
+ ".nuxt",
1956
+ "coverage",
1957
+ "__pycache__",
1958
+ "target",
1959
+ "vendor",
1960
+ ".gradle",
1961
+ "Pods",
1962
+ ".cache",
1963
+ ".tox"
1964
+ ]);
1965
+ var SKIP_FILENAMES = /* @__PURE__ */ new Set([
1966
+ "package-lock.json",
1967
+ "bun.lock",
1968
+ "yarn.lock",
1969
+ "Cargo.lock",
1970
+ "poetry.lock",
1971
+ "Gemfile.lock",
1972
+ "pnpm-lock.yaml",
1973
+ "composer.lock"
1974
+ ]);
1975
+ var TEXT_EXTENSIONS = /* @__PURE__ */ new Set([
1976
+ ".ts",
1977
+ ".tsx",
1978
+ ".js",
1979
+ ".jsx",
1980
+ ".mjs",
1981
+ ".cjs",
1982
+ ".py",
1983
+ ".rb",
1984
+ ".go",
1985
+ ".rs",
1986
+ ".java",
1987
+ ".kt",
1988
+ ".swift",
1989
+ ".c",
1990
+ ".cpp",
1991
+ ".h",
1992
+ ".hpp",
1993
+ ".cs",
1994
+ ".php",
1995
+ ".sh",
1996
+ ".bash",
1997
+ ".zsh",
1998
+ ".fish",
1999
+ ".sql",
2000
+ ".md",
2001
+ ".mdx",
2002
+ ".txt",
2003
+ ".yaml",
2004
+ ".yml",
2005
+ ".toml",
2006
+ ".json",
2007
+ ".xml",
2008
+ ".html",
2009
+ ".css",
2010
+ ".scss",
2011
+ ".sass",
2012
+ ".less"
2013
+ ]);
2014
+ var MAX_FILE_SIZE = 512 * 1024;
2015
+ var BATCH_SIZE = 50;
2016
+ function readGitignore(dirPath) {
2017
+ const ig = ignore();
2018
+ try {
2019
+ const content = readFileSync2(join3(dirPath, ".gitignore"), "utf8");
2020
+ ig.add(content);
2021
+ } catch {
2022
+ }
2023
+ return ig;
2024
+ }
2025
+ async function collectFiles(rootPath, signal) {
2026
+ const results = [];
2027
+ const ignoreStack = [
2028
+ { prefix: "", ig: readGitignore(rootPath) }
2029
+ ];
2030
+ async function walk(dirPath) {
2031
+ if (signal?.aborted) return;
2032
+ const dirHandle = await opendir(dirPath, {}).catch(() => null);
2033
+ if (!dirHandle) return;
2034
+ const relDir = relative2(rootPath, dirPath);
2035
+ let pushedIgnore = false;
2036
+ if (relDir) {
2037
+ const ig = readGitignore(dirPath);
2038
+ ignoreStack.push({ prefix: relDir, ig });
2039
+ pushedIgnore = true;
2040
+ }
2041
+ function isIgnored(relPath) {
2042
+ for (const { prefix, ig } of ignoreStack) {
2043
+ const testPath = prefix ? relPath.slice(prefix.length + 1) : relPath;
2044
+ if (testPath && ig.ignores(testPath)) return true;
2045
+ }
2046
+ return false;
2047
+ }
2048
+ for await (const entry of dirHandle) {
2049
+ if (signal?.aborted) break;
2050
+ const entryPath = join3(dirPath, entry.name);
2051
+ const relPath = relative2(rootPath, entryPath);
2052
+ if (entry.isDirectory()) {
2053
+ if (SKIP_DIRS.has(entry.name)) continue;
2054
+ if (isIgnored(relPath)) continue;
2055
+ await walk(entryPath);
2056
+ } else if (entry.isFile()) {
2057
+ if (SKIP_FILENAMES.has(entry.name)) continue;
2058
+ if (!TEXT_EXTENSIONS.has(extname(entry.name))) continue;
2059
+ if (isIgnored(relPath)) continue;
2060
+ results.push(entryPath);
2061
+ }
2062
+ }
2063
+ if (pushedIgnore) ignoreStack.pop();
2064
+ }
2065
+ await walk(rootPath);
2066
+ return results;
2067
+ }
2068
+ async function ensureThreadIndex(db, threadId, threadPath, signal) {
2069
+ const meta = await db.execute("SELECT thread_id FROM code_index_meta WHERE thread_id = ?", [
2070
+ threadId
2071
+ ]);
2072
+ if (meta.rows.length > 0) return;
2073
+ console.log(`[code-search] indexing ${threadId}: scanning files...`);
2074
+ const start = Date.now();
2075
+ const files = await collectFiles(threadPath, signal);
2076
+ if (signal?.aborted) return;
2077
+ let fileCount = 0;
2078
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2079
+ let i = 0;
2080
+ while (i < files.length) {
2081
+ if (signal?.aborted) return;
2082
+ const chunk = files.slice(i, i + BATCH_SIZE);
2083
+ const rows = [];
2084
+ for (const filePath of chunk) {
2085
+ let content;
2086
+ try {
2087
+ const buf = await readFile(filePath);
2088
+ if (buf.length > MAX_FILE_SIZE) continue;
2089
+ content = buf.toString("utf8");
2090
+ if (content.includes("\0")) continue;
2091
+ } catch {
2092
+ continue;
2093
+ }
2094
+ const relPath = relative2(threadPath, filePath);
2095
+ rows.push({
2096
+ sql: "INSERT OR REPLACE INTO code_files (thread_id, file_path, content, indexed_at) VALUES (?, ?, ?, ?)",
2097
+ args: [threadId, relPath, content, now]
2098
+ });
2099
+ fileCount++;
2100
+ }
2101
+ if (rows.length > 0) {
2102
+ await db.batch(rows);
2103
+ }
2104
+ i += BATCH_SIZE;
2105
+ }
2106
+ await db.execute("INSERT INTO code_index(code_index) VALUES('rebuild')");
2107
+ await db.execute(
2108
+ "INSERT OR REPLACE INTO code_index_meta (thread_id, indexed_at, file_count) VALUES (?, ?, ?)",
2109
+ [threadId, now, fileCount]
2110
+ );
2111
+ console.log(`[code-search] indexed ${fileCount} files in ${Date.now() - start}ms`);
2112
+ }
2113
+ async function clearThreadIndex(db, threadId) {
2114
+ await db.execute(
2115
+ "DELETE FROM code_index WHERE rowid IN (SELECT id FROM code_files WHERE thread_id = ?)",
2116
+ [threadId]
2117
+ );
2118
+ await db.execute("DELETE FROM code_files WHERE thread_id = ?", [threadId]);
2119
+ await db.execute("DELETE FROM code_index_meta WHERE thread_id = ?", [threadId]);
2120
+ }
2121
+ async function invalidateThreadIndex(threadId) {
2122
+ const db = await getDatabase();
2123
+ await clearThreadIndex(db, threadId);
2124
+ }
2125
+
2126
+ // src/tools/code-search.ts
2127
+ var DEFAULT_LIMIT3 = 100;
2128
+ var codeSearchSchema = Type5.Object({
2129
+ query: Type5.Optional(
2130
+ Type5.String({
2131
+ description: 'FTS5 full-text search on plain identifier/word tokens (e.g. "validateEmail", "email validation"). Multi-word queries use proximity matching automatically. Supports "quoted phrases", prefix*, and boolean operators (A OR B, A NOT B). Special characters (backslashes, brackets, dots) are stripped \u2014 do NOT put regex patterns here. For regex use `pattern`.'
2132
+ })
2133
+ ),
1846
2134
  pattern: Type5.Optional(
1847
2135
  Type5.String({
1848
- description: "AST pattern for 'run' command. Use metavariables like $NAME, $$$ARGS. Example: 'console.log($ARG)'"
2136
+ description: "JavaScript regex for searching file content, e.g. 'function\\s+\\w+\\s*\\(' or 'class\\s+\\w+Service'. Works standalone (scans all indexed files) or combined with `query` to pre-filter by FTS first. By default matched line by line; set `multiline: true` for cross-line patterns. Case-insensitive unless the pattern contains uppercase letters."
1849
2137
  })
1850
2138
  ),
1851
- lang: Type5.String({
1852
- description: "Language to parse: javascript, typescript, python, rust, go, java, css, html, etc."
1853
- }),
1854
- rule: Type5.Optional(
2139
+ filePath: Type5.Optional(
1855
2140
  Type5.String({
1856
- description: "YAML rule for 'scan' command. Supports kind, has, inside, all, any, not. Example: 'id: test\\nlanguage: javascript\\nrule:\\n kind: function_declaration\\n has:\\n pattern: await $EXPR\\n stopBy: end'"
2141
+ description: "Search by file path pattern, e.g. 'camp.page.ts', 'src/app/camp', or 'src/**/camp/*.ts'. Supports glob wildcards (* and **). Returns all lines from matching files."
1857
2142
  })
1858
2143
  ),
1859
- path: Type5.Optional(
2144
+ fileGlob: Type5.Optional(
1860
2145
  Type5.String({
1861
- description: "Directory or file to search (default: current directory)"
2146
+ description: "Restrict FTS/regex results to files matching this glob, e.g. '*.ts', '**/camp/*.ts'. Applied before the result limit."
1862
2147
  })
1863
2148
  ),
1864
2149
  limit: Type5.Optional(
1865
2150
  Type5.Number({
1866
- description: "Maximum number of matches to return (default: 50)"
2151
+ description: `Maximum number of file matches to return (default: ${DEFAULT_LIMIT3})`
2152
+ })
2153
+ ),
2154
+ offset: Type5.Optional(
2155
+ Type5.Number({
2156
+ description: "Number of file matches to skip for pagination (default: 0). Use with `limit` to page through results when the match limit or output size is reached."
2157
+ })
2158
+ ),
2159
+ multiline: Type5.Optional(
2160
+ Type5.Boolean({
2161
+ description: "When true, apply `pattern` to the full file content rather than line by line, so patterns can span multiple lines. Use `[\\s\\S]` instead of `.` to match newlines within the pattern (e.g. '@Component\\({[\\s\\S]*?templateUrl:' or 'templateUrl:[\\s\\S]*?styleUrl:'). Has no effect without `pattern`."
1867
2162
  })
1868
2163
  )
1869
2164
  });
1870
- var DEFAULT_LIMIT3 = 50;
1871
- var PLATFORM_PACKAGES = {
1872
- "darwin-arm64": "@ast-grep/cli-darwin-arm64",
1873
- "darwin-x64": "@ast-grep/cli-darwin-x64",
1874
- "linux-arm64": "@ast-grep/cli-linux-arm64-gnu",
1875
- "linux-x64": "@ast-grep/cli-linux-x64-gnu",
1876
- "win32-arm64": "@ast-grep/cli-win32-arm64-msvc",
1877
- "win32-ia32": "@ast-grep/cli-win32-ia32-msvc",
1878
- "win32-x64": "@ast-grep/cli-win32-x64-msvc"
1879
- };
1880
- function findBinInNodeModules(startDir, pkg, binName) {
1881
- const pkgPath = pkg.replace("/", path3.sep);
1882
- let dir = startDir;
1883
- while (true) {
1884
- const nmDir = path3.join(dir, "node_modules");
1885
- const standardPath = path3.join(nmDir, pkgPath, binName);
1886
- if (existsSync4(standardPath)) return standardPath;
1887
- const bunDir = path3.join(nmDir, ".bun");
1888
- if (existsSync4(bunDir)) {
1889
- const bunPrefix = pkg.replace("/", "+");
1890
- try {
1891
- const entries = readdirSync(bunDir);
1892
- const match = entries.find((e) => e.startsWith(bunPrefix + "@"));
1893
- if (match) {
1894
- const bunPath = path3.join(bunDir, match, "node_modules", pkgPath, binName);
1895
- if (existsSync4(bunPath)) return bunPath;
1896
- }
1897
- } catch {
2165
+ function globToLike(glob3) {
2166
+ return glob3.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_").replace(/\*\*/g, "%").replace(/\*/g, "%").replace(/\?/g, "_");
2167
+ }
2168
+ function filePathToLike(filePath) {
2169
+ if (filePath.includes("*") || filePath.includes("?")) {
2170
+ return globToLike(filePath);
2171
+ }
2172
+ return `%${filePath.replace(/%/g, "\\%").replace(/_/g, "\\_")}%`;
2173
+ }
2174
+ function sanitizeQuery(raw) {
2175
+ const parts = [];
2176
+ const phraseRe = /"[^"]*"/g;
2177
+ let lastIndex = 0;
2178
+ let match;
2179
+ while ((match = phraseRe.exec(raw)) !== null) {
2180
+ const before = raw.slice(lastIndex, match.index).trim();
2181
+ if (before) parts.push(cleanTokens(before));
2182
+ parts.push(match[0]);
2183
+ lastIndex = match.index + match[0].length;
2184
+ }
2185
+ const remaining = raw.slice(lastIndex).trim();
2186
+ if (remaining) parts.push(cleanTokens(remaining));
2187
+ return parts.filter(Boolean).join(" ").trim();
2188
+ }
2189
+ function cleanTokens(text) {
2190
+ return text.split(/\s+/).map((token) => {
2191
+ if (/^(AND|OR|NOT)$/i.test(token)) return token;
2192
+ const hasTrailingStar = token.endsWith("*");
2193
+ const cleaned = token.replace(/[^a-zA-Z0-9_]/g, " ").trim();
2194
+ const tokens = cleaned.split(/\s+/).filter(Boolean);
2195
+ if (tokens.length === 0) return "";
2196
+ if (hasTrailingStar) tokens[tokens.length - 1] += "*";
2197
+ return tokens.join(" ");
2198
+ }).filter(Boolean).join(" ");
2199
+ }
2200
+ function buildNearQuery(sanitized) {
2201
+ if (/["()|]|^\s*(AND|OR|NOT)\s/i.test(sanitized)) return null;
2202
+ let tokens = sanitized.split(/\s+/).filter((t) => !/^(AND|OR|NOT)$/i.test(t) && t.length > 0);
2203
+ if (tokens.length < 2) return null;
2204
+ if (tokens.length > 5) {
2205
+ tokens = [...tokens].sort((a, b) => b.length - a.length).slice(0, 5);
2206
+ }
2207
+ const nearDistance = Math.max(10, tokens.length * 10);
2208
+ return `NEAR(${tokens.join(" ")}, ${nearDistance})`;
2209
+ }
2210
+ function buildOrQuery(sanitized) {
2211
+ if (/["()|]|^\s*(AND|OR|NOT)\s/i.test(sanitized)) return null;
2212
+ const tokens = sanitized.split(/\s+/).filter((t) => !/^(AND|OR|NOT)$/i.test(t) && t.length > 0);
2213
+ if (tokens.length < 2) return null;
2214
+ return tokens.join(" OR ");
2215
+ }
2216
+ function extractTokens(query) {
2217
+ return query.replace(/["()*]/g, " ").split(/\s+/).map(
2218
+ (t) => t.replace(/^(AND|OR|NOT)$/i, "").replace(/[^a-zA-Z0-9_]/g, "").trim()
2219
+ ).filter(Boolean).map((t) => t.toLowerCase());
2220
+ }
2221
+ function findLinesByTokens(content, tokens) {
2222
+ if (tokens.length === 0) return [];
2223
+ const lines = content.split("\n");
2224
+ const results = [];
2225
+ for (let i = 0; i < lines.length; i++) {
2226
+ const lower = lines[i].toLowerCase();
2227
+ if (tokens.some((t) => lower.includes(t))) {
2228
+ results.push({ line: i + 1, text: lines[i] });
2229
+ }
2230
+ }
2231
+ return results;
2232
+ }
2233
+ function findLinesByRegex(content, re, multiline = false) {
2234
+ const lines = content.split("\n");
2235
+ if (!multiline) {
2236
+ const results = [];
2237
+ for (let i = 0; i < lines.length; i++) {
2238
+ if (re.test(lines[i])) {
2239
+ results.push({ line: i + 1, text: lines[i] });
1898
2240
  }
1899
2241
  }
1900
- const parent = path3.dirname(dir);
1901
- if (parent === dir) break;
1902
- dir = parent;
2242
+ return results;
1903
2243
  }
1904
- return null;
2244
+ const lineStarts = Array.from({ length: lines.length }).fill(0);
2245
+ let pos = 0;
2246
+ for (let i = 0; i < lines.length; i++) {
2247
+ lineStarts[i] = pos;
2248
+ pos += lines[i].length + 1;
2249
+ }
2250
+ let flags = re.flags;
2251
+ if (!flags.includes("g")) flags += "g";
2252
+ const fullRe = new RegExp(re.source, flags);
2253
+ const matchedLines = /* @__PURE__ */ new Set();
2254
+ let match;
2255
+ while ((match = fullRe.exec(content)) !== null) {
2256
+ const matchStart = match.index;
2257
+ const matchEnd = matchStart + match[0].length;
2258
+ for (let i = 0; i < lines.length; i++) {
2259
+ const lineStart = lineStarts[i];
2260
+ const lineEnd = lineStart + lines[i].length;
2261
+ if (lineStart <= matchEnd && lineEnd >= matchStart) {
2262
+ matchedLines.add(i);
2263
+ }
2264
+ if (lineStart > matchEnd) break;
2265
+ }
2266
+ if (match[0].length === 0) fullRe.lastIndex++;
2267
+ }
2268
+ return Array.from(matchedLines).sort((a, b) => a - b).map((i) => ({ line: i + 1, text: lines[i] }));
2269
+ }
2270
+ function formatOutput(rows, tokens, regex, limit, multiline = false) {
2271
+ const outputLines = [];
2272
+ let linesTruncated = false;
2273
+ const matchLimitReached = rows.length >= limit ? rows.length : void 0;
2274
+ for (const { file_path, content } of rows) {
2275
+ const matchingLines = regex ? findLinesByRegex(content, regex, multiline) : findLinesByTokens(content, tokens);
2276
+ for (const { line, text } of matchingLines) {
2277
+ const { text: truncated, wasTruncated } = truncateLine(text.replace(/\r/g, ""));
2278
+ if (wasTruncated) linesTruncated = true;
2279
+ outputLines.push(`${file_path}:${line}: ${truncated}`);
2280
+ }
2281
+ }
2282
+ return { outputLines, linesTruncated, matchLimitReached };
2283
+ }
2284
+ function buildResult(outputLines, linesTruncated, matchLimitReached, limit, offset) {
2285
+ if (outputLines.length === 0) {
2286
+ const text2 = offset > 0 ? `No matches found at offset ${offset}` : "No matches found";
2287
+ return { content: [{ type: "text", text: text2 }], details: void 0 };
2288
+ }
2289
+ const rawOutput = outputLines.join("\n");
2290
+ const truncation = truncateHead(rawOutput, { maxBytes: DEFAULT_MAX_BYTES });
2291
+ const finalOutput = truncation.content;
2292
+ const nextOffset = offset + limit;
2293
+ const notices = [];
2294
+ if (matchLimitReached) {
2295
+ notices.push(
2296
+ `Note: result limit of ${limit} files reached \u2014 use offset=${nextOffset} to get the next page, or use a more specific query`
2297
+ );
2298
+ }
2299
+ if (linesTruncated) {
2300
+ notices.push(`Note: some lines were truncated at ${GREP_MAX_LINE_LENGTH} characters`);
2301
+ }
2302
+ if (truncation.truncated) {
2303
+ notices.push(
2304
+ `Note: output truncated at ${formatSize(DEFAULT_MAX_BYTES)} \u2014 use offset=${nextOffset} to get the next page`
2305
+ );
2306
+ }
2307
+ const text = notices.length > 0 ? `${finalOutput}
2308
+
2309
+ ${notices.join("\n")}` : finalOutput;
2310
+ return {
2311
+ content: [{ type: "text", text }],
2312
+ details: { truncation, matchLimitReached, linesTruncated, offset }
2313
+ };
1905
2314
  }
1906
- var APP_DIR = path3.resolve(fileURLToPath(import.meta.url), "../../..");
1907
- function resolveAstGrepBin(cwd) {
1908
- const key = `${process.platform}-${process.arch}`;
1909
- const pkg = PLATFORM_PACKAGES[key];
1910
- if (!pkg) return null;
1911
- const binName = process.platform === "win32" ? "ast-grep.exe" : "ast-grep";
1912
- console.log(`[ast-grep] resolving binary: platform=${key}, pkg=${pkg}, APP_DIR=${APP_DIR}`);
1913
- const appResult = findBinInNodeModules(APP_DIR, pkg, binName);
1914
- if (appResult) return appResult;
1915
- return findBinInNodeModules(cwd, pkg, binName);
1916
- }
1917
- function createAstGrepTool(cwd) {
2315
+ function createCodeSearchTool(cwd, threadId) {
1918
2316
  return {
1919
- name: "ast_grep",
2317
+ name: "code_search",
1920
2318
  label: "Code Search",
1921
- description: `Structural code search using AST patterns. Matches code by structure, not text. Use 'run' for simple pattern matching (e.g. 'console.log($ARG)') or 'scan' for complex YAML rules with kind/has/inside/all/any/not. Output truncated to ${DEFAULT_LIMIT3} matches or ${DEFAULT_MAX_BYTES / 1024}KB.`,
1922
- parameters: astGrepSchema,
1923
- execute: async (_toolCallId, { command, pattern, lang, rule, path: searchDir, limit }, signal) => {
1924
- return new Promise((resolve6, reject) => {
1925
- if (signal?.aborted) {
1926
- reject(new Error("Operation aborted"));
1927
- return;
1928
- }
1929
- let settled = false;
1930
- const settle = (fn) => {
1931
- if (!settled) {
1932
- settled = true;
1933
- fn();
1934
- }
2319
+ description: 'Search the codebase with FTS5 (BM25 ranking) + optional regex. Use `query` for token/semantic search \u2014 multi-word queries automatically use proximity matching. Use `pattern` for regex (e.g. "function\\s+\\w+"). Use `filePath` to find files by name/path (supports globs). Use `fileGlob` to restrict results to a directory.',
2320
+ parameters: codeSearchSchema,
2321
+ execute: async (_toolCallId, inputs, signal) => {
2322
+ const {
2323
+ query,
2324
+ pattern,
2325
+ filePath,
2326
+ fileGlob,
2327
+ limit = DEFAULT_LIMIT3,
2328
+ offset = 0,
2329
+ multiline = false
2330
+ } = inputs;
2331
+ if (!threadId) {
2332
+ return {
2333
+ content: [
2334
+ { type: "text", text: "code_search: no threadId \u2014 cannot search index" }
2335
+ ],
2336
+ details: void 0
1935
2337
  };
1936
- void (async () => {
1937
- try {
1938
- if (command === "run" && !pattern) {
1939
- settle(() => reject(new Error("'pattern' is required when command is 'run'")));
1940
- return;
1941
- }
1942
- if (command === "scan" && !rule) {
1943
- settle(() => reject(new Error("'rule' is required when command is 'scan'")));
1944
- return;
2338
+ }
2339
+ if (!query && !pattern && !filePath) {
2340
+ return {
2341
+ content: [
2342
+ {
2343
+ type: "text",
2344
+ text: "Provide at least one of: `query`, `pattern`, or `filePath`"
1945
2345
  }
1946
- const searchPath = resolveToCwd(searchDir ?? ".", cwd);
1947
- validatePathWithinCwd(searchPath, cwd);
1948
- const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT3);
1949
- const args2 = [];
1950
- if (command === "run") {
1951
- args2.push("run", "--pattern", pattern, "--lang", lang, "--json", searchPath);
1952
- } else {
1953
- args2.push("scan", "--inline-rules", rule, "--json", searchPath);
2346
+ ],
2347
+ details: void 0
2348
+ };
2349
+ }
2350
+ const REGEX_IN_QUERY = /\\[swdWSDbBAZ]|[[\]{}]|\(\?/;
2351
+ if (query && !pattern && REGEX_IN_QUERY.test(query)) {
2352
+ return {
2353
+ content: [
2354
+ {
2355
+ type: "text",
2356
+ text: `The \`query\` field received what looks like a regex pattern ("${query}"). \`query\` is for plain token/FTS search only \u2014 backslashes and special characters are stripped, so the regex will not work. Move the pattern to the \`pattern\` field instead (and omit \`query\`).`
1954
2357
  }
1955
- const binPath = resolveAstGrepBin(cwd);
1956
- if (!binPath) {
1957
- settle(
1958
- () => reject(
1959
- new Error("ast-grep is not available. Install @ast-grep/cli or add it to PATH.")
1960
- )
1961
- );
1962
- return;
2358
+ ],
2359
+ details: void 0
2360
+ };
2361
+ }
2362
+ let compiledRegex = null;
2363
+ if (pattern) {
2364
+ try {
2365
+ const flags = pattern === pattern.toLowerCase() ? "i" : "";
2366
+ compiledRegex = new RegExp(pattern, flags);
2367
+ } catch (err) {
2368
+ const msg = err instanceof Error ? err.message : String(err);
2369
+ return {
2370
+ content: [{ type: "text", text: `Invalid regex pattern: ${msg}` }],
2371
+ details: void 0
2372
+ };
2373
+ }
2374
+ }
2375
+ const db = await getDatabase();
2376
+ await ensureThreadIndex(db, threadId, cwd, signal);
2377
+ if (signal?.aborted) {
2378
+ return {
2379
+ content: [{ type: "text", text: "Operation aborted" }],
2380
+ details: void 0
2381
+ };
2382
+ }
2383
+ const likeFilter = fileGlob ? globToLike(fileGlob) : null;
2384
+ if (filePath && !query && !pattern) {
2385
+ const likePattern = filePathToLike(filePath);
2386
+ const result = await db.execute(
2387
+ `SELECT file_path, content FROM code_files
2388
+ WHERE thread_id = ?
2389
+ AND file_path LIKE ? ESCAPE '\\'
2390
+ LIMIT ? OFFSET ?`,
2391
+ [threadId, likePattern, limit, offset]
2392
+ );
2393
+ const rows2 = result.rows;
2394
+ const { outputLines: outputLines2, linesTruncated: linesTruncated2, matchLimitReached: matchLimitReached2 } = formatOutput(
2395
+ rows2,
2396
+ [],
2397
+ null,
2398
+ limit
2399
+ );
2400
+ if (outputLines2.length === 0) {
2401
+ const names = rows2.map((r) => r.file_path).join("\n");
2402
+ return buildResult(names ? names.split("\n") : [], false, void 0, limit, offset);
2403
+ }
2404
+ return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
2405
+ }
2406
+ if (filePath && (query || pattern)) {
2407
+ const likePattern = filePathToLike(filePath);
2408
+ const result = await db.execute(
2409
+ `SELECT file_path, content FROM code_files
2410
+ WHERE thread_id = ?
2411
+ AND file_path LIKE ? ESCAPE '\\'
2412
+ LIMIT ? OFFSET ?`,
2413
+ [threadId, likePattern, limit, offset]
2414
+ );
2415
+ const rows2 = result.rows;
2416
+ const tokens2 = query ? extractTokens(query) : [];
2417
+ const { outputLines: outputLines2, linesTruncated: linesTruncated2, matchLimitReached: matchLimitReached2 } = formatOutput(
2418
+ rows2,
2419
+ tokens2,
2420
+ compiledRegex,
2421
+ limit,
2422
+ multiline
2423
+ );
2424
+ return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
2425
+ }
2426
+ if (pattern && !query) {
2427
+ const sql = likeFilter ? `SELECT file_path, content FROM code_files
2428
+ WHERE thread_id = ?
2429
+ AND file_path LIKE ? ESCAPE '\\'` : `SELECT file_path, content FROM code_files
2430
+ WHERE thread_id = ?`;
2431
+ const args2 = likeFilter ? [threadId, likeFilter] : [threadId];
2432
+ const result = await db.execute(sql, args2);
2433
+ const allRows = result.rows;
2434
+ const outputLines2 = [];
2435
+ let linesTruncated2 = false;
2436
+ let matchedFiles = 0;
2437
+ for (const { file_path, content } of allRows) {
2438
+ const matchingLines = findLinesByRegex(content, compiledRegex, multiline);
2439
+ if (matchingLines.length === 0) continue;
2440
+ matchedFiles++;
2441
+ if (matchedFiles <= offset) continue;
2442
+ if (matchedFiles > offset + limit) break;
2443
+ for (const { line, text } of matchingLines) {
2444
+ const { text: truncated, wasTruncated } = truncateLine(text.replace(/\r/g, ""));
2445
+ if (wasTruncated) linesTruncated2 = true;
2446
+ outputLines2.push(`${file_path}:${line}: ${truncated}`);
2447
+ }
2448
+ }
2449
+ const matchLimitReached2 = matchedFiles > offset + limit ? matchedFiles : void 0;
2450
+ return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
2451
+ }
2452
+ const rawQuery = query;
2453
+ async function runFtsQuery(ftsQuery) {
2454
+ const sql = likeFilter ? `SELECT cf.file_path, cf.content
2455
+ FROM code_index
2456
+ JOIN code_files cf ON cf.id = code_index.rowid
2457
+ WHERE code_index MATCH ?
2458
+ AND cf.thread_id = ?
2459
+ AND cf.file_path LIKE ? ESCAPE '\\'
2460
+ ORDER BY rank
2461
+ LIMIT ? OFFSET ?` : `SELECT cf.file_path, cf.content
2462
+ FROM code_index
2463
+ JOIN code_files cf ON cf.id = code_index.rowid
2464
+ WHERE code_index MATCH ?
2465
+ AND cf.thread_id = ?
2466
+ ORDER BY rank
2467
+ LIMIT ? OFFSET ?`;
2468
+ const args2 = likeFilter ? [ftsQuery, threadId, likeFilter, limit, offset] : [ftsQuery, threadId, limit, offset];
2469
+ const result = await db.execute(sql, args2);
2470
+ return result.rows;
2471
+ }
2472
+ let rows;
2473
+ let effectiveQuery = rawQuery;
2474
+ try {
2475
+ const sanitized = sanitizeQuery(rawQuery);
2476
+ const nearQuery = buildNearQuery(sanitized);
2477
+ if (nearQuery) {
2478
+ try {
2479
+ rows = await runFtsQuery(nearQuery);
2480
+ effectiveQuery = nearQuery;
2481
+ if (rows.length === 0) {
2482
+ rows = await runFtsQuery(sanitized);
2483
+ effectiveQuery = sanitized;
1963
2484
  }
1964
- console.log(`[ast-grep] ${binPath} ${args2.join(" ")}`);
1965
- const child = spawnProcess(binPath, args2, {
1966
- stdio: ["ignore", "pipe", "pipe"]
1967
- });
1968
- const MAX_STDOUT_BYTES = 2 * 1024 * 1024;
1969
- let stdout = "";
1970
- let stdoutBytes = 0;
1971
- let stderr = "";
1972
- let aborted = false;
1973
- let killedDueToCap = false;
1974
- const onAbort = () => {
1975
- aborted = true;
1976
- if (!child.killed) child.kill();
1977
- };
1978
- signal?.addEventListener("abort", onAbort, { once: true });
1979
- child.stdout?.on("data", (chunk) => {
1980
- if (killedDueToCap) return;
1981
- stdoutBytes += chunk.length;
1982
- if (stdoutBytes > MAX_STDOUT_BYTES) {
1983
- killedDueToCap = true;
1984
- if (!child.killed) child.kill();
1985
- return;
1986
- }
1987
- stdout += chunk.toString();
1988
- });
1989
- child.stderr?.on("data", (chunk) => {
1990
- stderr += chunk.toString();
1991
- });
1992
- child.on("error", (error) => {
1993
- signal?.removeEventListener("abort", onAbort);
1994
- settle(() => reject(new Error(`Failed to run ast-grep: ${error.message}`)));
1995
- });
1996
- child.on("close", (code) => {
1997
- signal?.removeEventListener("abort", onAbort);
1998
- console.log(
1999
- `[ast-grep] exit code: ${code}, stdout: ${stdout.length} bytes, stderr: ${stderr.length} bytes`
2000
- );
2001
- if (stderr.trim()) console.log(`[ast-grep] stderr: ${stderr.trim()}`);
2002
- if (aborted) {
2003
- settle(() => reject(new Error("Operation aborted")));
2004
- return;
2005
- }
2006
- if (!killedDueToCap && code !== 0 && code !== 1) {
2007
- const errMsg = stderr.trim() || `ast-grep exited with code ${code}`;
2008
- settle(() => reject(new Error(errMsg)));
2009
- return;
2010
- }
2011
- let matches;
2012
- try {
2013
- matches = JSON.parse(stdout || "[]");
2014
- } catch {
2015
- if (killedDueToCap) {
2016
- const lastBrace = stdout.lastIndexOf("}");
2017
- if (lastBrace > 0) {
2018
- try {
2019
- matches = JSON.parse(stdout.slice(0, lastBrace + 1) + "]");
2020
- } catch {
2021
- matches = [];
2022
- }
2023
- } else {
2024
- matches = [];
2025
- }
2026
- } else if (!stdout.trim()) {
2027
- settle(
2028
- () => resolve6({
2029
- content: [{ type: "text", text: "No matches found" }],
2030
- details: void 0
2031
- })
2032
- );
2033
- return;
2034
- } else {
2035
- settle(
2036
- () => reject(
2037
- new Error(`Failed to parse ast-grep JSON output: ${stdout.slice(0, 200)}`)
2038
- )
2039
- );
2040
- return;
2041
- }
2042
- }
2043
- if (!Array.isArray(matches) || matches.length === 0) {
2044
- settle(
2045
- () => resolve6({
2046
- content: [{ type: "text", text: "No matches found" }],
2047
- details: void 0
2048
- })
2049
- );
2050
- return;
2485
+ if (rows.length === 0) {
2486
+ const orQuery = buildOrQuery(sanitized);
2487
+ if (orQuery) {
2488
+ rows = await runFtsQuery(orQuery);
2489
+ effectiveQuery = orQuery;
2051
2490
  }
2052
- const matchLimitReached = matches.length > effectiveLimit;
2053
- if (matchLimitReached) {
2054
- matches = matches.slice(0, effectiveLimit);
2055
- }
2056
- let linesTruncated = false;
2057
- const outputLines = [];
2058
- for (const match of matches) {
2059
- const filePath = match.file ?? "unknown";
2060
- const relativePath = path3.relative(searchPath, filePath).replace(/\\/g, "/") || filePath;
2061
- const lineNum = (match.range?.start?.line ?? 0) + 1;
2062
- const matchText = (match.lines ?? match.text ?? "").trimEnd();
2063
- const firstLine = matchText.split("\n")[0] ?? "";
2064
- const { text: truncatedText, wasTruncated } = truncateLine(firstLine);
2065
- if (wasTruncated) linesTruncated = true;
2066
- outputLines.push(`${relativePath}:${lineNum}: ${truncatedText}`);
2067
- }
2068
- const rawOutput = outputLines.join("\n");
2069
- const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
2070
- let output = truncation.content;
2071
- const details = {};
2072
- const notices = [];
2073
- if (killedDueToCap) {
2074
- notices.push(
2075
- `Output exceeded 2MB cap. Refine your pattern for more precise results`
2076
- );
2077
- }
2078
- if (matchLimitReached) {
2079
- notices.push(
2080
- `${effectiveLimit} matches limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`
2081
- );
2082
- details.matchLimitReached = effectiveLimit;
2083
- }
2084
- if (truncation.truncated) {
2085
- notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
2086
- details.truncation = truncation;
2087
- }
2088
- if (linesTruncated) {
2089
- notices.push(
2090
- `Some lines truncated to ${GREP_MAX_LINE_LENGTH} chars. Use read tool to see full lines`
2091
- );
2092
- details.linesTruncated = true;
2093
- }
2094
- if (notices.length > 0) output += `
2095
-
2096
- [${notices.join(". ")}]`;
2097
- settle(
2098
- () => resolve6({
2099
- content: [{ type: "text", text: output }],
2100
- details: Object.keys(details).length > 0 ? details : void 0
2101
- })
2102
- );
2103
- });
2491
+ }
2104
2492
  } catch {
2105
- settle(() => reject(new Error("Error processing ast-grep request")));
2493
+ rows = await runFtsQuery(sanitized);
2494
+ effectiveQuery = sanitized;
2106
2495
  }
2107
- })();
2108
- });
2496
+ } else {
2497
+ rows = await runFtsQuery(rawQuery);
2498
+ }
2499
+ } catch {
2500
+ effectiveQuery = sanitizeQuery(rawQuery);
2501
+ if (!effectiveQuery) {
2502
+ return {
2503
+ content: [
2504
+ {
2505
+ type: "text",
2506
+ text: `Query "${rawQuery}" contains only special characters \u2014 no searchable tokens remain. Try using filePath or pattern instead.`
2507
+ }
2508
+ ],
2509
+ details: void 0
2510
+ };
2511
+ }
2512
+ try {
2513
+ rows = await runFtsQuery(effectiveQuery);
2514
+ } catch (err2) {
2515
+ const msg = err2 instanceof Error ? err2.message : String(err2);
2516
+ return {
2517
+ content: [
2518
+ {
2519
+ type: "text",
2520
+ text: `FTS query error even after sanitization.
2521
+ Original: "${rawQuery}"
2522
+ Sanitized: "${effectiveQuery}"
2523
+
2524
+ Details: ${msg}`
2525
+ }
2526
+ ],
2527
+ details: void 0
2528
+ };
2529
+ }
2530
+ }
2531
+ if (rows.length === 0) {
2532
+ return {
2533
+ content: [{ type: "text", text: "No matches found" }],
2534
+ details: void 0
2535
+ };
2536
+ }
2537
+ const tokens = compiledRegex ? [] : extractTokens(effectiveQuery);
2538
+ const { outputLines, linesTruncated, matchLimitReached } = formatOutput(
2539
+ rows,
2540
+ tokens,
2541
+ compiledRegex,
2542
+ limit,
2543
+ multiline
2544
+ );
2545
+ return buildResult(outputLines, linesTruncated, matchLimitReached, limit, offset);
2109
2546
  }
2110
2547
  };
2111
2548
  }
2112
- var astGrepTool = createAstGrepTool(process.cwd());
2549
+ var codeSearchTool = createCodeSearchTool(
2550
+ process.cwd(),
2551
+ ""
2552
+ );
2113
2553
 
2114
2554
  // src/tools/ls.ts
2115
2555
  import { Type as Type6 } from "@sinclair/typebox";
2116
- import { existsSync as existsSync5, readdirSync as readdirSync2, statSync as statSync2 } from "fs";
2556
+ import { existsSync as existsSync4, readdirSync, statSync as statSync2 } from "fs";
2117
2557
  import nodePath from "path";
2118
2558
  var lsSchema = Type6.Object({
2119
2559
  path: Type6.Optional(
@@ -2125,9 +2565,9 @@ var lsSchema = Type6.Object({
2125
2565
  });
2126
2566
  var DEFAULT_LIMIT4 = 500;
2127
2567
  var defaultLsOperations = {
2128
- exists: existsSync5,
2568
+ exists: existsSync4,
2129
2569
  stat: statSync2,
2130
- readdir: readdirSync2
2570
+ readdir: readdirSync
2131
2571
  };
2132
2572
  function createLsTool(cwd, options) {
2133
2573
  const ops = options?.operations ?? defaultLsOperations;
@@ -2236,8 +2676,8 @@ var readSchema = Type7.Object({
2236
2676
  limit: Type7.Optional(Type7.Number({ description: "Maximum number of lines to read" }))
2237
2677
  });
2238
2678
  var defaultReadOperations = {
2239
- readFile: (path7) => fsReadFile2(path7),
2240
- access: (path7) => fsAccess2(path7, constants3.R_OK)
2679
+ readFile: (path6) => fsReadFile2(path6),
2680
+ access: (path6) => fsAccess2(path6, constants3.R_OK)
2241
2681
  };
2242
2682
  function createReadTool(cwd, options) {
2243
2683
  const ops = options?.operations ?? defaultReadOperations;
@@ -2246,8 +2686,8 @@ function createReadTool(cwd, options) {
2246
2686
  label: "read",
2247
2687
  description: `Read the contents of a text file. Output is truncated to ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Use offset/limit for large files.`,
2248
2688
  parameters: readSchema,
2249
- execute: async (_toolCallId, { path: path7, offset, limit }, signal) => {
2250
- const absolutePath = resolveReadPath(path7, cwd);
2689
+ execute: async (_toolCallId, { path: path6, offset, limit }, signal) => {
2690
+ const absolutePath = resolveReadPath(path6, cwd);
2251
2691
  validatePathWithinCwd(absolutePath, cwd);
2252
2692
  return withAbortSignal(signal, async (isAborted) => {
2253
2693
  await ops.access(absolutePath);
@@ -2276,7 +2716,7 @@ function createReadTool(cwd, options) {
2276
2716
  let outputText;
2277
2717
  let details;
2278
2718
  if (truncation.firstLineExceedsLimit) {
2279
- outputText = `[Line ${startLineDisplay} is ${formatSize(Buffer.byteLength(allLines[startLine], "utf-8"))}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path7} | head -c ${DEFAULT_MAX_BYTES}]`;
2719
+ outputText = `[Line ${startLineDisplay} is ${formatSize(Buffer.byteLength(allLines[startLine], "utf-8"))}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path6} | head -c ${DEFAULT_MAX_BYTES}]`;
2280
2720
  details = { truncation };
2281
2721
  } else if (truncation.truncated) {
2282
2722
  const endLineDisplay = startLineDisplay + truncation.outputLines - 1;
@@ -2319,7 +2759,7 @@ var writeSchema = Type8.Object({
2319
2759
  content: Type8.String({ description: "Content to write to the file" })
2320
2760
  });
2321
2761
  var defaultWriteOperations = {
2322
- writeFile: (path7, content) => fsWriteFile2(path7, content, "utf-8"),
2762
+ writeFile: (path6, content) => fsWriteFile2(path6, content, "utf-8"),
2323
2763
  mkdir: (dir) => fsMkdir(dir, { recursive: true }).then(() => {
2324
2764
  })
2325
2765
  };
@@ -2330,8 +2770,8 @@ function createWriteTool(cwd, options) {
2330
2770
  label: "write",
2331
2771
  description: "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Automatically creates parent directories.",
2332
2772
  parameters: writeSchema,
2333
- execute: async (_toolCallId, { path: path7, content }, signal) => {
2334
- const absolutePath = resolveToCwd(path7, cwd);
2773
+ execute: async (_toolCallId, { path: path6, content }, signal) => {
2774
+ const absolutePath = resolveToCwd(path6, cwd);
2335
2775
  validatePathWithinCwd(absolutePath, cwd);
2336
2776
  const dir = dirname(absolutePath);
2337
2777
  return new Promise(
@@ -2355,7 +2795,7 @@ function createWriteTool(cwd, options) {
2355
2795
  if (signal) signal.removeEventListener("abort", onAbort);
2356
2796
  resolve6({
2357
2797
  content: [
2358
- { type: "text", text: `Successfully wrote ${content.length} bytes to ${path7}` }
2798
+ { type: "text", text: `Successfully wrote ${content.length} bytes to ${path6}` }
2359
2799
  ],
2360
2800
  details: void 0
2361
2801
  });
@@ -2374,11 +2814,11 @@ var writeTool = createWriteTool(process.cwd());
2374
2814
  // src/tools/skill-tool.ts
2375
2815
  init_utils();
2376
2816
  import { readdir } from "fs/promises";
2377
- import { join as join2, extname } from "path";
2378
- import { existsSync as existsSync6, statSync as statSync3 } from "fs";
2817
+ import { join as join4, extname as extname2 } from "path";
2818
+ import { existsSync as existsSync5, statSync as statSync3 } from "fs";
2379
2819
  import { Type as Type9 } from "@sinclair/typebox";
2380
2820
  function getInterpreter(scriptPath) {
2381
- const ext = extname(scriptPath);
2821
+ const ext = extname2(scriptPath);
2382
2822
  switch (ext) {
2383
2823
  case ".sh":
2384
2824
  case ".bash":
@@ -2468,8 +2908,8 @@ function createSkillScriptTool(skills, cwd) {
2468
2908
  details: void 0
2469
2909
  };
2470
2910
  }
2471
- const scriptsDir = join2(skill.skillPath, "scripts");
2472
- if (!existsSync6(scriptsDir)) {
2911
+ const scriptsDir = join4(skill.skillPath, "scripts");
2912
+ if (!existsSync5(scriptsDir)) {
2473
2913
  return {
2474
2914
  content: [
2475
2915
  { type: "text", text: `Skill '${skillName}' has no scripts directory` }
@@ -2477,8 +2917,8 @@ function createSkillScriptTool(skills, cwd) {
2477
2917
  details: void 0
2478
2918
  };
2479
2919
  }
2480
- const scriptPath = join2(scriptsDir, scriptName);
2481
- if (!existsSync6(scriptPath)) {
2920
+ const scriptPath = join4(scriptsDir, scriptName);
2921
+ if (!existsSync5(scriptPath)) {
2482
2922
  try {
2483
2923
  const availableScripts = await readdir(scriptsDir);
2484
2924
  return {
@@ -2537,14 +2977,14 @@ ${result.stderr}
2537
2977
  }
2538
2978
 
2539
2979
  // src/tools/skill-reference-tool.ts
2540
- import { readFile, readdir as readdir2 } from "fs/promises";
2541
- import { join as join3, normalize, relative as relative2 } from "path";
2542
- import { existsSync as existsSync7 } from "fs";
2980
+ import { readFile as readFile2, readdir as readdir2 } from "fs/promises";
2981
+ import { join as join5, normalize, relative as relative3 } from "path";
2982
+ import { existsSync as existsSync6 } from "fs";
2543
2983
  import { Type as Type10 } from "@sinclair/typebox";
2544
2984
  function isPathSafe(basePath, requestedPath) {
2545
2985
  const normalized = normalize(requestedPath);
2546
- const fullPath = join3(basePath, normalized);
2547
- const relativePath = relative2(basePath, fullPath);
2986
+ const fullPath = join5(basePath, normalized);
2987
+ const relativePath = relative3(basePath, fullPath);
2548
2988
  return !relativePath.startsWith("..") && !relativePath.startsWith("/");
2549
2989
  }
2550
2990
  var skillReferenceSchema = Type10.Object({
@@ -2572,8 +3012,8 @@ function createSkillReferenceTool(skills) {
2572
3012
  details: void 0
2573
3013
  };
2574
3014
  }
2575
- const referencesDir = join3(skill.skillPath, "references");
2576
- if (!existsSync7(referencesDir)) {
3015
+ const referencesDir = join5(skill.skillPath, "references");
3016
+ if (!existsSync6(referencesDir)) {
2577
3017
  return {
2578
3018
  content: [
2579
3019
  { type: "text", text: `Skill '${skillName}' has no references directory` }
@@ -2592,8 +3032,8 @@ function createSkillReferenceTool(skills) {
2592
3032
  details: void 0
2593
3033
  };
2594
3034
  }
2595
- const fullPath = join3(referencesDir, referencePath);
2596
- if (!existsSync7(fullPath)) {
3035
+ const fullPath = join5(referencesDir, referencePath);
3036
+ if (!existsSync6(fullPath)) {
2597
3037
  try {
2598
3038
  const availableRefs = await listReferencesRecursive(referencesDir);
2599
3039
  return {
@@ -2619,7 +3059,7 @@ ${availableRefs.join("\n")}`
2619
3059
  }
2620
3060
  }
2621
3061
  try {
2622
- const content = await readFile(fullPath, "utf-8");
3062
+ const content = await readFile2(fullPath, "utf-8");
2623
3063
  return {
2624
3064
  content: [{ type: "text", text: content }],
2625
3065
  details: void 0
@@ -2643,9 +3083,9 @@ async function listReferencesRecursive(dir, prefix = "") {
2643
3083
  try {
2644
3084
  const entries = await readdir2(dir, { withFileTypes: true });
2645
3085
  for (const entry of entries) {
2646
- const relativePath = prefix ? join3(prefix, entry.name) : entry.name;
3086
+ const relativePath = prefix ? join5(prefix, entry.name) : entry.name;
2647
3087
  if (entry.isDirectory()) {
2648
- const subFiles = await listReferencesRecursive(join3(dir, entry.name), relativePath);
3088
+ const subFiles = await listReferencesRecursive(join5(dir, entry.name), relativePath);
2649
3089
  files.push(...subFiles);
2650
3090
  } else {
2651
3091
  files.push(relativePath);
@@ -2740,13 +3180,14 @@ async function fetchAllTodos(db, threadId) {
2740
3180
  }
2741
3181
  async function addTodos(threadId, descriptions, assignedTo = "agent") {
2742
3182
  const db = await getDatabase();
2743
- const now = (/* @__PURE__ */ new Date()).toISOString();
2744
- for (const desc of descriptions) {
3183
+ const baseTime = Date.now();
3184
+ for (let i = 0; i < descriptions.length; i++) {
2745
3185
  const id = randomUUID();
3186
+ const ts = new Date(baseTime + i).toISOString();
2746
3187
  await db.execute({
2747
3188
  sql: `INSERT INTO todos (id, threadId, description, status, assignedTo, createdAt, updatedAt)
2748
3189
  VALUES (?, ?, ?, 'pending', ?, ?, ?)`,
2749
- args: [id, threadId, desc.trim(), assignedTo, now, now]
3190
+ args: [id, threadId, descriptions[i].trim(), assignedTo, ts, ts]
2750
3191
  });
2751
3192
  }
2752
3193
  return fetchAllTodos(db, threadId);
@@ -2866,18 +3307,11 @@ function createTodoTool(threadId) {
2866
3307
  };
2867
3308
  }
2868
3309
  const todosAfterUpdate = await fetchAllTodos(db, threadId);
2869
- const allDone = todosAfterUpdate.length > 0 && todosAfterUpdate.every((t) => t.status === "done");
2870
- if (allDone) {
2871
- await db.execute({
2872
- sql: `DELETE FROM todos WHERE threadId = ?`,
2873
- args: [threadId]
2874
- });
2875
- }
2876
3310
  return {
2877
3311
  content: [
2878
3312
  {
2879
3313
  type: "text",
2880
- text: JSON.stringify({ todos: allDone ? [] : todosAfterUpdate })
3314
+ text: JSON.stringify({ todos: todosAfterUpdate })
2881
3315
  }
2882
3316
  ],
2883
3317
  details: void 0
@@ -2936,16 +3370,16 @@ function createTodoTool(threadId) {
2936
3370
  var todoTool = createTodoTool("");
2937
3371
 
2938
3372
  // src/features/mcp/mcp.config.ts
2939
- import { readFile as readFile2, stat, access } from "fs/promises";
2940
- import { join as join5 } from "path";
3373
+ import { readFile as readFile3, stat, access } from "fs/promises";
3374
+ import { join as join6 } from "path";
2941
3375
  var MCP_CONFIG_PATHS = [".agents/mcp.json", "mcp.json"];
2942
3376
  async function loadMCPConfig(projectPath) {
2943
3377
  for (const configPath of MCP_CONFIG_PATHS) {
2944
- const fullPath = join5(projectPath, configPath);
3378
+ const fullPath = join6(projectPath, configPath);
2945
3379
  try {
2946
3380
  await access(fullPath);
2947
3381
  const fileStats = await stat(fullPath);
2948
- const fileContent = await readFile2(fullPath, "utf-8");
3382
+ const fileContent = await readFile3(fullPath, "utf-8");
2949
3383
  const rawConfig = JSON.parse(fileContent);
2950
3384
  const serversData = rawConfig.servers || rawConfig.mcpServers || {};
2951
3385
  const config = {
@@ -2970,7 +3404,7 @@ async function loadMCPConfig(projectPath) {
2970
3404
  }
2971
3405
  async function getMCPConfigModificationTime(projectPath) {
2972
3406
  for (const configPath of MCP_CONFIG_PATHS) {
2973
- const fullPath = join5(projectPath, configPath);
3407
+ const fullPath = join6(projectPath, configPath);
2974
3408
  try {
2975
3409
  await access(fullPath);
2976
3410
  const fileStats = await stat(fullPath);
@@ -4003,11 +4437,15 @@ var runWebWorkerSchema = Type15.Object({
4003
4437
  })
4004
4438
  });
4005
4439
  var pendingTasks = /* @__PURE__ */ new Map();
4440
+ var bufferedResults = /* @__PURE__ */ new Map();
4006
4441
  function submitWebWorkerResult(toolCallId, result) {
4007
4442
  const pending = pendingTasks.get(toolCallId);
4008
- if (!pending) return false;
4009
- pending.resolve(result);
4010
- pendingTasks.delete(toolCallId);
4443
+ if (pending) {
4444
+ pending.resolve(result);
4445
+ pendingTasks.delete(toolCallId);
4446
+ return true;
4447
+ }
4448
+ bufferedResults.set(toolCallId, result);
4011
4449
  return true;
4012
4450
  }
4013
4451
  function cancelPendingWebWorker(toolCallId) {
@@ -4017,6 +4455,7 @@ function cancelPendingWebWorker(toolCallId) {
4017
4455
  pendingTasks.delete(toolCallId);
4018
4456
  return true;
4019
4457
  }
4458
+ var DEFAULT_RUN_JS_TIMEOUT_MS = 3e4;
4020
4459
  function buildDescription(tools) {
4021
4460
  let desc = "Execute javascript code in a web worker (not nodejs). Use this when running JS code is the easiest way to accomplish the goal, such as data transformations, calculations, or orchestrating multiple tool calls programmatically. Returns the result of the execution. You can call other tools by using `await callTool('toolName', { arg: 'value' })`. callTool always returns `{ text: string | null, error: string | null }`. On success `text` contains the result and `error` is null. On failure `error` contains the error message and `text` is null. Example: `const r = await callTool('read', { path: 'foo.ts' }); if (r.error) return r.error; return r.text;`. Write the most compact code possible: no comments, short variable names, minimal whitespace.";
4022
4461
  if (tools && tools.length > 0) {
@@ -4029,6 +4468,7 @@ ${signatures}`;
4029
4468
  return desc;
4030
4469
  }
4031
4470
  function createRunWebWorkerTool(options) {
4471
+ const timeoutMs = options?.timeoutMs ?? DEFAULT_RUN_JS_TIMEOUT_MS;
4032
4472
  return {
4033
4473
  name: "run_js",
4034
4474
  label: "run_js",
@@ -4040,16 +4480,55 @@ function createRunWebWorkerTool(options) {
4040
4480
  throw new Error("Operation aborted");
4041
4481
  }
4042
4482
  const result = await new Promise((resolve6, reject) => {
4043
- pendingTasks.set(toolCallId, {
4044
- resolve: resolve6,
4045
- reject,
4046
- code
4047
- });
4483
+ const earlyResult = bufferedResults.get(toolCallId);
4484
+ if (earlyResult !== void 0) {
4485
+ bufferedResults.delete(toolCallId);
4486
+ resolve6(earlyResult);
4487
+ return;
4488
+ }
4489
+ let timeoutHandle = null;
4048
4490
  const onAbort = () => {
4049
4491
  cancelPendingWebWorker(toolCallId);
4050
4492
  };
4493
+ function cleanup() {
4494
+ signal?.removeEventListener("abort", onAbort);
4495
+ if (timeoutHandle) {
4496
+ clearTimeout(timeoutHandle);
4497
+ }
4498
+ }
4499
+ const wrappedResolve = (value) => {
4500
+ cleanup();
4501
+ resolve6(value);
4502
+ };
4503
+ const wrappedReject = (error) => {
4504
+ cleanup();
4505
+ reject(error);
4506
+ };
4507
+ pendingTasks.set(toolCallId, {
4508
+ resolve: wrappedResolve,
4509
+ reject: wrappedReject,
4510
+ code
4511
+ });
4051
4512
  signal?.addEventListener("abort", onAbort, { once: true });
4513
+ timeoutHandle = setTimeout(() => {
4514
+ const pending = pendingTasks.get(toolCallId);
4515
+ if (!pending) return;
4516
+ pendingTasks.delete(toolCallId);
4517
+ wrappedReject(
4518
+ new Error(
4519
+ `run_js timed out after ${timeoutMs}ms while waiting for browser worker response`
4520
+ )
4521
+ );
4522
+ }, timeoutMs);
4052
4523
  });
4524
+ if (isRunJsErrorPayload(result)) {
4525
+ throw new Error(`run_js execution failed: ${result.error}`);
4526
+ }
4527
+ if (result === void 0 && looksLikeMissingReturn(code)) {
4528
+ throw new Error(
4529
+ "run_js result was undefined. The script likely forgot to return a value (for example: `return JSON.stringify(...)`)."
4530
+ );
4531
+ }
4053
4532
  return {
4054
4533
  content: [
4055
4534
  {
@@ -4062,6 +4541,28 @@ function createRunWebWorkerTool(options) {
4062
4541
  }
4063
4542
  };
4064
4543
  }
4544
+ function isRunJsErrorPayload(result) {
4545
+ if (!result || typeof result !== "object") {
4546
+ return false;
4547
+ }
4548
+ if (!("error" in result)) {
4549
+ return false;
4550
+ }
4551
+ const error = result.error;
4552
+ return typeof error === "string" && error.length > 0;
4553
+ }
4554
+ function looksLikeMissingReturn(code) {
4555
+ if (/\breturn\b/.test(code)) {
4556
+ return false;
4557
+ }
4558
+ if (/\bJSON\.stringify\s*\(/.test(code)) {
4559
+ return true;
4560
+ }
4561
+ if (/\bawait\s+callTool\b/.test(code) && /;\s*$/.test(code.trim())) {
4562
+ return true;
4563
+ }
4564
+ return false;
4565
+ }
4065
4566
  var runWebWorkerTool = createRunWebWorkerTool();
4066
4567
 
4067
4568
  // src/tools/tool-search.ts
@@ -4179,6 +4680,45 @@ function formatDeferredToolsList(deferredTools) {
4179
4680
  }
4180
4681
 
4181
4682
  // src/tools/index.ts
4683
+ var INDEX_INVALIDATING_TOOL_NAMES = /* @__PURE__ */ new Set(["bash", "edit", "write"]);
4684
+ function shouldInvalidateCodeSearchIndex(toolName) {
4685
+ return INDEX_INVALIDATING_TOOL_NAMES.has(toolName);
4686
+ }
4687
+ function shouldInvalidateCodeSearchIndexOnError(toolName) {
4688
+ return toolName === "bash";
4689
+ }
4690
+ async function invalidateCodeSearchIndexSafely(threadId, toolName) {
4691
+ try {
4692
+ await invalidateThreadIndex(threadId);
4693
+ console.log(`[code-search] invalidated thread index after ${toolName}`);
4694
+ } catch (error) {
4695
+ console.warn(`[code-search] failed to invalidate thread index after ${toolName}:`, error);
4696
+ }
4697
+ }
4698
+ function wrapToolWithCodeSearchInvalidation(tool, threadId) {
4699
+ if (!threadId || !shouldInvalidateCodeSearchIndex(tool.name)) {
4700
+ return tool;
4701
+ }
4702
+ const originalExecute = tool.execute.bind(tool);
4703
+ return {
4704
+ ...tool,
4705
+ execute: async (toolCallId, inputs, signal, onUpdate) => {
4706
+ let success = false;
4707
+ try {
4708
+ const result = await originalExecute(toolCallId, inputs, signal, onUpdate);
4709
+ success = true;
4710
+ return result;
4711
+ } finally {
4712
+ if (success || shouldInvalidateCodeSearchIndexOnError(tool.name)) {
4713
+ await invalidateCodeSearchIndexSafely(threadId, tool.name);
4714
+ }
4715
+ }
4716
+ }
4717
+ };
4718
+ }
4719
+ function applyCodeSearchInvalidationHooks(tools, threadId) {
4720
+ return tools.map((tool) => wrapToolWithCodeSearchInvalidation(tool, threadId));
4721
+ }
4182
4722
  async function createCodingTools(cwd, options) {
4183
4723
  const deferOption = options?.deferTools ?? true;
4184
4724
  const coreTools = [
@@ -4189,7 +4729,7 @@ async function createCodingTools(cwd, options) {
4189
4729
  createAskUserTool(),
4190
4730
  createTodoTool(options?.threadId ?? ""),
4191
4731
  createFindTool(cwd),
4192
- createAstGrepTool(cwd),
4732
+ createCodeSearchTool(cwd, options?.threadId ?? ""),
4193
4733
  createRunWebWorkerTool()
4194
4734
  ];
4195
4735
  const optionalTools = [];
@@ -4237,7 +4777,8 @@ async function createCodingTools(cwd, options) {
4237
4777
  `[Tools] ${deferredTools.size} tool(s) deferred behind tool_search: ${Array.from(deferredTools.keys()).join(", ")}`
4238
4778
  );
4239
4779
  }
4240
- return { tools, deferredToolNames: deferredTools };
4780
+ const wrappedTools = applyCodeSearchInvalidationHooks(tools, options?.threadId);
4781
+ return { tools: wrappedTools, deferredToolNames: deferredTools };
4241
4782
  }
4242
4783
  function createAllTools(cwd, options) {
4243
4784
  const tools = {
@@ -4246,7 +4787,7 @@ function createAllTools(cwd, options) {
4246
4787
  edit: createEditTool(cwd),
4247
4788
  write: createWriteTool(cwd),
4248
4789
  grep: createGrepTool(cwd),
4249
- ast_grep: createAstGrepTool(cwd),
4790
+ code_search: createCodeSearchTool(cwd, options?.threadId ?? ""),
4250
4791
  find: createFindTool(cwd),
4251
4792
  ask_user: createAskUserTool(),
4252
4793
  ls: createLsTool(cwd),
@@ -4259,6 +4800,11 @@ function createAllTools(cwd, options) {
4259
4800
  });
4260
4801
  }
4261
4802
  tools.find_images = createFindImagesTool(cwd);
4803
+ if (options?.threadId) {
4804
+ for (const [name, tool] of Object.entries(tools)) {
4805
+ tools[name] = wrapToolWithCodeSearchInvalidation(tool, options.threadId);
4806
+ }
4807
+ }
4262
4808
  return tools;
4263
4809
  }
4264
4810
 
@@ -4721,21 +5267,21 @@ function createAgentTool(options) {
4721
5267
  }
4722
5268
 
4723
5269
  // src/core/paths.ts
4724
- import { join as join6 } from "path";
5270
+ import { join as join7 } from "path";
4725
5271
  import { homedir as homedir3 } from "os";
4726
- var APP_SUPPORT_DIR = join6(homedir3(), "Library", "Application Support", "Tarsk");
4727
- var DATA_DIR = join6(APP_SUPPORT_DIR, "data");
5272
+ var APP_SUPPORT_DIR = join7(homedir3(), "Library", "Application Support", "Tarsk");
5273
+ var DATA_DIR = join7(APP_SUPPORT_DIR, "data");
4728
5274
  function getDataDir() {
4729
5275
  return DATA_DIR;
4730
5276
  }
4731
5277
 
4732
5278
  // src/agent/agent.prompt-loader.ts
4733
5279
  import { resolve } from "path";
4734
- import { readFileSync as readFileSync3 } from "fs";
5280
+ import { readFileSync as readFileSync4 } from "fs";
4735
5281
 
4736
5282
  // src/project-analyzer.ts
4737
- import { readFileSync as readFileSync2, existsSync as existsSync8 } from "fs";
4738
- import { join as join7 } from "path";
5283
+ import { readFileSync as readFileSync3, existsSync as existsSync7 } from "fs";
5284
+ import { join as join8 } from "path";
4739
5285
  var ProjectAnalyzer = class {
4740
5286
  projectPath;
4741
5287
  constructor(projectPath = process.cwd()) {
@@ -4746,10 +5292,10 @@ var ProjectAnalyzer = class {
4746
5292
  return this.generateDescription(info);
4747
5293
  }
4748
5294
  getProjectInfo() {
4749
- const packageJsonPath = join7(this.projectPath, "package.json");
5295
+ const packageJsonPath = join8(this.projectPath, "package.json");
4750
5296
  const info = { description: "" };
4751
- if (existsSync8(packageJsonPath)) {
4752
- const packageJson = JSON.parse(readFileSync2(packageJsonPath, "utf-8"));
5297
+ if (existsSync7(packageJsonPath)) {
5298
+ const packageJson = JSON.parse(readFileSync3(packageJsonPath, "utf-8"));
4753
5299
  const allDeps = { ...packageJson.dependencies, ...packageJson.devDependencies };
4754
5300
  this.detectFramework(allDeps, info);
4755
5301
  this.detectBuildTool(packageJson.scripts, allDeps, info);
@@ -4788,15 +5334,15 @@ var ProjectAnalyzer = class {
4788
5334
  }
4789
5335
  }
4790
5336
  detectBuildTool(_scripts = {}, deps, info) {
4791
- if (deps.vite || existsSync8(join7(this.projectPath, "vite.config.js")) || existsSync8(join7(this.projectPath, "vite.config.ts"))) {
5337
+ if (deps.vite || existsSync7(join8(this.projectPath, "vite.config.js")) || existsSync7(join8(this.projectPath, "vite.config.ts"))) {
4792
5338
  info.buildTool = "Vite";
4793
5339
  return;
4794
5340
  }
4795
- if (deps.webpack || existsSync8(join7(this.projectPath, "webpack.config.js"))) {
5341
+ if (deps.webpack || existsSync7(join8(this.projectPath, "webpack.config.js"))) {
4796
5342
  info.buildTool = "Webpack";
4797
5343
  return;
4798
5344
  }
4799
- if (deps.rollup || existsSync8(join7(this.projectPath, "rollup.config.js"))) {
5345
+ if (deps.rollup || existsSync7(join8(this.projectPath, "rollup.config.js"))) {
4800
5346
  info.buildTool = "Rollup";
4801
5347
  return;
4802
5348
  }
@@ -4846,40 +5392,40 @@ var ProjectAnalyzer = class {
4846
5392
  info.uiLibraries = [...new Set(uiLibs)].filter(Boolean);
4847
5393
  }
4848
5394
  detectProjectType(info) {
4849
- if (existsSync8(join7(this.projectPath, ".xcodeproj")) || existsSync8(join7(this.projectPath, ".xcworkspace")) || existsSync8(join7(this.projectPath, "project.pbxproj"))) {
5395
+ if (existsSync7(join8(this.projectPath, ".xcodeproj")) || existsSync7(join8(this.projectPath, ".xcworkspace")) || existsSync7(join8(this.projectPath, "project.pbxproj"))) {
4850
5396
  info.projectType = "Xcode";
4851
- } else if (existsSync8(join7(this.projectPath, "build.gradle")) || existsSync8(join7(this.projectPath, "build.gradle.kts")) || existsSync8(join7(this.projectPath, "app/build.gradle")) || existsSync8(join7(this.projectPath, "settings.gradle"))) {
5397
+ } else if (existsSync7(join8(this.projectPath, "build.gradle")) || existsSync7(join8(this.projectPath, "build.gradle.kts")) || existsSync7(join8(this.projectPath, "app/build.gradle")) || existsSync7(join8(this.projectPath, "settings.gradle"))) {
4852
5398
  info.projectType = "Android Studio";
4853
- } else if (existsSync8(join7(this.projectPath, "pubspec.yaml"))) {
5399
+ } else if (existsSync7(join8(this.projectPath, "pubspec.yaml"))) {
4854
5400
  info.projectType = "Flutter";
4855
- } else if (existsSync8(join7(this.projectPath, "go.mod"))) {
5401
+ } else if (existsSync7(join8(this.projectPath, "go.mod"))) {
4856
5402
  info.projectType = "Go";
4857
- } else if (existsSync8(join7(this.projectPath, "Cargo.toml"))) {
5403
+ } else if (existsSync7(join8(this.projectPath, "Cargo.toml"))) {
4858
5404
  info.projectType = "Rust";
4859
- } else if (existsSync8(join7(this.projectPath, "requirements.txt")) || existsSync8(join7(this.projectPath, "pyproject.toml")) || existsSync8(join7(this.projectPath, "setup.py"))) {
5405
+ } else if (existsSync7(join8(this.projectPath, "requirements.txt")) || existsSync7(join8(this.projectPath, "pyproject.toml")) || existsSync7(join8(this.projectPath, "setup.py"))) {
4860
5406
  info.projectType = "Python";
4861
- } else if (existsSync8(join7(this.projectPath, "Gemfile"))) {
5407
+ } else if (existsSync7(join8(this.projectPath, "Gemfile"))) {
4862
5408
  info.projectType = "Ruby";
4863
- } else if (existsSync8(join7(this.projectPath, "composer.json"))) {
5409
+ } else if (existsSync7(join8(this.projectPath, "composer.json"))) {
4864
5410
  info.projectType = "PHP";
4865
- } else if (existsSync8(join7(this.projectPath, "pom.xml")) || existsSync8(join7(this.projectPath, "build.xml"))) {
5411
+ } else if (existsSync7(join8(this.projectPath, "pom.xml")) || existsSync7(join8(this.projectPath, "build.xml"))) {
4866
5412
  info.projectType = "Java";
4867
- } else if (existsSync8(join7(this.projectPath, ".csproj")) || existsSync8(join7(this.projectPath, "project.json"))) {
5413
+ } else if (existsSync7(join8(this.projectPath, ".csproj")) || existsSync7(join8(this.projectPath, "project.json"))) {
4868
5414
  info.projectType = ".NET";
4869
5415
  }
4870
5416
  }
4871
5417
  detectPackageManager(info) {
4872
- if (existsSync8(join7(this.projectPath, "bun.lockb"))) {
5418
+ if (existsSync7(join8(this.projectPath, "bun.lockb"))) {
4873
5419
  info.packageManager = "Bun";
4874
- } else if (existsSync8(join7(this.projectPath, "bun.lock"))) {
5420
+ } else if (existsSync7(join8(this.projectPath, "bun.lock"))) {
4875
5421
  info.packageManager = "Bun";
4876
- } else if (existsSync8(join7(this.projectPath, "pnpm-lock.yaml"))) {
5422
+ } else if (existsSync7(join8(this.projectPath, "pnpm-lock.yaml"))) {
4877
5423
  info.packageManager = "pnpm";
4878
- } else if (existsSync8(join7(this.projectPath, "yarn.lock"))) {
5424
+ } else if (existsSync7(join8(this.projectPath, "yarn.lock"))) {
4879
5425
  info.packageManager = "Yarn";
4880
- } else if (existsSync8(join7(this.projectPath, "package-lock.json"))) {
5426
+ } else if (existsSync7(join8(this.projectPath, "package-lock.json"))) {
4881
5427
  info.packageManager = "npm";
4882
- } else if (existsSync8(join7(this.projectPath, "npm-shrinkwrap.json"))) {
5428
+ } else if (existsSync7(join8(this.projectPath, "npm-shrinkwrap.json"))) {
4883
5429
  info.packageManager = "npm";
4884
5430
  }
4885
5431
  }
@@ -4941,9 +5487,9 @@ function analyzeProject(projectPath) {
4941
5487
  }
4942
5488
 
4943
5489
  // src/features/rules/rules.manager.ts
4944
- import { readdir as readdir3, readFile as readFile3 } from "fs/promises";
4945
- import { join as join8, relative as relative3 } from "path";
4946
- import { existsSync as existsSync9 } from "fs";
5490
+ import { readdir as readdir3, readFile as readFile4 } from "fs/promises";
5491
+ import { join as join9, relative as relative4 } from "path";
5492
+ import { existsSync as existsSync8 } from "fs";
4947
5493
  import { homedir as homedir4 } from "os";
4948
5494
  function parseRuleFrontmatter(markdown) {
4949
5495
  const lines = markdown.split("\n");
@@ -4986,10 +5532,10 @@ function parseRuleFrontmatter(markdown) {
4986
5532
  return { metadata, content };
4987
5533
  }
4988
5534
  function getGlobalRulesDir() {
4989
- return join8(homedir4(), ".agents", "rules");
5535
+ return join9(homedir4(), ".agents", "rules");
4990
5536
  }
4991
5537
  function getProjectRulesDir(threadPath) {
4992
- return join8(threadPath, ".agents", "rules");
5538
+ return join9(threadPath, ".agents", "rules");
4993
5539
  }
4994
5540
  var RuleManager = class {
4995
5541
  /**
@@ -4999,14 +5545,14 @@ var RuleManager = class {
4999
5545
  async loadRules(threadPath) {
5000
5546
  const rules = /* @__PURE__ */ new Map();
5001
5547
  const globalDir = getGlobalRulesDir();
5002
- if (existsSync9(globalDir)) {
5548
+ if (existsSync8(globalDir)) {
5003
5549
  const globalRules = await this.loadRulesFromDir(globalDir, threadPath, "global");
5004
5550
  for (const rule of globalRules) {
5005
5551
  rules.set(rule.name, rule);
5006
5552
  }
5007
5553
  }
5008
5554
  const projectDir = getProjectRulesDir(threadPath);
5009
- if (existsSync9(projectDir)) {
5555
+ if (existsSync8(projectDir)) {
5010
5556
  const projectRules = await this.loadRulesFromDir(projectDir, threadPath, "project");
5011
5557
  for (const rule of projectRules) {
5012
5558
  rules.set(rule.name, rule);
@@ -5022,16 +5568,16 @@ var RuleManager = class {
5022
5568
  try {
5023
5569
  const entries = await readdir3(dir, { withFileTypes: true });
5024
5570
  for (const entry of entries) {
5025
- const fullPath = join8(dir, entry.name);
5571
+ const fullPath = join9(dir, entry.name);
5026
5572
  if (entry.isDirectory()) {
5027
5573
  const subRules = await this.loadRulesFromDir(fullPath, threadPath, scope);
5028
5574
  rules.push(...subRules);
5029
5575
  } else if (entry.isFile() && (entry.name.endsWith(".md") || entry.name.endsWith(".mdc"))) {
5030
5576
  try {
5031
- const fileContent = await readFile3(fullPath, "utf-8");
5577
+ const fileContent = await readFile4(fullPath, "utf-8");
5032
5578
  const { metadata, content } = parseRuleFrontmatter(fileContent);
5033
5579
  const ruleName = entry.name.replace(/\.(md|mdc)$/, "");
5034
- const relativePath = relative3(threadPath, fullPath);
5580
+ const relativePath = relative4(threadPath, fullPath);
5035
5581
  const rule = {
5036
5582
  name: ruleName,
5037
5583
  content,
@@ -5190,7 +5736,7 @@ Remember: You are ONLY planning. Do NOT execute any changes.`;
5190
5736
  function loadDeveloperContext(threadPath) {
5191
5737
  try {
5192
5738
  const agentsPath = resolve(threadPath, "agents.md");
5193
- const agentsContent = readFileSync3(agentsPath, "utf-8").trim();
5739
+ const agentsContent = readFileSync4(agentsPath, "utf-8").trim();
5194
5740
  console.log("[ai] Successfully loaded agents.md from thread path for developer context");
5195
5741
  return agentsContent;
5196
5742
  } catch (error) {
@@ -5903,9 +6449,9 @@ import { Hono as Hono2 } from "hono";
5903
6449
  import { randomUUID as randomUUID4 } from "crypto";
5904
6450
 
5905
6451
  // src/features/skills/skills.manager.ts
5906
- import { readdir as readdir4, readFile as readFile4 } from "fs/promises";
5907
- import { join as join9 } from "path";
5908
- import { existsSync as existsSync10 } from "fs";
6452
+ import { readdir as readdir4, readFile as readFile5 } from "fs/promises";
6453
+ import { join as join10 } from "path";
6454
+ import { existsSync as existsSync9 } from "fs";
5909
6455
  import { homedir as homedir5 } from "os";
5910
6456
  function parseFrontmatter(markdown) {
5911
6457
  const lines = markdown.split("\n");
@@ -5952,10 +6498,10 @@ function parseFrontmatter(markdown) {
5952
6498
  return { metadata, content };
5953
6499
  }
5954
6500
  function getGlobalSkillsDir() {
5955
- return join9(homedir5(), ".agents", "skills");
6501
+ return join10(homedir5(), ".agents", "skills");
5956
6502
  }
5957
6503
  function getProjectSkillsDir(threadPath) {
5958
- return join9(threadPath, ".agents", "skills");
6504
+ return join10(threadPath, ".agents", "skills");
5959
6505
  }
5960
6506
  function validateSkillName(name) {
5961
6507
  if (!name || name.length === 0 || name.length > 64) {
@@ -5983,14 +6529,14 @@ var SkillManager = class {
5983
6529
  async loadSkills(threadPath) {
5984
6530
  const skills = /* @__PURE__ */ new Map();
5985
6531
  const globalDir = getGlobalSkillsDir();
5986
- if (existsSync10(globalDir)) {
6532
+ if (existsSync9(globalDir)) {
5987
6533
  const globalSkills = await this.loadSkillsFromDir(globalDir, "global");
5988
6534
  for (const skill of globalSkills) {
5989
6535
  skills.set(skill.name, skill);
5990
6536
  }
5991
6537
  }
5992
6538
  const projectDir = getProjectSkillsDir(threadPath);
5993
- if (existsSync10(projectDir)) {
6539
+ if (existsSync9(projectDir)) {
5994
6540
  const projectSkills = await this.loadSkillsFromDir(projectDir, "project");
5995
6541
  for (const skill of projectSkills) {
5996
6542
  skills.set(skill.name, skill);
@@ -6008,14 +6554,14 @@ var SkillManager = class {
6008
6554
  for (const entry of entries) {
6009
6555
  if (!entry.isDirectory()) continue;
6010
6556
  const skillDirName = entry.name;
6011
- const skillPath = join9(dir, skillDirName);
6012
- const skillFilePath = join9(skillPath, "SKILL.md");
6013
- if (!existsSync10(skillFilePath)) {
6557
+ const skillPath = join10(dir, skillDirName);
6558
+ const skillFilePath = join10(skillPath, "SKILL.md");
6559
+ if (!existsSync9(skillFilePath)) {
6014
6560
  console.warn(`Skipping skill directory ${skillDirName}: SKILL.md not found`);
6015
6561
  continue;
6016
6562
  }
6017
6563
  try {
6018
- const fileContent = await readFile4(skillFilePath, "utf-8");
6564
+ const fileContent = await readFile5(skillFilePath, "utf-8");
6019
6565
  const { metadata, content } = parseFrontmatter(fileContent);
6020
6566
  if (!metadata.name) {
6021
6567
  console.warn(`Skipping skill in ${skillDirName}: missing 'name' in frontmatter`);
@@ -6188,9 +6734,9 @@ async function activateSkills(allSkills, taskDescription, thread, options) {
6188
6734
  }
6189
6735
 
6190
6736
  // src/features/agents/agents.manager.ts
6191
- import { readdir as readdir5, readFile as readFile5 } from "fs/promises";
6192
- import { join as join10 } from "path";
6193
- import { existsSync as existsSync11 } from "fs";
6737
+ import { readdir as readdir5, readFile as readFile6 } from "fs/promises";
6738
+ import { join as join11 } from "path";
6739
+ import { existsSync as existsSync10 } from "fs";
6194
6740
  import { homedir as homedir6 } from "os";
6195
6741
  function parseFrontmatter2(markdown) {
6196
6742
  const lines = markdown.split("\n");
@@ -6247,10 +6793,10 @@ function validateDescription2(description) {
6247
6793
  return !!description && description.length > 0 && description.length <= 1024;
6248
6794
  }
6249
6795
  function getGlobalAgentsDir() {
6250
- return join10(homedir6(), ".agents", "agents");
6796
+ return join11(homedir6(), ".agents", "agents");
6251
6797
  }
6252
6798
  function getProjectAgentsDir(threadPath) {
6253
- return join10(threadPath, ".agents", "agents");
6799
+ return join11(threadPath, ".agents", "agents");
6254
6800
  }
6255
6801
  var AgentsManager = class {
6256
6802
  /**
@@ -6260,14 +6806,14 @@ var AgentsManager = class {
6260
6806
  async loadAgents(threadPath) {
6261
6807
  const agents = /* @__PURE__ */ new Map();
6262
6808
  const globalDir = getGlobalAgentsDir();
6263
- if (existsSync11(globalDir)) {
6809
+ if (existsSync10(globalDir)) {
6264
6810
  const globalAgents = await this.loadAgentsFromDir(globalDir, "global");
6265
6811
  for (const agent of globalAgents) {
6266
6812
  agents.set(agent.name, agent);
6267
6813
  }
6268
6814
  }
6269
6815
  const projectDir = getProjectAgentsDir(threadPath);
6270
- if (existsSync11(projectDir)) {
6816
+ if (existsSync10(projectDir)) {
6271
6817
  const projectAgents = await this.loadAgentsFromDir(projectDir, "project");
6272
6818
  for (const agent of projectAgents) {
6273
6819
  agents.set(agent.name, agent);
@@ -6282,14 +6828,14 @@ var AgentsManager = class {
6282
6828
  for (const entry of entries) {
6283
6829
  if (!entry.isDirectory()) continue;
6284
6830
  const agentDirName = entry.name;
6285
- const agentPath = join10(dir, agentDirName);
6286
- const agentFilePath = join10(agentPath, "AGENT.md");
6287
- if (!existsSync11(agentFilePath)) {
6831
+ const agentPath = join11(dir, agentDirName);
6832
+ const agentFilePath = join11(agentPath, "AGENT.md");
6833
+ if (!existsSync10(agentFilePath)) {
6288
6834
  console.warn(`[agents] Skipping agent directory ${agentDirName}: AGENT.md not found`);
6289
6835
  continue;
6290
6836
  }
6291
6837
  try {
6292
- const fileContent = await readFile5(agentFilePath, "utf-8");
6838
+ const fileContent = await readFile6(agentFilePath, "utf-8");
6293
6839
  const { metadata, content } = parseFrontmatter2(fileContent);
6294
6840
  if (!metadata.name) {
6295
6841
  console.warn(
@@ -6469,8 +7015,8 @@ function extractAssistantContent(events, fallback) {
6469
7015
 
6470
7016
  // src/features/chat/chat-post.route.ts
6471
7017
  init_database();
6472
- import { readFile as readFile6, unlink } from "fs/promises";
6473
- import { join as join11 } from "path";
7018
+ import { readFile as readFile7, unlink } from "fs/promises";
7019
+ import { join as join12 } from "path";
6474
7020
 
6475
7021
  // src/features/project-todos/project-todos.database.ts
6476
7022
  init_database();
@@ -7062,9 +7608,9 @@ async function postChatMessage(c, threadManager, agentExecutor, conversationMana
7062
7608
  try {
7063
7609
  const todo = await getTodoByThreadId(db, threadId);
7064
7610
  if (todo && todo.status === "Plan") {
7065
- const planFilePath = join11(threadPath, `${todo.id}-plan.md`);
7611
+ const planFilePath = join12(threadPath, `${todo.id}-plan.md`);
7066
7612
  try {
7067
- const planContent = await readFile6(planFilePath, "utf-8");
7613
+ const planContent = await readFile7(planFilePath, "utf-8");
7068
7614
  await updateTodo(db, todo.id, { description: planContent.trim() });
7069
7615
  await unlink(planFilePath);
7070
7616
  } catch {
@@ -8321,8 +8867,8 @@ async function insertProject(db, project) {
8321
8867
  try {
8322
8868
  await db.execute(
8323
8869
  `
8324
- INSERT INTO projects (id, name, gitUrl, path, createdAt, openWith, commands, setupScript, runCommand, commitMethod)
8325
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
8870
+ INSERT INTO projects (id, name, gitUrl, path, createdAt, openWith, commands, setupScript, runCommand, commitMethod, planPrompt, testPrompt, reviewPrompt)
8871
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
8326
8872
  `,
8327
8873
  [
8328
8874
  project.id,
@@ -8334,7 +8880,10 @@ async function insertProject(db, project) {
8334
8880
  project.commands ? JSON.stringify(project.commands) : null,
8335
8881
  project.setupScript ?? null,
8336
8882
  project.runCommand ?? null,
8337
- project.commitMethod ?? null
8883
+ project.commitMethod ?? null,
8884
+ project.planPrompt ?? null,
8885
+ project.testPrompt ?? null,
8886
+ project.reviewPrompt ?? null
8338
8887
  ]
8339
8888
  );
8340
8889
  } catch (error) {
@@ -8378,6 +8927,18 @@ async function updateProject(db, id, updates) {
8378
8927
  fields.push("commitMethod = ?");
8379
8928
  values.push(updates.commitMethod ?? null);
8380
8929
  }
8930
+ if (updates.planPrompt !== void 0) {
8931
+ fields.push("planPrompt = ?");
8932
+ values.push(updates.planPrompt ?? null);
8933
+ }
8934
+ if (updates.testPrompt !== void 0) {
8935
+ fields.push("testPrompt = ?");
8936
+ values.push(updates.testPrompt ?? null);
8937
+ }
8938
+ if (updates.reviewPrompt !== void 0) {
8939
+ fields.push("reviewPrompt = ?");
8940
+ values.push(updates.reviewPrompt ?? null);
8941
+ }
8381
8942
  if (fields.length === 0) {
8382
8943
  return;
8383
8944
  }
@@ -8426,33 +8987,43 @@ async function getAllProjects(db) {
8426
8987
  try {
8427
8988
  const result = await db.execute("SELECT * FROM projects ORDER BY createdAt DESC");
8428
8989
  const rows = result.rows;
8429
- return await Promise.all(rows.map((row) => deserializeProject(db, row)));
8990
+ const threadResult = await db.execute(
8991
+ "SELECT id, projectId FROM threads ORDER BY createdAt DESC"
8992
+ );
8993
+ const threadRows = threadResult.rows;
8994
+ const threadIdsByProject = /* @__PURE__ */ new Map();
8995
+ for (const row of threadRows) {
8996
+ const ids = threadIdsByProject.get(row.projectId);
8997
+ if (ids) {
8998
+ ids.push(row.id);
8999
+ } else {
9000
+ threadIdsByProject.set(row.projectId, [row.id]);
9001
+ }
9002
+ }
9003
+ return rows.map(
9004
+ (row) => deserializeProjectWithThreads(row, threadIdsByProject.get(row.id) ?? [])
9005
+ );
8430
9006
  } catch (error) {
8431
9007
  console.error("Failed to get all projects:", error);
8432
9008
  throw error;
8433
9009
  }
8434
9010
  }
8435
- async function getProjectThreadIds(db, projectId) {
8436
- const result = await db.execute(
8437
- "SELECT id FROM threads WHERE projectId = ? ORDER BY createdAt DESC",
8438
- [projectId]
8439
- );
8440
- const rows = result.rows;
8441
- return rows.map((row) => row.id);
8442
- }
8443
- async function deserializeProject(db, row) {
9011
+ function deserializeProjectWithThreads(row, threadIds) {
8444
9012
  return {
8445
9013
  id: row.id,
8446
9014
  name: row.name,
8447
9015
  gitUrl: row.gitUrl,
8448
9016
  path: row.path,
8449
9017
  createdAt: new Date(row.createdAt),
8450
- threads: await getProjectThreadIds(db, row.id),
9018
+ threads: threadIds,
8451
9019
  openWith: row.openWith ?? void 0,
8452
9020
  commands: row.commands ? JSON.parse(row.commands) : void 0,
8453
9021
  setupScript: row.setupScript ?? void 0,
8454
9022
  runCommand: row.runCommand ?? void 0,
8455
- commitMethod: row.commitMethod && isValidCommitMethod(row.commitMethod) ? row.commitMethod : void 0
9023
+ commitMethod: row.commitMethod && isValidCommitMethod(row.commitMethod) ? row.commitMethod : void 0,
9024
+ planPrompt: row.planPrompt ?? void 0,
9025
+ testPrompt: row.testPrompt ?? void 0,
9026
+ reviewPrompt: row.reviewPrompt ?? void 0
8456
9027
  };
8457
9028
  }
8458
9029
 
@@ -8541,6 +9112,12 @@ async function updateThread(db, id, updates) {
8541
9112
  async function deleteThread(db, id) {
8542
9113
  try {
8543
9114
  await markConversationHistoryAsDeleted(db, id);
9115
+ await db.execute(
9116
+ "DELETE FROM code_index WHERE rowid IN (SELECT id FROM code_files WHERE thread_id = ?)",
9117
+ [id]
9118
+ );
9119
+ await db.execute("DELETE FROM code_files WHERE thread_id = ?", [id]);
9120
+ await db.execute("DELETE FROM code_index_meta WHERE thread_id = ?", [id]);
8544
9121
  await db.execute("UPDATE threads SET status = ? WHERE id = ?", ["deleted", id]);
8545
9122
  } catch (error) {
8546
9123
  console.error("Failed to mark thread as deleted:", error);
@@ -9402,34 +9979,58 @@ async function handleOpenFolderProject(c, projectManager) {
9402
9979
  }
9403
9980
 
9404
9981
  // src/features/projects/projects-list.route.ts
9405
- async function handleListProjects(c, projectManager, threadManager) {
9982
+ function computeGitStatusDot(status) {
9983
+ if (status.hasChanges) return true;
9984
+ if (status.commitsAheadOfDefault && !status.prExists) return true;
9985
+ if (status.status === "Behind" || status.status === "Diverged") return true;
9986
+ return false;
9987
+ }
9988
+ async function handleListProjects(c, projectManager, threadManager, db) {
9406
9989
  try {
9407
9990
  const projects = await projectManager.listProjects();
9408
9991
  const selectedThreadId = await threadManager.getSelectedThreadId();
9409
- const expandedProjects = await Promise.all(
9410
- projects.map(async (project) => {
9411
- const threads = await threadManager.listThreads(project.id);
9412
- return {
9413
- projectId: project.id,
9414
- name: project.name,
9415
- gitUrl: project.gitUrl,
9416
- projectPath: project.path,
9417
- openWith: project.openWith,
9418
- commands: project.commands ?? [],
9419
- setupScript: project.setupScript,
9420
- runCommand: project.runCommand,
9421
- commitMethod: project.commitMethod,
9422
- threads: threads.map((thread) => ({
9992
+ const allThreads = await threadManager.listAllThreads();
9993
+ const threadsByProject = /* @__PURE__ */ new Map();
9994
+ for (const thread of allThreads) {
9995
+ const list = threadsByProject.get(thread.projectId);
9996
+ if (list) {
9997
+ list.push(thread);
9998
+ } else {
9999
+ threadsByProject.set(thread.projectId, [thread]);
10000
+ }
10001
+ }
10002
+ const allThreadIds = allThreads.map((t) => t.id);
10003
+ const statusCache = db ? await getGitStatusCacheBulk(db, allThreadIds) : /* @__PURE__ */ new Map();
10004
+ const expandedProjects = projects.map((project) => {
10005
+ const threads = threadsByProject.get(project.id) ?? [];
10006
+ return {
10007
+ projectId: project.id,
10008
+ name: project.name,
10009
+ gitUrl: project.gitUrl,
10010
+ projectPath: project.path,
10011
+ openWith: project.openWith,
10012
+ commands: project.commands ?? [],
10013
+ setupScript: project.setupScript,
10014
+ runCommand: project.runCommand,
10015
+ commitMethod: project.commitMethod,
10016
+ planPrompt: project.planPrompt,
10017
+ testPrompt: project.testPrompt,
10018
+ reviewPrompt: project.reviewPrompt,
10019
+ threads: threads.map((thread) => {
10020
+ const cached = statusCache.get(thread.id);
10021
+ const gitStatusDot = cached ? computeGitStatusDot(cached) : false;
10022
+ return {
9423
10023
  threadId: thread.id,
9424
10024
  title: thread.title,
9425
10025
  path: thread.path,
9426
10026
  isSelected: thread.id === selectedThreadId,
9427
10027
  model: thread.model,
9428
- modelProvider: thread.modelProvider
9429
- }))
9430
- };
9431
- })
9432
- );
10028
+ modelProvider: thread.modelProvider,
10029
+ gitStatusDot
10030
+ };
10031
+ })
10032
+ };
10033
+ });
9433
10034
  return c.json(expandedProjects);
9434
10035
  } catch (error) {
9435
10036
  return errorResponse(
@@ -9677,7 +10278,17 @@ async function handleUpdateProject(c, projectManager) {
9677
10278
  try {
9678
10279
  const projectId = c.req.param("id");
9679
10280
  const body = await c.req.json();
9680
- const { program, setupScript, name, runCommand, commitMethod, runNow } = body;
10281
+ const {
10282
+ program,
10283
+ setupScript,
10284
+ name,
10285
+ runCommand,
10286
+ commitMethod,
10287
+ runNow,
10288
+ planPrompt,
10289
+ testPrompt,
10290
+ reviewPrompt
10291
+ } = body;
9681
10292
  if (!projectId) {
9682
10293
  return errorResponse(c, ErrorCodes.INVALID_REQUEST, "Project ID is required", 400);
9683
10294
  }
@@ -9715,6 +10326,15 @@ async function handleUpdateProject(c, projectManager) {
9715
10326
  }
9716
10327
  await projectManager.updateCommitMethod(projectId, commitMethod);
9717
10328
  }
10329
+ if (planPrompt !== void 0) {
10330
+ await projectManager.updateProjectPrompt(projectId, "planPrompt", planPrompt);
10331
+ }
10332
+ if (testPrompt !== void 0) {
10333
+ await projectManager.updateProjectPrompt(projectId, "testPrompt", testPrompt);
10334
+ }
10335
+ if (reviewPrompt !== void 0) {
10336
+ await projectManager.updateProjectPrompt(projectId, "reviewPrompt", reviewPrompt);
10337
+ }
9718
10338
  if (name !== void 0) {
9719
10339
  if (!name?.trim()) {
9720
10340
  return errorResponse(c, ErrorCodes.INVALID_REQUEST, "Project name is required", 400);
@@ -9881,12 +10501,12 @@ async function handleCheckRunning(c, projectManager) {
9881
10501
  }
9882
10502
 
9883
10503
  // src/core/run-command-detector.ts
9884
- import { readFile as readFile7 } from "fs/promises";
9885
- import { join as join12 } from "path";
10504
+ import { readFile as readFile8 } from "fs/promises";
10505
+ import { join as join13 } from "path";
9886
10506
  async function detectPackageManager(projectPath) {
9887
10507
  try {
9888
- const packageJsonPath = join12(projectPath, "package.json");
9889
- const content = await readFile7(packageJsonPath, "utf-8");
10508
+ const packageJsonPath = join13(projectPath, "package.json");
10509
+ const content = await readFile8(packageJsonPath, "utf-8");
9890
10510
  const packageJson = JSON.parse(content);
9891
10511
  if (packageJson.packageManager) {
9892
10512
  const pm = packageJson.packageManager.split("@")[0];
@@ -9901,8 +10521,8 @@ async function detectPackageManager(projectPath) {
9901
10521
  }
9902
10522
  async function detectRunScripts(projectPath) {
9903
10523
  try {
9904
- const packageJsonPath = join12(projectPath, "package.json");
9905
- const content = await readFile7(packageJsonPath, "utf-8");
10524
+ const packageJsonPath = join13(projectPath, "package.json");
10525
+ const content = await readFile8(packageJsonPath, "utf-8");
9906
10526
  const packageJson = JSON.parse(content);
9907
10527
  const scripts = packageJson.scripts ?? {};
9908
10528
  return {
@@ -9947,8 +10567,8 @@ async function suggestRunCommand(projectPath) {
9947
10567
  }
9948
10568
 
9949
10569
  // src/features/projects/projects-package-scripts.route.ts
9950
- import { readFile as readFile8 } from "fs/promises";
9951
- import { join as join13 } from "path";
10570
+ import { readFile as readFile9 } from "fs/promises";
10571
+ import { join as join14 } from "path";
9952
10572
  import { glob } from "glob";
9953
10573
  function formatScriptName(scriptName) {
9954
10574
  return scriptName.replace(/[-:_]/g, " ").replace(/\b\w/g, (l) => l.toUpperCase());
@@ -9976,8 +10596,8 @@ async function findPackageScripts(projectPath) {
9976
10596
  const packageManager = await detectPackageManager(projectPath);
9977
10597
  for (const filePath of packageJsonFiles) {
9978
10598
  try {
9979
- const fullPath = join13(projectPath, filePath);
9980
- const content = await readFile8(fullPath, "utf-8");
10599
+ const fullPath = join14(projectPath, filePath);
10600
+ const content = await readFile9(fullPath, "utf-8");
9981
10601
  const packageJson = JSON.parse(content);
9982
10602
  if (packageJson.scripts) {
9983
10603
  for (const [scriptName] of Object.entries(packageJson.scripts)) {
@@ -10024,9 +10644,9 @@ async function handleGetPackageScripts(c, projectManager) {
10024
10644
  }
10025
10645
 
10026
10646
  // src/features/projects/projects-ai-files.route.ts
10027
- import { readdir as readdir6, readFile as readFile9, writeFile, mkdir, access as access2, rm } from "fs/promises";
10028
- import { join as join14, extname as extname2, basename } from "path";
10029
- import { existsSync as existsSync12 } from "fs";
10647
+ import { readdir as readdir6, readFile as readFile10, writeFile, mkdir, access as access2, rm } from "fs/promises";
10648
+ import { join as join15, extname as extname3, basename } from "path";
10649
+ import { existsSync as existsSync11 } from "fs";
10030
10650
  var IGNORED_DIRS = /* @__PURE__ */ new Set([
10031
10651
  ".git",
10032
10652
  "node_modules",
@@ -10044,8 +10664,8 @@ async function buildFullTree(dirPath, relativeDirPath) {
10044
10664
  try {
10045
10665
  const entries = await readdir6(dirPath, { withFileTypes: true });
10046
10666
  for (const entry of entries) {
10047
- const entryRelPath = join14(relativeDirPath, entry.name);
10048
- const entryAbsPath = join14(dirPath, entry.name);
10667
+ const entryRelPath = join15(relativeDirPath, entry.name);
10668
+ const entryAbsPath = join15(dirPath, entry.name);
10049
10669
  if (entry.isDirectory()) {
10050
10670
  const children = await buildFullTree(entryAbsPath, entryRelPath);
10051
10671
  nodes.push({
@@ -10073,8 +10693,8 @@ async function buildMarkdownFilteredTree(dirPath, relativeDirPath) {
10073
10693
  try {
10074
10694
  const entries = await readdir6(dirPath, { withFileTypes: true });
10075
10695
  for (const entry of entries) {
10076
- const entryRelPath = join14(relativeDirPath, entry.name);
10077
- const entryAbsPath = join14(dirPath, entry.name);
10696
+ const entryRelPath = join15(relativeDirPath, entry.name);
10697
+ const entryAbsPath = join15(dirPath, entry.name);
10078
10698
  if (entry.isDirectory()) {
10079
10699
  if (IGNORED_DIRS.has(entry.name)) continue;
10080
10700
  const children = await buildMarkdownFilteredTree(entryAbsPath, entryRelPath);
@@ -10087,10 +10707,10 @@ async function buildMarkdownFilteredTree(dirPath, relativeDirPath) {
10087
10707
  children
10088
10708
  });
10089
10709
  }
10090
- } else if (entry.isFile() && MARKDOWN_EXTS.has(extname2(entry.name))) {
10710
+ } else if (entry.isFile() && MARKDOWN_EXTS.has(extname3(entry.name))) {
10091
10711
  nodes.push({
10092
10712
  id: entryRelPath,
10093
- name: basename(entry.name, extname2(entry.name)),
10713
+ name: basename(entry.name, extname3(entry.name)),
10094
10714
  type: "file",
10095
10715
  path: entryRelPath
10096
10716
  });
@@ -10109,9 +10729,9 @@ async function buildAIFileTree(projectPath) {
10109
10729
  { key: "agents", label: "Agents" }
10110
10730
  ];
10111
10731
  for (const { key, label } of agentsFolders) {
10112
- const relPath = join14(".agents", key);
10113
- const absPath = join14(projectPath, relPath);
10114
- const exists = existsSync12(absPath);
10732
+ const relPath = join15(".agents", key);
10733
+ const absPath = join15(projectPath, relPath);
10734
+ const exists = existsSync11(absPath);
10115
10735
  if (exists) {
10116
10736
  const children = await buildFullTree(absPath, relPath);
10117
10737
  nodes.push({
@@ -10145,11 +10765,11 @@ async function buildAIFileTree(projectPath) {
10145
10765
  if (entry.name === ".agents" || entry.name === "AGENTS.md" || IGNORED_DIRS.has(entry.name))
10146
10766
  continue;
10147
10767
  const entryRelPath = entry.name;
10148
- const entryAbsPath = join14(projectPath, entry.name);
10149
- if (entry.isFile() && MARKDOWN_EXTS.has(extname2(entry.name))) {
10768
+ const entryAbsPath = join15(projectPath, entry.name);
10769
+ if (entry.isFile() && MARKDOWN_EXTS.has(extname3(entry.name))) {
10150
10770
  nodes.push({
10151
10771
  id: entryRelPath,
10152
- name: basename(entry.name, extname2(entry.name)),
10772
+ name: basename(entry.name, extname3(entry.name)),
10153
10773
  type: "file",
10154
10774
  path: entryRelPath
10155
10775
  });
@@ -10172,7 +10792,7 @@ async function buildAIFileTree(projectPath) {
10172
10792
  }
10173
10793
  function validateFilePath(projectPath, filePath) {
10174
10794
  if (!filePath) return null;
10175
- const abs = join14(projectPath, filePath);
10795
+ const abs = join15(projectPath, filePath);
10176
10796
  if (!abs.startsWith(projectPath + "/") && abs !== projectPath) return null;
10177
10797
  return abs;
10178
10798
  }
@@ -10253,7 +10873,7 @@ Links to important documentation, tools, or references.
10253
10873
  } catch {
10254
10874
  return c.json({ error: { code: "NOT_FOUND", message: `File not found: ${filePath}` } }, 404);
10255
10875
  }
10256
- const content = await readFile9(absPath, "utf-8");
10876
+ const content = await readFile10(absPath, "utf-8");
10257
10877
  return c.json({ content, path: filePath });
10258
10878
  } catch (error) {
10259
10879
  return errorResponse(
@@ -10289,7 +10909,7 @@ async function handleSaveAIFile(c, projectManager) {
10289
10909
  if (!absPath) {
10290
10910
  return c.json({ error: { code: "BAD_REQUEST", message: "Invalid file path" } }, 400);
10291
10911
  }
10292
- const parentDir = join14(absPath, "..");
10912
+ const parentDir = join15(absPath, "..");
10293
10913
  await mkdir(parentDir, { recursive: true });
10294
10914
  await writeFile(absPath, content, "utf-8");
10295
10915
  return c.json({ success: true, path: filePath });
@@ -10383,11 +11003,11 @@ async function handleCreateSkill(c, projectManager) {
10383
11003
  if (!project) {
10384
11004
  return errorResponse(c, ErrorCodes.PROJECT_NOT_FOUND, `Project not found: ${projectId}`, 404);
10385
11005
  }
10386
- const skillRelPath = join14(".agents", "skills", name);
10387
- const skillAbsPath = join14(project.path, skillRelPath);
10388
- const skillFileRelPath = join14(skillRelPath, "SKILL.md");
10389
- const skillFileAbsPath = join14(skillAbsPath, "SKILL.md");
10390
- if (existsSync12(skillAbsPath)) {
11006
+ const skillRelPath = join15(".agents", "skills", name);
11007
+ const skillAbsPath = join15(project.path, skillRelPath);
11008
+ const skillFileRelPath = join15(skillRelPath, "SKILL.md");
11009
+ const skillFileAbsPath = join15(skillAbsPath, "SKILL.md");
11010
+ if (existsSync11(skillAbsPath)) {
10391
11011
  return c.json(
10392
11012
  { error: { code: "CONFLICT", message: `Skill '${name}' already exists` } },
10393
11013
  409
@@ -10428,7 +11048,9 @@ function createProjectRoutes(projectManager, threadManager) {
10428
11048
  return handleOpenFolderProject(c, projectManager);
10429
11049
  });
10430
11050
  router.get("/", async (c) => {
10431
- return handleListProjects(c, projectManager, threadManager);
11051
+ const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_database(), database_exports));
11052
+ const db = await getDatabase2();
11053
+ return handleListProjects(c, projectManager, threadManager, db);
10432
11054
  });
10433
11055
  router.get("/:id", async (c) => {
10434
11056
  return handleGetProject(c, projectManager);
@@ -10486,7 +11108,7 @@ function createProjectRoutes(projectManager, threadManager) {
10486
11108
 
10487
11109
  // src/features/projects/projects.manager.ts
10488
11110
  init_utils();
10489
- import { join as join17 } from "path";
11111
+ import { join as join18 } from "path";
10490
11112
  import { realpathSync as realpathSync2 } from "fs";
10491
11113
  import { rm as rm3 } from "fs/promises";
10492
11114
 
@@ -10563,11 +11185,13 @@ var ProcessManager = class extends EventEmitter {
10563
11185
  });
10564
11186
  };
10565
11187
  try {
10566
- const [cmd, ...args2] = command.split(" ");
10567
- console.log("CLI ProcessManager: Spawning process:", cmd, "with args:", args2);
10568
- const childProcess = spawnProcess(cmd, args2, {
11188
+ const { shell, args: shellArgs } = getShellConfig();
11189
+ console.log("CLI ProcessManager: Spawning process:", shell, "with command:", command);
11190
+ const childProcess = spawnProcess(shell, [...shellArgs, command], {
10569
11191
  cwd,
10570
- shell: true,
11192
+ env: Object.fromEntries(
11193
+ Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
11194
+ ),
10571
11195
  stdio: ["ignore", "pipe", "pipe"]
10572
11196
  });
10573
11197
  this.processes.set(projectId, childProcess);
@@ -10603,10 +11227,10 @@ var ProcessManager = class extends EventEmitter {
10603
11227
  addToQueue({ type: "stderr", content: text });
10604
11228
  });
10605
11229
  let processEnded = false;
10606
- childProcess.on("close", () => {
11230
+ childProcess.on("close", (code) => {
10607
11231
  processEnded = true;
10608
11232
  this.processes.delete(projectId);
10609
- addToQueue({ type: "complete" });
11233
+ addToQueue({ type: "complete", exitCode: code ?? 0 });
10610
11234
  const res = this.resolvers.get(projectId);
10611
11235
  if (res) {
10612
11236
  res.forEach((r) => r());
@@ -10705,13 +11329,13 @@ var ProcessManager = class extends EventEmitter {
10705
11329
  // src/features/projects/projects.creator.ts
10706
11330
  init_utils();
10707
11331
  import { randomUUID as randomUUID7 } from "crypto";
10708
- import { basename as basename2, join as join16, relative as relative4 } from "path";
11332
+ import { basename as basename2, join as join17, relative as relative5 } from "path";
10709
11333
  import { access as access3, stat as stat3, readdir as readdir8 } from "fs/promises";
10710
11334
 
10711
11335
  // src/features/scaffold/scaffold.runner.ts
10712
11336
  init_utils();
10713
- import { existsSync as existsSync13 } from "fs";
10714
- import { join as join15 } from "path";
11337
+ import { existsSync as existsSync12 } from "fs";
11338
+ import { join as join16 } from "path";
10715
11339
  import { mkdir as mkdir2, readdir as readdir7, stat as stat2, rename, rm as rm2 } from "fs/promises";
10716
11340
  import { createInterface as createInterface2 } from "readline";
10717
11341
 
@@ -11376,10 +12000,12 @@ function buildCommandList(template, options) {
11376
12000
  return withPm;
11377
12001
  }
11378
12002
  async function* runCommandStreaming(cwd, command) {
11379
- const [bin, ...args2] = command.split(/\s+/);
11380
- const child = spawnProcess(bin, args2, {
12003
+ const { shell, args: shellArgs } = getShellConfig();
12004
+ const child = spawnProcess(shell, [...shellArgs, command], {
11381
12005
  cwd,
11382
- shell: true,
12006
+ env: Object.fromEntries(
12007
+ Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
12008
+ ),
11383
12009
  stdio: ["ignore", "pipe", "pipe"]
11384
12010
  });
11385
12011
  const queue = [];
@@ -11424,8 +12050,8 @@ async function* runCommandStreaming(cwd, command) {
11424
12050
  }
11425
12051
  }
11426
12052
  async function* createScaffoldedProjectStreaming(options) {
11427
- const projectPath = join15(options.parentDir, options.threadId);
11428
- if (!existsSync13(options.parentDir)) {
12053
+ const projectPath = join16(options.parentDir, options.threadId);
12054
+ if (!existsSync12(options.parentDir)) {
11429
12055
  yield {
11430
12056
  type: "result",
11431
12057
  result: {
@@ -11512,7 +12138,7 @@ async function* createScaffoldedProjectStreaming(options) {
11512
12138
  }
11513
12139
  try {
11514
12140
  const projectName2 = getProjectName(options.projectName);
11515
- const projectSubDir = join15(projectPath, projectName2);
12141
+ const projectSubDir = join16(projectPath, projectName2);
11516
12142
  try {
11517
12143
  const subDirStat = await stat2(projectSubDir);
11518
12144
  if (subDirStat.isDirectory()) {
@@ -11522,8 +12148,8 @@ async function* createScaffoldedProjectStreaming(options) {
11522
12148
  };
11523
12149
  const items = await readdir7(projectSubDir);
11524
12150
  for (const item of items) {
11525
- const oldPath = join15(projectSubDir, item);
11526
- const newPath = join15(projectPath, item);
12151
+ const oldPath = join16(projectSubDir, item);
12152
+ const newPath = join16(projectPath, item);
11527
12153
  await rename(oldPath, newPath);
11528
12154
  }
11529
12155
  await rm2(projectSubDir, { recursive: true, force: true });
@@ -11539,7 +12165,7 @@ async function* createScaffoldedProjectStreaming(options) {
11539
12165
  };
11540
12166
  }
11541
12167
  const installCwd = scaffoldOptions.projectPath;
11542
- if (existsSync13(installCwd)) {
12168
+ if (existsSync12(installCwd)) {
11543
12169
  const installCmd = getInstallCommand(options.packageManager);
11544
12170
  for await (const event of runCommandStreaming(installCwd, installCmd)) {
11545
12171
  if (event.type === "output") {
@@ -11983,7 +12609,7 @@ var ProjectCreator = class {
11983
12609
  return name;
11984
12610
  }
11985
12611
  generateThreadPath(_projectId, threadId) {
11986
- return join16(this.rootFolder, threadId);
12612
+ return join17(this.rootFolder, threadId);
11987
12613
  }
11988
12614
  async *createProjectFromFolder() {
11989
12615
  await loadUtils();
@@ -12133,19 +12759,19 @@ var ProjectCreator = class {
12133
12759
  }
12134
12760
  async findAllPackageJsonFiles(rootPath, currentPath, setupScripts) {
12135
12761
  try {
12136
- await access3(join16(currentPath, "package.json"));
12137
- const relativePath = relative4(rootPath, currentPath);
12762
+ await access3(join17(currentPath, "package.json"));
12763
+ const relativePath = relative5(rootPath, currentPath);
12138
12764
  let installCommand = "";
12139
12765
  try {
12140
- await access3(join16(currentPath, "yarn.lock"));
12766
+ await access3(join17(currentPath, "yarn.lock"));
12141
12767
  installCommand = "yarn install";
12142
12768
  } catch {
12143
12769
  try {
12144
- await access3(join16(currentPath, "bun.lock"));
12770
+ await access3(join17(currentPath, "bun.lock"));
12145
12771
  installCommand = "bun install";
12146
12772
  } catch {
12147
12773
  try {
12148
- await access3(join16(currentPath, "pnpm-lock.yaml"));
12774
+ await access3(join17(currentPath, "pnpm-lock.yaml"));
12149
12775
  installCommand = "pnpm install";
12150
12776
  } catch {
12151
12777
  installCommand = "npm install";
@@ -12162,7 +12788,7 @@ var ProjectCreator = class {
12162
12788
  try {
12163
12789
  const entries = await readdir8(currentPath);
12164
12790
  for (const entry of entries) {
12165
- const entryPath = join16(currentPath, entry);
12791
+ const entryPath = join17(currentPath, entry);
12166
12792
  try {
12167
12793
  const stats = await stat3(entryPath);
12168
12794
  if (stats.isDirectory() && !entry.startsWith(".") && entry !== "node_modules") {
@@ -12481,7 +13107,7 @@ var ProjectManagerImpl = class {
12481
13107
  const session = this.terminalSessionManager.getOrCreateSession(threadId, thread.path);
12482
13108
  let workingDir;
12483
13109
  if (cwd) {
12484
- workingDir = cwd.startsWith("/") ? cwd : join17(thread.path, cwd);
13110
+ workingDir = cwd.startsWith("/") ? cwd : join18(thread.path, cwd);
12485
13111
  this.terminalSessionManager.updateWorkingDirectory(threadId, workingDir);
12486
13112
  } else {
12487
13113
  workingDir = session.currentWorkingDirectory;
@@ -12586,6 +13212,21 @@ ___CWD___%s
12586
13212
  project.commitMethod = commitMethod;
12587
13213
  await this.metadataManager.saveProjects(projects);
12588
13214
  }
13215
+ /**
13216
+ * Updates a prompt field on a project
13217
+ * @param projectId - The project ID
13218
+ * @param field - Which prompt field to update
13219
+ * @param value - The prompt value (null or empty string clears it)
13220
+ */
13221
+ async updateProjectPrompt(projectId, field, value) {
13222
+ const projects = await this.metadataManager.loadProjects();
13223
+ const project = projects.find((p) => p.id === projectId);
13224
+ if (!project) {
13225
+ throw new Error(`Project not found: ${projectId}`);
13226
+ }
13227
+ project[field] = value && value.trim() ? value : void 0;
13228
+ await this.metadataManager.saveProjects(projects);
13229
+ }
12589
13230
  /**
12590
13231
  * Starts running a dev server process for a project
12591
13232
  * @param projectId - The project ID
@@ -12671,9 +13312,9 @@ import { Hono as Hono7 } from "hono";
12671
13312
 
12672
13313
  // src/core/env-manager.ts
12673
13314
  import { promises as fs } from "fs";
12674
- import { join as join18 } from "path";
13315
+ import { join as join19 } from "path";
12675
13316
  async function updateEnvFile(keyNames) {
12676
- const envPath = join18(process.cwd(), ".env");
13317
+ const envPath = join19(process.cwd(), ".env");
12677
13318
  let content = "";
12678
13319
  try {
12679
13320
  content = await fs.readFile(envPath, "utf-8");
@@ -12702,7 +13343,7 @@ async function updateEnvFile(keyNames) {
12702
13343
  await fs.writeFile(envPath, newLines.join("\n") + "\n", "utf-8");
12703
13344
  }
12704
13345
  async function readEnvFile() {
12705
- const envPath = join18(process.cwd(), ".env");
13346
+ const envPath = join19(process.cwd(), ".env");
12706
13347
  const envMap = {};
12707
13348
  try {
12708
13349
  const content = await fs.readFile(envPath, "utf-8");
@@ -13121,9 +13762,9 @@ function createScaffoldRoutes(projectManager) {
13121
13762
  }
13122
13763
 
13123
13764
  // src/features/slash-commands/slash-commands.manager.ts
13124
- import { readdir as readdir9, readFile as readFile10, mkdir as mkdir3, writeFile as writeFile2, unlink as unlink2 } from "fs/promises";
13125
- import { join as join19, basename as basename3, extname as extname3 } from "path";
13126
- import { existsSync as existsSync14 } from "fs";
13765
+ import { readdir as readdir9, readFile as readFile11, mkdir as mkdir3, writeFile as writeFile2, unlink as unlink2 } from "fs/promises";
13766
+ import { join as join20, basename as basename3, extname as extname4 } from "path";
13767
+ import { existsSync as existsSync13 } from "fs";
13127
13768
  import { homedir as homedir7 } from "os";
13128
13769
  function slugify(filename) {
13129
13770
  return filename.toLowerCase().replace(/\s+/g, "-").replace(/[^a-z0-9-]/g, "").replace(/-+/g, "-").replace(/^-+|-+$/g, "");
@@ -13168,10 +13809,10 @@ function parseFrontmatter3(markdown) {
13168
13809
  return { metadata, content };
13169
13810
  }
13170
13811
  function getGlobalCommandsDir() {
13171
- return join19(homedir7(), ".agents", "commands");
13812
+ return join20(homedir7(), ".agents", "commands");
13172
13813
  }
13173
13814
  function getProjectCommandsDir(threadPath) {
13174
- return join19(threadPath, ".agents", "commands");
13815
+ return join20(threadPath, ".agents", "commands");
13175
13816
  }
13176
13817
  var SlashCommandManager = class _SlashCommandManager {
13177
13818
  /**
@@ -13186,14 +13827,14 @@ var SlashCommandManager = class _SlashCommandManager {
13186
13827
  async loadCommands(threadPath) {
13187
13828
  const commands = /* @__PURE__ */ new Map();
13188
13829
  const globalDir = getGlobalCommandsDir();
13189
- if (existsSync14(globalDir)) {
13830
+ if (existsSync13(globalDir)) {
13190
13831
  const globalCommands = await this.loadCommandsFromDir(globalDir, "global");
13191
13832
  for (const cmd of globalCommands) {
13192
13833
  commands.set(cmd.name, cmd);
13193
13834
  }
13194
13835
  }
13195
13836
  const projectDir = getProjectCommandsDir(threadPath);
13196
- if (existsSync14(projectDir)) {
13837
+ if (existsSync13(projectDir)) {
13197
13838
  const projectCommands = await this.loadCommandsFromDir(projectDir, "project");
13198
13839
  for (const cmd of projectCommands) {
13199
13840
  if (!_SlashCommandManager.BUILT_IN_COMMANDS.has(cmd.name)) {
@@ -13212,10 +13853,10 @@ var SlashCommandManager = class _SlashCommandManager {
13212
13853
  const files = await readdir9(dir);
13213
13854
  for (const file of files) {
13214
13855
  if (!file.endsWith(".md")) continue;
13215
- const filePath = join19(dir, file);
13216
- const fileContent = await readFile10(filePath, "utf-8");
13856
+ const filePath = join20(dir, file);
13857
+ const fileContent = await readFile11(filePath, "utf-8");
13217
13858
  const { metadata, content } = parseFrontmatter3(fileContent);
13218
- const nameWithoutExt = basename3(file, extname3(file));
13859
+ const nameWithoutExt = basename3(file, extname4(file));
13219
13860
  const commandName = slugify(nameWithoutExt);
13220
13861
  if (!commandName) continue;
13221
13862
  commands.push({
@@ -13243,12 +13884,12 @@ var SlashCommandManager = class _SlashCommandManager {
13243
13884
  if (!dir) {
13244
13885
  throw new Error("threadPath required for project-scoped commands");
13245
13886
  }
13246
- if (!existsSync14(dir)) {
13887
+ if (!existsSync13(dir)) {
13247
13888
  await mkdir3(dir, { recursive: true });
13248
13889
  }
13249
13890
  const filename = `${name}.md`;
13250
- const filePath = join19(dir, filename);
13251
- if (existsSync14(filePath)) {
13891
+ const filePath = join20(dir, filename);
13892
+ if (existsSync13(filePath)) {
13252
13893
  throw new Error(`Command already exists: ${name}`);
13253
13894
  }
13254
13895
  let markdown = "";
@@ -13282,7 +13923,7 @@ var SlashCommandManager = class _SlashCommandManager {
13282
13923
  * Update an existing command
13283
13924
  */
13284
13925
  async updateCommand(filePath, content, metadata) {
13285
- if (!existsSync14(filePath)) {
13926
+ if (!existsSync13(filePath)) {
13286
13927
  throw new Error(`Command file not found: ${filePath}`);
13287
13928
  }
13288
13929
  let markdown = "";
@@ -13309,7 +13950,7 @@ var SlashCommandManager = class _SlashCommandManager {
13309
13950
  * Delete a command
13310
13951
  */
13311
13952
  async deleteCommand(filePath) {
13312
- if (!existsSync14(filePath)) {
13953
+ if (!existsSync13(filePath)) {
13313
13954
  throw new Error(`Command file not found: ${filePath}`);
13314
13955
  }
13315
13956
  await unlink2(filePath);
@@ -13655,7 +14296,7 @@ function createSlashCommandRoutes(router, threadManager) {
13655
14296
 
13656
14297
  // src/features/rules/rules-post.route.ts
13657
14298
  import { promises as fs2 } from "fs";
13658
- import path4 from "path";
14299
+ import path3 from "path";
13659
14300
  async function createRule(c, projectManager) {
13660
14301
  try {
13661
14302
  const body = await c.req.json();
@@ -13710,7 +14351,7 @@ async function createRule(c, projectManager) {
13710
14351
  );
13711
14352
  }
13712
14353
  const projectPath = project.path;
13713
- const rulesDir = path4.join(projectPath, ".agents", "rules");
14354
+ const rulesDir = path3.join(projectPath, ".agents", "rules");
13714
14355
  await fs2.mkdir(rulesDir, { recursive: true });
13715
14356
  const ruleContent = `---
13716
14357
  description: ${description.replace(/"/g, '\\"')}
@@ -13718,7 +14359,7 @@ alwaysApply: ${alwaysApply}
13718
14359
  ---
13719
14360
 
13720
14361
  Describe your rules here as markdown which will be used when Tarsk sees is prompted with a request that is related to the description of this rule.`;
13721
- const ruleFilePath = path4.join(rulesDir, `${name}.md`);
14362
+ const ruleFilePath = path3.join(rulesDir, `${name}.md`);
13722
14363
  await fs2.writeFile(ruleFilePath, ruleContent, "utf-8");
13723
14364
  return c.json(
13724
14365
  {
@@ -13842,7 +14483,7 @@ function createRuleRoutes(router, projectManager) {
13842
14483
  // src/features/threads/threads.manager.ts
13843
14484
  init_utils();
13844
14485
  import { randomUUID as randomUUID8 } from "crypto";
13845
- import { join as join20 } from "path";
14486
+ import { join as join21 } from "path";
13846
14487
  import { execSync as execSync3 } from "child_process";
13847
14488
  import { rm as rm4, stat as stat4, mkdir as mkdir4 } from "fs/promises";
13848
14489
  init_database();
@@ -14079,6 +14720,13 @@ var ThreadManagerImpl = class {
14079
14720
  const threads = await this.metadataManager.loadThreads();
14080
14721
  return threads.filter((t) => t.projectId === projectId);
14081
14722
  }
14723
+ /**
14724
+ * Lists all active threads across all projects in a single query
14725
+ * @returns Array of all active threads
14726
+ */
14727
+ async listAllThreads() {
14728
+ return this.metadataManager.loadThreads();
14729
+ }
14082
14730
  /**
14083
14731
  * Deletes a thread and removes its directory
14084
14732
  *
@@ -14172,9 +14820,12 @@ var ThreadManagerImpl = class {
14172
14820
  * @yields ThreadEvent progress events
14173
14821
  */
14174
14822
  async *runSetupScript(threadPath, commandLine) {
14175
- const child = spawnProcess(commandLine, [], {
14176
- shell: true,
14177
- cwd: threadPath
14823
+ const { shell, args: shellArgs } = getShellConfig();
14824
+ const child = spawnProcess(shell, [...shellArgs, commandLine], {
14825
+ cwd: threadPath,
14826
+ env: Object.fromEntries(
14827
+ Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
14828
+ )
14178
14829
  });
14179
14830
  const decoder = new TextDecoder();
14180
14831
  let resolveNext = () => {
@@ -14236,7 +14887,7 @@ var ThreadManagerImpl = class {
14236
14887
  * - 7.4 - THE CLI SHALL ensure each Thread clone is stored in a unique directory path
14237
14888
  */
14238
14889
  generateThreadPath(_projectId, threadId) {
14239
- return join20(this.rootFolder, threadId);
14890
+ return join21(this.rootFolder, threadId);
14240
14891
  }
14241
14892
  /**
14242
14893
  * Generates a thread title if not provided
@@ -14362,7 +15013,7 @@ async function handleCreateThread(c, threadManager, gitManager) {
14362
15013
  }
14363
15014
 
14364
15015
  // src/features/threads/threads-list.route.ts
14365
- function computeGitStatusDot(status) {
15016
+ function computeGitStatusDot2(status) {
14366
15017
  if (status.hasChanges) return true;
14367
15018
  if (status.commitsAheadOfDefault && !status.prExists) return true;
14368
15019
  if (status.status === "Behind" || status.status === "Diverged") return true;
@@ -14389,7 +15040,7 @@ async function handleListThreads(c, threadManager, db) {
14389
15040
  );
14390
15041
  const threadsWithStatus = threads.map((thread) => {
14391
15042
  const cached = statusCache.get(thread.id);
14392
- const gitStatusDot = cached ? computeGitStatusDot(cached) : false;
15043
+ const gitStatusDot = cached ? computeGitStatusDot2(cached) : false;
14393
15044
  return { ...thread, gitStatusDot };
14394
15045
  });
14395
15046
  return c.json(threadsWithStatus);
@@ -14429,26 +15080,63 @@ async function handleDeleteThread(c, threadManager) {
14429
15080
  }
14430
15081
 
14431
15082
  // src/core/project-inspector.ts
14432
- import { readFile as readFile11, readdir as readdir10, writeFile as writeFile3, mkdir as mkdir5 } from "fs/promises";
14433
- import { existsSync as existsSync15 } from "fs";
14434
- import { join as join21, basename as basename4, relative as relative5 } from "path";
15083
+ import { readFile as readFile12, readdir as readdir10 } from "fs/promises";
15084
+ import { existsSync as existsSync14 } from "fs";
15085
+ import { join as join22, basename as basename4, relative as relative6 } from "path";
14435
15086
  import { glob as glob2 } from "glob";
14436
- async function inspectProject(projectPath) {
15087
+
15088
+ // src/features/project-scripts/project-scripts.database.ts
15089
+ init_database();
15090
+ import { randomUUID as randomUUID9 } from "crypto";
15091
+ async function getScriptsByProject(db, projectId) {
15092
+ const result = await db.execute({
15093
+ sql: `SELECT id, projectId, workspace, name, command, friendlyName, updatedAt
15094
+ FROM project_scripts WHERE projectId = ? ORDER BY workspace ASC, name ASC`,
15095
+ args: [projectId]
15096
+ });
15097
+ return result.rows;
15098
+ }
15099
+ async function upsertProjectScripts(projectId, scripts) {
15100
+ const db = await getDatabase();
15101
+ const now = (/* @__PURE__ */ new Date()).toISOString();
15102
+ await db.execute({
15103
+ sql: `DELETE FROM project_scripts WHERE projectId = ?`,
15104
+ args: [projectId]
15105
+ });
15106
+ for (const script of scripts) {
15107
+ await db.execute({
15108
+ sql: `INSERT INTO project_scripts (id, projectId, workspace, name, command, friendlyName, updatedAt)
15109
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
15110
+ args: [
15111
+ randomUUID9(),
15112
+ projectId,
15113
+ script.workspace,
15114
+ script.name,
15115
+ script.command,
15116
+ script.friendlyName,
15117
+ now
15118
+ ]
15119
+ });
15120
+ }
15121
+ }
15122
+
15123
+ // src/core/project-inspector.ts
15124
+ async function inspectProject(projectPath, projectId) {
14437
15125
  const packageManager = await detectPackageManager2(projectPath);
14438
15126
  const repoType = await detectMonoRepoType(projectPath);
14439
15127
  const workspaces = await resolveWorkspaces(projectPath, repoType);
14440
15128
  const scripts = await collectScripts(projectPath, workspaces, packageManager, repoType);
14441
- await writeProjectScripts(projectPath, scripts);
15129
+ await upsertProjectScripts(projectId, scripts);
14442
15130
  return scripts;
14443
15131
  }
14444
15132
  async function detectPackageManager2(projectPath) {
14445
- if (existsSync15(join21(projectPath, "bun.lockb")) || existsSync15(join21(projectPath, "bun.lock"))) {
15133
+ if (existsSync14(join22(projectPath, "bun.lockb")) || existsSync14(join22(projectPath, "bun.lock"))) {
14446
15134
  return "bun";
14447
15135
  }
14448
- if (existsSync15(join21(projectPath, "pnpm-lock.yaml"))) {
15136
+ if (existsSync14(join22(projectPath, "pnpm-lock.yaml"))) {
14449
15137
  return "pnpm";
14450
15138
  }
14451
- if (existsSync15(join21(projectPath, "yarn.lock"))) {
15139
+ if (existsSync14(join22(projectPath, "yarn.lock"))) {
14452
15140
  return "yarn";
14453
15141
  }
14454
15142
  try {
@@ -14467,8 +15155,8 @@ async function detectPackageManager2(projectPath) {
14467
15155
  return "npm";
14468
15156
  }
14469
15157
  async function detectMonoRepoType(projectPath) {
14470
- const hasPnpmWorkspace = existsSync15(join21(projectPath, "pnpm-workspace.yaml"));
14471
- if (existsSync15(join21(projectPath, "nx.json"))) {
15158
+ const hasPnpmWorkspace = existsSync14(join22(projectPath, "pnpm-workspace.yaml"));
15159
+ if (existsSync14(join22(projectPath, "nx.json"))) {
14472
15160
  return "nx";
14473
15161
  }
14474
15162
  const pkg = await readPackageJson(projectPath);
@@ -14482,10 +15170,10 @@ async function detectMonoRepoType(projectPath) {
14482
15170
  if (hasPnpmWorkspace) {
14483
15171
  return "pnpm";
14484
15172
  }
14485
- if (existsSync15(join21(projectPath, "lerna.json"))) {
15173
+ if (existsSync14(join22(projectPath, "lerna.json"))) {
14486
15174
  return "lerna";
14487
15175
  }
14488
- if (existsSync15(join21(projectPath, "turbo.json"))) {
15176
+ if (existsSync14(join22(projectPath, "turbo.json"))) {
14489
15177
  return "turbo";
14490
15178
  }
14491
15179
  const folderWorkspaces = await detectFolderBasedWorkspaces(projectPath);
@@ -14526,8 +15214,8 @@ async function resolvePackageJsonWorkspaces(projectPath) {
14526
15214
  });
14527
15215
  const workspaces = [];
14528
15216
  for (const folder of folders) {
14529
- const absPath = join21(projectPath, folder);
14530
- if (existsSync15(join21(absPath, "package.json"))) {
15217
+ const absPath = join22(projectPath, folder);
15218
+ if (existsSync14(join22(absPath, "package.json"))) {
14531
15219
  const wsPkg = await readPackageJson(absPath);
14532
15220
  workspaces.push({
14533
15221
  name: wsPkg?.name ?? basename4(folder),
@@ -14543,12 +15231,12 @@ async function resolvePackageJsonWorkspaces(projectPath) {
14543
15231
  return workspaces;
14544
15232
  }
14545
15233
  async function resolvePnpmWorkspaces(projectPath) {
14546
- const yamlPath = join21(projectPath, "pnpm-workspace.yaml");
14547
- if (!existsSync15(yamlPath)) {
15234
+ const yamlPath = join22(projectPath, "pnpm-workspace.yaml");
15235
+ if (!existsSync14(yamlPath)) {
14548
15236
  return [{ name: "root", folder: projectPath, relativePath: "." }];
14549
15237
  }
14550
15238
  try {
14551
- const yaml = await readFile11(yamlPath, "utf-8");
15239
+ const yaml = await readFile12(yamlPath, "utf-8");
14552
15240
  const patterns = [];
14553
15241
  for (const line of yaml.split("\n")) {
14554
15242
  const trimmed = line.trim();
@@ -14569,8 +15257,8 @@ async function resolvePnpmWorkspaces(projectPath) {
14569
15257
  });
14570
15258
  const workspaces = [];
14571
15259
  for (const folder of folders) {
14572
- const absPath = join21(projectPath, folder);
14573
- if (existsSync15(join21(absPath, "package.json"))) {
15260
+ const absPath = join22(projectPath, folder);
15261
+ if (existsSync14(join22(absPath, "package.json"))) {
14574
15262
  const wsPkg = await readPackageJson(absPath);
14575
15263
  workspaces.push({
14576
15264
  name: wsPkg?.name ?? basename4(folder),
@@ -14589,12 +15277,12 @@ async function resolvePnpmWorkspaces(projectPath) {
14589
15277
  }
14590
15278
  }
14591
15279
  async function resolveLernaWorkspaces(projectPath) {
14592
- const lernaPath = join21(projectPath, "lerna.json");
14593
- if (!existsSync15(lernaPath)) {
15280
+ const lernaPath = join22(projectPath, "lerna.json");
15281
+ if (!existsSync14(lernaPath)) {
14594
15282
  return [{ name: "root", folder: projectPath, relativePath: "." }];
14595
15283
  }
14596
15284
  try {
14597
- const content = await readFile11(lernaPath, "utf-8");
15285
+ const content = await readFile12(lernaPath, "utf-8");
14598
15286
  const lerna = JSON.parse(content);
14599
15287
  const patterns = lerna.packages ?? ["packages/*"];
14600
15288
  const folders = await glob2(patterns, {
@@ -14603,8 +15291,8 @@ async function resolveLernaWorkspaces(projectPath) {
14603
15291
  });
14604
15292
  const workspaces = [];
14605
15293
  for (const folder of folders) {
14606
- const absPath = join21(projectPath, folder);
14607
- if (existsSync15(join21(absPath, "package.json"))) {
15294
+ const absPath = join22(projectPath, folder);
15295
+ if (existsSync14(join22(absPath, "package.json"))) {
14608
15296
  const wsPkg = await readPackageJson(absPath);
14609
15297
  workspaces.push({
14610
15298
  name: wsPkg?.name ?? basename4(folder),
@@ -14624,17 +15312,17 @@ async function resolveLernaWorkspaces(projectPath) {
14624
15312
  }
14625
15313
  async function resolveNxWorkspaces(projectPath) {
14626
15314
  const workspaces = [];
14627
- const workspaceJsonPath = join21(projectPath, "workspace.json");
14628
- if (existsSync15(workspaceJsonPath)) {
15315
+ const workspaceJsonPath = join22(projectPath, "workspace.json");
15316
+ if (existsSync14(workspaceJsonPath)) {
14629
15317
  try {
14630
- const content = await readFile11(workspaceJsonPath, "utf-8");
15318
+ const content = await readFile12(workspaceJsonPath, "utf-8");
14631
15319
  const wsJson = JSON.parse(content);
14632
15320
  for (const [name, value] of Object.entries(wsJson.projects ?? {})) {
14633
15321
  const folder = typeof value === "string" ? value : value.root;
14634
15322
  if (folder) {
14635
15323
  workspaces.push({
14636
15324
  name,
14637
- folder: join21(projectPath, folder),
15325
+ folder: join22(projectPath, folder),
14638
15326
  relativePath: folder
14639
15327
  });
14640
15328
  }
@@ -14649,13 +15337,13 @@ async function resolveNxWorkspaces(projectPath) {
14649
15337
  });
14650
15338
  for (const file of projectJsonFiles) {
14651
15339
  try {
14652
- const content = await readFile11(join21(projectPath, file), "utf-8");
15340
+ const content = await readFile12(join22(projectPath, file), "utf-8");
14653
15341
  const project = JSON.parse(content);
14654
15342
  if (project.name) {
14655
15343
  const folder = file.replace(/\/project\.json$/, "");
14656
15344
  workspaces.push({
14657
15345
  name: project.name,
14658
- folder: join21(projectPath, folder),
15346
+ folder: join22(projectPath, folder),
14659
15347
  relativePath: folder
14660
15348
  });
14661
15349
  }
@@ -14663,15 +15351,15 @@ async function resolveNxWorkspaces(projectPath) {
14663
15351
  }
14664
15352
  }
14665
15353
  if (workspaces.length > 0) return workspaces;
14666
- const appsDir = join21(projectPath, "apps");
14667
- if (existsSync15(appsDir)) {
15354
+ const appsDir = join22(projectPath, "apps");
15355
+ if (existsSync14(appsDir)) {
14668
15356
  const entries = await readdir10(appsDir, { withFileTypes: true });
14669
15357
  for (const entry of entries) {
14670
15358
  if (entry.isDirectory() && !entry.name.startsWith(".")) {
14671
- const folder = join21("apps", entry.name);
15359
+ const folder = join22("apps", entry.name);
14672
15360
  workspaces.push({
14673
15361
  name: entry.name,
14674
- folder: join21(projectPath, folder),
15362
+ folder: join22(projectPath, folder),
14675
15363
  relativePath: folder
14676
15364
  });
14677
15365
  }
@@ -14706,13 +15394,13 @@ async function detectFolderBasedWorkspaces(projectPath) {
14706
15394
  const results = [];
14707
15395
  for (const entry of entries) {
14708
15396
  if (entry.isDirectory() && !entry.name.startsWith(".") && entry.name !== "node_modules" && entry.name !== "dist" && entry.name !== "build") {
14709
- const pkgPath = join21(projectPath, entry.name, "package.json");
14710
- if (existsSync15(pkgPath)) {
15397
+ const pkgPath = join22(projectPath, entry.name, "package.json");
15398
+ if (existsSync14(pkgPath)) {
14711
15399
  try {
14712
- const content = await readFile11(pkgPath, "utf-8");
15400
+ const content = await readFile12(pkgPath, "utf-8");
14713
15401
  const pkg = JSON.parse(content);
14714
15402
  if (pkg.dependencies || pkg.devDependencies || pkg.scripts) {
14715
- results.push({ name: entry.name, absPath: join21(projectPath, entry.name) });
15403
+ results.push({ name: entry.name, absPath: join22(projectPath, entry.name) });
14716
15404
  }
14717
15405
  } catch {
14718
15406
  }
@@ -14750,7 +15438,7 @@ function buildRunCommand(scriptName, workspace, packageManager, repoType, projec
14750
15438
  return `npm run ${scriptName} --workspace=${workspace.relativePath}`;
14751
15439
  }
14752
15440
  if (!isRoot) {
14753
- const relPath = relative5(projectPath, workspace.folder);
15441
+ const relPath = relative6(projectPath, workspace.folder);
14754
15442
  return `cd ${relPath} && ${runCmd}`;
14755
15443
  }
14756
15444
  return runCmd;
@@ -14768,7 +15456,7 @@ function pmRunCommand(packageManager, scriptName) {
14768
15456
  }
14769
15457
  async function readPackageJson(dir) {
14770
15458
  try {
14771
- const content = await readFile11(join21(dir, "package.json"), "utf-8");
15459
+ const content = await readFile12(join22(dir, "package.json"), "utf-8");
14772
15460
  return JSON.parse(content);
14773
15461
  } catch {
14774
15462
  return null;
@@ -14787,17 +15475,6 @@ function getWorkspaceGlobs(pkg) {
14787
15475
  function toFriendlyName(scriptName) {
14788
15476
  return scriptName.replace(/[-:_]/g, " ").replace(/\b\w/g, (char) => char.toUpperCase());
14789
15477
  }
14790
- async function writeProjectScripts(projectPath, scripts) {
14791
- const agentsDir = join21(projectPath, ".agents");
14792
- if (!existsSync15(agentsDir)) {
14793
- await mkdir5(agentsDir, { recursive: true });
14794
- }
14795
- await writeFile3(
14796
- join21(agentsDir, "project-scripts.json"),
14797
- JSON.stringify(scripts, null, 2),
14798
- "utf-8"
14799
- );
14800
- }
14801
15478
 
14802
15479
  // src/features/threads/threads-select.route.ts
14803
15480
  async function handleSelectThread(c, threadManager) {
@@ -14811,7 +15488,7 @@ async function handleSelectThread(c, threadManager) {
14811
15488
  return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
14812
15489
  }
14813
15490
  await threadManager.selectThread(threadId);
14814
- inspectProject(thread.path).catch((err) => {
15491
+ inspectProject(thread.path, thread.projectId).catch((err) => {
14815
15492
  console.error(`Failed to inspect project scripts for thread ${threadId}:`, err);
14816
15493
  });
14817
15494
  return successResponse(c, { success: true, message: "Thread selected successfully", threadId });
@@ -14982,8 +15659,8 @@ async function handleListThreadFiles(c, threadManager) {
14982
15659
  }
14983
15660
 
14984
15661
  // src/features/threads/threads-explorer.route.ts
14985
- import { readdir as readdir11, readFile as readFile12, rename as rename2, writeFile as writeFile4, mkdir as mkdir6, rm as rm5, access as access4 } from "fs/promises";
14986
- import { join as join22 } from "path";
15662
+ import { readdir as readdir11, readFile as readFile13, rename as rename2, writeFile as writeFile3, mkdir as mkdir5, rm as rm5, access as access4 } from "fs/promises";
15663
+ import { join as join23 } from "path";
14987
15664
  import { spawn as spawn2 } from "child_process";
14988
15665
  var Utils3 = null;
14989
15666
  var utilsLoaded3 = false;
@@ -15043,7 +15720,7 @@ async function handleListThreadExplorerDir(c, threadManager) {
15043
15720
  const entries = dirents.filter((d) => !(isRoot && d.name === ".git")).map((d) => ({
15044
15721
  name: d.name,
15045
15722
  type: d.isDirectory() ? "folder" : "file",
15046
- path: queryPath ? join22(queryPath, d.name) : d.name
15723
+ path: queryPath ? join23(queryPath, d.name) : d.name
15047
15724
  })).sort((a, b) => {
15048
15725
  if (a.type !== b.type) return a.type === "folder" ? -1 : 1;
15049
15726
  return a.name.localeCompare(b.name);
@@ -15179,8 +15856,8 @@ async function handleCreateExplorerFile(c, threadManager) {
15179
15856
  }
15180
15857
  absParent = validated;
15181
15858
  }
15182
- await mkdir6(absParent, { recursive: true });
15183
- await writeFile4(join22(absParent, name), "", "utf-8");
15859
+ await mkdir5(absParent, { recursive: true });
15860
+ await writeFile3(join23(absParent, name), "", "utf-8");
15184
15861
  const relPath = dirPath ? `${dirPath}/${name}` : name;
15185
15862
  return c.json({ success: true, path: relPath });
15186
15863
  } catch (error) {
@@ -15234,7 +15911,7 @@ async function handleGetExplorerMedia(c, threadManager) {
15234
15911
  }
15235
15912
  const ext = filePath.split(".").pop()?.toLowerCase() ?? "";
15236
15913
  const contentType = MEDIA_MIME_TYPES[ext] ?? "application/octet-stream";
15237
- const data = await readFile12(absPath);
15914
+ const data = await readFile13(absPath);
15238
15915
  return new Response(data, { headers: { "Content-Type": contentType } });
15239
15916
  } catch (error) {
15240
15917
  return errorResponse(
@@ -15276,7 +15953,7 @@ async function handleCreateExplorerFolder(c, threadManager) {
15276
15953
  }
15277
15954
  absParent = validated;
15278
15955
  }
15279
- await mkdir6(join22(absParent, name), { recursive: true });
15956
+ await mkdir5(join23(absParent, name), { recursive: true });
15280
15957
  const relPath = dirPath ? `${dirPath}/${name}` : name;
15281
15958
  return c.json({ success: true, path: relPath });
15282
15959
  } catch (error) {
@@ -15386,15 +16063,15 @@ async function handleOpenThread(c, threadManager) {
15386
16063
  }
15387
16064
 
15388
16065
  // src/features/threads/threads-conversation-folder-path.route.ts
15389
- import { join as join23 } from "path";
16066
+ import { join as join24 } from "path";
15390
16067
  async function handleGetConversationFolderPath(c) {
15391
16068
  try {
15392
16069
  const threadId = c.req.param("id");
15393
16070
  if (!threadId) {
15394
16071
  return errorResponse(c, ErrorCodes.INVALID_REQUEST, "Thread ID is required", 400);
15395
16072
  }
15396
- const path7 = join23(getDataDir(), threadId);
15397
- return successResponse(c, { path: path7 });
16073
+ const path6 = join24(getDataDir(), threadId);
16074
+ return successResponse(c, { path: path6 });
15398
16075
  } catch (error) {
15399
16076
  const errorMessage = error instanceof Error ? error.message : String(error);
15400
16077
  return errorResponse(
@@ -15447,13 +16124,13 @@ async function handleFixComments(c, threadManager) {
15447
16124
  }
15448
16125
 
15449
16126
  // src/features/threads/threads-ai-files.route.ts
15450
- import { readFile as readFile13, writeFile as writeFile6, mkdir as mkdir8, access as access5, rm as rm6 } from "fs/promises";
15451
- import { join as join25 } from "path";
15452
- import { existsSync as existsSync16 } from "fs";
16127
+ import { readFile as readFile14, writeFile as writeFile5, mkdir as mkdir7, access as access5, rm as rm6 } from "fs/promises";
16128
+ import { join as join26 } from "path";
16129
+ import { existsSync as existsSync15 } from "fs";
15453
16130
 
15454
16131
  // src/features/git/git-download-folder.ts
15455
- import { mkdir as mkdir7, writeFile as writeFile5 } from "fs/promises";
15456
- import { join as join24, dirname as dirname4 } from "path";
16132
+ import { mkdir as mkdir6, writeFile as writeFile4 } from "fs/promises";
16133
+ import { join as join25, dirname as dirname4 } from "path";
15457
16134
  async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
15458
16135
  const { token, ref } = options;
15459
16136
  const match = repoUrl.replace(/\.git$/, "").match(/github\.com\/([^/]+)\/([^/]+)/);
@@ -15489,8 +16166,8 @@ async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
15489
16166
  await Promise.all(
15490
16167
  files.map(async (file) => {
15491
16168
  const relativePath = file.path.slice(prefix.length);
15492
- const localPath = join24(destPath, relativePath);
15493
- await mkdir7(dirname4(localPath), { recursive: true });
16169
+ const localPath = join25(destPath, relativePath);
16170
+ await mkdir6(dirname4(localPath), { recursive: true });
15494
16171
  const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${refParam}/${file.path}`;
15495
16172
  const fileRes = await fetch(rawUrl, { headers });
15496
16173
  if (!fileRes.ok) {
@@ -15498,7 +16175,7 @@ async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
15498
16175
  }
15499
16176
  let content = Buffer.from(await fileRes.arrayBuffer()).toString("utf8");
15500
16177
  content = content.replace(/Claude/gi, "Tarsk");
15501
- await writeFile5(localPath, content, "utf8");
16178
+ await writeFile4(localPath, content, "utf8");
15502
16179
  console.log(` \u2713 ${relativePath}`);
15503
16180
  })
15504
16181
  );
@@ -15583,7 +16260,7 @@ Links to important documentation, tools, or references.
15583
16260
  } catch {
15584
16261
  return c.json({ error: { code: "NOT_FOUND", message: `File not found: ${filePath}` } }, 404);
15585
16262
  }
15586
- const content = await readFile13(absPath, "utf-8");
16263
+ const content = await readFile14(absPath, "utf-8");
15587
16264
  return c.json({ content, path: filePath });
15588
16265
  } catch (error) {
15589
16266
  return errorResponse(
@@ -15619,9 +16296,9 @@ async function handleSaveThreadAIFile(c, threadManager) {
15619
16296
  if (!absPath) {
15620
16297
  return c.json({ error: { code: "BAD_REQUEST", message: "Invalid file path" } }, 400);
15621
16298
  }
15622
- const parentDir = join25(absPath, "..");
15623
- await mkdir8(parentDir, { recursive: true });
15624
- await writeFile6(absPath, content, "utf-8");
16299
+ const parentDir = join26(absPath, "..");
16300
+ await mkdir7(parentDir, { recursive: true });
16301
+ await writeFile5(absPath, content, "utf-8");
15625
16302
  return c.json({ success: true, path: filePath });
15626
16303
  } catch (error) {
15627
16304
  return errorResponse(
@@ -15702,17 +16379,17 @@ async function handleCreateThreadAgent(c, threadManager) {
15702
16379
  if (!thread) {
15703
16380
  return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
15704
16381
  }
15705
- const agentRelPath = join25(".agents", "agents", name);
15706
- const agentAbsPath = join25(thread.path, agentRelPath);
15707
- const agentFileRelPath = join25(agentRelPath, "AGENT.md");
15708
- const agentFileAbsPath = join25(agentAbsPath, "AGENT.md");
15709
- if (existsSync16(agentAbsPath)) {
16382
+ const agentRelPath = join26(".agents", "agents", name);
16383
+ const agentAbsPath = join26(thread.path, agentRelPath);
16384
+ const agentFileRelPath = join26(agentRelPath, "AGENT.md");
16385
+ const agentFileAbsPath = join26(agentAbsPath, "AGENT.md");
16386
+ if (existsSync15(agentAbsPath)) {
15710
16387
  return c.json(
15711
16388
  { error: { code: "CONFLICT", message: `Agent '${name}' already exists` } },
15712
16389
  409
15713
16390
  );
15714
16391
  }
15715
- await mkdir8(agentAbsPath, { recursive: true });
16392
+ await mkdir7(agentAbsPath, { recursive: true });
15716
16393
  const toolsLine = tools ? `
15717
16394
  tools: ${tools}` : "";
15718
16395
  const agentContent = `---
@@ -15722,7 +16399,7 @@ description: ${description}${toolsLine}
15722
16399
 
15723
16400
  Place your agent system prompt here as markdown. This will be used as the system prompt when this agent is invoked as a subagent.
15724
16401
  `;
15725
- await writeFile6(agentFileAbsPath, agentContent, "utf-8");
16402
+ await writeFile5(agentFileAbsPath, agentContent, "utf-8");
15726
16403
  return c.json({
15727
16404
  success: true,
15728
16405
  path: agentFileRelPath,
@@ -15769,17 +16446,17 @@ async function handleCreateThreadSkill(c, threadManager) {
15769
16446
  if (!thread) {
15770
16447
  return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
15771
16448
  }
15772
- const skillRelPath = join25(".agents", "skills", name);
15773
- const skillAbsPath = join25(thread.path, skillRelPath);
15774
- const skillFileRelPath = join25(skillRelPath, "SKILL.md");
15775
- const skillFileAbsPath = join25(skillAbsPath, "SKILL.md");
15776
- if (existsSync16(skillAbsPath)) {
16449
+ const skillRelPath = join26(".agents", "skills", name);
16450
+ const skillAbsPath = join26(thread.path, skillRelPath);
16451
+ const skillFileRelPath = join26(skillRelPath, "SKILL.md");
16452
+ const skillFileAbsPath = join26(skillAbsPath, "SKILL.md");
16453
+ if (existsSync15(skillAbsPath)) {
15777
16454
  return c.json(
15778
16455
  { error: { code: "CONFLICT", message: `Skill '${name}' already exists` } },
15779
16456
  409
15780
16457
  );
15781
16458
  }
15782
- await mkdir8(skillAbsPath, { recursive: true });
16459
+ await mkdir7(skillAbsPath, { recursive: true });
15783
16460
  const skillContent = `---
15784
16461
  name: ${name}
15785
16462
  description: ${description}
@@ -15787,7 +16464,7 @@ description: ${description}
15787
16464
 
15788
16465
  Place your skill instructions here as markdown. This will be used when Tarsk is prompted with a request related to the description of this skill.
15789
16466
  `;
15790
- await writeFile6(skillFileAbsPath, skillContent, "utf-8");
16467
+ await writeFile5(skillFileAbsPath, skillContent, "utf-8");
15791
16468
  return c.json({
15792
16469
  success: true,
15793
16470
  path: skillFileRelPath,
@@ -15846,9 +16523,7 @@ async function handleInstallGithubFolder(c, threadManager) {
15846
16523
  }
15847
16524
 
15848
16525
  // src/features/threads/threads-project-scripts.route.ts
15849
- import { readFile as readFile14 } from "fs/promises";
15850
- import { join as join26 } from "path";
15851
- import { existsSync as existsSync17 } from "fs";
16526
+ init_database();
15852
16527
 
15853
16528
  // src/features/projects/script-process-manager.ts
15854
16529
  init_utils();
@@ -15866,9 +16541,13 @@ var ScriptProcessManager = class {
15866
16541
  if (this.processes.has(key)) {
15867
16542
  this.stopScript(key);
15868
16543
  }
15869
- const child = spawnShell(command, {
16544
+ const { shell, args: shellArgs } = getShellConfig();
16545
+ const child = spawnProcess(shell, [...shellArgs, command], {
15870
16546
  cwd,
15871
16547
  detached: true,
16548
+ env: Object.fromEntries(
16549
+ Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
16550
+ ),
15872
16551
  stdio: ["pipe", "pipe", "pipe"]
15873
16552
  });
15874
16553
  if (child.pid !== void 0) {
@@ -15955,12 +16634,8 @@ async function handleGetProjectScripts(c, threadManager) {
15955
16634
  if (!thread) {
15956
16635
  return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
15957
16636
  }
15958
- const scriptsPath = join26(thread.path, ".agents", "project-scripts.json");
15959
- if (!existsSync17(scriptsPath)) {
15960
- return successResponse(c, { scripts: [] });
15961
- }
15962
- const content = await readFile14(scriptsPath, "utf-8");
15963
- const scripts = JSON.parse(content);
16637
+ const db = await getDatabase();
16638
+ const scripts = await getScriptsByProject(db, thread.projectId);
15964
16639
  const enrichedScripts = scripts.map((s) => ({
15965
16640
  ...s,
15966
16641
  isRunning: scriptProcessManager.isRunning(`${thread.projectId}:${s.name}`)
@@ -16125,7 +16800,7 @@ function createThreadRoutes(threadManager, gitManager, conversationManager) {
16125
16800
 
16126
16801
  // src/features/git/git.manager.ts
16127
16802
  init_utils();
16128
- import { mkdir as mkdir9 } from "fs/promises";
16803
+ import { mkdir as mkdir8 } from "fs/promises";
16129
16804
  import { dirname as dirname5 } from "path";
16130
16805
  var GitManagerImpl = class {
16131
16806
  /**
@@ -16197,7 +16872,7 @@ var GitManagerImpl = class {
16197
16872
  }
16198
16873
  try {
16199
16874
  const parentDir = dirname5(targetPath);
16200
- await mkdir9(parentDir, { recursive: true });
16875
+ await mkdir8(parentDir, { recursive: true });
16201
16876
  const gitProcess = spawnProcess("git", ["clone", "--progress", gitUrl, targetPath]);
16202
16877
  const eventQueue = [];
16203
16878
  let processExited = false;
@@ -16403,7 +17078,7 @@ var GitManagerImpl = class {
16403
17078
  }
16404
17079
  async initRepository(repoPath) {
16405
17080
  try {
16406
- await mkdir9(repoPath, { recursive: true });
17081
+ await mkdir8(repoPath, { recursive: true });
16407
17082
  } catch (error) {
16408
17083
  if (error.code !== "EEXIST") {
16409
17084
  throw error;
@@ -16450,11 +17125,11 @@ async function gitUserHandler(c) {
16450
17125
  }
16451
17126
 
16452
17127
  // src/features/git/git-status.route.ts
16453
- import { existsSync as existsSync19 } from "fs";
17128
+ import { existsSync as existsSync17 } from "fs";
16454
17129
 
16455
17130
  // src/features/git/git.utils.ts
16456
17131
  init_utils();
16457
- import { existsSync as existsSync18, readFileSync as readFileSync4, statSync as statSync4 } from "fs";
17132
+ import { existsSync as existsSync16, readFileSync as readFileSync5, statSync as statSync4 } from "fs";
16458
17133
  import { isAbsolute as isAbsolute3, normalize as normalize2, resolve as resolve3, join as join27 } from "path";
16459
17134
  import {
16460
17135
  completeSimple,
@@ -16819,10 +17494,10 @@ async function getUntrackedFilesDiff(gitRoot) {
16819
17494
  const maxFileSize = 1e5;
16820
17495
  for (const relPath of untrackedPaths) {
16821
17496
  const fullPath = join27(gitRoot, relPath);
16822
- if (!existsSync18(fullPath)) continue;
17497
+ if (!existsSync16(fullPath)) continue;
16823
17498
  try {
16824
17499
  if (statSync4(fullPath).isDirectory()) continue;
16825
- const content = readFileSync4(fullPath, "utf-8");
17500
+ const content = readFileSync5(fullPath, "utf-8");
16826
17501
  const safeContent = content.length > maxFileSize ? content.slice(0, maxFileSize) + "\n...(truncated)" : content;
16827
17502
  const linesForDiff = safeContent.split(/\r?\n/).map((l) => `+${l}`).join("\n");
16828
17503
  parts.push(
@@ -17358,7 +18033,7 @@ async function gitStatusHandler(c, metadataManager) {
17358
18033
  return c.json({ error: "Thread path not found" }, 404);
17359
18034
  }
17360
18035
  const absolutePath = resolveThreadPath(repoPath);
17361
- if (!existsSync19(absolutePath)) {
18036
+ if (!existsSync17(absolutePath)) {
17362
18037
  return c.json(
17363
18038
  {
17364
18039
  error: `Thread repo path does not exist: ${absolutePath}. Check that the project folder is present.`
@@ -17402,7 +18077,7 @@ async function gitStatusHandler(c, metadataManager) {
17402
18077
 
17403
18078
  // src/features/git/git-diff.route.ts
17404
18079
  init_utils();
17405
- import { readFileSync as readFileSync5 } from "fs";
18080
+ import { readFileSync as readFileSync6 } from "fs";
17406
18081
  import { resolve as resolve4 } from "path";
17407
18082
  async function gitDiffHandler(c, metadataManager) {
17408
18083
  try {
@@ -17465,7 +18140,7 @@ async function gitDiffHandler(c, metadataManager) {
17465
18140
  oldContent = content;
17466
18141
  } else if (file.status === "added" && file.path.startsWith("(new)")) {
17467
18142
  try {
17468
- newContent = readFileSync5(resolve4(gitRoot, file.path.replace("(new) ", "")), "utf-8");
18143
+ newContent = readFileSync6(resolve4(gitRoot, file.path.replace("(new) ", "")), "utf-8");
17469
18144
  } catch {
17470
18145
  newContent = "";
17471
18146
  }
@@ -17498,7 +18173,7 @@ async function gitDiffHandler(c, metadataManager) {
17498
18173
  });
17499
18174
  oldContent = oldContentOutput;
17500
18175
  try {
17501
- newContent = readFileSync5(resolve4(gitRoot, file.path), "utf-8");
18176
+ newContent = readFileSync6(resolve4(gitRoot, file.path), "utf-8");
17502
18177
  } catch {
17503
18178
  newContent = "";
17504
18179
  }
@@ -17562,7 +18237,7 @@ async function gitDiffHandler(c, metadataManager) {
17562
18237
  }
17563
18238
 
17564
18239
  // src/features/git/git-generate-commit-message.route.ts
17565
- import { existsSync as existsSync20 } from "fs";
18240
+ import { existsSync as existsSync18 } from "fs";
17566
18241
  async function gitGenerateCommitMessageHandler(c, metadataManager) {
17567
18242
  try {
17568
18243
  const threadId = c.req.param("threadId");
@@ -17588,7 +18263,7 @@ async function gitGenerateCommitMessageHandler(c, metadataManager) {
17588
18263
  const absolutePath = resolveThreadPath(repoPath);
17589
18264
  process.stdout.write(`[generate-commit-message] resolved path: ${absolutePath}
17590
18265
  `);
17591
- if (!existsSync20(absolutePath)) {
18266
+ if (!existsSync18(absolutePath)) {
17592
18267
  process.stdout.write(`[generate-commit-message] path does not exist: ${absolutePath}
17593
18268
  `);
17594
18269
  return c.json(
@@ -18068,7 +18743,7 @@ async function gitGithubStatusHandler(c, metadataManager) {
18068
18743
  }
18069
18744
 
18070
18745
  // src/features/git/git-unified-status.route.ts
18071
- import { existsSync as existsSync21 } from "fs";
18746
+ import { existsSync as existsSync19 } from "fs";
18072
18747
  async function gitUnifiedStatusHandler(c, metadataManager, db) {
18073
18748
  try {
18074
18749
  const threadId = c.req.param("threadId");
@@ -18088,7 +18763,7 @@ async function gitUnifiedStatusHandler(c, metadataManager, db) {
18088
18763
  return c.json({ error: "Thread path not found" }, 404);
18089
18764
  }
18090
18765
  const absolutePath = resolveThreadPath(repoPath);
18091
- if (!existsSync21(absolutePath)) {
18766
+ if (!existsSync19(absolutePath)) {
18092
18767
  return c.json(
18093
18768
  {
18094
18769
  error: `Thread repo path does not exist: ${absolutePath}. Check that the project folder is present.`
@@ -18434,7 +19109,7 @@ async function gitCreateBranchHandler(c, metadataManager) {
18434
19109
  }
18435
19110
 
18436
19111
  // src/features/git/git-sync-branch.route.ts
18437
- import { existsSync as existsSync22 } from "fs";
19112
+ import { existsSync as existsSync20 } from "fs";
18438
19113
  async function gitSyncBranchHandler(c, metadataManager) {
18439
19114
  try {
18440
19115
  const threadId = c.req.param("threadId");
@@ -18451,7 +19126,7 @@ async function gitSyncBranchHandler(c, metadataManager) {
18451
19126
  }
18452
19127
  const absolutePath = resolveThreadPath(repoPath);
18453
19128
  console.log(`[sync-branch] Resolved path: ${absolutePath}`);
18454
- if (!existsSync22(absolutePath)) {
19129
+ if (!existsSync20(absolutePath)) {
18455
19130
  console.log(`[sync-branch] Path does not exist: ${absolutePath}`);
18456
19131
  return c.json(
18457
19132
  {
@@ -18858,6 +19533,168 @@ async function gitRevertFileHandler(c, metadataManager) {
18858
19533
  }
18859
19534
  }
18860
19535
 
19536
+ // src/features/git/git-checkpoint.route.ts
19537
+ init_utils();
19538
function stashWithLabel(gitRoot, label) {
  // Runs `git stash push --include-untracked -m <label>` inside gitRoot.
  // Resolves with true when a stash entry was created, false when git
  // reported "No local changes to save"; rejects on a non-zero exit or
  // a spawn failure.
  return new Promise((resolvePromise, reject) => {
    const proc = spawnProcess("git", ["stash", "push", "--include-untracked", "-m", label], {
      cwd: gitRoot
    });
    let stdoutText = "";
    let stderrText = "";
    proc.stdout?.on("data", (chunk) => {
      stdoutText += chunk.toString();
    });
    proc.stderr?.on("data", (chunk) => {
      stderrText += chunk.toString();
    });
    proc.on("error", reject);
    proc.on("close", (code) => {
      if (code !== 0) {
        reject(new Error(stderrText || "Failed to create checkpoint"));
        return;
      }
      resolvePromise(!stdoutText.includes("No local changes to save"));
    });
  });
}
19565
async function gitCheckpointHandler(c, metadataManager) {
  // POST /checkpoint/:threadId — snapshot the thread's working tree into a
  // labeled git stash ("tarsk-checkpoint:<messageId>") so it can later be
  // restored via the checkpoint-restore route. Soft failures (thread has no
  // path, path is not a git repo) return success:false with a reason rather
  // than an HTTP error.
  try {
    const threadId = c.req.param("threadId");
    const { messageId } = await c.req.json();
    if (!messageId) {
      return c.json({ error: "messageId is required" }, 400);
    }
    const allThreads = await metadataManager.loadThreads();
    const target = allThreads.find((t) => t.id === threadId);
    if (!target) {
      return c.json({ error: "Thread not found" }, 404);
    }
    if (!target.path) {
      return c.json({ success: false, reason: "no_path" });
    }
    const absolutePath = resolveThreadPath(target.path);
    let gitRoot;
    try {
      gitRoot = await getGitRoot(absolutePath);
    } catch {
      return c.json({ success: false, reason: "not_a_git_repo" });
    }
    const label = `tarsk-checkpoint:${messageId}`;
    const stashed = await stashWithLabel(gitRoot, label);
    return c.json({ success: true, stashed, checkpointRef: label });
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to create checkpoint";
    return c.json({ error: message }, 500);
  }
}
19597
+
19598
+ // src/features/git/git-checkpoint-restore.route.ts
19599
+ init_utils();
19600
function listStashes(gitRoot) {
  // Parses `git stash list --format=%gd %s` output into { index, label }
  // entries, stripping the "On <branch>:" / "WIP on <branch>:" prefix that
  // git prepends to stash messages. Lines that do not match the expected
  // shape are dropped. Rejects when git exits non-zero or fails to spawn.
  return new Promise((resolvePromise, reject) => {
    const proc = spawnProcess("git", ["stash", "list", "--format=%gd %s"], { cwd: gitRoot });
    let stdoutText = "";
    let stderrText = "";
    proc.stdout?.on("data", (chunk) => {
      stdoutText += chunk.toString();
    });
    proc.stderr?.on("data", (chunk) => {
      stderrText += chunk.toString();
    });
    proc.on("error", reject);
    proc.on("close", (code) => {
      if (code !== 0) {
        reject(new Error(stderrText || "Failed to list stashes"));
        return;
      }
      const entries = [];
      for (const line of stdoutText.trim().split("\n")) {
        if (line.length === 0) continue;
        const match = line.match(/^stash@\{(\d+)\}\s+(?:On \S+:|WIP on \S+:)?\s*(.*)$/);
        if (match) {
          entries.push({ index: parseInt(match[1], 10), label: match[2].trim() });
        }
      }
      resolvePromise(entries);
    });
  });
}
19630
function discardWorkingChanges(gitRoot) {
  // Resets the working tree before a checkpoint stash is re-applied:
  // `git checkout -- .` restores tracked files, then `git clean -fd`
  // removes untracked files and directories.
  return new Promise((resolvePromise, reject) => {
    const proc = spawnProcess("git", ["checkout", "--", "."], { cwd: gitRoot });
    proc.on("error", reject);
    proc.on("close", (checkoutCode) => {
      // Bug fix: the checkout exit code was previously ignored, so a failed
      // restore of tracked files would silently proceed to `git clean` and
      // could resolve as success.
      if (checkoutCode !== 0) {
        reject(new Error("Failed to discard tracked changes"));
        return;
      }
      const clean = spawnProcess("git", ["clean", "-fd"], { cwd: gitRoot });
      clean.on("error", reject);
      clean.on("close", (code) => {
        if (code === 0) resolvePromise();
        else reject(new Error("Failed to clean working tree"));
      });
    });
  });
}
19644
function applyStash(gitRoot, index) {
  // Applies stash@{index} onto the working tree. Uses `apply` rather than
  // `pop`, so the stash entry is kept and the checkpoint stays restorable.
  return new Promise((resolvePromise, reject) => {
    const proc = spawnProcess("git", ["stash", "apply", `stash@{${index}}`], { cwd: gitRoot });
    let stderrText = "";
    proc.stderr?.on("data", (chunk) => {
      stderrText += chunk.toString();
    });
    proc.on("error", reject);
    proc.on("close", (code) => {
      if (code !== 0) {
        reject(new Error(stderrText || "Failed to apply stash"));
        return;
      }
      resolvePromise();
    });
  });
}
19659
+ }
19660
async function gitCheckpointRestoreHandler(c, metadataManager) {
  // POST /checkpoint-restore/:threadId — discard the current working-tree
  // changes and re-apply the stash entry whose label equals
  // body.checkpointRef (as produced by the checkpoint route).
  try {
    const threadId = c.req.param("threadId");
    const { checkpointRef } = await c.req.json();
    if (!checkpointRef) {
      return c.json({ error: "checkpointRef is required" }, 400);
    }
    const allThreads = await metadataManager.loadThreads();
    const target = allThreads.find((t) => t.id === threadId);
    if (!target) {
      return c.json({ error: "Thread not found" }, 404);
    }
    if (!target.path) {
      return c.json({ error: "Thread has no path" }, 400);
    }
    const absolutePath = resolveThreadPath(target.path);
    let gitRoot;
    try {
      gitRoot = await getGitRoot(absolutePath);
    } catch {
      return c.json({ error: "Not a git repository" }, 400);
    }
    const stashes = await listStashes(gitRoot);
    const checkpoint = stashes.find((s) => s.label === checkpointRef);
    if (!checkpoint) {
      return c.json({ error: "Checkpoint not found in stash list" }, 404);
    }
    // NOTE(review): uncommitted work is discarded before the stash is
    // applied — presumably the client confirms with the user first; verify.
    await discardWorkingChanges(gitRoot);
    await applyStash(gitRoot, checkpoint.index);
    return c.json({ success: true });
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to restore checkpoint";
    return c.json({ error: message }, 500);
  }
}
19697
+
18861
19698
  // src/features/git/git.routes.ts
18862
19699
  function createGitRoutes(metadataManager) {
18863
19700
  const router = new Hono11();
@@ -18946,6 +19783,12 @@ function createGitRoutes(metadataManager) {
18946
19783
  await invalidateGitStatusCache(db, c.req.param("threadId"));
18947
19784
  return gitRevertFileHandler(c, metadataManager);
18948
19785
  });
19786
+ router.post("/checkpoint/:threadId", async (c) => {
19787
+ return gitCheckpointHandler(c, metadataManager);
19788
+ });
19789
+ router.post("/checkpoint-restore/:threadId", async (c) => {
19790
+ return gitCheckpointRestoreHandler(c, metadataManager);
19791
+ });
18949
19792
  return router;
18950
19793
  }
18951
19794
 
@@ -18990,7 +19833,7 @@ function createUpdateRoutes(updater) {
18990
19833
 
18991
19834
  // src/features/mcp/mcp.routes.ts
18992
19835
  import { Hono as Hono13 } from "hono";
18993
- import { readFile as readFile15, writeFile as writeFile7, mkdir as mkdir10, access as access6 } from "fs/promises";
19836
+ import { readFile as readFile15, writeFile as writeFile6, mkdir as mkdir9, access as access6 } from "fs/promises";
18994
19837
  import { join as join28, dirname as dirname6 } from "path";
18995
19838
 
18996
19839
  // src/features/mcp/mcp.popular.json
@@ -19117,8 +19960,8 @@ async function readMCPConfig(projectPath) {
19117
19960
async function writeMCPConfig(projectPath, config) {
  // Persists the MCP config: reuses the file that already holds it when one
  // exists, otherwise defaults to <project>/.agents/mcp.json, creating the
  // parent directory as needed.
  const existing = await readMCPConfig(projectPath);
  const fallbackPath = join28(projectPath, ".agents/mcp.json");
  const targetPath = existing?.filePath ?? fallbackPath;
  const serialized = JSON.stringify(config, null, 2);
  await mkdir9(dirname6(targetPath), { recursive: true });
  await writeFile6(targetPath, serialized, "utf-8");
}
19123
19966
  function createMCPRoutes() {
19124
19967
  const router = new Hono13();
@@ -19833,15 +20676,7 @@ function createBrowserJsRoutes(threadManager) {
19833
20676
  400
19834
20677
  );
19835
20678
  }
19836
- const resolved = submitWebWorkerResult(toolCallId, result);
19837
- if (!resolved) {
19838
- return errorResponse(
19839
- c,
19840
- ErrorCodes.INVALID_REQUEST,
19841
- "No pending web worker execution found for this toolCallId",
19842
- 404
19843
- );
19844
- }
20679
+ submitWebWorkerResult(toolCallId, result);
19845
20680
  return c.json({ success: true });
19846
20681
  } catch (error) {
19847
20682
  return errorResponse(
@@ -19894,8 +20729,8 @@ function createBrowserJsRoutes(threadManager) {
19894
20729
  }
19895
20730
 
19896
20731
  // src/server.ts
19897
- var __filename = fileURLToPath2(import.meta.url);
19898
- var __dirname = path5.dirname(__filename);
20732
+ var __filename = fileURLToPath(import.meta.url);
20733
+ var __dirname = path4.dirname(__filename);
19899
20734
  async function startTarskServer(options) {
19900
20735
  const { isDebug: isDebug2, publicDir: publicDirOverride } = options;
19901
20736
  const port = isDebug2 ? 462 : process.env.PORT ? parseInt(process.env.PORT) : 641;
@@ -19974,16 +20809,16 @@ async function startTarskServer(options) {
19974
20809
  app.route("/api/update", createUpdateRoutes(options.updater));
19975
20810
  createSlashCommandRoutes(app, threadManager);
19976
20811
  createRuleRoutes(app, projectManager);
19977
- const prodPublicDir = path5.join(__dirname, "public");
19978
- const devCopiedPublicDir = path5.join(process.cwd(), "dist", "public");
19979
- const appDistDir = path5.join(process.cwd(), "..", "app", "dist");
20812
+ const prodPublicDir = path4.join(__dirname, "public");
20813
+ const devCopiedPublicDir = path4.join(process.cwd(), "dist", "public");
20814
+ const appDistDir = path4.join(process.cwd(), "..", "app", "dist");
19980
20815
  const resolvedPublicDir = publicDirOverride ?? [prodPublicDir, devCopiedPublicDir, appDistDir].find((dirPath) => fs3.existsSync(dirPath));
19981
20816
  if (!resolvedPublicDir) {
19982
20817
  throw new Error(
19983
20818
  `No static frontend assets found. Expected one of: ${prodPublicDir}, ${devCopiedPublicDir}, ${appDistDir}. Build the app first with \`cd ../app && bun run build\`.`
19984
20819
  );
19985
20820
  }
19986
- const staticRoot = path5.relative(process.cwd(), resolvedPublicDir);
20821
+ const staticRoot = path4.relative(process.cwd(), resolvedPublicDir);
19987
20822
  app.use("/*", async (c, next) => {
19988
20823
  if (c.req.path.startsWith("/api/")) {
19989
20824
  return next();
@@ -19995,7 +20830,7 @@ async function startTarskServer(options) {
19995
20830
  return next();
19996
20831
  }
19997
20832
  return serveStatic({
19998
- path: path5.relative(process.cwd(), path5.join(resolvedPublicDir, "index.html"))
20833
+ path: path4.relative(process.cwd(), path4.join(resolvedPublicDir, "index.html"))
19999
20834
  })(c, next);
20000
20835
  });
20001
20836
  app.all("*", (c) => {
@@ -20028,8 +20863,8 @@ async function startTarskServer(options) {
20028
20863
 
20029
20864
  // src/index.ts
20030
20865
  import fs4 from "fs";
20031
- import path6 from "path";
20032
- import { fileURLToPath as fileURLToPath3 } from "url";
20866
+ import path5 from "path";
20867
+ import { fileURLToPath as fileURLToPath2 } from "url";
20033
20868
  var args = process.argv.slice(2);
20034
20869
  var isDebug = args.includes("--debug");
20035
20870
  var shouldOpenBrowser = args.includes("--open");
@@ -20037,9 +20872,9 @@ if (!isDebug) {
20037
20872
  console.log = () => {
20038
20873
  };
20039
20874
  } else {
20040
- const __filename2 = fileURLToPath3(import.meta.url);
20041
- const cliDir = path6.resolve(path6.dirname(__filename2), "..");
20042
- const logFilePath = path6.join(cliDir, "logs.txt");
20875
+ const __filename2 = fileURLToPath2(import.meta.url);
20876
+ const cliDir = path5.resolve(path5.dirname(__filename2), "..");
20877
+ const logFilePath = path5.join(cliDir, "logs.txt");
20043
20878
  try {
20044
20879
  fs4.writeFileSync(logFilePath, "");
20045
20880
  } catch {