tarsk 0.4.15 → 0.4.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1427 -654
- package/dist/public/assets/{account-view-Bj-Sn682.js → account-view-Ct1PS3qy.js} +1 -1
- package/dist/public/assets/api-CKm8861D.js +1 -0
- package/dist/public/assets/{browser-tab-C9hYTMS0.js → browser-tab-C_Iqe3Vc.js} +1 -1
- package/dist/public/assets/context-menu-3kNmHEtL.js +1 -0
- package/dist/public/assets/conversation-history-view-Bk1wydkA.js +1 -0
- package/dist/public/assets/{dialogs-config-CteuxXdD.js → dialogs-config-DQYl3sMZ.js} +3 -3
- package/dist/public/assets/diff-view-BqbiT0d1.js +3 -0
- package/dist/public/assets/dist-OF8pavjm.js +134 -0
- package/dist/public/assets/explorer-tab-view-7KsDKtCg.js +2 -0
- package/dist/public/assets/{explorer-tree-BMhkGVQO.js → explorer-tree-DvAdnMk6.js} +1 -1
- package/dist/public/assets/explorer-view-CIHVPut6.js +1 -0
- package/dist/public/assets/history-view-D-rFKy95.js +1 -0
- package/dist/public/assets/index-CkjiMXPO.js +49 -0
- package/dist/public/assets/index-JT2CTiRC.css +1 -0
- package/dist/public/assets/onboarding-CXNcrF0p.js +1 -0
- package/dist/public/assets/onboarding-dialog-HeNGG40b.js +1 -0
- package/dist/public/assets/{project-settings-view-a6woZhAb.js → project-settings-view-e-rir43e.js} +1 -1
- package/dist/public/assets/provider-details-view-CqOZhRbJ.js +1 -0
- package/dist/public/assets/providers-sidebar-DW9GF1go.js +1 -0
- package/dist/public/assets/react-vendor-CdgKcH5K.js +17 -0
- package/dist/public/assets/{rolldown-runtime-Dw2cE7zH.js → rolldown-runtime-B1FJdls4.js} +1 -1
- package/dist/public/assets/{settings-view-CrZ7av2I.js → settings-view-CZRZDm9N.js} +2 -2
- package/dist/public/assets/{store-DXPO1PQf.js → store-DE6QsxYI.js} +1 -1
- package/dist/public/assets/{tab-context-B7Q9fKIl.js → tab-context-YIVEeWqD.js} +1 -1
- package/dist/public/assets/{terminal-panel-BxS2vp4u.js → terminal-panel-fvpKl9qj.js} +2 -2
- package/dist/public/assets/textarea-CX-t8DNp.js +1 -0
- package/dist/public/assets/todos-view-B5EL6lKk.js +1 -0
- package/dist/public/assets/{use-toast-BhhMvy1W.js → use-toast-Dpl0D_FP.js} +1 -1
- package/dist/public/assets/{utils-DcGbvYrk.js → utils-B6DpTHZi.js} +1 -1
- package/dist/public/index.html +12 -12
- package/package.json +2 -2
- package/dist/public/assets/api-BuJ1dPr6.js +0 -1
- package/dist/public/assets/context-menu-DeHLk-VY.js +0 -1
- package/dist/public/assets/conversation-history-view-cEjKqUjG.js +0 -1
- package/dist/public/assets/diff-view-B2VYMGHR.js +0 -3
- package/dist/public/assets/explorer-tab-view-CROJs14G.js +0 -2
- package/dist/public/assets/explorer-view-BsDRu4DY.js +0 -1
- package/dist/public/assets/history-view-zlzHeGwz.js +0 -1
- package/dist/public/assets/index-BsVrm-8c.js +0 -80
- package/dist/public/assets/index-DyoTajdY.css +0 -1
- package/dist/public/assets/onboarding-C0LojpU9.js +0 -1
- package/dist/public/assets/onboarding-dialog-DdeXtx1_.js +0 -1
- package/dist/public/assets/provider-details-view-CMYKkW6H.js +0 -1
- package/dist/public/assets/providers-sidebar-8XbQfUhe.js +0 -1
- package/dist/public/assets/react-vendor-Bt_xeXcZ.js +0 -17
- package/dist/public/assets/textarea-BBmfPFv5.js +0 -1
- package/dist/public/assets/todos-view-E35eshII.js +0 -1
package/dist/index.js
CHANGED
|
@@ -99,18 +99,18 @@ __export(database_exports, {
|
|
|
99
99
|
initializeSchema: () => initializeSchema
|
|
100
100
|
});
|
|
101
101
|
import { createClient } from "@libsql/client";
|
|
102
|
-
import { join as
|
|
102
|
+
import { join as join2 } from "path";
|
|
103
103
|
import { homedir as homedir2 } from "os";
|
|
104
104
|
import { mkdirSync } from "fs";
|
|
105
105
|
function getDatabasePath() {
|
|
106
|
-
const appSupportDir =
|
|
107
|
-
const dataDir =
|
|
108
|
-
return
|
|
106
|
+
const appSupportDir = join2(homedir2(), "Library", "Application Support", "Tarsk");
|
|
107
|
+
const dataDir = join2(appSupportDir, "data");
|
|
108
|
+
return join2(dataDir, "tarsk.db");
|
|
109
109
|
}
|
|
110
110
|
async function initializeDatabase() {
|
|
111
111
|
const dbPath = getDatabasePath();
|
|
112
112
|
try {
|
|
113
|
-
const dataDir =
|
|
113
|
+
const dataDir = join2(homedir2(), "Library", "Application Support", "Tarsk", "data");
|
|
114
114
|
mkdirSync(dataDir, { recursive: true });
|
|
115
115
|
const db = createClient({
|
|
116
116
|
url: `file:${dbPath}`
|
|
@@ -214,6 +214,46 @@ async function initializeSchema(db) {
|
|
|
214
214
|
cachedAt TEXT NOT NULL
|
|
215
215
|
)
|
|
216
216
|
`);
|
|
217
|
+
await db.execute(`
|
|
218
|
+
CREATE TABLE IF NOT EXISTS code_files (
|
|
219
|
+
id INTEGER PRIMARY KEY,
|
|
220
|
+
thread_id TEXT NOT NULL,
|
|
221
|
+
file_path TEXT NOT NULL,
|
|
222
|
+
content TEXT NOT NULL,
|
|
223
|
+
indexed_at TEXT NOT NULL,
|
|
224
|
+
UNIQUE(thread_id, file_path)
|
|
225
|
+
)
|
|
226
|
+
`);
|
|
227
|
+
await db.execute(`
|
|
228
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS code_index USING fts5(
|
|
229
|
+
content,
|
|
230
|
+
content='code_files',
|
|
231
|
+
content_rowid='id'
|
|
232
|
+
)
|
|
233
|
+
`);
|
|
234
|
+
await db.execute(`
|
|
235
|
+
CREATE TABLE IF NOT EXISTS code_index_meta (
|
|
236
|
+
thread_id TEXT PRIMARY KEY,
|
|
237
|
+
indexed_at TEXT NOT NULL,
|
|
238
|
+
file_count INTEGER NOT NULL
|
|
239
|
+
)
|
|
240
|
+
`);
|
|
241
|
+
await db.execute(`
|
|
242
|
+
CREATE TABLE IF NOT EXISTS project_scripts (
|
|
243
|
+
id TEXT PRIMARY KEY,
|
|
244
|
+
projectId TEXT NOT NULL,
|
|
245
|
+
workspace TEXT NOT NULL,
|
|
246
|
+
name TEXT NOT NULL,
|
|
247
|
+
command TEXT NOT NULL,
|
|
248
|
+
friendlyName TEXT NOT NULL,
|
|
249
|
+
updatedAt TEXT NOT NULL,
|
|
250
|
+
FOREIGN KEY (projectId) REFERENCES projects(id) ON DELETE CASCADE,
|
|
251
|
+
UNIQUE(projectId, name)
|
|
252
|
+
)
|
|
253
|
+
`);
|
|
254
|
+
await db.execute(`
|
|
255
|
+
CREATE INDEX IF NOT EXISTS idx_project_scripts_projectId ON project_scripts(projectId)
|
|
256
|
+
`);
|
|
217
257
|
await db.execute(`
|
|
218
258
|
CREATE INDEX IF NOT EXISTS idx_threads_projectId ON threads(projectId)
|
|
219
259
|
`);
|
|
@@ -558,6 +598,36 @@ async function runMigrations(db) {
|
|
|
558
598
|
)
|
|
559
599
|
`);
|
|
560
600
|
}
|
|
601
|
+
const codeFilesExists = await db.execute(
|
|
602
|
+
`SELECT name FROM sqlite_master WHERE type='table' AND name='code_files'`
|
|
603
|
+
);
|
|
604
|
+
if (codeFilesExists.rows.length === 0) {
|
|
605
|
+
console.log("[db] Running migration: Creating code search tables");
|
|
606
|
+
await db.execute(`
|
|
607
|
+
CREATE TABLE code_files (
|
|
608
|
+
id INTEGER PRIMARY KEY,
|
|
609
|
+
thread_id TEXT NOT NULL,
|
|
610
|
+
file_path TEXT NOT NULL,
|
|
611
|
+
content TEXT NOT NULL,
|
|
612
|
+
indexed_at TEXT NOT NULL,
|
|
613
|
+
UNIQUE(thread_id, file_path)
|
|
614
|
+
)
|
|
615
|
+
`);
|
|
616
|
+
await db.execute(`
|
|
617
|
+
CREATE VIRTUAL TABLE code_index USING fts5(
|
|
618
|
+
content,
|
|
619
|
+
content='code_files',
|
|
620
|
+
content_rowid='id'
|
|
621
|
+
)
|
|
622
|
+
`);
|
|
623
|
+
await db.execute(`
|
|
624
|
+
CREATE TABLE code_index_meta (
|
|
625
|
+
thread_id TEXT PRIMARY KEY,
|
|
626
|
+
indexed_at TEXT NOT NULL,
|
|
627
|
+
file_count INTEGER NOT NULL
|
|
628
|
+
)
|
|
629
|
+
`);
|
|
630
|
+
}
|
|
561
631
|
const projectsInfo = await db.execute(`PRAGMA table_info(projects)`);
|
|
562
632
|
const hasPlanPrompt = projectsInfo.rows.some(
|
|
563
633
|
(col) => col.name === "planPrompt"
|
|
@@ -568,6 +638,28 @@ async function runMigrations(db) {
|
|
|
568
638
|
await db.execute(`ALTER TABLE projects ADD COLUMN testPrompt TEXT`);
|
|
569
639
|
await db.execute(`ALTER TABLE projects ADD COLUMN reviewPrompt TEXT`);
|
|
570
640
|
}
|
|
641
|
+
const projectScriptsExists = await db.execute(
|
|
642
|
+
`SELECT name FROM sqlite_master WHERE type='table' AND name='project_scripts'`
|
|
643
|
+
);
|
|
644
|
+
if (projectScriptsExists.rows.length === 0) {
|
|
645
|
+
console.log("[db] Running migration: Creating project_scripts table");
|
|
646
|
+
await db.execute(`
|
|
647
|
+
CREATE TABLE project_scripts (
|
|
648
|
+
id TEXT PRIMARY KEY,
|
|
649
|
+
projectId TEXT NOT NULL,
|
|
650
|
+
workspace TEXT NOT NULL,
|
|
651
|
+
name TEXT NOT NULL,
|
|
652
|
+
command TEXT NOT NULL,
|
|
653
|
+
friendlyName TEXT NOT NULL,
|
|
654
|
+
updatedAt TEXT NOT NULL,
|
|
655
|
+
FOREIGN KEY (projectId) REFERENCES projects(id) ON DELETE CASCADE,
|
|
656
|
+
UNIQUE(projectId, name)
|
|
657
|
+
)
|
|
658
|
+
`);
|
|
659
|
+
await db.execute(`
|
|
660
|
+
CREATE INDEX idx_project_scripts_projectId ON project_scripts(projectId)
|
|
661
|
+
`);
|
|
662
|
+
}
|
|
571
663
|
} catch (error) {
|
|
572
664
|
console.error("Failed to run migrations:", error);
|
|
573
665
|
throw error;
|
|
@@ -676,8 +768,8 @@ import fs3 from "fs";
|
|
|
676
768
|
import { Hono as Hono21 } from "hono";
|
|
677
769
|
import { cors } from "hono/cors";
|
|
678
770
|
import open3 from "open";
|
|
679
|
-
import
|
|
680
|
-
import { fileURLToPath
|
|
771
|
+
import path4 from "path";
|
|
772
|
+
import { fileURLToPath } from "url";
|
|
681
773
|
|
|
682
774
|
// src/agent/agent.executor.ts
|
|
683
775
|
import { Agent as Agent2 } from "@mariozechner/pi-agent-core";
|
|
@@ -1392,9 +1484,9 @@ var editSchema = Type2.Object({
|
|
|
1392
1484
|
newText: Type2.String({ description: "New text to replace the old text with" })
|
|
1393
1485
|
});
|
|
1394
1486
|
var defaultEditOperations = {
|
|
1395
|
-
readFile: (
|
|
1396
|
-
writeFile: (
|
|
1397
|
-
access: (
|
|
1487
|
+
readFile: (path6) => fsReadFile(path6),
|
|
1488
|
+
writeFile: (path6, content) => fsWriteFile(path6, content, "utf-8"),
|
|
1489
|
+
access: (path6) => fsAccess(path6, constants2.R_OK | constants2.W_OK)
|
|
1398
1490
|
};
|
|
1399
1491
|
function createEditTool(cwd, options) {
|
|
1400
1492
|
const ops = options?.operations ?? defaultEditOperations;
|
|
@@ -1403,14 +1495,14 @@ function createEditTool(cwd, options) {
|
|
|
1403
1495
|
label: "edit",
|
|
1404
1496
|
description: "Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
|
|
1405
1497
|
parameters: editSchema,
|
|
1406
|
-
execute: async (_toolCallId, { path:
|
|
1407
|
-
const absolutePath = resolveToCwd(
|
|
1498
|
+
execute: async (_toolCallId, { path: path6, oldText, newText }, signal) => {
|
|
1499
|
+
const absolutePath = resolveToCwd(path6, cwd);
|
|
1408
1500
|
validatePathWithinCwd(absolutePath, cwd);
|
|
1409
1501
|
return withAbortSignal(signal, async (isAborted) => {
|
|
1410
1502
|
try {
|
|
1411
1503
|
await ops.access(absolutePath);
|
|
1412
1504
|
} catch {
|
|
1413
|
-
throw new Error(`File not found: ${
|
|
1505
|
+
throw new Error(`File not found: ${path6}`);
|
|
1414
1506
|
}
|
|
1415
1507
|
if (isAborted()) return { content: [], details: void 0 };
|
|
1416
1508
|
const buffer = await ops.readFile(absolutePath);
|
|
@@ -1424,7 +1516,7 @@ function createEditTool(cwd, options) {
|
|
|
1424
1516
|
const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);
|
|
1425
1517
|
if (!matchResult.found) {
|
|
1426
1518
|
throw new Error(
|
|
1427
|
-
`Could not find the exact text in ${
|
|
1519
|
+
`Could not find the exact text in ${path6}. The old text must match exactly including all whitespace and newlines.`
|
|
1428
1520
|
);
|
|
1429
1521
|
}
|
|
1430
1522
|
const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
|
|
@@ -1432,7 +1524,7 @@ function createEditTool(cwd, options) {
|
|
|
1432
1524
|
const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;
|
|
1433
1525
|
if (occurrences > 1) {
|
|
1434
1526
|
throw new Error(
|
|
1435
|
-
`Found ${occurrences} occurrences of the text in ${
|
|
1527
|
+
`Found ${occurrences} occurrences of the text in ${path6}. The text must be unique. Please provide more context to make it unique.`
|
|
1436
1528
|
);
|
|
1437
1529
|
}
|
|
1438
1530
|
if (isAborted()) return { content: [], details: void 0 };
|
|
@@ -1440,7 +1532,7 @@ function createEditTool(cwd, options) {
|
|
|
1440
1532
|
const newContent = baseContent.substring(0, matchResult.index) + normalizedNewText + baseContent.substring(matchResult.index + matchResult.matchLength);
|
|
1441
1533
|
if (baseContent === newContent) {
|
|
1442
1534
|
throw new Error(
|
|
1443
|
-
`No changes made to ${
|
|
1535
|
+
`No changes made to ${path6}. The replacement produced identical content.`
|
|
1444
1536
|
);
|
|
1445
1537
|
}
|
|
1446
1538
|
const finalContent = bom + restoreLineEndings(newContent, originalEnding);
|
|
@@ -1448,7 +1540,7 @@ function createEditTool(cwd, options) {
|
|
|
1448
1540
|
if (isAborted()) return { content: [], details: void 0 };
|
|
1449
1541
|
const diffResult = generateDiffString(baseContent, newContent);
|
|
1450
1542
|
return {
|
|
1451
|
-
content: [{ type: "text", text: `Successfully replaced text in ${
|
|
1543
|
+
content: [{ type: "text", text: `Successfully replaced text in ${path6}.` }],
|
|
1452
1544
|
details: { diff: diffResult.diff, firstChangedLine: diffResult.firstChangedLine }
|
|
1453
1545
|
};
|
|
1454
1546
|
});
|
|
@@ -1475,13 +1567,13 @@ var findSchema = Type3.Object({
|
|
|
1475
1567
|
var DEFAULT_LIMIT = 1e3;
|
|
1476
1568
|
var defaultFindOperations = {
|
|
1477
1569
|
exists: existsSync3,
|
|
1478
|
-
glob: async (pattern, searchCwd, { ignore, limit }) => {
|
|
1570
|
+
glob: async (pattern, searchCwd, { ignore: ignore2, limit }) => {
|
|
1479
1571
|
const results = [];
|
|
1480
1572
|
try {
|
|
1481
1573
|
const found = globSync(pattern, {
|
|
1482
1574
|
cwd: searchCwd,
|
|
1483
1575
|
dot: true,
|
|
1484
|
-
ignore: ["**/node_modules/**", "**/.git/**", ...
|
|
1576
|
+
ignore: ["**/node_modules/**", "**/.git/**", ...ignore2],
|
|
1485
1577
|
mark: false
|
|
1486
1578
|
});
|
|
1487
1579
|
for (let i = 0; i < Math.min(found.length, limit); i++) {
|
|
@@ -1677,9 +1769,9 @@ function createGrepTool(cwd, options) {
|
|
|
1677
1769
|
const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT2);
|
|
1678
1770
|
const formatPath = (filePath) => {
|
|
1679
1771
|
if (isDirectory) {
|
|
1680
|
-
const
|
|
1681
|
-
if (
|
|
1682
|
-
return
|
|
1772
|
+
const relative7 = path2.relative(searchPath, filePath);
|
|
1773
|
+
if (relative7 && !relative7.startsWith("..")) {
|
|
1774
|
+
return relative7.replace(/\\/g, "/");
|
|
1683
1775
|
}
|
|
1684
1776
|
}
|
|
1685
1777
|
return path2.basename(filePath);
|
|
@@ -1843,287 +1935,625 @@ function createGrepTool(cwd, options) {
|
|
|
1843
1935
|
}
|
|
1844
1936
|
var grepTool = createGrepTool(process.cwd());
|
|
1845
1937
|
|
|
1846
|
-
// src/tools/
|
|
1847
|
-
|
|
1938
|
+
// src/tools/code-search.ts
|
|
1939
|
+
init_database();
|
|
1848
1940
|
import { Type as Type5 } from "@sinclair/typebox";
|
|
1849
|
-
|
|
1850
|
-
|
|
1851
|
-
|
|
1852
|
-
|
|
1853
|
-
|
|
1854
|
-
|
|
1855
|
-
|
|
1941
|
+
|
|
1942
|
+
// src/tools/code-search-indexer.ts
|
|
1943
|
+
init_database();
|
|
1944
|
+
import { readFileSync as readFileSync2 } from "fs";
|
|
1945
|
+
import { readFile, opendir } from "fs/promises";
|
|
1946
|
+
import { join as join3, relative as relative2, extname } from "path";
|
|
1947
|
+
import ignore from "ignore";
|
|
1948
|
+
var SKIP_DIRS = /* @__PURE__ */ new Set([
|
|
1949
|
+
"node_modules",
|
|
1950
|
+
".git",
|
|
1951
|
+
"dist",
|
|
1952
|
+
"build",
|
|
1953
|
+
"out",
|
|
1954
|
+
".next",
|
|
1955
|
+
".nuxt",
|
|
1956
|
+
"coverage",
|
|
1957
|
+
"__pycache__",
|
|
1958
|
+
"target",
|
|
1959
|
+
"vendor",
|
|
1960
|
+
".gradle",
|
|
1961
|
+
"Pods",
|
|
1962
|
+
".cache",
|
|
1963
|
+
".tox"
|
|
1964
|
+
]);
|
|
1965
|
+
var SKIP_FILENAMES = /* @__PURE__ */ new Set([
|
|
1966
|
+
"package-lock.json",
|
|
1967
|
+
"bun.lock",
|
|
1968
|
+
"yarn.lock",
|
|
1969
|
+
"Cargo.lock",
|
|
1970
|
+
"poetry.lock",
|
|
1971
|
+
"Gemfile.lock",
|
|
1972
|
+
"pnpm-lock.yaml",
|
|
1973
|
+
"composer.lock"
|
|
1974
|
+
]);
|
|
1975
|
+
var TEXT_EXTENSIONS = /* @__PURE__ */ new Set([
|
|
1976
|
+
".ts",
|
|
1977
|
+
".tsx",
|
|
1978
|
+
".js",
|
|
1979
|
+
".jsx",
|
|
1980
|
+
".mjs",
|
|
1981
|
+
".cjs",
|
|
1982
|
+
".py",
|
|
1983
|
+
".rb",
|
|
1984
|
+
".go",
|
|
1985
|
+
".rs",
|
|
1986
|
+
".java",
|
|
1987
|
+
".kt",
|
|
1988
|
+
".swift",
|
|
1989
|
+
".c",
|
|
1990
|
+
".cpp",
|
|
1991
|
+
".h",
|
|
1992
|
+
".hpp",
|
|
1993
|
+
".cs",
|
|
1994
|
+
".php",
|
|
1995
|
+
".sh",
|
|
1996
|
+
".bash",
|
|
1997
|
+
".zsh",
|
|
1998
|
+
".fish",
|
|
1999
|
+
".sql",
|
|
2000
|
+
".md",
|
|
2001
|
+
".mdx",
|
|
2002
|
+
".txt",
|
|
2003
|
+
".yaml",
|
|
2004
|
+
".yml",
|
|
2005
|
+
".toml",
|
|
2006
|
+
".json",
|
|
2007
|
+
".xml",
|
|
2008
|
+
".html",
|
|
2009
|
+
".css",
|
|
2010
|
+
".scss",
|
|
2011
|
+
".sass",
|
|
2012
|
+
".less"
|
|
2013
|
+
]);
|
|
2014
|
+
var MAX_FILE_SIZE = 512 * 1024;
|
|
2015
|
+
var BATCH_SIZE = 50;
|
|
2016
|
+
function readGitignore(dirPath) {
|
|
2017
|
+
const ig = ignore();
|
|
2018
|
+
try {
|
|
2019
|
+
const content = readFileSync2(join3(dirPath, ".gitignore"), "utf8");
|
|
2020
|
+
ig.add(content);
|
|
2021
|
+
} catch {
|
|
2022
|
+
}
|
|
2023
|
+
return ig;
|
|
2024
|
+
}
|
|
2025
|
+
async function collectFiles(rootPath, signal) {
|
|
2026
|
+
const results = [];
|
|
2027
|
+
const ignoreStack = [
|
|
2028
|
+
{ prefix: "", ig: readGitignore(rootPath) }
|
|
2029
|
+
];
|
|
2030
|
+
async function walk(dirPath) {
|
|
2031
|
+
if (signal?.aborted) return;
|
|
2032
|
+
const dirHandle = await opendir(dirPath, {}).catch(() => null);
|
|
2033
|
+
if (!dirHandle) return;
|
|
2034
|
+
const relDir = relative2(rootPath, dirPath);
|
|
2035
|
+
let pushedIgnore = false;
|
|
2036
|
+
if (relDir) {
|
|
2037
|
+
const ig = readGitignore(dirPath);
|
|
2038
|
+
ignoreStack.push({ prefix: relDir, ig });
|
|
2039
|
+
pushedIgnore = true;
|
|
2040
|
+
}
|
|
2041
|
+
function isIgnored(relPath) {
|
|
2042
|
+
for (const { prefix, ig } of ignoreStack) {
|
|
2043
|
+
const testPath = prefix ? relPath.slice(prefix.length + 1) : relPath;
|
|
2044
|
+
if (testPath && ig.ignores(testPath)) return true;
|
|
2045
|
+
}
|
|
2046
|
+
return false;
|
|
2047
|
+
}
|
|
2048
|
+
for await (const entry of dirHandle) {
|
|
2049
|
+
if (signal?.aborted) break;
|
|
2050
|
+
const entryPath = join3(dirPath, entry.name);
|
|
2051
|
+
const relPath = relative2(rootPath, entryPath);
|
|
2052
|
+
if (entry.isDirectory()) {
|
|
2053
|
+
if (SKIP_DIRS.has(entry.name)) continue;
|
|
2054
|
+
if (isIgnored(relPath)) continue;
|
|
2055
|
+
await walk(entryPath);
|
|
2056
|
+
} else if (entry.isFile()) {
|
|
2057
|
+
if (SKIP_FILENAMES.has(entry.name)) continue;
|
|
2058
|
+
if (!TEXT_EXTENSIONS.has(extname(entry.name))) continue;
|
|
2059
|
+
if (isIgnored(relPath)) continue;
|
|
2060
|
+
results.push(entryPath);
|
|
2061
|
+
}
|
|
2062
|
+
}
|
|
2063
|
+
if (pushedIgnore) ignoreStack.pop();
|
|
2064
|
+
}
|
|
2065
|
+
await walk(rootPath);
|
|
2066
|
+
return results;
|
|
2067
|
+
}
|
|
2068
|
+
async function ensureThreadIndex(db, threadId, threadPath, signal) {
|
|
2069
|
+
const meta = await db.execute("SELECT thread_id FROM code_index_meta WHERE thread_id = ?", [
|
|
2070
|
+
threadId
|
|
2071
|
+
]);
|
|
2072
|
+
if (meta.rows.length > 0) return;
|
|
2073
|
+
console.log(`[code-search] indexing ${threadId}: scanning files...`);
|
|
2074
|
+
const start = Date.now();
|
|
2075
|
+
const files = await collectFiles(threadPath, signal);
|
|
2076
|
+
if (signal?.aborted) return;
|
|
2077
|
+
let fileCount = 0;
|
|
2078
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
2079
|
+
let i = 0;
|
|
2080
|
+
while (i < files.length) {
|
|
2081
|
+
if (signal?.aborted) return;
|
|
2082
|
+
const chunk = files.slice(i, i + BATCH_SIZE);
|
|
2083
|
+
const rows = [];
|
|
2084
|
+
for (const filePath of chunk) {
|
|
2085
|
+
let content;
|
|
2086
|
+
try {
|
|
2087
|
+
const buf = await readFile(filePath);
|
|
2088
|
+
if (buf.length > MAX_FILE_SIZE) continue;
|
|
2089
|
+
content = buf.toString("utf8");
|
|
2090
|
+
if (content.includes("\0")) continue;
|
|
2091
|
+
} catch {
|
|
2092
|
+
continue;
|
|
2093
|
+
}
|
|
2094
|
+
const relPath = relative2(threadPath, filePath);
|
|
2095
|
+
rows.push({
|
|
2096
|
+
sql: "INSERT OR REPLACE INTO code_files (thread_id, file_path, content, indexed_at) VALUES (?, ?, ?, ?)",
|
|
2097
|
+
args: [threadId, relPath, content, now]
|
|
2098
|
+
});
|
|
2099
|
+
fileCount++;
|
|
2100
|
+
}
|
|
2101
|
+
if (rows.length > 0) {
|
|
2102
|
+
await db.batch(rows);
|
|
2103
|
+
}
|
|
2104
|
+
i += BATCH_SIZE;
|
|
2105
|
+
}
|
|
2106
|
+
await db.execute("INSERT INTO code_index(code_index) VALUES('rebuild')");
|
|
2107
|
+
await db.execute(
|
|
2108
|
+
"INSERT OR REPLACE INTO code_index_meta (thread_id, indexed_at, file_count) VALUES (?, ?, ?)",
|
|
2109
|
+
[threadId, now, fileCount]
|
|
2110
|
+
);
|
|
2111
|
+
console.log(`[code-search] indexed ${fileCount} files in ${Date.now() - start}ms`);
|
|
2112
|
+
}
|
|
2113
|
+
async function clearThreadIndex(db, threadId) {
|
|
2114
|
+
await db.execute(
|
|
2115
|
+
"DELETE FROM code_index WHERE rowid IN (SELECT id FROM code_files WHERE thread_id = ?)",
|
|
2116
|
+
[threadId]
|
|
2117
|
+
);
|
|
2118
|
+
await db.execute("DELETE FROM code_files WHERE thread_id = ?", [threadId]);
|
|
2119
|
+
await db.execute("DELETE FROM code_index_meta WHERE thread_id = ?", [threadId]);
|
|
2120
|
+
}
|
|
2121
|
+
async function invalidateThreadIndex(threadId) {
|
|
2122
|
+
const db = await getDatabase();
|
|
2123
|
+
await clearThreadIndex(db, threadId);
|
|
2124
|
+
}
|
|
2125
|
+
|
|
2126
|
+
// src/tools/code-search.ts
|
|
2127
|
+
var DEFAULT_LIMIT3 = 100;
|
|
2128
|
+
var codeSearchSchema = Type5.Object({
|
|
2129
|
+
query: Type5.Optional(
|
|
2130
|
+
Type5.String({
|
|
2131
|
+
description: 'FTS5 full-text search on plain identifier/word tokens (e.g. "validateEmail", "email validation"). Multi-word queries use proximity matching automatically. Supports "quoted phrases", prefix*, and boolean operators (A OR B, A NOT B). Special characters (backslashes, brackets, dots) are stripped \u2014 do NOT put regex patterns here. For regex use `pattern`.'
|
|
2132
|
+
})
|
|
2133
|
+
),
|
|
1856
2134
|
pattern: Type5.Optional(
|
|
1857
2135
|
Type5.String({
|
|
1858
|
-
description: "
|
|
2136
|
+
description: "JavaScript regex for searching file content, e.g. 'function\\s+\\w+\\s*\\(' or 'class\\s+\\w+Service'. Works standalone (scans all indexed files) or combined with `query` to pre-filter by FTS first. By default matched line by line; set `multiline: true` for cross-line patterns. Case-insensitive unless the pattern contains uppercase letters."
|
|
1859
2137
|
})
|
|
1860
2138
|
),
|
|
1861
|
-
|
|
1862
|
-
description: "Language to parse: javascript, typescript, python, rust, go, java, css, html, etc."
|
|
1863
|
-
}),
|
|
1864
|
-
rule: Type5.Optional(
|
|
2139
|
+
filePath: Type5.Optional(
|
|
1865
2140
|
Type5.String({
|
|
1866
|
-
description: "
|
|
2141
|
+
description: "Search by file path pattern, e.g. 'camp.page.ts', 'src/app/camp', or 'src/**/camp/*.ts'. Supports glob wildcards (* and **). Returns all lines from matching files."
|
|
1867
2142
|
})
|
|
1868
2143
|
),
|
|
1869
|
-
|
|
2144
|
+
fileGlob: Type5.Optional(
|
|
1870
2145
|
Type5.String({
|
|
1871
|
-
description: "
|
|
2146
|
+
description: "Restrict FTS/regex results to files matching this glob, e.g. '*.ts', '**/camp/*.ts'. Applied before the result limit."
|
|
1872
2147
|
})
|
|
1873
2148
|
),
|
|
1874
2149
|
limit: Type5.Optional(
|
|
1875
2150
|
Type5.Number({
|
|
1876
|
-
description:
|
|
2151
|
+
description: `Maximum number of file matches to return (default: ${DEFAULT_LIMIT3})`
|
|
2152
|
+
})
|
|
2153
|
+
),
|
|
2154
|
+
offset: Type5.Optional(
|
|
2155
|
+
Type5.Number({
|
|
2156
|
+
description: "Number of file matches to skip for pagination (default: 0). Use with `limit` to page through results when the match limit or output size is reached."
|
|
2157
|
+
})
|
|
2158
|
+
),
|
|
2159
|
+
multiline: Type5.Optional(
|
|
2160
|
+
Type5.Boolean({
|
|
2161
|
+
description: "When true, apply `pattern` to the full file content rather than line by line, so patterns can span multiple lines. Use `[\\s\\S]` instead of `.` to match newlines within the pattern (e.g. '@Component\\({[\\s\\S]*?templateUrl:' or 'templateUrl:[\\s\\S]*?styleUrl:'). Has no effect without `pattern`."
|
|
1877
2162
|
})
|
|
1878
2163
|
)
|
|
1879
2164
|
});
|
|
1880
|
-
|
|
1881
|
-
|
|
1882
|
-
|
|
1883
|
-
|
|
1884
|
-
"
|
|
1885
|
-
|
|
1886
|
-
|
|
1887
|
-
"
|
|
1888
|
-
|
|
1889
|
-
|
|
1890
|
-
|
|
1891
|
-
const
|
|
1892
|
-
let
|
|
1893
|
-
|
|
1894
|
-
|
|
1895
|
-
const
|
|
1896
|
-
if (
|
|
1897
|
-
|
|
1898
|
-
|
|
1899
|
-
|
|
1900
|
-
|
|
1901
|
-
|
|
1902
|
-
|
|
1903
|
-
|
|
1904
|
-
|
|
1905
|
-
|
|
1906
|
-
|
|
1907
|
-
|
|
2165
|
+
function globToLike(glob3) {
|
|
2166
|
+
return glob3.replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_").replace(/\*\*/g, "%").replace(/\*/g, "%").replace(/\?/g, "_");
|
|
2167
|
+
}
|
|
2168
|
+
function filePathToLike(filePath) {
|
|
2169
|
+
if (filePath.includes("*") || filePath.includes("?")) {
|
|
2170
|
+
return globToLike(filePath);
|
|
2171
|
+
}
|
|
2172
|
+
return `%${filePath.replace(/%/g, "\\%").replace(/_/g, "\\_")}%`;
|
|
2173
|
+
}
|
|
2174
|
+
function sanitizeQuery(raw) {
|
|
2175
|
+
const parts = [];
|
|
2176
|
+
const phraseRe = /"[^"]*"/g;
|
|
2177
|
+
let lastIndex = 0;
|
|
2178
|
+
let match;
|
|
2179
|
+
while ((match = phraseRe.exec(raw)) !== null) {
|
|
2180
|
+
const before = raw.slice(lastIndex, match.index).trim();
|
|
2181
|
+
if (before) parts.push(cleanTokens(before));
|
|
2182
|
+
parts.push(match[0]);
|
|
2183
|
+
lastIndex = match.index + match[0].length;
|
|
2184
|
+
}
|
|
2185
|
+
const remaining = raw.slice(lastIndex).trim();
|
|
2186
|
+
if (remaining) parts.push(cleanTokens(remaining));
|
|
2187
|
+
return parts.filter(Boolean).join(" ").trim();
|
|
2188
|
+
}
|
|
2189
|
+
function cleanTokens(text) {
|
|
2190
|
+
return text.split(/\s+/).map((token) => {
|
|
2191
|
+
if (/^(AND|OR|NOT)$/i.test(token)) return token;
|
|
2192
|
+
const hasTrailingStar = token.endsWith("*");
|
|
2193
|
+
const cleaned = token.replace(/[^a-zA-Z0-9_]/g, " ").trim();
|
|
2194
|
+
const tokens = cleaned.split(/\s+/).filter(Boolean);
|
|
2195
|
+
if (tokens.length === 0) return "";
|
|
2196
|
+
if (hasTrailingStar) tokens[tokens.length - 1] += "*";
|
|
2197
|
+
return tokens.join(" ");
|
|
2198
|
+
}).filter(Boolean).join(" ");
|
|
2199
|
+
}
|
|
2200
|
+
function buildNearQuery(sanitized) {
|
|
2201
|
+
if (/["()|]|^\s*(AND|OR|NOT)\s/i.test(sanitized)) return null;
|
|
2202
|
+
let tokens = sanitized.split(/\s+/).filter((t) => !/^(AND|OR|NOT)$/i.test(t) && t.length > 0);
|
|
2203
|
+
if (tokens.length < 2) return null;
|
|
2204
|
+
if (tokens.length > 5) {
|
|
2205
|
+
tokens = [...tokens].sort((a, b) => b.length - a.length).slice(0, 5);
|
|
2206
|
+
}
|
|
2207
|
+
const nearDistance = Math.max(10, tokens.length * 10);
|
|
2208
|
+
return `NEAR(${tokens.join(" ")}, ${nearDistance})`;
|
|
2209
|
+
}
|
|
2210
|
+
function buildOrQuery(sanitized) {
|
|
2211
|
+
if (/["()|]|^\s*(AND|OR|NOT)\s/i.test(sanitized)) return null;
|
|
2212
|
+
const tokens = sanitized.split(/\s+/).filter((t) => !/^(AND|OR|NOT)$/i.test(t) && t.length > 0);
|
|
2213
|
+
if (tokens.length < 2) return null;
|
|
2214
|
+
return tokens.join(" OR ");
|
|
2215
|
+
}
|
|
2216
|
+
function extractTokens(query) {
|
|
2217
|
+
return query.replace(/["()*]/g, " ").split(/\s+/).map(
|
|
2218
|
+
(t) => t.replace(/^(AND|OR|NOT)$/i, "").replace(/[^a-zA-Z0-9_]/g, "").trim()
|
|
2219
|
+
).filter(Boolean).map((t) => t.toLowerCase());
|
|
2220
|
+
}
|
|
2221
|
+
function findLinesByTokens(content, tokens) {
|
|
2222
|
+
if (tokens.length === 0) return [];
|
|
2223
|
+
const lines = content.split("\n");
|
|
2224
|
+
const results = [];
|
|
2225
|
+
for (let i = 0; i < lines.length; i++) {
|
|
2226
|
+
const lower = lines[i].toLowerCase();
|
|
2227
|
+
if (tokens.some((t) => lower.includes(t))) {
|
|
2228
|
+
results.push({ line: i + 1, text: lines[i] });
|
|
2229
|
+
}
|
|
2230
|
+
}
|
|
2231
|
+
return results;
|
|
2232
|
+
}
|
|
2233
|
+
function findLinesByRegex(content, re, multiline = false) {
|
|
2234
|
+
const lines = content.split("\n");
|
|
2235
|
+
if (!multiline) {
|
|
2236
|
+
const results = [];
|
|
2237
|
+
for (let i = 0; i < lines.length; i++) {
|
|
2238
|
+
if (re.test(lines[i])) {
|
|
2239
|
+
results.push({ line: i + 1, text: lines[i] });
|
|
1908
2240
|
}
|
|
1909
2241
|
}
|
|
1910
|
-
|
|
1911
|
-
if (parent === dir) break;
|
|
1912
|
-
dir = parent;
|
|
2242
|
+
return results;
|
|
1913
2243
|
}
|
|
1914
|
-
|
|
2244
|
+
const lineStarts = Array.from({ length: lines.length }).fill(0);
|
|
2245
|
+
let pos = 0;
|
|
2246
|
+
for (let i = 0; i < lines.length; i++) {
|
|
2247
|
+
lineStarts[i] = pos;
|
|
2248
|
+
pos += lines[i].length + 1;
|
|
2249
|
+
}
|
|
2250
|
+
let flags = re.flags;
|
|
2251
|
+
if (!flags.includes("g")) flags += "g";
|
|
2252
|
+
const fullRe = new RegExp(re.source, flags);
|
|
2253
|
+
const matchedLines = /* @__PURE__ */ new Set();
|
|
2254
|
+
let match;
|
|
2255
|
+
while ((match = fullRe.exec(content)) !== null) {
|
|
2256
|
+
const matchStart = match.index;
|
|
2257
|
+
const matchEnd = matchStart + match[0].length;
|
|
2258
|
+
for (let i = 0; i < lines.length; i++) {
|
|
2259
|
+
const lineStart = lineStarts[i];
|
|
2260
|
+
const lineEnd = lineStart + lines[i].length;
|
|
2261
|
+
if (lineStart <= matchEnd && lineEnd >= matchStart) {
|
|
2262
|
+
matchedLines.add(i);
|
|
2263
|
+
}
|
|
2264
|
+
if (lineStart > matchEnd) break;
|
|
2265
|
+
}
|
|
2266
|
+
if (match[0].length === 0) fullRe.lastIndex++;
|
|
2267
|
+
}
|
|
2268
|
+
return Array.from(matchedLines).sort((a, b) => a - b).map((i) => ({ line: i + 1, text: lines[i] }));
|
|
2269
|
+
}
|
|
2270
|
+
function formatOutput(rows, tokens, regex, limit, multiline = false) {
|
|
2271
|
+
const outputLines = [];
|
|
2272
|
+
let linesTruncated = false;
|
|
2273
|
+
const matchLimitReached = rows.length >= limit ? rows.length : void 0;
|
|
2274
|
+
for (const { file_path, content } of rows) {
|
|
2275
|
+
const matchingLines = regex ? findLinesByRegex(content, regex, multiline) : findLinesByTokens(content, tokens);
|
|
2276
|
+
for (const { line, text } of matchingLines) {
|
|
2277
|
+
const { text: truncated, wasTruncated } = truncateLine(text.replace(/\r/g, ""));
|
|
2278
|
+
if (wasTruncated) linesTruncated = true;
|
|
2279
|
+
outputLines.push(`${file_path}:${line}: ${truncated}`);
|
|
2280
|
+
}
|
|
2281
|
+
}
|
|
2282
|
+
return { outputLines, linesTruncated, matchLimitReached };
|
|
2283
|
+
}
|
|
2284
|
+
function buildResult(outputLines, linesTruncated, matchLimitReached, limit, offset) {
|
|
2285
|
+
if (outputLines.length === 0) {
|
|
2286
|
+
const text2 = offset > 0 ? `No matches found at offset ${offset}` : "No matches found";
|
|
2287
|
+
return { content: [{ type: "text", text: text2 }], details: void 0 };
|
|
2288
|
+
}
|
|
2289
|
+
const rawOutput = outputLines.join("\n");
|
|
2290
|
+
const truncation = truncateHead(rawOutput, { maxBytes: DEFAULT_MAX_BYTES });
|
|
2291
|
+
const finalOutput = truncation.content;
|
|
2292
|
+
const nextOffset = offset + limit;
|
|
2293
|
+
const notices = [];
|
|
2294
|
+
if (matchLimitReached) {
|
|
2295
|
+
notices.push(
|
|
2296
|
+
`Note: result limit of ${limit} files reached \u2014 use offset=${nextOffset} to get the next page, or use a more specific query`
|
|
2297
|
+
);
|
|
2298
|
+
}
|
|
2299
|
+
if (linesTruncated) {
|
|
2300
|
+
notices.push(`Note: some lines were truncated at ${GREP_MAX_LINE_LENGTH} characters`);
|
|
2301
|
+
}
|
|
2302
|
+
if (truncation.truncated) {
|
|
2303
|
+
notices.push(
|
|
2304
|
+
`Note: output truncated at ${formatSize(DEFAULT_MAX_BYTES)} \u2014 use offset=${nextOffset} to get the next page`
|
|
2305
|
+
);
|
|
2306
|
+
}
|
|
2307
|
+
const text = notices.length > 0 ? `${finalOutput}
|
|
2308
|
+
|
|
2309
|
+
${notices.join("\n")}` : finalOutput;
|
|
2310
|
+
return {
|
|
2311
|
+
content: [{ type: "text", text }],
|
|
2312
|
+
details: { truncation, matchLimitReached, linesTruncated, offset }
|
|
2313
|
+
};
|
|
1915
2314
|
}
|
|
1916
|
-
|
|
1917
|
-
function resolveAstGrepBin(cwd) {
|
|
1918
|
-
const key = `${process.platform}-${process.arch}`;
|
|
1919
|
-
const pkg = PLATFORM_PACKAGES[key];
|
|
1920
|
-
if (!pkg) return null;
|
|
1921
|
-
const binName = process.platform === "win32" ? "ast-grep.exe" : "ast-grep";
|
|
1922
|
-
console.log(`[ast-grep] resolving binary: platform=${key}, pkg=${pkg}, APP_DIR=${APP_DIR}`);
|
|
1923
|
-
const appResult = findBinInNodeModules(APP_DIR, pkg, binName);
|
|
1924
|
-
if (appResult) return appResult;
|
|
1925
|
-
return findBinInNodeModules(cwd, pkg, binName);
|
|
1926
|
-
}
|
|
1927
|
-
function createAstGrepTool(cwd) {
|
|
2315
|
+
function createCodeSearchTool(cwd, threadId) {
|
|
1928
2316
|
return {
|
|
1929
|
-
name: "
|
|
2317
|
+
name: "code_search",
|
|
1930
2318
|
label: "Code Search",
|
|
1931
|
-
description:
|
|
1932
|
-
parameters:
|
|
1933
|
-
execute: async (_toolCallId,
|
|
1934
|
-
|
|
1935
|
-
|
|
1936
|
-
|
|
1937
|
-
|
|
1938
|
-
|
|
1939
|
-
|
|
1940
|
-
|
|
1941
|
-
|
|
1942
|
-
|
|
1943
|
-
|
|
1944
|
-
|
|
2319
|
+
description: 'Search the codebase with FTS5 (BM25 ranking) + optional regex. Use `query` for token/semantic search \u2014 multi-word queries automatically use proximity matching. Use `pattern` for regex (e.g. "function\\s+\\w+"). Use `filePath` to find files by name/path (supports globs). Use `fileGlob` to restrict results to a directory.',
|
|
2320
|
+
parameters: codeSearchSchema,
|
|
2321
|
+
execute: async (_toolCallId, inputs, signal) => {
|
|
2322
|
+
const {
|
|
2323
|
+
query,
|
|
2324
|
+
pattern,
|
|
2325
|
+
filePath,
|
|
2326
|
+
fileGlob,
|
|
2327
|
+
limit = DEFAULT_LIMIT3,
|
|
2328
|
+
offset = 0,
|
|
2329
|
+
multiline = false
|
|
2330
|
+
} = inputs;
|
|
2331
|
+
if (!threadId) {
|
|
2332
|
+
return {
|
|
2333
|
+
content: [
|
|
2334
|
+
{ type: "text", text: "code_search: no threadId \u2014 cannot search index" }
|
|
2335
|
+
],
|
|
2336
|
+
details: void 0
|
|
1945
2337
|
};
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
settle(() => reject(new Error("'rule' is required when command is 'scan'")));
|
|
1954
|
-
return;
|
|
2338
|
+
}
|
|
2339
|
+
if (!query && !pattern && !filePath) {
|
|
2340
|
+
return {
|
|
2341
|
+
content: [
|
|
2342
|
+
{
|
|
2343
|
+
type: "text",
|
|
2344
|
+
text: "Provide at least one of: `query`, `pattern`, or `filePath`"
|
|
1955
2345
|
}
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
|
|
1962
|
-
|
|
1963
|
-
|
|
2346
|
+
],
|
|
2347
|
+
details: void 0
|
|
2348
|
+
};
|
|
2349
|
+
}
|
|
2350
|
+
const REGEX_IN_QUERY = /\\[swdWSDbBAZ]|[[\]{}]|\(\?/;
|
|
2351
|
+
if (query && !pattern && REGEX_IN_QUERY.test(query)) {
|
|
2352
|
+
return {
|
|
2353
|
+
content: [
|
|
2354
|
+
{
|
|
2355
|
+
type: "text",
|
|
2356
|
+
text: `The \`query\` field received what looks like a regex pattern ("${query}"). \`query\` is for plain token/FTS search only \u2014 backslashes and special characters are stripped, so the regex will not work. Move the pattern to the \`pattern\` field instead (and omit \`query\`).`
|
|
1964
2357
|
}
|
|
1965
|
-
|
|
1966
|
-
|
|
1967
|
-
|
|
1968
|
-
|
|
1969
|
-
|
|
1970
|
-
|
|
1971
|
-
|
|
1972
|
-
|
|
2358
|
+
],
|
|
2359
|
+
details: void 0
|
|
2360
|
+
};
|
|
2361
|
+
}
|
|
2362
|
+
let compiledRegex = null;
|
|
2363
|
+
if (pattern) {
|
|
2364
|
+
try {
|
|
2365
|
+
const flags = pattern === pattern.toLowerCase() ? "i" : "";
|
|
2366
|
+
compiledRegex = new RegExp(pattern, flags);
|
|
2367
|
+
} catch (err) {
|
|
2368
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
2369
|
+
return {
|
|
2370
|
+
content: [{ type: "text", text: `Invalid regex pattern: ${msg}` }],
|
|
2371
|
+
details: void 0
|
|
2372
|
+
};
|
|
2373
|
+
}
|
|
2374
|
+
}
|
|
2375
|
+
const db = await getDatabase();
|
|
2376
|
+
await ensureThreadIndex(db, threadId, cwd, signal);
|
|
2377
|
+
if (signal?.aborted) {
|
|
2378
|
+
return {
|
|
2379
|
+
content: [{ type: "text", text: "Operation aborted" }],
|
|
2380
|
+
details: void 0
|
|
2381
|
+
};
|
|
2382
|
+
}
|
|
2383
|
+
const likeFilter = fileGlob ? globToLike(fileGlob) : null;
|
|
2384
|
+
if (filePath && !query && !pattern) {
|
|
2385
|
+
const likePattern = filePathToLike(filePath);
|
|
2386
|
+
const result = await db.execute(
|
|
2387
|
+
`SELECT file_path, content FROM code_files
|
|
2388
|
+
WHERE thread_id = ?
|
|
2389
|
+
AND file_path LIKE ? ESCAPE '\\'
|
|
2390
|
+
LIMIT ? OFFSET ?`,
|
|
2391
|
+
[threadId, likePattern, limit, offset]
|
|
2392
|
+
);
|
|
2393
|
+
const rows2 = result.rows;
|
|
2394
|
+
const { outputLines: outputLines2, linesTruncated: linesTruncated2, matchLimitReached: matchLimitReached2 } = formatOutput(
|
|
2395
|
+
rows2,
|
|
2396
|
+
[],
|
|
2397
|
+
null,
|
|
2398
|
+
limit
|
|
2399
|
+
);
|
|
2400
|
+
if (outputLines2.length === 0) {
|
|
2401
|
+
const names = rows2.map((r) => r.file_path).join("\n");
|
|
2402
|
+
return buildResult(names ? names.split("\n") : [], false, void 0, limit, offset);
|
|
2403
|
+
}
|
|
2404
|
+
return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
|
|
2405
|
+
}
|
|
2406
|
+
if (filePath && (query || pattern)) {
|
|
2407
|
+
const likePattern = filePathToLike(filePath);
|
|
2408
|
+
const result = await db.execute(
|
|
2409
|
+
`SELECT file_path, content FROM code_files
|
|
2410
|
+
WHERE thread_id = ?
|
|
2411
|
+
AND file_path LIKE ? ESCAPE '\\'
|
|
2412
|
+
LIMIT ? OFFSET ?`,
|
|
2413
|
+
[threadId, likePattern, limit, offset]
|
|
2414
|
+
);
|
|
2415
|
+
const rows2 = result.rows;
|
|
2416
|
+
const tokens2 = query ? extractTokens(query) : [];
|
|
2417
|
+
const { outputLines: outputLines2, linesTruncated: linesTruncated2, matchLimitReached: matchLimitReached2 } = formatOutput(
|
|
2418
|
+
rows2,
|
|
2419
|
+
tokens2,
|
|
2420
|
+
compiledRegex,
|
|
2421
|
+
limit,
|
|
2422
|
+
multiline
|
|
2423
|
+
);
|
|
2424
|
+
return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
|
|
2425
|
+
}
|
|
2426
|
+
if (pattern && !query) {
|
|
2427
|
+
const sql = likeFilter ? `SELECT file_path, content FROM code_files
|
|
2428
|
+
WHERE thread_id = ?
|
|
2429
|
+
AND file_path LIKE ? ESCAPE '\\'` : `SELECT file_path, content FROM code_files
|
|
2430
|
+
WHERE thread_id = ?`;
|
|
2431
|
+
const args2 = likeFilter ? [threadId, likeFilter] : [threadId];
|
|
2432
|
+
const result = await db.execute(sql, args2);
|
|
2433
|
+
const allRows = result.rows;
|
|
2434
|
+
const outputLines2 = [];
|
|
2435
|
+
let linesTruncated2 = false;
|
|
2436
|
+
let matchedFiles = 0;
|
|
2437
|
+
for (const { file_path, content } of allRows) {
|
|
2438
|
+
const matchingLines = findLinesByRegex(content, compiledRegex, multiline);
|
|
2439
|
+
if (matchingLines.length === 0) continue;
|
|
2440
|
+
matchedFiles++;
|
|
2441
|
+
if (matchedFiles <= offset) continue;
|
|
2442
|
+
if (matchedFiles > offset + limit) break;
|
|
2443
|
+
for (const { line, text } of matchingLines) {
|
|
2444
|
+
const { text: truncated, wasTruncated } = truncateLine(text.replace(/\r/g, ""));
|
|
2445
|
+
if (wasTruncated) linesTruncated2 = true;
|
|
2446
|
+
outputLines2.push(`${file_path}:${line}: ${truncated}`);
|
|
2447
|
+
}
|
|
2448
|
+
}
|
|
2449
|
+
const matchLimitReached2 = matchedFiles > offset + limit ? matchedFiles : void 0;
|
|
2450
|
+
return buildResult(outputLines2, linesTruncated2, matchLimitReached2, limit, offset);
|
|
2451
|
+
}
|
|
2452
|
+
const rawQuery = query;
|
|
2453
|
+
async function runFtsQuery(ftsQuery) {
|
|
2454
|
+
const sql = likeFilter ? `SELECT cf.file_path, cf.content
|
|
2455
|
+
FROM code_index
|
|
2456
|
+
JOIN code_files cf ON cf.id = code_index.rowid
|
|
2457
|
+
WHERE code_index MATCH ?
|
|
2458
|
+
AND cf.thread_id = ?
|
|
2459
|
+
AND cf.file_path LIKE ? ESCAPE '\\'
|
|
2460
|
+
ORDER BY rank
|
|
2461
|
+
LIMIT ? OFFSET ?` : `SELECT cf.file_path, cf.content
|
|
2462
|
+
FROM code_index
|
|
2463
|
+
JOIN code_files cf ON cf.id = code_index.rowid
|
|
2464
|
+
WHERE code_index MATCH ?
|
|
2465
|
+
AND cf.thread_id = ?
|
|
2466
|
+
ORDER BY rank
|
|
2467
|
+
LIMIT ? OFFSET ?`;
|
|
2468
|
+
const args2 = likeFilter ? [ftsQuery, threadId, likeFilter, limit, offset] : [ftsQuery, threadId, limit, offset];
|
|
2469
|
+
const result = await db.execute(sql, args2);
|
|
2470
|
+
return result.rows;
|
|
2471
|
+
}
|
|
2472
|
+
let rows;
|
|
2473
|
+
let effectiveQuery = rawQuery;
|
|
2474
|
+
try {
|
|
2475
|
+
const sanitized = sanitizeQuery(rawQuery);
|
|
2476
|
+
const nearQuery = buildNearQuery(sanitized);
|
|
2477
|
+
if (nearQuery) {
|
|
2478
|
+
try {
|
|
2479
|
+
rows = await runFtsQuery(nearQuery);
|
|
2480
|
+
effectiveQuery = nearQuery;
|
|
2481
|
+
if (rows.length === 0) {
|
|
2482
|
+
rows = await runFtsQuery(sanitized);
|
|
2483
|
+
effectiveQuery = sanitized;
|
|
1973
2484
|
}
|
|
1974
|
-
|
|
1975
|
-
|
|
1976
|
-
|
|
1977
|
-
|
|
1978
|
-
|
|
1979
|
-
let stdout = "";
|
|
1980
|
-
let stdoutBytes = 0;
|
|
1981
|
-
let stderr = "";
|
|
1982
|
-
let aborted = false;
|
|
1983
|
-
let killedDueToCap = false;
|
|
1984
|
-
const onAbort = () => {
|
|
1985
|
-
aborted = true;
|
|
1986
|
-
if (!child.killed) child.kill();
|
|
1987
|
-
};
|
|
1988
|
-
signal?.addEventListener("abort", onAbort, { once: true });
|
|
1989
|
-
child.stdout?.on("data", (chunk) => {
|
|
1990
|
-
if (killedDueToCap) return;
|
|
1991
|
-
stdoutBytes += chunk.length;
|
|
1992
|
-
if (stdoutBytes > MAX_STDOUT_BYTES) {
|
|
1993
|
-
killedDueToCap = true;
|
|
1994
|
-
if (!child.killed) child.kill();
|
|
1995
|
-
return;
|
|
1996
|
-
}
|
|
1997
|
-
stdout += chunk.toString();
|
|
1998
|
-
});
|
|
1999
|
-
child.stderr?.on("data", (chunk) => {
|
|
2000
|
-
stderr += chunk.toString();
|
|
2001
|
-
});
|
|
2002
|
-
child.on("error", (error) => {
|
|
2003
|
-
signal?.removeEventListener("abort", onAbort);
|
|
2004
|
-
settle(() => reject(new Error(`Failed to run ast-grep: ${error.message}`)));
|
|
2005
|
-
});
|
|
2006
|
-
child.on("close", (code) => {
|
|
2007
|
-
signal?.removeEventListener("abort", onAbort);
|
|
2008
|
-
console.log(
|
|
2009
|
-
`[ast-grep] exit code: ${code}, stdout: ${stdout.length} bytes, stderr: ${stderr.length} bytes`
|
|
2010
|
-
);
|
|
2011
|
-
if (stderr.trim()) console.log(`[ast-grep] stderr: ${stderr.trim()}`);
|
|
2012
|
-
if (aborted) {
|
|
2013
|
-
settle(() => reject(new Error("Operation aborted")));
|
|
2014
|
-
return;
|
|
2015
|
-
}
|
|
2016
|
-
if (!killedDueToCap && code !== 0 && code !== 1) {
|
|
2017
|
-
const errMsg = stderr.trim() || `ast-grep exited with code ${code}`;
|
|
2018
|
-
settle(() => reject(new Error(errMsg)));
|
|
2019
|
-
return;
|
|
2020
|
-
}
|
|
2021
|
-
let matches;
|
|
2022
|
-
try {
|
|
2023
|
-
matches = JSON.parse(stdout || "[]");
|
|
2024
|
-
} catch {
|
|
2025
|
-
if (killedDueToCap) {
|
|
2026
|
-
const lastBrace = stdout.lastIndexOf("}");
|
|
2027
|
-
if (lastBrace > 0) {
|
|
2028
|
-
try {
|
|
2029
|
-
matches = JSON.parse(stdout.slice(0, lastBrace + 1) + "]");
|
|
2030
|
-
} catch {
|
|
2031
|
-
matches = [];
|
|
2032
|
-
}
|
|
2033
|
-
} else {
|
|
2034
|
-
matches = [];
|
|
2035
|
-
}
|
|
2036
|
-
} else if (!stdout.trim()) {
|
|
2037
|
-
settle(
|
|
2038
|
-
() => resolve6({
|
|
2039
|
-
content: [{ type: "text", text: "No matches found" }],
|
|
2040
|
-
details: void 0
|
|
2041
|
-
})
|
|
2042
|
-
);
|
|
2043
|
-
return;
|
|
2044
|
-
} else {
|
|
2045
|
-
settle(
|
|
2046
|
-
() => reject(
|
|
2047
|
-
new Error(`Failed to parse ast-grep JSON output: ${stdout.slice(0, 200)}`)
|
|
2048
|
-
)
|
|
2049
|
-
);
|
|
2050
|
-
return;
|
|
2051
|
-
}
|
|
2052
|
-
}
|
|
2053
|
-
if (!Array.isArray(matches) || matches.length === 0) {
|
|
2054
|
-
settle(
|
|
2055
|
-
() => resolve6({
|
|
2056
|
-
content: [{ type: "text", text: "No matches found" }],
|
|
2057
|
-
details: void 0
|
|
2058
|
-
})
|
|
2059
|
-
);
|
|
2060
|
-
return;
|
|
2061
|
-
}
|
|
2062
|
-
const matchLimitReached = matches.length > effectiveLimit;
|
|
2063
|
-
if (matchLimitReached) {
|
|
2064
|
-
matches = matches.slice(0, effectiveLimit);
|
|
2065
|
-
}
|
|
2066
|
-
let linesTruncated = false;
|
|
2067
|
-
const outputLines = [];
|
|
2068
|
-
for (const match of matches) {
|
|
2069
|
-
const filePath = match.file ?? "unknown";
|
|
2070
|
-
const relativePath = path3.relative(searchPath, filePath).replace(/\\/g, "/") || filePath;
|
|
2071
|
-
const lineNum = (match.range?.start?.line ?? 0) + 1;
|
|
2072
|
-
const matchText = (match.lines ?? match.text ?? "").trimEnd();
|
|
2073
|
-
const firstLine = matchText.split("\n")[0] ?? "";
|
|
2074
|
-
const { text: truncatedText, wasTruncated } = truncateLine(firstLine);
|
|
2075
|
-
if (wasTruncated) linesTruncated = true;
|
|
2076
|
-
outputLines.push(`${relativePath}:${lineNum}: ${truncatedText}`);
|
|
2077
|
-
}
|
|
2078
|
-
const rawOutput = outputLines.join("\n");
|
|
2079
|
-
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
|
|
2080
|
-
let output = truncation.content;
|
|
2081
|
-
const details = {};
|
|
2082
|
-
const notices = [];
|
|
2083
|
-
if (killedDueToCap) {
|
|
2084
|
-
notices.push(
|
|
2085
|
-
`Output exceeded 2MB cap. Refine your pattern for more precise results`
|
|
2086
|
-
);
|
|
2087
|
-
}
|
|
2088
|
-
if (matchLimitReached) {
|
|
2089
|
-
notices.push(
|
|
2090
|
-
`${effectiveLimit} matches limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`
|
|
2091
|
-
);
|
|
2092
|
-
details.matchLimitReached = effectiveLimit;
|
|
2485
|
+
if (rows.length === 0) {
|
|
2486
|
+
const orQuery = buildOrQuery(sanitized);
|
|
2487
|
+
if (orQuery) {
|
|
2488
|
+
rows = await runFtsQuery(orQuery);
|
|
2489
|
+
effectiveQuery = orQuery;
|
|
2093
2490
|
}
|
|
2094
|
-
|
|
2095
|
-
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
|
|
2096
|
-
details.truncation = truncation;
|
|
2097
|
-
}
|
|
2098
|
-
if (linesTruncated) {
|
|
2099
|
-
notices.push(
|
|
2100
|
-
`Some lines truncated to ${GREP_MAX_LINE_LENGTH} chars. Use read tool to see full lines`
|
|
2101
|
-
);
|
|
2102
|
-
details.linesTruncated = true;
|
|
2103
|
-
}
|
|
2104
|
-
if (notices.length > 0) output += `
|
|
2105
|
-
|
|
2106
|
-
[${notices.join(". ")}]`;
|
|
2107
|
-
settle(
|
|
2108
|
-
() => resolve6({
|
|
2109
|
-
content: [{ type: "text", text: output }],
|
|
2110
|
-
details: Object.keys(details).length > 0 ? details : void 0
|
|
2111
|
-
})
|
|
2112
|
-
);
|
|
2113
|
-
});
|
|
2491
|
+
}
|
|
2114
2492
|
} catch {
|
|
2115
|
-
|
|
2493
|
+
rows = await runFtsQuery(sanitized);
|
|
2494
|
+
effectiveQuery = sanitized;
|
|
2116
2495
|
}
|
|
2117
|
-
}
|
|
2118
|
-
|
|
2496
|
+
} else {
|
|
2497
|
+
rows = await runFtsQuery(rawQuery);
|
|
2498
|
+
}
|
|
2499
|
+
} catch {
|
|
2500
|
+
effectiveQuery = sanitizeQuery(rawQuery);
|
|
2501
|
+
if (!effectiveQuery) {
|
|
2502
|
+
return {
|
|
2503
|
+
content: [
|
|
2504
|
+
{
|
|
2505
|
+
type: "text",
|
|
2506
|
+
text: `Query "${rawQuery}" contains only special characters \u2014 no searchable tokens remain. Try using filePath or pattern instead.`
|
|
2507
|
+
}
|
|
2508
|
+
],
|
|
2509
|
+
details: void 0
|
|
2510
|
+
};
|
|
2511
|
+
}
|
|
2512
|
+
try {
|
|
2513
|
+
rows = await runFtsQuery(effectiveQuery);
|
|
2514
|
+
} catch (err2) {
|
|
2515
|
+
const msg = err2 instanceof Error ? err2.message : String(err2);
|
|
2516
|
+
return {
|
|
2517
|
+
content: [
|
|
2518
|
+
{
|
|
2519
|
+
type: "text",
|
|
2520
|
+
text: `FTS query error even after sanitization.
|
|
2521
|
+
Original: "${rawQuery}"
|
|
2522
|
+
Sanitized: "${effectiveQuery}"
|
|
2523
|
+
|
|
2524
|
+
Details: ${msg}`
|
|
2525
|
+
}
|
|
2526
|
+
],
|
|
2527
|
+
details: void 0
|
|
2528
|
+
};
|
|
2529
|
+
}
|
|
2530
|
+
}
|
|
2531
|
+
if (rows.length === 0) {
|
|
2532
|
+
return {
|
|
2533
|
+
content: [{ type: "text", text: "No matches found" }],
|
|
2534
|
+
details: void 0
|
|
2535
|
+
};
|
|
2536
|
+
}
|
|
2537
|
+
const tokens = compiledRegex ? [] : extractTokens(effectiveQuery);
|
|
2538
|
+
const { outputLines, linesTruncated, matchLimitReached } = formatOutput(
|
|
2539
|
+
rows,
|
|
2540
|
+
tokens,
|
|
2541
|
+
compiledRegex,
|
|
2542
|
+
limit,
|
|
2543
|
+
multiline
|
|
2544
|
+
);
|
|
2545
|
+
return buildResult(outputLines, linesTruncated, matchLimitReached, limit, offset);
|
|
2119
2546
|
}
|
|
2120
2547
|
};
|
|
2121
2548
|
}
|
|
2122
|
-
var
|
|
2549
|
+
var codeSearchTool = createCodeSearchTool(
|
|
2550
|
+
process.cwd(),
|
|
2551
|
+
""
|
|
2552
|
+
);
|
|
2123
2553
|
|
|
2124
2554
|
// src/tools/ls.ts
|
|
2125
2555
|
import { Type as Type6 } from "@sinclair/typebox";
|
|
2126
|
-
import { existsSync as
|
|
2556
|
+
import { existsSync as existsSync4, readdirSync, statSync as statSync2 } from "fs";
|
|
2127
2557
|
import nodePath from "path";
|
|
2128
2558
|
var lsSchema = Type6.Object({
|
|
2129
2559
|
path: Type6.Optional(
|
|
@@ -2135,9 +2565,9 @@ var lsSchema = Type6.Object({
|
|
|
2135
2565
|
});
|
|
2136
2566
|
var DEFAULT_LIMIT4 = 500;
|
|
2137
2567
|
var defaultLsOperations = {
|
|
2138
|
-
exists:
|
|
2568
|
+
exists: existsSync4,
|
|
2139
2569
|
stat: statSync2,
|
|
2140
|
-
readdir:
|
|
2570
|
+
readdir: readdirSync
|
|
2141
2571
|
};
|
|
2142
2572
|
function createLsTool(cwd, options) {
|
|
2143
2573
|
const ops = options?.operations ?? defaultLsOperations;
|
|
@@ -2246,8 +2676,8 @@ var readSchema = Type7.Object({
|
|
|
2246
2676
|
limit: Type7.Optional(Type7.Number({ description: "Maximum number of lines to read" }))
|
|
2247
2677
|
});
|
|
2248
2678
|
var defaultReadOperations = {
|
|
2249
|
-
readFile: (
|
|
2250
|
-
access: (
|
|
2679
|
+
readFile: (path6) => fsReadFile2(path6),
|
|
2680
|
+
access: (path6) => fsAccess2(path6, constants3.R_OK)
|
|
2251
2681
|
};
|
|
2252
2682
|
function createReadTool(cwd, options) {
|
|
2253
2683
|
const ops = options?.operations ?? defaultReadOperations;
|
|
@@ -2256,8 +2686,8 @@ function createReadTool(cwd, options) {
|
|
|
2256
2686
|
label: "read",
|
|
2257
2687
|
description: `Read the contents of a text file. Output is truncated to ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Use offset/limit for large files.`,
|
|
2258
2688
|
parameters: readSchema,
|
|
2259
|
-
execute: async (_toolCallId, { path:
|
|
2260
|
-
const absolutePath = resolveReadPath(
|
|
2689
|
+
execute: async (_toolCallId, { path: path6, offset, limit }, signal) => {
|
|
2690
|
+
const absolutePath = resolveReadPath(path6, cwd);
|
|
2261
2691
|
validatePathWithinCwd(absolutePath, cwd);
|
|
2262
2692
|
return withAbortSignal(signal, async (isAborted) => {
|
|
2263
2693
|
await ops.access(absolutePath);
|
|
@@ -2286,7 +2716,7 @@ function createReadTool(cwd, options) {
|
|
|
2286
2716
|
let outputText;
|
|
2287
2717
|
let details;
|
|
2288
2718
|
if (truncation.firstLineExceedsLimit) {
|
|
2289
|
-
outputText = `[Line ${startLineDisplay} is ${formatSize(Buffer.byteLength(allLines[startLine], "utf-8"))}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${
|
|
2719
|
+
outputText = `[Line ${startLineDisplay} is ${formatSize(Buffer.byteLength(allLines[startLine], "utf-8"))}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path6} | head -c ${DEFAULT_MAX_BYTES}]`;
|
|
2290
2720
|
details = { truncation };
|
|
2291
2721
|
} else if (truncation.truncated) {
|
|
2292
2722
|
const endLineDisplay = startLineDisplay + truncation.outputLines - 1;
|
|
@@ -2329,7 +2759,7 @@ var writeSchema = Type8.Object({
|
|
|
2329
2759
|
content: Type8.String({ description: "Content to write to the file" })
|
|
2330
2760
|
});
|
|
2331
2761
|
var defaultWriteOperations = {
|
|
2332
|
-
writeFile: (
|
|
2762
|
+
writeFile: (path6, content) => fsWriteFile2(path6, content, "utf-8"),
|
|
2333
2763
|
mkdir: (dir) => fsMkdir(dir, { recursive: true }).then(() => {
|
|
2334
2764
|
})
|
|
2335
2765
|
};
|
|
@@ -2340,8 +2770,8 @@ function createWriteTool(cwd, options) {
|
|
|
2340
2770
|
label: "write",
|
|
2341
2771
|
description: "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Automatically creates parent directories.",
|
|
2342
2772
|
parameters: writeSchema,
|
|
2343
|
-
execute: async (_toolCallId, { path:
|
|
2344
|
-
const absolutePath = resolveToCwd(
|
|
2773
|
+
execute: async (_toolCallId, { path: path6, content }, signal) => {
|
|
2774
|
+
const absolutePath = resolveToCwd(path6, cwd);
|
|
2345
2775
|
validatePathWithinCwd(absolutePath, cwd);
|
|
2346
2776
|
const dir = dirname(absolutePath);
|
|
2347
2777
|
return new Promise(
|
|
@@ -2365,7 +2795,7 @@ function createWriteTool(cwd, options) {
|
|
|
2365
2795
|
if (signal) signal.removeEventListener("abort", onAbort);
|
|
2366
2796
|
resolve6({
|
|
2367
2797
|
content: [
|
|
2368
|
-
{ type: "text", text: `Successfully wrote ${content.length} bytes to ${
|
|
2798
|
+
{ type: "text", text: `Successfully wrote ${content.length} bytes to ${path6}` }
|
|
2369
2799
|
],
|
|
2370
2800
|
details: void 0
|
|
2371
2801
|
});
|
|
@@ -2384,11 +2814,11 @@ var writeTool = createWriteTool(process.cwd());
|
|
|
2384
2814
|
// src/tools/skill-tool.ts
|
|
2385
2815
|
init_utils();
|
|
2386
2816
|
import { readdir } from "fs/promises";
|
|
2387
|
-
import { join as
|
|
2388
|
-
import { existsSync as
|
|
2817
|
+
import { join as join4, extname as extname2 } from "path";
|
|
2818
|
+
import { existsSync as existsSync5, statSync as statSync3 } from "fs";
|
|
2389
2819
|
import { Type as Type9 } from "@sinclair/typebox";
|
|
2390
2820
|
function getInterpreter(scriptPath) {
|
|
2391
|
-
const ext =
|
|
2821
|
+
const ext = extname2(scriptPath);
|
|
2392
2822
|
switch (ext) {
|
|
2393
2823
|
case ".sh":
|
|
2394
2824
|
case ".bash":
|
|
@@ -2478,8 +2908,8 @@ function createSkillScriptTool(skills, cwd) {
|
|
|
2478
2908
|
details: void 0
|
|
2479
2909
|
};
|
|
2480
2910
|
}
|
|
2481
|
-
const scriptsDir =
|
|
2482
|
-
if (!
|
|
2911
|
+
const scriptsDir = join4(skill.skillPath, "scripts");
|
|
2912
|
+
if (!existsSync5(scriptsDir)) {
|
|
2483
2913
|
return {
|
|
2484
2914
|
content: [
|
|
2485
2915
|
{ type: "text", text: `Skill '${skillName}' has no scripts directory` }
|
|
@@ -2487,8 +2917,8 @@ function createSkillScriptTool(skills, cwd) {
|
|
|
2487
2917
|
details: void 0
|
|
2488
2918
|
};
|
|
2489
2919
|
}
|
|
2490
|
-
const scriptPath =
|
|
2491
|
-
if (!
|
|
2920
|
+
const scriptPath = join4(scriptsDir, scriptName);
|
|
2921
|
+
if (!existsSync5(scriptPath)) {
|
|
2492
2922
|
try {
|
|
2493
2923
|
const availableScripts = await readdir(scriptsDir);
|
|
2494
2924
|
return {
|
|
@@ -2547,14 +2977,14 @@ ${result.stderr}
|
|
|
2547
2977
|
}
|
|
2548
2978
|
|
|
2549
2979
|
// src/tools/skill-reference-tool.ts
|
|
2550
|
-
import { readFile, readdir as readdir2 } from "fs/promises";
|
|
2551
|
-
import { join as
|
|
2552
|
-
import { existsSync as
|
|
2980
|
+
import { readFile as readFile2, readdir as readdir2 } from "fs/promises";
|
|
2981
|
+
import { join as join5, normalize, relative as relative3 } from "path";
|
|
2982
|
+
import { existsSync as existsSync6 } from "fs";
|
|
2553
2983
|
import { Type as Type10 } from "@sinclair/typebox";
|
|
2554
2984
|
function isPathSafe(basePath, requestedPath) {
|
|
2555
2985
|
const normalized = normalize(requestedPath);
|
|
2556
|
-
const fullPath =
|
|
2557
|
-
const relativePath =
|
|
2986
|
+
const fullPath = join5(basePath, normalized);
|
|
2987
|
+
const relativePath = relative3(basePath, fullPath);
|
|
2558
2988
|
return !relativePath.startsWith("..") && !relativePath.startsWith("/");
|
|
2559
2989
|
}
|
|
2560
2990
|
var skillReferenceSchema = Type10.Object({
|
|
@@ -2582,8 +3012,8 @@ function createSkillReferenceTool(skills) {
|
|
|
2582
3012
|
details: void 0
|
|
2583
3013
|
};
|
|
2584
3014
|
}
|
|
2585
|
-
const referencesDir =
|
|
2586
|
-
if (!
|
|
3015
|
+
const referencesDir = join5(skill.skillPath, "references");
|
|
3016
|
+
if (!existsSync6(referencesDir)) {
|
|
2587
3017
|
return {
|
|
2588
3018
|
content: [
|
|
2589
3019
|
{ type: "text", text: `Skill '${skillName}' has no references directory` }
|
|
@@ -2602,8 +3032,8 @@ function createSkillReferenceTool(skills) {
|
|
|
2602
3032
|
details: void 0
|
|
2603
3033
|
};
|
|
2604
3034
|
}
|
|
2605
|
-
const fullPath =
|
|
2606
|
-
if (!
|
|
3035
|
+
const fullPath = join5(referencesDir, referencePath);
|
|
3036
|
+
if (!existsSync6(fullPath)) {
|
|
2607
3037
|
try {
|
|
2608
3038
|
const availableRefs = await listReferencesRecursive(referencesDir);
|
|
2609
3039
|
return {
|
|
@@ -2629,7 +3059,7 @@ ${availableRefs.join("\n")}`
|
|
|
2629
3059
|
}
|
|
2630
3060
|
}
|
|
2631
3061
|
try {
|
|
2632
|
-
const content = await
|
|
3062
|
+
const content = await readFile2(fullPath, "utf-8");
|
|
2633
3063
|
return {
|
|
2634
3064
|
content: [{ type: "text", text: content }],
|
|
2635
3065
|
details: void 0
|
|
@@ -2653,9 +3083,9 @@ async function listReferencesRecursive(dir, prefix = "") {
|
|
|
2653
3083
|
try {
|
|
2654
3084
|
const entries = await readdir2(dir, { withFileTypes: true });
|
|
2655
3085
|
for (const entry of entries) {
|
|
2656
|
-
const relativePath = prefix ?
|
|
3086
|
+
const relativePath = prefix ? join5(prefix, entry.name) : entry.name;
|
|
2657
3087
|
if (entry.isDirectory()) {
|
|
2658
|
-
const subFiles = await listReferencesRecursive(
|
|
3088
|
+
const subFiles = await listReferencesRecursive(join5(dir, entry.name), relativePath);
|
|
2659
3089
|
files.push(...subFiles);
|
|
2660
3090
|
} else {
|
|
2661
3091
|
files.push(relativePath);
|
|
@@ -2750,13 +3180,14 @@ async function fetchAllTodos(db, threadId) {
|
|
|
2750
3180
|
}
|
|
2751
3181
|
async function addTodos(threadId, descriptions, assignedTo = "agent") {
|
|
2752
3182
|
const db = await getDatabase();
|
|
2753
|
-
const
|
|
2754
|
-
for (
|
|
3183
|
+
const baseTime = Date.now();
|
|
3184
|
+
for (let i = 0; i < descriptions.length; i++) {
|
|
2755
3185
|
const id = randomUUID();
|
|
3186
|
+
const ts = new Date(baseTime + i).toISOString();
|
|
2756
3187
|
await db.execute({
|
|
2757
3188
|
sql: `INSERT INTO todos (id, threadId, description, status, assignedTo, createdAt, updatedAt)
|
|
2758
3189
|
VALUES (?, ?, ?, 'pending', ?, ?, ?)`,
|
|
2759
|
-
args: [id, threadId,
|
|
3190
|
+
args: [id, threadId, descriptions[i].trim(), assignedTo, ts, ts]
|
|
2760
3191
|
});
|
|
2761
3192
|
}
|
|
2762
3193
|
return fetchAllTodos(db, threadId);
|
|
@@ -2876,18 +3307,11 @@ function createTodoTool(threadId) {
|
|
|
2876
3307
|
};
|
|
2877
3308
|
}
|
|
2878
3309
|
const todosAfterUpdate = await fetchAllTodos(db, threadId);
|
|
2879
|
-
const allDone = todosAfterUpdate.length > 0 && todosAfterUpdate.every((t) => t.status === "done");
|
|
2880
|
-
if (allDone) {
|
|
2881
|
-
await db.execute({
|
|
2882
|
-
sql: `DELETE FROM todos WHERE threadId = ?`,
|
|
2883
|
-
args: [threadId]
|
|
2884
|
-
});
|
|
2885
|
-
}
|
|
2886
3310
|
return {
|
|
2887
3311
|
content: [
|
|
2888
3312
|
{
|
|
2889
3313
|
type: "text",
|
|
2890
|
-
text: JSON.stringify({ todos:
|
|
3314
|
+
text: JSON.stringify({ todos: todosAfterUpdate })
|
|
2891
3315
|
}
|
|
2892
3316
|
],
|
|
2893
3317
|
details: void 0
|
|
@@ -2946,16 +3370,16 @@ function createTodoTool(threadId) {
|
|
|
2946
3370
|
var todoTool = createTodoTool("");
|
|
2947
3371
|
|
|
2948
3372
|
// src/features/mcp/mcp.config.ts
|
|
2949
|
-
import { readFile as
|
|
2950
|
-
import { join as
|
|
3373
|
+
import { readFile as readFile3, stat, access } from "fs/promises";
|
|
3374
|
+
import { join as join6 } from "path";
|
|
2951
3375
|
var MCP_CONFIG_PATHS = [".agents/mcp.json", "mcp.json"];
|
|
2952
3376
|
async function loadMCPConfig(projectPath) {
|
|
2953
3377
|
for (const configPath of MCP_CONFIG_PATHS) {
|
|
2954
|
-
const fullPath =
|
|
3378
|
+
const fullPath = join6(projectPath, configPath);
|
|
2955
3379
|
try {
|
|
2956
3380
|
await access(fullPath);
|
|
2957
3381
|
const fileStats = await stat(fullPath);
|
|
2958
|
-
const fileContent = await
|
|
3382
|
+
const fileContent = await readFile3(fullPath, "utf-8");
|
|
2959
3383
|
const rawConfig = JSON.parse(fileContent);
|
|
2960
3384
|
const serversData = rawConfig.servers || rawConfig.mcpServers || {};
|
|
2961
3385
|
const config = {
|
|
@@ -2980,7 +3404,7 @@ async function loadMCPConfig(projectPath) {
|
|
|
2980
3404
|
}
|
|
2981
3405
|
async function getMCPConfigModificationTime(projectPath) {
|
|
2982
3406
|
for (const configPath of MCP_CONFIG_PATHS) {
|
|
2983
|
-
const fullPath =
|
|
3407
|
+
const fullPath = join6(projectPath, configPath);
|
|
2984
3408
|
try {
|
|
2985
3409
|
await access(fullPath);
|
|
2986
3410
|
const fileStats = await stat(fullPath);
|
|
@@ -4013,11 +4437,15 @@ var runWebWorkerSchema = Type15.Object({
|
|
|
4013
4437
|
})
|
|
4014
4438
|
});
|
|
4015
4439
|
var pendingTasks = /* @__PURE__ */ new Map();
|
|
4440
|
+
var bufferedResults = /* @__PURE__ */ new Map();
|
|
4016
4441
|
function submitWebWorkerResult(toolCallId, result) {
|
|
4017
4442
|
const pending = pendingTasks.get(toolCallId);
|
|
4018
|
-
if (
|
|
4019
|
-
|
|
4020
|
-
|
|
4443
|
+
if (pending) {
|
|
4444
|
+
pending.resolve(result);
|
|
4445
|
+
pendingTasks.delete(toolCallId);
|
|
4446
|
+
return true;
|
|
4447
|
+
}
|
|
4448
|
+
bufferedResults.set(toolCallId, result);
|
|
4021
4449
|
return true;
|
|
4022
4450
|
}
|
|
4023
4451
|
function cancelPendingWebWorker(toolCallId) {
|
|
@@ -4027,6 +4455,7 @@ function cancelPendingWebWorker(toolCallId) {
|
|
|
4027
4455
|
pendingTasks.delete(toolCallId);
|
|
4028
4456
|
return true;
|
|
4029
4457
|
}
|
|
4458
|
+
var DEFAULT_RUN_JS_TIMEOUT_MS = 3e4;
|
|
4030
4459
|
function buildDescription(tools) {
|
|
4031
4460
|
let desc = "Execute javascript code in a web worker (not nodejs). Use this when running JS code is the easiest way to accomplish the goal, such as data transformations, calculations, or orchestrating multiple tool calls programmatically. Returns the result of the execution. You can call other tools by using `await callTool('toolName', { arg: 'value' })`. callTool always returns `{ text: string | null, error: string | null }`. On success `text` contains the result and `error` is null. On failure `error` contains the error message and `text` is null. Example: `const r = await callTool('read', { path: 'foo.ts' }); if (r.error) return r.error; return r.text;`. Write the most compact code possible: no comments, short variable names, minimal whitespace.";
|
|
4032
4461
|
if (tools && tools.length > 0) {
|
|
@@ -4039,6 +4468,7 @@ ${signatures}`;
|
|
|
4039
4468
|
return desc;
|
|
4040
4469
|
}
|
|
4041
4470
|
function createRunWebWorkerTool(options) {
|
|
4471
|
+
const timeoutMs = options?.timeoutMs ?? DEFAULT_RUN_JS_TIMEOUT_MS;
|
|
4042
4472
|
return {
|
|
4043
4473
|
name: "run_js",
|
|
4044
4474
|
label: "run_js",
|
|
@@ -4050,16 +4480,55 @@ function createRunWebWorkerTool(options) {
|
|
|
4050
4480
|
throw new Error("Operation aborted");
|
|
4051
4481
|
}
|
|
4052
4482
|
const result = await new Promise((resolve6, reject) => {
|
|
4053
|
-
|
|
4054
|
-
|
|
4055
|
-
|
|
4056
|
-
|
|
4057
|
-
|
|
4483
|
+
const earlyResult = bufferedResults.get(toolCallId);
|
|
4484
|
+
if (earlyResult !== void 0) {
|
|
4485
|
+
bufferedResults.delete(toolCallId);
|
|
4486
|
+
resolve6(earlyResult);
|
|
4487
|
+
return;
|
|
4488
|
+
}
|
|
4489
|
+
let timeoutHandle = null;
|
|
4058
4490
|
const onAbort = () => {
|
|
4059
4491
|
cancelPendingWebWorker(toolCallId);
|
|
4060
4492
|
};
|
|
4493
|
+
function cleanup() {
|
|
4494
|
+
signal?.removeEventListener("abort", onAbort);
|
|
4495
|
+
if (timeoutHandle) {
|
|
4496
|
+
clearTimeout(timeoutHandle);
|
|
4497
|
+
}
|
|
4498
|
+
}
|
|
4499
|
+
const wrappedResolve = (value) => {
|
|
4500
|
+
cleanup();
|
|
4501
|
+
resolve6(value);
|
|
4502
|
+
};
|
|
4503
|
+
const wrappedReject = (error) => {
|
|
4504
|
+
cleanup();
|
|
4505
|
+
reject(error);
|
|
4506
|
+
};
|
|
4507
|
+
pendingTasks.set(toolCallId, {
|
|
4508
|
+
resolve: wrappedResolve,
|
|
4509
|
+
reject: wrappedReject,
|
|
4510
|
+
code
|
|
4511
|
+
});
|
|
4061
4512
|
signal?.addEventListener("abort", onAbort, { once: true });
|
|
4513
|
+
timeoutHandle = setTimeout(() => {
|
|
4514
|
+
const pending = pendingTasks.get(toolCallId);
|
|
4515
|
+
if (!pending) return;
|
|
4516
|
+
pendingTasks.delete(toolCallId);
|
|
4517
|
+
wrappedReject(
|
|
4518
|
+
new Error(
|
|
4519
|
+
`run_js timed out after ${timeoutMs}ms while waiting for browser worker response`
|
|
4520
|
+
)
|
|
4521
|
+
);
|
|
4522
|
+
}, timeoutMs);
|
|
4062
4523
|
});
|
|
4524
|
+
if (isRunJsErrorPayload(result)) {
|
|
4525
|
+
throw new Error(`run_js execution failed: ${result.error}`);
|
|
4526
|
+
}
|
|
4527
|
+
if (result === void 0 && looksLikeMissingReturn(code)) {
|
|
4528
|
+
throw new Error(
|
|
4529
|
+
"run_js result was undefined. The script likely forgot to return a value (for example: `return JSON.stringify(...)`)."
|
|
4530
|
+
);
|
|
4531
|
+
}
|
|
4063
4532
|
return {
|
|
4064
4533
|
content: [
|
|
4065
4534
|
{
|
|
@@ -4072,6 +4541,28 @@ function createRunWebWorkerTool(options) {
|
|
|
4072
4541
|
}
|
|
4073
4542
|
};
|
|
4074
4543
|
}
|
|
4544
|
+
function isRunJsErrorPayload(result) {
|
|
4545
|
+
if (!result || typeof result !== "object") {
|
|
4546
|
+
return false;
|
|
4547
|
+
}
|
|
4548
|
+
if (!("error" in result)) {
|
|
4549
|
+
return false;
|
|
4550
|
+
}
|
|
4551
|
+
const error = result.error;
|
|
4552
|
+
return typeof error === "string" && error.length > 0;
|
|
4553
|
+
}
|
|
4554
|
+
function looksLikeMissingReturn(code) {
|
|
4555
|
+
if (/\breturn\b/.test(code)) {
|
|
4556
|
+
return false;
|
|
4557
|
+
}
|
|
4558
|
+
if (/\bJSON\.stringify\s*\(/.test(code)) {
|
|
4559
|
+
return true;
|
|
4560
|
+
}
|
|
4561
|
+
if (/\bawait\s+callTool\b/.test(code) && /;\s*$/.test(code.trim())) {
|
|
4562
|
+
return true;
|
|
4563
|
+
}
|
|
4564
|
+
return false;
|
|
4565
|
+
}
|
|
4075
4566
|
var runWebWorkerTool = createRunWebWorkerTool();
|
|
4076
4567
|
|
|
4077
4568
|
// src/tools/tool-search.ts
|
|
@@ -4182,13 +4673,52 @@ ${schemaBlocks}
|
|
|
4182
4673
|
}
|
|
4183
4674
|
};
|
|
4184
4675
|
}
|
|
4185
|
-
function formatDeferredToolsList(deferredTools) {
|
|
4186
|
-
if (deferredTools.size === 0) return "";
|
|
4187
|
-
const names = Array.from(deferredTools.keys()).join(", ");
|
|
4188
|
-
return "\n\n# Deferred Tools\n\nThe following tools are available but deferred. Use the `tool_search` tool to fetch their full definitions before calling them:\n" + names;
|
|
4676
|
+
function formatDeferredToolsList(deferredTools) {
|
|
4677
|
+
if (deferredTools.size === 0) return "";
|
|
4678
|
+
const names = Array.from(deferredTools.keys()).join(", ");
|
|
4679
|
+
return "\n\n# Deferred Tools\n\nThe following tools are available but deferred. Use the `tool_search` tool to fetch their full definitions before calling them:\n" + names;
|
|
4680
|
+
}
|
|
4681
|
+
|
|
4682
|
+
// src/tools/index.ts
|
|
4683
|
+
var INDEX_INVALIDATING_TOOL_NAMES = /* @__PURE__ */ new Set(["bash", "edit", "write"]);
|
|
4684
|
+
function shouldInvalidateCodeSearchIndex(toolName) {
|
|
4685
|
+
return INDEX_INVALIDATING_TOOL_NAMES.has(toolName);
|
|
4686
|
+
}
|
|
4687
|
+
function shouldInvalidateCodeSearchIndexOnError(toolName) {
|
|
4688
|
+
return toolName === "bash";
|
|
4689
|
+
}
|
|
4690
|
+
async function invalidateCodeSearchIndexSafely(threadId, toolName) {
|
|
4691
|
+
try {
|
|
4692
|
+
await invalidateThreadIndex(threadId);
|
|
4693
|
+
console.log(`[code-search] invalidated thread index after ${toolName}`);
|
|
4694
|
+
} catch (error) {
|
|
4695
|
+
console.warn(`[code-search] failed to invalidate thread index after ${toolName}:`, error);
|
|
4696
|
+
}
|
|
4697
|
+
}
|
|
4698
|
+
function wrapToolWithCodeSearchInvalidation(tool, threadId) {
|
|
4699
|
+
if (!threadId || !shouldInvalidateCodeSearchIndex(tool.name)) {
|
|
4700
|
+
return tool;
|
|
4701
|
+
}
|
|
4702
|
+
const originalExecute = tool.execute.bind(tool);
|
|
4703
|
+
return {
|
|
4704
|
+
...tool,
|
|
4705
|
+
execute: async (toolCallId, inputs, signal, onUpdate) => {
|
|
4706
|
+
let success = false;
|
|
4707
|
+
try {
|
|
4708
|
+
const result = await originalExecute(toolCallId, inputs, signal, onUpdate);
|
|
4709
|
+
success = true;
|
|
4710
|
+
return result;
|
|
4711
|
+
} finally {
|
|
4712
|
+
if (success || shouldInvalidateCodeSearchIndexOnError(tool.name)) {
|
|
4713
|
+
await invalidateCodeSearchIndexSafely(threadId, tool.name);
|
|
4714
|
+
}
|
|
4715
|
+
}
|
|
4716
|
+
}
|
|
4717
|
+
};
|
|
4718
|
+
}
|
|
4719
|
+
function applyCodeSearchInvalidationHooks(tools, threadId) {
|
|
4720
|
+
return tools.map((tool) => wrapToolWithCodeSearchInvalidation(tool, threadId));
|
|
4189
4721
|
}
|
|
4190
|
-
|
|
4191
|
-
// src/tools/index.ts
|
|
4192
4722
|
async function createCodingTools(cwd, options) {
|
|
4193
4723
|
const deferOption = options?.deferTools ?? true;
|
|
4194
4724
|
const coreTools = [
|
|
@@ -4199,7 +4729,7 @@ async function createCodingTools(cwd, options) {
|
|
|
4199
4729
|
createAskUserTool(),
|
|
4200
4730
|
createTodoTool(options?.threadId ?? ""),
|
|
4201
4731
|
createFindTool(cwd),
|
|
4202
|
-
|
|
4732
|
+
createCodeSearchTool(cwd, options?.threadId ?? ""),
|
|
4203
4733
|
createRunWebWorkerTool()
|
|
4204
4734
|
];
|
|
4205
4735
|
const optionalTools = [];
|
|
@@ -4247,7 +4777,8 @@ async function createCodingTools(cwd, options) {
|
|
|
4247
4777
|
`[Tools] ${deferredTools.size} tool(s) deferred behind tool_search: ${Array.from(deferredTools.keys()).join(", ")}`
|
|
4248
4778
|
);
|
|
4249
4779
|
}
|
|
4250
|
-
|
|
4780
|
+
const wrappedTools = applyCodeSearchInvalidationHooks(tools, options?.threadId);
|
|
4781
|
+
return { tools: wrappedTools, deferredToolNames: deferredTools };
|
|
4251
4782
|
}
|
|
4252
4783
|
function createAllTools(cwd, options) {
|
|
4253
4784
|
const tools = {
|
|
@@ -4256,7 +4787,7 @@ function createAllTools(cwd, options) {
|
|
|
4256
4787
|
edit: createEditTool(cwd),
|
|
4257
4788
|
write: createWriteTool(cwd),
|
|
4258
4789
|
grep: createGrepTool(cwd),
|
|
4259
|
-
|
|
4790
|
+
code_search: createCodeSearchTool(cwd, options?.threadId ?? ""),
|
|
4260
4791
|
find: createFindTool(cwd),
|
|
4261
4792
|
ask_user: createAskUserTool(),
|
|
4262
4793
|
ls: createLsTool(cwd),
|
|
@@ -4269,6 +4800,11 @@ function createAllTools(cwd, options) {
|
|
|
4269
4800
|
});
|
|
4270
4801
|
}
|
|
4271
4802
|
tools.find_images = createFindImagesTool(cwd);
|
|
4803
|
+
if (options?.threadId) {
|
|
4804
|
+
for (const [name, tool] of Object.entries(tools)) {
|
|
4805
|
+
tools[name] = wrapToolWithCodeSearchInvalidation(tool, options.threadId);
|
|
4806
|
+
}
|
|
4807
|
+
}
|
|
4272
4808
|
return tools;
|
|
4273
4809
|
}
|
|
4274
4810
|
|
|
@@ -4731,21 +5267,21 @@ function createAgentTool(options) {
|
|
|
4731
5267
|
}
|
|
4732
5268
|
|
|
4733
5269
|
// src/core/paths.ts
|
|
4734
|
-
import { join as
|
|
5270
|
+
import { join as join7 } from "path";
|
|
4735
5271
|
import { homedir as homedir3 } from "os";
|
|
4736
|
-
var APP_SUPPORT_DIR =
|
|
4737
|
-
var DATA_DIR =
|
|
5272
|
+
var APP_SUPPORT_DIR = join7(homedir3(), "Library", "Application Support", "Tarsk");
|
|
5273
|
+
var DATA_DIR = join7(APP_SUPPORT_DIR, "data");
|
|
4738
5274
|
function getDataDir() {
|
|
4739
5275
|
return DATA_DIR;
|
|
4740
5276
|
}
|
|
4741
5277
|
|
|
4742
5278
|
// src/agent/agent.prompt-loader.ts
|
|
4743
5279
|
import { resolve } from "path";
|
|
4744
|
-
import { readFileSync as
|
|
5280
|
+
import { readFileSync as readFileSync4 } from "fs";
|
|
4745
5281
|
|
|
4746
5282
|
// src/project-analyzer.ts
|
|
4747
|
-
import { readFileSync as
|
|
4748
|
-
import { join as
|
|
5283
|
+
import { readFileSync as readFileSync3, existsSync as existsSync7 } from "fs";
|
|
5284
|
+
import { join as join8 } from "path";
|
|
4749
5285
|
var ProjectAnalyzer = class {
|
|
4750
5286
|
projectPath;
|
|
4751
5287
|
constructor(projectPath = process.cwd()) {
|
|
@@ -4756,10 +5292,10 @@ var ProjectAnalyzer = class {
|
|
|
4756
5292
|
return this.generateDescription(info);
|
|
4757
5293
|
}
|
|
4758
5294
|
getProjectInfo() {
|
|
4759
|
-
const packageJsonPath =
|
|
5295
|
+
const packageJsonPath = join8(this.projectPath, "package.json");
|
|
4760
5296
|
const info = { description: "" };
|
|
4761
|
-
if (
|
|
4762
|
-
const packageJson = JSON.parse(
|
|
5297
|
+
if (existsSync7(packageJsonPath)) {
|
|
5298
|
+
const packageJson = JSON.parse(readFileSync3(packageJsonPath, "utf-8"));
|
|
4763
5299
|
const allDeps = { ...packageJson.dependencies, ...packageJson.devDependencies };
|
|
4764
5300
|
this.detectFramework(allDeps, info);
|
|
4765
5301
|
this.detectBuildTool(packageJson.scripts, allDeps, info);
|
|
@@ -4798,15 +5334,15 @@ var ProjectAnalyzer = class {
|
|
|
4798
5334
|
}
|
|
4799
5335
|
}
|
|
4800
5336
|
detectBuildTool(_scripts = {}, deps, info) {
|
|
4801
|
-
if (deps.vite ||
|
|
5337
|
+
if (deps.vite || existsSync7(join8(this.projectPath, "vite.config.js")) || existsSync7(join8(this.projectPath, "vite.config.ts"))) {
|
|
4802
5338
|
info.buildTool = "Vite";
|
|
4803
5339
|
return;
|
|
4804
5340
|
}
|
|
4805
|
-
if (deps.webpack ||
|
|
5341
|
+
if (deps.webpack || existsSync7(join8(this.projectPath, "webpack.config.js"))) {
|
|
4806
5342
|
info.buildTool = "Webpack";
|
|
4807
5343
|
return;
|
|
4808
5344
|
}
|
|
4809
|
-
if (deps.rollup ||
|
|
5345
|
+
if (deps.rollup || existsSync7(join8(this.projectPath, "rollup.config.js"))) {
|
|
4810
5346
|
info.buildTool = "Rollup";
|
|
4811
5347
|
return;
|
|
4812
5348
|
}
|
|
@@ -4856,40 +5392,40 @@ var ProjectAnalyzer = class {
|
|
|
4856
5392
|
info.uiLibraries = [...new Set(uiLibs)].filter(Boolean);
|
|
4857
5393
|
}
|
|
4858
5394
|
detectProjectType(info) {
|
|
4859
|
-
if (
|
|
5395
|
+
if (existsSync7(join8(this.projectPath, ".xcodeproj")) || existsSync7(join8(this.projectPath, ".xcworkspace")) || existsSync7(join8(this.projectPath, "project.pbxproj"))) {
|
|
4860
5396
|
info.projectType = "Xcode";
|
|
4861
|
-
} else if (
|
|
5397
|
+
} else if (existsSync7(join8(this.projectPath, "build.gradle")) || existsSync7(join8(this.projectPath, "build.gradle.kts")) || existsSync7(join8(this.projectPath, "app/build.gradle")) || existsSync7(join8(this.projectPath, "settings.gradle"))) {
|
|
4862
5398
|
info.projectType = "Android Studio";
|
|
4863
|
-
} else if (
|
|
5399
|
+
} else if (existsSync7(join8(this.projectPath, "pubspec.yaml"))) {
|
|
4864
5400
|
info.projectType = "Flutter";
|
|
4865
|
-
} else if (
|
|
5401
|
+
} else if (existsSync7(join8(this.projectPath, "go.mod"))) {
|
|
4866
5402
|
info.projectType = "Go";
|
|
4867
|
-
} else if (
|
|
5403
|
+
} else if (existsSync7(join8(this.projectPath, "Cargo.toml"))) {
|
|
4868
5404
|
info.projectType = "Rust";
|
|
4869
|
-
} else if (
|
|
5405
|
+
} else if (existsSync7(join8(this.projectPath, "requirements.txt")) || existsSync7(join8(this.projectPath, "pyproject.toml")) || existsSync7(join8(this.projectPath, "setup.py"))) {
|
|
4870
5406
|
info.projectType = "Python";
|
|
4871
|
-
} else if (
|
|
5407
|
+
} else if (existsSync7(join8(this.projectPath, "Gemfile"))) {
|
|
4872
5408
|
info.projectType = "Ruby";
|
|
4873
|
-
} else if (
|
|
5409
|
+
} else if (existsSync7(join8(this.projectPath, "composer.json"))) {
|
|
4874
5410
|
info.projectType = "PHP";
|
|
4875
|
-
} else if (
|
|
5411
|
+
} else if (existsSync7(join8(this.projectPath, "pom.xml")) || existsSync7(join8(this.projectPath, "build.xml"))) {
|
|
4876
5412
|
info.projectType = "Java";
|
|
4877
|
-
} else if (
|
|
5413
|
+
} else if (existsSync7(join8(this.projectPath, ".csproj")) || existsSync7(join8(this.projectPath, "project.json"))) {
|
|
4878
5414
|
info.projectType = ".NET";
|
|
4879
5415
|
}
|
|
4880
5416
|
}
|
|
4881
5417
|
detectPackageManager(info) {
|
|
4882
|
-
if (
|
|
5418
|
+
if (existsSync7(join8(this.projectPath, "bun.lockb"))) {
|
|
4883
5419
|
info.packageManager = "Bun";
|
|
4884
|
-
} else if (
|
|
5420
|
+
} else if (existsSync7(join8(this.projectPath, "bun.lock"))) {
|
|
4885
5421
|
info.packageManager = "Bun";
|
|
4886
|
-
} else if (
|
|
5422
|
+
} else if (existsSync7(join8(this.projectPath, "pnpm-lock.yaml"))) {
|
|
4887
5423
|
info.packageManager = "pnpm";
|
|
4888
|
-
} else if (
|
|
5424
|
+
} else if (existsSync7(join8(this.projectPath, "yarn.lock"))) {
|
|
4889
5425
|
info.packageManager = "Yarn";
|
|
4890
|
-
} else if (
|
|
5426
|
+
} else if (existsSync7(join8(this.projectPath, "package-lock.json"))) {
|
|
4891
5427
|
info.packageManager = "npm";
|
|
4892
|
-
} else if (
|
|
5428
|
+
} else if (existsSync7(join8(this.projectPath, "npm-shrinkwrap.json"))) {
|
|
4893
5429
|
info.packageManager = "npm";
|
|
4894
5430
|
}
|
|
4895
5431
|
}
|
|
@@ -4951,9 +5487,9 @@ function analyzeProject(projectPath) {
|
|
|
4951
5487
|
}
|
|
4952
5488
|
|
|
4953
5489
|
// src/features/rules/rules.manager.ts
|
|
4954
|
-
import { readdir as readdir3, readFile as
|
|
4955
|
-
import { join as
|
|
4956
|
-
import { existsSync as
|
|
5490
|
+
import { readdir as readdir3, readFile as readFile4 } from "fs/promises";
|
|
5491
|
+
import { join as join9, relative as relative4 } from "path";
|
|
5492
|
+
import { existsSync as existsSync8 } from "fs";
|
|
4957
5493
|
import { homedir as homedir4 } from "os";
|
|
4958
5494
|
function parseRuleFrontmatter(markdown) {
|
|
4959
5495
|
const lines = markdown.split("\n");
|
|
@@ -4996,10 +5532,10 @@ function parseRuleFrontmatter(markdown) {
|
|
|
4996
5532
|
return { metadata, content };
|
|
4997
5533
|
}
|
|
4998
5534
|
function getGlobalRulesDir() {
|
|
4999
|
-
return
|
|
5535
|
+
return join9(homedir4(), ".agents", "rules");
|
|
5000
5536
|
}
|
|
5001
5537
|
function getProjectRulesDir(threadPath) {
|
|
5002
|
-
return
|
|
5538
|
+
return join9(threadPath, ".agents", "rules");
|
|
5003
5539
|
}
|
|
5004
5540
|
var RuleManager = class {
|
|
5005
5541
|
/**
|
|
@@ -5009,14 +5545,14 @@ var RuleManager = class {
|
|
|
5009
5545
|
async loadRules(threadPath) {
|
|
5010
5546
|
const rules = /* @__PURE__ */ new Map();
|
|
5011
5547
|
const globalDir = getGlobalRulesDir();
|
|
5012
|
-
if (
|
|
5548
|
+
if (existsSync8(globalDir)) {
|
|
5013
5549
|
const globalRules = await this.loadRulesFromDir(globalDir, threadPath, "global");
|
|
5014
5550
|
for (const rule of globalRules) {
|
|
5015
5551
|
rules.set(rule.name, rule);
|
|
5016
5552
|
}
|
|
5017
5553
|
}
|
|
5018
5554
|
const projectDir = getProjectRulesDir(threadPath);
|
|
5019
|
-
if (
|
|
5555
|
+
if (existsSync8(projectDir)) {
|
|
5020
5556
|
const projectRules = await this.loadRulesFromDir(projectDir, threadPath, "project");
|
|
5021
5557
|
for (const rule of projectRules) {
|
|
5022
5558
|
rules.set(rule.name, rule);
|
|
@@ -5032,16 +5568,16 @@ var RuleManager = class {
|
|
|
5032
5568
|
try {
|
|
5033
5569
|
const entries = await readdir3(dir, { withFileTypes: true });
|
|
5034
5570
|
for (const entry of entries) {
|
|
5035
|
-
const fullPath =
|
|
5571
|
+
const fullPath = join9(dir, entry.name);
|
|
5036
5572
|
if (entry.isDirectory()) {
|
|
5037
5573
|
const subRules = await this.loadRulesFromDir(fullPath, threadPath, scope);
|
|
5038
5574
|
rules.push(...subRules);
|
|
5039
5575
|
} else if (entry.isFile() && (entry.name.endsWith(".md") || entry.name.endsWith(".mdc"))) {
|
|
5040
5576
|
try {
|
|
5041
|
-
const fileContent = await
|
|
5577
|
+
const fileContent = await readFile4(fullPath, "utf-8");
|
|
5042
5578
|
const { metadata, content } = parseRuleFrontmatter(fileContent);
|
|
5043
5579
|
const ruleName = entry.name.replace(/\.(md|mdc)$/, "");
|
|
5044
|
-
const relativePath =
|
|
5580
|
+
const relativePath = relative4(threadPath, fullPath);
|
|
5045
5581
|
const rule = {
|
|
5046
5582
|
name: ruleName,
|
|
5047
5583
|
content,
|
|
@@ -5165,7 +5701,7 @@ var devServerCache = new DevServerCache();
|
|
|
5165
5701
|
// src/agent/agent.prompt-loader.ts
|
|
5166
5702
|
function buildDefaultPrompt(tools) {
|
|
5167
5703
|
const toolList = tools.map((t) => t.name).join(", ");
|
|
5168
|
-
return `You are a helpful coding assistant. You have access to ${toolList} tools. Use them to explore and modify the codebase as needed. Skills are created and stored in .agents/skills/ . `;
|
|
5704
|
+
return `You are a helpful coding assistant. You have access to ${toolList} tools. Use them to explore and modify the codebase as needed. Skills are created and stored in .agents/skills/ . Use Mermaid for flowcharts, sequence diagrams, state diagrams, or graphs when appropriate`;
|
|
5169
5705
|
}
|
|
5170
5706
|
var PLAN_MODE_INSTRUCTIONS = `
|
|
5171
5707
|
|
|
@@ -5200,7 +5736,7 @@ Remember: You are ONLY planning. Do NOT execute any changes.`;
|
|
|
5200
5736
|
function loadDeveloperContext(threadPath) {
|
|
5201
5737
|
try {
|
|
5202
5738
|
const agentsPath = resolve(threadPath, "agents.md");
|
|
5203
|
-
const agentsContent =
|
|
5739
|
+
const agentsContent = readFileSync4(agentsPath, "utf-8").trim();
|
|
5204
5740
|
console.log("[ai] Successfully loaded agents.md from thread path for developer context");
|
|
5205
5741
|
return agentsContent;
|
|
5206
5742
|
} catch (error) {
|
|
@@ -5913,9 +6449,9 @@ import { Hono as Hono2 } from "hono";
|
|
|
5913
6449
|
import { randomUUID as randomUUID4 } from "crypto";
|
|
5914
6450
|
|
|
5915
6451
|
// src/features/skills/skills.manager.ts
|
|
5916
|
-
import { readdir as readdir4, readFile as
|
|
5917
|
-
import { join as
|
|
5918
|
-
import { existsSync as
|
|
6452
|
+
import { readdir as readdir4, readFile as readFile5 } from "fs/promises";
|
|
6453
|
+
import { join as join10 } from "path";
|
|
6454
|
+
import { existsSync as existsSync9 } from "fs";
|
|
5919
6455
|
import { homedir as homedir5 } from "os";
|
|
5920
6456
|
function parseFrontmatter(markdown) {
|
|
5921
6457
|
const lines = markdown.split("\n");
|
|
@@ -5962,10 +6498,10 @@ function parseFrontmatter(markdown) {
|
|
|
5962
6498
|
return { metadata, content };
|
|
5963
6499
|
}
|
|
5964
6500
|
function getGlobalSkillsDir() {
|
|
5965
|
-
return
|
|
6501
|
+
return join10(homedir5(), ".agents", "skills");
|
|
5966
6502
|
}
|
|
5967
6503
|
function getProjectSkillsDir(threadPath) {
|
|
5968
|
-
return
|
|
6504
|
+
return join10(threadPath, ".agents", "skills");
|
|
5969
6505
|
}
|
|
5970
6506
|
function validateSkillName(name) {
|
|
5971
6507
|
if (!name || name.length === 0 || name.length > 64) {
|
|
@@ -5993,14 +6529,14 @@ var SkillManager = class {
|
|
|
5993
6529
|
async loadSkills(threadPath) {
|
|
5994
6530
|
const skills = /* @__PURE__ */ new Map();
|
|
5995
6531
|
const globalDir = getGlobalSkillsDir();
|
|
5996
|
-
if (
|
|
6532
|
+
if (existsSync9(globalDir)) {
|
|
5997
6533
|
const globalSkills = await this.loadSkillsFromDir(globalDir, "global");
|
|
5998
6534
|
for (const skill of globalSkills) {
|
|
5999
6535
|
skills.set(skill.name, skill);
|
|
6000
6536
|
}
|
|
6001
6537
|
}
|
|
6002
6538
|
const projectDir = getProjectSkillsDir(threadPath);
|
|
6003
|
-
if (
|
|
6539
|
+
if (existsSync9(projectDir)) {
|
|
6004
6540
|
const projectSkills = await this.loadSkillsFromDir(projectDir, "project");
|
|
6005
6541
|
for (const skill of projectSkills) {
|
|
6006
6542
|
skills.set(skill.name, skill);
|
|
@@ -6018,14 +6554,14 @@ var SkillManager = class {
|
|
|
6018
6554
|
for (const entry of entries) {
|
|
6019
6555
|
if (!entry.isDirectory()) continue;
|
|
6020
6556
|
const skillDirName = entry.name;
|
|
6021
|
-
const skillPath =
|
|
6022
|
-
const skillFilePath =
|
|
6023
|
-
if (!
|
|
6557
|
+
const skillPath = join10(dir, skillDirName);
|
|
6558
|
+
const skillFilePath = join10(skillPath, "SKILL.md");
|
|
6559
|
+
if (!existsSync9(skillFilePath)) {
|
|
6024
6560
|
console.warn(`Skipping skill directory ${skillDirName}: SKILL.md not found`);
|
|
6025
6561
|
continue;
|
|
6026
6562
|
}
|
|
6027
6563
|
try {
|
|
6028
|
-
const fileContent = await
|
|
6564
|
+
const fileContent = await readFile5(skillFilePath, "utf-8");
|
|
6029
6565
|
const { metadata, content } = parseFrontmatter(fileContent);
|
|
6030
6566
|
if (!metadata.name) {
|
|
6031
6567
|
console.warn(`Skipping skill in ${skillDirName}: missing 'name' in frontmatter`);
|
|
@@ -6198,9 +6734,9 @@ async function activateSkills(allSkills, taskDescription, thread, options) {
|
|
|
6198
6734
|
}
|
|
6199
6735
|
|
|
6200
6736
|
// src/features/agents/agents.manager.ts
|
|
6201
|
-
import { readdir as readdir5, readFile as
|
|
6202
|
-
import { join as
|
|
6203
|
-
import { existsSync as
|
|
6737
|
+
import { readdir as readdir5, readFile as readFile6 } from "fs/promises";
|
|
6738
|
+
import { join as join11 } from "path";
|
|
6739
|
+
import { existsSync as existsSync10 } from "fs";
|
|
6204
6740
|
import { homedir as homedir6 } from "os";
|
|
6205
6741
|
function parseFrontmatter2(markdown) {
|
|
6206
6742
|
const lines = markdown.split("\n");
|
|
@@ -6257,10 +6793,10 @@ function validateDescription2(description) {
|
|
|
6257
6793
|
return !!description && description.length > 0 && description.length <= 1024;
|
|
6258
6794
|
}
|
|
6259
6795
|
function getGlobalAgentsDir() {
|
|
6260
|
-
return
|
|
6796
|
+
return join11(homedir6(), ".agents", "agents");
|
|
6261
6797
|
}
|
|
6262
6798
|
function getProjectAgentsDir(threadPath) {
|
|
6263
|
-
return
|
|
6799
|
+
return join11(threadPath, ".agents", "agents");
|
|
6264
6800
|
}
|
|
6265
6801
|
var AgentsManager = class {
|
|
6266
6802
|
/**
|
|
@@ -6270,14 +6806,14 @@ var AgentsManager = class {
|
|
|
6270
6806
|
async loadAgents(threadPath) {
|
|
6271
6807
|
const agents = /* @__PURE__ */ new Map();
|
|
6272
6808
|
const globalDir = getGlobalAgentsDir();
|
|
6273
|
-
if (
|
|
6809
|
+
if (existsSync10(globalDir)) {
|
|
6274
6810
|
const globalAgents = await this.loadAgentsFromDir(globalDir, "global");
|
|
6275
6811
|
for (const agent of globalAgents) {
|
|
6276
6812
|
agents.set(agent.name, agent);
|
|
6277
6813
|
}
|
|
6278
6814
|
}
|
|
6279
6815
|
const projectDir = getProjectAgentsDir(threadPath);
|
|
6280
|
-
if (
|
|
6816
|
+
if (existsSync10(projectDir)) {
|
|
6281
6817
|
const projectAgents = await this.loadAgentsFromDir(projectDir, "project");
|
|
6282
6818
|
for (const agent of projectAgents) {
|
|
6283
6819
|
agents.set(agent.name, agent);
|
|
@@ -6292,14 +6828,14 @@ var AgentsManager = class {
|
|
|
6292
6828
|
for (const entry of entries) {
|
|
6293
6829
|
if (!entry.isDirectory()) continue;
|
|
6294
6830
|
const agentDirName = entry.name;
|
|
6295
|
-
const agentPath =
|
|
6296
|
-
const agentFilePath =
|
|
6297
|
-
if (!
|
|
6831
|
+
const agentPath = join11(dir, agentDirName);
|
|
6832
|
+
const agentFilePath = join11(agentPath, "AGENT.md");
|
|
6833
|
+
if (!existsSync10(agentFilePath)) {
|
|
6298
6834
|
console.warn(`[agents] Skipping agent directory ${agentDirName}: AGENT.md not found`);
|
|
6299
6835
|
continue;
|
|
6300
6836
|
}
|
|
6301
6837
|
try {
|
|
6302
|
-
const fileContent = await
|
|
6838
|
+
const fileContent = await readFile6(agentFilePath, "utf-8");
|
|
6303
6839
|
const { metadata, content } = parseFrontmatter2(fileContent);
|
|
6304
6840
|
if (!metadata.name) {
|
|
6305
6841
|
console.warn(
|
|
@@ -6479,8 +7015,8 @@ function extractAssistantContent(events, fallback) {
|
|
|
6479
7015
|
|
|
6480
7016
|
// src/features/chat/chat-post.route.ts
|
|
6481
7017
|
init_database();
|
|
6482
|
-
import { readFile as
|
|
6483
|
-
import { join as
|
|
7018
|
+
import { readFile as readFile7, unlink } from "fs/promises";
|
|
7019
|
+
import { join as join12 } from "path";
|
|
6484
7020
|
|
|
6485
7021
|
// src/features/project-todos/project-todos.database.ts
|
|
6486
7022
|
init_database();
|
|
@@ -7072,9 +7608,9 @@ async function postChatMessage(c, threadManager, agentExecutor, conversationMana
|
|
|
7072
7608
|
try {
|
|
7073
7609
|
const todo = await getTodoByThreadId(db, threadId);
|
|
7074
7610
|
if (todo && todo.status === "Plan") {
|
|
7075
|
-
const planFilePath =
|
|
7611
|
+
const planFilePath = join12(threadPath, `${todo.id}-plan.md`);
|
|
7076
7612
|
try {
|
|
7077
|
-
const planContent = await
|
|
7613
|
+
const planContent = await readFile7(planFilePath, "utf-8");
|
|
7078
7614
|
await updateTodo(db, todo.id, { description: planContent.trim() });
|
|
7079
7615
|
await unlink(planFilePath);
|
|
7080
7616
|
} catch {
|
|
@@ -8451,28 +8987,35 @@ async function getAllProjects(db) {
|
|
|
8451
8987
|
try {
|
|
8452
8988
|
const result = await db.execute("SELECT * FROM projects ORDER BY createdAt DESC");
|
|
8453
8989
|
const rows = result.rows;
|
|
8454
|
-
|
|
8990
|
+
const threadResult = await db.execute(
|
|
8991
|
+
"SELECT id, projectId FROM threads ORDER BY createdAt DESC"
|
|
8992
|
+
);
|
|
8993
|
+
const threadRows = threadResult.rows;
|
|
8994
|
+
const threadIdsByProject = /* @__PURE__ */ new Map();
|
|
8995
|
+
for (const row of threadRows) {
|
|
8996
|
+
const ids = threadIdsByProject.get(row.projectId);
|
|
8997
|
+
if (ids) {
|
|
8998
|
+
ids.push(row.id);
|
|
8999
|
+
} else {
|
|
9000
|
+
threadIdsByProject.set(row.projectId, [row.id]);
|
|
9001
|
+
}
|
|
9002
|
+
}
|
|
9003
|
+
return rows.map(
|
|
9004
|
+
(row) => deserializeProjectWithThreads(row, threadIdsByProject.get(row.id) ?? [])
|
|
9005
|
+
);
|
|
8455
9006
|
} catch (error) {
|
|
8456
9007
|
console.error("Failed to get all projects:", error);
|
|
8457
9008
|
throw error;
|
|
8458
9009
|
}
|
|
8459
9010
|
}
|
|
8460
|
-
|
|
8461
|
-
const result = await db.execute(
|
|
8462
|
-
"SELECT id FROM threads WHERE projectId = ? ORDER BY createdAt DESC",
|
|
8463
|
-
[projectId]
|
|
8464
|
-
);
|
|
8465
|
-
const rows = result.rows;
|
|
8466
|
-
return rows.map((row) => row.id);
|
|
8467
|
-
}
|
|
8468
|
-
async function deserializeProject(db, row) {
|
|
9011
|
+
function deserializeProjectWithThreads(row, threadIds) {
|
|
8469
9012
|
return {
|
|
8470
9013
|
id: row.id,
|
|
8471
9014
|
name: row.name,
|
|
8472
9015
|
gitUrl: row.gitUrl,
|
|
8473
9016
|
path: row.path,
|
|
8474
9017
|
createdAt: new Date(row.createdAt),
|
|
8475
|
-
threads:
|
|
9018
|
+
threads: threadIds,
|
|
8476
9019
|
openWith: row.openWith ?? void 0,
|
|
8477
9020
|
commands: row.commands ? JSON.parse(row.commands) : void 0,
|
|
8478
9021
|
setupScript: row.setupScript ?? void 0,
|
|
@@ -8569,6 +9112,12 @@ async function updateThread(db, id, updates) {
|
|
|
8569
9112
|
async function deleteThread(db, id) {
|
|
8570
9113
|
try {
|
|
8571
9114
|
await markConversationHistoryAsDeleted(db, id);
|
|
9115
|
+
await db.execute(
|
|
9116
|
+
"DELETE FROM code_index WHERE rowid IN (SELECT id FROM code_files WHERE thread_id = ?)",
|
|
9117
|
+
[id]
|
|
9118
|
+
);
|
|
9119
|
+
await db.execute("DELETE FROM code_files WHERE thread_id = ?", [id]);
|
|
9120
|
+
await db.execute("DELETE FROM code_index_meta WHERE thread_id = ?", [id]);
|
|
8572
9121
|
await db.execute("UPDATE threads SET status = ? WHERE id = ?", ["deleted", id]);
|
|
8573
9122
|
} catch (error) {
|
|
8574
9123
|
console.error("Failed to mark thread as deleted:", error);
|
|
@@ -9430,34 +9979,58 @@ async function handleOpenFolderProject(c, projectManager) {
|
|
|
9430
9979
|
}
|
|
9431
9980
|
|
|
9432
9981
|
// src/features/projects/projects-list.route.ts
|
|
9433
|
-
|
|
9982
|
+
function computeGitStatusDot(status) {
|
|
9983
|
+
if (status.hasChanges) return true;
|
|
9984
|
+
if (status.commitsAheadOfDefault && !status.prExists) return true;
|
|
9985
|
+
if (status.status === "Behind" || status.status === "Diverged") return true;
|
|
9986
|
+
return false;
|
|
9987
|
+
}
|
|
9988
|
+
async function handleListProjects(c, projectManager, threadManager, db) {
|
|
9434
9989
|
try {
|
|
9435
9990
|
const projects = await projectManager.listProjects();
|
|
9436
9991
|
const selectedThreadId = await threadManager.getSelectedThreadId();
|
|
9437
|
-
const
|
|
9438
|
-
|
|
9439
|
-
|
|
9440
|
-
|
|
9441
|
-
|
|
9442
|
-
|
|
9443
|
-
|
|
9444
|
-
|
|
9445
|
-
|
|
9446
|
-
|
|
9447
|
-
|
|
9448
|
-
|
|
9449
|
-
|
|
9450
|
-
|
|
9992
|
+
const allThreads = await threadManager.listAllThreads();
|
|
9993
|
+
const threadsByProject = /* @__PURE__ */ new Map();
|
|
9994
|
+
for (const thread of allThreads) {
|
|
9995
|
+
const list = threadsByProject.get(thread.projectId);
|
|
9996
|
+
if (list) {
|
|
9997
|
+
list.push(thread);
|
|
9998
|
+
} else {
|
|
9999
|
+
threadsByProject.set(thread.projectId, [thread]);
|
|
10000
|
+
}
|
|
10001
|
+
}
|
|
10002
|
+
const allThreadIds = allThreads.map((t) => t.id);
|
|
10003
|
+
const statusCache = db ? await getGitStatusCacheBulk(db, allThreadIds) : /* @__PURE__ */ new Map();
|
|
10004
|
+
const expandedProjects = projects.map((project) => {
|
|
10005
|
+
const threads = threadsByProject.get(project.id) ?? [];
|
|
10006
|
+
return {
|
|
10007
|
+
projectId: project.id,
|
|
10008
|
+
name: project.name,
|
|
10009
|
+
gitUrl: project.gitUrl,
|
|
10010
|
+
projectPath: project.path,
|
|
10011
|
+
openWith: project.openWith,
|
|
10012
|
+
commands: project.commands ?? [],
|
|
10013
|
+
setupScript: project.setupScript,
|
|
10014
|
+
runCommand: project.runCommand,
|
|
10015
|
+
commitMethod: project.commitMethod,
|
|
10016
|
+
planPrompt: project.planPrompt,
|
|
10017
|
+
testPrompt: project.testPrompt,
|
|
10018
|
+
reviewPrompt: project.reviewPrompt,
|
|
10019
|
+
threads: threads.map((thread) => {
|
|
10020
|
+
const cached = statusCache.get(thread.id);
|
|
10021
|
+
const gitStatusDot = cached ? computeGitStatusDot(cached) : false;
|
|
10022
|
+
return {
|
|
9451
10023
|
threadId: thread.id,
|
|
9452
10024
|
title: thread.title,
|
|
9453
10025
|
path: thread.path,
|
|
9454
10026
|
isSelected: thread.id === selectedThreadId,
|
|
9455
10027
|
model: thread.model,
|
|
9456
|
-
modelProvider: thread.modelProvider
|
|
9457
|
-
|
|
9458
|
-
|
|
9459
|
-
|
|
9460
|
-
|
|
10028
|
+
modelProvider: thread.modelProvider,
|
|
10029
|
+
gitStatusDot
|
|
10030
|
+
};
|
|
10031
|
+
})
|
|
10032
|
+
};
|
|
10033
|
+
});
|
|
9461
10034
|
return c.json(expandedProjects);
|
|
9462
10035
|
} catch (error) {
|
|
9463
10036
|
return errorResponse(
|
|
@@ -9928,12 +10501,12 @@ async function handleCheckRunning(c, projectManager) {
|
|
|
9928
10501
|
}
|
|
9929
10502
|
|
|
9930
10503
|
// src/core/run-command-detector.ts
|
|
9931
|
-
import { readFile as
|
|
9932
|
-
import { join as
|
|
10504
|
+
import { readFile as readFile8 } from "fs/promises";
|
|
10505
|
+
import { join as join13 } from "path";
|
|
9933
10506
|
async function detectPackageManager(projectPath) {
|
|
9934
10507
|
try {
|
|
9935
|
-
const packageJsonPath =
|
|
9936
|
-
const content = await
|
|
10508
|
+
const packageJsonPath = join13(projectPath, "package.json");
|
|
10509
|
+
const content = await readFile8(packageJsonPath, "utf-8");
|
|
9937
10510
|
const packageJson = JSON.parse(content);
|
|
9938
10511
|
if (packageJson.packageManager) {
|
|
9939
10512
|
const pm = packageJson.packageManager.split("@")[0];
|
|
@@ -9948,8 +10521,8 @@ async function detectPackageManager(projectPath) {
|
|
|
9948
10521
|
}
|
|
9949
10522
|
async function detectRunScripts(projectPath) {
|
|
9950
10523
|
try {
|
|
9951
|
-
const packageJsonPath =
|
|
9952
|
-
const content = await
|
|
10524
|
+
const packageJsonPath = join13(projectPath, "package.json");
|
|
10525
|
+
const content = await readFile8(packageJsonPath, "utf-8");
|
|
9953
10526
|
const packageJson = JSON.parse(content);
|
|
9954
10527
|
const scripts = packageJson.scripts ?? {};
|
|
9955
10528
|
return {
|
|
@@ -9994,8 +10567,8 @@ async function suggestRunCommand(projectPath) {
|
|
|
9994
10567
|
}
|
|
9995
10568
|
|
|
9996
10569
|
// src/features/projects/projects-package-scripts.route.ts
|
|
9997
|
-
import { readFile as
|
|
9998
|
-
import { join as
|
|
10570
|
+
import { readFile as readFile9 } from "fs/promises";
|
|
10571
|
+
import { join as join14 } from "path";
|
|
9999
10572
|
import { glob } from "glob";
|
|
10000
10573
|
function formatScriptName(scriptName) {
|
|
10001
10574
|
return scriptName.replace(/[-:_]/g, " ").replace(/\b\w/g, (l) => l.toUpperCase());
|
|
@@ -10023,8 +10596,8 @@ async function findPackageScripts(projectPath) {
|
|
|
10023
10596
|
const packageManager = await detectPackageManager(projectPath);
|
|
10024
10597
|
for (const filePath of packageJsonFiles) {
|
|
10025
10598
|
try {
|
|
10026
|
-
const fullPath =
|
|
10027
|
-
const content = await
|
|
10599
|
+
const fullPath = join14(projectPath, filePath);
|
|
10600
|
+
const content = await readFile9(fullPath, "utf-8");
|
|
10028
10601
|
const packageJson = JSON.parse(content);
|
|
10029
10602
|
if (packageJson.scripts) {
|
|
10030
10603
|
for (const [scriptName] of Object.entries(packageJson.scripts)) {
|
|
@@ -10071,9 +10644,9 @@ async function handleGetPackageScripts(c, projectManager) {
|
|
|
10071
10644
|
}
|
|
10072
10645
|
|
|
10073
10646
|
// src/features/projects/projects-ai-files.route.ts
|
|
10074
|
-
import { readdir as readdir6, readFile as
|
|
10075
|
-
import { join as
|
|
10076
|
-
import { existsSync as
|
|
10647
|
+
import { readdir as readdir6, readFile as readFile10, writeFile, mkdir, access as access2, rm } from "fs/promises";
|
|
10648
|
+
import { join as join15, extname as extname3, basename } from "path";
|
|
10649
|
+
import { existsSync as existsSync11 } from "fs";
|
|
10077
10650
|
var IGNORED_DIRS = /* @__PURE__ */ new Set([
|
|
10078
10651
|
".git",
|
|
10079
10652
|
"node_modules",
|
|
@@ -10091,8 +10664,8 @@ async function buildFullTree(dirPath, relativeDirPath) {
|
|
|
10091
10664
|
try {
|
|
10092
10665
|
const entries = await readdir6(dirPath, { withFileTypes: true });
|
|
10093
10666
|
for (const entry of entries) {
|
|
10094
|
-
const entryRelPath =
|
|
10095
|
-
const entryAbsPath =
|
|
10667
|
+
const entryRelPath = join15(relativeDirPath, entry.name);
|
|
10668
|
+
const entryAbsPath = join15(dirPath, entry.name);
|
|
10096
10669
|
if (entry.isDirectory()) {
|
|
10097
10670
|
const children = await buildFullTree(entryAbsPath, entryRelPath);
|
|
10098
10671
|
nodes.push({
|
|
@@ -10120,8 +10693,8 @@ async function buildMarkdownFilteredTree(dirPath, relativeDirPath) {
|
|
|
10120
10693
|
try {
|
|
10121
10694
|
const entries = await readdir6(dirPath, { withFileTypes: true });
|
|
10122
10695
|
for (const entry of entries) {
|
|
10123
|
-
const entryRelPath =
|
|
10124
|
-
const entryAbsPath =
|
|
10696
|
+
const entryRelPath = join15(relativeDirPath, entry.name);
|
|
10697
|
+
const entryAbsPath = join15(dirPath, entry.name);
|
|
10125
10698
|
if (entry.isDirectory()) {
|
|
10126
10699
|
if (IGNORED_DIRS.has(entry.name)) continue;
|
|
10127
10700
|
const children = await buildMarkdownFilteredTree(entryAbsPath, entryRelPath);
|
|
@@ -10134,10 +10707,10 @@ async function buildMarkdownFilteredTree(dirPath, relativeDirPath) {
|
|
|
10134
10707
|
children
|
|
10135
10708
|
});
|
|
10136
10709
|
}
|
|
10137
|
-
} else if (entry.isFile() && MARKDOWN_EXTS.has(
|
|
10710
|
+
} else if (entry.isFile() && MARKDOWN_EXTS.has(extname3(entry.name))) {
|
|
10138
10711
|
nodes.push({
|
|
10139
10712
|
id: entryRelPath,
|
|
10140
|
-
name: basename(entry.name,
|
|
10713
|
+
name: basename(entry.name, extname3(entry.name)),
|
|
10141
10714
|
type: "file",
|
|
10142
10715
|
path: entryRelPath
|
|
10143
10716
|
});
|
|
@@ -10156,9 +10729,9 @@ async function buildAIFileTree(projectPath) {
|
|
|
10156
10729
|
{ key: "agents", label: "Agents" }
|
|
10157
10730
|
];
|
|
10158
10731
|
for (const { key, label } of agentsFolders) {
|
|
10159
|
-
const relPath =
|
|
10160
|
-
const absPath =
|
|
10161
|
-
const exists =
|
|
10732
|
+
const relPath = join15(".agents", key);
|
|
10733
|
+
const absPath = join15(projectPath, relPath);
|
|
10734
|
+
const exists = existsSync11(absPath);
|
|
10162
10735
|
if (exists) {
|
|
10163
10736
|
const children = await buildFullTree(absPath, relPath);
|
|
10164
10737
|
nodes.push({
|
|
@@ -10192,11 +10765,11 @@ async function buildAIFileTree(projectPath) {
|
|
|
10192
10765
|
if (entry.name === ".agents" || entry.name === "AGENTS.md" || IGNORED_DIRS.has(entry.name))
|
|
10193
10766
|
continue;
|
|
10194
10767
|
const entryRelPath = entry.name;
|
|
10195
|
-
const entryAbsPath =
|
|
10196
|
-
if (entry.isFile() && MARKDOWN_EXTS.has(
|
|
10768
|
+
const entryAbsPath = join15(projectPath, entry.name);
|
|
10769
|
+
if (entry.isFile() && MARKDOWN_EXTS.has(extname3(entry.name))) {
|
|
10197
10770
|
nodes.push({
|
|
10198
10771
|
id: entryRelPath,
|
|
10199
|
-
name: basename(entry.name,
|
|
10772
|
+
name: basename(entry.name, extname3(entry.name)),
|
|
10200
10773
|
type: "file",
|
|
10201
10774
|
path: entryRelPath
|
|
10202
10775
|
});
|
|
@@ -10219,7 +10792,7 @@ async function buildAIFileTree(projectPath) {
|
|
|
10219
10792
|
}
|
|
10220
10793
|
function validateFilePath(projectPath, filePath) {
|
|
10221
10794
|
if (!filePath) return null;
|
|
10222
|
-
const abs =
|
|
10795
|
+
const abs = join15(projectPath, filePath);
|
|
10223
10796
|
if (!abs.startsWith(projectPath + "/") && abs !== projectPath) return null;
|
|
10224
10797
|
return abs;
|
|
10225
10798
|
}
|
|
@@ -10300,7 +10873,7 @@ Links to important documentation, tools, or references.
|
|
|
10300
10873
|
} catch {
|
|
10301
10874
|
return c.json({ error: { code: "NOT_FOUND", message: `File not found: ${filePath}` } }, 404);
|
|
10302
10875
|
}
|
|
10303
|
-
const content = await
|
|
10876
|
+
const content = await readFile10(absPath, "utf-8");
|
|
10304
10877
|
return c.json({ content, path: filePath });
|
|
10305
10878
|
} catch (error) {
|
|
10306
10879
|
return errorResponse(
|
|
@@ -10336,7 +10909,7 @@ async function handleSaveAIFile(c, projectManager) {
|
|
|
10336
10909
|
if (!absPath) {
|
|
10337
10910
|
return c.json({ error: { code: "BAD_REQUEST", message: "Invalid file path" } }, 400);
|
|
10338
10911
|
}
|
|
10339
|
-
const parentDir =
|
|
10912
|
+
const parentDir = join15(absPath, "..");
|
|
10340
10913
|
await mkdir(parentDir, { recursive: true });
|
|
10341
10914
|
await writeFile(absPath, content, "utf-8");
|
|
10342
10915
|
return c.json({ success: true, path: filePath });
|
|
@@ -10430,11 +11003,11 @@ async function handleCreateSkill(c, projectManager) {
|
|
|
10430
11003
|
if (!project) {
|
|
10431
11004
|
return errorResponse(c, ErrorCodes.PROJECT_NOT_FOUND, `Project not found: ${projectId}`, 404);
|
|
10432
11005
|
}
|
|
10433
|
-
const skillRelPath =
|
|
10434
|
-
const skillAbsPath =
|
|
10435
|
-
const skillFileRelPath =
|
|
10436
|
-
const skillFileAbsPath =
|
|
10437
|
-
if (
|
|
11006
|
+
const skillRelPath = join15(".agents", "skills", name);
|
|
11007
|
+
const skillAbsPath = join15(project.path, skillRelPath);
|
|
11008
|
+
const skillFileRelPath = join15(skillRelPath, "SKILL.md");
|
|
11009
|
+
const skillFileAbsPath = join15(skillAbsPath, "SKILL.md");
|
|
11010
|
+
if (existsSync11(skillAbsPath)) {
|
|
10438
11011
|
return c.json(
|
|
10439
11012
|
{ error: { code: "CONFLICT", message: `Skill '${name}' already exists` } },
|
|
10440
11013
|
409
|
|
@@ -10475,7 +11048,9 @@ function createProjectRoutes(projectManager, threadManager) {
|
|
|
10475
11048
|
return handleOpenFolderProject(c, projectManager);
|
|
10476
11049
|
});
|
|
10477
11050
|
router.get("/", async (c) => {
|
|
10478
|
-
|
|
11051
|
+
const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_database(), database_exports));
|
|
11052
|
+
const db = await getDatabase2();
|
|
11053
|
+
return handleListProjects(c, projectManager, threadManager, db);
|
|
10479
11054
|
});
|
|
10480
11055
|
router.get("/:id", async (c) => {
|
|
10481
11056
|
return handleGetProject(c, projectManager);
|
|
@@ -10533,7 +11108,7 @@ function createProjectRoutes(projectManager, threadManager) {
|
|
|
10533
11108
|
|
|
10534
11109
|
// src/features/projects/projects.manager.ts
|
|
10535
11110
|
init_utils();
|
|
10536
|
-
import { join as
|
|
11111
|
+
import { join as join18 } from "path";
|
|
10537
11112
|
import { realpathSync as realpathSync2 } from "fs";
|
|
10538
11113
|
import { rm as rm3 } from "fs/promises";
|
|
10539
11114
|
|
|
@@ -10610,11 +11185,13 @@ var ProcessManager = class extends EventEmitter {
|
|
|
10610
11185
|
});
|
|
10611
11186
|
};
|
|
10612
11187
|
try {
|
|
10613
|
-
const
|
|
10614
|
-
console.log("CLI ProcessManager: Spawning process:",
|
|
10615
|
-
const childProcess = spawnProcess(
|
|
11188
|
+
const { shell, args: shellArgs } = getShellConfig();
|
|
11189
|
+
console.log("CLI ProcessManager: Spawning process:", shell, "with command:", command);
|
|
11190
|
+
const childProcess = spawnProcess(shell, [...shellArgs, command], {
|
|
10616
11191
|
cwd,
|
|
10617
|
-
|
|
11192
|
+
env: Object.fromEntries(
|
|
11193
|
+
Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
|
|
11194
|
+
),
|
|
10618
11195
|
stdio: ["ignore", "pipe", "pipe"]
|
|
10619
11196
|
});
|
|
10620
11197
|
this.processes.set(projectId, childProcess);
|
|
@@ -10752,13 +11329,13 @@ var ProcessManager = class extends EventEmitter {
|
|
|
10752
11329
|
// src/features/projects/projects.creator.ts
|
|
10753
11330
|
init_utils();
|
|
10754
11331
|
import { randomUUID as randomUUID7 } from "crypto";
|
|
10755
|
-
import { basename as basename2, join as
|
|
11332
|
+
import { basename as basename2, join as join17, relative as relative5 } from "path";
|
|
10756
11333
|
import { access as access3, stat as stat3, readdir as readdir8 } from "fs/promises";
|
|
10757
11334
|
|
|
10758
11335
|
// src/features/scaffold/scaffold.runner.ts
|
|
10759
11336
|
init_utils();
|
|
10760
|
-
import { existsSync as
|
|
10761
|
-
import { join as
|
|
11337
|
+
import { existsSync as existsSync12 } from "fs";
|
|
11338
|
+
import { join as join16 } from "path";
|
|
10762
11339
|
import { mkdir as mkdir2, readdir as readdir7, stat as stat2, rename, rm as rm2 } from "fs/promises";
|
|
10763
11340
|
import { createInterface as createInterface2 } from "readline";
|
|
10764
11341
|
|
|
@@ -11423,10 +12000,12 @@ function buildCommandList(template, options) {
|
|
|
11423
12000
|
return withPm;
|
|
11424
12001
|
}
|
|
11425
12002
|
async function* runCommandStreaming(cwd, command) {
|
|
11426
|
-
const
|
|
11427
|
-
const child = spawnProcess(
|
|
12003
|
+
const { shell, args: shellArgs } = getShellConfig();
|
|
12004
|
+
const child = spawnProcess(shell, [...shellArgs, command], {
|
|
11428
12005
|
cwd,
|
|
11429
|
-
|
|
12006
|
+
env: Object.fromEntries(
|
|
12007
|
+
Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
|
|
12008
|
+
),
|
|
11430
12009
|
stdio: ["ignore", "pipe", "pipe"]
|
|
11431
12010
|
});
|
|
11432
12011
|
const queue = [];
|
|
@@ -11471,8 +12050,8 @@ async function* runCommandStreaming(cwd, command) {
|
|
|
11471
12050
|
}
|
|
11472
12051
|
}
|
|
11473
12052
|
async function* createScaffoldedProjectStreaming(options) {
|
|
11474
|
-
const projectPath =
|
|
11475
|
-
if (!
|
|
12053
|
+
const projectPath = join16(options.parentDir, options.threadId);
|
|
12054
|
+
if (!existsSync12(options.parentDir)) {
|
|
11476
12055
|
yield {
|
|
11477
12056
|
type: "result",
|
|
11478
12057
|
result: {
|
|
@@ -11559,7 +12138,7 @@ async function* createScaffoldedProjectStreaming(options) {
|
|
|
11559
12138
|
}
|
|
11560
12139
|
try {
|
|
11561
12140
|
const projectName2 = getProjectName(options.projectName);
|
|
11562
|
-
const projectSubDir =
|
|
12141
|
+
const projectSubDir = join16(projectPath, projectName2);
|
|
11563
12142
|
try {
|
|
11564
12143
|
const subDirStat = await stat2(projectSubDir);
|
|
11565
12144
|
if (subDirStat.isDirectory()) {
|
|
@@ -11569,8 +12148,8 @@ async function* createScaffoldedProjectStreaming(options) {
|
|
|
11569
12148
|
};
|
|
11570
12149
|
const items = await readdir7(projectSubDir);
|
|
11571
12150
|
for (const item of items) {
|
|
11572
|
-
const oldPath =
|
|
11573
|
-
const newPath =
|
|
12151
|
+
const oldPath = join16(projectSubDir, item);
|
|
12152
|
+
const newPath = join16(projectPath, item);
|
|
11574
12153
|
await rename(oldPath, newPath);
|
|
11575
12154
|
}
|
|
11576
12155
|
await rm2(projectSubDir, { recursive: true, force: true });
|
|
@@ -11586,7 +12165,7 @@ async function* createScaffoldedProjectStreaming(options) {
|
|
|
11586
12165
|
};
|
|
11587
12166
|
}
|
|
11588
12167
|
const installCwd = scaffoldOptions.projectPath;
|
|
11589
|
-
if (
|
|
12168
|
+
if (existsSync12(installCwd)) {
|
|
11590
12169
|
const installCmd = getInstallCommand(options.packageManager);
|
|
11591
12170
|
for await (const event of runCommandStreaming(installCwd, installCmd)) {
|
|
11592
12171
|
if (event.type === "output") {
|
|
@@ -12030,7 +12609,7 @@ var ProjectCreator = class {
|
|
|
12030
12609
|
return name;
|
|
12031
12610
|
}
|
|
12032
12611
|
generateThreadPath(_projectId, threadId) {
|
|
12033
|
-
return
|
|
12612
|
+
return join17(this.rootFolder, threadId);
|
|
12034
12613
|
}
|
|
12035
12614
|
async *createProjectFromFolder() {
|
|
12036
12615
|
await loadUtils();
|
|
@@ -12180,19 +12759,19 @@ var ProjectCreator = class {
|
|
|
12180
12759
|
}
|
|
12181
12760
|
async findAllPackageJsonFiles(rootPath, currentPath, setupScripts) {
|
|
12182
12761
|
try {
|
|
12183
|
-
await access3(
|
|
12184
|
-
const relativePath =
|
|
12762
|
+
await access3(join17(currentPath, "package.json"));
|
|
12763
|
+
const relativePath = relative5(rootPath, currentPath);
|
|
12185
12764
|
let installCommand = "";
|
|
12186
12765
|
try {
|
|
12187
|
-
await access3(
|
|
12766
|
+
await access3(join17(currentPath, "yarn.lock"));
|
|
12188
12767
|
installCommand = "yarn install";
|
|
12189
12768
|
} catch {
|
|
12190
12769
|
try {
|
|
12191
|
-
await access3(
|
|
12770
|
+
await access3(join17(currentPath, "bun.lock"));
|
|
12192
12771
|
installCommand = "bun install";
|
|
12193
12772
|
} catch {
|
|
12194
12773
|
try {
|
|
12195
|
-
await access3(
|
|
12774
|
+
await access3(join17(currentPath, "pnpm-lock.yaml"));
|
|
12196
12775
|
installCommand = "pnpm install";
|
|
12197
12776
|
} catch {
|
|
12198
12777
|
installCommand = "npm install";
|
|
@@ -12209,7 +12788,7 @@ var ProjectCreator = class {
|
|
|
12209
12788
|
try {
|
|
12210
12789
|
const entries = await readdir8(currentPath);
|
|
12211
12790
|
for (const entry of entries) {
|
|
12212
|
-
const entryPath =
|
|
12791
|
+
const entryPath = join17(currentPath, entry);
|
|
12213
12792
|
try {
|
|
12214
12793
|
const stats = await stat3(entryPath);
|
|
12215
12794
|
if (stats.isDirectory() && !entry.startsWith(".") && entry !== "node_modules") {
|
|
@@ -12528,7 +13107,7 @@ var ProjectManagerImpl = class {
|
|
|
12528
13107
|
const session = this.terminalSessionManager.getOrCreateSession(threadId, thread.path);
|
|
12529
13108
|
let workingDir;
|
|
12530
13109
|
if (cwd) {
|
|
12531
|
-
workingDir = cwd.startsWith("/") ? cwd :
|
|
13110
|
+
workingDir = cwd.startsWith("/") ? cwd : join18(thread.path, cwd);
|
|
12532
13111
|
this.terminalSessionManager.updateWorkingDirectory(threadId, workingDir);
|
|
12533
13112
|
} else {
|
|
12534
13113
|
workingDir = session.currentWorkingDirectory;
|
|
@@ -12733,9 +13312,9 @@ import { Hono as Hono7 } from "hono";
|
|
|
12733
13312
|
|
|
12734
13313
|
// src/core/env-manager.ts
|
|
12735
13314
|
import { promises as fs } from "fs";
|
|
12736
|
-
import { join as
|
|
13315
|
+
import { join as join19 } from "path";
|
|
12737
13316
|
async function updateEnvFile(keyNames) {
|
|
12738
|
-
const envPath =
|
|
13317
|
+
const envPath = join19(process.cwd(), ".env");
|
|
12739
13318
|
let content = "";
|
|
12740
13319
|
try {
|
|
12741
13320
|
content = await fs.readFile(envPath, "utf-8");
|
|
@@ -12764,7 +13343,7 @@ async function updateEnvFile(keyNames) {
|
|
|
12764
13343
|
await fs.writeFile(envPath, newLines.join("\n") + "\n", "utf-8");
|
|
12765
13344
|
}
|
|
12766
13345
|
async function readEnvFile() {
|
|
12767
|
-
const envPath =
|
|
13346
|
+
const envPath = join19(process.cwd(), ".env");
|
|
12768
13347
|
const envMap = {};
|
|
12769
13348
|
try {
|
|
12770
13349
|
const content = await fs.readFile(envPath, "utf-8");
|
|
@@ -13183,9 +13762,9 @@ function createScaffoldRoutes(projectManager) {
|
|
|
13183
13762
|
}
|
|
13184
13763
|
|
|
13185
13764
|
// src/features/slash-commands/slash-commands.manager.ts
|
|
13186
|
-
import { readdir as readdir9, readFile as
|
|
13187
|
-
import { join as
|
|
13188
|
-
import { existsSync as
|
|
13765
|
+
import { readdir as readdir9, readFile as readFile11, mkdir as mkdir3, writeFile as writeFile2, unlink as unlink2 } from "fs/promises";
|
|
13766
|
+
import { join as join20, basename as basename3, extname as extname4 } from "path";
|
|
13767
|
+
import { existsSync as existsSync13 } from "fs";
|
|
13189
13768
|
import { homedir as homedir7 } from "os";
|
|
13190
13769
|
function slugify(filename) {
|
|
13191
13770
|
return filename.toLowerCase().replace(/\s+/g, "-").replace(/[^a-z0-9-]/g, "").replace(/-+/g, "-").replace(/^-+|-+$/g, "");
|
|
@@ -13230,10 +13809,10 @@ function parseFrontmatter3(markdown) {
|
|
|
13230
13809
|
return { metadata, content };
|
|
13231
13810
|
}
|
|
13232
13811
|
function getGlobalCommandsDir() {
|
|
13233
|
-
return
|
|
13812
|
+
return join20(homedir7(), ".agents", "commands");
|
|
13234
13813
|
}
|
|
13235
13814
|
function getProjectCommandsDir(threadPath) {
|
|
13236
|
-
return
|
|
13815
|
+
return join20(threadPath, ".agents", "commands");
|
|
13237
13816
|
}
|
|
13238
13817
|
var SlashCommandManager = class _SlashCommandManager {
|
|
13239
13818
|
/**
|
|
@@ -13248,14 +13827,14 @@ var SlashCommandManager = class _SlashCommandManager {
|
|
|
13248
13827
|
async loadCommands(threadPath) {
|
|
13249
13828
|
const commands = /* @__PURE__ */ new Map();
|
|
13250
13829
|
const globalDir = getGlobalCommandsDir();
|
|
13251
|
-
if (
|
|
13830
|
+
if (existsSync13(globalDir)) {
|
|
13252
13831
|
const globalCommands = await this.loadCommandsFromDir(globalDir, "global");
|
|
13253
13832
|
for (const cmd of globalCommands) {
|
|
13254
13833
|
commands.set(cmd.name, cmd);
|
|
13255
13834
|
}
|
|
13256
13835
|
}
|
|
13257
13836
|
const projectDir = getProjectCommandsDir(threadPath);
|
|
13258
|
-
if (
|
|
13837
|
+
if (existsSync13(projectDir)) {
|
|
13259
13838
|
const projectCommands = await this.loadCommandsFromDir(projectDir, "project");
|
|
13260
13839
|
for (const cmd of projectCommands) {
|
|
13261
13840
|
if (!_SlashCommandManager.BUILT_IN_COMMANDS.has(cmd.name)) {
|
|
@@ -13274,10 +13853,10 @@ var SlashCommandManager = class _SlashCommandManager {
|
|
|
13274
13853
|
const files = await readdir9(dir);
|
|
13275
13854
|
for (const file of files) {
|
|
13276
13855
|
if (!file.endsWith(".md")) continue;
|
|
13277
|
-
const filePath =
|
|
13278
|
-
const fileContent = await
|
|
13856
|
+
const filePath = join20(dir, file);
|
|
13857
|
+
const fileContent = await readFile11(filePath, "utf-8");
|
|
13279
13858
|
const { metadata, content } = parseFrontmatter3(fileContent);
|
|
13280
|
-
const nameWithoutExt = basename3(file,
|
|
13859
|
+
const nameWithoutExt = basename3(file, extname4(file));
|
|
13281
13860
|
const commandName = slugify(nameWithoutExt);
|
|
13282
13861
|
if (!commandName) continue;
|
|
13283
13862
|
commands.push({
|
|
@@ -13305,12 +13884,12 @@ var SlashCommandManager = class _SlashCommandManager {
|
|
|
13305
13884
|
if (!dir) {
|
|
13306
13885
|
throw new Error("threadPath required for project-scoped commands");
|
|
13307
13886
|
}
|
|
13308
|
-
if (!
|
|
13887
|
+
if (!existsSync13(dir)) {
|
|
13309
13888
|
await mkdir3(dir, { recursive: true });
|
|
13310
13889
|
}
|
|
13311
13890
|
const filename = `${name}.md`;
|
|
13312
|
-
const filePath =
|
|
13313
|
-
if (
|
|
13891
|
+
const filePath = join20(dir, filename);
|
|
13892
|
+
if (existsSync13(filePath)) {
|
|
13314
13893
|
throw new Error(`Command already exists: ${name}`);
|
|
13315
13894
|
}
|
|
13316
13895
|
let markdown = "";
|
|
@@ -13344,7 +13923,7 @@ var SlashCommandManager = class _SlashCommandManager {
|
|
|
13344
13923
|
* Update an existing command
|
|
13345
13924
|
*/
|
|
13346
13925
|
async updateCommand(filePath, content, metadata) {
|
|
13347
|
-
if (!
|
|
13926
|
+
if (!existsSync13(filePath)) {
|
|
13348
13927
|
throw new Error(`Command file not found: ${filePath}`);
|
|
13349
13928
|
}
|
|
13350
13929
|
let markdown = "";
|
|
@@ -13371,7 +13950,7 @@ var SlashCommandManager = class _SlashCommandManager {
|
|
|
13371
13950
|
* Delete a command
|
|
13372
13951
|
*/
|
|
13373
13952
|
async deleteCommand(filePath) {
|
|
13374
|
-
if (!
|
|
13953
|
+
if (!existsSync13(filePath)) {
|
|
13375
13954
|
throw new Error(`Command file not found: ${filePath}`);
|
|
13376
13955
|
}
|
|
13377
13956
|
await unlink2(filePath);
|
|
@@ -13717,7 +14296,7 @@ function createSlashCommandRoutes(router, threadManager) {
|
|
|
13717
14296
|
|
|
13718
14297
|
// src/features/rules/rules-post.route.ts
|
|
13719
14298
|
import { promises as fs2 } from "fs";
|
|
13720
|
-
import
|
|
14299
|
+
import path3 from "path";
|
|
13721
14300
|
async function createRule(c, projectManager) {
|
|
13722
14301
|
try {
|
|
13723
14302
|
const body = await c.req.json();
|
|
@@ -13772,7 +14351,7 @@ async function createRule(c, projectManager) {
|
|
|
13772
14351
|
);
|
|
13773
14352
|
}
|
|
13774
14353
|
const projectPath = project.path;
|
|
13775
|
-
const rulesDir =
|
|
14354
|
+
const rulesDir = path3.join(projectPath, ".agents", "rules");
|
|
13776
14355
|
await fs2.mkdir(rulesDir, { recursive: true });
|
|
13777
14356
|
const ruleContent = `---
|
|
13778
14357
|
description: ${description.replace(/"/g, '\\"')}
|
|
@@ -13780,7 +14359,7 @@ alwaysApply: ${alwaysApply}
|
|
|
13780
14359
|
---
|
|
13781
14360
|
|
|
13782
14361
|
Describe your rules here as markdown which will be used when Tarsk sees is prompted with a request that is related to the description of this rule.`;
|
|
13783
|
-
const ruleFilePath =
|
|
14362
|
+
const ruleFilePath = path3.join(rulesDir, `${name}.md`);
|
|
13784
14363
|
await fs2.writeFile(ruleFilePath, ruleContent, "utf-8");
|
|
13785
14364
|
return c.json(
|
|
13786
14365
|
{
|
|
@@ -13904,7 +14483,7 @@ function createRuleRoutes(router, projectManager) {
|
|
|
13904
14483
|
// src/features/threads/threads.manager.ts
|
|
13905
14484
|
init_utils();
|
|
13906
14485
|
import { randomUUID as randomUUID8 } from "crypto";
|
|
13907
|
-
import { join as
|
|
14486
|
+
import { join as join21 } from "path";
|
|
13908
14487
|
import { execSync as execSync3 } from "child_process";
|
|
13909
14488
|
import { rm as rm4, stat as stat4, mkdir as mkdir4 } from "fs/promises";
|
|
13910
14489
|
init_database();
|
|
@@ -14141,6 +14720,13 @@ var ThreadManagerImpl = class {
|
|
|
14141
14720
|
const threads = await this.metadataManager.loadThreads();
|
|
14142
14721
|
return threads.filter((t) => t.projectId === projectId);
|
|
14143
14722
|
}
|
|
14723
|
+
/**
|
|
14724
|
+
* Lists all active threads across all projects in a single query
|
|
14725
|
+
* @returns Array of all active threads
|
|
14726
|
+
*/
|
|
14727
|
+
async listAllThreads() {
|
|
14728
|
+
return this.metadataManager.loadThreads();
|
|
14729
|
+
}
|
|
14144
14730
|
/**
|
|
14145
14731
|
* Deletes a thread and removes its directory
|
|
14146
14732
|
*
|
|
@@ -14234,9 +14820,12 @@ var ThreadManagerImpl = class {
|
|
|
14234
14820
|
* @yields ThreadEvent progress events
|
|
14235
14821
|
*/
|
|
14236
14822
|
async *runSetupScript(threadPath, commandLine) {
|
|
14237
|
-
const
|
|
14238
|
-
|
|
14239
|
-
cwd: threadPath
|
|
14823
|
+
const { shell, args: shellArgs } = getShellConfig();
|
|
14824
|
+
const child = spawnProcess(shell, [...shellArgs, commandLine], {
|
|
14825
|
+
cwd: threadPath,
|
|
14826
|
+
env: Object.fromEntries(
|
|
14827
|
+
Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
|
|
14828
|
+
)
|
|
14240
14829
|
});
|
|
14241
14830
|
const decoder = new TextDecoder();
|
|
14242
14831
|
let resolveNext = () => {
|
|
@@ -14298,7 +14887,7 @@ var ThreadManagerImpl = class {
|
|
|
14298
14887
|
* - 7.4 - THE CLI SHALL ensure each Thread clone is stored in a unique directory path
|
|
14299
14888
|
*/
|
|
14300
14889
|
generateThreadPath(_projectId, threadId) {
|
|
14301
|
-
return
|
|
14890
|
+
return join21(this.rootFolder, threadId);
|
|
14302
14891
|
}
|
|
14303
14892
|
/**
|
|
14304
14893
|
* Generates a thread title if not provided
|
|
@@ -14424,7 +15013,7 @@ async function handleCreateThread(c, threadManager, gitManager) {
|
|
|
14424
15013
|
}
|
|
14425
15014
|
|
|
14426
15015
|
// src/features/threads/threads-list.route.ts
|
|
14427
|
-
function
|
|
15016
|
+
function computeGitStatusDot2(status) {
|
|
14428
15017
|
if (status.hasChanges) return true;
|
|
14429
15018
|
if (status.commitsAheadOfDefault && !status.prExists) return true;
|
|
14430
15019
|
if (status.status === "Behind" || status.status === "Diverged") return true;
|
|
@@ -14451,7 +15040,7 @@ async function handleListThreads(c, threadManager, db) {
|
|
|
14451
15040
|
);
|
|
14452
15041
|
const threadsWithStatus = threads.map((thread) => {
|
|
14453
15042
|
const cached = statusCache.get(thread.id);
|
|
14454
|
-
const gitStatusDot = cached ?
|
|
15043
|
+
const gitStatusDot = cached ? computeGitStatusDot2(cached) : false;
|
|
14455
15044
|
return { ...thread, gitStatusDot };
|
|
14456
15045
|
});
|
|
14457
15046
|
return c.json(threadsWithStatus);
|
|
@@ -14491,26 +15080,63 @@ async function handleDeleteThread(c, threadManager) {
|
|
|
14491
15080
|
}
|
|
14492
15081
|
|
|
14493
15082
|
// src/core/project-inspector.ts
|
|
14494
|
-
import { readFile as
|
|
14495
|
-
import { existsSync as
|
|
14496
|
-
import { join as
|
|
15083
|
+
import { readFile as readFile12, readdir as readdir10 } from "fs/promises";
|
|
15084
|
+
import { existsSync as existsSync14 } from "fs";
|
|
15085
|
+
import { join as join22, basename as basename4, relative as relative6 } from "path";
|
|
14497
15086
|
import { glob as glob2 } from "glob";
|
|
14498
|
-
|
|
15087
|
+
|
|
15088
|
+
// src/features/project-scripts/project-scripts.database.ts
|
|
15089
|
+
init_database();
|
|
15090
|
+
import { randomUUID as randomUUID9 } from "crypto";
|
|
15091
|
+
async function getScriptsByProject(db, projectId) {
|
|
15092
|
+
const result = await db.execute({
|
|
15093
|
+
sql: `SELECT id, projectId, workspace, name, command, friendlyName, updatedAt
|
|
15094
|
+
FROM project_scripts WHERE projectId = ? ORDER BY workspace ASC, name ASC`,
|
|
15095
|
+
args: [projectId]
|
|
15096
|
+
});
|
|
15097
|
+
return result.rows;
|
|
15098
|
+
}
|
|
15099
|
+
async function upsertProjectScripts(projectId, scripts) {
|
|
15100
|
+
const db = await getDatabase();
|
|
15101
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
15102
|
+
await db.execute({
|
|
15103
|
+
sql: `DELETE FROM project_scripts WHERE projectId = ?`,
|
|
15104
|
+
args: [projectId]
|
|
15105
|
+
});
|
|
15106
|
+
for (const script of scripts) {
|
|
15107
|
+
await db.execute({
|
|
15108
|
+
sql: `INSERT INTO project_scripts (id, projectId, workspace, name, command, friendlyName, updatedAt)
|
|
15109
|
+
VALUES (?, ?, ?, ?, ?, ?, ?)`,
|
|
15110
|
+
args: [
|
|
15111
|
+
randomUUID9(),
|
|
15112
|
+
projectId,
|
|
15113
|
+
script.workspace,
|
|
15114
|
+
script.name,
|
|
15115
|
+
script.command,
|
|
15116
|
+
script.friendlyName,
|
|
15117
|
+
now
|
|
15118
|
+
]
|
|
15119
|
+
});
|
|
15120
|
+
}
|
|
15121
|
+
}
|
|
15122
|
+
|
|
15123
|
+
// src/core/project-inspector.ts
|
|
15124
|
+
async function inspectProject(projectPath, projectId) {
|
|
14499
15125
|
const packageManager = await detectPackageManager2(projectPath);
|
|
14500
15126
|
const repoType = await detectMonoRepoType(projectPath);
|
|
14501
15127
|
const workspaces = await resolveWorkspaces(projectPath, repoType);
|
|
14502
15128
|
const scripts = await collectScripts(projectPath, workspaces, packageManager, repoType);
|
|
14503
|
-
await
|
|
15129
|
+
await upsertProjectScripts(projectId, scripts);
|
|
14504
15130
|
return scripts;
|
|
14505
15131
|
}
|
|
14506
15132
|
async function detectPackageManager2(projectPath) {
|
|
14507
|
-
if (
|
|
15133
|
+
if (existsSync14(join22(projectPath, "bun.lockb")) || existsSync14(join22(projectPath, "bun.lock"))) {
|
|
14508
15134
|
return "bun";
|
|
14509
15135
|
}
|
|
14510
|
-
if (
|
|
15136
|
+
if (existsSync14(join22(projectPath, "pnpm-lock.yaml"))) {
|
|
14511
15137
|
return "pnpm";
|
|
14512
15138
|
}
|
|
14513
|
-
if (
|
|
15139
|
+
if (existsSync14(join22(projectPath, "yarn.lock"))) {
|
|
14514
15140
|
return "yarn";
|
|
14515
15141
|
}
|
|
14516
15142
|
try {
|
|
@@ -14529,8 +15155,8 @@ async function detectPackageManager2(projectPath) {
|
|
|
14529
15155
|
return "npm";
|
|
14530
15156
|
}
|
|
14531
15157
|
async function detectMonoRepoType(projectPath) {
|
|
14532
|
-
const hasPnpmWorkspace =
|
|
14533
|
-
if (
|
|
15158
|
+
const hasPnpmWorkspace = existsSync14(join22(projectPath, "pnpm-workspace.yaml"));
|
|
15159
|
+
if (existsSync14(join22(projectPath, "nx.json"))) {
|
|
14534
15160
|
return "nx";
|
|
14535
15161
|
}
|
|
14536
15162
|
const pkg = await readPackageJson(projectPath);
|
|
@@ -14544,10 +15170,10 @@ async function detectMonoRepoType(projectPath) {
|
|
|
14544
15170
|
if (hasPnpmWorkspace) {
|
|
14545
15171
|
return "pnpm";
|
|
14546
15172
|
}
|
|
14547
|
-
if (
|
|
15173
|
+
if (existsSync14(join22(projectPath, "lerna.json"))) {
|
|
14548
15174
|
return "lerna";
|
|
14549
15175
|
}
|
|
14550
|
-
if (
|
|
15176
|
+
if (existsSync14(join22(projectPath, "turbo.json"))) {
|
|
14551
15177
|
return "turbo";
|
|
14552
15178
|
}
|
|
14553
15179
|
const folderWorkspaces = await detectFolderBasedWorkspaces(projectPath);
|
|
@@ -14588,8 +15214,8 @@ async function resolvePackageJsonWorkspaces(projectPath) {
|
|
|
14588
15214
|
});
|
|
14589
15215
|
const workspaces = [];
|
|
14590
15216
|
for (const folder of folders) {
|
|
14591
|
-
const absPath =
|
|
14592
|
-
if (
|
|
15217
|
+
const absPath = join22(projectPath, folder);
|
|
15218
|
+
if (existsSync14(join22(absPath, "package.json"))) {
|
|
14593
15219
|
const wsPkg = await readPackageJson(absPath);
|
|
14594
15220
|
workspaces.push({
|
|
14595
15221
|
name: wsPkg?.name ?? basename4(folder),
|
|
@@ -14605,12 +15231,12 @@ async function resolvePackageJsonWorkspaces(projectPath) {
|
|
|
14605
15231
|
return workspaces;
|
|
14606
15232
|
}
|
|
14607
15233
|
async function resolvePnpmWorkspaces(projectPath) {
|
|
14608
|
-
const yamlPath =
|
|
14609
|
-
if (!
|
|
15234
|
+
const yamlPath = join22(projectPath, "pnpm-workspace.yaml");
|
|
15235
|
+
if (!existsSync14(yamlPath)) {
|
|
14610
15236
|
return [{ name: "root", folder: projectPath, relativePath: "." }];
|
|
14611
15237
|
}
|
|
14612
15238
|
try {
|
|
14613
|
-
const yaml = await
|
|
15239
|
+
const yaml = await readFile12(yamlPath, "utf-8");
|
|
14614
15240
|
const patterns = [];
|
|
14615
15241
|
for (const line of yaml.split("\n")) {
|
|
14616
15242
|
const trimmed = line.trim();
|
|
@@ -14631,8 +15257,8 @@ async function resolvePnpmWorkspaces(projectPath) {
|
|
|
14631
15257
|
});
|
|
14632
15258
|
const workspaces = [];
|
|
14633
15259
|
for (const folder of folders) {
|
|
14634
|
-
const absPath =
|
|
14635
|
-
if (
|
|
15260
|
+
const absPath = join22(projectPath, folder);
|
|
15261
|
+
if (existsSync14(join22(absPath, "package.json"))) {
|
|
14636
15262
|
const wsPkg = await readPackageJson(absPath);
|
|
14637
15263
|
workspaces.push({
|
|
14638
15264
|
name: wsPkg?.name ?? basename4(folder),
|
|
@@ -14651,12 +15277,12 @@ async function resolvePnpmWorkspaces(projectPath) {
|
|
|
14651
15277
|
}
|
|
14652
15278
|
}
|
|
14653
15279
|
async function resolveLernaWorkspaces(projectPath) {
|
|
14654
|
-
const lernaPath =
|
|
14655
|
-
if (!
|
|
15280
|
+
const lernaPath = join22(projectPath, "lerna.json");
|
|
15281
|
+
if (!existsSync14(lernaPath)) {
|
|
14656
15282
|
return [{ name: "root", folder: projectPath, relativePath: "." }];
|
|
14657
15283
|
}
|
|
14658
15284
|
try {
|
|
14659
|
-
const content = await
|
|
15285
|
+
const content = await readFile12(lernaPath, "utf-8");
|
|
14660
15286
|
const lerna = JSON.parse(content);
|
|
14661
15287
|
const patterns = lerna.packages ?? ["packages/*"];
|
|
14662
15288
|
const folders = await glob2(patterns, {
|
|
@@ -14665,8 +15291,8 @@ async function resolveLernaWorkspaces(projectPath) {
|
|
|
14665
15291
|
});
|
|
14666
15292
|
const workspaces = [];
|
|
14667
15293
|
for (const folder of folders) {
|
|
14668
|
-
const absPath =
|
|
14669
|
-
if (
|
|
15294
|
+
const absPath = join22(projectPath, folder);
|
|
15295
|
+
if (existsSync14(join22(absPath, "package.json"))) {
|
|
14670
15296
|
const wsPkg = await readPackageJson(absPath);
|
|
14671
15297
|
workspaces.push({
|
|
14672
15298
|
name: wsPkg?.name ?? basename4(folder),
|
|
@@ -14686,17 +15312,17 @@ async function resolveLernaWorkspaces(projectPath) {
|
|
|
14686
15312
|
}
|
|
14687
15313
|
async function resolveNxWorkspaces(projectPath) {
|
|
14688
15314
|
const workspaces = [];
|
|
14689
|
-
const workspaceJsonPath =
|
|
14690
|
-
if (
|
|
15315
|
+
const workspaceJsonPath = join22(projectPath, "workspace.json");
|
|
15316
|
+
if (existsSync14(workspaceJsonPath)) {
|
|
14691
15317
|
try {
|
|
14692
|
-
const content = await
|
|
15318
|
+
const content = await readFile12(workspaceJsonPath, "utf-8");
|
|
14693
15319
|
const wsJson = JSON.parse(content);
|
|
14694
15320
|
for (const [name, value] of Object.entries(wsJson.projects ?? {})) {
|
|
14695
15321
|
const folder = typeof value === "string" ? value : value.root;
|
|
14696
15322
|
if (folder) {
|
|
14697
15323
|
workspaces.push({
|
|
14698
15324
|
name,
|
|
14699
|
-
folder:
|
|
15325
|
+
folder: join22(projectPath, folder),
|
|
14700
15326
|
relativePath: folder
|
|
14701
15327
|
});
|
|
14702
15328
|
}
|
|
@@ -14711,13 +15337,13 @@ async function resolveNxWorkspaces(projectPath) {
|
|
|
14711
15337
|
});
|
|
14712
15338
|
for (const file of projectJsonFiles) {
|
|
14713
15339
|
try {
|
|
14714
|
-
const content = await
|
|
15340
|
+
const content = await readFile12(join22(projectPath, file), "utf-8");
|
|
14715
15341
|
const project = JSON.parse(content);
|
|
14716
15342
|
if (project.name) {
|
|
14717
15343
|
const folder = file.replace(/\/project\.json$/, "");
|
|
14718
15344
|
workspaces.push({
|
|
14719
15345
|
name: project.name,
|
|
14720
|
-
folder:
|
|
15346
|
+
folder: join22(projectPath, folder),
|
|
14721
15347
|
relativePath: folder
|
|
14722
15348
|
});
|
|
14723
15349
|
}
|
|
@@ -14725,15 +15351,15 @@ async function resolveNxWorkspaces(projectPath) {
|
|
|
14725
15351
|
}
|
|
14726
15352
|
}
|
|
14727
15353
|
if (workspaces.length > 0) return workspaces;
|
|
14728
|
-
const appsDir =
|
|
14729
|
-
if (
|
|
15354
|
+
const appsDir = join22(projectPath, "apps");
|
|
15355
|
+
if (existsSync14(appsDir)) {
|
|
14730
15356
|
const entries = await readdir10(appsDir, { withFileTypes: true });
|
|
14731
15357
|
for (const entry of entries) {
|
|
14732
15358
|
if (entry.isDirectory() && !entry.name.startsWith(".")) {
|
|
14733
|
-
const folder =
|
|
15359
|
+
const folder = join22("apps", entry.name);
|
|
14734
15360
|
workspaces.push({
|
|
14735
15361
|
name: entry.name,
|
|
14736
|
-
folder:
|
|
15362
|
+
folder: join22(projectPath, folder),
|
|
14737
15363
|
relativePath: folder
|
|
14738
15364
|
});
|
|
14739
15365
|
}
|
|
@@ -14768,13 +15394,13 @@ async function detectFolderBasedWorkspaces(projectPath) {
|
|
|
14768
15394
|
const results = [];
|
|
14769
15395
|
for (const entry of entries) {
|
|
14770
15396
|
if (entry.isDirectory() && !entry.name.startsWith(".") && entry.name !== "node_modules" && entry.name !== "dist" && entry.name !== "build") {
|
|
14771
|
-
const pkgPath =
|
|
14772
|
-
if (
|
|
15397
|
+
const pkgPath = join22(projectPath, entry.name, "package.json");
|
|
15398
|
+
if (existsSync14(pkgPath)) {
|
|
14773
15399
|
try {
|
|
14774
|
-
const content = await
|
|
15400
|
+
const content = await readFile12(pkgPath, "utf-8");
|
|
14775
15401
|
const pkg = JSON.parse(content);
|
|
14776
15402
|
if (pkg.dependencies || pkg.devDependencies || pkg.scripts) {
|
|
14777
|
-
results.push({ name: entry.name, absPath:
|
|
15403
|
+
results.push({ name: entry.name, absPath: join22(projectPath, entry.name) });
|
|
14778
15404
|
}
|
|
14779
15405
|
} catch {
|
|
14780
15406
|
}
|
|
@@ -14812,7 +15438,7 @@ function buildRunCommand(scriptName, workspace, packageManager, repoType, projec
|
|
|
14812
15438
|
return `npm run ${scriptName} --workspace=${workspace.relativePath}`;
|
|
14813
15439
|
}
|
|
14814
15440
|
if (!isRoot) {
|
|
14815
|
-
const relPath =
|
|
15441
|
+
const relPath = relative6(projectPath, workspace.folder);
|
|
14816
15442
|
return `cd ${relPath} && ${runCmd}`;
|
|
14817
15443
|
}
|
|
14818
15444
|
return runCmd;
|
|
@@ -14830,7 +15456,7 @@ function pmRunCommand(packageManager, scriptName) {
|
|
|
14830
15456
|
}
|
|
14831
15457
|
async function readPackageJson(dir) {
|
|
14832
15458
|
try {
|
|
14833
|
-
const content = await
|
|
15459
|
+
const content = await readFile12(join22(dir, "package.json"), "utf-8");
|
|
14834
15460
|
return JSON.parse(content);
|
|
14835
15461
|
} catch {
|
|
14836
15462
|
return null;
|
|
@@ -14849,17 +15475,6 @@ function getWorkspaceGlobs(pkg) {
|
|
|
14849
15475
|
function toFriendlyName(scriptName) {
|
|
14850
15476
|
return scriptName.replace(/[-:_]/g, " ").replace(/\b\w/g, (char) => char.toUpperCase());
|
|
14851
15477
|
}
|
|
14852
|
-
async function writeProjectScripts(projectPath, scripts) {
|
|
14853
|
-
const agentsDir = join21(projectPath, ".agents");
|
|
14854
|
-
if (!existsSync15(agentsDir)) {
|
|
14855
|
-
await mkdir5(agentsDir, { recursive: true });
|
|
14856
|
-
}
|
|
14857
|
-
await writeFile3(
|
|
14858
|
-
join21(agentsDir, "project-scripts.json"),
|
|
14859
|
-
JSON.stringify(scripts, null, 2),
|
|
14860
|
-
"utf-8"
|
|
14861
|
-
);
|
|
14862
|
-
}
|
|
14863
15478
|
|
|
14864
15479
|
// src/features/threads/threads-select.route.ts
|
|
14865
15480
|
async function handleSelectThread(c, threadManager) {
|
|
@@ -14873,7 +15488,7 @@ async function handleSelectThread(c, threadManager) {
|
|
|
14873
15488
|
return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
|
|
14874
15489
|
}
|
|
14875
15490
|
await threadManager.selectThread(threadId);
|
|
14876
|
-
inspectProject(thread.path).catch((err) => {
|
|
15491
|
+
inspectProject(thread.path, thread.projectId).catch((err) => {
|
|
14877
15492
|
console.error(`Failed to inspect project scripts for thread ${threadId}:`, err);
|
|
14878
15493
|
});
|
|
14879
15494
|
return successResponse(c, { success: true, message: "Thread selected successfully", threadId });
|
|
@@ -15044,8 +15659,8 @@ async function handleListThreadFiles(c, threadManager) {
|
|
|
15044
15659
|
}
|
|
15045
15660
|
|
|
15046
15661
|
// src/features/threads/threads-explorer.route.ts
|
|
15047
|
-
import { readdir as readdir11, readFile as
|
|
15048
|
-
import { join as
|
|
15662
|
+
import { readdir as readdir11, readFile as readFile13, rename as rename2, writeFile as writeFile3, mkdir as mkdir5, rm as rm5, access as access4 } from "fs/promises";
|
|
15663
|
+
import { join as join23 } from "path";
|
|
15049
15664
|
import { spawn as spawn2 } from "child_process";
|
|
15050
15665
|
var Utils3 = null;
|
|
15051
15666
|
var utilsLoaded3 = false;
|
|
@@ -15105,7 +15720,7 @@ async function handleListThreadExplorerDir(c, threadManager) {
|
|
|
15105
15720
|
const entries = dirents.filter((d) => !(isRoot && d.name === ".git")).map((d) => ({
|
|
15106
15721
|
name: d.name,
|
|
15107
15722
|
type: d.isDirectory() ? "folder" : "file",
|
|
15108
|
-
path: queryPath ?
|
|
15723
|
+
path: queryPath ? join23(queryPath, d.name) : d.name
|
|
15109
15724
|
})).sort((a, b) => {
|
|
15110
15725
|
if (a.type !== b.type) return a.type === "folder" ? -1 : 1;
|
|
15111
15726
|
return a.name.localeCompare(b.name);
|
|
@@ -15241,8 +15856,8 @@ async function handleCreateExplorerFile(c, threadManager) {
|
|
|
15241
15856
|
}
|
|
15242
15857
|
absParent = validated;
|
|
15243
15858
|
}
|
|
15244
|
-
await
|
|
15245
|
-
await
|
|
15859
|
+
await mkdir5(absParent, { recursive: true });
|
|
15860
|
+
await writeFile3(join23(absParent, name), "", "utf-8");
|
|
15246
15861
|
const relPath = dirPath ? `${dirPath}/${name}` : name;
|
|
15247
15862
|
return c.json({ success: true, path: relPath });
|
|
15248
15863
|
} catch (error) {
|
|
@@ -15296,7 +15911,7 @@ async function handleGetExplorerMedia(c, threadManager) {
|
|
|
15296
15911
|
}
|
|
15297
15912
|
const ext = filePath.split(".").pop()?.toLowerCase() ?? "";
|
|
15298
15913
|
const contentType = MEDIA_MIME_TYPES[ext] ?? "application/octet-stream";
|
|
15299
|
-
const data = await
|
|
15914
|
+
const data = await readFile13(absPath);
|
|
15300
15915
|
return new Response(data, { headers: { "Content-Type": contentType } });
|
|
15301
15916
|
} catch (error) {
|
|
15302
15917
|
return errorResponse(
|
|
@@ -15338,7 +15953,7 @@ async function handleCreateExplorerFolder(c, threadManager) {
|
|
|
15338
15953
|
}
|
|
15339
15954
|
absParent = validated;
|
|
15340
15955
|
}
|
|
15341
|
-
await
|
|
15956
|
+
await mkdir5(join23(absParent, name), { recursive: true });
|
|
15342
15957
|
const relPath = dirPath ? `${dirPath}/${name}` : name;
|
|
15343
15958
|
return c.json({ success: true, path: relPath });
|
|
15344
15959
|
} catch (error) {
|
|
@@ -15448,15 +16063,15 @@ async function handleOpenThread(c, threadManager) {
|
|
|
15448
16063
|
}
|
|
15449
16064
|
|
|
15450
16065
|
// src/features/threads/threads-conversation-folder-path.route.ts
|
|
15451
|
-
import { join as
|
|
16066
|
+
import { join as join24 } from "path";
|
|
15452
16067
|
async function handleGetConversationFolderPath(c) {
|
|
15453
16068
|
try {
|
|
15454
16069
|
const threadId = c.req.param("id");
|
|
15455
16070
|
if (!threadId) {
|
|
15456
16071
|
return errorResponse(c, ErrorCodes.INVALID_REQUEST, "Thread ID is required", 400);
|
|
15457
16072
|
}
|
|
15458
|
-
const
|
|
15459
|
-
return successResponse(c, { path:
|
|
16073
|
+
const path6 = join24(getDataDir(), threadId);
|
|
16074
|
+
return successResponse(c, { path: path6 });
|
|
15460
16075
|
} catch (error) {
|
|
15461
16076
|
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
15462
16077
|
return errorResponse(
|
|
@@ -15509,13 +16124,13 @@ async function handleFixComments(c, threadManager) {
|
|
|
15509
16124
|
}
|
|
15510
16125
|
|
|
15511
16126
|
// src/features/threads/threads-ai-files.route.ts
|
|
15512
|
-
import { readFile as
|
|
15513
|
-
import { join as
|
|
15514
|
-
import { existsSync as
|
|
16127
|
+
import { readFile as readFile14, writeFile as writeFile5, mkdir as mkdir7, access as access5, rm as rm6 } from "fs/promises";
|
|
16128
|
+
import { join as join26 } from "path";
|
|
16129
|
+
import { existsSync as existsSync15 } from "fs";
|
|
15515
16130
|
|
|
15516
16131
|
// src/features/git/git-download-folder.ts
|
|
15517
|
-
import { mkdir as
|
|
15518
|
-
import { join as
|
|
16132
|
+
import { mkdir as mkdir6, writeFile as writeFile4 } from "fs/promises";
|
|
16133
|
+
import { join as join25, dirname as dirname4 } from "path";
|
|
15519
16134
|
async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
|
|
15520
16135
|
const { token, ref } = options;
|
|
15521
16136
|
const match = repoUrl.replace(/\.git$/, "").match(/github\.com\/([^/]+)\/([^/]+)/);
|
|
@@ -15551,8 +16166,8 @@ async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
|
|
|
15551
16166
|
await Promise.all(
|
|
15552
16167
|
files.map(async (file) => {
|
|
15553
16168
|
const relativePath = file.path.slice(prefix.length);
|
|
15554
|
-
const localPath =
|
|
15555
|
-
await
|
|
16169
|
+
const localPath = join25(destPath, relativePath);
|
|
16170
|
+
await mkdir6(dirname4(localPath), { recursive: true });
|
|
15556
16171
|
const rawUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${refParam}/${file.path}`;
|
|
15557
16172
|
const fileRes = await fetch(rawUrl, { headers });
|
|
15558
16173
|
if (!fileRes.ok) {
|
|
@@ -15560,7 +16175,7 @@ async function downloadGithubFolder(repoUrl, srcPath, destPath, options = {}) {
|
|
|
15560
16175
|
}
|
|
15561
16176
|
let content = Buffer.from(await fileRes.arrayBuffer()).toString("utf8");
|
|
15562
16177
|
content = content.replace(/Claude/gi, "Tarsk");
|
|
15563
|
-
await
|
|
16178
|
+
await writeFile4(localPath, content, "utf8");
|
|
15564
16179
|
console.log(` \u2713 ${relativePath}`);
|
|
15565
16180
|
})
|
|
15566
16181
|
);
|
|
@@ -15645,7 +16260,7 @@ Links to important documentation, tools, or references.
|
|
|
15645
16260
|
} catch {
|
|
15646
16261
|
return c.json({ error: { code: "NOT_FOUND", message: `File not found: ${filePath}` } }, 404);
|
|
15647
16262
|
}
|
|
15648
|
-
const content = await
|
|
16263
|
+
const content = await readFile14(absPath, "utf-8");
|
|
15649
16264
|
return c.json({ content, path: filePath });
|
|
15650
16265
|
} catch (error) {
|
|
15651
16266
|
return errorResponse(
|
|
@@ -15681,9 +16296,9 @@ async function handleSaveThreadAIFile(c, threadManager) {
|
|
|
15681
16296
|
if (!absPath) {
|
|
15682
16297
|
return c.json({ error: { code: "BAD_REQUEST", message: "Invalid file path" } }, 400);
|
|
15683
16298
|
}
|
|
15684
|
-
const parentDir =
|
|
15685
|
-
await
|
|
15686
|
-
await
|
|
16299
|
+
const parentDir = join26(absPath, "..");
|
|
16300
|
+
await mkdir7(parentDir, { recursive: true });
|
|
16301
|
+
await writeFile5(absPath, content, "utf-8");
|
|
15687
16302
|
return c.json({ success: true, path: filePath });
|
|
15688
16303
|
} catch (error) {
|
|
15689
16304
|
return errorResponse(
|
|
@@ -15764,17 +16379,17 @@ async function handleCreateThreadAgent(c, threadManager) {
|
|
|
15764
16379
|
if (!thread) {
|
|
15765
16380
|
return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
|
|
15766
16381
|
}
|
|
15767
|
-
const agentRelPath =
|
|
15768
|
-
const agentAbsPath =
|
|
15769
|
-
const agentFileRelPath =
|
|
15770
|
-
const agentFileAbsPath =
|
|
15771
|
-
if (
|
|
16382
|
+
const agentRelPath = join26(".agents", "agents", name);
|
|
16383
|
+
const agentAbsPath = join26(thread.path, agentRelPath);
|
|
16384
|
+
const agentFileRelPath = join26(agentRelPath, "AGENT.md");
|
|
16385
|
+
const agentFileAbsPath = join26(agentAbsPath, "AGENT.md");
|
|
16386
|
+
if (existsSync15(agentAbsPath)) {
|
|
15772
16387
|
return c.json(
|
|
15773
16388
|
{ error: { code: "CONFLICT", message: `Agent '${name}' already exists` } },
|
|
15774
16389
|
409
|
|
15775
16390
|
);
|
|
15776
16391
|
}
|
|
15777
|
-
await
|
|
16392
|
+
await mkdir7(agentAbsPath, { recursive: true });
|
|
15778
16393
|
const toolsLine = tools ? `
|
|
15779
16394
|
tools: ${tools}` : "";
|
|
15780
16395
|
const agentContent = `---
|
|
@@ -15784,7 +16399,7 @@ description: ${description}${toolsLine}
|
|
|
15784
16399
|
|
|
15785
16400
|
Place your agent system prompt here as markdown. This will be used as the system prompt when this agent is invoked as a subagent.
|
|
15786
16401
|
`;
|
|
15787
|
-
await
|
|
16402
|
+
await writeFile5(agentFileAbsPath, agentContent, "utf-8");
|
|
15788
16403
|
return c.json({
|
|
15789
16404
|
success: true,
|
|
15790
16405
|
path: agentFileRelPath,
|
|
@@ -15831,17 +16446,17 @@ async function handleCreateThreadSkill(c, threadManager) {
|
|
|
15831
16446
|
if (!thread) {
|
|
15832
16447
|
return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
|
|
15833
16448
|
}
|
|
15834
|
-
const skillRelPath =
|
|
15835
|
-
const skillAbsPath =
|
|
15836
|
-
const skillFileRelPath =
|
|
15837
|
-
const skillFileAbsPath =
|
|
15838
|
-
if (
|
|
16449
|
+
const skillRelPath = join26(".agents", "skills", name);
|
|
16450
|
+
const skillAbsPath = join26(thread.path, skillRelPath);
|
|
16451
|
+
const skillFileRelPath = join26(skillRelPath, "SKILL.md");
|
|
16452
|
+
const skillFileAbsPath = join26(skillAbsPath, "SKILL.md");
|
|
16453
|
+
if (existsSync15(skillAbsPath)) {
|
|
15839
16454
|
return c.json(
|
|
15840
16455
|
{ error: { code: "CONFLICT", message: `Skill '${name}' already exists` } },
|
|
15841
16456
|
409
|
|
15842
16457
|
);
|
|
15843
16458
|
}
|
|
15844
|
-
await
|
|
16459
|
+
await mkdir7(skillAbsPath, { recursive: true });
|
|
15845
16460
|
const skillContent = `---
|
|
15846
16461
|
name: ${name}
|
|
15847
16462
|
description: ${description}
|
|
@@ -15849,7 +16464,7 @@ description: ${description}
|
|
|
15849
16464
|
|
|
15850
16465
|
Place your skill instructions here as markdown. This will be used when Tarsk is prompted with a request related to the description of this skill.
|
|
15851
16466
|
`;
|
|
15852
|
-
await
|
|
16467
|
+
await writeFile5(skillFileAbsPath, skillContent, "utf-8");
|
|
15853
16468
|
return c.json({
|
|
15854
16469
|
success: true,
|
|
15855
16470
|
path: skillFileRelPath,
|
|
@@ -15908,9 +16523,7 @@ async function handleInstallGithubFolder(c, threadManager) {
|
|
|
15908
16523
|
}
|
|
15909
16524
|
|
|
15910
16525
|
// src/features/threads/threads-project-scripts.route.ts
|
|
15911
|
-
|
|
15912
|
-
import { join as join26 } from "path";
|
|
15913
|
-
import { existsSync as existsSync17 } from "fs";
|
|
16526
|
+
init_database();
|
|
15914
16527
|
|
|
15915
16528
|
// src/features/projects/script-process-manager.ts
|
|
15916
16529
|
init_utils();
|
|
@@ -15928,9 +16541,13 @@ var ScriptProcessManager = class {
|
|
|
15928
16541
|
if (this.processes.has(key)) {
|
|
15929
16542
|
this.stopScript(key);
|
|
15930
16543
|
}
|
|
15931
|
-
const
|
|
16544
|
+
const { shell, args: shellArgs } = getShellConfig();
|
|
16545
|
+
const child = spawnProcess(shell, [...shellArgs, command], {
|
|
15932
16546
|
cwd,
|
|
15933
16547
|
detached: true,
|
|
16548
|
+
env: Object.fromEntries(
|
|
16549
|
+
Object.entries(getShellEnv()).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
|
|
16550
|
+
),
|
|
15934
16551
|
stdio: ["pipe", "pipe", "pipe"]
|
|
15935
16552
|
});
|
|
15936
16553
|
if (child.pid !== void 0) {
|
|
@@ -16017,12 +16634,8 @@ async function handleGetProjectScripts(c, threadManager) {
|
|
|
16017
16634
|
if (!thread) {
|
|
16018
16635
|
return errorResponse(c, ErrorCodes.THREAD_NOT_FOUND, `Thread not found: ${threadId}`, 404);
|
|
16019
16636
|
}
|
|
16020
|
-
const
|
|
16021
|
-
|
|
16022
|
-
return successResponse(c, { scripts: [] });
|
|
16023
|
-
}
|
|
16024
|
-
const content = await readFile14(scriptsPath, "utf-8");
|
|
16025
|
-
const scripts = JSON.parse(content);
|
|
16637
|
+
const db = await getDatabase();
|
|
16638
|
+
const scripts = await getScriptsByProject(db, thread.projectId);
|
|
16026
16639
|
const enrichedScripts = scripts.map((s) => ({
|
|
16027
16640
|
...s,
|
|
16028
16641
|
isRunning: scriptProcessManager.isRunning(`${thread.projectId}:${s.name}`)
|
|
@@ -16187,7 +16800,7 @@ function createThreadRoutes(threadManager, gitManager, conversationManager) {
|
|
|
16187
16800
|
|
|
16188
16801
|
// src/features/git/git.manager.ts
|
|
16189
16802
|
init_utils();
|
|
16190
|
-
import { mkdir as
|
|
16803
|
+
import { mkdir as mkdir8 } from "fs/promises";
|
|
16191
16804
|
import { dirname as dirname5 } from "path";
|
|
16192
16805
|
var GitManagerImpl = class {
|
|
16193
16806
|
/**
|
|
@@ -16259,7 +16872,7 @@ var GitManagerImpl = class {
|
|
|
16259
16872
|
}
|
|
16260
16873
|
try {
|
|
16261
16874
|
const parentDir = dirname5(targetPath);
|
|
16262
|
-
await
|
|
16875
|
+
await mkdir8(parentDir, { recursive: true });
|
|
16263
16876
|
const gitProcess = spawnProcess("git", ["clone", "--progress", gitUrl, targetPath]);
|
|
16264
16877
|
const eventQueue = [];
|
|
16265
16878
|
let processExited = false;
|
|
@@ -16465,7 +17078,7 @@ var GitManagerImpl = class {
|
|
|
16465
17078
|
}
|
|
16466
17079
|
async initRepository(repoPath) {
|
|
16467
17080
|
try {
|
|
16468
|
-
await
|
|
17081
|
+
await mkdir8(repoPath, { recursive: true });
|
|
16469
17082
|
} catch (error) {
|
|
16470
17083
|
if (error.code !== "EEXIST") {
|
|
16471
17084
|
throw error;
|
|
@@ -16512,11 +17125,11 @@ async function gitUserHandler(c) {
|
|
|
16512
17125
|
}
|
|
16513
17126
|
|
|
16514
17127
|
// src/features/git/git-status.route.ts
|
|
16515
|
-
import { existsSync as
|
|
17128
|
+
import { existsSync as existsSync17 } from "fs";
|
|
16516
17129
|
|
|
16517
17130
|
// src/features/git/git.utils.ts
|
|
16518
17131
|
init_utils();
|
|
16519
|
-
import { existsSync as
|
|
17132
|
+
import { existsSync as existsSync16, readFileSync as readFileSync5, statSync as statSync4 } from "fs";
|
|
16520
17133
|
import { isAbsolute as isAbsolute3, normalize as normalize2, resolve as resolve3, join as join27 } from "path";
|
|
16521
17134
|
import {
|
|
16522
17135
|
completeSimple,
|
|
@@ -16881,10 +17494,10 @@ async function getUntrackedFilesDiff(gitRoot) {
|
|
|
16881
17494
|
const maxFileSize = 1e5;
|
|
16882
17495
|
for (const relPath of untrackedPaths) {
|
|
16883
17496
|
const fullPath = join27(gitRoot, relPath);
|
|
16884
|
-
if (!
|
|
17497
|
+
if (!existsSync16(fullPath)) continue;
|
|
16885
17498
|
try {
|
|
16886
17499
|
if (statSync4(fullPath).isDirectory()) continue;
|
|
16887
|
-
const content =
|
|
17500
|
+
const content = readFileSync5(fullPath, "utf-8");
|
|
16888
17501
|
const safeContent = content.length > maxFileSize ? content.slice(0, maxFileSize) + "\n...(truncated)" : content;
|
|
16889
17502
|
const linesForDiff = safeContent.split(/\r?\n/).map((l) => `+${l}`).join("\n");
|
|
16890
17503
|
parts.push(
|
|
@@ -17420,7 +18033,7 @@ async function gitStatusHandler(c, metadataManager) {
|
|
|
17420
18033
|
return c.json({ error: "Thread path not found" }, 404);
|
|
17421
18034
|
}
|
|
17422
18035
|
const absolutePath = resolveThreadPath(repoPath);
|
|
17423
|
-
if (!
|
|
18036
|
+
if (!existsSync17(absolutePath)) {
|
|
17424
18037
|
return c.json(
|
|
17425
18038
|
{
|
|
17426
18039
|
error: `Thread repo path does not exist: ${absolutePath}. Check that the project folder is present.`
|
|
@@ -17464,7 +18077,7 @@ async function gitStatusHandler(c, metadataManager) {
|
|
|
17464
18077
|
|
|
17465
18078
|
// src/features/git/git-diff.route.ts
|
|
17466
18079
|
init_utils();
|
|
17467
|
-
import { readFileSync as
|
|
18080
|
+
import { readFileSync as readFileSync6 } from "fs";
|
|
17468
18081
|
import { resolve as resolve4 } from "path";
|
|
17469
18082
|
async function gitDiffHandler(c, metadataManager) {
|
|
17470
18083
|
try {
|
|
@@ -17527,7 +18140,7 @@ async function gitDiffHandler(c, metadataManager) {
|
|
|
17527
18140
|
oldContent = content;
|
|
17528
18141
|
} else if (file.status === "added" && file.path.startsWith("(new)")) {
|
|
17529
18142
|
try {
|
|
17530
|
-
newContent =
|
|
18143
|
+
newContent = readFileSync6(resolve4(gitRoot, file.path.replace("(new) ", "")), "utf-8");
|
|
17531
18144
|
} catch {
|
|
17532
18145
|
newContent = "";
|
|
17533
18146
|
}
|
|
@@ -17560,7 +18173,7 @@ async function gitDiffHandler(c, metadataManager) {
|
|
|
17560
18173
|
});
|
|
17561
18174
|
oldContent = oldContentOutput;
|
|
17562
18175
|
try {
|
|
17563
|
-
newContent =
|
|
18176
|
+
newContent = readFileSync6(resolve4(gitRoot, file.path), "utf-8");
|
|
17564
18177
|
} catch {
|
|
17565
18178
|
newContent = "";
|
|
17566
18179
|
}
|
|
@@ -17624,7 +18237,7 @@ async function gitDiffHandler(c, metadataManager) {
|
|
|
17624
18237
|
}
|
|
17625
18238
|
|
|
17626
18239
|
// src/features/git/git-generate-commit-message.route.ts
|
|
17627
|
-
import { existsSync as
|
|
18240
|
+
import { existsSync as existsSync18 } from "fs";
|
|
17628
18241
|
async function gitGenerateCommitMessageHandler(c, metadataManager) {
|
|
17629
18242
|
try {
|
|
17630
18243
|
const threadId = c.req.param("threadId");
|
|
@@ -17650,7 +18263,7 @@ async function gitGenerateCommitMessageHandler(c, metadataManager) {
|
|
|
17650
18263
|
const absolutePath = resolveThreadPath(repoPath);
|
|
17651
18264
|
process.stdout.write(`[generate-commit-message] resolved path: ${absolutePath}
|
|
17652
18265
|
`);
|
|
17653
|
-
if (!
|
|
18266
|
+
if (!existsSync18(absolutePath)) {
|
|
17654
18267
|
process.stdout.write(`[generate-commit-message] path does not exist: ${absolutePath}
|
|
17655
18268
|
`);
|
|
17656
18269
|
return c.json(
|
|
@@ -18130,7 +18743,7 @@ async function gitGithubStatusHandler(c, metadataManager) {
|
|
|
18130
18743
|
}
|
|
18131
18744
|
|
|
18132
18745
|
// src/features/git/git-unified-status.route.ts
|
|
18133
|
-
import { existsSync as
|
|
18746
|
+
import { existsSync as existsSync19 } from "fs";
|
|
18134
18747
|
async function gitUnifiedStatusHandler(c, metadataManager, db) {
|
|
18135
18748
|
try {
|
|
18136
18749
|
const threadId = c.req.param("threadId");
|
|
@@ -18150,7 +18763,7 @@ async function gitUnifiedStatusHandler(c, metadataManager, db) {
|
|
|
18150
18763
|
return c.json({ error: "Thread path not found" }, 404);
|
|
18151
18764
|
}
|
|
18152
18765
|
const absolutePath = resolveThreadPath(repoPath);
|
|
18153
|
-
if (!
|
|
18766
|
+
if (!existsSync19(absolutePath)) {
|
|
18154
18767
|
return c.json(
|
|
18155
18768
|
{
|
|
18156
18769
|
error: `Thread repo path does not exist: ${absolutePath}. Check that the project folder is present.`
|
|
@@ -18496,7 +19109,7 @@ async function gitCreateBranchHandler(c, metadataManager) {
|
|
|
18496
19109
|
}
|
|
18497
19110
|
|
|
18498
19111
|
// src/features/git/git-sync-branch.route.ts
|
|
18499
|
-
import { existsSync as
|
|
19112
|
+
import { existsSync as existsSync20 } from "fs";
|
|
18500
19113
|
async function gitSyncBranchHandler(c, metadataManager) {
|
|
18501
19114
|
try {
|
|
18502
19115
|
const threadId = c.req.param("threadId");
|
|
@@ -18513,7 +19126,7 @@ async function gitSyncBranchHandler(c, metadataManager) {
|
|
|
18513
19126
|
}
|
|
18514
19127
|
const absolutePath = resolveThreadPath(repoPath);
|
|
18515
19128
|
console.log(`[sync-branch] Resolved path: ${absolutePath}`);
|
|
18516
|
-
if (!
|
|
19129
|
+
if (!existsSync20(absolutePath)) {
|
|
18517
19130
|
console.log(`[sync-branch] Path does not exist: ${absolutePath}`);
|
|
18518
19131
|
return c.json(
|
|
18519
19132
|
{
|
|
@@ -18920,6 +19533,168 @@ async function gitRevertFileHandler(c, metadataManager) {
|
|
|
18920
19533
|
}
|
|
18921
19534
|
}
|
|
18922
19535
|
|
|
19536
|
+
// src/features/git/git-checkpoint.route.ts
|
|
19537
|
+
init_utils();
|
|
19538
|
+
function stashWithLabel(gitRoot, label) {
|
|
19539
|
+
return new Promise((resolve6, reject) => {
|
|
19540
|
+
const proc = spawnProcess("git", ["stash", "push", "--include-untracked", "-m", label], {
|
|
19541
|
+
cwd: gitRoot
|
|
19542
|
+
});
|
|
19543
|
+
let out = "";
|
|
19544
|
+
let err = "";
|
|
19545
|
+
if (proc.stdout) {
|
|
19546
|
+
proc.stdout.on("data", (d) => {
|
|
19547
|
+
out += d.toString();
|
|
19548
|
+
});
|
|
19549
|
+
}
|
|
19550
|
+
if (proc.stderr) {
|
|
19551
|
+
proc.stderr.on("data", (d) => {
|
|
19552
|
+
err += d.toString();
|
|
19553
|
+
});
|
|
19554
|
+
}
|
|
19555
|
+
proc.on("close", (code) => {
|
|
19556
|
+
if (code === 0) {
|
|
19557
|
+
resolve6(!out.includes("No local changes to save"));
|
|
19558
|
+
} else {
|
|
19559
|
+
reject(new Error(err || "Failed to create checkpoint"));
|
|
19560
|
+
}
|
|
19561
|
+
});
|
|
19562
|
+
proc.on("error", reject);
|
|
19563
|
+
});
|
|
19564
|
+
}
|
|
19565
|
+
async function gitCheckpointHandler(c, metadataManager) {
|
|
19566
|
+
try {
|
|
19567
|
+
const threadId = c.req.param("threadId");
|
|
19568
|
+
const body = await c.req.json();
|
|
19569
|
+
const { messageId } = body;
|
|
19570
|
+
if (!messageId) {
|
|
19571
|
+
return c.json({ error: "messageId is required" }, 400);
|
|
19572
|
+
}
|
|
19573
|
+
const threads = await metadataManager.loadThreads();
|
|
19574
|
+
const thread = threads.find((t) => t.id === threadId);
|
|
19575
|
+
if (!thread) {
|
|
19576
|
+
return c.json({ error: "Thread not found" }, 404);
|
|
19577
|
+
}
|
|
19578
|
+
const repoPath = thread.path;
|
|
19579
|
+
if (!repoPath) {
|
|
19580
|
+
return c.json({ success: false, reason: "no_path" });
|
|
19581
|
+
}
|
|
19582
|
+
const absolutePath = resolveThreadPath(repoPath);
|
|
19583
|
+
let gitRoot;
|
|
19584
|
+
try {
|
|
19585
|
+
gitRoot = await getGitRoot(absolutePath);
|
|
19586
|
+
} catch {
|
|
19587
|
+
return c.json({ success: false, reason: "not_a_git_repo" });
|
|
19588
|
+
}
|
|
19589
|
+
const label = `tarsk-checkpoint:${messageId}`;
|
|
19590
|
+
const stashed = await stashWithLabel(gitRoot, label);
|
|
19591
|
+
return c.json({ success: true, stashed, checkpointRef: label });
|
|
19592
|
+
} catch (error) {
|
|
19593
|
+
const message = error instanceof Error ? error.message : "Failed to create checkpoint";
|
|
19594
|
+
return c.json({ error: message }, 500);
|
|
19595
|
+
}
|
|
19596
|
+
}
|
|
19597
|
+
|
|
19598
|
+
// src/features/git/git-checkpoint-restore.route.ts
|
|
19599
|
+
init_utils();
|
|
19600
|
+
function listStashes(gitRoot) {
|
|
19601
|
+
return new Promise((resolve6, reject) => {
|
|
19602
|
+
const proc = spawnProcess("git", ["stash", "list", "--format=%gd %s"], { cwd: gitRoot });
|
|
19603
|
+
let out = "";
|
|
19604
|
+
let err = "";
|
|
19605
|
+
if (proc.stdout) {
|
|
19606
|
+
proc.stdout.on("data", (d) => {
|
|
19607
|
+
out += d.toString();
|
|
19608
|
+
});
|
|
19609
|
+
}
|
|
19610
|
+
if (proc.stderr) {
|
|
19611
|
+
proc.stderr.on("data", (d) => {
|
|
19612
|
+
err += d.toString();
|
|
19613
|
+
});
|
|
19614
|
+
}
|
|
19615
|
+
proc.on("close", (code) => {
|
|
19616
|
+
if (code !== 0) {
|
|
19617
|
+
reject(new Error(err || "Failed to list stashes"));
|
|
19618
|
+
return;
|
|
19619
|
+
}
|
|
19620
|
+
const entries = out.trim().split("\n").filter((l) => l.length > 0).map((line) => {
|
|
19621
|
+
const match = line.match(/^stash@\{(\d+)\}\s+(?:On \S+:|WIP on \S+:)?\s*(.*)$/);
|
|
19622
|
+
if (!match) return null;
|
|
19623
|
+
return { index: parseInt(match[1], 10), label: match[2].trim() };
|
|
19624
|
+
}).filter((e) => e !== null);
|
|
19625
|
+
resolve6(entries);
|
|
19626
|
+
});
|
|
19627
|
+
proc.on("error", reject);
|
|
19628
|
+
});
|
|
19629
|
+
}
|
|
19630
|
+
function discardWorkingChanges(gitRoot) {
|
|
19631
|
+
return new Promise((resolve6, reject) => {
|
|
19632
|
+
const proc = spawnProcess("git", ["checkout", "--", "."], { cwd: gitRoot });
|
|
19633
|
+
proc.on("close", () => {
|
|
19634
|
+
const clean = spawnProcess("git", ["clean", "-fd"], { cwd: gitRoot });
|
|
19635
|
+
clean.on("close", (code) => {
|
|
19636
|
+
if (code === 0) resolve6();
|
|
19637
|
+
else reject(new Error("Failed to clean working tree"));
|
|
19638
|
+
});
|
|
19639
|
+
clean.on("error", reject);
|
|
19640
|
+
});
|
|
19641
|
+
proc.on("error", reject);
|
|
19642
|
+
});
|
|
19643
|
+
}
|
|
19644
|
+
function applyStash(gitRoot, index) {
|
|
19645
|
+
return new Promise((resolve6, reject) => {
|
|
19646
|
+
const proc = spawnProcess("git", ["stash", "apply", `stash@{${index}}`], { cwd: gitRoot });
|
|
19647
|
+
let err = "";
|
|
19648
|
+
if (proc.stderr) {
|
|
19649
|
+
proc.stderr.on("data", (d) => {
|
|
19650
|
+
err += d.toString();
|
|
19651
|
+
});
|
|
19652
|
+
}
|
|
19653
|
+
proc.on("close", (code) => {
|
|
19654
|
+
if (code === 0) resolve6();
|
|
19655
|
+
else reject(new Error(err || "Failed to apply stash"));
|
|
19656
|
+
});
|
|
19657
|
+
proc.on("error", reject);
|
|
19658
|
+
});
|
|
19659
|
+
}
|
|
19660
|
+
// Route handler: restores a previously saved git checkpoint for a thread.
// Looks up the thread's repository, matches `checkpointRef` against the
// labels of the repo's stash entries, discards current working-tree changes,
// and applies the matching stash.
//
// @param {object} c - Hono request context (expects :threadId param and a
//   JSON body containing `checkpointRef`).
// @param {object} metadataManager - Provides `loadThreads()`.
// @returns {Promise<Response>} JSON response: { success: true } on success,
//   or { error } with 400/404/500 status on failure.
async function gitCheckpointRestoreHandler(c, metadataManager) {
  try {
    const threadId = c.req.param("threadId");
    const { checkpointRef } = await c.req.json();
    if (!checkpointRef) {
      return c.json({ error: "checkpointRef is required" }, 400);
    }
    const allThreads = await metadataManager.loadThreads();
    const thread = allThreads.find((t) => t.id === threadId);
    if (!thread) {
      return c.json({ error: "Thread not found" }, 404);
    }
    if (!thread.path) {
      return c.json({ error: "Thread has no path" }, 400);
    }
    // Resolve outside the try below: a failure here is unexpected and should
    // surface as a 500 via the outer catch, matching prior behavior.
    const absolutePath = resolveThreadPath(thread.path);
    let gitRoot;
    try {
      gitRoot = await getGitRoot(absolutePath);
    } catch {
      return c.json({ error: "Not a git repository" }, 400);
    }
    const stashEntries = await listStashes(gitRoot);
    const checkpoint = stashEntries.find((entry) => entry.label === checkpointRef);
    if (!checkpoint) {
      return c.json({ error: "Checkpoint not found in stash list" }, 404);
    }
    // Clear the working tree first so the stash applies onto a clean state.
    await discardWorkingChanges(gitRoot);
    await applyStash(gitRoot, checkpoint.index);
    return c.json({ success: true });
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to restore checkpoint";
    return c.json({ error: message }, 500);
  }
}
|
|
19697
|
+
|
|
18923
19698
|
// src/features/git/git.routes.ts
|
|
18924
19699
|
function createGitRoutes(metadataManager) {
|
|
18925
19700
|
const router = new Hono11();
|
|
@@ -19008,6 +19783,12 @@ function createGitRoutes(metadataManager) {
|
|
|
19008
19783
|
await invalidateGitStatusCache(db, c.req.param("threadId"));
|
|
19009
19784
|
return gitRevertFileHandler(c, metadataManager);
|
|
19010
19785
|
});
|
|
19786
|
+
router.post("/checkpoint/:threadId", async (c) => {
|
|
19787
|
+
return gitCheckpointHandler(c, metadataManager);
|
|
19788
|
+
});
|
|
19789
|
+
router.post("/checkpoint-restore/:threadId", async (c) => {
|
|
19790
|
+
return gitCheckpointRestoreHandler(c, metadataManager);
|
|
19791
|
+
});
|
|
19011
19792
|
return router;
|
|
19012
19793
|
}
|
|
19013
19794
|
|
|
@@ -19052,7 +19833,7 @@ function createUpdateRoutes(updater) {
|
|
|
19052
19833
|
|
|
19053
19834
|
// src/features/mcp/mcp.routes.ts
|
|
19054
19835
|
import { Hono as Hono13 } from "hono";
|
|
19055
|
-
import { readFile as readFile15, writeFile as
|
|
19836
|
+
import { readFile as readFile15, writeFile as writeFile6, mkdir as mkdir9, access as access6 } from "fs/promises";
|
|
19056
19837
|
import { join as join28, dirname as dirname6 } from "path";
|
|
19057
19838
|
|
|
19058
19839
|
// src/features/mcp/mcp.popular.json
|
|
@@ -19179,8 +19960,8 @@ async function readMCPConfig(projectPath) {
|
|
|
19179
19960
|
// Persists the MCP configuration for a project, preferring the file the
// config was originally read from and falling back to `.agents/mcp.json`
// under the project root. Creates parent directories as needed.
//
// @param {string} projectPath - Absolute path to the project root.
// @param {object} config - MCP configuration object to serialize.
// @returns {Promise<void>}
async function writeMCPConfig(projectPath, config) {
  const existing = await readMCPConfig(projectPath);
  const targetPath = existing?.filePath ?? join28(projectPath, ".agents/mcp.json");
  // Ensure the parent directory exists before writing.
  await mkdir9(dirname6(targetPath), { recursive: true });
  const serialized = JSON.stringify(config, null, 2);
  await writeFile6(targetPath, serialized, "utf-8");
}
|
|
19185
19966
|
function createMCPRoutes() {
|
|
19186
19967
|
const router = new Hono13();
|
|
@@ -19895,15 +20676,7 @@ function createBrowserJsRoutes(threadManager) {
|
|
|
19895
20676
|
400
|
|
19896
20677
|
);
|
|
19897
20678
|
}
|
|
19898
|
-
|
|
19899
|
-
if (!resolved) {
|
|
19900
|
-
return errorResponse(
|
|
19901
|
-
c,
|
|
19902
|
-
ErrorCodes.INVALID_REQUEST,
|
|
19903
|
-
"No pending web worker execution found for this toolCallId",
|
|
19904
|
-
404
|
|
19905
|
-
);
|
|
19906
|
-
}
|
|
20679
|
+
submitWebWorkerResult(toolCallId, result);
|
|
19907
20680
|
return c.json({ success: true });
|
|
19908
20681
|
} catch (error) {
|
|
19909
20682
|
return errorResponse(
|
|
@@ -19956,8 +20729,8 @@ function createBrowserJsRoutes(threadManager) {
|
|
|
19956
20729
|
}
|
|
19957
20730
|
|
|
19958
20731
|
// src/server.ts
|
|
19959
|
-
var __filename =
|
|
19960
|
-
var __dirname =
|
|
20732
|
+
var __filename = fileURLToPath(import.meta.url);
|
|
20733
|
+
var __dirname = path4.dirname(__filename);
|
|
19961
20734
|
async function startTarskServer(options) {
|
|
19962
20735
|
const { isDebug: isDebug2, publicDir: publicDirOverride } = options;
|
|
19963
20736
|
const port = isDebug2 ? 462 : process.env.PORT ? parseInt(process.env.PORT) : 641;
|
|
@@ -20036,16 +20809,16 @@ async function startTarskServer(options) {
|
|
|
20036
20809
|
app.route("/api/update", createUpdateRoutes(options.updater));
|
|
20037
20810
|
createSlashCommandRoutes(app, threadManager);
|
|
20038
20811
|
createRuleRoutes(app, projectManager);
|
|
20039
|
-
const prodPublicDir =
|
|
20040
|
-
const devCopiedPublicDir =
|
|
20041
|
-
const appDistDir =
|
|
20812
|
+
const prodPublicDir = path4.join(__dirname, "public");
|
|
20813
|
+
const devCopiedPublicDir = path4.join(process.cwd(), "dist", "public");
|
|
20814
|
+
const appDistDir = path4.join(process.cwd(), "..", "app", "dist");
|
|
20042
20815
|
const resolvedPublicDir = publicDirOverride ?? [prodPublicDir, devCopiedPublicDir, appDistDir].find((dirPath) => fs3.existsSync(dirPath));
|
|
20043
20816
|
if (!resolvedPublicDir) {
|
|
20044
20817
|
throw new Error(
|
|
20045
20818
|
`No static frontend assets found. Expected one of: ${prodPublicDir}, ${devCopiedPublicDir}, ${appDistDir}. Build the app first with \`cd ../app && bun run build\`.`
|
|
20046
20819
|
);
|
|
20047
20820
|
}
|
|
20048
|
-
const staticRoot =
|
|
20821
|
+
const staticRoot = path4.relative(process.cwd(), resolvedPublicDir);
|
|
20049
20822
|
app.use("/*", async (c, next) => {
|
|
20050
20823
|
if (c.req.path.startsWith("/api/")) {
|
|
20051
20824
|
return next();
|
|
@@ -20057,7 +20830,7 @@ async function startTarskServer(options) {
|
|
|
20057
20830
|
return next();
|
|
20058
20831
|
}
|
|
20059
20832
|
return serveStatic({
|
|
20060
|
-
path:
|
|
20833
|
+
path: path4.relative(process.cwd(), path4.join(resolvedPublicDir, "index.html"))
|
|
20061
20834
|
})(c, next);
|
|
20062
20835
|
});
|
|
20063
20836
|
app.all("*", (c) => {
|
|
@@ -20090,8 +20863,8 @@ async function startTarskServer(options) {
|
|
|
20090
20863
|
|
|
20091
20864
|
// src/index.ts
|
|
20092
20865
|
import fs4 from "fs";
|
|
20093
|
-
import
|
|
20094
|
-
import { fileURLToPath as
|
|
20866
|
+
import path5 from "path";
|
|
20867
|
+
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
20095
20868
|
var args = process.argv.slice(2);
|
|
20096
20869
|
var isDebug = args.includes("--debug");
|
|
20097
20870
|
var shouldOpenBrowser = args.includes("--open");
|
|
@@ -20099,9 +20872,9 @@ if (!isDebug) {
|
|
|
20099
20872
|
console.log = () => {
|
|
20100
20873
|
};
|
|
20101
20874
|
} else {
|
|
20102
|
-
const __filename2 =
|
|
20103
|
-
const cliDir =
|
|
20104
|
-
const logFilePath =
|
|
20875
|
+
const __filename2 = fileURLToPath2(import.meta.url);
|
|
20876
|
+
const cliDir = path5.resolve(path5.dirname(__filename2), "..");
|
|
20877
|
+
const logFilePath = path5.join(cliDir, "logs.txt");
|
|
20105
20878
|
try {
|
|
20106
20879
|
fs4.writeFileSync(logFilePath, "");
|
|
20107
20880
|
} catch {
|