sparkecoder 0.1.82 → 0.1.83

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/dist/agent/index.js +284 -71
  2. package/dist/agent/index.js.map +1 -1
  3. package/dist/cli.js +370 -144
  4. package/dist/cli.js.map +1 -1
  5. package/dist/index.js +358 -132
  6. package/dist/index.js.map +1 -1
  7. package/dist/server/index.js +358 -132
  8. package/dist/server/index.js.map +1 -1
  9. package/dist/skills/default/browser.md +30 -0
  10. package/dist/tools/index.d.ts +117 -1
  11. package/dist/tools/index.js +183 -41
  12. package/dist/tools/index.js.map +1 -1
  13. package/package.json +1 -1
  14. package/src/skills/default/browser.md +30 -0
  15. package/web/.next/BUILD_ID +1 -1
  16. package/web/.next/standalone/web/.next/BUILD_ID +1 -1
  17. package/web/.next/standalone/web/.next/build-manifest.json +2 -2
  18. package/web/.next/standalone/web/.next/prerender-manifest.json +3 -3
  19. package/web/.next/standalone/web/.next/server/app/_global-error.html +2 -2
  20. package/web/.next/standalone/web/.next/server/app/_global-error.rsc +1 -1
  21. package/web/.next/standalone/web/.next/server/app/_global-error.segments/__PAGE__.segment.rsc +1 -1
  22. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_full.segment.rsc +1 -1
  23. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_head.segment.rsc +1 -1
  24. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_index.segment.rsc +1 -1
  25. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -1
  26. package/web/.next/standalone/web/.next/server/app/_not-found.html +1 -1
  27. package/web/.next/standalone/web/.next/server/app/_not-found.rsc +1 -1
  28. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_full.segment.rsc +1 -1
  29. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_head.segment.rsc +1 -1
  30. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_index.segment.rsc +1 -1
  31. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +1 -1
  32. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found.segment.rsc +1 -1
  33. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_tree.segment.rsc +1 -1
  34. package/web/.next/standalone/web/.next/server/app/docs/installation.html +2 -2
  35. package/web/.next/standalone/web/.next/server/app/docs/installation.rsc +1 -1
  36. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_full.segment.rsc +1 -1
  37. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_head.segment.rsc +1 -1
  38. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_index.segment.rsc +1 -1
  39. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_tree.segment.rsc +1 -1
  40. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation/__PAGE__.segment.rsc +1 -1
  41. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation.segment.rsc +1 -1
  42. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs.segment.rsc +1 -1
  43. package/web/.next/standalone/web/.next/server/app/docs/skills.html +2 -2
  44. package/web/.next/standalone/web/.next/server/app/docs/skills.rsc +1 -1
  45. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_full.segment.rsc +1 -1
  46. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_head.segment.rsc +1 -1
  47. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_index.segment.rsc +1 -1
  48. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_tree.segment.rsc +1 -1
  49. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills/__PAGE__.segment.rsc +1 -1
  50. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills.segment.rsc +1 -1
  51. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs.segment.rsc +1 -1
  52. package/web/.next/standalone/web/.next/server/app/docs/tools.html +2 -2
  53. package/web/.next/standalone/web/.next/server/app/docs/tools.rsc +1 -1
  54. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_full.segment.rsc +1 -1
  55. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_head.segment.rsc +1 -1
  56. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_index.segment.rsc +1 -1
  57. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_tree.segment.rsc +1 -1
  58. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools/__PAGE__.segment.rsc +1 -1
  59. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools.segment.rsc +1 -1
  60. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs.segment.rsc +1 -1
  61. package/web/.next/standalone/web/.next/server/app/docs.html +2 -2
  62. package/web/.next/standalone/web/.next/server/app/docs.rsc +1 -1
  63. package/web/.next/standalone/web/.next/server/app/docs.segments/_full.segment.rsc +1 -1
  64. package/web/.next/standalone/web/.next/server/app/docs.segments/_head.segment.rsc +1 -1
  65. package/web/.next/standalone/web/.next/server/app/docs.segments/_index.segment.rsc +1 -1
  66. package/web/.next/standalone/web/.next/server/app/docs.segments/_tree.segment.rsc +1 -1
  67. package/web/.next/standalone/web/.next/server/app/docs.segments/docs/__PAGE__.segment.rsc +1 -1
  68. package/web/.next/standalone/web/.next/server/app/docs.segments/docs.segment.rsc +1 -1
  69. package/web/.next/standalone/web/.next/server/app/index.html +1 -1
  70. package/web/.next/standalone/web/.next/server/app/index.rsc +1 -1
  71. package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p/__PAGE__.segment.rsc +1 -1
  72. package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p.segment.rsc +1 -1
  73. package/web/.next/standalone/web/.next/server/app/index.segments/_full.segment.rsc +1 -1
  74. package/web/.next/standalone/web/.next/server/app/index.segments/_head.segment.rsc +1 -1
  75. package/web/.next/standalone/web/.next/server/app/index.segments/_index.segment.rsc +1 -1
  76. package/web/.next/standalone/web/.next/server/app/index.segments/_tree.segment.rsc +1 -1
  77. package/web/.next/standalone/web/.next/server/pages/404.html +1 -1
  78. package/web/.next/standalone/web/.next/server/pages/500.html +2 -2
  79. package/web/.next/standalone/web/.next/server/server-reference-manifest.js +1 -1
  80. package/web/.next/standalone/web/.next/server/server-reference-manifest.json +1 -1
  81. package/web/.next/standalone/web/package-lock.json +3 -3
  82. /package/web/.next/standalone/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  83. /package/web/.next/standalone/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  84. /package/web/.next/standalone/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
  85. /package/web/.next/standalone/web/.next/static/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  86. /package/web/.next/standalone/web/.next/static/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  87. /package/web/.next/standalone/web/.next/static/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
  88. /package/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  89. /package/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  90. /package/web/.next/static/{Ne3ChQc_mw5oh4Y1Rr7qj → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
package/dist/index.js CHANGED
@@ -369,27 +369,38 @@ function requiresApproval(toolName, sessionConfig) {
369
369
  return false;
370
370
  }
371
371
  function loadStoredAuthKey() {
372
- const keysPath = join(getAppDataDirectory(), AUTH_KEY_FILE);
373
- if (!existsSync(keysPath)) {
374
- return null;
375
- }
376
- try {
377
- const content = readFileSync(keysPath, "utf-8");
378
- const data = JSON.parse(content);
379
- return data.authKey || null;
380
- } catch {
381
- return null;
372
+ const locations = [
373
+ join(process.cwd(), ".sparkecoder", AUTH_KEY_FILE),
374
+ join(getAppDataDirectory(), AUTH_KEY_FILE)
375
+ ];
376
+ for (const keysPath of locations) {
377
+ if (!existsSync(keysPath)) continue;
378
+ try {
379
+ const content = readFileSync(keysPath, "utf-8");
380
+ const data = JSON.parse(content);
381
+ if (data.authKey) return data.authKey;
382
+ } catch {
383
+ }
382
384
  }
385
+ return null;
383
386
  }
384
387
  function saveAuthKey(authKey3, userId) {
385
- const appDir = ensureAppDataDirectory();
386
- const keysPath = join(appDir, AUTH_KEY_FILE);
387
388
  const data = {
388
389
  authKey: authKey3,
389
390
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
390
391
  userId
391
392
  };
392
- writeFileSync(keysPath, JSON.stringify(data, null, 2), { mode: 384 });
393
+ const json = JSON.stringify(data, null, 2);
394
+ const appDir = ensureAppDataDirectory();
395
+ writeFileSync(join(appDir, AUTH_KEY_FILE), json, { mode: 384 });
396
+ try {
397
+ const workspaceAuthDir = join(process.cwd(), ".sparkecoder");
398
+ if (!existsSync(workspaceAuthDir)) {
399
+ mkdirSync(workspaceAuthDir, { recursive: true });
400
+ }
401
+ writeFileSync(join(workspaceAuthDir, AUTH_KEY_FILE), json, { mode: 384 });
402
+ } catch {
403
+ }
393
404
  }
394
405
  async function registerWithRemoteServer(serverUrl, name) {
395
406
  const response = await fetch(`${serverUrl}/auth/register`, {
@@ -1023,9 +1034,9 @@ __export(skills_exports, {
1023
1034
  loadSkillContent: () => loadSkillContent,
1024
1035
  loadSkillsFromDirectory: () => loadSkillsFromDirectory
1025
1036
  });
1026
- import { readFile as readFile6, readdir } from "fs/promises";
1037
+ import { readFile as readFile7, readdir } from "fs/promises";
1027
1038
  import { resolve as resolve6, basename, extname as extname4, relative as relative4 } from "path";
1028
- import { existsSync as existsSync9 } from "fs";
1039
+ import { existsSync as existsSync10 } from "fs";
1029
1040
  import { minimatch } from "minimatch";
1030
1041
  function parseSkillFrontmatter(content) {
1031
1042
  const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
@@ -1103,7 +1114,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1103
1114
  defaultLoadType = "on_demand",
1104
1115
  forceAlwaysApply = false
1105
1116
  } = options;
1106
- if (!existsSync9(directory)) {
1117
+ if (!existsSync10(directory)) {
1107
1118
  return [];
1108
1119
  }
1109
1120
  const skills = [];
@@ -1113,7 +1124,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1113
1124
  let fileName;
1114
1125
  if (entry.isDirectory()) {
1115
1126
  const skillMdPath = resolve6(directory, entry.name, "SKILL.md");
1116
- if (existsSync9(skillMdPath)) {
1127
+ if (existsSync10(skillMdPath)) {
1117
1128
  filePath = skillMdPath;
1118
1129
  fileName = entry.name;
1119
1130
  } else {
@@ -1125,7 +1136,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1125
1136
  } else {
1126
1137
  continue;
1127
1138
  }
1128
- const content = await readFile6(filePath, "utf-8");
1139
+ const content = await readFile7(filePath, "utf-8");
1129
1140
  const parsed = parseSkillFrontmatter(content);
1130
1141
  if (parsed) {
1131
1142
  const alwaysApply = forceAlwaysApply || parsed.metadata.alwaysApply;
@@ -1204,7 +1215,7 @@ async function loadAllSkillsFromDiscovered(discovered) {
1204
1215
  const onDemandSkills = allSkills.filter((s) => !s.alwaysApply && s.loadType !== "always");
1205
1216
  const alwaysWithContent = await Promise.all(
1206
1217
  alwaysSkills.map(async (skill) => {
1207
- const content = await readFile6(skill.filePath, "utf-8");
1218
+ const content = await readFile7(skill.filePath, "utf-8");
1208
1219
  const parsed = parseSkillFrontmatter(content);
1209
1220
  return {
1210
1221
  ...skill,
@@ -1241,7 +1252,7 @@ async function getGlobMatchedSkills(skills, activeFiles, workingDirectory) {
1241
1252
  });
1242
1253
  const matchedWithContent = await Promise.all(
1243
1254
  matchedSkills.map(async (skill) => {
1244
- const content = await readFile6(skill.filePath, "utf-8");
1255
+ const content = await readFile7(skill.filePath, "utf-8");
1245
1256
  const parsed = parseSkillFrontmatter(content);
1246
1257
  return {
1247
1258
  ...skill,
@@ -1253,10 +1264,10 @@ async function getGlobMatchedSkills(skills, activeFiles, workingDirectory) {
1253
1264
  return matchedWithContent;
1254
1265
  }
1255
1266
  async function loadAgentsMd(agentsMdPath) {
1256
- if (!agentsMdPath || !existsSync9(agentsMdPath)) {
1267
+ if (!agentsMdPath || !existsSync10(agentsMdPath)) {
1257
1268
  return null;
1258
1269
  }
1259
- const content = await readFile6(agentsMdPath, "utf-8");
1270
+ const content = await readFile7(agentsMdPath, "utf-8");
1260
1271
  return content;
1261
1272
  }
1262
1273
  async function loadSkillContent(skillName, directories) {
@@ -1267,7 +1278,7 @@ async function loadSkillContent(skillName, directories) {
1267
1278
  if (!skill) {
1268
1279
  return null;
1269
1280
  }
1270
- const content = await readFile6(skill.filePath, "utf-8");
1281
+ const content = await readFile7(skill.filePath, "utf-8");
1271
1282
  const parsed = parseSkillFrontmatter(content);
1272
1283
  return {
1273
1284
  ...skill,
@@ -1528,7 +1539,7 @@ var init_client = __esm({
1528
1539
  });
1529
1540
 
1530
1541
  // src/semantic/indexer.ts
1531
- import { readFileSync as readFileSync4, statSync } from "fs";
1542
+ import { readFileSync as readFileSync5, statSync } from "fs";
1532
1543
  import { relative as relative6 } from "path";
1533
1544
  import { minimatch as minimatch2 } from "minimatch";
1534
1545
  async function getIndexStatus(workingDirectory) {
@@ -1613,8 +1624,8 @@ __export(semantic_search_exports, {
1613
1624
  });
1614
1625
  import { tool as tool8 } from "ai";
1615
1626
  import { z as z9 } from "zod";
1616
- import { existsSync as existsSync12, readFileSync as readFileSync5 } from "fs";
1617
- import { join as join5 } from "path";
1627
+ import { existsSync as existsSync13, readFileSync as readFileSync6 } from "fs";
1628
+ import { join as join6 } from "path";
1618
1629
  import { minimatch as minimatch3 } from "minimatch";
1619
1630
  function createSemanticSearchTool(options) {
1620
1631
  return tool8({
@@ -1681,13 +1692,13 @@ Returns matching code snippets with file paths, line numbers, and relevance scor
1681
1692
  if (language && matchLanguage !== language.toLowerCase()) {
1682
1693
  continue;
1683
1694
  }
1684
- const fullPath = join5(options.workingDirectory, filePath);
1685
- if (!existsSync12(fullPath)) {
1695
+ const fullPath = join6(options.workingDirectory, filePath);
1696
+ if (!existsSync13(fullPath)) {
1686
1697
  continue;
1687
1698
  }
1688
1699
  let snippet = "";
1689
1700
  try {
1690
- const content = readFileSync5(fullPath, "utf-8");
1701
+ const content = readFileSync6(fullPath, "utf-8");
1691
1702
  const lines = content.split("\n");
1692
1703
  const snippetLines = lines.slice(
1693
1704
  Math.max(0, startLine - 1),
@@ -1755,7 +1766,7 @@ async function sendWebhook(url, event) {
1755
1766
  try {
1756
1767
  const controller = new AbortController();
1757
1768
  const timeout = setTimeout(() => controller.abort(), 5e3);
1758
- await fetch(url, {
1769
+ const response = await fetch(url, {
1759
1770
  method: "POST",
1760
1771
  headers: {
1761
1772
  "Content-Type": "application/json",
@@ -1765,7 +1776,12 @@ async function sendWebhook(url, event) {
1765
1776
  signal: controller.signal
1766
1777
  });
1767
1778
  clearTimeout(timeout);
1768
- } catch {
1779
+ if (!response.ok) {
1780
+ console.warn(`[WEBHOOK] ${event.type} to ${url} returned HTTP ${response.status}`);
1781
+ }
1782
+ } catch (err) {
1783
+ const reason = err.name === "AbortError" ? "timeout (5s)" : err.message;
1784
+ console.warn(`[WEBHOOK] ${event.type} to ${url} failed: ${reason}`);
1769
1785
  }
1770
1786
  }
1771
1787
  var init_webhook = __esm({
@@ -1982,8 +1998,8 @@ __export(recorder_exports, {
1982
1998
  });
1983
1999
  import { exec as exec5 } from "child_process";
1984
2000
  import { promisify as promisify5 } from "util";
1985
- import { writeFile as writeFile4, mkdir as mkdir4, readFile as readFile10, unlink as unlink2, readdir as readdir5, rm } from "fs/promises";
1986
- import { join as join7 } from "path";
2001
+ import { writeFile as writeFile5, mkdir as mkdir4, readFile as readFile11, unlink as unlink2, readdir as readdir5, rm } from "fs/promises";
2002
+ import { join as join8 } from "path";
1987
2003
  import { tmpdir } from "os";
1988
2004
  import { nanoid as nanoid3 } from "nanoid";
1989
2005
  async function checkFfmpeg() {
@@ -2040,21 +2056,21 @@ var init_recorder = __esm({
2040
2056
  */
2041
2057
  async encode() {
2042
2058
  if (this.frames.length === 0) return null;
2043
- const workDir = join7(tmpdir(), `sparkecoder-recording-${nanoid3(8)}`);
2059
+ const workDir = join8(tmpdir(), `sparkecoder-recording-${nanoid3(8)}`);
2044
2060
  await mkdir4(workDir, { recursive: true });
2045
2061
  try {
2046
2062
  for (let i = 0; i < this.frames.length; i++) {
2047
- const framePath = join7(workDir, `frame_${String(i).padStart(6, "0")}.jpg`);
2048
- await writeFile4(framePath, this.frames[i].data);
2063
+ const framePath = join8(workDir, `frame_${String(i).padStart(6, "0")}.jpg`);
2064
+ await writeFile5(framePath, this.frames[i].data);
2049
2065
  }
2050
2066
  const duration = (this.frames[this.frames.length - 1].timestamp - this.frames[0].timestamp) / 1e3;
2051
2067
  const fps = duration > 0 ? Math.round(this.frames.length / duration) : 10;
2052
2068
  const clampedFps = Math.max(1, Math.min(fps, 30));
2053
- const outputPath = join7(workDir, `recording_${this.sessionId}.mp4`);
2069
+ const outputPath = join8(workDir, `recording_${this.sessionId}.mp4`);
2054
2070
  const hasFfmpeg = await checkFfmpeg();
2055
2071
  if (hasFfmpeg) {
2056
2072
  await execAsync5(
2057
- `ffmpeg -y -framerate ${clampedFps} -i "${join7(workDir, "frame_%06d.jpg")}" -c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 "${outputPath}"`,
2073
+ `ffmpeg -y -framerate ${clampedFps} -i "${join8(workDir, "frame_%06d.jpg")}" -c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 "${outputPath}"`,
2058
2074
  { timeout: 12e4 }
2059
2075
  );
2060
2076
  } else {
@@ -2062,11 +2078,11 @@ var init_recorder = __esm({
2062
2078
  await cleanup(workDir);
2063
2079
  return null;
2064
2080
  }
2065
- const outputBuf = await readFile10(outputPath);
2081
+ const outputBuf = await readFile11(outputPath);
2066
2082
  const files = await readdir5(workDir);
2067
2083
  for (const f of files) {
2068
2084
  if (f.startsWith("frame_")) {
2069
- await unlink2(join7(workDir, f)).catch(() => {
2085
+ await unlink2(join8(workDir, f)).catch(() => {
2070
2086
  });
2071
2087
  }
2072
2088
  }
@@ -4168,8 +4184,34 @@ Working directory: ${options.workingDirectory}`,
4168
4184
  init_db();
4169
4185
  import { tool as tool4 } from "ai";
4170
4186
  import { z as z5 } from "zod";
4187
+ import { existsSync as existsSync9, mkdirSync as mkdirSync4, readdirSync, unlinkSync, readFileSync as readFileSync3, appendFileSync } from "fs";
4188
+ import { readFile as readFile6, writeFile as writeFile4 } from "fs/promises";
4189
+ import { join as join4 } from "path";
4190
+ function getPlansDir(workingDirectory, sessionId) {
4191
+ return join4(workingDirectory, ".sparkecoder", "plans", sessionId);
4192
+ }
4193
+ function ensurePlansDir(workingDirectory, sessionId) {
4194
+ const dir = getPlansDir(workingDirectory, sessionId);
4195
+ if (!existsSync9(dir)) {
4196
+ mkdirSync4(dir, { recursive: true });
4197
+ }
4198
+ const gitignorePath = join4(workingDirectory, ".gitignore");
4199
+ if (existsSync9(gitignorePath)) {
4200
+ try {
4201
+ const content = readFileSync3(gitignorePath, "utf-8");
4202
+ if (!content.includes(".sparkecoder")) {
4203
+ appendFileSync(gitignorePath, "\n.sparkecoder/\n");
4204
+ }
4205
+ } catch {
4206
+ }
4207
+ }
4208
+ return dir;
4209
+ }
4210
+ function slugify(name) {
4211
+ return name.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "").slice(0, 80) || "plan";
4212
+ }
4171
4213
  var todoInputSchema = z5.object({
4172
- action: z5.enum(["add", "list", "mark", "clear"]).describe("The action to perform on the todo list"),
4214
+ action: z5.enum(["add", "list", "mark", "clear", "save_plan", "list_plans", "get_plan", "delete_plan"]).describe("The action to perform"),
4173
4215
  items: z5.array(
4174
4216
  z5.object({
4175
4217
  content: z5.string().describe("Description of the task"),
@@ -4177,27 +4219,67 @@ var todoInputSchema = z5.object({
4177
4219
  })
4178
4220
  ).optional().describe('For "add" action: Array of todo items to add'),
4179
4221
  todoId: z5.string().optional().describe('For "mark" action: The ID of the todo item to update'),
4180
- status: z5.enum(["pending", "in_progress", "completed", "cancelled"]).optional().describe('For "mark" action: The new status for the todo item')
4222
+ status: z5.enum(["pending", "in_progress", "completed", "cancelled"]).optional().describe('For "mark" action: The new status for the todo item'),
4223
+ planName: z5.string().optional().describe('For plan actions: Name of the plan (e.g. "auth-system", "db-migration")'),
4224
+ planContent: z5.string().optional().describe('For "save_plan": Full plan content as markdown with hierarchical tasks using checkboxes')
4181
4225
  });
4182
4226
  function createTodoTool(options) {
4183
4227
  return tool4({
4184
- description: `Manage your task list for the current session. Use this to:
4185
- - Break down complex tasks into smaller steps
4186
- - Track progress on multi-step operations
4187
- - Organize your work systematically
4228
+ description: `Manage your task list and persistent plans for the current session.
4188
4229
 
4189
- Available actions:
4230
+ ## Todo Actions (for tracking current work)
4190
4231
  - "add": Add one or more new todo items to the list
4191
4232
  - "list": View all current todo items and their status
4192
4233
  - "mark": Update the status of a todo item (pending, in_progress, completed, cancelled)
4193
4234
  - "clear": Remove all todo items from the list
4194
4235
 
4195
- Best practices:
4196
- - Add todos before starting complex tasks
4197
- - Mark items as "in_progress" when actively working on them
4198
- - Update status as you complete each step`,
4236
+ ## Plan Actions (for complex, multi-phase work)
4237
+ - "save_plan": Create or update a named plan \u2014 a persistent markdown document with hierarchical tasks, subtasks, and notes. Plans survive context compaction and are always available.
4238
+ - "list_plans": List all plans for this session
4239
+ - "get_plan": Read a specific plan by name
4240
+ - "delete_plan": Remove a plan
4241
+
4242
+ ## Plans vs Todos
4243
+ - **Plans** are the big picture \u2014 the full spec with phases, subtasks, notes, and decisions. They persist on disk and are always injected into your context, even after old messages are summarized.
4244
+ - **Todos** are your current focus \u2014 the immediate steps you're working on right now.
4245
+
4246
+ ## Workflow for complex tasks
4247
+ 1. Create a plan with phases and subtasks (save_plan)
4248
+ 2. Create todos from the first uncompleted phase (add)
4249
+ 3. Work through the todos, marking them as you go
4250
+ 4. When all current todos are done, update the plan (mark completed sections with [x]) and save it
4251
+ 5. Create new todos from the next uncompleted phase
4252
+ 6. Repeat until the plan is fully complete
4253
+
4254
+ ## Plan format
4255
+ Plans should be markdown with this structure:
4256
+ \`\`\`markdown
4257
+ # Plan: [Title]
4258
+
4259
+ ## Overview
4260
+ [What we're doing and why]
4261
+
4262
+ ## Phase 1: [Name] [completed]
4263
+ - [x] Task 1
4264
+ - [x] Task 2
4265
+
4266
+ ## Phase 2: [Name] [in_progress]
4267
+ - [x] Subtask 2.1
4268
+ - [ ] Subtask 2.2
4269
+ - [ ] Sub-subtask 2.2.1
4270
+ - [ ] Sub-subtask 2.2.2
4271
+ - [ ] Subtask 2.3
4272
+
4273
+ ## Phase 3: [Name] [pending]
4274
+ - [ ] Task 1
4275
+ - [ ] Task 2
4276
+
4277
+ ## Notes
4278
+ - Key decisions and context to preserve
4279
+ - Important file paths discovered
4280
+ \`\`\``,
4199
4281
  inputSchema: todoInputSchema,
4200
- execute: async ({ action, items, todoId, status }) => {
4282
+ execute: async ({ action, items, todoId, status, planName, planContent }) => {
4201
4283
  try {
4202
4284
  switch (action) {
4203
4285
  case "add": {
@@ -4265,6 +4347,81 @@ Best practices:
4265
4347
  itemsRemoved: count
4266
4348
  };
4267
4349
  }
4350
+ // ── Plan actions ─────────────────────────────────────────
4351
+ case "save_plan": {
4352
+ if (!planName) {
4353
+ return { success: false, error: 'planName is required for "save_plan"' };
4354
+ }
4355
+ if (!planContent) {
4356
+ return { success: false, error: 'planContent is required for "save_plan"' };
4357
+ }
4358
+ const dir = ensurePlansDir(options.workingDirectory, options.sessionId);
4359
+ const filename = `${slugify(planName)}.md`;
4360
+ const filePath = join4(dir, filename);
4361
+ await writeFile4(filePath, planContent, "utf-8");
4362
+ return {
4363
+ success: true,
4364
+ action: "save_plan",
4365
+ planName,
4366
+ filename,
4367
+ path: filePath,
4368
+ sizeChars: planContent.length
4369
+ };
4370
+ }
4371
+ case "list_plans": {
4372
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
4373
+ if (!existsSync9(dir)) {
4374
+ return { success: true, action: "list_plans", plans: [], count: 0 };
4375
+ }
4376
+ const files = readdirSync(dir).filter((f) => f.endsWith(".md"));
4377
+ const plans = [];
4378
+ for (const f of files) {
4379
+ try {
4380
+ const content = await readFile6(join4(dir, f), "utf-8");
4381
+ const titleMatch = content.match(/^#\s+(?:Plan:\s*)?(.+)/m);
4382
+ plans.push({
4383
+ name: f.replace(/\.md$/, ""),
4384
+ title: titleMatch?.[1]?.trim() || f.replace(/\.md$/, ""),
4385
+ filename: f,
4386
+ sizeChars: content.length
4387
+ });
4388
+ } catch {
4389
+ }
4390
+ }
4391
+ return { success: true, action: "list_plans", plans, count: plans.length };
4392
+ }
4393
+ case "get_plan": {
4394
+ if (!planName) {
4395
+ return { success: false, error: 'planName is required for "get_plan"' };
4396
+ }
4397
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
4398
+ const filename = `${slugify(planName)}.md`;
4399
+ const filePath = join4(dir, filename);
4400
+ if (!existsSync9(filePath)) {
4401
+ return { success: false, error: `Plan not found: "${planName}" (looked for ${filename})` };
4402
+ }
4403
+ const content = await readFile6(filePath, "utf-8");
4404
+ return {
4405
+ success: true,
4406
+ action: "get_plan",
4407
+ planName,
4408
+ content,
4409
+ sizeChars: content.length
4410
+ };
4411
+ }
4412
+ case "delete_plan": {
4413
+ if (!planName) {
4414
+ return { success: false, error: 'planName is required for "delete_plan"' };
4415
+ }
4416
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
4417
+ const filename = `${slugify(planName)}.md`;
4418
+ const filePath = join4(dir, filename);
4419
+ if (!existsSync9(filePath)) {
4420
+ return { success: false, error: `Plan not found: "${planName}"` };
4421
+ }
4422
+ unlinkSync(filePath);
4423
+ return { success: true, action: "delete_plan", planName, deleted: true };
4424
+ }
4268
4425
  default:
4269
4426
  return {
4270
4427
  success: false,
@@ -4289,6 +4446,21 @@ function formatTodoItem(item) {
4289
4446
  createdAt: item.createdAt.toISOString()
4290
4447
  };
4291
4448
  }
4449
+ async function readSessionPlans(workingDirectory, sessionId) {
4450
+ const dir = getPlansDir(workingDirectory, sessionId);
4451
+ if (!existsSync9(dir)) return [];
4452
+ const files = readdirSync(dir).filter((f) => f.endsWith(".md"));
4453
+ if (files.length === 0) return [];
4454
+ const plans = [];
4455
+ for (const f of files) {
4456
+ try {
4457
+ const content = await readFile6(join4(dir, f), "utf-8");
4458
+ plans.push({ name: f.replace(/\.md$/, ""), content });
4459
+ } catch {
4460
+ }
4461
+ }
4462
+ return plans;
4463
+ }
4292
4464
 
4293
4465
  // src/tools/load-skill.ts
4294
4466
  init_skills();
@@ -4378,7 +4550,7 @@ Once loaded, a skill's content will be available in the conversation context.`,
4378
4550
  import { tool as tool6 } from "ai";
4379
4551
  import { z as z7 } from "zod";
4380
4552
  import { resolve as resolve7, relative as relative5, isAbsolute as isAbsolute3, extname as extname5 } from "path";
4381
- import { existsSync as existsSync10 } from "fs";
4553
+ import { existsSync as existsSync11 } from "fs";
4382
4554
  import { readdir as readdir2, stat as stat2 } from "fs/promises";
4383
4555
  var linterInputSchema = z7.object({
4384
4556
  paths: z7.array(z7.string()).optional().describe("File or directory paths to check for lint errors. If not provided, returns diagnostics for all recently touched files."),
@@ -4446,7 +4618,7 @@ Working directory: ${options.workingDirectory}`,
4446
4618
  const filesToCheck = [];
4447
4619
  for (const path of paths) {
4448
4620
  const absolutePath = isAbsolute3(path) ? path : resolve7(options.workingDirectory, path);
4449
- if (!existsSync10(absolutePath)) {
4621
+ if (!existsSync11(absolutePath)) {
4450
4622
  continue;
4451
4623
  }
4452
4624
  const stats = await stat2(absolutePath);
@@ -4758,17 +4930,17 @@ import { tool as tool9 } from "ai";
4758
4930
  import { z as z10 } from "zod";
4759
4931
  import { exec as exec4 } from "child_process";
4760
4932
  import { promisify as promisify4 } from "util";
4761
- import { readFile as readFile8, stat as stat3, readdir as readdir4 } from "fs/promises";
4933
+ import { readFile as readFile9, stat as stat3, readdir as readdir4 } from "fs/promises";
4762
4934
  import { resolve as resolve9, relative as relative8, isAbsolute as isAbsolute5 } from "path";
4763
- import { existsSync as existsSync13 } from "fs";
4935
+ import { existsSync as existsSync14 } from "fs";
4764
4936
  init_semantic();
4765
4937
 
4766
4938
  // src/tools/code-graph.ts
4767
4939
  import { tool as tool7 } from "ai";
4768
4940
  import { z as z8 } from "zod";
4769
4941
  import { resolve as resolve8, relative as relative7, isAbsolute as isAbsolute4, basename as basename3 } from "path";
4770
- import { readFile as readFile7, readdir as readdir3 } from "fs/promises";
4771
- import { existsSync as existsSync11 } from "fs";
4942
+ import { readFile as readFile8, readdir as readdir3 } from "fs/promises";
4943
+ import { existsSync as existsSync12 } from "fs";
4772
4944
  import { fileURLToPath as fileURLToPath2 } from "url";
4773
4945
  import { execFileSync } from "child_process";
4774
4946
  var codeGraphInputSchema = z8.object({
@@ -4905,7 +5077,7 @@ async function grepForSymbol(symbol, workingDirectory) {
4905
5077
  const ext = entry.name.substring(entry.name.lastIndexOf("."));
4906
5078
  if (!SUPPORTED_EXTS.has(ext)) continue;
4907
5079
  remaining--;
4908
- const content = await readFile7(fullPath, "utf-8");
5080
+ const content = await readFile8(fullPath, "utf-8");
4909
5081
  const lines = content.split("\n");
4910
5082
  for (let i = 0; i < lines.length; i++) {
4911
5083
  if (defPattern.test(lines[i])) {
@@ -4954,7 +5126,7 @@ Working directory: ${options.workingDirectory}`,
4954
5126
  let defSymbol = null;
4955
5127
  if (filePath) {
4956
5128
  const absPath = isAbsolute4(filePath) ? filePath : resolve8(options.workingDirectory, filePath);
4957
- if (!existsSync11(absPath)) {
5129
+ if (!existsSync12(absPath)) {
4958
5130
  return { success: false, error: `File not found: ${filePath}` };
4959
5131
  }
4960
5132
  if (!isSupported(absPath)) {
@@ -4968,7 +5140,7 @@ Working directory: ${options.workingDirectory}`,
4968
5140
  defLine = defSymbol.selectionRange.start.line;
4969
5141
  defChar = defSymbol.selectionRange.start.character;
4970
5142
  } else {
4971
- const content = await readFile7(absPath, "utf-8");
5143
+ const content = await readFile8(absPath, "utf-8");
4972
5144
  const lines2 = content.split("\n");
4973
5145
  const defPattern = new RegExp(
4974
5146
  `(export|function|const|let|var|class|interface|type|enum)\\s+.*\\b${symbol.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\b`
@@ -5381,7 +5553,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
5381
5553
  execute: async ({ path, startLine, endLine }) => {
5382
5554
  try {
5383
5555
  const absolutePath = isAbsolute5(path) ? path : resolve9(workingDirectory, path);
5384
- if (!existsSync13(absolutePath)) {
5556
+ if (!existsSync14(absolutePath)) {
5385
5557
  return {
5386
5558
  success: false,
5387
5559
  error: `File not found: ${path}`
@@ -5394,7 +5566,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
5394
5566
  error: `File too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Use startLine/endLine to read portions.`
5395
5567
  };
5396
5568
  }
5397
- let content = await readFile8(absolutePath, "utf-8");
5569
+ let content = await readFile9(absolutePath, "utf-8");
5398
5570
  if (startLine !== void 0 || endLine !== void 0) {
5399
5571
  const lines = content.split("\n");
5400
5572
  const start = (startLine ?? 1) - 1;
@@ -5425,7 +5597,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
5425
5597
  execute: async ({ path, recursive, maxDepth }) => {
5426
5598
  try {
5427
5599
  const absolutePath = isAbsolute5(path) ? path : resolve9(workingDirectory, path);
5428
- if (!existsSync13(absolutePath)) {
5600
+ if (!existsSync14(absolutePath)) {
5429
5601
  return {
5430
5602
  success: false,
5431
5603
  error: `Directory not found: ${path}`
@@ -5792,8 +5964,8 @@ function createTaskFailedTool(options) {
5792
5964
  // src/tools/upload-file.ts
5793
5965
  import { tool as tool12 } from "ai";
5794
5966
  import { z as z13 } from "zod";
5795
- import { readFile as readFile9, stat as stat4 } from "fs/promises";
5796
- import { join as join6, basename as basename4, extname as extname7 } from "path";
5967
+ import { readFile as readFile10, stat as stat4 } from "fs/promises";
5968
+ import { join as join7, basename as basename4, extname as extname7 } from "path";
5797
5969
  var MIME_TYPES = {
5798
5970
  ".txt": "text/plain",
5799
5971
  ".md": "text/markdown",
@@ -5835,7 +6007,7 @@ function createUploadFileTool(options) {
5835
6007
  error: "File upload is not available \u2014 remote server with GCS is not configured."
5836
6008
  };
5837
6009
  }
5838
- const fullPath = input.path.startsWith("/") ? input.path : join6(options.workingDirectory, input.path);
6010
+ const fullPath = input.path.startsWith("/") ? input.path : join7(options.workingDirectory, input.path);
5839
6011
  try {
5840
6012
  await stat4(fullPath);
5841
6013
  } catch {
@@ -5853,7 +6025,7 @@ function createUploadFileTool(options) {
5853
6025
  contentType,
5854
6026
  "general"
5855
6027
  );
5856
- const fileData = await readFile9(fullPath);
6028
+ const fileData = await readFile10(fullPath);
5857
6029
  const putRes = await fetch(uploadInfo.uploadUrl, {
5858
6030
  method: "PUT",
5859
6031
  headers: { "Content-Type": contentType },
@@ -5908,7 +6080,8 @@ async function createTools(options) {
5908
6080
  onProgress: options.onWriteFileProgress
5909
6081
  }),
5910
6082
  todo: createTodoTool({
5911
- sessionId: options.sessionId
6083
+ sessionId: options.sessionId,
6084
+ workingDirectory: options.workingDirectory
5912
6085
  }),
5913
6086
  load_skill: createLoadSkillTool({
5914
6087
  sessionId: options.sessionId,
@@ -6006,6 +6179,8 @@ async function buildSystemPrompt(options) {
6006
6179
  }
6007
6180
  const todos = await todoQueries.getBySession(sessionId);
6008
6181
  const todosContext = formatTodosForContext(todos);
6182
+ const plans = await readSessionPlans(workingDirectory, sessionId);
6183
+ const plansContext = formatPlansForContext(plans);
6009
6184
  const platform3 = process.platform === "win32" ? "Windows" : process.platform === "darwin" ? "macOS" : "Linux";
6010
6185
  const currentDate = (/* @__PURE__ */ new Date()).toLocaleDateString("en-US", { weekday: "long", year: "numeric", month: "long", day: "numeric" });
6011
6186
  const searchInstructions = getSearchInstructions();
@@ -6022,7 +6197,7 @@ You have access to powerful tools for:
6022
6197
  - **read_file**: Read file contents to understand code and context
6023
6198
  - **write_file**: Create new files or edit existing ones (supports targeted string replacement)
6024
6199
  - **linter**: Check files for type errors and lint issues (TypeScript, JavaScript, TSX, JSX)
6025
- - **todo**: Manage your task list to track progress on complex operations
6200
+ - **todo**: Manage your task list AND persistent plans for complex multi-phase operations
6026
6201
  - **load_skill**: Load specialized knowledge documents for specific tasks
6027
6202
  - **explore_agent**: Explore agent for semantic discovery - for exploratory questions and finding code by meaning
6028
6203
  - **code_graph**: Inspect a symbol's type hierarchy and usage graph via the TypeScript language server
@@ -6031,9 +6206,23 @@ You have access to powerful tools for:
6031
6206
 
6032
6207
  IMPORTANT: If you have zero context of where you are working, always explore it first to understand the structure before doing things for the user.
6033
6208
 
6034
- Use the TODO tool to manage your task list to track progress on complex operations. Always ask the user what they want to do specifically before doing it, and make a plan.
6035
- Step 1 of the plan should be researching files and understanding the components/structure of what you're working on (if you don't already have context), then after u have done that, plan out the rest of the tasks u need to do.
6036
- You can clear the todo and restart it, and do multiple things inside of one session.
6209
+ ### Planning & Task Management
6210
+ Use the **todo tool** to manage both immediate tasks AND persistent plans:
6211
+
6212
+ **For simple tasks (< 5 steps):** Just use regular todos (add/mark/clear).
6213
+
6214
+ **For complex, multi-phase tasks:** Create a persistent **plan** first.
6215
+ 1. Research the codebase to understand what you're working with
6216
+ 2. Create a plan with save_plan \u2014 a structured markdown document with phases and subtasks
6217
+ 3. Create todos from the first uncompleted phase
6218
+ 4. Work through the todos
6219
+ 5. When done, update the plan (mark completed phases with [x]), save it again
6220
+ 6. Create new todos from the next uncompleted phase
6221
+ 7. Repeat until the plan is fully complete
6222
+
6223
+ Plans persist on disk and are always injected into your context \u2014 they survive context compaction even in very long sessions. You can have multiple plans active at once (e.g., one for frontend, one for backend).
6224
+
6225
+ You can clear the todo list and restart it, and do multiple things inside of one session.
6037
6226
 
6038
6227
  ### bash Tool
6039
6228
  The bash tool runs commands in the terminal. Every command runs in its own session with logs saved to disk.
@@ -6257,6 +6446,8 @@ ${onDemandSkillsContext}
6257
6446
  ## Current Task List
6258
6447
  ${todosContext}
6259
6448
 
6449
+ ${plansContext}
6450
+
6260
6451
  ${customInstructions ? `## Custom Instructions
6261
6452
  ${customInstructions}` : ""}
6262
6453
 
@@ -6280,6 +6471,37 @@ function formatTodosForContext(todos) {
6280
6471
  }
6281
6472
  return lines.join("\n");
6282
6473
  }
6474
+ var MAX_PLAN_CHARS = 3e4;
6475
+ var MAX_TOTAL_PLANS_CHARS = 6e4;
6476
+ function formatPlansForContext(plans) {
6477
+ if (plans.length === 0) return "";
6478
+ let totalChars = 0;
6479
+ const sections = [];
6480
+ sections.push(`## Persistent Plans (${plans.length})`);
6481
+ sections.push("");
6482
+ sections.push("These plans persist across context compaction \u2014 they are always available.");
6483
+ sections.push("When you finish your current todos, check these plans for the next uncompleted phase,");
6484
+ sections.push("update the plan (mark completed items with [x]), then create new todos for the next phase.");
6485
+ sections.push("");
6486
+ for (const plan of plans) {
6487
+ let content = plan.content;
6488
+ if (content.length > MAX_PLAN_CHARS) {
6489
+ content = content.slice(0, MAX_PLAN_CHARS) + `
6490
+
6491
+ ... [plan truncated \u2014 ${content.length - MAX_PLAN_CHARS} chars omitted. Use get_plan to read the full plan.]`;
6492
+ }
6493
+ if (totalChars + content.length > MAX_TOTAL_PLANS_CHARS) {
6494
+ sections.push(`### \u{1F4CB} Plan: ${plan.name} [truncated \u2014 use get_plan("${plan.name}") to read]`);
6495
+ continue;
6496
+ }
6497
+ sections.push(`### \u{1F4CB} Plan: ${plan.name}`);
6498
+ sections.push("");
6499
+ sections.push(content);
6500
+ sections.push("");
6501
+ totalChars += content.length;
6502
+ }
6503
+ return sections.join("\n");
6504
+ }
6283
6505
  function buildTaskPromptAddendum(outputSchema) {
6284
6506
  return `
6285
6507
  ## Task Mode
@@ -6351,7 +6573,7 @@ Before calling \`complete_task\`, you MUST verify your work completely. Do not j
6351
6573
  - **load_skill**: Load specialized skills/knowledge relevant to the task. Check what skills are available and use them.
6352
6574
  - **explore_agent**: Use for codebase exploration and understanding before making changes.
6353
6575
  - **code_graph**: Use to understand type hierarchies, references, and impact before refactoring.
6354
- - **todo**: Track your progress on multi-step tasks so you don't miss steps.
6576
+ - **todo**: Track your progress on multi-step tasks so you don't miss steps. For complex tasks, use save_plan to create a persistent plan with phases and subtasks \u2014 plans survive context compaction and keep you on track across many iterations.
6355
6577
  - **bash**: Full shell access \u2014 run builds, tests, dev servers, open browsers, curl endpoints, anything.
6356
6578
  - **upload_file**: Upload files (screenshots, reports, exports) to cloud storage. Use this to include screenshots of completed work in your task result \u2014 visual proof is very helpful.
6357
6579
 
@@ -7104,12 +7326,14 @@ ${prompt}` });
7104
7326
  const config = getConfig();
7105
7327
  const maxIterations = options.taskConfig.maxIterations ?? 50;
7106
7328
  const webhookUrl = options.taskConfig.webhookUrl;
7329
+ const parentTaskId = options.taskConfig.parentTaskId;
7107
7330
  const fireWebhook = (type, data) => {
7108
7331
  if (!webhookUrl) return;
7109
7332
  sendWebhook(webhookUrl, {
7110
7333
  type,
7111
7334
  taskId: this.session.id,
7112
7335
  sessionId: this.session.id,
7336
+ ...parentTaskId ? { parentTaskId } : {},
7113
7337
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7114
7338
  data
7115
7339
  });
@@ -7302,14 +7526,14 @@ ${taskAddendum}`;
7302
7526
  for (const step of resultSteps) {
7303
7527
  if (step.toolCalls) {
7304
7528
  for (const tc of step.toolCalls) {
7305
- options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.args });
7306
- fireWebhook("task.tool_call", { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.args });
7529
+ options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.input });
7530
+ fireWebhook("task.tool_call", { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.input });
7307
7531
  }
7308
7532
  }
7309
7533
  if (step.toolResults) {
7310
7534
  for (const tr of step.toolResults) {
7311
- options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.result });
7312
- fireWebhook("task.tool_result", { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.result });
7535
+ options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.output });
7536
+ fireWebhook("task.tool_result", { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.output });
7313
7537
  }
7314
7538
  }
7315
7539
  }
@@ -7401,14 +7625,14 @@ ${taskAddendum}`;
7401
7625
  const result = await recorder.encode();
7402
7626
  recorder.clear();
7403
7627
  if (!result) return [];
7404
- const { readFile: readFile11, unlink: unlink3 } = await import("fs/promises");
7628
+ const { readFile: readFile12, unlink: unlink3 } = await import("fs/promises");
7405
7629
  const uploadInfo = await storageQueries2.getUploadUrl(
7406
7630
  this.session.id,
7407
7631
  `browser-recording-${Date.now()}.mp4`,
7408
7632
  "video/mp4",
7409
7633
  "browser-recording"
7410
7634
  );
7411
- const fileData = await readFile11(result.path);
7635
+ const fileData = await readFile12(result.path);
7412
7636
  await fetch(uploadInfo.uploadUrl, {
7413
7637
  method: "PUT",
7414
7638
  headers: { "Content-Type": "video/mp4" },
@@ -7434,12 +7658,12 @@ ${taskAddendum}`;
7434
7658
  try {
7435
7659
  const { isRemoteConfigured: isRemoteConfigured2, storageQueries: storageQueries2 } = await Promise.resolve().then(() => (init_remote(), remote_exports));
7436
7660
  if (!isRemoteConfigured2()) return [];
7437
- const { readFile: readFile11 } = await import("fs/promises");
7438
- const { join: join12, basename: basename6 } = await import("path");
7661
+ const { readFile: readFile12 } = await import("fs/promises");
7662
+ const { join: join13, basename: basename6 } = await import("path");
7439
7663
  const urls = [];
7440
7664
  for (const filePath of filePaths) {
7441
7665
  try {
7442
- const fullPath = filePath.startsWith("/") ? filePath : join12(this.session.workingDirectory, filePath);
7666
+ const fullPath = filePath.startsWith("/") ? filePath : join13(this.session.workingDirectory, filePath);
7443
7667
  const fileName = basename6(fullPath);
7444
7668
  const ext = fileName.split(".").pop()?.toLowerCase() || "";
7445
7669
  const mimeMap = {
@@ -7464,7 +7688,7 @@ ${taskAddendum}`;
7464
7688
  contentType,
7465
7689
  "task-output"
7466
7690
  );
7467
- const fileData = await readFile11(fullPath);
7691
+ const fileData = await readFile12(fullPath);
7468
7692
  await fetch(uploadInfo.uploadUrl, {
7469
7693
  method: "PUT",
7470
7694
  headers: { "Content-Type": contentType },
@@ -7614,8 +7838,8 @@ import { Hono as Hono6 } from "hono";
7614
7838
  import { serve } from "@hono/node-server";
7615
7839
  import { cors } from "hono/cors";
7616
7840
  import { logger } from "hono/logger";
7617
- import { existsSync as existsSync16, mkdirSync as mkdirSync6, writeFileSync as writeFileSync5 } from "fs";
7618
- import { resolve as resolve10, dirname as dirname7, join as join11 } from "path";
7841
+ import { existsSync as existsSync17, mkdirSync as mkdirSync7, writeFileSync as writeFileSync5 } from "fs";
7842
+ import { resolve as resolve10, dirname as dirname7, join as join12 } from "path";
7619
7843
  import { spawn as spawn2 } from "child_process";
7620
7844
  import { createServer as createNetServer } from "net";
7621
7845
  import { fileURLToPath as fileURLToPath4 } from "url";
@@ -7625,9 +7849,9 @@ init_db();
7625
7849
  import { Hono } from "hono";
7626
7850
  import { zValidator } from "@hono/zod-validator";
7627
7851
  import { z as z15 } from "zod";
7628
- import { existsSync as existsSync14, mkdirSync as mkdirSync4, writeFileSync as writeFileSync3, readdirSync, statSync as statSync2, unlinkSync } from "fs";
7852
+ import { existsSync as existsSync15, mkdirSync as mkdirSync5, writeFileSync as writeFileSync3, readdirSync as readdirSync2, statSync as statSync2, unlinkSync as unlinkSync2 } from "fs";
7629
7853
  import { readdir as readdir6 } from "fs/promises";
7630
- import { join as join8, basename as basename5, extname as extname8, relative as relative9 } from "path";
7854
+ import { join as join9, basename as basename5, extname as extname8, relative as relative9 } from "path";
7631
7855
  import { nanoid as nanoid5 } from "nanoid";
7632
7856
  init_config();
7633
7857
 
@@ -8089,12 +8313,12 @@ sessions.get("/:id/diff/:filePath", async (c) => {
8089
8313
  });
8090
8314
  function getAttachmentsDir(sessionId) {
8091
8315
  const appDataDir = getAppDataDirectory();
8092
- return join8(appDataDir, "attachments", sessionId);
8316
+ return join9(appDataDir, "attachments", sessionId);
8093
8317
  }
8094
8318
  function ensureAttachmentsDir(sessionId) {
8095
8319
  const dir = getAttachmentsDir(sessionId);
8096
- if (!existsSync14(dir)) {
8097
- mkdirSync4(dir, { recursive: true });
8320
+ if (!existsSync15(dir)) {
8321
+ mkdirSync5(dir, { recursive: true });
8098
8322
  }
8099
8323
  return dir;
8100
8324
  }
@@ -8105,12 +8329,12 @@ sessions.get("/:id/attachments", async (c) => {
8105
8329
  return c.json({ error: "Session not found" }, 404);
8106
8330
  }
8107
8331
  const dir = getAttachmentsDir(sessionId);
8108
- if (!existsSync14(dir)) {
8332
+ if (!existsSync15(dir)) {
8109
8333
  return c.json({ sessionId, attachments: [], count: 0 });
8110
8334
  }
8111
- const files = readdirSync(dir);
8335
+ const files = readdirSync2(dir);
8112
8336
  const attachments = files.map((filename) => {
8113
- const filePath = join8(dir, filename);
8337
+ const filePath = join9(dir, filename);
8114
8338
  const stats = statSync2(filePath);
8115
8339
  return {
8116
8340
  id: filename.split("_")[0],
@@ -8145,7 +8369,7 @@ sessions.post("/:id/attachments", async (c) => {
8145
8369
  const id = nanoid5(10);
8146
8370
  const ext = extname8(file.name) || "";
8147
8371
  const safeFilename = `${id}_${basename5(file.name).replace(/[^a-zA-Z0-9._-]/g, "_")}`;
8148
- const filePath = join8(dir, safeFilename);
8372
+ const filePath = join9(dir, safeFilename);
8149
8373
  const arrayBuffer = await file.arrayBuffer();
8150
8374
  writeFileSync3(filePath, Buffer.from(arrayBuffer));
8151
8375
  return c.json({
@@ -8171,7 +8395,7 @@ sessions.post("/:id/attachments", async (c) => {
8171
8395
  const id = nanoid5(10);
8172
8396
  const ext = extname8(body.filename) || "";
8173
8397
  const safeFilename = `${id}_${basename5(body.filename).replace(/[^a-zA-Z0-9._-]/g, "_")}`;
8174
- const filePath = join8(dir, safeFilename);
8398
+ const filePath = join9(dir, safeFilename);
8175
8399
  let base64Data = body.data;
8176
8400
  if (base64Data.includes(",")) {
8177
8401
  base64Data = base64Data.split(",")[1];
@@ -8200,16 +8424,16 @@ sessions.delete("/:id/attachments/:attachmentId", async (c) => {
8200
8424
  return c.json({ error: "Session not found" }, 404);
8201
8425
  }
8202
8426
  const dir = getAttachmentsDir(sessionId);
8203
- if (!existsSync14(dir)) {
8427
+ if (!existsSync15(dir)) {
8204
8428
  return c.json({ error: "Attachment not found" }, 404);
8205
8429
  }
8206
- const files = readdirSync(dir);
8430
+ const files = readdirSync2(dir);
8207
8431
  const file = files.find((f) => f.startsWith(attachmentId + "_"));
8208
8432
  if (!file) {
8209
8433
  return c.json({ error: "Attachment not found" }, 404);
8210
8434
  }
8211
- const filePath = join8(dir, file);
8212
- unlinkSync(filePath);
8435
+ const filePath = join9(dir, file);
8436
+ unlinkSync2(filePath);
8213
8437
  return c.json({ success: true, id: attachmentId });
8214
8438
  });
8215
8439
  var filesQuerySchema = z15.object({
@@ -8291,7 +8515,7 @@ async function listWorkspaceFiles(baseDir, currentDir, query, limit, results = [
8291
8515
  const entries = await readdir6(currentDir, { withFileTypes: true });
8292
8516
  for (const entry of entries) {
8293
8517
  if (results.length >= limit * 2) break;
8294
- const fullPath = join8(currentDir, entry.name);
8518
+ const fullPath = join9(currentDir, entry.name);
8295
8519
  const relativePath = relative9(baseDir, fullPath);
8296
8520
  if (entry.isDirectory() && IGNORED_DIRECTORIES.has(entry.name)) {
8297
8521
  continue;
@@ -8339,7 +8563,7 @@ sessions.get(
8339
8563
  return c.json({ error: "Session not found" }, 404);
8340
8564
  }
8341
8565
  const workingDirectory = session.workingDirectory;
8342
- if (!existsSync14(workingDirectory)) {
8566
+ if (!existsSync15(workingDirectory)) {
8343
8567
  return c.json({
8344
8568
  sessionId,
8345
8569
  workingDirectory,
@@ -8450,8 +8674,8 @@ init_db();
8450
8674
  import { Hono as Hono2 } from "hono";
8451
8675
  import { zValidator as zValidator2 } from "@hono/zod-validator";
8452
8676
  import { z as z16 } from "zod";
8453
- import { existsSync as existsSync15, mkdirSync as mkdirSync5, writeFileSync as writeFileSync4 } from "fs";
8454
- import { join as join9 } from "path";
8677
+ import { existsSync as existsSync16, mkdirSync as mkdirSync6, writeFileSync as writeFileSync4 } from "fs";
8678
+ import { join as join10 } from "path";
8455
8679
  init_config();
8456
8680
 
8457
8681
  // src/server/resumable-stream.ts
@@ -8657,12 +8881,12 @@ var rejectSchema = z16.object({
8657
8881
  var streamAbortControllers = /* @__PURE__ */ new Map();
8658
8882
  function getAttachmentsDirectory(sessionId) {
8659
8883
  const appDataDir = getAppDataDirectory();
8660
- return join9(appDataDir, "attachments", sessionId);
8884
+ return join10(appDataDir, "attachments", sessionId);
8661
8885
  }
8662
8886
  async function saveAttachmentToDisk(sessionId, attachment, index) {
8663
8887
  const attachmentsDir = getAttachmentsDirectory(sessionId);
8664
- if (!existsSync15(attachmentsDir)) {
8665
- mkdirSync5(attachmentsDir, { recursive: true });
8888
+ if (!existsSync16(attachmentsDir)) {
8889
+ mkdirSync6(attachmentsDir, { recursive: true });
8666
8890
  }
8667
8891
  let filename = attachment.filename;
8668
8892
  if (!filename) {
@@ -8680,7 +8904,7 @@ async function saveAttachmentToDisk(sessionId, attachment, index) {
8680
8904
  attachment.mediaType = resized.mediaType;
8681
8905
  attachment.data = buffer.toString("base64");
8682
8906
  }
8683
- const filePath = join9(attachmentsDir, filename);
8907
+ const filePath = join10(attachmentsDir, filename);
8684
8908
  writeFileSync4(filePath, buffer);
8685
8909
  return filePath;
8686
8910
  }
@@ -9621,26 +9845,26 @@ init_config();
9621
9845
  import { Hono as Hono3 } from "hono";
9622
9846
  import { zValidator as zValidator3 } from "@hono/zod-validator";
9623
9847
  import { z as z17 } from "zod";
9624
- import { readFileSync as readFileSync6 } from "fs";
9848
+ import { readFileSync as readFileSync7 } from "fs";
9625
9849
  import { fileURLToPath as fileURLToPath3 } from "url";
9626
- import { dirname as dirname6, join as join10 } from "path";
9850
+ import { dirname as dirname6, join as join11 } from "path";
9627
9851
  var __filename = fileURLToPath3(import.meta.url);
9628
9852
  var __dirname = dirname6(__filename);
9629
9853
  var possiblePaths = [
9630
- join10(__dirname, "../package.json"),
9854
+ join11(__dirname, "../package.json"),
9631
9855
  // From dist/server -> dist/../package.json
9632
- join10(__dirname, "../../package.json"),
9856
+ join11(__dirname, "../../package.json"),
9633
9857
  // From dist/server (if nested differently)
9634
- join10(__dirname, "../../../package.json"),
9858
+ join11(__dirname, "../../../package.json"),
9635
9859
  // From src/server/routes (development)
9636
- join10(process.cwd(), "package.json")
9860
+ join11(process.cwd(), "package.json")
9637
9861
  // From current working directory
9638
9862
  ];
9639
9863
  var currentVersion = "0.0.0";
9640
9864
  var packageName = "sparkecoder";
9641
9865
  for (const packageJsonPath of possiblePaths) {
9642
9866
  try {
9643
- const packageJson = JSON.parse(readFileSync6(packageJsonPath, "utf-8"));
9867
+ const packageJson = JSON.parse(readFileSync7(packageJsonPath, "utf-8"));
9644
9868
  if (packageJson.name === "sparkecoder") {
9645
9869
  currentVersion = packageJson.version || "0.0.0";
9646
9870
  packageName = packageJson.name || "sparkecoder";
@@ -10210,6 +10434,7 @@ tasks.post(
10210
10434
  type: "task.failed",
10211
10435
  taskId,
10212
10436
  sessionId: taskId,
10437
+ ...taskConfig.parentTaskId ? { parentTaskId: taskConfig.parentTaskId } : {},
10213
10438
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
10214
10439
  data: { status: "failed", error: errorMsg }
10215
10440
  });
@@ -10299,6 +10524,7 @@ tasks.post("/:id/cancel", async (c) => {
10299
10524
  type: "task.failed",
10300
10525
  taskId: id,
10301
10526
  sessionId: id,
10527
+ ...task.parentTaskId ? { parentTaskId: task.parentTaskId } : {},
10302
10528
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
10303
10529
  data: { status: "failed", error: "Task cancelled by user" }
10304
10530
  });
@@ -10397,11 +10623,11 @@ function getWebDirectory() {
10397
10623
  try {
10398
10624
  const currentDir = dirname7(fileURLToPath4(import.meta.url));
10399
10625
  const webDir = resolve10(currentDir, "..", "web");
10400
- if (existsSync16(webDir) && existsSync16(join11(webDir, "package.json"))) {
10626
+ if (existsSync17(webDir) && existsSync17(join12(webDir, "package.json"))) {
10401
10627
  return webDir;
10402
10628
  }
10403
10629
  const altWebDir = resolve10(currentDir, "..", "..", "web");
10404
- if (existsSync16(altWebDir) && existsSync16(join11(altWebDir, "package.json"))) {
10630
+ if (existsSync17(altWebDir) && existsSync17(join12(altWebDir, "package.json"))) {
10405
10631
  return altWebDir;
10406
10632
  }
10407
10633
  return null;
@@ -10459,23 +10685,23 @@ async function findWebPort(preferredPort) {
10459
10685
  return { port: preferredPort, alreadyRunning: false };
10460
10686
  }
10461
10687
  function hasProductionBuild(webDir) {
10462
- const buildIdPath = join11(webDir, ".next", "BUILD_ID");
10463
- return existsSync16(buildIdPath);
10688
+ const buildIdPath = join12(webDir, ".next", "BUILD_ID");
10689
+ return existsSync17(buildIdPath);
10464
10690
  }
10465
10691
  function hasSourceFiles(webDir) {
10466
- const appDir = join11(webDir, "src", "app");
10467
- const pagesDir = join11(webDir, "src", "pages");
10468
- const rootAppDir = join11(webDir, "app");
10469
- const rootPagesDir = join11(webDir, "pages");
10470
- return existsSync16(appDir) || existsSync16(pagesDir) || existsSync16(rootAppDir) || existsSync16(rootPagesDir);
10692
+ const appDir = join12(webDir, "src", "app");
10693
+ const pagesDir = join12(webDir, "src", "pages");
10694
+ const rootAppDir = join12(webDir, "app");
10695
+ const rootPagesDir = join12(webDir, "pages");
10696
+ return existsSync17(appDir) || existsSync17(pagesDir) || existsSync17(rootAppDir) || existsSync17(rootPagesDir);
10471
10697
  }
10472
10698
  function getStandaloneServerPath(webDir) {
10473
10699
  const possiblePaths2 = [
10474
- join11(webDir, ".next", "standalone", "server.js"),
10475
- join11(webDir, ".next", "standalone", "web", "server.js")
10700
+ join12(webDir, ".next", "standalone", "server.js"),
10701
+ join12(webDir, ".next", "standalone", "web", "server.js")
10476
10702
  ];
10477
10703
  for (const serverPath of possiblePaths2) {
10478
- if (existsSync16(serverPath)) {
10704
+ if (existsSync17(serverPath)) {
10479
10705
  return serverPath;
10480
10706
  }
10481
10707
  }
@@ -10515,13 +10741,13 @@ async function startWebUI(apiPort, webPort = DEFAULT_WEB_PORT, quiet = false, pu
10515
10741
  if (!quiet) console.log(` \u2713 Web UI already running at http://localhost:${actualPort}`);
10516
10742
  return { process: null, port: actualPort };
10517
10743
  }
10518
- const usePnpm = existsSync16(join11(webDir, "pnpm-lock.yaml"));
10519
- const useNpm = !usePnpm && existsSync16(join11(webDir, "package-lock.json"));
10744
+ const usePnpm = existsSync17(join12(webDir, "pnpm-lock.yaml"));
10745
+ const useNpm = !usePnpm && existsSync17(join12(webDir, "package-lock.json"));
10520
10746
  const pkgManager = usePnpm ? "pnpm" : useNpm ? "npm" : "npx";
10521
10747
  const { NODE_OPTIONS, TSX_TSCONFIG_PATH, ...cleanEnv } = process.env;
10522
10748
  const apiUrl = publicUrl || `http://127.0.0.1:${apiPort}`;
10523
10749
  const runtimeConfig = { apiBaseUrl: apiUrl };
10524
- const runtimeConfigPath = join11(webDir, "runtime-config.json");
10750
+ const runtimeConfigPath = join12(webDir, "runtime-config.json");
10525
10751
  try {
10526
10752
  writeFileSync5(runtimeConfigPath, JSON.stringify(runtimeConfig, null, 2));
10527
10753
  if (!quiet) console.log(` \u{1F4DD} Runtime config written to ${runtimeConfigPath}`);
@@ -10714,8 +10940,8 @@ async function startServer(options = {}) {
10714
10940
  if (options.workingDirectory) {
10715
10941
  config.resolvedWorkingDirectory = options.workingDirectory;
10716
10942
  }
10717
- if (!existsSync16(config.resolvedWorkingDirectory)) {
10718
- mkdirSync6(config.resolvedWorkingDirectory, { recursive: true });
10943
+ if (!existsSync17(config.resolvedWorkingDirectory)) {
10944
+ mkdirSync7(config.resolvedWorkingDirectory, { recursive: true });
10719
10945
  if (!options.quiet) console.log(`\u{1F4C1} Created agent workspace: ${config.resolvedWorkingDirectory}`);
10720
10946
  }
10721
10947
  if (!config.resolvedRemoteServer.url) {