sparkecoder 0.1.81 → 0.1.83

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90) hide show
  1. package/dist/agent/index.js +284 -71
  2. package/dist/agent/index.js.map +1 -1
  3. package/dist/cli.js +370 -144
  4. package/dist/cli.js.map +1 -1
  5. package/dist/index.js +358 -132
  6. package/dist/index.js.map +1 -1
  7. package/dist/server/index.js +358 -132
  8. package/dist/server/index.js.map +1 -1
  9. package/dist/skills/default/browser.md +30 -0
  10. package/dist/tools/index.d.ts +117 -1
  11. package/dist/tools/index.js +183 -41
  12. package/dist/tools/index.js.map +1 -1
  13. package/package.json +1 -1
  14. package/src/skills/default/browser.md +30 -0
  15. package/web/.next/BUILD_ID +1 -1
  16. package/web/.next/standalone/web/.next/BUILD_ID +1 -1
  17. package/web/.next/standalone/web/.next/build-manifest.json +2 -2
  18. package/web/.next/standalone/web/.next/prerender-manifest.json +3 -3
  19. package/web/.next/standalone/web/.next/server/app/_global-error.html +2 -2
  20. package/web/.next/standalone/web/.next/server/app/_global-error.rsc +1 -1
  21. package/web/.next/standalone/web/.next/server/app/_global-error.segments/__PAGE__.segment.rsc +1 -1
  22. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_full.segment.rsc +1 -1
  23. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_head.segment.rsc +1 -1
  24. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_index.segment.rsc +1 -1
  25. package/web/.next/standalone/web/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -1
  26. package/web/.next/standalone/web/.next/server/app/_not-found.html +1 -1
  27. package/web/.next/standalone/web/.next/server/app/_not-found.rsc +1 -1
  28. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_full.segment.rsc +1 -1
  29. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_head.segment.rsc +1 -1
  30. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_index.segment.rsc +1 -1
  31. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +1 -1
  32. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found.segment.rsc +1 -1
  33. package/web/.next/standalone/web/.next/server/app/_not-found.segments/_tree.segment.rsc +1 -1
  34. package/web/.next/standalone/web/.next/server/app/docs/installation.html +2 -2
  35. package/web/.next/standalone/web/.next/server/app/docs/installation.rsc +1 -1
  36. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_full.segment.rsc +1 -1
  37. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_head.segment.rsc +1 -1
  38. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_index.segment.rsc +1 -1
  39. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_tree.segment.rsc +1 -1
  40. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation/__PAGE__.segment.rsc +1 -1
  41. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation.segment.rsc +1 -1
  42. package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs.segment.rsc +1 -1
  43. package/web/.next/standalone/web/.next/server/app/docs/skills.html +2 -2
  44. package/web/.next/standalone/web/.next/server/app/docs/skills.rsc +1 -1
  45. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_full.segment.rsc +1 -1
  46. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_head.segment.rsc +1 -1
  47. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_index.segment.rsc +1 -1
  48. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_tree.segment.rsc +1 -1
  49. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills/__PAGE__.segment.rsc +1 -1
  50. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills.segment.rsc +1 -1
  51. package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs.segment.rsc +1 -1
  52. package/web/.next/standalone/web/.next/server/app/docs/tools.html +2 -2
  53. package/web/.next/standalone/web/.next/server/app/docs/tools.rsc +1 -1
  54. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_full.segment.rsc +1 -1
  55. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_head.segment.rsc +1 -1
  56. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_index.segment.rsc +1 -1
  57. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_tree.segment.rsc +1 -1
  58. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools/__PAGE__.segment.rsc +1 -1
  59. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools.segment.rsc +1 -1
  60. package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs.segment.rsc +1 -1
  61. package/web/.next/standalone/web/.next/server/app/docs.html +2 -2
  62. package/web/.next/standalone/web/.next/server/app/docs.rsc +1 -1
  63. package/web/.next/standalone/web/.next/server/app/docs.segments/_full.segment.rsc +1 -1
  64. package/web/.next/standalone/web/.next/server/app/docs.segments/_head.segment.rsc +1 -1
  65. package/web/.next/standalone/web/.next/server/app/docs.segments/_index.segment.rsc +1 -1
  66. package/web/.next/standalone/web/.next/server/app/docs.segments/_tree.segment.rsc +1 -1
  67. package/web/.next/standalone/web/.next/server/app/docs.segments/docs/__PAGE__.segment.rsc +1 -1
  68. package/web/.next/standalone/web/.next/server/app/docs.segments/docs.segment.rsc +1 -1
  69. package/web/.next/standalone/web/.next/server/app/index.html +1 -1
  70. package/web/.next/standalone/web/.next/server/app/index.rsc +1 -1
  71. package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p/__PAGE__.segment.rsc +1 -1
  72. package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p.segment.rsc +1 -1
  73. package/web/.next/standalone/web/.next/server/app/index.segments/_full.segment.rsc +1 -1
  74. package/web/.next/standalone/web/.next/server/app/index.segments/_head.segment.rsc +1 -1
  75. package/web/.next/standalone/web/.next/server/app/index.segments/_index.segment.rsc +1 -1
  76. package/web/.next/standalone/web/.next/server/app/index.segments/_tree.segment.rsc +1 -1
  77. package/web/.next/standalone/web/.next/server/pages/404.html +1 -1
  78. package/web/.next/standalone/web/.next/server/pages/500.html +2 -2
  79. package/web/.next/standalone/web/.next/server/server-reference-manifest.js +1 -1
  80. package/web/.next/standalone/web/.next/server/server-reference-manifest.json +1 -1
  81. package/web/.next/standalone/web/package-lock.json +3 -3
  82. /package/web/.next/standalone/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  83. /package/web/.next/standalone/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  84. /package/web/.next/standalone/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
  85. /package/web/.next/standalone/web/.next/static/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  86. /package/web/.next/standalone/web/.next/static/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  87. /package/web/.next/standalone/web/.next/static/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
  88. /package/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_buildManifest.js +0 -0
  89. /package/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_clientMiddlewareManifest.json +0 -0
  90. /package/web/.next/static/{CAGGCb0khU_QcA3lh6Rk6 → aCZCpTkVv_k-RisOFPegk}/_ssgManifest.js +0 -0
package/dist/cli.js CHANGED
@@ -883,27 +883,38 @@ function createDefaultConfig() {
883
883
  };
884
884
  }
885
885
  function loadStoredAuthKey() {
886
- const keysPath = join(getAppDataDirectory(), AUTH_KEY_FILE);
887
- if (!existsSync(keysPath)) {
888
- return null;
889
- }
890
- try {
891
- const content = readFileSync(keysPath, "utf-8");
892
- const data = JSON.parse(content);
893
- return data.authKey || null;
894
- } catch {
895
- return null;
886
+ const locations = [
887
+ join(process.cwd(), ".sparkecoder", AUTH_KEY_FILE),
888
+ join(getAppDataDirectory(), AUTH_KEY_FILE)
889
+ ];
890
+ for (const keysPath of locations) {
891
+ if (!existsSync(keysPath)) continue;
892
+ try {
893
+ const content = readFileSync(keysPath, "utf-8");
894
+ const data = JSON.parse(content);
895
+ if (data.authKey) return data.authKey;
896
+ } catch {
897
+ }
896
898
  }
899
+ return null;
897
900
  }
898
901
  function saveAuthKey(authKey3, userId) {
899
- const appDir = ensureAppDataDirectory();
900
- const keysPath = join(appDir, AUTH_KEY_FILE);
901
902
  const data = {
902
903
  authKey: authKey3,
903
904
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
904
905
  userId
905
906
  };
906
- writeFileSync(keysPath, JSON.stringify(data, null, 2), { mode: 384 });
907
+ const json = JSON.stringify(data, null, 2);
908
+ const appDir = ensureAppDataDirectory();
909
+ writeFileSync(join(appDir, AUTH_KEY_FILE), json, { mode: 384 });
910
+ try {
911
+ const workspaceAuthDir = join(process.cwd(), ".sparkecoder");
912
+ if (!existsSync(workspaceAuthDir)) {
913
+ mkdirSync(workspaceAuthDir, { recursive: true });
914
+ }
915
+ writeFileSync(join(workspaceAuthDir, AUTH_KEY_FILE), json, { mode: 384 });
916
+ } catch {
917
+ }
907
918
  }
908
919
  async function registerWithRemoteServer(serverUrl, name) {
909
920
  const response = await fetch(`${serverUrl}/auth/register`, {
@@ -1052,9 +1063,9 @@ __export(skills_exports, {
1052
1063
  loadSkillContent: () => loadSkillContent,
1053
1064
  loadSkillsFromDirectory: () => loadSkillsFromDirectory
1054
1065
  });
1055
- import { readFile as readFile6, readdir } from "fs/promises";
1066
+ import { readFile as readFile7, readdir } from "fs/promises";
1056
1067
  import { resolve as resolve6, basename, extname as extname4, relative as relative4 } from "path";
1057
- import { existsSync as existsSync9 } from "fs";
1068
+ import { existsSync as existsSync10 } from "fs";
1058
1069
  import { minimatch } from "minimatch";
1059
1070
  function parseSkillFrontmatter(content) {
1060
1071
  const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
@@ -1132,7 +1143,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1132
1143
  defaultLoadType = "on_demand",
1133
1144
  forceAlwaysApply = false
1134
1145
  } = options;
1135
- if (!existsSync9(directory)) {
1146
+ if (!existsSync10(directory)) {
1136
1147
  return [];
1137
1148
  }
1138
1149
  const skills = [];
@@ -1142,7 +1153,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1142
1153
  let fileName;
1143
1154
  if (entry.isDirectory()) {
1144
1155
  const skillMdPath = resolve6(directory, entry.name, "SKILL.md");
1145
- if (existsSync9(skillMdPath)) {
1156
+ if (existsSync10(skillMdPath)) {
1146
1157
  filePath = skillMdPath;
1147
1158
  fileName = entry.name;
1148
1159
  } else {
@@ -1154,7 +1165,7 @@ async function loadSkillsFromDirectory(directory, options = {}) {
1154
1165
  } else {
1155
1166
  continue;
1156
1167
  }
1157
- const content = await readFile6(filePath, "utf-8");
1168
+ const content = await readFile7(filePath, "utf-8");
1158
1169
  const parsed = parseSkillFrontmatter(content);
1159
1170
  if (parsed) {
1160
1171
  const alwaysApply = forceAlwaysApply || parsed.metadata.alwaysApply;
@@ -1233,7 +1244,7 @@ async function loadAllSkillsFromDiscovered(discovered) {
1233
1244
  const onDemandSkills = allSkills.filter((s) => !s.alwaysApply && s.loadType !== "always");
1234
1245
  const alwaysWithContent = await Promise.all(
1235
1246
  alwaysSkills.map(async (skill) => {
1236
- const content = await readFile6(skill.filePath, "utf-8");
1247
+ const content = await readFile7(skill.filePath, "utf-8");
1237
1248
  const parsed = parseSkillFrontmatter(content);
1238
1249
  return {
1239
1250
  ...skill,
@@ -1270,7 +1281,7 @@ async function getGlobMatchedSkills(skills, activeFiles, workingDirectory) {
1270
1281
  });
1271
1282
  const matchedWithContent = await Promise.all(
1272
1283
  matchedSkills.map(async (skill) => {
1273
- const content = await readFile6(skill.filePath, "utf-8");
1284
+ const content = await readFile7(skill.filePath, "utf-8");
1274
1285
  const parsed = parseSkillFrontmatter(content);
1275
1286
  return {
1276
1287
  ...skill,
@@ -1282,10 +1293,10 @@ async function getGlobMatchedSkills(skills, activeFiles, workingDirectory) {
1282
1293
  return matchedWithContent;
1283
1294
  }
1284
1295
  async function loadAgentsMd(agentsMdPath) {
1285
- if (!agentsMdPath || !existsSync9(agentsMdPath)) {
1296
+ if (!agentsMdPath || !existsSync10(agentsMdPath)) {
1286
1297
  return null;
1287
1298
  }
1288
- const content = await readFile6(agentsMdPath, "utf-8");
1299
+ const content = await readFile7(agentsMdPath, "utf-8");
1289
1300
  return content;
1290
1301
  }
1291
1302
  async function loadSkillContent(skillName, directories) {
@@ -1296,7 +1307,7 @@ async function loadSkillContent(skillName, directories) {
1296
1307
  if (!skill) {
1297
1308
  return null;
1298
1309
  }
1299
- const content = await readFile6(skill.filePath, "utf-8");
1310
+ const content = await readFile7(skill.filePath, "utf-8");
1300
1311
  const parsed = parseSkillFrontmatter(content);
1301
1312
  return {
1302
1313
  ...skill,
@@ -1963,7 +1974,7 @@ var init_client = __esm({
1963
1974
  });
1964
1975
 
1965
1976
  // src/semantic/indexer.ts
1966
- import { readFileSync as readFileSync4, statSync } from "fs";
1977
+ import { readFileSync as readFileSync5, statSync } from "fs";
1967
1978
  import { relative as relative6 } from "path";
1968
1979
  import { minimatch as minimatch2 } from "minimatch";
1969
1980
  function parsePositiveInt(value, fallback) {
@@ -2004,18 +2015,18 @@ function isPathExcluded(relativePath, exclude) {
2004
2015
  });
2005
2016
  }
2006
2017
  async function walkDirectory(dir, include, exclude, baseDir) {
2007
- const { readdirSync: readdirSync2 } = await import("fs");
2008
- const { join: join13, relative: relative10 } = await import("path");
2018
+ const { readdirSync: readdirSync3 } = await import("fs");
2019
+ const { join: join14, relative: relative10 } = await import("path");
2009
2020
  const files = [];
2010
2021
  function walk(currentDir) {
2011
2022
  let entries;
2012
2023
  try {
2013
- entries = readdirSync2(currentDir, { withFileTypes: true });
2024
+ entries = readdirSync3(currentDir, { withFileTypes: true });
2014
2025
  } catch {
2015
2026
  return;
2016
2027
  }
2017
2028
  for (const entry of entries) {
2018
- const fullPath = join13(currentDir, entry.name);
2029
+ const fullPath = join14(currentDir, entry.name);
2019
2030
  const relativePath = relative10(baseDir, fullPath);
2020
2031
  if (isPathExcluded(relativePath, exclude)) {
2021
2032
  continue;
@@ -2045,7 +2056,7 @@ function shouldSkipFile(filePath) {
2045
2056
  return { skip: true, reason: "Empty file" };
2046
2057
  }
2047
2058
  try {
2048
- const content = readFileSync4(filePath, "utf-8");
2059
+ const content = readFileSync5(filePath, "utf-8");
2049
2060
  const sample = content.slice(0, 1e3);
2050
2061
  if (sample.includes("\0")) {
2051
2062
  return { skip: true, reason: "Binary file" };
@@ -2144,7 +2155,7 @@ async function indexRepository(options) {
2144
2155
  continue;
2145
2156
  }
2146
2157
  try {
2147
- const content = readFileSync4(filePath, "utf-8");
2158
+ const content = readFileSync5(filePath, "utf-8");
2148
2159
  const chunks = chunkFile(relativePath, content);
2149
2160
  allChunks.push(...chunks);
2150
2161
  progress.totalChunks += chunks.length;
@@ -2384,8 +2395,8 @@ __export(semantic_search_exports, {
2384
2395
  });
2385
2396
  import { tool as tool8 } from "ai";
2386
2397
  import { z as z9 } from "zod";
2387
- import { existsSync as existsSync12, readFileSync as readFileSync5 } from "fs";
2388
- import { join as join5 } from "path";
2398
+ import { existsSync as existsSync13, readFileSync as readFileSync6 } from "fs";
2399
+ import { join as join6 } from "path";
2389
2400
  import { minimatch as minimatch3 } from "minimatch";
2390
2401
  function createSemanticSearchTool(options) {
2391
2402
  return tool8({
@@ -2452,13 +2463,13 @@ Returns matching code snippets with file paths, line numbers, and relevance scor
2452
2463
  if (language && matchLanguage !== language.toLowerCase()) {
2453
2464
  continue;
2454
2465
  }
2455
- const fullPath = join5(options.workingDirectory, filePath);
2456
- if (!existsSync12(fullPath)) {
2466
+ const fullPath = join6(options.workingDirectory, filePath);
2467
+ if (!existsSync13(fullPath)) {
2457
2468
  continue;
2458
2469
  }
2459
2470
  let snippet = "";
2460
2471
  try {
2461
- const content = readFileSync5(fullPath, "utf-8");
2472
+ const content = readFileSync6(fullPath, "utf-8");
2462
2473
  const lines = content.split("\n");
2463
2474
  const snippetLines = lines.slice(
2464
2475
  Math.max(0, startLine - 1),
@@ -2526,7 +2537,7 @@ async function sendWebhook(url, event) {
2526
2537
  try {
2527
2538
  const controller = new AbortController();
2528
2539
  const timeout = setTimeout(() => controller.abort(), 5e3);
2529
- await fetch(url, {
2540
+ const response = await fetch(url, {
2530
2541
  method: "POST",
2531
2542
  headers: {
2532
2543
  "Content-Type": "application/json",
@@ -2536,7 +2547,12 @@ async function sendWebhook(url, event) {
2536
2547
  signal: controller.signal
2537
2548
  });
2538
2549
  clearTimeout(timeout);
2539
- } catch {
2550
+ if (!response.ok) {
2551
+ console.warn(`[WEBHOOK] ${event.type} to ${url} returned HTTP ${response.status}`);
2552
+ }
2553
+ } catch (err) {
2554
+ const reason = err.name === "AbortError" ? "timeout (5s)" : err.message;
2555
+ console.warn(`[WEBHOOK] ${event.type} to ${url} failed: ${reason}`);
2540
2556
  }
2541
2557
  }
2542
2558
  var init_webhook = __esm({
@@ -2753,8 +2769,8 @@ __export(recorder_exports, {
2753
2769
  });
2754
2770
  import { exec as exec5 } from "child_process";
2755
2771
  import { promisify as promisify5 } from "util";
2756
- import { writeFile as writeFile4, mkdir as mkdir4, readFile as readFile10, unlink as unlink2, readdir as readdir5, rm } from "fs/promises";
2757
- import { join as join7 } from "path";
2772
+ import { writeFile as writeFile5, mkdir as mkdir4, readFile as readFile11, unlink as unlink2, readdir as readdir5, rm } from "fs/promises";
2773
+ import { join as join8 } from "path";
2758
2774
  import { tmpdir } from "os";
2759
2775
  import { nanoid as nanoid3 } from "nanoid";
2760
2776
  async function checkFfmpeg() {
@@ -2811,21 +2827,21 @@ var init_recorder = __esm({
2811
2827
  */
2812
2828
  async encode() {
2813
2829
  if (this.frames.length === 0) return null;
2814
- const workDir = join7(tmpdir(), `sparkecoder-recording-${nanoid3(8)}`);
2830
+ const workDir = join8(tmpdir(), `sparkecoder-recording-${nanoid3(8)}`);
2815
2831
  await mkdir4(workDir, { recursive: true });
2816
2832
  try {
2817
2833
  for (let i = 0; i < this.frames.length; i++) {
2818
- const framePath = join7(workDir, `frame_${String(i).padStart(6, "0")}.jpg`);
2819
- await writeFile4(framePath, this.frames[i].data);
2834
+ const framePath = join8(workDir, `frame_${String(i).padStart(6, "0")}.jpg`);
2835
+ await writeFile5(framePath, this.frames[i].data);
2820
2836
  }
2821
2837
  const duration = (this.frames[this.frames.length - 1].timestamp - this.frames[0].timestamp) / 1e3;
2822
2838
  const fps = duration > 0 ? Math.round(this.frames.length / duration) : 10;
2823
2839
  const clampedFps = Math.max(1, Math.min(fps, 30));
2824
- const outputPath = join7(workDir, `recording_${this.sessionId}.mp4`);
2840
+ const outputPath = join8(workDir, `recording_${this.sessionId}.mp4`);
2825
2841
  const hasFfmpeg = await checkFfmpeg();
2826
2842
  if (hasFfmpeg) {
2827
2843
  await execAsync5(
2828
- `ffmpeg -y -framerate ${clampedFps} -i "${join7(workDir, "frame_%06d.jpg")}" -c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 "${outputPath}"`,
2844
+ `ffmpeg -y -framerate ${clampedFps} -i "${join8(workDir, "frame_%06d.jpg")}" -c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 "${outputPath}"`,
2829
2845
  { timeout: 12e4 }
2830
2846
  );
2831
2847
  } else {
@@ -2833,11 +2849,11 @@ var init_recorder = __esm({
2833
2849
  await cleanup(workDir);
2834
2850
  return null;
2835
2851
  }
2836
- const outputBuf = await readFile10(outputPath);
2852
+ const outputBuf = await readFile11(outputPath);
2837
2853
  const files = await readdir5(workDir);
2838
2854
  for (const f of files) {
2839
2855
  if (f.startsWith("frame_")) {
2840
- await unlink2(join7(workDir, f)).catch(() => {
2856
+ await unlink2(join8(workDir, f)).catch(() => {
2841
2857
  });
2842
2858
  }
2843
2859
  }
@@ -2871,8 +2887,8 @@ import { Hono as Hono6 } from "hono";
2871
2887
  import { serve } from "@hono/node-server";
2872
2888
  import { cors } from "hono/cors";
2873
2889
  import { logger } from "hono/logger";
2874
- import { existsSync as existsSync16, mkdirSync as mkdirSync6, writeFileSync as writeFileSync5 } from "fs";
2875
- import { resolve as resolve10, dirname as dirname7, join as join11 } from "path";
2890
+ import { existsSync as existsSync17, mkdirSync as mkdirSync7, writeFileSync as writeFileSync5 } from "fs";
2891
+ import { resolve as resolve10, dirname as dirname7, join as join12 } from "path";
2876
2892
  import { spawn as spawn2 } from "child_process";
2877
2893
  import { createServer as createNetServer } from "net";
2878
2894
  import { fileURLToPath as fileURLToPath4 } from "url";
@@ -2882,9 +2898,9 @@ init_db();
2882
2898
  import { Hono } from "hono";
2883
2899
  import { zValidator } from "@hono/zod-validator";
2884
2900
  import { z as z15 } from "zod";
2885
- import { existsSync as existsSync14, mkdirSync as mkdirSync4, writeFileSync as writeFileSync3, readdirSync, statSync as statSync2, unlinkSync } from "fs";
2901
+ import { existsSync as existsSync15, mkdirSync as mkdirSync5, writeFileSync as writeFileSync3, readdirSync as readdirSync2, statSync as statSync2, unlinkSync as unlinkSync2 } from "fs";
2886
2902
  import { readdir as readdir6 } from "fs/promises";
2887
- import { join as join8, basename as basename5, extname as extname8, relative as relative9 } from "path";
2903
+ import { join as join9, basename as basename5, extname as extname8, relative as relative9 } from "path";
2888
2904
  import { nanoid as nanoid5 } from "nanoid";
2889
2905
 
2890
2906
  // src/agent/index.ts
@@ -4951,8 +4967,34 @@ Working directory: ${options.workingDirectory}`,
4951
4967
  init_db();
4952
4968
  import { tool as tool4 } from "ai";
4953
4969
  import { z as z5 } from "zod";
4970
+ import { existsSync as existsSync9, mkdirSync as mkdirSync4, readdirSync, unlinkSync, readFileSync as readFileSync3, appendFileSync } from "fs";
4971
+ import { readFile as readFile6, writeFile as writeFile4 } from "fs/promises";
4972
+ import { join as join4 } from "path";
4973
+ function getPlansDir(workingDirectory, sessionId) {
4974
+ return join4(workingDirectory, ".sparkecoder", "plans", sessionId);
4975
+ }
4976
+ function ensurePlansDir(workingDirectory, sessionId) {
4977
+ const dir = getPlansDir(workingDirectory, sessionId);
4978
+ if (!existsSync9(dir)) {
4979
+ mkdirSync4(dir, { recursive: true });
4980
+ }
4981
+ const gitignorePath = join4(workingDirectory, ".gitignore");
4982
+ if (existsSync9(gitignorePath)) {
4983
+ try {
4984
+ const content = readFileSync3(gitignorePath, "utf-8");
4985
+ if (!content.includes(".sparkecoder")) {
4986
+ appendFileSync(gitignorePath, "\n.sparkecoder/\n");
4987
+ }
4988
+ } catch {
4989
+ }
4990
+ }
4991
+ return dir;
4992
+ }
4993
+ function slugify(name) {
4994
+ return name.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "").slice(0, 80) || "plan";
4995
+ }
4954
4996
  var todoInputSchema = z5.object({
4955
- action: z5.enum(["add", "list", "mark", "clear"]).describe("The action to perform on the todo list"),
4997
+ action: z5.enum(["add", "list", "mark", "clear", "save_plan", "list_plans", "get_plan", "delete_plan"]).describe("The action to perform"),
4956
4998
  items: z5.array(
4957
4999
  z5.object({
4958
5000
  content: z5.string().describe("Description of the task"),
@@ -4960,27 +5002,67 @@ var todoInputSchema = z5.object({
4960
5002
  })
4961
5003
  ).optional().describe('For "add" action: Array of todo items to add'),
4962
5004
  todoId: z5.string().optional().describe('For "mark" action: The ID of the todo item to update'),
4963
- status: z5.enum(["pending", "in_progress", "completed", "cancelled"]).optional().describe('For "mark" action: The new status for the todo item')
5005
+ status: z5.enum(["pending", "in_progress", "completed", "cancelled"]).optional().describe('For "mark" action: The new status for the todo item'),
5006
+ planName: z5.string().optional().describe('For plan actions: Name of the plan (e.g. "auth-system", "db-migration")'),
5007
+ planContent: z5.string().optional().describe('For "save_plan": Full plan content as markdown with hierarchical tasks using checkboxes')
4964
5008
  });
4965
5009
  function createTodoTool(options) {
4966
5010
  return tool4({
4967
- description: `Manage your task list for the current session. Use this to:
4968
- - Break down complex tasks into smaller steps
4969
- - Track progress on multi-step operations
4970
- - Organize your work systematically
5011
+ description: `Manage your task list and persistent plans for the current session.
4971
5012
 
4972
- Available actions:
5013
+ ## Todo Actions (for tracking current work)
4973
5014
  - "add": Add one or more new todo items to the list
4974
5015
  - "list": View all current todo items and their status
4975
5016
  - "mark": Update the status of a todo item (pending, in_progress, completed, cancelled)
4976
5017
  - "clear": Remove all todo items from the list
4977
5018
 
4978
- Best practices:
4979
- - Add todos before starting complex tasks
4980
- - Mark items as "in_progress" when actively working on them
4981
- - Update status as you complete each step`,
5019
+ ## Plan Actions (for complex, multi-phase work)
5020
+ - "save_plan": Create or update a named plan \u2014 a persistent markdown document with hierarchical tasks, subtasks, and notes. Plans survive context compaction and are always available.
5021
+ - "list_plans": List all plans for this session
5022
+ - "get_plan": Read a specific plan by name
5023
+ - "delete_plan": Remove a plan
5024
+
5025
+ ## Plans vs Todos
5026
+ - **Plans** are the big picture \u2014 the full spec with phases, subtasks, notes, and decisions. They persist on disk and are always injected into your context, even after old messages are summarized.
5027
+ - **Todos** are your current focus \u2014 the immediate steps you're working on right now.
5028
+
5029
+ ## Workflow for complex tasks
5030
+ 1. Create a plan with phases and subtasks (save_plan)
5031
+ 2. Create todos from the first uncompleted phase (add)
5032
+ 3. Work through the todos, marking them as you go
5033
+ 4. When all current todos are done, update the plan (mark completed sections with [x]) and save it
5034
+ 5. Create new todos from the next uncompleted phase
5035
+ 6. Repeat until the plan is fully complete
5036
+
5037
+ ## Plan format
5038
+ Plans should be markdown with this structure:
5039
+ \`\`\`markdown
5040
+ # Plan: [Title]
5041
+
5042
+ ## Overview
5043
+ [What we're doing and why]
5044
+
5045
+ ## Phase 1: [Name] [completed]
5046
+ - [x] Task 1
5047
+ - [x] Task 2
5048
+
5049
+ ## Phase 2: [Name] [in_progress]
5050
+ - [x] Subtask 2.1
5051
+ - [ ] Subtask 2.2
5052
+ - [ ] Sub-subtask 2.2.1
5053
+ - [ ] Sub-subtask 2.2.2
5054
+ - [ ] Subtask 2.3
5055
+
5056
+ ## Phase 3: [Name] [pending]
5057
+ - [ ] Task 1
5058
+ - [ ] Task 2
5059
+
5060
+ ## Notes
5061
+ - Key decisions and context to preserve
5062
+ - Important file paths discovered
5063
+ \`\`\``,
4982
5064
  inputSchema: todoInputSchema,
4983
- execute: async ({ action, items, todoId, status }) => {
5065
+ execute: async ({ action, items, todoId, status, planName, planContent }) => {
4984
5066
  try {
4985
5067
  switch (action) {
4986
5068
  case "add": {
@@ -5048,6 +5130,81 @@ Best practices:
5048
5130
  itemsRemoved: count
5049
5131
  };
5050
5132
  }
5133
+ // ── Plan actions ─────────────────────────────────────────
5134
+ case "save_plan": {
5135
+ if (!planName) {
5136
+ return { success: false, error: 'planName is required for "save_plan"' };
5137
+ }
5138
+ if (!planContent) {
5139
+ return { success: false, error: 'planContent is required for "save_plan"' };
5140
+ }
5141
+ const dir = ensurePlansDir(options.workingDirectory, options.sessionId);
5142
+ const filename = `${slugify(planName)}.md`;
5143
+ const filePath = join4(dir, filename);
5144
+ await writeFile4(filePath, planContent, "utf-8");
5145
+ return {
5146
+ success: true,
5147
+ action: "save_plan",
5148
+ planName,
5149
+ filename,
5150
+ path: filePath,
5151
+ sizeChars: planContent.length
5152
+ };
5153
+ }
5154
+ case "list_plans": {
5155
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
5156
+ if (!existsSync9(dir)) {
5157
+ return { success: true, action: "list_plans", plans: [], count: 0 };
5158
+ }
5159
+ const files = readdirSync(dir).filter((f) => f.endsWith(".md"));
5160
+ const plans = [];
5161
+ for (const f of files) {
5162
+ try {
5163
+ const content = await readFile6(join4(dir, f), "utf-8");
5164
+ const titleMatch = content.match(/^#\s+(?:Plan:\s*)?(.+)/m);
5165
+ plans.push({
5166
+ name: f.replace(/\.md$/, ""),
5167
+ title: titleMatch?.[1]?.trim() || f.replace(/\.md$/, ""),
5168
+ filename: f,
5169
+ sizeChars: content.length
5170
+ });
5171
+ } catch {
5172
+ }
5173
+ }
5174
+ return { success: true, action: "list_plans", plans, count: plans.length };
5175
+ }
5176
+ case "get_plan": {
5177
+ if (!planName) {
5178
+ return { success: false, error: 'planName is required for "get_plan"' };
5179
+ }
5180
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
5181
+ const filename = `${slugify(planName)}.md`;
5182
+ const filePath = join4(dir, filename);
5183
+ if (!existsSync9(filePath)) {
5184
 + return { success: false, error: `Plan not found: "${planName}" (looked for ${filename})` };
5185
+ }
5186
+ const content = await readFile6(filePath, "utf-8");
5187
+ return {
5188
+ success: true,
5189
+ action: "get_plan",
5190
+ planName,
5191
+ content,
5192
+ sizeChars: content.length
5193
+ };
5194
+ }
5195
+ case "delete_plan": {
5196
+ if (!planName) {
5197
+ return { success: false, error: 'planName is required for "delete_plan"' };
5198
+ }
5199
+ const dir = getPlansDir(options.workingDirectory, options.sessionId);
5200
+ const filename = `${slugify(planName)}.md`;
5201
+ const filePath = join4(dir, filename);
5202
+ if (!existsSync9(filePath)) {
5203
+ return { success: false, error: `Plan not found: "${planName}"` };
5204
+ }
5205
+ unlinkSync(filePath);
5206
+ return { success: true, action: "delete_plan", planName, deleted: true };
5207
+ }
5051
5208
  default:
5052
5209
  return {
5053
5210
  success: false,
@@ -5072,6 +5229,21 @@ function formatTodoItem(item) {
5072
5229
  createdAt: item.createdAt.toISOString()
5073
5230
  };
5074
5231
  }
5232
+ async function readSessionPlans(workingDirectory, sessionId) {
5233
+ const dir = getPlansDir(workingDirectory, sessionId);
5234
+ if (!existsSync9(dir)) return [];
5235
+ const files = readdirSync(dir).filter((f) => f.endsWith(".md"));
5236
+ if (files.length === 0) return [];
5237
+ const plans = [];
5238
+ for (const f of files) {
5239
+ try {
5240
+ const content = await readFile6(join4(dir, f), "utf-8");
5241
+ plans.push({ name: f.replace(/\.md$/, ""), content });
5242
+ } catch {
5243
+ }
5244
+ }
5245
+ return plans;
5246
+ }
5075
5247
 
5076
5248
  // src/tools/load-skill.ts
5077
5249
  init_skills();
@@ -5161,7 +5333,7 @@ Once loaded, a skill's content will be available in the conversation context.`,
5161
5333
  import { tool as tool6 } from "ai";
5162
5334
  import { z as z7 } from "zod";
5163
5335
  import { resolve as resolve7, relative as relative5, isAbsolute as isAbsolute3, extname as extname5 } from "path";
5164
- import { existsSync as existsSync10 } from "fs";
5336
+ import { existsSync as existsSync11 } from "fs";
5165
5337
  import { readdir as readdir2, stat as stat2 } from "fs/promises";
5166
5338
  var linterInputSchema = z7.object({
5167
5339
  paths: z7.array(z7.string()).optional().describe("File or directory paths to check for lint errors. If not provided, returns diagnostics for all recently touched files."),
@@ -5229,7 +5401,7 @@ Working directory: ${options.workingDirectory}`,
5229
5401
  const filesToCheck = [];
5230
5402
  for (const path of paths) {
5231
5403
  const absolutePath = isAbsolute3(path) ? path : resolve7(options.workingDirectory, path);
5232
- if (!existsSync10(absolutePath)) {
5404
+ if (!existsSync11(absolutePath)) {
5233
5405
  continue;
5234
5406
  }
5235
5407
  const stats = await stat2(absolutePath);
@@ -5541,17 +5713,17 @@ import { tool as tool9 } from "ai";
5541
5713
  import { z as z10 } from "zod";
5542
5714
  import { exec as exec4 } from "child_process";
5543
5715
  import { promisify as promisify4 } from "util";
5544
- import { readFile as readFile8, stat as stat3, readdir as readdir4 } from "fs/promises";
5716
+ import { readFile as readFile9, stat as stat3, readdir as readdir4 } from "fs/promises";
5545
5717
  import { resolve as resolve9, relative as relative8, isAbsolute as isAbsolute5 } from "path";
5546
- import { existsSync as existsSync13 } from "fs";
5718
+ import { existsSync as existsSync14 } from "fs";
5547
5719
  init_semantic();
5548
5720
 
5549
5721
  // src/tools/code-graph.ts
5550
5722
  import { tool as tool7 } from "ai";
5551
5723
  import { z as z8 } from "zod";
5552
5724
  import { resolve as resolve8, relative as relative7, isAbsolute as isAbsolute4, basename as basename3 } from "path";
5553
- import { readFile as readFile7, readdir as readdir3 } from "fs/promises";
5554
- import { existsSync as existsSync11 } from "fs";
5725
+ import { readFile as readFile8, readdir as readdir3 } from "fs/promises";
5726
+ import { existsSync as existsSync12 } from "fs";
5555
5727
  import { fileURLToPath as fileURLToPath2 } from "url";
5556
5728
  import { execFileSync } from "child_process";
5557
5729
  var codeGraphInputSchema = z8.object({
@@ -5688,7 +5860,7 @@ async function grepForSymbol(symbol, workingDirectory) {
5688
5860
  const ext = entry.name.substring(entry.name.lastIndexOf("."));
5689
5861
  if (!SUPPORTED_EXTS.has(ext)) continue;
5690
5862
  remaining--;
5691
- const content = await readFile7(fullPath, "utf-8");
5863
+ const content = await readFile8(fullPath, "utf-8");
5692
5864
  const lines = content.split("\n");
5693
5865
  for (let i = 0; i < lines.length; i++) {
5694
5866
  if (defPattern.test(lines[i])) {
@@ -5737,7 +5909,7 @@ Working directory: ${options.workingDirectory}`,
5737
5909
  let defSymbol = null;
5738
5910
  if (filePath) {
5739
5911
  const absPath = isAbsolute4(filePath) ? filePath : resolve8(options.workingDirectory, filePath);
5740
- if (!existsSync11(absPath)) {
5912
+ if (!existsSync12(absPath)) {
5741
5913
  return { success: false, error: `File not found: ${filePath}` };
5742
5914
  }
5743
5915
  if (!isSupported(absPath)) {
@@ -5751,7 +5923,7 @@ Working directory: ${options.workingDirectory}`,
5751
5923
  defLine = defSymbol.selectionRange.start.line;
5752
5924
  defChar = defSymbol.selectionRange.start.character;
5753
5925
  } else {
5754
- const content = await readFile7(absPath, "utf-8");
5926
+ const content = await readFile8(absPath, "utf-8");
5755
5927
  const lines2 = content.split("\n");
5756
5928
  const defPattern = new RegExp(
5757
5929
  `(export|function|const|let|var|class|interface|type|enum)\\s+.*\\b${symbol.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\b`
@@ -6164,7 +6336,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
6164
6336
  execute: async ({ path, startLine, endLine }) => {
6165
6337
  try {
6166
6338
  const absolutePath = isAbsolute5(path) ? path : resolve9(workingDirectory, path);
6167
- if (!existsSync13(absolutePath)) {
6339
+ if (!existsSync14(absolutePath)) {
6168
6340
  return {
6169
6341
  success: false,
6170
6342
  error: `File not found: ${path}`
@@ -6177,7 +6349,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
6177
6349
  error: `File too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Use startLine/endLine to read portions.`
6178
6350
  };
6179
6351
  }
6180
- let content = await readFile8(absolutePath, "utf-8");
6352
+ let content = await readFile9(absolutePath, "utf-8");
6181
6353
  if (startLine !== void 0 || endLine !== void 0) {
6182
6354
  const lines = content.split("\n");
6183
6355
  const start = (startLine ?? 1) - 1;
@@ -6208,7 +6380,7 @@ Keep it concise but INCLUDE THE ACTUAL DATA.`;
6208
6380
  execute: async ({ path, recursive, maxDepth }) => {
6209
6381
  try {
6210
6382
  const absolutePath = isAbsolute5(path) ? path : resolve9(workingDirectory, path);
6211
- if (!existsSync13(absolutePath)) {
6383
+ if (!existsSync14(absolutePath)) {
6212
6384
  return {
6213
6385
  success: false,
6214
6386
  error: `Directory not found: ${path}`
@@ -6575,8 +6747,8 @@ function createTaskFailedTool(options) {
6575
6747
  // src/tools/upload-file.ts
6576
6748
  import { tool as tool12 } from "ai";
6577
6749
  import { z as z13 } from "zod";
6578
- import { readFile as readFile9, stat as stat4 } from "fs/promises";
6579
- import { join as join6, basename as basename4, extname as extname7 } from "path";
6750
+ import { readFile as readFile10, stat as stat4 } from "fs/promises";
6751
+ import { join as join7, basename as basename4, extname as extname7 } from "path";
6580
6752
  var MIME_TYPES = {
6581
6753
  ".txt": "text/plain",
6582
6754
  ".md": "text/markdown",
@@ -6618,7 +6790,7 @@ function createUploadFileTool(options) {
6618
6790
  error: "File upload is not available \u2014 remote server with GCS is not configured."
6619
6791
  };
6620
6792
  }
6621
- const fullPath = input.path.startsWith("/") ? input.path : join6(options.workingDirectory, input.path);
6793
+ const fullPath = input.path.startsWith("/") ? input.path : join7(options.workingDirectory, input.path);
6622
6794
  try {
6623
6795
  await stat4(fullPath);
6624
6796
  } catch {
@@ -6636,7 +6808,7 @@ function createUploadFileTool(options) {
6636
6808
  contentType,
6637
6809
  "general"
6638
6810
  );
6639
- const fileData = await readFile9(fullPath);
6811
+ const fileData = await readFile10(fullPath);
6640
6812
  const putRes = await fetch(uploadInfo.uploadUrl, {
6641
6813
  method: "PUT",
6642
6814
  headers: { "Content-Type": contentType },
@@ -6691,7 +6863,8 @@ async function createTools(options) {
6691
6863
  onProgress: options.onWriteFileProgress
6692
6864
  }),
6693
6865
  todo: createTodoTool({
6694
- sessionId: options.sessionId
6866
+ sessionId: options.sessionId,
6867
+ workingDirectory: options.workingDirectory
6695
6868
  }),
6696
6869
  load_skill: createLoadSkillTool({
6697
6870
  sessionId: options.sessionId,
@@ -6789,6 +6962,8 @@ async function buildSystemPrompt(options) {
6789
6962
  }
6790
6963
  const todos = await todoQueries.getBySession(sessionId);
6791
6964
  const todosContext = formatTodosForContext(todos);
6965
+ const plans = await readSessionPlans(workingDirectory, sessionId);
6966
+ const plansContext = formatPlansForContext(plans);
6792
6967
  const platform3 = process.platform === "win32" ? "Windows" : process.platform === "darwin" ? "macOS" : "Linux";
6793
6968
  const currentDate = (/* @__PURE__ */ new Date()).toLocaleDateString("en-US", { weekday: "long", year: "numeric", month: "long", day: "numeric" });
6794
6969
  const searchInstructions = getSearchInstructions();
@@ -6805,7 +6980,7 @@ You have access to powerful tools for:
6805
6980
  - **read_file**: Read file contents to understand code and context
6806
6981
  - **write_file**: Create new files or edit existing ones (supports targeted string replacement)
6807
6982
  - **linter**: Check files for type errors and lint issues (TypeScript, JavaScript, TSX, JSX)
6808
- - **todo**: Manage your task list to track progress on complex operations
6983
+ - **todo**: Manage your task list AND persistent plans for complex multi-phase operations
6809
6984
  - **load_skill**: Load specialized knowledge documents for specific tasks
6810
6985
  - **explore_agent**: Explore agent for semantic discovery - for exploratory questions and finding code by meaning
6811
6986
  - **code_graph**: Inspect a symbol's type hierarchy and usage graph via the TypeScript language server
@@ -6814,9 +6989,23 @@ You have access to powerful tools for:
6814
6989
 
6815
6990
  IMPORTANT: If you have zero context of where you are working, always explore it first to understand the structure before doing things for the user.
6816
6991
 
6817
- Use the TODO tool to manage your task list to track progress on complex operations. Always ask the user what they want to do specifically before doing it, and make a plan.
6818
- Step 1 of the plan should be researching files and understanding the components/structure of what you're working on (if you don't already have context), then after u have done that, plan out the rest of the tasks u need to do.
6819
- You can clear the todo and restart it, and do multiple things inside of one session.
6992
+ ### Planning & Task Management
6993
+ Use the **todo tool** to manage both immediate tasks AND persistent plans:
6994
+
6995
+ **For simple tasks (< 5 steps):** Just use regular todos (add/mark/clear).
6996
+
6997
+ **For complex, multi-phase tasks:** Create a persistent **plan** first.
6998
+ 1. Research the codebase to understand what you're working with
6999
+ 2. Create a plan with save_plan \u2014 a structured markdown document with phases and subtasks
7000
+ 3. Create todos from the first uncompleted phase
7001
+ 4. Work through the todos
7002
+ 5. When done, update the plan (mark completed phases with [x]), save it again
7003
+ 6. Create new todos from the next uncompleted phase
7004
+ 7. Repeat until the plan is fully complete
7005
+
7006
+ Plans persist on disk and are always injected into your context \u2014 they survive context compaction even in very long sessions. You can have multiple plans active at once (e.g., one for frontend, one for backend).
7007
+
7008
+ You can clear the todo list and restart it, and do multiple things inside of one session.
6820
7009
 
6821
7010
  ### bash Tool
6822
7011
  The bash tool runs commands in the terminal. Every command runs in its own session with logs saved to disk.
@@ -7040,6 +7229,8 @@ ${onDemandSkillsContext}
7040
7229
  ## Current Task List
7041
7230
  ${todosContext}
7042
7231
 
7232
+ ${plansContext}
7233
+
7043
7234
  ${customInstructions ? `## Custom Instructions
7044
7235
  ${customInstructions}` : ""}
7045
7236
 
@@ -7063,6 +7254,37 @@ function formatTodosForContext(todos) {
7063
7254
  }
7064
7255
  return lines.join("\n");
7065
7256
  }
7257
+ var MAX_PLAN_CHARS = 3e4;
7258
+ var MAX_TOTAL_PLANS_CHARS = 6e4;
7259
+ function formatPlansForContext(plans) {
7260
+ if (plans.length === 0) return "";
7261
+ let totalChars = 0;
7262
+ const sections = [];
7263
+ sections.push(`## Persistent Plans (${plans.length})`);
7264
+ sections.push("");
7265
+ sections.push("These plans persist across context compaction \u2014 they are always available.");
7266
+ sections.push("When you finish your current todos, check these plans for the next uncompleted phase,");
7267
+ sections.push("update the plan (mark completed items with [x]), then create new todos for the next phase.");
7268
+ sections.push("");
7269
+ for (const plan of plans) {
7270
+ let content = plan.content;
7271
+ if (content.length > MAX_PLAN_CHARS) {
7272
+ content = content.slice(0, MAX_PLAN_CHARS) + `
7273
+
7274
+ ... [plan truncated \u2014 ${content.length - MAX_PLAN_CHARS} chars omitted. Use get_plan to read the full plan.]`;
7275
+ }
7276
+ if (totalChars + content.length > MAX_TOTAL_PLANS_CHARS) {
7277
+ sections.push(`### \u{1F4CB} Plan: ${plan.name} [truncated \u2014 use get_plan("${plan.name}") to read]`);
7278
+ continue;
7279
+ }
7280
+ sections.push(`### \u{1F4CB} Plan: ${plan.name}`);
7281
+ sections.push("");
7282
+ sections.push(content);
7283
+ sections.push("");
7284
+ totalChars += content.length;
7285
+ }
7286
+ return sections.join("\n");
7287
+ }
7066
7288
  function buildTaskPromptAddendum(outputSchema) {
7067
7289
  return `
7068
7290
  ## Task Mode
@@ -7134,7 +7356,7 @@ Before calling \`complete_task\`, you MUST verify your work completely. Do not j
7134
7356
  - **load_skill**: Load specialized skills/knowledge relevant to the task. Check what skills are available and use them.
7135
7357
  - **explore_agent**: Use for codebase exploration and understanding before making changes.
7136
7358
  - **code_graph**: Use to understand type hierarchies, references, and impact before refactoring.
7137
- - **todo**: Track your progress on multi-step tasks so you don't miss steps.
7359
+ - **todo**: Track your progress on multi-step tasks so you don't miss steps. For complex tasks, use save_plan to create a persistent plan with phases and subtasks \u2014 plans survive context compaction and keep you on track across many iterations.
7138
7360
  - **bash**: Full shell access \u2014 run builds, tests, dev servers, open browsers, curl endpoints, anything.
7139
7361
  - **upload_file**: Upload files (screenshots, reports, exports) to cloud storage. Use this to include screenshots of completed work in your task result \u2014 visual proof is very helpful.
7140
7362
 
@@ -7887,12 +8109,14 @@ ${prompt}` });
7887
8109
  const config = getConfig();
7888
8110
  const maxIterations = options.taskConfig.maxIterations ?? 50;
7889
8111
  const webhookUrl = options.taskConfig.webhookUrl;
8112
+ const parentTaskId = options.taskConfig.parentTaskId;
7890
8113
  const fireWebhook = (type, data) => {
7891
8114
  if (!webhookUrl) return;
7892
8115
  sendWebhook(webhookUrl, {
7893
8116
  type,
7894
8117
  taskId: this.session.id,
7895
8118
  sessionId: this.session.id,
8119
+ ...parentTaskId ? { parentTaskId } : {},
7896
8120
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7897
8121
  data
7898
8122
  });
@@ -8085,14 +8309,14 @@ ${taskAddendum}`;
8085
8309
  for (const step of resultSteps) {
8086
8310
  if (step.toolCalls) {
8087
8311
  for (const tc of step.toolCalls) {
8088
- options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.args });
8089
- fireWebhook("task.tool_call", { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.args });
8312
+ options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.input });
8313
+ fireWebhook("task.tool_call", { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.input });
8090
8314
  }
8091
8315
  }
8092
8316
  if (step.toolResults) {
8093
8317
  for (const tr of step.toolResults) {
8094
- options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.result });
8095
- fireWebhook("task.tool_result", { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.result });
8318
+ options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.output });
8319
+ fireWebhook("task.tool_result", { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.output });
8096
8320
  }
8097
8321
  }
8098
8322
  }
@@ -8184,14 +8408,14 @@ ${taskAddendum}`;
8184
8408
  const result = await recorder.encode();
8185
8409
  recorder.clear();
8186
8410
  if (!result) return [];
8187
- const { readFile: readFile11, unlink: unlink3 } = await import("fs/promises");
8411
+ const { readFile: readFile12, unlink: unlink3 } = await import("fs/promises");
8188
8412
  const uploadInfo = await storageQueries2.getUploadUrl(
8189
8413
  this.session.id,
8190
8414
  `browser-recording-${Date.now()}.mp4`,
8191
8415
  "video/mp4",
8192
8416
  "browser-recording"
8193
8417
  );
8194
- const fileData = await readFile11(result.path);
8418
+ const fileData = await readFile12(result.path);
8195
8419
  await fetch(uploadInfo.uploadUrl, {
8196
8420
  method: "PUT",
8197
8421
  headers: { "Content-Type": "video/mp4" },
@@ -8217,12 +8441,12 @@ ${taskAddendum}`;
8217
8441
  try {
8218
8442
  const { isRemoteConfigured: isRemoteConfigured2, storageQueries: storageQueries2 } = await Promise.resolve().then(() => (init_remote(), remote_exports));
8219
8443
  if (!isRemoteConfigured2()) return [];
8220
- const { readFile: readFile11 } = await import("fs/promises");
8221
- const { join: join13, basename: basename6 } = await import("path");
8444
+ const { readFile: readFile12 } = await import("fs/promises");
8445
+ const { join: join14, basename: basename6 } = await import("path");
8222
8446
  const urls = [];
8223
8447
  for (const filePath of filePaths) {
8224
8448
  try {
8225
- const fullPath = filePath.startsWith("/") ? filePath : join13(this.session.workingDirectory, filePath);
8449
+ const fullPath = filePath.startsWith("/") ? filePath : join14(this.session.workingDirectory, filePath);
8226
8450
  const fileName = basename6(fullPath);
8227
8451
  const ext = fileName.split(".").pop()?.toLowerCase() || "";
8228
8452
  const mimeMap = {
@@ -8247,7 +8471,7 @@ ${taskAddendum}`;
8247
8471
  contentType,
8248
8472
  "task-output"
8249
8473
  );
8250
- const fileData = await readFile11(fullPath);
8474
+ const fileData = await readFile12(fullPath);
8251
8475
  await fetch(uploadInfo.uploadUrl, {
8252
8476
  method: "PUT",
8253
8477
  headers: { "Content-Type": contentType },
@@ -8852,12 +9076,12 @@ sessions.get("/:id/diff/:filePath", async (c) => {
8852
9076
  });
8853
9077
  function getAttachmentsDir(sessionId) {
8854
9078
  const appDataDir = getAppDataDirectory();
8855
- return join8(appDataDir, "attachments", sessionId);
9079
+ return join9(appDataDir, "attachments", sessionId);
8856
9080
  }
8857
9081
  function ensureAttachmentsDir(sessionId) {
8858
9082
  const dir = getAttachmentsDir(sessionId);
8859
- if (!existsSync14(dir)) {
8860
- mkdirSync4(dir, { recursive: true });
9083
+ if (!existsSync15(dir)) {
9084
+ mkdirSync5(dir, { recursive: true });
8861
9085
  }
8862
9086
  return dir;
8863
9087
  }
@@ -8868,12 +9092,12 @@ sessions.get("/:id/attachments", async (c) => {
8868
9092
  return c.json({ error: "Session not found" }, 404);
8869
9093
  }
8870
9094
  const dir = getAttachmentsDir(sessionId);
8871
- if (!existsSync14(dir)) {
9095
+ if (!existsSync15(dir)) {
8872
9096
  return c.json({ sessionId, attachments: [], count: 0 });
8873
9097
  }
8874
- const files = readdirSync(dir);
9098
+ const files = readdirSync2(dir);
8875
9099
  const attachments = files.map((filename) => {
8876
- const filePath = join8(dir, filename);
9100
+ const filePath = join9(dir, filename);
8877
9101
  const stats = statSync2(filePath);
8878
9102
  return {
8879
9103
  id: filename.split("_")[0],
@@ -8908,7 +9132,7 @@ sessions.post("/:id/attachments", async (c) => {
8908
9132
  const id = nanoid5(10);
8909
9133
  const ext = extname8(file.name) || "";
8910
9134
  const safeFilename = `${id}_${basename5(file.name).replace(/[^a-zA-Z0-9._-]/g, "_")}`;
8911
- const filePath = join8(dir, safeFilename);
9135
+ const filePath = join9(dir, safeFilename);
8912
9136
  const arrayBuffer = await file.arrayBuffer();
8913
9137
  writeFileSync3(filePath, Buffer.from(arrayBuffer));
8914
9138
  return c.json({
@@ -8934,7 +9158,7 @@ sessions.post("/:id/attachments", async (c) => {
8934
9158
  const id = nanoid5(10);
8935
9159
  const ext = extname8(body.filename) || "";
8936
9160
  const safeFilename = `${id}_${basename5(body.filename).replace(/[^a-zA-Z0-9._-]/g, "_")}`;
8937
- const filePath = join8(dir, safeFilename);
9161
+ const filePath = join9(dir, safeFilename);
8938
9162
  let base64Data = body.data;
8939
9163
  if (base64Data.includes(",")) {
8940
9164
  base64Data = base64Data.split(",")[1];
@@ -8963,16 +9187,16 @@ sessions.delete("/:id/attachments/:attachmentId", async (c) => {
8963
9187
  return c.json({ error: "Session not found" }, 404);
8964
9188
  }
8965
9189
  const dir = getAttachmentsDir(sessionId);
8966
- if (!existsSync14(dir)) {
9190
+ if (!existsSync15(dir)) {
8967
9191
  return c.json({ error: "Attachment not found" }, 404);
8968
9192
  }
8969
- const files = readdirSync(dir);
9193
+ const files = readdirSync2(dir);
8970
9194
  const file = files.find((f) => f.startsWith(attachmentId + "_"));
8971
9195
  if (!file) {
8972
9196
  return c.json({ error: "Attachment not found" }, 404);
8973
9197
  }
8974
- const filePath = join8(dir, file);
8975
- unlinkSync(filePath);
9198
+ const filePath = join9(dir, file);
9199
+ unlinkSync2(filePath);
8976
9200
  return c.json({ success: true, id: attachmentId });
8977
9201
  });
8978
9202
  var filesQuerySchema = z15.object({
@@ -9054,7 +9278,7 @@ async function listWorkspaceFiles(baseDir, currentDir, query, limit, results = [
9054
9278
  const entries = await readdir6(currentDir, { withFileTypes: true });
9055
9279
  for (const entry of entries) {
9056
9280
  if (results.length >= limit * 2) break;
9057
- const fullPath = join8(currentDir, entry.name);
9281
+ const fullPath = join9(currentDir, entry.name);
9058
9282
  const relativePath = relative9(baseDir, fullPath);
9059
9283
  if (entry.isDirectory() && IGNORED_DIRECTORIES.has(entry.name)) {
9060
9284
  continue;
@@ -9102,7 +9326,7 @@ sessions.get(
9102
9326
  return c.json({ error: "Session not found" }, 404);
9103
9327
  }
9104
9328
  const workingDirectory = session.workingDirectory;
9105
- if (!existsSync14(workingDirectory)) {
9329
+ if (!existsSync15(workingDirectory)) {
9106
9330
  return c.json({
9107
9331
  sessionId,
9108
9332
  workingDirectory,
@@ -9213,8 +9437,8 @@ init_db();
9213
9437
  import { Hono as Hono2 } from "hono";
9214
9438
  import { zValidator as zValidator2 } from "@hono/zod-validator";
9215
9439
  import { z as z16 } from "zod";
9216
- import { existsSync as existsSync15, mkdirSync as mkdirSync5, writeFileSync as writeFileSync4 } from "fs";
9217
- import { join as join9 } from "path";
9440
+ import { existsSync as existsSync16, mkdirSync as mkdirSync6, writeFileSync as writeFileSync4 } from "fs";
9441
+ import { join as join10 } from "path";
9218
9442
  init_config();
9219
9443
 
9220
9444
  // src/server/resumable-stream.ts
@@ -9420,12 +9644,12 @@ var rejectSchema = z16.object({
9420
9644
  var streamAbortControllers = /* @__PURE__ */ new Map();
9421
9645
  function getAttachmentsDirectory(sessionId) {
9422
9646
  const appDataDir = getAppDataDirectory();
9423
- return join9(appDataDir, "attachments", sessionId);
9647
+ return join10(appDataDir, "attachments", sessionId);
9424
9648
  }
9425
9649
  async function saveAttachmentToDisk(sessionId, attachment, index) {
9426
9650
  const attachmentsDir = getAttachmentsDirectory(sessionId);
9427
- if (!existsSync15(attachmentsDir)) {
9428
- mkdirSync5(attachmentsDir, { recursive: true });
9651
+ if (!existsSync16(attachmentsDir)) {
9652
+ mkdirSync6(attachmentsDir, { recursive: true });
9429
9653
  }
9430
9654
  let filename = attachment.filename;
9431
9655
  if (!filename) {
@@ -9443,7 +9667,7 @@ async function saveAttachmentToDisk(sessionId, attachment, index) {
9443
9667
  attachment.mediaType = resized.mediaType;
9444
9668
  attachment.data = buffer.toString("base64");
9445
9669
  }
9446
- const filePath = join9(attachmentsDir, filename);
9670
+ const filePath = join10(attachmentsDir, filename);
9447
9671
  writeFileSync4(filePath, buffer);
9448
9672
  return filePath;
9449
9673
  }
@@ -10384,26 +10608,26 @@ init_config();
10384
10608
  import { Hono as Hono3 } from "hono";
10385
10609
  import { zValidator as zValidator3 } from "@hono/zod-validator";
10386
10610
  import { z as z17 } from "zod";
10387
- import { readFileSync as readFileSync6 } from "fs";
10611
+ import { readFileSync as readFileSync7 } from "fs";
10388
10612
  import { fileURLToPath as fileURLToPath3 } from "url";
10389
- import { dirname as dirname6, join as join10 } from "path";
10613
+ import { dirname as dirname6, join as join11 } from "path";
10390
10614
  var __filename = fileURLToPath3(import.meta.url);
10391
10615
  var __dirname = dirname6(__filename);
10392
10616
  var possiblePaths = [
10393
- join10(__dirname, "../package.json"),
10617
+ join11(__dirname, "../package.json"),
10394
10618
  // From dist/server -> dist/../package.json
10395
- join10(__dirname, "../../package.json"),
10619
+ join11(__dirname, "../../package.json"),
10396
10620
  // From dist/server (if nested differently)
10397
- join10(__dirname, "../../../package.json"),
10621
+ join11(__dirname, "../../../package.json"),
10398
10622
  // From src/server/routes (development)
10399
- join10(process.cwd(), "package.json")
10623
+ join11(process.cwd(), "package.json")
10400
10624
  // From current working directory
10401
10625
  ];
10402
10626
  var currentVersion = "0.0.0";
10403
10627
  var packageName = "sparkecoder";
10404
10628
  for (const packageJsonPath of possiblePaths) {
10405
10629
  try {
10406
- const packageJson = JSON.parse(readFileSync6(packageJsonPath, "utf-8"));
10630
+ const packageJson = JSON.parse(readFileSync7(packageJsonPath, "utf-8"));
10407
10631
  if (packageJson.name === "sparkecoder") {
10408
10632
  currentVersion = packageJson.version || "0.0.0";
10409
10633
  packageName = packageJson.name || "sparkecoder";
@@ -10973,6 +11197,7 @@ tasks.post(
10973
11197
  type: "task.failed",
10974
11198
  taskId,
10975
11199
  sessionId: taskId,
11200
+ ...taskConfig.parentTaskId ? { parentTaskId: taskConfig.parentTaskId } : {},
10976
11201
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
10977
11202
  data: { status: "failed", error: errorMsg }
10978
11203
  });
@@ -11062,6 +11287,7 @@ tasks.post("/:id/cancel", async (c) => {
11062
11287
  type: "task.failed",
11063
11288
  taskId: id,
11064
11289
  sessionId: id,
11290
+ ...task.parentTaskId ? { parentTaskId: task.parentTaskId } : {},
11065
11291
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
11066
11292
  data: { status: "failed", error: "Task cancelled by user" }
11067
11293
  });
@@ -11256,11 +11482,11 @@ function getWebDirectory() {
11256
11482
  try {
11257
11483
  const currentDir = dirname7(fileURLToPath4(import.meta.url));
11258
11484
  const webDir = resolve10(currentDir, "..", "web");
11259
- if (existsSync16(webDir) && existsSync16(join11(webDir, "package.json"))) {
11485
+ if (existsSync17(webDir) && existsSync17(join12(webDir, "package.json"))) {
11260
11486
  return webDir;
11261
11487
  }
11262
11488
  const altWebDir = resolve10(currentDir, "..", "..", "web");
11263
- if (existsSync16(altWebDir) && existsSync16(join11(altWebDir, "package.json"))) {
11489
+ if (existsSync17(altWebDir) && existsSync17(join12(altWebDir, "package.json"))) {
11264
11490
  return altWebDir;
11265
11491
  }
11266
11492
  return null;
@@ -11318,23 +11544,23 @@ async function findWebPort(preferredPort) {
11318
11544
  return { port: preferredPort, alreadyRunning: false };
11319
11545
  }
11320
11546
  function hasProductionBuild(webDir) {
11321
- const buildIdPath = join11(webDir, ".next", "BUILD_ID");
11322
- return existsSync16(buildIdPath);
11547
+ const buildIdPath = join12(webDir, ".next", "BUILD_ID");
11548
+ return existsSync17(buildIdPath);
11323
11549
  }
11324
11550
  function hasSourceFiles(webDir) {
11325
- const appDir = join11(webDir, "src", "app");
11326
- const pagesDir = join11(webDir, "src", "pages");
11327
- const rootAppDir = join11(webDir, "app");
11328
- const rootPagesDir = join11(webDir, "pages");
11329
- return existsSync16(appDir) || existsSync16(pagesDir) || existsSync16(rootAppDir) || existsSync16(rootPagesDir);
11551
+ const appDir = join12(webDir, "src", "app");
11552
+ const pagesDir = join12(webDir, "src", "pages");
11553
+ const rootAppDir = join12(webDir, "app");
11554
+ const rootPagesDir = join12(webDir, "pages");
11555
+ return existsSync17(appDir) || existsSync17(pagesDir) || existsSync17(rootAppDir) || existsSync17(rootPagesDir);
11330
11556
  }
11331
11557
  function getStandaloneServerPath(webDir) {
11332
11558
  const possiblePaths2 = [
11333
- join11(webDir, ".next", "standalone", "server.js"),
11334
- join11(webDir, ".next", "standalone", "web", "server.js")
11559
+ join12(webDir, ".next", "standalone", "server.js"),
11560
+ join12(webDir, ".next", "standalone", "web", "server.js")
11335
11561
  ];
11336
11562
  for (const serverPath of possiblePaths2) {
11337
- if (existsSync16(serverPath)) {
11563
+ if (existsSync17(serverPath)) {
11338
11564
  return serverPath;
11339
11565
  }
11340
11566
  }
@@ -11374,13 +11600,13 @@ async function startWebUI(apiPort, webPort = DEFAULT_WEB_PORT, quiet = false, pu
11374
11600
  if (!quiet) console.log(` \u2713 Web UI already running at http://localhost:${actualPort}`);
11375
11601
  return { process: null, port: actualPort };
11376
11602
  }
11377
- const usePnpm = existsSync16(join11(webDir, "pnpm-lock.yaml"));
11378
- const useNpm = !usePnpm && existsSync16(join11(webDir, "package-lock.json"));
11603
+ const usePnpm = existsSync17(join12(webDir, "pnpm-lock.yaml"));
11604
+ const useNpm = !usePnpm && existsSync17(join12(webDir, "package-lock.json"));
11379
11605
  const pkgManager = usePnpm ? "pnpm" : useNpm ? "npm" : "npx";
11380
11606
  const { NODE_OPTIONS, TSX_TSCONFIG_PATH, ...cleanEnv } = process.env;
11381
11607
  const apiUrl = publicUrl || `http://127.0.0.1:${apiPort}`;
11382
11608
  const runtimeConfig = { apiBaseUrl: apiUrl };
11383
- const runtimeConfigPath = join11(webDir, "runtime-config.json");
11609
+ const runtimeConfigPath = join12(webDir, "runtime-config.json");
11384
11610
  try {
11385
11611
  writeFileSync5(runtimeConfigPath, JSON.stringify(runtimeConfig, null, 2));
11386
11612
  if (!quiet) console.log(` \u{1F4DD} Runtime config written to ${runtimeConfigPath}`);
@@ -11573,8 +11799,8 @@ async function startServer(options = {}) {
11573
11799
  if (options.workingDirectory) {
11574
11800
  config.resolvedWorkingDirectory = options.workingDirectory;
11575
11801
  }
11576
- if (!existsSync16(config.resolvedWorkingDirectory)) {
11577
- mkdirSync6(config.resolvedWorkingDirectory, { recursive: true });
11802
+ if (!existsSync17(config.resolvedWorkingDirectory)) {
11803
+ mkdirSync7(config.resolvedWorkingDirectory, { recursive: true });
11578
11804
  if (!options.quiet) console.log(`\u{1F4C1} Created agent workspace: ${config.resolvedWorkingDirectory}`);
11579
11805
  }
11580
11806
  if (!config.resolvedRemoteServer.url) {
@@ -12090,8 +12316,8 @@ function generateOpenAPISpec() {
12090
12316
  init_config();
12091
12317
  init_semantic();
12092
12318
  init_db();
12093
- import { writeFileSync as writeFileSync6, readFileSync as readFileSync7, existsSync as existsSync17 } from "fs";
12094
- import { resolve as resolve11, join as join12 } from "path";
12319
+ import { writeFileSync as writeFileSync6, readFileSync as readFileSync8, existsSync as existsSync18 } from "fs";
12320
+ import { resolve as resolve11, join as join13 } from "path";
12095
12321
  async function apiRequest(baseUrl, path, options = {}) {
12096
12322
  const url = `${baseUrl}${path}`;
12097
12323
  const init = {
@@ -12726,8 +12952,8 @@ program.command("task").description("Run an autonomous task that completes witho
12726
12952
  let outputSchema;
12727
12953
  try {
12728
12954
  const schemaStr = options.schema;
12729
- if (existsSync17(schemaStr)) {
12730
- outputSchema = JSON.parse(readFileSync7(schemaStr, "utf-8"));
12955
+ if (existsSync18(schemaStr)) {
12956
+ outputSchema = JSON.parse(readFileSync8(schemaStr, "utf-8"));
12731
12957
  } else {
12732
12958
  outputSchema = JSON.parse(schemaStr);
12733
12959
  }
@@ -12794,13 +13020,13 @@ program.command("init").description("Create a sparkecoder.config.json file").opt
12794
13020
  let configLocation;
12795
13021
  if (options.global) {
12796
13022
  const appDataDir = ensureAppDataDirectory();
12797
- configPath = join12(appDataDir, "sparkecoder.config.json");
13023
+ configPath = join13(appDataDir, "sparkecoder.config.json");
12798
13024
  configLocation = "global";
12799
13025
  } else {
12800
13026
  configPath = resolve11(process.cwd(), "sparkecoder.config.json");
12801
13027
  configLocation = "local";
12802
13028
  }
12803
- if (existsSync17(configPath) && !options.force) {
13029
+ if (existsSync18(configPath) && !options.force) {
12804
13030
  console.log(chalk.yellow("Config file already exists. Use --force to overwrite."));
12805
13031
  console.log(chalk.dim(` ${configPath}`));
12806
13032
  return;