@cjvana/claude-auto 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159)
  1. package/.claude-plugin/plugin.json +10 -0
  2. package/LICENSE +21 -0
  3. package/README.md +435 -0
  4. package/dist/check-repo-6C4QI2M2.js +33 -0
  5. package/dist/check-repo-6C4QI2M2.js.map +1 -0
  6. package/dist/check-repo-SXWFIVO5.js +8 -0
  7. package/dist/check-repo-SXWFIVO5.js.map +1 -0
  8. package/dist/chunk-24PS2XSV.js +203 -0
  9. package/dist/chunk-24PS2XSV.js.map +1 -0
  10. package/dist/chunk-2D5E23XA.js +129 -0
  11. package/dist/chunk-2D5E23XA.js.map +1 -0
  12. package/dist/chunk-3NEANSUS.js +26 -0
  13. package/dist/chunk-3NEANSUS.js.map +1 -0
  14. package/dist/chunk-4I5UIASZ.js +71 -0
  15. package/dist/chunk-4I5UIASZ.js.map +1 -0
  16. package/dist/chunk-5LGOK52J.js +38 -0
  17. package/dist/chunk-5LGOK52J.js.map +1 -0
  18. package/dist/chunk-6RYMWH5M.js +35 -0
  19. package/dist/chunk-6RYMWH5M.js.map +1 -0
  20. package/dist/chunk-A6XWZPLY.js +56 -0
  21. package/dist/chunk-A6XWZPLY.js.map +1 -0
  22. package/dist/chunk-AWLSYOVF.js +61 -0
  23. package/dist/chunk-AWLSYOVF.js.map +1 -0
  24. package/dist/chunk-BY5YEOVG.js +75 -0
  25. package/dist/chunk-BY5YEOVG.js.map +1 -0
  26. package/dist/chunk-D4MBOIYQ.js +46 -0
  27. package/dist/chunk-D4MBOIYQ.js.map +1 -0
  28. package/dist/chunk-DVZC42TL.js +33 -0
  29. package/dist/chunk-DVZC42TL.js.map +1 -0
  30. package/dist/chunk-E3XVLTT4.js +13 -0
  31. package/dist/chunk-E3XVLTT4.js.map +1 -0
  32. package/dist/chunk-GLW7T4QE.js +116 -0
  33. package/dist/chunk-GLW7T4QE.js.map +1 -0
  34. package/dist/chunk-H2MUDYMW.js +23 -0
  35. package/dist/chunk-H2MUDYMW.js.map +1 -0
  36. package/dist/chunk-HF7PGQI3.js +69 -0
  37. package/dist/chunk-HF7PGQI3.js.map +1 -0
  38. package/dist/chunk-LBH6SLHH.js +543 -0
  39. package/dist/chunk-LBH6SLHH.js.map +1 -0
  40. package/dist/chunk-M53MPY3U.js +115 -0
  41. package/dist/chunk-M53MPY3U.js.map +1 -0
  42. package/dist/chunk-MI7OZ5XD.js +146 -0
  43. package/dist/chunk-MI7OZ5XD.js.map +1 -0
  44. package/dist/chunk-NB46PEG2.js +177 -0
  45. package/dist/chunk-NB46PEG2.js.map +1 -0
  46. package/dist/chunk-ORBF5IW3.js +60 -0
  47. package/dist/chunk-ORBF5IW3.js.map +1 -0
  48. package/dist/chunk-PFU5YLRH.js +131 -0
  49. package/dist/chunk-PFU5YLRH.js.map +1 -0
  50. package/dist/chunk-QLRCFKLU.js +34 -0
  51. package/dist/chunk-QLRCFKLU.js.map +1 -0
  52. package/dist/chunk-QQTIJN3S.js +167 -0
  53. package/dist/chunk-QQTIJN3S.js.map +1 -0
  54. package/dist/chunk-QRYCNVLT.js +72 -0
  55. package/dist/chunk-QRYCNVLT.js.map +1 -0
  56. package/dist/chunk-S6E67XMR.js +52 -0
  57. package/dist/chunk-S6E67XMR.js.map +1 -0
  58. package/dist/chunk-S6W4SURF.js +33 -0
  59. package/dist/chunk-S6W4SURF.js.map +1 -0
  60. package/dist/chunk-SMZYA6CY.js +121 -0
  61. package/dist/chunk-SMZYA6CY.js.map +1 -0
  62. package/dist/chunk-SNOA575X.js +12 -0
  63. package/dist/chunk-SNOA575X.js.map +1 -0
  64. package/dist/chunk-SZRIZBWI.js +44 -0
  65. package/dist/chunk-SZRIZBWI.js.map +1 -0
  66. package/dist/chunk-TAGHPCFT.js +47 -0
  67. package/dist/chunk-TAGHPCFT.js.map +1 -0
  68. package/dist/chunk-TGKCHHXT.js +34 -0
  69. package/dist/chunk-TGKCHHXT.js.map +1 -0
  70. package/dist/chunk-TORYFKPK.js +39 -0
  71. package/dist/chunk-TORYFKPK.js.map +1 -0
  72. package/dist/chunk-U35GRLBD.js +143 -0
  73. package/dist/chunk-U35GRLBD.js.map +1 -0
  74. package/dist/chunk-W2HBRERV.js +57 -0
  75. package/dist/chunk-W2HBRERV.js.map +1 -0
  76. package/dist/chunk-WYU476R2.js +119 -0
  77. package/dist/chunk-WYU476R2.js.map +1 -0
  78. package/dist/chunk-YMO45Z6G.js +69 -0
  79. package/dist/chunk-YMO45Z6G.js.map +1 -0
  80. package/dist/claude-auto-run.js +1717 -0
  81. package/dist/claude-auto-run.js.map +1 -0
  82. package/dist/claude-auto.js +186 -0
  83. package/dist/claude-auto.js.map +1 -0
  84. package/dist/cost-QGM3D4QW.js +72 -0
  85. package/dist/cost-QGM3D4QW.js.map +1 -0
  86. package/dist/cost-QKN3U7AG.js +11 -0
  87. package/dist/cost-QKN3U7AG.js.map +1 -0
  88. package/dist/create-T3BDDS6G.js +14 -0
  89. package/dist/create-T3BDDS6G.js.map +1 -0
  90. package/dist/create-U5WYKTD4.js +118 -0
  91. package/dist/create-U5WYKTD4.js.map +1 -0
  92. package/dist/crontab-CDMC2FDT.js +118 -0
  93. package/dist/crontab-CDMC2FDT.js.map +1 -0
  94. package/dist/crontab-MAJ52FOK.js +118 -0
  95. package/dist/crontab-MAJ52FOK.js.map +1 -0
  96. package/dist/crontab-PNEWANLW.js +12 -0
  97. package/dist/crontab-PNEWANLW.js.map +1 -0
  98. package/dist/edit-77E3ZQHM.js +134 -0
  99. package/dist/edit-77E3ZQHM.js.map +1 -0
  100. package/dist/edit-RVPRAAQ2.js +13 -0
  101. package/dist/edit-RVPRAAQ2.js.map +1 -0
  102. package/dist/index.d.ts +1137 -0
  103. package/dist/index.js +2049 -0
  104. package/dist/index.js.map +1 -0
  105. package/dist/launchd-7F27BIZB.js +166 -0
  106. package/dist/launchd-7F27BIZB.js.map +1 -0
  107. package/dist/launchd-HNZIWLNC.js +166 -0
  108. package/dist/launchd-HNZIWLNC.js.map +1 -0
  109. package/dist/launchd-LZGDP7BM.js +12 -0
  110. package/dist/launchd-LZGDP7BM.js.map +1 -0
  111. package/dist/list-OIGERGYJ.js +15 -0
  112. package/dist/list-OIGERGYJ.js.map +1 -0
  113. package/dist/list-T35RSQVU.js +73 -0
  114. package/dist/list-T35RSQVU.js.map +1 -0
  115. package/dist/logs-D5FNSCXE.js +12 -0
  116. package/dist/logs-D5FNSCXE.js.map +1 -0
  117. package/dist/logs-YVSFXBSB.js +40 -0
  118. package/dist/logs-YVSFXBSB.js.map +1 -0
  119. package/dist/pause-2YOLFMAR.js +12 -0
  120. package/dist/pause-2YOLFMAR.js.map +1 -0
  121. package/dist/pause-JB42JGTB.js +45 -0
  122. package/dist/pause-JB42JGTB.js.map +1 -0
  123. package/dist/pause-OJNUYBCJ.js +47 -0
  124. package/dist/pause-OJNUYBCJ.js.map +1 -0
  125. package/dist/remove-RXYKFYBI.js +12 -0
  126. package/dist/remove-RXYKFYBI.js.map +1 -0
  127. package/dist/remove-UASXZCOR.js +59 -0
  128. package/dist/remove-UASXZCOR.js.map +1 -0
  129. package/dist/report-CHAJH2SA.js +150 -0
  130. package/dist/report-CHAJH2SA.js.map +1 -0
  131. package/dist/report-IYGK5HTC.js +14 -0
  132. package/dist/report-IYGK5HTC.js.map +1 -0
  133. package/dist/resume-3ATNZP6D.js +13 -0
  134. package/dist/resume-3ATNZP6D.js.map +1 -0
  135. package/dist/resume-6WVGU6XW.js +48 -0
  136. package/dist/resume-6WVGU6XW.js.map +1 -0
  137. package/dist/resume-JVTR7OEX.js +50 -0
  138. package/dist/resume-JVTR7OEX.js.map +1 -0
  139. package/dist/schtasks-2EQAD3ES.js +11 -0
  140. package/dist/schtasks-2EQAD3ES.js.map +1 -0
  141. package/dist/schtasks-4V2IFD3A.js +142 -0
  142. package/dist/schtasks-4V2IFD3A.js.map +1 -0
  143. package/dist/schtasks-JGEPEKQS.js +142 -0
  144. package/dist/schtasks-JGEPEKQS.js.map +1 -0
  145. package/dist/tui-2DUPCX3Q.js +15 -0
  146. package/dist/tui-2DUPCX3Q.js.map +1 -0
  147. package/dist/tui-6LOGPILA.js +547 -0
  148. package/dist/tui-6LOGPILA.js.map +1 -0
  149. package/package.json +81 -0
  150. package/scripts/postinstall.mjs +65 -0
  151. package/scripts/preuninstall.mjs +33 -0
  152. package/skills/edit/SKILL.md +25 -0
  153. package/skills/list/SKILL.md +26 -0
  154. package/skills/logs/SKILL.md +33 -0
  155. package/skills/pause/SKILL.md +21 -0
  156. package/skills/remove/SKILL.md +22 -0
  157. package/skills/resume/SKILL.md +21 -0
  158. package/skills/setup/SKILL.md +195 -0
  159. package/skills/status/SKILL.md +27 -0
@@ -0,0 +1,61 @@
1
// src/util/errors.ts

/** Thrown when a job config file cannot be parsed as YAML. */
class ConfigParseError extends Error {
  name = "ConfigParseError";
  constructor(filePath, parseErrors) {
    const details = parseErrors.map((err) => ` - ${err.message}`).join("\n");
    super(`YAML syntax error in ${filePath}:\n${details}`);
    this.filePath = filePath;
    this.parseErrors = parseErrors;
  }
}

/** Thrown when a config file parses but fails schema validation. */
class ConfigValidationError extends Error {
  name = "ConfigValidationError";
  constructor(filePath, validationMessage) {
    super(`Invalid config in ${filePath}:\n${validationMessage}`);
    this.filePath = filePath;
    this.validationMessage = validationMessage;
  }
}

/** Thrown when a platform scheduler (crontab/launchd/schtasks) operation fails. */
class SchedulerError extends Error {
  name = "SchedulerError";
  constructor(platform, message, cause) {
    super(`Scheduler error (${platform}): ${message}`);
    this.platform = platform;
    this.cause = cause;
  }
}

/** Thrown when a cron expression is malformed or unparseable. */
class CronValidationError extends Error {
  name = "CronValidationError";
  constructor(expression, message) {
    super(`Invalid cron expression "${expression}": ${message}`);
    this.expression = expression;
  }
}

/** Thrown when a git operation fails inside a repository. */
class GitOpsError extends Error {
  name = "GitOpsError";
  constructor(operation, repoPath, message, cause) {
    super(`Git operation '${operation}' failed in ${repoPath}: ${message}`);
    this.operation = operation;
    this.repoPath = repoPath;
    this.cause = cause;
  }
}

/** Thrown when spawning the Claude process fails. */
class SpawnError extends Error {
  name = "SpawnError";
  constructor(message, exitCode) {
    super(`Claude spawn error: ${message}`);
    this.exitCode = exitCode;
  }
}

export {
  ConfigParseError,
  ConfigValidationError,
  SchedulerError,
  CronValidationError,
  GitOpsError,
  SpawnError
};
//# sourceMappingURL=chunk-AWLSYOVF.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/util/errors.ts"],"sourcesContent":["export class ConfigParseError extends Error {\n\toverride name = \"ConfigParseError\" as const;\n\tconstructor(\n\t\tpublic readonly filePath: string,\n\t\tpublic readonly parseErrors: Array<{ message: string }>,\n\t) {\n\t\tconst errorMessages = parseErrors.map((e) => ` - ${e.message}`).join(\"\\n\");\n\t\tsuper(`YAML syntax error in ${filePath}:\\n${errorMessages}`);\n\t}\n}\n\nexport class ConfigValidationError extends Error {\n\toverride name = \"ConfigValidationError\" as const;\n\tconstructor(\n\t\tpublic readonly filePath: string,\n\t\tpublic readonly validationMessage: string,\n\t) {\n\t\tsuper(`Invalid config in ${filePath}:\\n${validationMessage}`);\n\t}\n}\n\nexport class SchedulerError extends Error {\n\toverride name = \"SchedulerError\" as const;\n\tconstructor(\n\t\tpublic readonly platform: string,\n\t\tmessage: string,\n\t\tpublic readonly cause?: Error,\n\t) {\n\t\tsuper(`Scheduler error (${platform}): ${message}`);\n\t}\n}\n\nexport class CronValidationError extends Error {\n\toverride name = \"CronValidationError\" as const;\n\tconstructor(\n\t\tpublic readonly expression: string,\n\t\tmessage: string,\n\t) {\n\t\tsuper(`Invalid cron expression \"${expression}\": ${message}`);\n\t}\n}\n\nexport class GitOpsError extends Error {\n\toverride name = \"GitOpsError\" as const;\n\tconstructor(\n\t\tpublic readonly operation: string,\n\t\tpublic readonly repoPath: string,\n\t\tmessage: string,\n\t\tpublic override readonly cause?: Error,\n\t) {\n\t\tsuper(`Git operation '${operation}' failed in ${repoPath}: ${message}`);\n\t}\n}\n\nexport class LockError extends Error {\n\toverride name = \"LockError\" as const;\n\tconstructor(\n\t\tpublic readonly jobId: string,\n\t\tmessage: string,\n\t) {\n\t\tsuper(`Lock error for job ${jobId}: ${message}`);\n\t}\n}\n\nexport class SpawnError extends Error {\n\toverride name = \"SpawnError\" as const;\n\tconstructor(\n\t\tmessage: 
string,\n\t\tpublic readonly exitCode?: number,\n\t) {\n\t\tsuper(`Claude spawn error: ${message}`);\n\t}\n}\n"],"mappings":";AAAO,IAAM,mBAAN,cAA+B,MAAM;AAAA,EAE3C,YACiB,UACA,aACf;AACD,UAAM,gBAAgB,YAAY,IAAI,CAAC,MAAM,OAAO,EAAE,OAAO,EAAE,EAAE,KAAK,IAAI;AAC1E,UAAM,wBAAwB,QAAQ;AAAA,EAAM,aAAa,EAAE;AAJ3C;AACA;AAAA,EAIjB;AAAA,EAPS,OAAO;AAQjB;AAEO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAEhD,YACiB,UACA,mBACf;AACD,UAAM,qBAAqB,QAAQ;AAAA,EAAM,iBAAiB,EAAE;AAH5C;AACA;AAAA,EAGjB;AAAA,EANS,OAAO;AAOjB;AAEO,IAAM,iBAAN,cAA6B,MAAM;AAAA,EAEzC,YACiB,UAChB,SACgB,OACf;AACD,UAAM,oBAAoB,QAAQ,MAAM,OAAO,EAAE;AAJjC;AAEA;AAAA,EAGjB;AAAA,EAPS,OAAO;AAQjB;AAEO,IAAM,sBAAN,cAAkC,MAAM;AAAA,EAE9C,YACiB,YAChB,SACC;AACD,UAAM,4BAA4B,UAAU,MAAM,OAAO,EAAE;AAH3C;AAAA,EAIjB;AAAA,EANS,OAAO;AAOjB;AAEO,IAAM,cAAN,cAA0B,MAAM;AAAA,EAEtC,YACiB,WACA,UAChB,SACyB,OACxB;AACD,UAAM,kBAAkB,SAAS,eAAe,QAAQ,KAAK,OAAO,EAAE;AALtD;AACA;AAES;AAAA,EAG1B;AAAA,EARS,OAAO;AASjB;AAYO,IAAM,aAAN,cAAyB,MAAM;AAAA,EAErC,YACC,SACgB,UACf;AACD,UAAM,uBAAuB,OAAO,EAAE;AAFtB;AAAA,EAGjB;AAAA,EANS,OAAO;AAOjB;","names":[]}
@@ -0,0 +1,75 @@
1
import {
  JobConfigSchema,
  loadJobConfig,
  saveJobConfig
} from "./chunk-2D5E23XA.js";
import {
  writeFileSafe
} from "./chunk-E3XVLTT4.js";
import {
  paths
} from "./chunk-H2MUDYMW.js";

// src/core/job-manager.ts
import { readdir, readFile, rm } from "fs/promises";
import { nanoid } from "nanoid";
import { parseDocument } from "yaml";
import { z } from "zod";

/** Create a job with a freshly generated 12-char id and persist its config to disk. */
async function createJob(input) {
  const id = nanoid(12);
  const job = { ...input, id };
  await saveJobConfig(paths.jobConfig(id), job);
  return job;
}

/** Load and validate one job config by id (throws if missing or invalid). */
async function readJob(jobId) {
  return loadJobConfig(paths.jobConfig(jobId));
}

/**
 * Patch fields of a job's YAML config while preserving comments, validate the
 * result against the schema, and atomically rewrite the file.
 * The `id` field is never modified.
 */
async function updateJob(jobId, updates) {
  const configPath = paths.jobConfig(jobId);
  const doc = parseDocument(await readFile(configPath, "utf-8"));
  for (const [field, value] of Object.entries(updates)) {
    if (field === "id") continue; // the id is immutable
    doc.set(field, doc.createNode(value));
  }
  const parsed = JobConfigSchema.safeParse(doc.toJS());
  if (!parsed.success) {
    throw new Error(`Invalid config after update: ${z.prettifyError(parsed.error)}`);
  }
  await writeFileSafe(configPath, doc.toString({ indent: 2 }));
  return parsed.data;
}

/** Remove a job's entire directory; a no-op when it does not exist. */
async function deleteJob(jobId) {
  await rm(paths.jobDir(jobId), { recursive: true, force: true });
}

/**
 * Enumerate all valid job configs under the jobs directory.
 * Returns [] when the directory is absent; entries that are not directories
 * or whose configs fail to load are silently skipped.
 */
async function listJobs() {
  let entries;
  try {
    entries = await readdir(paths.jobs, { withFileTypes: true, encoding: "utf-8" });
  } catch (error) {
    const isMissingDir = error instanceof Error && "code" in error && error.code === "ENOENT";
    if (isMissingDir) return [];
    throw error;
  }
  const jobs = [];
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    try {
      jobs.push(await readJob(entry.name));
    } catch {
      // skip directories without a valid config
    }
  }
  return jobs;
}

export {
  createJob,
  readJob,
  updateJob,
  deleteJob,
  listJobs
};
//# sourceMappingURL=chunk-BY5YEOVG.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/core/job-manager.ts"],"sourcesContent":["import { readdir, readFile, rm } from \"node:fs/promises\";\nimport { nanoid } from \"nanoid\";\nimport { parseDocument } from \"yaml\";\nimport { z } from \"zod\";\nimport { writeFileSafe } from \"../util/fs.js\";\nimport { paths } from \"../util/paths.js\";\nimport { loadJobConfig, saveJobConfig } from \"./config.js\";\nimport { type JobConfig, JobConfigSchema } from \"./types.js\";\n\n/**\n * Create a new job with a generated nanoid(12) identifier.\n * Writes the config to disk and returns the complete JobConfig.\n */\nexport async function createJob(input: Omit<JobConfig, \"id\">): Promise<JobConfig> {\n\tconst id = nanoid(12);\n\tconst config: JobConfig = { ...input, id };\n\tawait saveJobConfig(paths.jobConfig(id), config);\n\treturn config;\n}\n\n/**\n * Read and validate a job config by its ID.\n * Throws if the job doesn't exist (ENOENT) or config is invalid.\n */\nexport async function readJob(jobId: string): Promise<JobConfig> {\n\treturn loadJobConfig(paths.jobConfig(jobId));\n}\n\n/**\n * Update specified fields of a job config while preserving YAML comments.\n * Validates the result before writing. 
Rejects invalid updates.\n */\nexport async function updateJob(\n\tjobId: string,\n\tupdates: Partial<Omit<JobConfig, \"id\">>,\n): Promise<JobConfig> {\n\tconst configPath = paths.jobConfig(jobId);\n\tconst content = await readFile(configPath, \"utf-8\");\n\tconst doc = parseDocument(content);\n\n\t// Apply each update key to the Document, preserving comments\n\tfor (const [key, value] of Object.entries(updates)) {\n\t\tif (key === \"id\") continue; // Never update the id\n\t\tdoc.set(key, doc.createNode(value));\n\t}\n\n\t// Validate the modified document\n\tconst raw = doc.toJS();\n\tconst result = JobConfigSchema.safeParse(raw);\n\n\tif (!result.success) {\n\t\tthrow new Error(`Invalid config after update: ${z.prettifyError(result.error)}`);\n\t}\n\n\tawait writeFileSafe(configPath, doc.toString({ indent: 2 }));\n\treturn result.data;\n}\n\n/**\n * Delete a job and its entire directory.\n * Idempotent: does not throw if the job doesn't exist.\n */\nexport async function deleteJob(jobId: string): Promise<void> {\n\tawait rm(paths.jobDir(jobId), { recursive: true, force: true });\n}\n\n/**\n * List all valid job configs.\n * Skips invalid directories/configs. 
Returns empty array if no jobs directory exists.\n */\nexport async function listJobs(): Promise<JobConfig[]> {\n\tlet entries: import(\"node:fs\").Dirent<string>[];\n\ttry {\n\t\tentries = await readdir(paths.jobs, { withFileTypes: true, encoding: \"utf-8\" });\n\t} catch (error: unknown) {\n\t\tif (\n\t\t\terror instanceof Error &&\n\t\t\t\"code\" in error &&\n\t\t\t(error as NodeJS.ErrnoException).code === \"ENOENT\"\n\t\t) {\n\t\t\treturn [];\n\t\t}\n\t\tthrow error;\n\t}\n\n\tconst results: JobConfig[] = [];\n\tfor (const entry of entries) {\n\t\tif (!entry.isDirectory()) continue;\n\t\ttry {\n\t\t\tconst config = await readJob(entry.name);\n\t\t\tresults.push(config);\n\t\t} catch {\n\t\t\t// Skip invalid directories/configs\n\t\t}\n\t}\n\n\treturn results;\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,SAAS,SAAS,UAAU,UAAU;AACtC,SAAS,cAAc;AACvB,SAAS,qBAAqB;AAC9B,SAAS,SAAS;AAUlB,eAAsB,UAAU,OAAkD;AACjF,QAAM,KAAK,OAAO,EAAE;AACpB,QAAM,SAAoB,EAAE,GAAG,OAAO,GAAG;AACzC,QAAM,cAAc,MAAM,UAAU,EAAE,GAAG,MAAM;AAC/C,SAAO;AACR;AAMA,eAAsB,QAAQ,OAAmC;AAChE,SAAO,cAAc,MAAM,UAAU,KAAK,CAAC;AAC5C;AAMA,eAAsB,UACrB,OACA,SACqB;AACrB,QAAM,aAAa,MAAM,UAAU,KAAK;AACxC,QAAM,UAAU,MAAM,SAAS,YAAY,OAAO;AAClD,QAAM,MAAM,cAAc,OAAO;AAGjC,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,QAAI,QAAQ,KAAM;AAClB,QAAI,IAAI,KAAK,IAAI,WAAW,KAAK,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM,IAAI,KAAK;AACrB,QAAM,SAAS,gBAAgB,UAAU,GAAG;AAE5C,MAAI,CAAC,OAAO,SAAS;AACpB,UAAM,IAAI,MAAM,gCAAgC,EAAE,cAAc,OAAO,KAAK,CAAC,EAAE;AAAA,EAChF;AAEA,QAAM,cAAc,YAAY,IAAI,SAAS,EAAE,QAAQ,EAAE,CAAC,CAAC;AAC3D,SAAO,OAAO;AACf;AAMA,eAAsB,UAAU,OAA8B;AAC7D,QAAM,GAAG,MAAM,OAAO,KAAK,GAAG,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAC/D;AAMA,eAAsB,WAAiC;AACtD,MAAI;AACJ,MAAI;AACH,cAAU,MAAM,QAAQ,MAAM,MAAM,EAAE,eAAe,MAAM,UAAU,QAAQ,CAAC;AAAA,EAC/E,SAAS,OAAgB;AACxB,QACC,iBAAiB,SACjB,UAAU,SACT,MAAgC,SAAS,UACzC;AACD,aAAO,CAAC;AAAA,IACT;AACA,UAAM;AAAA,EACP;AAEA,QAAM,UAAuB,CAAC;AAC9B,aAAW,SAAS,SAAS;AAC5B,QAAI,CAAC,MAAM,YAAY,EAAG;AAC1B,QAAI;AACH,YAAM,SAAS,MAAM,QAAQ,MAAM,IAAI;AACvC,cAA
Q,KAAK,MAAM;AAAA,IACpB,QAAQ;AAAA,IAER;AAAA,EACD;AAEA,SAAO;AACR;","names":[]}
@@ -0,0 +1,46 @@
1
import {
  CronValidationError
} from "./chunk-YMO45Z6G.js";

// src/core/schedule.ts
import { CronExpressionParser } from "cron-parser";
import cronstrue from "cronstrue";

/**
 * Validate a standard 5-field cron expression.
 * Throws CronValidationError for a wrong field count or an unparseable expression.
 */
function validateCronExpression(cronExpr) {
  const fieldCount = cronExpr.trim().split(/\s+/).length;
  if (fieldCount !== 5) {
    throw new CronValidationError(
      cronExpr,
      `Expected 5 fields (minute hour day month weekday), got ${fieldCount}`
    );
  }
  try {
    CronExpressionParser.parse(cronExpr);
  } catch (error) {
    throw new CronValidationError(
      cronExpr,
      error instanceof Error ? error.message : String(error)
    );
  }
}

/** Render a cron expression as a verbose, 12-hour-clock English description. */
function describeSchedule(cronExpr) {
  const options = { use24HourTimeFormat: false, verbose: true };
  return cronstrue.toString(cronExpr, options);
}

/** Compute the next `count` fire times for the expression in the given timezone. */
function getNextRuns(cronExpr, timezone, count = 3) {
  const interval = CronExpressionParser.parse(cronExpr, { tz: timezone });
  return interval.take(count).map((cronDate) => cronDate.toDate());
}

/** Validate the expression and bundle its description plus next run times. */
function validateAndDescribeSchedule(cronExpr, timezone) {
  validateCronExpression(cronExpr);
  return {
    cron: cronExpr,
    timezone,
    humanReadable: describeSchedule(cronExpr),
    nextRuns: getNextRuns(cronExpr, timezone)
  };
}

export {
  validateCronExpression,
  describeSchedule,
  getNextRuns,
  validateAndDescribeSchedule
};
//# sourceMappingURL=chunk-D4MBOIYQ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/core/schedule.ts"],"sourcesContent":["import { CronExpressionParser } from \"cron-parser\";\nimport cronstrue from \"cronstrue\";\nimport { CronValidationError } from \"../util/errors.js\";\nimport type { ScheduleInfo } from \"./types.js\";\n\n/**\n * Validate a cron expression. Throws CronValidationError if invalid.\n * Only accepts standard 5-field cron expressions.\n */\nexport function validateCronExpression(cronExpr: string): void {\n\tconst fields = cronExpr.trim().split(/\\s+/);\n\tif (fields.length !== 5) {\n\t\tthrow new CronValidationError(\n\t\t\tcronExpr,\n\t\t\t`Expected 5 fields (minute hour day month weekday), got ${fields.length}`,\n\t\t);\n\t}\n\ttry {\n\t\tCronExpressionParser.parse(cronExpr);\n\t} catch (error: unknown) {\n\t\tconst msg = error instanceof Error ? error.message : String(error);\n\t\tthrow new CronValidationError(cronExpr, msg);\n\t}\n}\n\n/**\n * Get a human-readable description of a cron expression.\n * Example: \"0 *​/6 * * *\" -> \"Every 6 hours\"\n */\nexport function describeSchedule(cronExpr: string): string {\n\treturn cronstrue.toString(cronExpr, {\n\t\tuse24HourTimeFormat: false,\n\t\tverbose: true,\n\t});\n}\n\n/**\n * Compute the next N run times for a cron expression in the given IANA timezone.\n */\nexport function getNextRuns(cronExpr: string, timezone: string, count = 3): Date[] {\n\tconst interval = CronExpressionParser.parse(cronExpr, { tz: timezone });\n\treturn interval.take(count).map((d) => d.toDate());\n}\n\n/**\n * Validate a cron expression and return complete schedule info\n * including human-readable description and next run times.\n */\nexport function validateAndDescribeSchedule(cronExpr: string, timezone: string): ScheduleInfo {\n\tvalidateCronExpression(cronExpr);\n\tconst humanReadable = describeSchedule(cronExpr);\n\tconst nextRuns = getNextRuns(cronExpr, timezone);\n\treturn { cron: cronExpr, timezone, humanReadable, nextRuns 
};\n}\n"],"mappings":";;;;;AAAA,SAAS,4BAA4B;AACrC,OAAO,eAAe;AAQf,SAAS,uBAAuB,UAAwB;AAC9D,QAAM,SAAS,SAAS,KAAK,EAAE,MAAM,KAAK;AAC1C,MAAI,OAAO,WAAW,GAAG;AACxB,UAAM,IAAI;AAAA,MACT;AAAA,MACA,0DAA0D,OAAO,MAAM;AAAA,IACxE;AAAA,EACD;AACA,MAAI;AACH,yBAAqB,MAAM,QAAQ;AAAA,EACpC,SAAS,OAAgB;AACxB,UAAM,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACjE,UAAM,IAAI,oBAAoB,UAAU,GAAG;AAAA,EAC5C;AACD;AAMO,SAAS,iBAAiB,UAA0B;AAC1D,SAAO,UAAU,SAAS,UAAU;AAAA,IACnC,qBAAqB;AAAA,IACrB,SAAS;AAAA,EACV,CAAC;AACF;AAKO,SAAS,YAAY,UAAkB,UAAkB,QAAQ,GAAW;AAClF,QAAM,WAAW,qBAAqB,MAAM,UAAU,EAAE,IAAI,SAAS,CAAC;AACtE,SAAO,SAAS,KAAK,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC;AAClD;AAMO,SAAS,4BAA4B,UAAkB,UAAgC;AAC7F,yBAAuB,QAAQ;AAC/B,QAAM,gBAAgB,iBAAiB,QAAQ;AAC/C,QAAM,WAAW,YAAY,UAAU,QAAQ;AAC/C,SAAO,EAAE,MAAM,UAAU,UAAU,eAAe,SAAS;AAC5D;","names":[]}
@@ -0,0 +1,33 @@
1
import {
  SchedulerError
} from "./chunk-AWLSYOVF.js";

// src/platform/detect.ts
/** Return "linux", "darwin", or "win32"; throw SchedulerError otherwise. */
function detectPlatform() {
  const platform = process.platform;
  switch (platform) {
    case "linux":
    case "darwin":
    case "win32":
      return platform;
    default:
      throw new SchedulerError(
        platform,
        `Unsupported platform: ${platform}. Only linux, darwin, and win32 are supported.`
      );
  }
}

// src/platform/scheduler.ts
/**
 * Lazily load and construct the Scheduler implementation for the current
 * platform: crontab on Linux, schtasks on Windows, launchd on macOS.
 */
async function createScheduler() {
  switch (detectPlatform()) {
    case "linux": {
      const { CrontabScheduler } = await import("./crontab-CDMC2FDT.js");
      return new CrontabScheduler();
    }
    case "win32": {
      const { SchtasksScheduler } = await import("./schtasks-JGEPEKQS.js");
      return new SchtasksScheduler();
    }
    default: {
      const { LaunchdScheduler } = await import("./launchd-HNZIWLNC.js");
      return new LaunchdScheduler();
    }
  }
}

export {
  createScheduler
};
//# sourceMappingURL=chunk-DVZC42TL.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/platform/detect.ts","../src/platform/scheduler.ts"],"sourcesContent":["import { SchedulerError } from \"../util/errors.js\";\n\nexport type Platform = \"linux\" | \"darwin\" | \"win32\";\n\n/**\n * Detect the current platform. Returns \"linux\", \"darwin\", or \"win32\".\n * Throws SchedulerError for unsupported platforms.\n */\nexport function detectPlatform(): Platform {\n\tconst p = process.platform;\n\tif (p === \"linux\" || p === \"darwin\" || p === \"win32\") return p;\n\tthrow new SchedulerError(\n\t\tp,\n\t\t`Unsupported platform: ${p}. Only linux, darwin, and win32 are supported.`,\n\t);\n}\n","import type { JobConfig } from \"../core/types.js\";\nimport { detectPlatform } from \"./detect.js\";\n\nexport interface RegisteredJob {\n\tjobId: string;\n\tschedule: string;\n\tcommand: string;\n}\n\nexport interface Scheduler {\n\tregister(job: JobConfig, env?: Record<string, string>): Promise<void>;\n\tunregister(jobId: string): Promise<void>;\n\tisRegistered(jobId: string): Promise<boolean>;\n\tlist(): Promise<RegisteredJob[]>;\n}\n\n/**\n * Factory function that returns the correct Scheduler implementation\n * for the current platform: CrontabScheduler on Linux, LaunchdScheduler on macOS,\n * SchtasksScheduler on Windows.\n */\nexport async function createScheduler(): Promise<Scheduler> {\n\tconst platform = detectPlatform();\n\tif (platform === \"linux\") {\n\t\tconst { CrontabScheduler } = await import(\"./crontab.js\");\n\t\treturn new CrontabScheduler();\n\t}\n\tif (platform === \"win32\") {\n\t\tconst { SchtasksScheduler } = await import(\"./schtasks.js\");\n\t\treturn new SchtasksScheduler();\n\t}\n\tconst { LaunchdScheduler } = await import(\"./launchd.js\");\n\treturn new 
LaunchdScheduler();\n}\n"],"mappings":";;;;;AAQO,SAAS,iBAA2B;AAC1C,QAAM,IAAI,QAAQ;AAClB,MAAI,MAAM,WAAW,MAAM,YAAY,MAAM,QAAS,QAAO;AAC7D,QAAM,IAAI;AAAA,IACT;AAAA,IACA,yBAAyB,CAAC;AAAA,EAC3B;AACD;;;ACMA,eAAsB,kBAAsC;AAC3D,QAAM,WAAW,eAAe;AAChC,MAAI,aAAa,SAAS;AACzB,UAAM,EAAE,iBAAiB,IAAI,MAAM,OAAO,uBAAc;AACxD,WAAO,IAAI,iBAAiB;AAAA,EAC7B;AACA,MAAI,aAAa,SAAS;AACzB,UAAM,EAAE,kBAAkB,IAAI,MAAM,OAAO,wBAAe;AAC1D,WAAO,IAAI,kBAAkB;AAAA,EAC9B;AACA,QAAM,EAAE,iBAAiB,IAAI,MAAM,OAAO,uBAAc;AACxD,SAAO,IAAI,iBAAiB;AAC7B;","names":[]}
@@ -0,0 +1,13 @@
1
// src/util/fs.ts
import { mkdir } from "fs/promises";
import { dirname } from "path";
import writeFileAtomic from "write-file-atomic";

/**
 * Atomically write `content` (UTF-8) to `filePath`, creating any missing
 * parent directories first.
 */
async function writeFileSafe(filePath, content) {
  const parentDir = dirname(filePath);
  await mkdir(parentDir, { recursive: true });
  await writeFileAtomic(filePath, content, "utf-8");
}

export {
  writeFileSafe
};
//# sourceMappingURL=chunk-E3XVLTT4.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/util/fs.ts"],"sourcesContent":["import { mkdir } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport writeFileAtomic from \"write-file-atomic\";\n\nexport async function writeFileSafe(filePath: string, content: string): Promise<void> {\n\tawait mkdir(dirname(filePath), { recursive: true });\n\tawait writeFileAtomic(filePath, content, \"utf-8\");\n}\n"],"mappings":";AAAA,SAAS,aAAa;AACtB,SAAS,eAAe;AACxB,OAAO,qBAAqB;AAE5B,eAAsB,cAAc,UAAkB,SAAgC;AACrF,QAAM,MAAM,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,QAAM,gBAAgB,UAAU,SAAS,OAAO;AACjD;","names":[]}
@@ -0,0 +1,116 @@
1
import {
  getDatabase
} from "./chunk-4I5UIASZ.js";
import {
  writeFileSafe
} from "./chunk-E3XVLTT4.js";
import {
  paths
} from "./chunk-H2MUDYMW.js";

// src/runner/logger.ts
import { readdir, readFile } from "fs/promises";

// src/runner/context-store.ts
/**
 * Persist one run log entry into the SQLite `runs` table.
 * Maps camelCase entry fields to snake_case columns; optional fields are
 * stored as NULL, and pipelineStages is serialized to a JSON string.
 */
function saveRunContext(entry) {
  const db = getDatabase();
  db.prepare(
    `INSERT INTO runs (
			id, job_id, status, started_at, completed_at, duration_ms,
			cost_usd, num_turns, session_id, model, pr_url, branch_name,
			issue_number, summary, error, feedback_round, pr_number, pipeline_stages
		) VALUES (
			@id, @job_id, @status, @started_at, @completed_at, @duration_ms,
			@cost_usd, @num_turns, @session_id, @model, @pr_url, @branch_name,
			@issue_number, @summary, @error, @feedback_round, @pr_number, @pipeline_stages
		)`
  ).run({
    id: entry.runId,
    job_id: entry.jobId,
    status: entry.status,
    started_at: entry.startedAt,
    completed_at: entry.completedAt,
    duration_ms: entry.durationMs,
    cost_usd: entry.costUsd ?? null,
    num_turns: entry.numTurns ?? null,
    session_id: entry.sessionId ?? null,
    model: entry.model ?? null,
    pr_url: entry.prUrl ?? null,
    branch_name: entry.branchName ?? null,
    issue_number: entry.issueNumber ?? null,
    summary: entry.summary ?? null,
    error: entry.error ?? null,
    feedback_round: entry.feedbackRound ?? null,
    pr_number: entry.prNumber ?? null,
    pipeline_stages: entry.pipelineStages ? JSON.stringify(entry.pipelineStages) : null
  });
}
/**
 * Load up to `limit` recent run contexts for a job, newest first.
 * Only "success" and "no-changes" runs are returned, so failure context is
 * never injected into downstream prompts.
 */
function loadRunContext(jobId, limit = 5) {
  const db = getDatabase();
  return db.prepare(
    `SELECT id, status, pr_url, branch_name, issue_number, summary, started_at
			FROM runs
			WHERE job_id = ? AND status IN ('success', 'no-changes')
			ORDER BY started_at DESC
			LIMIT ?`
  ).all(jobId, limit);
}
/**
 * Format run context rows as a "## Previous Work" prompt section.
 * Emits only verifiable facts (issue number, PR URL, branch name) per run;
 * returns "" when there is no context.
 */
function formatContextWindow(context) {
  if (context.length === 0) return "";
  const lines = context.map((c) => {
    const parts = [`- Run ${c.id} (${c.started_at}): ${c.status}`];
    if (c.issue_number) parts.push(`  Issue: #${c.issue_number}`);
    if (c.pr_url) parts.push(`  PR: ${c.pr_url}`);
    if (c.branch_name) parts.push(`  Branch: ${c.branch_name}`);
    return parts.join("\n");
  });
  return `## Previous Work (DO NOT duplicate)

${lines.join("\n\n")}`;
}

// src/runner/logger.ts
/**
 * Write a run log entry as pretty-printed JSON under the job's runs dir,
 * then dual-write it to SQLite for cross-run context queries.
 * The SQLite write is deliberately best-effort: a DB failure never fails the run.
 */
async function writeRunLog(jobId, entry) {
  const logPath = paths.jobLog(jobId, entry.runId);
  await writeFileSafe(logPath, JSON.stringify(entry, null, 2));
  try {
    saveRunContext(entry);
  } catch {
    // intentional: SQLite persistence is best-effort
  }
}
/**
 * List all parsed run logs for a job, sorted by startedAt descending.
 * Returns [] when the runs directory doesn't exist (ENOENT); other readdir
 * errors are rethrown. Files that fail to parse are skipped.
 */
async function listRunLogs(jobId) {
  const logsDir = paths.jobLogs(jobId);
  let files;
  try {
    files = await readdir(logsDir);
  } catch (err) {
    if (err instanceof Error && "code" in err && err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  const logFiles = files.filter((f) => f.endsWith(".log"));
  const entries = [];
  for (const file of logFiles) {
    try {
      // strip the ".log" suffix to recover the runId for path construction
      const filePath = paths.jobLog(jobId, file.replace(/\.log$/, ""));
      const content = await readFile(filePath, "utf-8");
      entries.push(JSON.parse(content));
    } catch {
      // skip unreadable or unparsable log files
    }
  }
  entries.sort((a, b) => {
    const aTime = new Date(a.startedAt).getTime();
    const bTime = new Date(b.startedAt).getTime();
    return bTime - aTime;
  });
  return entries;
}

export {
  loadRunContext,
  formatContextWindow,
  writeRunLog,
  listRunLogs
};
//# sourceMappingURL=chunk-GLW7T4QE.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runner/logger.ts","../src/runner/context-store.ts"],"sourcesContent":["import { readdir, readFile } from \"node:fs/promises\";\nimport { writeFileSafe } from \"../util/fs.js\";\nimport { paths } from \"../util/paths.js\";\nimport { saveRunContext } from \"./context-store.js\";\nimport type { RunLogEntry } from \"./types.js\";\n\n/**\n * Write a run log entry as a JSON file for a specific job run.\n * Creates the runs directory if it doesn't exist (via writeFileSafe).\n *\n * @param jobId - The job identifier\n * @param entry - The run log entry to write\n */\nexport async function writeRunLog(jobId: string, entry: RunLogEntry): Promise<void> {\n\tconst logPath = paths.jobLog(jobId, entry.runId);\n\tawait writeFileSafe(logPath, JSON.stringify(entry, null, 2));\n\n\t// Dual-write: persist to SQLite for cross-run context queries (CTXT-01, COST-01).\n\t// Best-effort -- never fail the run due to DB issues.\n\ttry {\n\t\tsaveRunContext(entry);\n\t} catch {\n\t\t// SQLite write is best-effort\n\t}\n}\n\n/**\n * Read and parse a specific run log entry.\n *\n * @param jobId - The job identifier\n * @param runId - The run identifier\n * @returns Parsed RunLogEntry\n * @throws When the log file doesn't exist (ENOENT)\n */\nexport async function readRunLog(jobId: string, runId: string): Promise<RunLogEntry> {\n\tconst logPath = paths.jobLog(jobId, runId);\n\tconst content = await readFile(logPath, \"utf-8\");\n\treturn JSON.parse(content) as RunLogEntry;\n}\n\n/**\n * List all run logs for a job, sorted by startedAt descending (newest first).\n * Returns an empty array when the runs directory doesn't exist.\n * Skips files that fail to parse (logs warning but doesn't throw).\n *\n * @param jobId - The job identifier\n * @returns Array of RunLogEntry sorted by startedAt descending\n */\nexport async function listRunLogs(jobId: string): Promise<RunLogEntry[]> {\n\tconst logsDir = paths.jobLogs(jobId);\n\n\tlet files: string[];\n\ttry 
{\n\t\tfiles = (await readdir(logsDir)) as string[];\n\t} catch (err: unknown) {\n\t\tif (err instanceof Error && \"code\" in err && (err as NodeJS.ErrnoException).code === \"ENOENT\") {\n\t\t\treturn [];\n\t\t}\n\t\tthrow err;\n\t}\n\n\tconst logFiles = files.filter((f) => f.endsWith(\".log\"));\n\tconst entries: RunLogEntry[] = [];\n\n\tfor (const file of logFiles) {\n\t\ttry {\n\t\t\tconst filePath = paths.jobLog(jobId, file.replace(/\\.log$/, \"\"));\n\t\t\tconst content = await readFile(filePath, \"utf-8\");\n\t\t\tentries.push(JSON.parse(content) as RunLogEntry);\n\t\t} catch {\n\t\t\t// Skip files that fail to parse\n\t\t}\n\t}\n\n\t// Sort by startedAt descending (newest first)\n\tentries.sort((a, b) => {\n\t\tconst aTime = new Date(a.startedAt).getTime();\n\t\tconst bTime = new Date(b.startedAt).getTime();\n\t\treturn bTime - aTime;\n\t});\n\n\treturn entries;\n}\n","import { getDatabase } from \"../core/database.js\";\nimport type { RunLogEntry } from \"./types.js\";\n\n/**\n * Structured context from a prior run, containing only verifiable facts\n * (issue numbers, PR URLs, branch names) -- never raw narrative summaries.\n */\nexport interface RunContext {\n\tid: string;\n\tstatus: string;\n\tpr_url: string | null;\n\tbranch_name: string | null;\n\tissue_number: number | null;\n\tsummary: string | null;\n\tstarted_at: string;\n}\n\n/**\n * Persist a run log entry to the SQLite database.\n * Maps RunLogEntry camelCase fields to snake_case columns.\n *\n * @param entry - The run log entry to save\n */\nexport function saveRunContext(entry: RunLogEntry): void {\n\tconst db = getDatabase();\n\tdb.prepare(\n\t\t`INSERT INTO runs (\n\t\t\tid, job_id, status, started_at, completed_at, duration_ms,\n\t\t\tcost_usd, num_turns, session_id, model, pr_url, branch_name,\n\t\t\tissue_number, summary, error, feedback_round, pr_number, pipeline_stages\n\t\t) VALUES (\n\t\t\t@id, @job_id, @status, @started_at, @completed_at, @duration_ms,\n\t\t\t@cost_usd, @num_turns, 
@session_id, @model, @pr_url, @branch_name,\n\t\t\t@issue_number, @summary, @error, @feedback_round, @pr_number, @pipeline_stages\n\t\t)`,\n\t).run({\n\t\tid: entry.runId,\n\t\tjob_id: entry.jobId,\n\t\tstatus: entry.status,\n\t\tstarted_at: entry.startedAt,\n\t\tcompleted_at: entry.completedAt,\n\t\tduration_ms: entry.durationMs,\n\t\tcost_usd: entry.costUsd ?? null,\n\t\tnum_turns: entry.numTurns ?? null,\n\t\tsession_id: entry.sessionId ?? null,\n\t\tmodel: entry.model ?? null,\n\t\tpr_url: entry.prUrl ?? null,\n\t\tbranch_name: entry.branchName ?? null,\n\t\tissue_number: entry.issueNumber ?? null,\n\t\tsummary: entry.summary ?? null,\n\t\terror: entry.error ?? null,\n\t\tfeedback_round: entry.feedbackRound ?? null,\n\t\tpr_number: entry.prNumber ?? null,\n\t\tpipeline_stages: entry.pipelineStages ? JSON.stringify(entry.pipelineStages) : null,\n\t});\n}\n\n/**\n * Load recent run context for a job from SQLite.\n * Only returns runs with status \"success\" or \"no-changes\" (not errors, locked, etc.)\n * to avoid injecting failure context into Claude's prompt.\n *\n * @param jobId - The job identifier\n * @param limit - Maximum number of recent runs to return (default: 5)\n * @returns Array of RunContext sorted by started_at descending\n */\nexport function loadRunContext(jobId: string, limit = 5): RunContext[] {\n\tconst db = getDatabase();\n\treturn db\n\t\t.prepare(\n\t\t\t`SELECT id, status, pr_url, branch_name, issue_number, summary, started_at\n\t\t\tFROM runs\n\t\t\tWHERE job_id = ? 
AND status IN ('success', 'no-changes')\n\t\t\tORDER BY started_at DESC\n\t\t\tLIMIT ?`,\n\t\t)\n\t\t.all(jobId, limit) as RunContext[];\n}\n\n/**\n * Format run context as a structured \"Previous Work\" prompt section.\n * Only includes verifiable facts (issue numbers, PR URLs, branch names)\n * to avoid hallucination amplification from raw narrative summaries.\n *\n * @param context - Array of RunContext from loadRunContext\n * @returns Formatted string for prompt injection, or empty string if no context\n */\nexport function formatContextWindow(context: RunContext[]): string {\n\tif (context.length === 0) return \"\";\n\n\tconst lines = context.map((c) => {\n\t\tconst parts = [`- Run ${c.id} (${c.started_at}): ${c.status}`];\n\t\tif (c.issue_number) parts.push(` Issue: #${c.issue_number}`);\n\t\tif (c.pr_url) parts.push(` PR: ${c.pr_url}`);\n\t\tif (c.branch_name) parts.push(` Branch: ${c.branch_name}`);\n\t\treturn parts.join(\"\\n\");\n\t});\n\n\treturn `## Previous Work (DO NOT duplicate)\\n\\n${lines.join(\"\\n\\n\")}`;\n}\n"],"mappings":";;;;;;;;;;;AAAA,SAAS,SAAS,gBAAgB;;;ACuB3B,SAAS,eAAe,OAA0B;AACxD,QAAM,KAAK,YAAY;AACvB,KAAG;AAAA,IACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,EAAE,IAAI;AAAA,IACL,IAAI,MAAM;AAAA,IACV,QAAQ,MAAM;AAAA,IACd,QAAQ,MAAM;AAAA,IACd,YAAY,MAAM;AAAA,IAClB,cAAc,MAAM;AAAA,IACpB,aAAa,MAAM;AAAA,IACnB,UAAU,MAAM,WAAW;AAAA,IAC3B,WAAW,MAAM,YAAY;AAAA,IAC7B,YAAY,MAAM,aAAa;AAAA,IAC/B,OAAO,MAAM,SAAS;AAAA,IACtB,QAAQ,MAAM,SAAS;AAAA,IACvB,aAAa,MAAM,cAAc;AAAA,IACjC,cAAc,MAAM,eAAe;AAAA,IACnC,SAAS,MAAM,WAAW;AAAA,IAC1B,OAAO,MAAM,SAAS;AAAA,IACtB,gBAAgB,MAAM,iBAAiB;AAAA,IACvC,WAAW,MAAM,YAAY;AAAA,IAC7B,iBAAiB,MAAM,iBAAiB,KAAK,UAAU,MAAM,cAAc,IAAI;AAAA,EAChF,CAAC;AACF;AAWO,SAAS,eAAe,OAAe,QAAQ,GAAiB;AACtE,QAAM,KAAK,YAAY;AACvB,SAAO,GACL;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKD,EACC,IAAI,OAAO,KAAK;AACnB;AAUO,SAAS,oBAAoB,SAA+B;AAClE,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,QAAQ,QAAQ,IAAI,CAAC,MAAM;AAChC,UAAM,QAAQ,CAAC,SAAS,EAAE,EAAE,KAAK,EAAE,UAAU,MAAM,EAAE,MAAM,
EAAE;AAC7D,QAAI,EAAE,aAAc,OAAM,KAAK,aAAa,EAAE,YAAY,EAAE;AAC5D,QAAI,EAAE,OAAQ,OAAM,KAAK,SAAS,EAAE,MAAM,EAAE;AAC5C,QAAI,EAAE,YAAa,OAAM,KAAK,aAAa,EAAE,WAAW,EAAE;AAC1D,WAAO,MAAM,KAAK,IAAI;AAAA,EACvB,CAAC;AAED,SAAO;AAAA;AAAA,EAA0C,MAAM,KAAK,MAAM,CAAC;AACpE;;;ADtFA,eAAsB,YAAY,OAAe,OAAmC;AACnF,QAAM,UAAU,MAAM,OAAO,OAAO,MAAM,KAAK;AAC/C,QAAM,cAAc,SAAS,KAAK,UAAU,OAAO,MAAM,CAAC,CAAC;AAI3D,MAAI;AACH,mBAAe,KAAK;AAAA,EACrB,QAAQ;AAAA,EAER;AACD;AAwBA,eAAsB,YAAY,OAAuC;AACxE,QAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,MAAI;AACJ,MAAI;AACH,YAAS,MAAM,QAAQ,OAAO;AAAA,EAC/B,SAAS,KAAc;AACtB,QAAI,eAAe,SAAS,UAAU,OAAQ,IAA8B,SAAS,UAAU;AAC9F,aAAO,CAAC;AAAA,IACT;AACA,UAAM;AAAA,EACP;AAEA,QAAM,WAAW,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,CAAC;AACvD,QAAM,UAAyB,CAAC;AAEhC,aAAW,QAAQ,UAAU;AAC5B,QAAI;AACH,YAAM,WAAW,MAAM,OAAO,OAAO,KAAK,QAAQ,UAAU,EAAE,CAAC;AAC/D,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,cAAQ,KAAK,KAAK,MAAM,OAAO,CAAgB;AAAA,IAChD,QAAQ;AAAA,IAER;AAAA,EACD;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM;AACtB,UAAM,QAAQ,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ;AAC5C,UAAM,QAAQ,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ;AAC5C,WAAO,QAAQ;AAAA,EAChB,CAAC;AAED,SAAO;AACR;","names":[]}
@@ -0,0 +1,23 @@
1
+ // src/util/paths.ts
2
+ import { homedir } from "os";
3
+ import { join } from "path";
4
// src/util/paths.ts
// Canonical filesystem layout for claude-auto state, rooted at ~/.claude-auto.
const BASE_DIR = join(homedir(), ".claude-auto");
const JOBS_DIR = join(BASE_DIR, "jobs");
const LAUNCH_AGENTS_DIR = join(homedir(), "Library", "LaunchAgents");
// Per-job state directory: ~/.claude-auto/jobs/<jobId>
const jobRoot = (jobId) => join(JOBS_DIR, jobId);
var paths = {
  base: BASE_DIR,
  jobs: JOBS_DIR,
  jobDir: (jobId) => jobRoot(jobId),
  jobConfig: (jobId) => join(jobRoot(jobId), "config.yaml"),
  logs: join(BASE_DIR, "logs"),
  jobLogs: (jobId) => join(jobRoot(jobId), "runs"),
  jobLog: (jobId, runId) => join(jobRoot(jobId), "runs", `${runId}.log`),
  jobLock: (jobId) => join(jobRoot(jobId), ".lock"),
  // macOS launchd integration lives under the user's LaunchAgents folder.
  plistDir: LAUNCH_AGENTS_DIR,
  plistPath: (jobId) => join(LAUNCH_AGENTS_DIR, `com.claude-auto.${jobId}.plist`),
  crontabLock: join(BASE_DIR, ".crontab.lock"),
  database: join(BASE_DIR, "claude-auto.db")
};
19
+
20
+ export {
21
+ paths
22
+ };
23
+ //# sourceMappingURL=chunk-H2MUDYMW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/util/paths.ts"],"sourcesContent":["import { homedir } from \"node:os\";\nimport { join } from \"node:path\";\n\nconst BASE_DIR = join(homedir(), \".claude-auto\");\n\nexport const paths = {\n\tbase: BASE_DIR,\n\tjobs: join(BASE_DIR, \"jobs\"),\n\tjobDir: (jobId: string) => join(BASE_DIR, \"jobs\", jobId),\n\tjobConfig: (jobId: string) => join(BASE_DIR, \"jobs\", jobId, \"config.yaml\"),\n\tlogs: join(BASE_DIR, \"logs\"),\n\tjobLogs: (jobId: string) => join(BASE_DIR, \"jobs\", jobId, \"runs\"),\n\tjobLog: (jobId: string, runId: string) => join(BASE_DIR, \"jobs\", jobId, \"runs\", `${runId}.log`),\n\tjobLock: (jobId: string) => join(BASE_DIR, \"jobs\", jobId, \".lock\"),\n\tplistDir: join(homedir(), \"Library\", \"LaunchAgents\"),\n\tplistPath: (jobId: string) =>\n\t\tjoin(homedir(), \"Library\", \"LaunchAgents\", `com.claude-auto.${jobId}.plist`),\n\tcrontabLock: join(BASE_DIR, \".crontab.lock\"),\n\tdatabase: join(BASE_DIR, \"claude-auto.db\"),\n} as const;\n"],"mappings":";AAAA,SAAS,eAAe;AACxB,SAAS,YAAY;AAErB,IAAM,WAAW,KAAK,QAAQ,GAAG,cAAc;AAExC,IAAM,QAAQ;AAAA,EACpB,MAAM;AAAA,EACN,MAAM,KAAK,UAAU,MAAM;AAAA,EAC3B,QAAQ,CAAC,UAAkB,KAAK,UAAU,QAAQ,KAAK;AAAA,EACvD,WAAW,CAAC,UAAkB,KAAK,UAAU,QAAQ,OAAO,aAAa;AAAA,EACzE,MAAM,KAAK,UAAU,MAAM;AAAA,EAC3B,SAAS,CAAC,UAAkB,KAAK,UAAU,QAAQ,OAAO,MAAM;AAAA,EAChE,QAAQ,CAAC,OAAe,UAAkB,KAAK,UAAU,QAAQ,OAAO,QAAQ,GAAG,KAAK,MAAM;AAAA,EAC9F,SAAS,CAAC,UAAkB,KAAK,UAAU,QAAQ,OAAO,OAAO;AAAA,EACjE,UAAU,KAAK,QAAQ,GAAG,WAAW,cAAc;AAAA,EACnD,WAAW,CAAC,UACX,KAAK,QAAQ,GAAG,WAAW,gBAAgB,mBAAmB,KAAK,QAAQ;AAAA,EAC5E,aAAa,KAAK,UAAU,eAAe;AAAA,EAC3C,UAAU,KAAK,UAAU,gBAAgB;AAC1C;","names":[]}
@@ -0,0 +1,69 @@
1
+ import {
2
+ listRunLogs
3
+ } from "./chunk-SMZYA6CY.js";
4
+ import {
5
+ formatRelativeTime,
6
+ formatTable
7
+ } from "./chunk-ORBF5IW3.js";
8
+ import {
9
+ describeSchedule,
10
+ getNextRuns
11
+ } from "./chunk-D4MBOIYQ.js";
12
+ import {
13
+ listJobs
14
+ } from "./chunk-24PS2XSV.js";
15
+
16
+ // src/cli/commands/list.ts
17
// Truncate a slash-separated path to its trailing `segments` components for
// compact display; paths that are already short enough are returned untouched.
function truncatePath(fullPath, segments = 2) {
  const pieces = fullPath.split("/").filter(Boolean);
  return pieces.length <= segments ? fullPath : pieces.slice(-segments).join("/");
}
24
// List all configured jobs with status, schedule, last run, and next run.
// Emits a JSON array when --json is set, otherwise a formatted table.
async function listCommand(args) {
  const jobs = await listJobs();
  if (jobs.length === 0) {
    console.log(args.json ? "[]" : "No jobs configured.");
    return;
  }

  // One display row per job: [id, name, status, repo, schedule, lastRun, nextRun].
  const rows = [];
  for (const job of jobs) {
    const status = job.enabled ? "active" : "paused";
    const schedule = describeSchedule(job.schedule.cron);

    // Most recent run, if any (logs come back newest-first).
    const logs = await listRunLogs(job.id);
    const lastRun = logs.length > 0 ? formatRelativeTime(new Date(logs[0].startedAt)) : "never";

    // Upcoming run; paused jobs never fire.
    let nextRun = "--";
    if (job.enabled) {
      const upcoming = getNextRuns(job.schedule.cron, job.schedule.timezone, 1);
      if (upcoming.length > 0) {
        nextRun = formatRelativeTime(upcoming[0]);
      }
    }

    rows.push([job.id, job.name, status, truncatePath(job.repo.path), schedule, lastRun, nextRun]);
  }

  if (args.json) {
    const payload = jobs.map((job, i) => ({
      id: job.id,
      name: job.name,
      status: job.enabled ? "active" : "paused",
      repo: job.repo.path,
      schedule: rows[i][4],
      lastRun: rows[i][5],
      nextRun: rows[i][6]
    }));
    console.log(JSON.stringify(payload, null, 2));
    return;
  }

  const headers = ["ID", "Name", "Status", "Repo", "Schedule", "Last Run", "Next Run"];
  console.log(formatTable(headers, rows));
}
65
+
66
+ export {
67
+ listCommand
68
+ };
69
+ //# sourceMappingURL=chunk-HF7PGQI3.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/cli/commands/list.ts"],"sourcesContent":["import { listJobs } from \"../../core/job-manager.js\";\nimport { describeSchedule, getNextRuns } from \"../../core/schedule.js\";\nimport { listRunLogs } from \"../../runner/logger.js\";\nimport { formatRelativeTime, formatTable } from \"../format.js\";\nimport type { ParsedCommand } from \"../types.js\";\n\n/**\n * Truncate a path to its last N segments for compact display.\n */\nfunction truncatePath(fullPath: string, segments = 2): string {\n\tconst parts = fullPath.split(\"/\").filter(Boolean);\n\tif (parts.length <= segments) {\n\t\treturn fullPath;\n\t}\n\treturn parts.slice(-segments).join(\"/\");\n}\n\n/**\n * List all configured jobs with enriched status info:\n * ID, Name, Status, Repo, Schedule, Last Run, Next Run.\n *\n * Satisfies JOB-01 (list with status/schedule/last run/next run)\n * and JOB-05 (multiple jobs visible as separate entries).\n */\nexport async function listCommand(args: ParsedCommand[\"args\"]): Promise<void> {\n\tconst jobs = await listJobs();\n\n\tif (jobs.length === 0) {\n\t\tif (args.json) {\n\t\t\tconsole.log(\"[]\");\n\t\t} else {\n\t\t\tconsole.log(\"No jobs configured.\");\n\t\t}\n\t\treturn;\n\t}\n\n\tconst rows: string[][] = [];\n\n\tfor (const job of jobs) {\n\t\tconst status = job.enabled ? \"active\" : \"paused\";\n\t\tconst schedule = describeSchedule(job.schedule.cron);\n\n\t\t// Last run\n\t\tconst logs = await listRunLogs(job.id);\n\t\tconst lastRun = logs.length > 0 ? formatRelativeTime(new Date(logs[0].startedAt)) : \"never\";\n\n\t\t// Next run\n\t\tlet nextRun: string;\n\t\tif (!job.enabled) {\n\t\t\tnextRun = \"--\";\n\t\t} else {\n\t\t\tconst nextRuns = getNextRuns(job.schedule.cron, job.schedule.timezone, 1);\n\t\t\tnextRun = nextRuns.length > 0 ? 
formatRelativeTime(nextRuns[0]) : \"--\";\n\t\t}\n\n\t\trows.push([job.id, job.name, status, truncatePath(job.repo.path), schedule, lastRun, nextRun]);\n\t}\n\n\tif (args.json) {\n\t\tconst jsonOutput = jobs.map((job, i) => ({\n\t\t\tid: job.id,\n\t\t\tname: job.name,\n\t\t\tstatus: job.enabled ? \"active\" : \"paused\",\n\t\t\trepo: job.repo.path,\n\t\t\tschedule: rows[i][4],\n\t\t\tlastRun: rows[i][5],\n\t\t\tnextRun: rows[i][6],\n\t\t}));\n\t\tconsole.log(JSON.stringify(jsonOutput, null, 2));\n\t\treturn;\n\t}\n\n\tconst headers = [\"ID\", \"Name\", \"Status\", \"Repo\", \"Schedule\", \"Last Run\", \"Next Run\"];\n\tconsole.log(formatTable(headers, rows));\n}\n"],"mappings":";;;;;;;;;;;;;;;;AASA,SAAS,aAAa,UAAkB,WAAW,GAAW;AAC7D,QAAM,QAAQ,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AAChD,MAAI,MAAM,UAAU,UAAU;AAC7B,WAAO;AAAA,EACR;AACA,SAAO,MAAM,MAAM,CAAC,QAAQ,EAAE,KAAK,GAAG;AACvC;AASA,eAAsB,YAAY,MAA4C;AAC7E,QAAM,OAAO,MAAM,SAAS;AAE5B,MAAI,KAAK,WAAW,GAAG;AACtB,QAAI,KAAK,MAAM;AACd,cAAQ,IAAI,IAAI;AAAA,IACjB,OAAO;AACN,cAAQ,IAAI,qBAAqB;AAAA,IAClC;AACA;AAAA,EACD;AAEA,QAAM,OAAmB,CAAC;AAE1B,aAAW,OAAO,MAAM;AACvB,UAAM,SAAS,IAAI,UAAU,WAAW;AACxC,UAAM,WAAW,iBAAiB,IAAI,SAAS,IAAI;AAGnD,UAAM,OAAO,MAAM,YAAY,IAAI,EAAE;AACrC,UAAM,UAAU,KAAK,SAAS,IAAI,mBAAmB,IAAI,KAAK,KAAK,CAAC,EAAE,SAAS,CAAC,IAAI;AAGpF,QAAI;AACJ,QAAI,CAAC,IAAI,SAAS;AACjB,gBAAU;AAAA,IACX,OAAO;AACN,YAAM,WAAW,YAAY,IAAI,SAAS,MAAM,IAAI,SAAS,UAAU,CAAC;AACxE,gBAAU,SAAS,SAAS,IAAI,mBAAmB,SAAS,CAAC,CAAC,IAAI;AAAA,IACnE;AAEA,SAAK,KAAK,CAAC,IAAI,IAAI,IAAI,MAAM,QAAQ,aAAa,IAAI,KAAK,IAAI,GAAG,UAAU,SAAS,OAAO,CAAC;AAAA,EAC9F;AAEA,MAAI,KAAK,MAAM;AACd,UAAM,aAAa,KAAK,IAAI,CAAC,KAAK,OAAO;AAAA,MACxC,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,QAAQ,IAAI,UAAU,WAAW;AAAA,MACjC,MAAM,IAAI,KAAK;AAAA,MACf,UAAU,KAAK,CAAC,EAAE,CAAC;AAAA,MACnB,SAAS,KAAK,CAAC,EAAE,CAAC;AAAA,MAClB,SAAS,KAAK,CAAC,EAAE,CAAC;AAAA,IACnB,EAAE;AACF,YAAQ,IAAI,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC;AAC/C;AAAA,EACD;AAEA,QAAM,UAAU,CAAC,MAAM,QAAQ,UAAU,QAAQ,YAAY,YAAY,UAAU;AACnF,UAAQ,IAAI,YAAY,SAAS,IAAI,CAAC;AA
CvC;","names":[]}