@neotx/cli 0.1.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/agents-Y6LREFXP.js +58 -0
- package/dist/agents-Y6LREFXP.js.map +1 -0
- package/dist/chunk-CP54H7WA.js +85 -0
- package/dist/chunk-CP54H7WA.js.map +1 -0
- package/dist/chunk-EZAJLAUF.js +40 -0
- package/dist/chunk-EZAJLAUF.js.map +1 -0
- package/dist/chunk-TNJOG54I.js +16 -0
- package/dist/chunk-TNJOG54I.js.map +1 -0
- package/dist/chunk-YQIWMDXL.js +33 -0
- package/dist/chunk-YQIWMDXL.js.map +1 -0
- package/dist/cost-DNGKT4UC.js +134 -0
- package/dist/cost-DNGKT4UC.js.map +1 -0
- package/dist/daemon/supervisor-worker.js +28 -0
- package/dist/daemon/supervisor-worker.js.map +1 -0
- package/dist/daemon/worker.js +95 -0
- package/dist/daemon/worker.js.map +1 -0
- package/dist/doctor-CPVIT7IP.js +198 -0
- package/dist/doctor-CPVIT7IP.js.map +1 -0
- package/dist/index.js +24 -0
- package/dist/index.js.map +1 -0
- package/dist/init-YNSPTCA3.js +74 -0
- package/dist/init-YNSPTCA3.js.map +1 -0
- package/dist/logs-AWNAMMJC.js +200 -0
- package/dist/logs-AWNAMMJC.js.map +1 -0
- package/dist/mcp-LC5VU65M.js +217 -0
- package/dist/mcp-LC5VU65M.js.map +1 -0
- package/dist/repos-GI6F72NO.js +111 -0
- package/dist/repos-GI6F72NO.js.map +1 -0
- package/dist/run-KIU2ZE72.js +231 -0
- package/dist/run-KIU2ZE72.js.map +1 -0
- package/dist/runs-CHA2JM5K.js +176 -0
- package/dist/runs-CHA2JM5K.js.map +1 -0
- package/dist/supervise-7ZITWRSL.js +298 -0
- package/dist/supervise-7ZITWRSL.js.map +1 -0
- package/dist/tui-W2FHMMMN.js +489 -0
- package/dist/tui-W2FHMMMN.js.map +1 -0
- package/package.json +53 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Voltaire Network
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import {
|
|
2
|
+
resolveAgentsDir
|
|
3
|
+
} from "./chunk-TNJOG54I.js";
|
|
4
|
+
import {
|
|
5
|
+
printError,
|
|
6
|
+
printJson,
|
|
7
|
+
printTable
|
|
8
|
+
} from "./chunk-YQIWMDXL.js";
|
|
9
|
+
|
|
10
|
+
// src/commands/agents.ts
|
|
11
|
+
import { existsSync } from "fs";
|
|
12
|
+
import path from "path";
|
|
13
|
+
import { AgentRegistry } from "@neotx/core";
|
|
14
|
+
import { defineCommand } from "citty";
|
|
15
|
+
var agents_default = defineCommand({
|
|
16
|
+
meta: {
|
|
17
|
+
name: "agents",
|
|
18
|
+
description: "List available agents (built-in and custom from .neo/agents/)"
|
|
19
|
+
},
|
|
20
|
+
args: {
|
|
21
|
+
output: {
|
|
22
|
+
type: "string",
|
|
23
|
+
description: "Output format: json"
|
|
24
|
+
}
|
|
25
|
+
},
|
|
26
|
+
async run({ args }) {
|
|
27
|
+
const jsonOutput = args.output === "json";
|
|
28
|
+
const builtInDir = resolveAgentsDir();
|
|
29
|
+
const customDir = path.resolve(".neo/agents");
|
|
30
|
+
if (!existsSync(builtInDir)) {
|
|
31
|
+
printError("Agent definitions not found. Is @neotx/agents installed?");
|
|
32
|
+
process.exitCode = 1;
|
|
33
|
+
return;
|
|
34
|
+
}
|
|
35
|
+
const registry = new AgentRegistry(builtInDir, existsSync(customDir) ? customDir : void 0);
|
|
36
|
+
await registry.load();
|
|
37
|
+
const agents = registry.list();
|
|
38
|
+
if (jsonOutput) {
|
|
39
|
+
printJson(
|
|
40
|
+
agents.map((a) => ({
|
|
41
|
+
name: a.name,
|
|
42
|
+
model: a.definition.model,
|
|
43
|
+
sandbox: a.sandbox,
|
|
44
|
+
source: a.source
|
|
45
|
+
}))
|
|
46
|
+
);
|
|
47
|
+
return;
|
|
48
|
+
}
|
|
49
|
+
printTable(
|
|
50
|
+
["NAME", "MODEL", "SANDBOX", "SOURCE"],
|
|
51
|
+
agents.map((a) => [a.name, a.definition.model, a.sandbox, a.source])
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
export {
|
|
56
|
+
agents_default as default
|
|
57
|
+
};
|
|
58
|
+
//# sourceMappingURL=agents-Y6LREFXP.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/agents.ts"],"sourcesContent":["import { existsSync } from \"node:fs\";\nimport path from \"node:path\";\nimport { AgentRegistry } from \"@neotx/core\";\nimport { defineCommand } from \"citty\";\nimport { printError, printJson, printTable } from \"../output.js\";\nimport { resolveAgentsDir } from \"../resolve.js\";\n\nexport default defineCommand({\n meta: {\n name: \"agents\",\n description: \"List available agents (built-in and custom from .neo/agents/)\",\n },\n args: {\n output: {\n type: \"string\",\n description: \"Output format: json\",\n },\n },\n async run({ args }) {\n const jsonOutput = args.output === \"json\";\n const builtInDir = resolveAgentsDir();\n const customDir = path.resolve(\".neo/agents\");\n\n if (!existsSync(builtInDir)) {\n printError(\"Agent definitions not found. Is @neotx/agents installed?\");\n process.exitCode = 1;\n return;\n }\n\n const registry = new AgentRegistry(builtInDir, existsSync(customDir) ? customDir : undefined);\n await registry.load();\n\n const agents = registry.list();\n\n if (jsonOutput) {\n printJson(\n agents.map((a) => ({\n name: a.name,\n model: a.definition.model,\n sandbox: a.sandbox,\n source: a.source,\n })),\n );\n return;\n }\n\n printTable(\n [\"NAME\", \"MODEL\", \"SANDBOX\", \"SOURCE\"],\n agents.map((a) => [a.name, a.definition.model, a.sandbox, a.source]),\n );\n 
},\n});\n"],"mappings":";;;;;;;;;;AAAA,SAAS,kBAAkB;AAC3B,OAAO,UAAU;AACjB,SAAS,qBAAqB;AAC9B,SAAS,qBAAqB;AAI9B,IAAO,iBAAQ,cAAc;AAAA,EAC3B,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,aAAa,KAAK,WAAW;AACnC,UAAM,aAAa,iBAAiB;AACpC,UAAM,YAAY,KAAK,QAAQ,aAAa;AAE5C,QAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,iBAAW,0DAA0D;AACrE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,WAAW,IAAI,cAAc,YAAY,WAAW,SAAS,IAAI,YAAY,MAAS;AAC5F,UAAM,SAAS,KAAK;AAEpB,UAAM,SAAS,SAAS,KAAK;AAE7B,QAAI,YAAY;AACd;AAAA,QACE,OAAO,IAAI,CAAC,OAAO;AAAA,UACjB,MAAM,EAAE;AAAA,UACR,OAAO,EAAE,WAAW;AAAA,UACpB,SAAS,EAAE;AAAA,UACX,QAAQ,EAAE;AAAA,QACZ,EAAE;AAAA,MACJ;AACA;AAAA,IACF;AAEA;AAAA,MACE,CAAC,QAAQ,SAAS,WAAW,QAAQ;AAAA,MACrC,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,WAAW,OAAO,EAAE,SAAS,EAAE,MAAM,CAAC;AAAA,IACrE;AAAA,EACF;AACF,CAAC;","names":[]}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
// src/repo-filter.ts
|
|
2
|
+
import { existsSync } from "fs";
|
|
3
|
+
import { readdir, readFile } from "fs/promises";
|
|
4
|
+
import path from "path";
|
|
5
|
+
import { getRunsDir, listReposFromGlobalConfig, toRepoSlug } from "@neotx/core";
|
|
6
|
+
async function resolveRepoFilter(args) {
|
|
7
|
+
if (args.all) return { mode: "all" };
|
|
8
|
+
if (args.repo) {
|
|
9
|
+
const repo = args.repo;
|
|
10
|
+
const repos2 = await listReposFromGlobalConfig();
|
|
11
|
+
const match2 = repos2.find(
|
|
12
|
+
(r) => toRepoSlug(r) === repo || path.resolve(r.path) === path.resolve(repo)
|
|
13
|
+
);
|
|
14
|
+
if (match2) {
|
|
15
|
+
return { mode: "named", repoSlug: toRepoSlug(match2), repoPath: match2.path };
|
|
16
|
+
}
|
|
17
|
+
return { mode: "named", repoSlug: toRepoSlug({ path: repo }), repoPath: repo };
|
|
18
|
+
}
|
|
19
|
+
const cwd = process.cwd();
|
|
20
|
+
const repos = await listReposFromGlobalConfig();
|
|
21
|
+
const match = repos.find((r) => path.resolve(r.path) === cwd);
|
|
22
|
+
const slug = match ? toRepoSlug(match) : toRepoSlug({ path: cwd });
|
|
23
|
+
return { mode: "cwd", repoSlug: slug, repoPath: cwd };
|
|
24
|
+
}
|
|
25
|
+
async function loadRunsFiltered(filter) {
|
|
26
|
+
const runsDir = getRunsDir();
|
|
27
|
+
if (!existsSync(runsDir)) return [];
|
|
28
|
+
const runs = [];
|
|
29
|
+
if (filter.mode === "all") {
|
|
30
|
+
const entries = await readdir(runsDir, { withFileTypes: true });
|
|
31
|
+
for (const entry of entries) {
|
|
32
|
+
if (entry.isDirectory()) {
|
|
33
|
+
await loadRunsFromDir(path.join(runsDir, entry.name), runs);
|
|
34
|
+
} else if (entry.name.endsWith(".json")) {
|
|
35
|
+
await loadRunFile(path.join(runsDir, entry.name), runs);
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
} else {
|
|
39
|
+
const slugDir = path.join(runsDir, filter.repoSlug ?? "unknown");
|
|
40
|
+
await loadRunsFromDir(slugDir, runs);
|
|
41
|
+
await loadLegacyRuns(runsDir, filter.repoPath, runs);
|
|
42
|
+
}
|
|
43
|
+
runs.sort((a, b) => b.updatedAt.localeCompare(a.updatedAt));
|
|
44
|
+
return runs;
|
|
45
|
+
}
|
|
46
|
+
async function loadRunsFromDir(dir, runs) {
|
|
47
|
+
if (!existsSync(dir)) return;
|
|
48
|
+
const files = await readdir(dir);
|
|
49
|
+
for (const file of files) {
|
|
50
|
+
if (!file.endsWith(".json")) continue;
|
|
51
|
+
await loadRunFile(path.join(dir, file), runs);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
async function loadRunFile(filePath, runs) {
|
|
55
|
+
try {
|
|
56
|
+
const content = await readFile(filePath, "utf-8");
|
|
57
|
+
runs.push(JSON.parse(content));
|
|
58
|
+
} catch {
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
async function loadLegacyRuns(runsDir, repoPath, runs) {
|
|
62
|
+
if (!repoPath) return;
|
|
63
|
+
const resolvedRepo = path.resolve(repoPath);
|
|
64
|
+
try {
|
|
65
|
+
const entries = await readdir(runsDir, { withFileTypes: true });
|
|
66
|
+
for (const entry of entries) {
|
|
67
|
+
if (!entry.isFile() || !entry.name.endsWith(".json")) continue;
|
|
68
|
+
const filePath = path.join(runsDir, entry.name);
|
|
69
|
+
const content = await readFile(filePath, "utf-8");
|
|
70
|
+
const run = JSON.parse(content);
|
|
71
|
+
if (path.resolve(run.repo) === resolvedRepo) {
|
|
72
|
+
if (!runs.some((r) => r.runId === run.runId)) {
|
|
73
|
+
runs.push(run);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
} catch {
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
export {
|
|
82
|
+
resolveRepoFilter,
|
|
83
|
+
loadRunsFiltered
|
|
84
|
+
};
|
|
85
|
+
//# sourceMappingURL=chunk-CP54H7WA.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/repo-filter.ts"],"sourcesContent":["import { existsSync } from \"node:fs\";\nimport { readdir, readFile } from \"node:fs/promises\";\nimport path from \"node:path\";\nimport type { PersistedRun } from \"@neotx/core\";\nimport { getRunsDir, listReposFromGlobalConfig, toRepoSlug } from \"@neotx/core\";\n\nexport interface RepoFilter {\n mode: \"cwd\" | \"all\" | \"named\";\n repoSlug?: string;\n repoPath?: string;\n}\n\n/**\n * Resolve which repos to query based on --all / --repo flags.\n * Default: CWD-based (finds the matching registered repo slug, or uses basename).\n */\nexport async function resolveRepoFilter(args: {\n all?: boolean | undefined;\n repo?: string | undefined;\n}): Promise<RepoFilter> {\n if (args.all) return { mode: \"all\" };\n\n if (args.repo) {\n const repo = args.repo;\n // Could be a name/slug or a path\n const repos = await listReposFromGlobalConfig();\n const match = repos.find(\n (r) => toRepoSlug(r) === repo || path.resolve(r.path) === path.resolve(repo),\n );\n if (match) {\n return { mode: \"named\", repoSlug: toRepoSlug(match), repoPath: match.path };\n }\n // Treat as path, derive slug\n return { mode: \"named\", repoSlug: toRepoSlug({ path: repo }), repoPath: repo };\n }\n\n // Default: CWD\n const cwd = process.cwd();\n const repos = await listReposFromGlobalConfig();\n const match = repos.find((r) => path.resolve(r.path) === cwd);\n const slug = match ? 
toRepoSlug(match) : toRepoSlug({ path: cwd });\n return { mode: \"cwd\", repoSlug: slug, repoPath: cwd };\n}\n\n/**\n * Load persisted runs, filtered by RepoFilter.\n */\nexport async function loadRunsFiltered(filter: RepoFilter): Promise<PersistedRun[]> {\n const runsDir = getRunsDir();\n if (!existsSync(runsDir)) return [];\n\n const runs: PersistedRun[] = [];\n\n if (filter.mode === \"all\") {\n // Scan all slug subdirs + legacy flat files\n const entries = await readdir(runsDir, { withFileTypes: true });\n for (const entry of entries) {\n if (entry.isDirectory()) {\n await loadRunsFromDir(path.join(runsDir, entry.name), runs);\n } else if (entry.name.endsWith(\".json\")) {\n await loadRunFile(path.join(runsDir, entry.name), runs);\n }\n }\n } else {\n // Specific slug dir\n const slugDir = path.join(runsDir, filter.repoSlug ?? \"unknown\");\n await loadRunsFromDir(slugDir, runs);\n // Also check legacy flat files matching this repo\n await loadLegacyRuns(runsDir, filter.repoPath, runs);\n }\n\n runs.sort((a, b) => b.updatedAt.localeCompare(a.updatedAt));\n return runs;\n}\n\nasync function loadRunsFromDir(dir: string, runs: PersistedRun[]): Promise<void> {\n if (!existsSync(dir)) return;\n const files = await readdir(dir);\n for (const file of files) {\n if (!file.endsWith(\".json\")) continue;\n await loadRunFile(path.join(dir, file), runs);\n }\n}\n\nasync function loadRunFile(filePath: string, runs: PersistedRun[]): Promise<void> {\n try {\n const content = await readFile(filePath, \"utf-8\");\n runs.push(JSON.parse(content) as PersistedRun);\n } catch {\n // Skip corrupt files\n }\n}\n\nasync function loadLegacyRuns(\n runsDir: string,\n repoPath: string | undefined,\n runs: PersistedRun[],\n): Promise<void> {\n if (!repoPath) return;\n const resolvedRepo = path.resolve(repoPath);\n\n try {\n const entries = await readdir(runsDir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isFile() || !entry.name.endsWith(\".json\")) continue;\n 
const filePath = path.join(runsDir, entry.name);\n const content = await readFile(filePath, \"utf-8\");\n const run = JSON.parse(content) as PersistedRun;\n if (path.resolve(run.repo) === resolvedRepo) {\n // Avoid duplicates\n if (!runs.some((r) => r.runId === run.runId)) {\n runs.push(run);\n }\n }\n }\n } catch {\n // Non-critical\n }\n}\n"],"mappings":";AAAA,SAAS,kBAAkB;AAC3B,SAAS,SAAS,gBAAgB;AAClC,OAAO,UAAU;AAEjB,SAAS,YAAY,2BAA2B,kBAAkB;AAYlE,eAAsB,kBAAkB,MAGhB;AACtB,MAAI,KAAK,IAAK,QAAO,EAAE,MAAM,MAAM;AAEnC,MAAI,KAAK,MAAM;AACb,UAAM,OAAO,KAAK;AAElB,UAAMA,SAAQ,MAAM,0BAA0B;AAC9C,UAAMC,SAAQD,OAAM;AAAA,MAClB,CAAC,MAAM,WAAW,CAAC,MAAM,QAAQ,KAAK,QAAQ,EAAE,IAAI,MAAM,KAAK,QAAQ,IAAI;AAAA,IAC7E;AACA,QAAIC,QAAO;AACT,aAAO,EAAE,MAAM,SAAS,UAAU,WAAWA,MAAK,GAAG,UAAUA,OAAM,KAAK;AAAA,IAC5E;AAEA,WAAO,EAAE,MAAM,SAAS,UAAU,WAAW,EAAE,MAAM,KAAK,CAAC,GAAG,UAAU,KAAK;AAAA,EAC/E;AAGA,QAAM,MAAM,QAAQ,IAAI;AACxB,QAAM,QAAQ,MAAM,0BAA0B;AAC9C,QAAM,QAAQ,MAAM,KAAK,CAAC,MAAM,KAAK,QAAQ,EAAE,IAAI,MAAM,GAAG;AAC5D,QAAM,OAAO,QAAQ,WAAW,KAAK,IAAI,WAAW,EAAE,MAAM,IAAI,CAAC;AACjE,SAAO,EAAE,MAAM,OAAO,UAAU,MAAM,UAAU,IAAI;AACtD;AAKA,eAAsB,iBAAiB,QAA6C;AAClF,QAAM,UAAU,WAAW;AAC3B,MAAI,CAAC,WAAW,OAAO,EAAG,QAAO,CAAC;AAElC,QAAM,OAAuB,CAAC;AAE9B,MAAI,OAAO,SAAS,OAAO;AAEzB,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,eAAe,KAAK,CAAC;AAC9D,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,YAAY,GAAG;AACvB,cAAM,gBAAgB,KAAK,KAAK,SAAS,MAAM,IAAI,GAAG,IAAI;AAAA,MAC5D,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AACvC,cAAM,YAAY,KAAK,KAAK,SAAS,MAAM,IAAI,GAAG,IAAI;AAAA,MACxD;AAAA,IACF;AAAA,EACF,OAAO;AAEL,UAAM,UAAU,KAAK,KAAK,SAAS,OAAO,YAAY,SAAS;AAC/D,UAAM,gBAAgB,SAAS,IAAI;AAEnC,UAAM,eAAe,SAAS,OAAO,UAAU,IAAI;AAAA,EACrD;AAEA,OAAK,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,cAAc,EAAE,SAAS,CAAC;AAC1D,SAAO;AACT;AAEA,eAAe,gBAAgB,KAAa,MAAqC;AAC/E,MAAI,CAAC,WAAW,GAAG,EAAG;AACtB,QAAM,QAAQ,MAAM,QAAQ,GAAG;AAC/B,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,UAAM,YAAY,KAAK,KAAK,KAAK,IAAI,GAAG,IAAI;AAAA,EAC9C;AACF;AAEA,eAAe,YAAY,UAAkB,MAAqC;AAChF,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,SAAK
,KAAK,KAAK,MAAM,OAAO,CAAiB;AAAA,EAC/C,QAAQ;AAAA,EAER;AACF;AAEA,eAAe,eACb,SACA,UACA,MACe;AACf,MAAI,CAAC,SAAU;AACf,QAAM,eAAe,KAAK,QAAQ,QAAQ;AAE1C,MAAI;AACF,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,eAAe,KAAK,CAAC;AAC9D,eAAW,SAAS,SAAS;AAC3B,UAAI,CAAC,MAAM,OAAO,KAAK,CAAC,MAAM,KAAK,SAAS,OAAO,EAAG;AACtD,YAAM,WAAW,KAAK,KAAK,SAAS,MAAM,IAAI;AAC9C,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,YAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAI,KAAK,QAAQ,IAAI,IAAI,MAAM,cAAc;AAE3C,YAAI,CAAC,KAAK,KAAK,CAAC,MAAM,EAAE,UAAU,IAAI,KAAK,GAAG;AAC5C,eAAK,KAAK,GAAG;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AACF;","names":["repos","match"]}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
// src/git-utils.ts
|
|
2
|
+
import { execFile } from "child_process";
|
|
3
|
+
import { promisify } from "util";
|
|
4
|
+
var execFileAsync = promisify(execFile);
|
|
5
|
+
async function detectDefaultBranch(cwd) {
|
|
6
|
+
const opts = { cwd };
|
|
7
|
+
try {
|
|
8
|
+
const { stdout } = await execFileAsync(
|
|
9
|
+
"git",
|
|
10
|
+
["symbolic-ref", "refs/remotes/origin/HEAD"],
|
|
11
|
+
opts
|
|
12
|
+
);
|
|
13
|
+
const ref = stdout.trim();
|
|
14
|
+
const branch = ref.replace(/^refs\/remotes\/origin\//, "");
|
|
15
|
+
if (branch && branch !== ref) return branch;
|
|
16
|
+
} catch {
|
|
17
|
+
}
|
|
18
|
+
for (const candidate of ["main", "master"]) {
|
|
19
|
+
try {
|
|
20
|
+
await execFileAsync("git", ["rev-parse", "--verify", `refs/heads/${candidate}`], opts);
|
|
21
|
+
return candidate;
|
|
22
|
+
} catch {
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
return "main";
|
|
26
|
+
}
|
|
27
|
+
async function isGitRepo(cwd) {
|
|
28
|
+
try {
|
|
29
|
+
await execFileAsync("git", ["rev-parse", "--git-dir"], { cwd });
|
|
30
|
+
return true;
|
|
31
|
+
} catch {
|
|
32
|
+
return false;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export {
|
|
37
|
+
detectDefaultBranch,
|
|
38
|
+
isGitRepo
|
|
39
|
+
};
|
|
40
|
+
//# sourceMappingURL=chunk-EZAJLAUF.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/git-utils.ts"],"sourcesContent":["import { execFile } from \"node:child_process\";\nimport { promisify } from \"node:util\";\n\nconst execFileAsync = promisify(execFile);\n\n/**\n * Detect the default branch of a git repo.\n * Tries remote HEAD first, then falls back to common branch names.\n * @param cwd - Directory to run git commands in (defaults to process.cwd())\n */\nexport async function detectDefaultBranch(cwd?: string): Promise<string> {\n const opts = { cwd };\n\n // Try remote HEAD first (works even on a feature branch)\n try {\n const { stdout } = await execFileAsync(\n \"git\",\n [\"symbolic-ref\", \"refs/remotes/origin/HEAD\"],\n opts,\n );\n const ref = stdout.trim();\n const branch = ref.replace(/^refs\\/remotes\\/origin\\//, \"\");\n if (branch && branch !== ref) return branch;\n } catch {\n // origin/HEAD may not be set — fall through\n }\n\n // Fallback: check if common default branch names exist locally\n for (const candidate of [\"main\", \"master\"]) {\n try {\n await execFileAsync(\"git\", [\"rev-parse\", \"--verify\", `refs/heads/${candidate}`], opts);\n return candidate;\n } catch {\n // branch doesn't exist — try next\n }\n }\n\n return \"main\";\n}\n\n/**\n * Check if the current directory is inside a git repository.\n */\nexport async function isGitRepo(cwd?: string): Promise<boolean> {\n try {\n await execFileAsync(\"git\", [\"rev-parse\", \"--git-dir\"], { cwd });\n return true;\n } catch {\n return false;\n 
}\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,iBAAiB;AAE1B,IAAM,gBAAgB,UAAU,QAAQ;AAOxC,eAAsB,oBAAoB,KAA+B;AACvE,QAAM,OAAO,EAAE,IAAI;AAGnB,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB;AAAA,MACA,CAAC,gBAAgB,0BAA0B;AAAA,MAC3C;AAAA,IACF;AACA,UAAM,MAAM,OAAO,KAAK;AACxB,UAAM,SAAS,IAAI,QAAQ,4BAA4B,EAAE;AACzD,QAAI,UAAU,WAAW,IAAK,QAAO;AAAA,EACvC,QAAQ;AAAA,EAER;AAGA,aAAW,aAAa,CAAC,QAAQ,QAAQ,GAAG;AAC1C,QAAI;AACF,YAAM,cAAc,OAAO,CAAC,aAAa,YAAY,cAAc,SAAS,EAAE,GAAG,IAAI;AACrF,aAAO;AAAA,IACT,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,UAAU,KAAgC;AAC9D,MAAI;AACF,UAAM,cAAc,OAAO,CAAC,aAAa,WAAW,GAAG,EAAE,IAAI,CAAC;AAC9D,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
// src/resolve.ts
|
|
2
|
+
import { createRequire } from "module";
|
|
3
|
+
import path from "path";
|
|
4
|
+
var require2 = createRequire(import.meta.url);
|
|
5
|
+
function resolvePackageDir(pkg) {
|
|
6
|
+
const pkgPath = require2.resolve(`${pkg}/package.json`);
|
|
7
|
+
return path.dirname(pkgPath);
|
|
8
|
+
}
|
|
9
|
+
function resolveAgentsDir() {
|
|
10
|
+
return path.join(resolvePackageDir("@neotx/agents"), "agents");
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export {
|
|
14
|
+
resolveAgentsDir
|
|
15
|
+
};
|
|
16
|
+
//# sourceMappingURL=chunk-TNJOG54I.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/resolve.ts"],"sourcesContent":["import { createRequire } from \"node:module\";\nimport path from \"node:path\";\n\nconst require = createRequire(import.meta.url);\n\nfunction resolvePackageDir(pkg: string): string {\n const pkgPath = require.resolve(`${pkg}/package.json`);\n return path.dirname(pkgPath);\n}\n\nexport function resolveAgentsDir(): string {\n return path.join(resolvePackageDir(\"@neotx/agents\"), \"agents\");\n}\n"],"mappings":";AAAA,SAAS,qBAAqB;AAC9B,OAAO,UAAU;AAEjB,IAAMA,WAAU,cAAc,YAAY,GAAG;AAE7C,SAAS,kBAAkB,KAAqB;AAC9C,QAAM,UAAUA,SAAQ,QAAQ,GAAG,GAAG,eAAe;AACrD,SAAO,KAAK,QAAQ,OAAO;AAC7B;AAEO,SAAS,mBAA2B;AACzC,SAAO,KAAK,KAAK,kBAAkB,eAAe,GAAG,QAAQ;AAC/D;","names":["require"]}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
// src/output.ts
|
|
2
|
+
var NO_COLOR = !!process.env.NO_COLOR;
|
|
3
|
+
function ansi(code, text) {
|
|
4
|
+
return NO_COLOR ? text : `\x1B[${code}m${text}\x1B[0m`;
|
|
5
|
+
}
|
|
6
|
+
function printJson(data) {
|
|
7
|
+
console.log(JSON.stringify(data, null, 2));
|
|
8
|
+
}
|
|
9
|
+
function printTable(headers, rows) {
|
|
10
|
+
const widths = headers.map((h, i) => Math.max(h.length, ...rows.map((r) => (r[i] ?? "").length)));
|
|
11
|
+
const header = headers.map((h, i) => h.padEnd(widths[i] ?? 0)).join(" ");
|
|
12
|
+
const separator = widths.map((w) => "\u2500".repeat(w)).join("\u2500\u2500");
|
|
13
|
+
console.log(ansi("1", header));
|
|
14
|
+
console.log(separator);
|
|
15
|
+
for (const row of rows) {
|
|
16
|
+
console.log(row.map((cell, i) => cell.padEnd(widths[i] ?? 0)).join(" "));
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
function printSuccess(msg) {
|
|
20
|
+
console.log(`${ansi("32", "\u2713")} ${msg}`);
|
|
21
|
+
}
|
|
22
|
+
function printError(msg) {
|
|
23
|
+
process.stderr.write(`${ansi("31", "\u2717")} ${msg}
|
|
24
|
+
`);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export {
|
|
28
|
+
printJson,
|
|
29
|
+
printTable,
|
|
30
|
+
printSuccess,
|
|
31
|
+
printError
|
|
32
|
+
};
|
|
33
|
+
//# sourceMappingURL=chunk-YQIWMDXL.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/output.ts"],"sourcesContent":["const NO_COLOR = !!process.env.NO_COLOR;\n\nfunction ansi(code: string, text: string): string {\n return NO_COLOR ? text : `\\x1b[${code}m${text}\\x1b[0m`;\n}\n\nexport function printJson(data: unknown): void {\n console.log(JSON.stringify(data, null, 2));\n}\n\nexport function printTable(headers: string[], rows: string[][]): void {\n const widths = headers.map((h, i) => Math.max(h.length, ...rows.map((r) => (r[i] ?? \"\").length)));\n const header = headers.map((h, i) => h.padEnd(widths[i] ?? 0)).join(\" \");\n const separator = widths.map((w) => \"─\".repeat(w)).join(\"──\");\n\n console.log(ansi(\"1\", header));\n console.log(separator);\n for (const row of rows) {\n console.log(row.map((cell, i) => cell.padEnd(widths[i] ?? 0)).join(\" \"));\n }\n}\n\nexport function printSuccess(msg: string): void {\n console.log(`${ansi(\"32\", \"✓\")} ${msg}`);\n}\n\nexport function printError(msg: string): void {\n process.stderr.write(`${ansi(\"31\", \"✗\")} ${msg}\\n`);\n}\n"],"mappings":";AAAA,IAAM,WAAW,CAAC,CAAC,QAAQ,IAAI;AAE/B,SAAS,KAAK,MAAc,MAAsB;AAChD,SAAO,WAAW,OAAO,QAAQ,IAAI,IAAI,IAAI;AAC/C;AAEO,SAAS,UAAU,MAAqB;AAC7C,UAAQ,IAAI,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAC3C;AAEO,SAAS,WAAW,SAAmB,MAAwB;AACpE,QAAM,SAAS,QAAQ,IAAI,CAAC,GAAG,MAAM,KAAK,IAAI,EAAE,QAAQ,GAAG,KAAK,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,IAAI,MAAM,CAAC,CAAC;AAChG,QAAM,SAAS,QAAQ,IAAI,CAAC,GAAG,MAAM,EAAE,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,IAAI;AACxE,QAAM,YAAY,OAAO,IAAI,CAAC,MAAM,SAAI,OAAO,CAAC,CAAC,EAAE,KAAK,cAAI;AAE5D,UAAQ,IAAI,KAAK,KAAK,MAAM,CAAC;AAC7B,UAAQ,IAAI,SAAS;AACrB,aAAW,OAAO,MAAM;AACtB,YAAQ,IAAI,IAAI,IAAI,CAAC,MAAM,MAAM,KAAK,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,EAC1E;AACF;AAEO,SAAS,aAAa,KAAmB;AAC9C,UAAQ,IAAI,GAAG,KAAK,MAAM,QAAG,CAAC,IAAI,GAAG,EAAE;AACzC;AAEO,SAAS,WAAW,KAAmB;AAC5C,UAAQ,OAAO,MAAM,GAAG,KAAK,MAAM,QAAG,CAAC,IAAI,GAAG;AAAA,CAAI;AACpD;","names":[]}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import {
|
|
2
|
+
resolveRepoFilter
|
|
3
|
+
} from "./chunk-CP54H7WA.js";
|
|
4
|
+
import {
|
|
5
|
+
printError,
|
|
6
|
+
printJson,
|
|
7
|
+
printTable
|
|
8
|
+
} from "./chunk-YQIWMDXL.js";
|
|
9
|
+
|
|
10
|
+
// src/commands/cost.ts
|
|
11
|
+
import { existsSync } from "fs";
|
|
12
|
+
import { readdir, readFile } from "fs/promises";
|
|
13
|
+
import path from "path";
|
|
14
|
+
import { getJournalsDir, toRepoSlug } from "@neotx/core";
|
|
15
|
+
import { defineCommand } from "citty";
|
|
16
|
+
async function readCostEntries(journalDir) {
|
|
17
|
+
if (!existsSync(journalDir)) return [];
|
|
18
|
+
const files = await readdir(journalDir);
|
|
19
|
+
const costFiles = files.filter((f) => f.startsWith("cost-")).sort().reverse();
|
|
20
|
+
const entries = [];
|
|
21
|
+
for (const file of costFiles) {
|
|
22
|
+
const content = await readFile(path.join(journalDir, file), "utf-8");
|
|
23
|
+
for (const line of content.trim().split("\n")) {
|
|
24
|
+
if (!line.trim()) continue;
|
|
25
|
+
entries.push(JSON.parse(line));
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
return entries;
|
|
29
|
+
}
|
|
30
|
+
function isToday(timestamp) {
|
|
31
|
+
const d = new Date(timestamp);
|
|
32
|
+
const now = /* @__PURE__ */ new Date();
|
|
33
|
+
return d.getUTCFullYear() === now.getUTCFullYear() && d.getUTCMonth() === now.getUTCMonth() && d.getUTCDate() === now.getUTCDate();
|
|
34
|
+
}
|
|
35
|
+
var cost_default = defineCommand({
|
|
36
|
+
meta: {
|
|
37
|
+
name: "cost",
|
|
38
|
+
description: "Show cost breakdown from journals (today, by agent, by run)"
|
|
39
|
+
},
|
|
40
|
+
args: {
|
|
41
|
+
all: {
|
|
42
|
+
type: "boolean",
|
|
43
|
+
description: "Show costs from all repos",
|
|
44
|
+
default: false
|
|
45
|
+
},
|
|
46
|
+
repo: {
|
|
47
|
+
type: "string",
|
|
48
|
+
description: "Filter by repo name or path"
|
|
49
|
+
},
|
|
50
|
+
short: {
|
|
51
|
+
type: "boolean",
|
|
52
|
+
description: "Compact output for supervisor agents (saves tokens)",
|
|
53
|
+
default: false
|
|
54
|
+
},
|
|
55
|
+
output: {
|
|
56
|
+
type: "string",
|
|
57
|
+
description: "Output format: json"
|
|
58
|
+
}
|
|
59
|
+
},
|
|
60
|
+
async run({ args }) {
|
|
61
|
+
const jsonOutput = args.output === "json";
|
|
62
|
+
const journalDir = getJournalsDir();
|
|
63
|
+
let entries = await readCostEntries(journalDir);
|
|
64
|
+
if (entries.length === 0) {
|
|
65
|
+
printError("No cost data found.");
|
|
66
|
+
process.exitCode = 1;
|
|
67
|
+
return;
|
|
68
|
+
}
|
|
69
|
+
const filter = await resolveRepoFilter({ all: args.all, repo: args.repo });
|
|
70
|
+
if (filter.mode !== "all") {
|
|
71
|
+
const slug = filter.repoSlug;
|
|
72
|
+
entries = entries.filter((e) => {
|
|
73
|
+
if (!e.repo) return false;
|
|
74
|
+
return toRepoSlug({ path: e.repo }) === slug;
|
|
75
|
+
});
|
|
76
|
+
}
|
|
77
|
+
const todayEntries = entries.filter((e) => isToday(e.timestamp));
|
|
78
|
+
const todayTotal = todayEntries.reduce((sum, e) => sum + e.costUsd, 0);
|
|
79
|
+
const allTimeTotal = entries.reduce((sum, e) => sum + e.costUsd, 0);
|
|
80
|
+
const byAgent = /* @__PURE__ */ new Map();
|
|
81
|
+
for (const e of todayEntries) {
|
|
82
|
+
const prev = byAgent.get(e.agent) ?? { cost: 0, runs: 0 };
|
|
83
|
+
byAgent.set(e.agent, { cost: prev.cost + e.costUsd, runs: prev.runs + 1 });
|
|
84
|
+
}
|
|
85
|
+
const byRepo = /* @__PURE__ */ new Map();
|
|
86
|
+
if (filter.mode === "all") {
|
|
87
|
+
for (const e of todayEntries) {
|
|
88
|
+
const repo = e.repo ?? "unknown";
|
|
89
|
+
const prev = byRepo.get(repo) ?? { cost: 0, runs: 0 };
|
|
90
|
+
byRepo.set(repo, { cost: prev.cost + e.costUsd, runs: prev.runs + 1 });
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
if (jsonOutput) {
|
|
94
|
+
printJson({
|
|
95
|
+
today: {
|
|
96
|
+
total: todayTotal,
|
|
97
|
+
sessions: todayEntries.length,
|
|
98
|
+
byAgent: Object.fromEntries(byAgent),
|
|
99
|
+
...byRepo.size > 0 ? { byRepo: Object.fromEntries(byRepo) } : {}
|
|
100
|
+
},
|
|
101
|
+
allTime: {
|
|
102
|
+
total: allTimeTotal,
|
|
103
|
+
sessions: entries.length
|
|
104
|
+
}
|
|
105
|
+
});
|
|
106
|
+
return;
|
|
107
|
+
}
|
|
108
|
+
if (args.short) {
|
|
109
|
+
const agents = [...byAgent.entries()].map(([name, data]) => `${name}=$${data.cost.toFixed(4)}`).join(" ");
|
|
110
|
+
console.log(`today=$${todayTotal.toFixed(4)} sessions=${todayEntries.length} ${agents}`);
|
|
111
|
+
return;
|
|
112
|
+
}
|
|
113
|
+
console.log(`Today: $${todayTotal.toFixed(4)} (${todayEntries.length} sessions)`);
|
|
114
|
+
console.log(`All time: $${allTimeTotal.toFixed(4)} (${entries.length} sessions)`);
|
|
115
|
+
if (byAgent.size > 0) {
|
|
116
|
+
console.log("");
|
|
117
|
+
printTable(
|
|
118
|
+
["AGENT", "COST TODAY", "SESSIONS"],
|
|
119
|
+
[...byAgent.entries()].sort((a, b) => b[1].cost - a[1].cost).map(([name, data]) => [name, `$${data.cost.toFixed(4)}`, String(data.runs)])
|
|
120
|
+
);
|
|
121
|
+
}
|
|
122
|
+
if (byRepo.size > 0) {
|
|
123
|
+
console.log("");
|
|
124
|
+
printTable(
|
|
125
|
+
["REPO", "COST TODAY", "SESSIONS"],
|
|
126
|
+
[...byRepo.entries()].sort((a, b) => b[1].cost - a[1].cost).map(([repo, data]) => [repo, `$${data.cost.toFixed(4)}`, String(data.runs)])
|
|
127
|
+
);
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
});
|
|
131
|
+
export {
|
|
132
|
+
cost_default as default
|
|
133
|
+
};
|
|
134
|
+
//# sourceMappingURL=cost-DNGKT4UC.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/cost.ts"],"sourcesContent":["import { existsSync } from \"node:fs\";\nimport { readdir, readFile } from \"node:fs/promises\";\nimport path from \"node:path\";\nimport type { CostEntry } from \"@neotx/core\";\nimport { getJournalsDir, toRepoSlug } from \"@neotx/core\";\nimport { defineCommand } from \"citty\";\nimport { printError, printJson, printTable } from \"../output.js\";\nimport { resolveRepoFilter } from \"../repo-filter.js\";\n\nasync function readCostEntries(journalDir: string): Promise<CostEntry[]> {\n if (!existsSync(journalDir)) return [];\n const files = await readdir(journalDir);\n const costFiles = files\n .filter((f) => f.startsWith(\"cost-\"))\n .sort()\n .reverse();\n const entries: CostEntry[] = [];\n\n for (const file of costFiles) {\n const content = await readFile(path.join(journalDir, file), \"utf-8\");\n for (const line of content.trim().split(\"\\n\")) {\n if (!line.trim()) continue;\n entries.push(JSON.parse(line) as CostEntry);\n }\n }\n\n return entries;\n}\n\nfunction isToday(timestamp: string): boolean {\n const d = new Date(timestamp);\n const now = new Date();\n return (\n d.getUTCFullYear() === now.getUTCFullYear() &&\n d.getUTCMonth() === now.getUTCMonth() &&\n d.getUTCDate() === now.getUTCDate()\n );\n}\n\nexport default defineCommand({\n meta: {\n name: \"cost\",\n description: \"Show cost breakdown from journals (today, by agent, by run)\",\n },\n args: {\n all: {\n type: \"boolean\",\n description: \"Show costs from all repos\",\n default: false,\n },\n repo: {\n type: \"string\",\n description: \"Filter by repo name or path\",\n },\n short: {\n type: \"boolean\",\n description: \"Compact output for supervisor agents (saves tokens)\",\n default: false,\n },\n output: {\n type: \"string\",\n description: \"Output format: json\",\n },\n },\n async run({ args }) {\n const jsonOutput = args.output === \"json\";\n const journalDir = getJournalsDir();\n let entries = await 
readCostEntries(journalDir);\n\n if (entries.length === 0) {\n printError(\"No cost data found.\");\n process.exitCode = 1;\n return;\n }\n\n // Filter by repo unless --all\n const filter = await resolveRepoFilter({ all: args.all, repo: args.repo });\n if (filter.mode !== \"all\") {\n const slug = filter.repoSlug;\n entries = entries.filter((e) => {\n if (!e.repo) return false;\n return toRepoSlug({ path: e.repo }) === slug;\n });\n }\n\n const todayEntries = entries.filter((e) => isToday(e.timestamp));\n const todayTotal = todayEntries.reduce((sum, e) => sum + e.costUsd, 0);\n const allTimeTotal = entries.reduce((sum, e) => sum + e.costUsd, 0);\n\n // Breakdown by agent (today)\n const byAgent = new Map<string, { cost: number; runs: number }>();\n for (const e of todayEntries) {\n const prev = byAgent.get(e.agent) ?? { cost: 0, runs: 0 };\n byAgent.set(e.agent, { cost: prev.cost + e.costUsd, runs: prev.runs + 1 });\n }\n\n // Breakdown by repo (today, only in --all mode)\n const byRepo = new Map<string, { cost: number; runs: number }>();\n if (filter.mode === \"all\") {\n for (const e of todayEntries) {\n const repo = e.repo ?? \"unknown\";\n const prev = byRepo.get(repo) ?? { cost: 0, runs: 0 };\n byRepo.set(repo, { cost: prev.cost + e.costUsd, runs: prev.runs + 1 });\n }\n }\n\n if (jsonOutput) {\n printJson({\n today: {\n total: todayTotal,\n sessions: todayEntries.length,\n byAgent: Object.fromEntries(byAgent),\n ...(byRepo.size > 0 ? 
{ byRepo: Object.fromEntries(byRepo) } : {}),\n },\n allTime: {\n total: allTimeTotal,\n sessions: entries.length,\n },\n });\n return;\n }\n\n if (args.short) {\n const agents = [...byAgent.entries()]\n .map(([name, data]) => `${name}=$${data.cost.toFixed(4)}`)\n .join(\" \");\n console.log(`today=$${todayTotal.toFixed(4)} sessions=${todayEntries.length} ${agents}`);\n return;\n }\n\n console.log(`Today: $${todayTotal.toFixed(4)} (${todayEntries.length} sessions)`);\n console.log(`All time: $${allTimeTotal.toFixed(4)} (${entries.length} sessions)`);\n\n if (byAgent.size > 0) {\n console.log(\"\");\n printTable(\n [\"AGENT\", \"COST TODAY\", \"SESSIONS\"],\n [...byAgent.entries()]\n .sort((a, b) => b[1].cost - a[1].cost)\n .map(([name, data]) => [name, `$${data.cost.toFixed(4)}`, String(data.runs)]),\n );\n }\n\n if (byRepo.size > 0) {\n console.log(\"\");\n printTable(\n [\"REPO\", \"COST TODAY\", \"SESSIONS\"],\n [...byRepo.entries()]\n .sort((a, b) => b[1].cost - a[1].cost)\n .map(([repo, data]) => [repo, `$${data.cost.toFixed(4)}`, String(data.runs)]),\n );\n }\n 
},\n});\n"],"mappings":";;;;;;;;;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,SAAS,gBAAgB;AAClC,OAAO,UAAU;AAEjB,SAAS,gBAAgB,kBAAkB;AAC3C,SAAS,qBAAqB;AAI9B,eAAe,gBAAgB,YAA0C;AACvE,MAAI,CAAC,WAAW,UAAU,EAAG,QAAO,CAAC;AACrC,QAAM,QAAQ,MAAM,QAAQ,UAAU;AACtC,QAAM,YAAY,MACf,OAAO,CAAC,MAAM,EAAE,WAAW,OAAO,CAAC,EACnC,KAAK,EACL,QAAQ;AACX,QAAM,UAAuB,CAAC;AAE9B,aAAW,QAAQ,WAAW;AAC5B,UAAM,UAAU,MAAM,SAAS,KAAK,KAAK,YAAY,IAAI,GAAG,OAAO;AACnE,eAAW,QAAQ,QAAQ,KAAK,EAAE,MAAM,IAAI,GAAG;AAC7C,UAAI,CAAC,KAAK,KAAK,EAAG;AAClB,cAAQ,KAAK,KAAK,MAAM,IAAI,CAAc;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,QAAQ,WAA4B;AAC3C,QAAM,IAAI,IAAI,KAAK,SAAS;AAC5B,QAAM,MAAM,oBAAI,KAAK;AACrB,SACE,EAAE,eAAe,MAAM,IAAI,eAAe,KAC1C,EAAE,YAAY,MAAM,IAAI,YAAY,KACpC,EAAE,WAAW,MAAM,IAAI,WAAW;AAEtC;AAEA,IAAO,eAAQ,cAAc;AAAA,EAC3B,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,aAAa,KAAK,WAAW;AACnC,UAAM,aAAa,eAAe;AAClC,QAAI,UAAU,MAAM,gBAAgB,UAAU;AAE9C,QAAI,QAAQ,WAAW,GAAG;AACxB,iBAAW,qBAAqB;AAChC,cAAQ,WAAW;AACnB;AAAA,IACF;AAGA,UAAM,SAAS,MAAM,kBAAkB,EAAE,KAAK,KAAK,KAAK,MAAM,KAAK,KAAK,CAAC;AACzE,QAAI,OAAO,SAAS,OAAO;AACzB,YAAM,OAAO,OAAO;AACpB,gBAAU,QAAQ,OAAO,CAAC,MAAM;AAC9B,YAAI,CAAC,EAAE,KAAM,QAAO;AACpB,eAAO,WAAW,EAAE,MAAM,EAAE,KAAK,CAAC,MAAM;AAAA,MAC1C,CAAC;AAAA,IACH;AAEA,UAAM,eAAe,QAAQ,OAAO,CAAC,MAAM,QAAQ,EAAE,SAAS,CAAC;AAC/D,UAAM,aAAa,aAAa,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,CAAC;AACrE,UAAM,eAAe,QAAQ,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,CAAC;AAGlE,UAAM,UAAU,oBAAI,IAA4C;AAChE,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAO,QAAQ,IAAI,EAAE,KAAK,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE;AACxD,cAAQ,IAAI,EAAE,OAAO,EAAE,MAAM,KAAK,OAAO,EAAE,SAAS,MAAM,KAAK,OAAO,EAAE,CAAC;AAAA,IAC3E;AAGA,UAAM,SAAS,oBAAI,IAA4C;AAC/D,QAAI,OAAO,SAAS,OAAO;AACzB,iBAAW,KAAK,cAAc;AAC5B,cAAM,OA
AO,EAAE,QAAQ;AACvB,cAAM,OAAO,OAAO,IAAI,IAAI,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE;AACpD,eAAO,IAAI,MAAM,EAAE,MAAM,KAAK,OAAO,EAAE,SAAS,MAAM,KAAK,OAAO,EAAE,CAAC;AAAA,MACvE;AAAA,IACF;AAEA,QAAI,YAAY;AACd,gBAAU;AAAA,QACR,OAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU,aAAa;AAAA,UACvB,SAAS,OAAO,YAAY,OAAO;AAAA,UACnC,GAAI,OAAO,OAAO,IAAI,EAAE,QAAQ,OAAO,YAAY,MAAM,EAAE,IAAI,CAAC;AAAA,QAClE;AAAA,QACA,SAAS;AAAA,UACP,OAAO;AAAA,UACP,UAAU,QAAQ;AAAA,QACpB;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAEA,QAAI,KAAK,OAAO;AACd,YAAM,SAAS,CAAC,GAAG,QAAQ,QAAQ,CAAC,EACjC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,GAAG,IAAI,KAAK,KAAK,KAAK,QAAQ,CAAC,CAAC,EAAE,EACxD,KAAK,GAAG;AACX,cAAQ,IAAI,UAAU,WAAW,QAAQ,CAAC,CAAC,aAAa,aAAa,MAAM,IAAI,MAAM,EAAE;AACvF;AAAA,IACF;AAEA,YAAQ,IAAI,cAAc,WAAW,QAAQ,CAAC,CAAC,KAAK,aAAa,MAAM,YAAY;AACnF,YAAQ,IAAI,cAAc,aAAa,QAAQ,CAAC,CAAC,KAAK,QAAQ,MAAM,YAAY;AAEhF,QAAI,QAAQ,OAAO,GAAG;AACpB,cAAQ,IAAI,EAAE;AACd;AAAA,QACE,CAAC,SAAS,cAAc,UAAU;AAAA,QAClC,CAAC,GAAG,QAAQ,QAAQ,CAAC,EAClB,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EACpC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,IAAI,KAAK,KAAK,QAAQ,CAAC,CAAC,IAAI,OAAO,KAAK,IAAI,CAAC,CAAC;AAAA,MAChF;AAAA,IACF;AAEA,QAAI,OAAO,OAAO,GAAG;AACnB,cAAQ,IAAI,EAAE;AACd;AAAA,QACE,CAAC,QAAQ,cAAc,UAAU;AAAA,QACjC,CAAC,GAAG,OAAO,QAAQ,CAAC,EACjB,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EACpC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,IAAI,KAAK,KAAK,QAAQ,CAAC,CAAC,IAAI,OAAO,KAAK,IAAI,CAAC,CAAC;AAAA,MAChF;AAAA,IACF;AAAA,EACF;AACF,CAAC;","names":[]}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
// src/daemon/supervisor-worker.ts
|
|
2
|
+
import { createWriteStream } from "fs";
|
|
3
|
+
import { mkdir } from "fs/promises";
|
|
4
|
+
import { getSupervisorDir, loadGlobalConfig, SupervisorDaemon } from "@neotx/core";
|
|
5
|
+
// Entry point for the detached supervisor daemon worker.
//
// Expects the supervisor name as argv[2], redirects stdout/stderr into the
// supervisor's append-only daemon.log, then runs the SupervisorDaemon.
async function main() {
  const name = process.argv[2];
  if (!name) {
    // Redirection has not happened yet, so this reaches the real stderr.
    process.stderr.write("Usage: supervisor-worker.js <name>\n");
    process.exit(1);
  }

  // Redirect stdout/stderr to a log file in the supervisor's state dir so
  // the detached process leaves an inspectable trail.
  const dir = getSupervisorDir(name);
  await mkdir(dir, { recursive: true });
  const logPath = `${dir}/daemon.log`;
  const logStream = createWriteStream(logPath, { flags: "a" });
  process.stdout.write = logStream.write.bind(logStream);
  process.stderr.write = logStream.write.bind(logStream);

  try {
    const config = await loadGlobalConfig();
    const daemon = new SupervisorDaemon({ name, config });
    await daemon.start();
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    console.error(`[supervisor-worker] Fatal: ${msg}`);
    // Flush the log before exiting — process.exit() does not wait for
    // pending stream writes, so exiting immediately could drop the fatal
    // message we just logged.
    logStream.end(() => process.exit(1));
  }
}
|
|
27
|
+
main();
|
|
28
|
+
//# sourceMappingURL=supervisor-worker.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/daemon/supervisor-worker.ts"],"sourcesContent":["/**\n * Detached worker process for the supervisor daemon.\n *\n * Launched via child_process.fork() from the supervise command.\n * Runs the SupervisorDaemon which starts the heartbeat loop,\n * webhook server, and event queue.\n *\n * Usage: node supervisor-worker.js <name>\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir } from \"node:fs/promises\";\nimport { getSupervisorDir, loadGlobalConfig, SupervisorDaemon } from \"@neotx/core\";\n\nasync function main(): Promise<void> {\n const name = process.argv[2];\n if (!name) {\n process.stderr.write(\"Usage: supervisor-worker.js <name>\\n\");\n process.exit(1);\n }\n\n // Redirect stdout/stderr to a log file\n const dir = getSupervisorDir(name);\n await mkdir(dir, { recursive: true });\n const logPath = `${dir}/daemon.log`;\n const logStream = createWriteStream(logPath, { flags: \"a\" });\n process.stdout.write = logStream.write.bind(logStream);\n process.stderr.write = logStream.write.bind(logStream);\n\n try {\n const config = await loadGlobalConfig();\n const daemon = new SupervisorDaemon({ name, config });\n await daemon.start();\n } catch (error) {\n const msg = error instanceof Error ? 
error.message : String(error);\n console.error(`[supervisor-worker] Fatal: ${msg}`);\n process.exit(1);\n }\n}\n\nmain();\n"],"mappings":";AAUA,SAAS,yBAAyB;AAClC,SAAS,aAAa;AACtB,SAAS,kBAAkB,kBAAkB,wBAAwB;AAErE,eAAe,OAAsB;AACnC,QAAM,OAAO,QAAQ,KAAK,CAAC;AAC3B,MAAI,CAAC,MAAM;AACT,YAAQ,OAAO,MAAM,sCAAsC;AAC3D,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,MAAM,iBAAiB,IAAI;AACjC,QAAM,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AACpC,QAAM,UAAU,GAAG,GAAG;AACtB,QAAM,YAAY,kBAAkB,SAAS,EAAE,OAAO,IAAI,CAAC;AAC3D,UAAQ,OAAO,QAAQ,UAAU,MAAM,KAAK,SAAS;AACrD,UAAQ,OAAO,QAAQ,UAAU,MAAM,KAAK,SAAS;AAErD,MAAI;AACF,UAAM,SAAS,MAAM,iBAAiB;AACtC,UAAM,SAAS,IAAI,iBAAiB,EAAE,MAAM,OAAO,CAAC;AACpD,UAAM,OAAO,MAAM;AAAA,EACrB,SAAS,OAAO;AACd,UAAM,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACjE,YAAQ,MAAM,8BAA8B,GAAG,EAAE;AACjD,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,KAAK;","names":[]}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
// src/daemon/worker.ts
|
|
2
|
+
import { createWriteStream, existsSync } from "fs";
|
|
3
|
+
import { mkdir, readFile, unlink, writeFile } from "fs/promises";
|
|
4
|
+
import path from "path";
|
|
5
|
+
import {
|
|
6
|
+
AgentRegistry,
|
|
7
|
+
getRepoRunsDir,
|
|
8
|
+
getRunDispatchPath,
|
|
9
|
+
getRunLogPath,
|
|
10
|
+
loadGlobalConfig,
|
|
11
|
+
Orchestrator
|
|
12
|
+
} from "@neotx/core";
|
|
13
|
+
// Entry point for a detached `neo run -d` worker.
//
// argv: <runId> <repoSlug>. Reads the one-shot dispatch request persisted by
// the run command, executes the orchestrator for a single run, records the
// outcome in the run's JSON file, and mirrors all output into the run log.
async function main() {
  const [runId, repoSlug] = process.argv.slice(2);
  if (!runId || !repoSlug) {
    // Redirection has not happened yet, so this reaches the real stderr.
    process.stderr.write("Usage: worker.js <runId> <repoSlug>\n");
    process.exit(1);
  }

  // Redirect stdout/stderr to the per-run log file.
  const logPath = getRunLogPath(repoSlug, runId);
  await mkdir(path.dirname(logPath), { recursive: true });
  const logStream = createWriteStream(logPath, { flags: "a" });
  process.stdout.write = logStream.write.bind(logStream);
  process.stderr.write = logStream.write.bind(logStream);

  const dispatchPath = getRunDispatchPath(repoSlug, runId);
  const runPath = path.join(getRepoRunsDir(repoSlug), `${runId}.json`);

  // Exit code reported once the log has been flushed (0 = success).
  let exitCode = 0;
  try {
    // Read, then delete, the one-shot dispatch request file.
    const raw = await readFile(dispatchPath, "utf-8");
    const request = JSON.parse(raw);
    await unlink(dispatchPath).catch(() => {
    });

    // Load global config and resolve the requested agent.
    const config = await loadGlobalConfig();
    const agentRegistry = new AgentRegistry(
      request.bundledAgentsDir,
      request.customAgentsDir && existsSync(request.customAgentsDir) ? request.customAgentsDir : void 0
    );
    await agentRegistry.load();
    const agent = agentRegistry.get(request.agentName);
    if (!agent) {
      throw new Error(`Agent "${request.agentName}" not found`);
    }

    // Wrap the single agent in an ad-hoc one-step workflow.
    const orchestrator = new Orchestrator(config);
    orchestrator.registerAgent(agent);
    orchestrator.registerWorkflow({
      name: `_run_${request.agentName}`,
      description: `Detached dispatch to ${request.agentName}`,
      steps: {
        run: { agent: request.agentName }
      }
    });

    // Record our PID so the parent can supervise/kill this run.
    await updatePersistedRun(runPath, { pid: process.pid });

    // Safety net: force an exit if the run exceeds the configured maximum
    // duration plus a minute of grace. unref() keeps the timer from holding
    // the process open on its own.
    const safetyTimeout = setTimeout(() => {
      console.error("[worker] Safety timeout reached, forcing exit");
      process.exit(1);
    }, config.sessions.maxDurationMs + 6e4);
    safetyTimeout.unref();

    await orchestrator.start();

    // Re-assert "running" after start(): orphan recovery during start() may
    // have flipped this run's persisted status to "failed".
    await updatePersistedRun(runPath, { status: "running", pid: process.pid });

    const result = await orchestrator.dispatch({
      runId,
      workflow: `_run_${request.agentName}`,
      repo: request.repo,
      prompt: request.prompt,
      priority: request.priority ?? "medium",
      metadata: request.metadata
    });
    await orchestrator.shutdown();

    console.log(`[worker] Run ${runId} completed: ${result.status}`);
    console.log(`[worker] Cost: $${result.costUsd.toFixed(4)}`);
    if (result.branch) {
      console.log(`[worker] Branch: ${result.branch}`);
    }
  } catch (error) {
    const errorMsg = error instanceof Error ? error.message : String(error);
    console.error(`[worker] Run ${runId} failed: ${errorMsg}`);
    // Best-effort: persist the failure status for later inspection.
    await updatePersistedRun(runPath, {
      status: "failed",
      updatedAt: new Date().toISOString()
    }).catch(() => {
    });
    // BUG FIX: this path previously still exited 0; report the failure to
    // any parent watching the exit code.
    exitCode = 1;
  } finally {
    // Flush the log before exiting — process.exit() does not wait for
    // pending stream writes, which could truncate the tail of the log.
    logStream.end(() => process.exit(exitCode));
  }
}
|
|
85
|
+
/**
 * Shallow-merge `updates` into the persisted run JSON at `runPath`.
 *
 * Best-effort: any failure (missing file, malformed JSON, write error) is
 * swallowed — persisting run metadata is advisory and must never crash the
 * worker.
 *
 * @param {string} runPath - Path to the run's JSON file.
 * @param {object} updates - Fields to merge into the stored run record.
 * @returns {Promise<void>}
 */
async function updatePersistedRun(runPath, updates) {
  try {
    const current = JSON.parse(await readFile(runPath, "utf-8"));
    const merged = { ...current, ...updates };
    await writeFile(runPath, JSON.stringify(merged, null, 2), "utf-8");
  } catch {
    // Non-critical: leave the persisted run untouched on any error.
  }
}
|
|
94
|
+
main();
|
|
95
|
+
//# sourceMappingURL=worker.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/daemon/worker.ts"],"sourcesContent":["/**\n * Detached worker process for `neo run -d`.\n *\n * Launched via child_process.fork() from the run command.\n * Reads dispatch parameters from a .dispatch.json file, runs the orchestrator,\n * and persists results. Stdout/stderr are redirected to a log file.\n *\n * Usage: node worker.js <runId> <repoSlug>\n */\n\nimport { createWriteStream, existsSync } from \"node:fs\";\nimport { mkdir, readFile, unlink, writeFile } from \"node:fs/promises\";\nimport path from \"node:path\";\nimport type { PersistedRun } from \"@neotx/core\";\nimport {\n AgentRegistry,\n getRepoRunsDir,\n getRunDispatchPath,\n getRunLogPath,\n loadGlobalConfig,\n Orchestrator,\n} from \"@neotx/core\";\n\ninterface DispatchRequest {\n agentName: string;\n repo: string;\n prompt: string;\n priority?: \"critical\" | \"high\" | \"medium\" | \"low\";\n metadata?: Record<string, unknown>;\n bundledAgentsDir: string;\n customAgentsDir?: string;\n}\n\nasync function main(): Promise<void> {\n const [runId, repoSlug] = process.argv.slice(2);\n if (!runId || !repoSlug) {\n process.stderr.write(\"Usage: worker.js <runId> <repoSlug>\\n\");\n process.exit(1);\n }\n\n // Redirect stdout/stderr to log file\n const logPath = getRunLogPath(repoSlug, runId);\n await mkdir(path.dirname(logPath), { recursive: true });\n const logStream = createWriteStream(logPath, { flags: \"a\" });\n process.stdout.write = logStream.write.bind(logStream);\n process.stderr.write = logStream.write.bind(logStream);\n\n const dispatchPath = getRunDispatchPath(repoSlug, runId);\n const runPath = path.join(getRepoRunsDir(repoSlug), `${runId}.json`);\n\n try {\n // Read dispatch request\n const raw = await readFile(dispatchPath, \"utf-8\");\n const request = JSON.parse(raw) as DispatchRequest;\n\n // Clean up dispatch file\n await unlink(dispatchPath).catch(() => {});\n\n // Load config and agents\n const config = await loadGlobalConfig();\n const agentRegistry 
= new AgentRegistry(\n request.bundledAgentsDir,\n request.customAgentsDir && existsSync(request.customAgentsDir)\n ? request.customAgentsDir\n : undefined,\n );\n await agentRegistry.load();\n\n const agent = agentRegistry.get(request.agentName);\n if (!agent) {\n throw new Error(`Agent \"${request.agentName}\" not found`);\n }\n\n // Create orchestrator\n const orchestrator = new Orchestrator(config);\n orchestrator.registerAgent(agent);\n orchestrator.registerWorkflow({\n name: `_run_${request.agentName}`,\n description: `Detached dispatch to ${request.agentName}`,\n steps: {\n run: { agent: request.agentName },\n },\n });\n\n // Update persisted run with PID\n await updatePersistedRun(runPath, { pid: process.pid });\n\n // Safety timeout — ensure the process eventually exits\n const safetyTimeout = setTimeout(() => {\n console.error(\"[worker] Safety timeout reached, forcing exit\");\n process.exit(1);\n }, config.sessions.maxDurationMs + 60_000);\n safetyTimeout.unref();\n\n await orchestrator.start();\n\n // Re-assert running status — orchestrator.start() calls recoverOrphanedRuns()\n // which marks any \"running\" persisted runs as \"failed\"\n await updatePersistedRun(runPath, { status: \"running\", pid: process.pid });\n\n const result = await orchestrator.dispatch({\n runId,\n workflow: `_run_${request.agentName}`,\n repo: request.repo,\n prompt: request.prompt,\n priority: request.priority ?? \"medium\",\n metadata: request.metadata,\n });\n\n await orchestrator.shutdown();\n\n console.log(`[worker] Run ${runId} completed: ${result.status}`);\n console.log(`[worker] Cost: $${result.costUsd.toFixed(4)}`);\n if (result.branch) {\n console.log(`[worker] Branch: ${result.branch}`);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n console.error(`[worker] Run ${runId} failed: ${errorMsg}`);\n\n // Update persisted run to failed status\n await updatePersistedRun(runPath, {\n status: \"failed\",\n updatedAt: new Date().toISOString(),\n }).catch(() => {});\n } finally {\n logStream.end();\n process.exit(0);\n }\n}\n\nasync function updatePersistedRun(runPath: string, updates: Partial<PersistedRun>): Promise<void> {\n try {\n const raw = await readFile(runPath, \"utf-8\");\n const run = JSON.parse(raw) as PersistedRun;\n Object.assign(run, updates);\n await writeFile(runPath, JSON.stringify(run, null, 2), \"utf-8\");\n } catch {\n // Non-critical\n }\n}\n\nmain();\n"],"mappings":";AAUA,SAAS,mBAAmB,kBAAkB;AAC9C,SAAS,OAAO,UAAU,QAAQ,iBAAiB;AACnD,OAAO,UAAU;AAEjB;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAYP,eAAe,OAAsB;AACnC,QAAM,CAAC,OAAO,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAAC;AAC9C,MAAI,CAAC,SAAS,CAAC,UAAU;AACvB,YAAQ,OAAO,MAAM,uCAAuC;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,UAAU,cAAc,UAAU,KAAK;AAC7C,QAAM,MAAM,KAAK,QAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AACtD,QAAM,YAAY,kBAAkB,SAAS,EAAE,OAAO,IAAI,CAAC;AAC3D,UAAQ,OAAO,QAAQ,UAAU,MAAM,KAAK,SAAS;AACrD,UAAQ,OAAO,QAAQ,UAAU,MAAM,KAAK,SAAS;AAErD,QAAM,eAAe,mBAAmB,UAAU,KAAK;AACvD,QAAM,UAAU,KAAK,KAAK,eAAe,QAAQ,GAAG,GAAG,KAAK,OAAO;AAEnE,MAAI;AAEF,UAAM,MAAM,MAAM,SAAS,cAAc,OAAO;AAChD,UAAM,UAAU,KAAK,MAAM,GAAG;AAG9B,UAAM,OAAO,YAAY,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAGzC,UAAM,SAAS,MAAM,iBAAiB;AACtC,UAAM,gBAAgB,IAAI;AAAA,MACxB,QAAQ;AAAA,MACR,QAAQ,mBAAmB,WAAW,QAAQ,eAAe,IACzD,QAAQ,kBACR;AAAA,IACN;AACA,UAAM,cAAc,KAAK;AAEzB,UAAM,QAAQ,cAAc,IAAI,QAAQ,SAAS;AACjD,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,UAAU,QAAQ,SAAS,aAAa;AAAA,IAC1D;AAGA,UAAM,eAAe,IAAI,aAAa,MAAM;AAC5C,iBAAa,cAAc,KAAK;AAChC,iBAAa,iBAAiB;AAAA,MAC5B,MAAM,QAAQ,QAAQ,SAAS;AAAA,MAC/B,aAAa,wBAAwB,QAAQ,SAAS;AAAA,MACtD,OAAO;AAAA,QACL,KAAK,EAAE,OAAO,QAAQ,UAAU;AAAA,MAClC;AAAA,IACF,CAAC;AAGD,UAAM,mBAAmB,SAAS,EAAE,KAAK,QAAQ,IAAI,CAAC;AAGtD,UAAM,gBAAgB,WAAW,MAAM;AACrC,cAAQ,MAAM,+CAA+C;
AAC7D,cAAQ,KAAK,CAAC;AAAA,IAChB,GAAG,OAAO,SAAS,gBAAgB,GAAM;AACzC,kBAAc,MAAM;AAEpB,UAAM,aAAa,MAAM;AAIzB,UAAM,mBAAmB,SAAS,EAAE,QAAQ,WAAW,KAAK,QAAQ,IAAI,CAAC;AAEzE,UAAM,SAAS,MAAM,aAAa,SAAS;AAAA,MACzC;AAAA,MACA,UAAU,QAAQ,QAAQ,SAAS;AAAA,MACnC,MAAM,QAAQ;AAAA,MACd,QAAQ,QAAQ;AAAA,MAChB,UAAU,QAAQ,YAAY;AAAA,MAC9B,UAAU,QAAQ;AAAA,IACpB,CAAC;AAED,UAAM,aAAa,SAAS;AAE5B,YAAQ,IAAI,gBAAgB,KAAK,eAAe,OAAO,MAAM,EAAE;AAC/D,YAAQ,IAAI,mBAAmB,OAAO,QAAQ,QAAQ,CAAC,CAAC,EAAE;AAC1D,QAAI,OAAO,QAAQ;AACjB,cAAQ,IAAI,oBAAoB,OAAO,MAAM,EAAE;AAAA,IACjD;AAAA,EACF,SAAS,OAAO;AACd,UAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACtE,YAAQ,MAAM,gBAAgB,KAAK,YAAY,QAAQ,EAAE;AAGzD,UAAM,mBAAmB,SAAS;AAAA,MAChC,QAAQ;AAAA,MACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EACnB,UAAE;AACA,cAAU,IAAI;AACd,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,eAAe,mBAAmB,SAAiB,SAA+C;AAChG,MAAI;AACF,UAAM,MAAM,MAAM,SAAS,SAAS,OAAO;AAC3C,UAAM,MAAM,KAAK,MAAM,GAAG;AAC1B,WAAO,OAAO,KAAK,OAAO;AAC1B,UAAM,UAAU,SAAS,KAAK,UAAU,KAAK,MAAM,CAAC,GAAG,OAAO;AAAA,EAChE,QAAQ;AAAA,EAER;AACF;AAEA,KAAK;","names":[]}
|