@usezombie/zombiectl 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +76 -0
- package/bin/zombiectl.js +11 -0
- package/bun.lock +29 -0
- package/package.json +28 -0
- package/scripts/run-tests.mjs +38 -0
- package/src/cli.js +275 -0
- package/src/commands/admin.js +39 -0
- package/src/commands/agent.js +98 -0
- package/src/commands/agent_harness.js +43 -0
- package/src/commands/agent_improvement_report.js +42 -0
- package/src/commands/agent_profile.js +39 -0
- package/src/commands/agent_proposals.js +158 -0
- package/src/commands/agent_scores.js +44 -0
- package/src/commands/core-ops.js +108 -0
- package/src/commands/core.js +537 -0
- package/src/commands/harness.js +35 -0
- package/src/commands/harness_activate.js +53 -0
- package/src/commands/harness_active.js +32 -0
- package/src/commands/harness_compile.js +40 -0
- package/src/commands/harness_source.js +72 -0
- package/src/commands/run_preview.js +212 -0
- package/src/commands/run_preview_walk.js +1 -0
- package/src/commands/runs.js +35 -0
- package/src/commands/spec_init.js +287 -0
- package/src/commands/workspace_billing.js +26 -0
- package/src/constants/error-codes.js +1 -0
- package/src/lib/agent-loop.js +106 -0
- package/src/lib/analytics.js +114 -0
- package/src/lib/api-paths.js +2 -0
- package/src/lib/browser.js +96 -0
- package/src/lib/http.js +149 -0
- package/src/lib/sse-parser.js +50 -0
- package/src/lib/state.js +67 -0
- package/src/lib/tool-executors.js +110 -0
- package/src/lib/walk-dir.js +41 -0
- package/src/program/args.js +95 -0
- package/src/program/auth-guard.js +12 -0
- package/src/program/auth-token.js +44 -0
- package/src/program/banner.js +46 -0
- package/src/program/command-registry.js +17 -0
- package/src/program/http-client.js +38 -0
- package/src/program/io.js +83 -0
- package/src/program/routes.js +20 -0
- package/src/program/suggest.js +76 -0
- package/src/program/validate.js +24 -0
- package/src/ui-progress.js +59 -0
- package/src/ui-theme.js +62 -0
- package/test/admin_config.unit.test.js +25 -0
- package/test/agent-loop.unit.test.js +497 -0
- package/test/agent_harness.unit.test.js +52 -0
- package/test/agent_improvement_report.unit.test.js +74 -0
- package/test/agent_profile.unit.test.js +156 -0
- package/test/agent_proposals.unit.test.js +167 -0
- package/test/agent_scores.unit.test.js +220 -0
- package/test/analytics.unit.test.js +41 -0
- package/test/args.unit.test.js +69 -0
- package/test/auth-guard.test.js +33 -0
- package/test/auth-token.unit.test.js +112 -0
- package/test/banner.unit.test.js +442 -0
- package/test/browser.unit.test.js +16 -0
- package/test/cli-analytics.unit.test.js +296 -0
- package/test/did-you-mean.integration.test.js +76 -0
- package/test/doctor-json.test.js +81 -0
- package/test/error-codes.unit.test.js +7 -0
- package/test/harness-command.unit.test.js +180 -0
- package/test/harness-compile.test.js +81 -0
- package/test/harness-lifecycle.integration.test.js +339 -0
- package/test/harness-source-put.test.js +72 -0
- package/test/harness_activate.unit.test.js +48 -0
- package/test/harness_active.unit.test.js +53 -0
- package/test/harness_compile.unit.test.js +54 -0
- package/test/harness_source.unit.test.js +59 -0
- package/test/help.test.js +276 -0
- package/test/helpers-fs.js +32 -0
- package/test/helpers.js +31 -0
- package/test/io.unit.test.js +57 -0
- package/test/login.unit.test.js +115 -0
- package/test/logout.unit.test.js +65 -0
- package/test/parse.test.js +16 -0
- package/test/run-preview.edge.test.js +422 -0
- package/test/run-preview.integration.test.js +135 -0
- package/test/run-preview.security.test.js +246 -0
- package/test/run-preview.unit.test.js +131 -0
- package/test/run.unit.test.js +149 -0
- package/test/runs-cancel.unit.test.js +288 -0
- package/test/runs-list.unit.test.js +105 -0
- package/test/skill-secret.unit.test.js +94 -0
- package/test/spec-init.edge.test.js +232 -0
- package/test/spec-init.integration.test.js +128 -0
- package/test/spec-init.security.test.js +285 -0
- package/test/spec-init.unit.test.js +160 -0
- package/test/specs-sync.unit.test.js +164 -0
- package/test/sse-parser.unit.test.js +54 -0
- package/test/state.unit.test.js +34 -0
- package/test/streamfetch.unit.test.js +211 -0
- package/test/suggest.test.js +75 -0
- package/test/tool-executors.unit.test.js +165 -0
- package/test/validate.test.js +81 -0
- package/test/workspace-add.test.js +106 -0
- package/test/workspace.unit.test.js +230 -0
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { streamFetch, authHeaders } from "./http.js";
|
|
2
|
+
import { executeTool } from "./tool-executors.js";
|
|
3
|
+
|
|
4
|
+
// Hard caps on a single agent-loop run: at most 10 local tool executions
// and 30 seconds of wall-clock time shared across all streamed requests.
const MAX_TOOL_CALLS = 10;
const MAX_WALL_MS = 30000;

// Tool declarations sent to the server with every request. The model may
// request these; they are executed locally by executeTool() against the
// user's repo. All three are read-only (read/list/glob — no writes).
const TOOL_DEFINITIONS = [
  {
    name: "read_file",
    description: "Read a file from the user's repo",
    input_schema: { type: "object", properties: { path: { type: "string" } }, required: ["path"] },
  },
  {
    name: "list_dir",
    description: "List directory contents",
    input_schema: { type: "object", properties: { path: { type: "string" } }, required: ["path"] },
  },
  {
    name: "glob",
    description: "Find files matching a glob pattern",
    input_schema: { type: "object", properties: { pattern: { type: "string" } }, required: ["pattern"] },
  },
];
|
|
24
|
+
|
|
25
|
+
/**
 * Run the agent tool-call loop.
 * POST messages + tools → receive SSE → execute tool_use locally → POST again → repeat.
 * The loop stops when a stream yields no tool calls, the server signals "done",
 * MAX_TOOL_CALLS is exhausted, or MAX_WALL_MS of wall time has elapsed.
 *
 * @param {string} endpoint - API path (e.g., "/v1/workspaces/{id}/spec/template")
 * @param {string} userMessage - Initial user message
 * @param {string} repoRoot - Absolute path to repo root (tool sandbox boundary)
 * @param {object} ctx - CLI context (apiUrl, token, apiKey, optional fetchImpl)
 * @param {object} callbacks - { onToolCall, onText, onDone, onError } — all optional
 * @returns {Promise<{text: string, usage: object|null, toolCalls: number, wallMs: number}>}
 */
export async function agentLoop(endpoint, userMessage, repoRoot, ctx, callbacks = {}) {
  const baseUrl = ctx.apiUrl;
  const headers = authHeaders({ token: ctx.token, apiKey: ctx.apiKey });
  const url = `${baseUrl}${endpoint}`;

  // Conversation history grows by two entries per executed tool call
  // (assistant tool_use + user tool_result).
  let messages = [{ role: "user", content: userMessage }];
  let toolCalls = 0;
  let accumulatedText = "";
  let lastUsage = null;
  const startTime = Date.now();

  while (toolCalls < MAX_TOOL_CALLS && (Date.now() - startTime) < MAX_WALL_MS) {
    const payload = { messages, tools: TOOL_DEFINITIONS };
    let pendingToolUses = [];
    let gotDone = false;

    // Stream one model turn; events arrive incrementally via SSE.
    await streamFetch(url, payload, headers, (event) => {
      switch (event.type) {
        case "tool_use":
          // Collected here, executed only after the stream completes.
          pendingToolUses.push(event.data);
          break;
        case "text_delta":
          if (event.data?.text) {
            accumulatedText += event.data.text;
            callbacks.onText?.(event.data.text);
          }
          break;
        case "done":
          lastUsage = event.data?.usage ?? null;
          gotDone = true;
          callbacks.onDone?.(event.data);
          break;
        case "error":
          callbacks.onError?.(event.data?.message ?? "unknown error");
          break;
      }
      // Remaining per-stream budget: whatever is left of the 30s wall clock.
    }, { fetchImpl: ctx.fetchImpl, timeoutMs: MAX_WALL_MS - (Date.now() - startTime) });

    // If no tool calls, we're done.
    // NOTE(review): this also breaks when "done" arrived even if tool_use
    // events are pending — those tools are never executed. Confirm the server
    // never emits "done" alongside tool_use in the same stream.
    if (pendingToolUses.length === 0 || gotDone) break;

    // Execute tool calls locally and build next message batch
    for (const tc of pendingToolUses) {
      toolCalls++;
      callbacks.onToolCall?.(tc);
      const result = executeTool(tc.name, tc.input, repoRoot);

      // Append assistant tool_use + user tool_result to message history
      messages.push({
        role: "assistant",
        content: JSON.stringify([{ type: "tool_use", id: tc.id, name: tc.name, input: tc.input }]),
      });
      messages.push({
        role: "user",
        content: JSON.stringify([{ type: "tool_result", tool_use_id: tc.id, content: result }]),
      });
    }

    if (toolCalls >= MAX_TOOL_CALLS) {
      callbacks.onError?.(`max tool calls reached (${MAX_TOOL_CALLS})`);
      break;
    }
  }

  const wallMs = Date.now() - startTime;
  // Only report a timeout error when nothing useful was produced at all.
  if (wallMs >= MAX_WALL_MS && !accumulatedText) {
    callbacks.onError?.("wall time exceeded (30s)");
  }

  return { text: accumulatedText, usage: lastUsage, toolCalls, wallMs };
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// Default PostHog ingestion endpoint for CLI telemetry.
const DEFAULT_POSTHOG_HOST = "https://us.i.posthog.com";
// Project key assembled from chunks at module load — presumably split to
// avoid secret-scanner false positives on the literal (TODO confirm).
// PostHog project keys are write-only/public by design.
const DEFAULT_POSTHOG_KEY = [
  "phc_XmuRIXBST",
  "Rfxka7IgfkU0V",
  "PMD3LDRR3IqIL",
  "XNg3bXzv",
].join("");
|
|
8
|
+
|
|
9
|
+
/**
 * Interpret an environment-variable value as a boolean.
 * Unset/empty → `fallback`; "0", "false", "off", "no" (any case, with
 * surrounding whitespace) → false; every other value → true.
 */
function boolFromEnv(value, fallback) {
  if (value == null || value === "") return fallback;
  const offValues = ["0", "false", "off", "no"];
  const normalized = String(value).trim().toLowerCase();
  return !offValues.includes(normalized);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Resolve telemetry configuration from the environment, falling back to the
 * baked-in PostHog key/host. Telemetry is on by default whenever a key
 * exists, unless ZOMBIE_POSTHOG_ENABLED opts out.
 */
function resolveConfig(env = process.env) {
  const key = env.ZOMBIE_POSTHOG_KEY || DEFAULT_POSTHOG_KEY;
  const host = env.ZOMBIE_POSTHOG_HOST || DEFAULT_POSTHOG_HOST;
  return {
    key,
    host,
    enabled: boolFromEnv(env.ZOMBIE_POSTHOG_ENABLED, key.length > 0),
  };
}
|
|
21
|
+
|
|
22
|
+
/**
 * Drop null/undefined values and stringify everything else so only flat
 * string properties ever reach the telemetry backend.
 */
function sanitizeProperties(properties = {}) {
  return Object.fromEntries(
    Object.entries(properties)
      .filter(([, value]) => value != null)
      .map(([key, value]) => [key, String(value)]),
  );
}
|
|
30
|
+
|
|
31
|
+
/**
 * Build a PostHog client for CLI telemetry, or return null when telemetry
 * is disabled, no key is configured, or posthog-node cannot be loaded.
 * Flushes after every event (flushAt: 1, flushInterval: 0) so short-lived
 * CLI processes do not drop events.
 */
export async function createCliAnalytics(env = process.env) {
  const { key, host, enabled } = resolveConfig(env);
  if (!enabled || !key) return null;

  try {
    const mod = await import("posthog-node");
    const PostHog = mod.PostHog || mod.default;
    return new PostHog(key, { host, flushAt: 1, flushInterval: 0 });
  } catch {
    // posthog-node is optional; run without telemetry when it is absent.
    return null;
  }
}
|
|
48
|
+
|
|
49
|
+
/**
 * Fire-and-forget capture of a single telemetry event.
 * No-op without a client; capture failures are swallowed on purpose —
 * telemetry must never block or break CLI UX.
 */
export function trackCliEvent(client, distinctId, event, properties = {}) {
  if (!client) return;
  const capturePayload = {
    distinctId: distinctId || "anonymous",
    event,
    properties: sanitizeProperties(properties),
  };
  try {
    client.capture(capturePayload);
  } catch {
    // Swallowed on purpose: telemetry must never break CLI UX.
  }
}
|
|
61
|
+
|
|
62
|
+
/**
 * Merge sanitized properties into ctx.analyticsContext (later keys win).
 * No-op when ctx is missing.
 */
export function setCliAnalyticsContext(ctx, properties = {}) {
  if (!ctx) return;
  ctx.analyticsContext = Object.assign(
    {},
    ctx.analyticsContext,
    sanitizeProperties(properties),
  );
}
|
|
70
|
+
|
|
71
|
+
/** Return a shallow copy of the accumulated analytics context ({} if none). */
export function getCliAnalyticsContext(ctx) {
  const current = ctx?.analyticsContext;
  if (!current) return {};
  return Object.assign({}, current);
}
|
|
74
|
+
|
|
75
|
+
/**
 * Buffer an event on ctx.analyticsEvents for a later flush.
 * Initializes the queue lazily; no-op when ctx is missing.
 */
export function queueCliAnalyticsEvent(ctx, event, properties = {}) {
  if (!ctx) return;
  const queue = Array.isArray(ctx.analyticsEvents) ? ctx.analyticsEvents : [];
  queue.push({ event, properties: sanitizeProperties(properties) });
  ctx.analyticsEvents = queue;
}
|
|
83
|
+
|
|
84
|
+
/**
 * Remove and return all buffered analytics events.
 * Returns a fresh array copy; the ctx queue is reset to empty.
 */
export function drainCliAnalyticsEvents(ctx) {
  const queue = ctx?.analyticsEvents;
  if (!Array.isArray(queue) || queue.length === 0) return [];
  const drained = [...queue];
  ctx.analyticsEvents = [];
  return drained;
}
|
|
90
|
+
|
|
91
|
+
/**
 * Flush and dispose the PostHog client. Safe to call with null; shutdown
 * failures (sync or async) are ignored — telemetry is best-effort.
 */
export async function shutdownCliAnalytics(client) {
  if (!client) return;
  try {
    await client.shutdown();
  } catch {
    // Shutdown failures are non-fatal for the CLI; ignore them.
  }
}
|
|
99
|
+
|
|
100
|
+
// Test-only surface: exposes private helpers so unit tests can exercise
// them directly. Not part of the public CLI API.
export const cliAnalyticsInternals = {
  DEFAULT_POSTHOG_KEY,
  drainCliAnalyticsEvents,
  getCliAnalyticsContext,
  queueCliAnalyticsEvent,
  resolveConfig,
  sanitizeProperties,
  setCliAnalyticsContext,
};

// Convenience bundle of the public client lifecycle functions.
export const cliAnalytics = {
  createCliAnalytics,
  trackCliEvent,
  shutdownCliAnalytics,
};
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
|
|
3
|
+
/**
 * True when the BROWSER env var explicitly opts out of opening a browser
 * ("false", "0", "off", "none" — case-insensitive, whitespace-tolerant).
 * An unset BROWSER means "not disabled".
 */
function browserDisabled(env) {
  const raw = env.BROWSER;
  if (raw == null) return false;
  const optOut = ["false", "0", "off", "none"];
  return optOut.includes(String(raw).trim().toLowerCase());
}
|
|
9
|
+
|
|
10
|
+
/** True when an X11 (DISPLAY) or Wayland (WAYLAND_DISPLAY) display is set. */
function hasDisplay(env) {
  return Boolean(env.DISPLAY) || Boolean(env.WAYLAND_DISPLAY);
}
|
|
13
|
+
|
|
14
|
+
/** True when the process appears to be running inside an SSH session. */
function isSsh(env) {
  const markers = [env.SSH_CLIENT, env.SSH_TTY, env.SSH_CONNECTION];
  return markers.some(Boolean);
}
|
|
17
|
+
|
|
18
|
+
/**
 * Heuristic WSL detection from the environment.
 * WSL sets WSL_DISTRO_NAME / WSL_INTEROP; their mere *presence* is the
 * signal — the previous substring check missed distros like "Ubuntu"
 * whose names do not contain "wsl". OSTYPE is kept as a fallback check.
 */
function looksLikeWsl(env) {
  if (env.WSL_DISTRO_NAME || env.WSL_INTEROP) return true;
  return String(env.OSTYPE || "").toLowerCase().includes("wsl");
}
|
|
22
|
+
|
|
23
|
+
/**
 * Check whether `command` resolves on the user's PATH via the shell
 * builtin `command -v`. Because the name is interpolated into a shell
 * string, it is validated first: anything other than a plain executable
 * name (letters, digits, ".", "_", "-") is rejected outright, which
 * closes the shell-injection hole for non-literal inputs.
 *
 * @param {string} command - Bare command name, e.g. "xdg-open".
 * @returns {Promise<boolean>} Never rejects; spawn failures yield false.
 */
function commandExists(command) {
  if (!/^[A-Za-z0-9][A-Za-z0-9._-]*$/.test(String(command))) {
    return Promise.resolve(false);
  }
  return new Promise((resolve) => {
    const probe = spawn("sh", ["-lc", `command -v ${command} >/dev/null 2>&1`], {
      stdio: "ignore",
    });
    probe.on("exit", (code) => resolve(code === 0));
    probe.on("error", () => resolve(false));
  });
}
|
|
32
|
+
|
|
33
|
+
/**
 * Decide how to open a URL on this platform.
 * Returns { argv, quoteUrl, command } when a launcher is available, or
 * { argv: null, reason } explaining why the browser cannot be opened
 * (reasons: browser-disabled, wsl-no-wslview, ssh-no-display, no-display,
 * missing-xdg-open, unsupported-platform).
 */
export async function resolveBrowserCommand(env = process.env, platform = process.platform) {
  if (browserDisabled(env)) return { argv: null, reason: "browser-disabled" };

  if (platform === "win32") {
    // `start` needs an empty window-title argument and a quoted URL.
    return { argv: ["cmd", "/c", "start", ""], quoteUrl: true, command: "cmd" };
  }
  if (platform === "darwin") {
    return { argv: ["open"], quoteUrl: false, command: "open" };
  }
  if (platform !== "linux") {
    return { argv: null, reason: "unsupported-platform" };
  }

  // Linux, possibly under WSL: prefer wslview there.
  if (looksLikeWsl(env)) {
    if (await commandExists("wslview")) {
      return { argv: ["wslview"], quoteUrl: false, command: "wslview" };
    }
    if (!hasDisplay(env)) {
      return { argv: null, reason: "wsl-no-wslview" };
    }
  }

  if (!hasDisplay(env)) {
    return { argv: null, reason: isSsh(env) ? "ssh-no-display" : "no-display" };
  }

  if (await commandExists("xdg-open")) {
    return { argv: ["xdg-open"], quoteUrl: false, command: "xdg-open" };
  }

  return { argv: null, reason: "missing-xdg-open" };
}
|
|
70
|
+
|
|
71
|
+
/**
 * Open `url` in the user's default browser.
 *
 * @param {string} url - URL to open.
 * @param {object} [opts] - { env, platform } overrides for testing.
 * @returns {Promise<boolean>} true once the launcher process has spawned;
 *   false when no launcher is available or the spawn fails.
 */
export async function openUrl(url, opts = {}) {
  const env = opts.env || process.env;
  const platform = opts.platform || process.platform;

  const resolved = await resolveBrowserCommand(env, platform);
  if (!resolved.argv) return false;

  // Windows `start` needs the URL quoted (passed verbatim to cmd.exe).
  const argv = [...resolved.argv, resolved.quoteUrl ? `"${url}"` : url];

  return new Promise((resolve) => {
    const child = spawn(argv[0], argv.slice(1), {
      detached: true,
      stdio: "ignore",
      windowsVerbatimArguments: resolved.quoteUrl === true,
    });

    // BUG FIX: the previous version resolved `true` synchronously, so the
    // async "error" event (e.g. ENOENT) could never win and spawn failures
    // were reported as success. Resolve only once the child either spawns
    // or fails to spawn.
    child.once("error", () => resolve(false));
    child.once("spawn", () => {
      child.unref(); // let the CLI exit without waiting for the browser
      resolve(true);
    });
  });
}
|
package/src/lib/http.js
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
const DEFAULT_TIMEOUT_MS = 15000;

/**
 * Error type thrown for failed API calls. Carries the HTTP status, a
 * machine-readable error code, the server request id (when present), and
 * the parsed (or raw) response body alongside the human-readable message.
 */
export class ApiError extends Error {
  constructor(message, { status, code, requestId, body } = {}) {
    super(message);
    this.name = "ApiError";
    this.status = status;
    this.code = code;
    this.requestId = requestId;
    this.body = body;
  }
}
|
|
13
|
+
|
|
14
|
+
/**
 * JSON-over-HTTP request with a timeout.
 * Resolves with the parsed response body ({} when the body is empty or not
 * JSON). Throws ApiError for non-2xx responses, on timeout (408/TIMEOUT),
 * and when no fetch implementation is available (NO_FETCH).
 *
 * @param {string} url - Absolute request URL.
 * @param {object} [options] - { method, headers, body, timeoutMs, fetchImpl }.
 * @returns {Promise<object>}
 * @throws {ApiError}
 */
export async function apiRequest(url, options = {}) {
  const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const doFetch = options.fetchImpl || globalThis.fetch;
  if (typeof doFetch !== "function") {
    throw new ApiError("fetch is unavailable", { code: "NO_FETCH" });
  }

  const aborter = new AbortController();
  const deadline = setTimeout(() => aborter.abort(), timeoutMs);

  try {
    const response = await doFetch(url, {
      method: options.method || "GET",
      headers: options.headers || {},
      body: options.body,
      signal: aborter.signal,
    });

    // Read as text first: error bodies may be non-JSON.
    const raw = await response.text();
    let parsed = null;
    if (raw.length > 0) {
      try {
        parsed = JSON.parse(raw);
      } catch {
        parsed = null;
      }
    }

    if (!response.ok) {
      const message = parsed?.error?.message || response.statusText || "request failed";
      throw new ApiError(message, {
        status: response.status,
        code: parsed?.error?.code || `HTTP_${response.status}`,
        requestId: parsed?.error?.request_id ?? parsed?.request_id ?? null,
        body: parsed ?? raw,
      });
    }

    return parsed ?? {};
  } catch (err) {
    // AbortController firing surfaces as AbortError; report it as a timeout.
    if (err.name === "AbortError") {
      throw new ApiError(`request timed out after ${timeoutMs}ms`, {
        status: 408,
        code: "TIMEOUT",
      });
    }
    throw err;
  } finally {
    clearTimeout(deadline);
  }
}
|
|
67
|
+
|
|
68
|
+
/**
 * POST with SSE streaming response. Calls onEvent for each parsed SSE event.
 * Returns when the stream ends or an error occurs.
 *
 * The timeout covers the ENTIRE stream (connect through last byte), not
 * just the initial response. On abort an ApiError(408, "TIMEOUT") is thrown;
 * non-2xx responses are read fully and thrown as ApiError.
 *
 * @param {string} url - Absolute endpoint URL.
 * @param {object} payload - JSON-serialized request body.
 * @param {object} headers - Extra request headers (auth etc.).
 * @param {(event: {type: string, data: any}) => void} onEvent - Per-event callback.
 * @param {object} [options] - { fetchImpl, timeoutMs } (default 30s).
 * @throws {ApiError}
 */
export async function streamFetch(url, payload, headers, onEvent, options = {}) {
  const timeoutMs = options.timeoutMs ?? 30000;
  const fetchImpl = options.fetchImpl || globalThis.fetch;
  const ctrl = new AbortController();
  const timer = setTimeout(() => ctrl.abort(), timeoutMs);

  try {
    const res = await fetchImpl(url, {
      method: "POST",
      headers: { ...headers, "Content-Type": "application/json", "Accept": "text/event-stream" },
      body: JSON.stringify(payload),
      signal: ctrl.signal,
    });

    if (!res.ok) {
      // Error responses are plain (possibly JSON) bodies, not SSE.
      const text = await res.text();
      let json = null;
      try { json = JSON.parse(text); } catch { /* ignore */ }
      const errorCode = json?.error?.code || `HTTP_${res.status}`;
      const message = json?.error?.message || res.statusText || "request failed";
      throw new ApiError(message, { status: res.status, code: errorCode, body: json ?? text });
    }

    // Incrementally decode the byte stream and split on the SSE frame
    // separator ("\n\n"). { stream: true } keeps multi-byte characters
    // that straddle chunk boundaries intact.
    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let buf = "";

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buf += decoder.decode(value, { stream: true });

      let boundary;
      while ((boundary = buf.indexOf("\n\n")) !== -1) {
        const frame = buf.slice(0, boundary);
        buf = buf.slice(boundary + 2);
        const event = parseSseFrame(frame);
        if (event) onEvent(event);
      }
    }
    // Any bytes left in `buf` form an unterminated frame and are dropped,
    // consistent with the SSE rule that events dispatch on a blank line.
  } catch (err) {
    if (err.name === "AbortError") {
      throw new ApiError(`stream timed out after ${timeoutMs}ms`, { status: 408, code: "TIMEOUT" });
    }
    throw err;
  } finally {
    clearTimeout(timer);
  }
}
|
|
121
|
+
|
|
122
|
+
/**
 * Parse one SSE frame (the text between blank-line separators).
 * Recognizes `event:` and `data:` fields (last `data:` line wins);
 * comment/heartbeat lines (leading ":") are ignored. Returns
 * { type, data } with data JSON-parsed when possible, or null when the
 * frame carried no data field.
 */
function parseSseFrame(frame) {
  let eventType = "message";
  let payload = "";

  for (const line of frame.split("\n")) {
    if (line.startsWith(":")) continue; // heartbeat/comment
    if (line.startsWith("event: ")) {
      eventType = line.slice("event: ".length);
    } else if (line.startsWith("data: ")) {
      payload = line.slice("data: ".length);
    }
  }

  if (!payload) return null;

  try {
    return { type: eventType, data: JSON.parse(payload) };
  } catch {
    return { type: eventType, data: payload };
  }
}
|
|
133
|
+
|
|
134
|
+
/**
 * Build the default JSON headers plus a Bearer Authorization header.
 * A session token takes precedence over an API key; with neither, only
 * the Content-Type header is returned.
 */
export function authHeaders(auth) {
  const headers = { "Content-Type": "application/json" };
  const bearer = auth?.token || auth?.apiKey;
  if (bearer) {
    headers.Authorization = `Bearer ${bearer}`;
  }
  return headers;
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
 * Split a text buffer into complete SSE events.
 * Returns { events, remainder }: every complete frame (terminated by a
 * blank line) is parsed into { type, data }; the unterminated tail is
 * handed back as `remainder` for the caller to re-buffer.
 */
export function parseSseBuffer(buf) {
  const events = [];
  let rest = buf;
  let cut = rest.indexOf("\n\n");

  while (cut !== -1) {
    const parsed = parseSseFrame(rest.slice(0, cut));
    rest = rest.slice(cut + 2);
    if (parsed) events.push(parsed);
    cut = rest.indexOf("\n\n");
  }

  return { events, remainder: rest };
}

/**
 * Parse a single SSE frame (text between double newlines).
 * Recognizes `event:` and `data:` fields (last `data:` wins); comment
 * lines (leading ":") are skipped. Returns { type, data } with data
 * JSON-parsed when possible, or null when no data field was present.
 */
function parseSseFrame(frame) {
  let eventType = "message";
  let payload = "";

  for (const line of frame.split("\n")) {
    if (line.startsWith(":")) continue; // comment/heartbeat
    if (line.startsWith("event: ")) {
      eventType = line.slice("event: ".length);
    } else if (line.startsWith("data: ")) {
      payload = line.slice("data: ".length);
    }
  }

  if (!payload) return null;

  try {
    return { type: eventType, data: JSON.parse(payload) };
  } catch {
    return { type: eventType, data: payload };
  }
}
|
package/src/lib/state.js
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { randomBytes } from "node:crypto";
|
|
2
|
+
import fs from "node:fs/promises";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
|
|
6
|
+
/**
 * Compute where CLI state lives on disk.
 * Honors ZOMBIE_STATE_DIR; defaults to ~/.config/zombiectl.
 * Returns the base directory plus the credentials and workspaces file paths.
 */
function resolveStatePaths() {
  const baseDir =
    process.env.ZOMBIE_STATE_DIR || path.join(os.homedir(), ".config", "zombiectl");
  const stateFile = (name) => path.join(baseDir, name);
  return {
    baseDir,
    credentialsPath: stateFile("credentials.json"),
    workspacesPath: stateFile("workspaces.json"),
  };
}
|
|
14
|
+
|
|
15
|
+
/** Create the state directory (and any missing parents) if absent. */
async function ensureBaseDir() {
  const { baseDir } = resolveStatePaths();
  await fs.mkdir(baseDir, { recursive: true });
}
|
|
19
|
+
|
|
20
|
+
/**
 * Read and parse a JSON file, returning `fallback` when the file is
 * missing (ENOENT) or contains invalid JSON. Any other I/O error
 * propagates to the caller.
 */
async function readJson(filePath, fallback) {
  let raw;
  try {
    raw = await fs.readFile(filePath, "utf8");
  } catch (err) {
    if (err?.code === "ENOENT") return fallback;
    throw err;
  }
  try {
    return JSON.parse(raw);
  } catch {
    // Corrupt state files are treated the same as missing ones.
    return fallback;
  }
}
|
|
29
|
+
|
|
30
|
+
/**
 * Serialize `value` as pretty-printed JSON (with trailing newline) and
 * write it with owner-only permissions, creating the state dir first.
 * NOTE(review): `mode` only applies when the file is created — an existing
 * file keeps its current permissions; confirm whether that matters here.
 */
async function writeJson(filePath, value) {
  await ensureBaseDir();
  const serialized = `${JSON.stringify(value, null, 2)}\n`;
  await fs.writeFile(filePath, serialized, { mode: 0o600 });
}
|
|
35
|
+
|
|
36
|
+
/** Generate a 24-character hex idempotency key (96 bits of randomness). */
export function newIdempotencyKey() {
  const bytes = randomBytes(12);
  return bytes.toString("hex");
}
|
|
39
|
+
|
|
40
|
+
/** Load saved login credentials; returns an all-null record when none exist. */
export async function loadCredentials() {
  const { credentialsPath } = resolveStatePaths();
  const emptyRecord = { token: null, saved_at: null, session_id: null, api_url: null };
  return readJson(credentialsPath, emptyRecord);
}

/** Persist the credentials record to disk (0600). */
export async function saveCredentials(next) {
  const { credentialsPath } = resolveStatePaths();
  await writeJson(credentialsPath, next);
}

/** Overwrite credentials with a logged-out record (token cleared, saved_at stamped). */
export async function clearCredentials() {
  const { credentialsPath } = resolveStatePaths();
  await writeJson(credentialsPath, {
    token: null,
    saved_at: Date.now(),
    session_id: null,
    api_url: null,
  });
}

/** Load the workspace registry; defaults to no current workspace and no items. */
export async function loadWorkspaces() {
  const { workspacesPath } = resolveStatePaths();
  return readJson(workspacesPath, { current_workspace_id: null, items: [] });
}

/** Persist the workspace registry to disk. */
export async function saveWorkspaces(next) {
  const { workspacesPath } = resolveStatePaths();
  await writeJson(workspacesPath, next);
}
|
|
64
|
+
|
|
65
|
+
// Test-only surface: lets unit tests inspect the resolved state paths
// without touching the real filesystem helpers.
export const stateInternals = {
  resolveStatePaths,
};
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { readFileSync, readdirSync, statSync } from "node:fs";
|
|
2
|
+
import { resolve, sep, relative } from "node:path";
|
|
3
|
+
|
|
4
|
+
const MAX_GLOB_RESULTS = 500;
const MAX_FILE_SIZE = 256 * 1024; // 256KB

/**
 * Validate that a resolved path is within the repo root.
 * Returns { resolved } on success or { error } when the input escapes
 * the root (e.g. via "..").
 * NOTE(review): symlinks are not resolved, so a link inside the repo can
 * still point outside it — confirm whether callers need realpath checks.
 */
export function validatePath(inputPath, repoRoot) {
  const candidate = resolve(repoRoot, inputPath);
  const insideRoot = candidate === repoRoot || candidate.startsWith(repoRoot + sep);
  if (!insideRoot) {
    return { error: "path outside repo root" };
  }
  return { resolved: candidate };
}
|
|
18
|
+
|
|
19
|
+
/**
 * Execute a tool call locally against the repo.
 * Returns the result string; failures (including unknown tool names) are
 * returned as "error: ..." strings rather than thrown.
 */
export function executeTool(name, input, repoRoot) {
  if (name === "read_file") return executeReadFile(input.path, repoRoot);
  if (name === "list_dir") return executeListDir(input.path, repoRoot);
  if (name === "glob") return executeGlob(input.pattern, repoRoot);
  return `error: unknown tool "${name}"`;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Read a repo-relative file as UTF-8.
 * Enforces the repo-root boundary and the MAX_FILE_SIZE cap; directories
 * and missing files produce "error: ..." strings instead of throwing.
 */
function executeReadFile(path, repoRoot) {
  const checked = validatePath(path, repoRoot);
  if (checked.error) return `error: ${checked.error}`;

  try {
    const info = statSync(checked.resolved);
    if (info.isDirectory()) {
      return "error: path is a directory, use list_dir instead";
    }
    if (info.size > MAX_FILE_SIZE) {
      return `error: file too large (${info.size} bytes, max ${MAX_FILE_SIZE})`;
    }
    return readFileSync(checked.resolved, "utf8");
  } catch (err) {
    return err.code === "ENOENT"
      ? `error: file not found: ${path}`
      : `error: ${err.message}`;
  }
}
|
|
49
|
+
|
|
50
|
+
/**
 * List a repo-relative directory ("." when no path is given).
 * Hides .git, suffixes directories with "/", sorts entries, and returns
 * "error: ..." strings for invalid or missing paths.
 */
function executeListDir(path, repoRoot) {
  const checked = validatePath(path || ".", repoRoot);
  if (checked.error) return `error: ${checked.error}`;

  try {
    const names = [];
    for (const entry of readdirSync(checked.resolved, { withFileTypes: true })) {
      if (entry.name === ".git") continue;
      names.push(entry.isDirectory() ? `${entry.name}/` : entry.name);
    }
    return names.sort().join("\n");
  } catch (err) {
    return err.code === "ENOENT"
      ? `error: directory not found: ${path}`
      : `error: ${err.message}`;
  }
}
|
|
66
|
+
|
|
67
|
+
/**
 * Minimal glob matcher: "**" spans directories, "*" matches within one
 * path segment, "?" matches a single non-slash character.
 *
 * Fix: regex metacharacters other than the glob wildcards (+ ( ) [ ] { }
 * ^ $ | \ .) are now escaped — previously a pattern like "a+.js" compiled
 * "+" as a regex quantifier and matched "aa.js". Wildcards are protected
 * with sentinel bytes before escaping, then expanded afterwards.
 *
 * @param {string} filePath - Repo-relative path with "/" separators.
 * @param {string} pattern - Glob pattern.
 * @returns {boolean} Whether the whole path matches the pattern.
 */
function micromatch(filePath, pattern) {
  const regex = pattern
    .replace(/\*\*\//g, "\x00")                 // protect "**/"
    .replace(/\*\*/g, "\x01")                   // protect "**"
    .replace(/[.+^${}()|[\]\\]/g, "\\$&")       // escape regex specials
    .replace(/\*/g, "[^/]*")                    // "*": within one segment
    .replace(/\?/g, "[^/]")                     // "?": single non-slash char
    .replace(/\x00/g, "(?:.*/)?")               // "**/": any dir prefix
    .replace(/\x01/g, ".*");                    // "**": anything
  return new RegExp(`^${regex}$`).test(filePath);
}
|
|
78
|
+
|
|
79
|
+
/**
 * Depth-first file walk under `dir`, pushing root-relative paths into
 * `results` until `limit` entries have been collected. Skips .git
 * everywhere; unreadable directories are silently ignored.
 */
function walkSync(dir, root, results, limit) {
  let entries;
  try {
    entries = readdirSync(dir, { withFileTypes: true });
  } catch {
    return; // permission error or vanished dir: skip silently
  }

  for (const entry of entries) {
    if (results.length >= limit) return;
    if (entry.name === ".git") continue;

    const absolute = resolve(dir, entry.name);
    if (entry.isDirectory()) {
      walkSync(absolute, root, results, limit);
    } else {
      results.push(relative(root, absolute));
    }
  }
}
|
|
93
|
+
|
|
94
|
+
/**
 * Match repo files against a glob pattern.
 * Walks up to 10x MAX_GLOB_RESULTS candidate files, keeps at most
 * MAX_GLOB_RESULTS matches, and returns them sorted one per line.
 * Returns "(no matches)" when nothing matched and "error: ..." on failure.
 */
function executeGlob(pattern, repoRoot) {
  try {
    const candidates = [];
    walkSync(repoRoot, repoRoot, candidates, MAX_GLOB_RESULTS * 10);

    const matches = [];
    for (const file of candidates) {
      if (!micromatch(file, pattern)) continue;
      matches.push(file);
      if (matches.length >= MAX_GLOB_RESULTS) break;
    }

    if (matches.length === 0) return "(no matches)";
    return matches.sort().join("\n");
  } catch (err) {
    return `error: ${err.message}`;
  }
}
|