@aigne/afs-ash 1.11.0-beta.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +26 -0
- package/dist/agent-run-CXJQ0jPV.mjs +873 -0
- package/dist/agent-run-CXJQ0jPV.mjs.map +1 -0
- package/dist/index.d.mts +179 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +1715 -0
- package/dist/index.mjs.map +1 -0
- package/docs/agent.md +322 -0
- package/docs/cookbook/patterns.md +250 -0
- package/docs/cookbook/quickref.md +111 -0
- package/package.json +61 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,1715 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
|
|
2
|
+
import { mkdir, readFile, readdir, rm, stat, writeFile } from "node:fs/promises";
|
|
3
|
+
import { dirname, join } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import { AFSNotFoundError, createProgramAFS, parseProgramManifest } from "@aigne/afs";
|
|
6
|
+
import { AFSBaseProvider, Actions, Delete, Exec, Explain, List, Meta, Read, Search, Stat, Write } from "@aigne/afs/provider";
|
|
7
|
+
import { ASH_REFERENCE, compileSource } from "@aigne/ash";
|
|
8
|
+
import { Cron, CronPattern } from "croner";
|
|
9
|
+
import { joinURL } from "ufo";
|
|
10
|
+
import { parse } from "yaml";
|
|
11
|
+
import { z } from "zod";
|
|
12
|
+
|
|
13
|
+
//#region \0@oxc-project+runtime@0.108.0/helpers/decorate.js
|
|
14
|
+
/**
 * TypeScript decorator runtime helper (oxc-project runtime).
 * Two call shapes: (decorators, classCtor) for class decoration (2 args),
 * or (decorators, proto, key[, desc]) for member decoration (3-4 args).
 */
function __decorate(decorators, target, key, desc) {
	const argc = arguments.length;
	let result;
	if (argc < 3) {
		// Class decoration: the constructor itself is the decorated value.
		result = target;
	} else if (desc === null) {
		// Member decoration without an explicit descriptor: look it up.
		desc = Object.getOwnPropertyDescriptor(target, key);
		result = desc;
	} else {
		result = desc;
	}
	if (typeof Reflect === "object" && typeof Reflect.decorate === "function") {
		result = Reflect.decorate(decorators, target, key, desc);
	} else {
		// Apply decorators innermost-first; a falsy return keeps the prior value.
		for (let i = decorators.length - 1; i >= 0; i--) {
			const decorator = decorators[i];
			if (!decorator) continue;
			const applied = argc < 3 ? decorator(result) : argc > 3 ? decorator(target, key, result) : decorator(target, key);
			result = applied || result;
		}
	}
	if (argc > 3 && result) Object.defineProperty(target, key, result);
	return result;
}
|
|
20
|
+
|
|
21
|
+
//#endregion
|
|
22
|
+
//#region src/index.ts
|
|
23
|
+
// Bundled docs ship next to dist/: resolve <package>/docs from this module's URL.
const DOCS_DIR = join(dirname(fileURLToPath(import.meta.url)), "..", "docs");
/**
 * Read a bundled documentation file as UTF-8 text.
 * @param {string} relativePath path relative to the package docs directory
 * @returns {string} file contents
 */
function readDoc(relativePath) {
	const absolute = join(DOCS_DIR, relativePath);
	return readFileSync(absolute, "utf-8");
}
|
|
27
|
+
/**
 * Derive tool declarations for agent-run from a ProgramManifest.
 *
 * Generates tools for:
 * - `/program/**` — readonly (read, list, search, stat)
 * - `/data/**` — readwrite (read, list, write, delete, search, stat)
 * - `/{target}/**` — per mount ops from manifest (skipped when a mount has an
 *   explicitly empty ops list; defaults to ["read", "list"] when unset)
 */
function deriveToolsFromProgram(manifest) {
	const declarations = [];
	// Program source is read-only to the agent.
	declarations.push({
		path: "/program/**",
		ops: ["read", "list", "search", "stat"],
		maxDepth: 10
	});
	// The data area is fully read-write.
	declarations.push({
		path: "/data/**",
		ops: ["read", "list", "write", "delete", "search", "stat"],
		maxDepth: 10
	});
	for (const mount of manifest.mounts) {
		const mountOps = mount.ops ?? ["read", "list"];
		if (mountOps.length === 0) continue;
		declarations.push({
			path: `${mount.target}/**`,
			ops: [...mountOps],
			maxDepth: 5
		});
	}
	return declarations;
}
|
|
68
|
+
/**
 * Redact quoted absolute paths in an error message so provider internals
 * don't leak: '/tmp/x' → '[path]' and "/tmp/x" → "[path]".
 */
function sanitizeErrorPath(msg) {
	const singleQuoted = msg.replace(/'\/[^']*'/g, "'[path]'");
	return singleQuoted.replace(/"\/[^"]*"/g, '"[path]"');
}
|
|
71
|
+
/**
 * Normalize a stored entry's content into a flat record:
 * - null/undefined → {}
 * - plain object   → { content, ...fields } (fields spread alongside raw value)
 * - string         → YAML-parse; when it yields a plain object, spread it
 * - anything else (arrays, numbers, unparseable strings) → { content }
 */
function normalizeContent(content) {
	if (content == null) return {};
	const isPlainObject = typeof content === "object" && !Array.isArray(content);
	if (isPlainObject) return { content, ...content };
	if (typeof content === "string") {
		try {
			const parsed = parse(content);
			if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) {
				return { content, ...parsed };
			}
		} catch {
			// Not YAML — fall through to the plain wrapper.
		}
	}
	return { content };
}
|
|
86
|
+
/**
|
|
87
|
+
* AFS ASH Provider
|
|
88
|
+
*
|
|
89
|
+
* Bridges ASH pipeline DSL to AFS. ASH scripts are stored in-memory and
|
|
90
|
+
* executed against AFS via a world bridge that translates ASH's
|
|
91
|
+
* read/write/publish to AFS read/write operations.
|
|
92
|
+
*
|
|
93
|
+
* Structure:
|
|
94
|
+
* - `/` - root directory
|
|
95
|
+
* - `/scripts` - stored .ash scripts
|
|
96
|
+
* - `/scripts/{name}.ash` - individual script files (executable)
|
|
97
|
+
* - `/.actions/run` - execute inline ASH code
|
|
98
|
+
* - `/.actions/validate` - validate ASH source (compile + type-check)
|
|
99
|
+
*/
|
|
100
|
+
var AFSAsh = class AFSAsh extends AFSBaseProvider {
|
|
101
|
+
static manifest() {
|
|
102
|
+
return {
|
|
103
|
+
name: "ash",
|
|
104
|
+
description: "ASH pipeline DSL for deterministic data pipelines.\n- Store, validate, and execute ASH scripts\n- Commands: find, where, map, save, publish, tee, fanout, output, input, count, group-by, action, route, lookup\n- Read /.meta/.ash-reference for full syntax",
|
|
105
|
+
uriTemplate: "ash://",
|
|
106
|
+
category: "compute",
|
|
107
|
+
schema: z.object({
|
|
108
|
+
name: z.string().optional(),
|
|
109
|
+
description: z.string().optional(),
|
|
110
|
+
allowRootActions: z.array(z.enum([
|
|
111
|
+
"agent-run",
|
|
112
|
+
"mount",
|
|
113
|
+
"unmount",
|
|
114
|
+
"read",
|
|
115
|
+
"list",
|
|
116
|
+
"write",
|
|
117
|
+
"delete"
|
|
118
|
+
])).optional()
|
|
119
|
+
}),
|
|
120
|
+
tags: [
|
|
121
|
+
"ash",
|
|
122
|
+
"pipeline",
|
|
123
|
+
"etl",
|
|
124
|
+
"dsl"
|
|
125
|
+
],
|
|
126
|
+
capabilityTags: [
|
|
127
|
+
"read-write",
|
|
128
|
+
"search",
|
|
129
|
+
"auth:none",
|
|
130
|
+
"local"
|
|
131
|
+
],
|
|
132
|
+
security: {
|
|
133
|
+
riskLevel: "system",
|
|
134
|
+
resourceAccess: ["process-spawn"],
|
|
135
|
+
dataSensitivity: ["code"],
|
|
136
|
+
notes: ["Executes ASH pipeline scripts that can read/write data across mounted providers"]
|
|
137
|
+
},
|
|
138
|
+
capabilities: { crossProvider: { afsAccess: true } }
|
|
139
|
+
};
|
|
140
|
+
}
|
|
141
|
+
static treeSchema() {
|
|
142
|
+
return {
|
|
143
|
+
operations: [
|
|
144
|
+
"list",
|
|
145
|
+
"read",
|
|
146
|
+
"write",
|
|
147
|
+
"delete",
|
|
148
|
+
"exec",
|
|
149
|
+
"search",
|
|
150
|
+
"stat",
|
|
151
|
+
"explain"
|
|
152
|
+
],
|
|
153
|
+
tree: {
|
|
154
|
+
"/": {
|
|
155
|
+
kind: "ash:root",
|
|
156
|
+
operations: [
|
|
157
|
+
"list",
|
|
158
|
+
"read",
|
|
159
|
+
"exec"
|
|
160
|
+
],
|
|
161
|
+
actions: ["run", "validate"]
|
|
162
|
+
},
|
|
163
|
+
"/scripts": {
|
|
164
|
+
kind: "ash:directory",
|
|
165
|
+
operations: ["list", "read"]
|
|
166
|
+
},
|
|
167
|
+
"/scripts/{name}.ash": {
|
|
168
|
+
kind: "ash:script",
|
|
169
|
+
operations: [
|
|
170
|
+
"read",
|
|
171
|
+
"write",
|
|
172
|
+
"delete",
|
|
173
|
+
"exec"
|
|
174
|
+
],
|
|
175
|
+
actions: ["exec"]
|
|
176
|
+
}
|
|
177
|
+
},
|
|
178
|
+
auth: { type: "none" },
|
|
179
|
+
bestFor: [
|
|
180
|
+
"data pipelines",
|
|
181
|
+
"ETL scripts",
|
|
182
|
+
"deterministic DSL"
|
|
183
|
+
],
|
|
184
|
+
notFor: ["general-purpose code"]
|
|
185
|
+
};
|
|
186
|
+
}
|
|
187
|
+
static async load({ config } = {}) {
|
|
188
|
+
return new AFSAsh(config);
|
|
189
|
+
}
|
|
190
|
+
name;
|
|
191
|
+
description;
|
|
192
|
+
accessMode = "readwrite";
|
|
193
|
+
scripts = /* @__PURE__ */ new Map();
|
|
194
|
+
afsRoot;
|
|
195
|
+
scriptsDir;
|
|
196
|
+
dataDir;
|
|
197
|
+
allowRootActions;
|
|
198
|
+
triggerSubs = /* @__PURE__ */ new Map();
|
|
199
|
+
cronJobs = /* @__PURE__ */ new Map();
|
|
200
|
+
externalScripts = /* @__PURE__ */ new Map();
|
|
201
|
+
externalScriptSubs = [];
|
|
202
|
+
dirsReady = false;
|
|
203
|
+
constructor(options = {}) {
|
|
204
|
+
super();
|
|
205
|
+
this.name = options.name ?? "ash";
|
|
206
|
+
this.description = options.description ?? "ASH pipeline DSL for deterministic data pipelines";
|
|
207
|
+
this.scriptsDir = options.scriptsDir;
|
|
208
|
+
this.dataDir = options.dataDir;
|
|
209
|
+
const rawAllow = options.allowRootActions;
|
|
210
|
+
const allowList = Array.isArray(rawAllow) ? rawAllow : typeof rawAllow === "string" ? [rawAllow] : ["agent-run"];
|
|
211
|
+
const validActions = new Set([
|
|
212
|
+
"agent-run",
|
|
213
|
+
"mount",
|
|
214
|
+
"unmount",
|
|
215
|
+
"read",
|
|
216
|
+
"list",
|
|
217
|
+
"write",
|
|
218
|
+
"delete"
|
|
219
|
+
]);
|
|
220
|
+
const normalized = allowList.filter((a) => validActions.has(a));
|
|
221
|
+
this.allowRootActions = new Set(normalized.length > 0 ? normalized : ["agent-run"]);
|
|
222
|
+
}
|
|
223
|
+
async ready() {
|
|
224
|
+
if (this.dirsReady) return;
|
|
225
|
+
if (this.scriptsDir) await mkdir(this.scriptsDir, { recursive: true });
|
|
226
|
+
if (this.dataDir) await mkdir(this.dataDir, { recursive: true });
|
|
227
|
+
this.dirsReady = true;
|
|
228
|
+
}
|
|
229
|
+
async ensureDirs() {
|
|
230
|
+
if (!this.dirsReady) await this.ready();
|
|
231
|
+
}
|
|
232
|
+
async getScript(name) {
|
|
233
|
+
if (this.scriptsDir) {
|
|
234
|
+
const filePath = join(this.scriptsDir, `${name}.ash`);
|
|
235
|
+
try {
|
|
236
|
+
return {
|
|
237
|
+
source: await readFile(filePath, "utf-8"),
|
|
238
|
+
createdAt: /* @__PURE__ */ new Date(),
|
|
239
|
+
updatedAt: /* @__PURE__ */ new Date()
|
|
240
|
+
};
|
|
241
|
+
} catch {
|
|
242
|
+
return;
|
|
243
|
+
}
|
|
244
|
+
}
|
|
245
|
+
return this.scripts.get(name);
|
|
246
|
+
}
|
|
247
|
+
async setScript(name, script) {
|
|
248
|
+
if (this.scriptsDir) {
|
|
249
|
+
await this.ensureDirs();
|
|
250
|
+
await writeFile(join(this.scriptsDir, `${name}.ash`), script.source, "utf-8");
|
|
251
|
+
}
|
|
252
|
+
this.scripts.set(name, script);
|
|
253
|
+
}
|
|
254
|
+
async hasScript(name) {
|
|
255
|
+
if (this.scriptsDir) try {
|
|
256
|
+
await stat(join(this.scriptsDir, `${name}.ash`));
|
|
257
|
+
return true;
|
|
258
|
+
} catch {
|
|
259
|
+
return false;
|
|
260
|
+
}
|
|
261
|
+
return this.scripts.has(name);
|
|
262
|
+
}
|
|
263
|
+
async removeScript(name) {
|
|
264
|
+
if (this.scriptsDir) try {
|
|
265
|
+
await rm(join(this.scriptsDir, `${name}.ash`));
|
|
266
|
+
} catch {}
|
|
267
|
+
this.scripts.delete(name);
|
|
268
|
+
}
|
|
269
|
+
async getAllScriptNames() {
|
|
270
|
+
if (this.scriptsDir) try {
|
|
271
|
+
return (await readdir(this.scriptsDir)).filter((f) => f.endsWith(".ash")).map((f) => f.replace(/\.ash$/, ""));
|
|
272
|
+
} catch {
|
|
273
|
+
return [];
|
|
274
|
+
}
|
|
275
|
+
return Array.from(this.scripts.keys());
|
|
276
|
+
}
|
|
277
|
+
async getScriptCount() {
|
|
278
|
+
return (await this.getAllScriptNames()).length;
|
|
279
|
+
}
|
|
280
|
+
/** Sanitize an ASH path to a flat filename: /data/clean → data%2Fclean.json */
|
|
281
|
+
dataFileName(path) {
|
|
282
|
+
return `${path.replace(/^\/+/, "").replace(/\//g, "%2F")}.json`;
|
|
283
|
+
}
|
|
284
|
+
/** Persist save output to dataDir */
|
|
285
|
+
async persistData(path, data) {
|
|
286
|
+
if (!this.dataDir) return;
|
|
287
|
+
await this.ensureDirs();
|
|
288
|
+
await writeFile(join(this.dataDir, this.dataFileName(path)), JSON.stringify(data), "utf-8");
|
|
289
|
+
}
|
|
290
|
+
/** Load all persisted data files into a dataStore */
|
|
291
|
+
async loadPersistedData() {
|
|
292
|
+
const store = {};
|
|
293
|
+
if (!this.dataDir) return store;
|
|
294
|
+
try {
|
|
295
|
+
const files = await readdir(this.dataDir);
|
|
296
|
+
for (const file of files) {
|
|
297
|
+
if (!file.endsWith(".json")) continue;
|
|
298
|
+
const key = `/${file.replace(/\.json$/, "").replace(/%2F/gi, "/")}`;
|
|
299
|
+
try {
|
|
300
|
+
store[key] = JSON.parse(await readFile(join(this.dataDir, file), "utf-8"));
|
|
301
|
+
} catch {}
|
|
302
|
+
}
|
|
303
|
+
} catch {}
|
|
304
|
+
return store;
|
|
305
|
+
}
|
|
306
|
+
onMount(afs) {
|
|
307
|
+
this.destroy();
|
|
308
|
+
this.afsRoot = afs;
|
|
309
|
+
this.initTriggers().catch(() => {});
|
|
310
|
+
if (afs.subscribe) this.externalScriptSubs = [afs.subscribe({ type: "script:registered" }, (evt) => {
|
|
311
|
+
if (evt.data?.runtime !== "ash") return;
|
|
312
|
+
this.registerExternalScript(evt.path).catch(() => {});
|
|
313
|
+
}), afs.subscribe({ type: "script:unregistered" }, (evt) => {
|
|
314
|
+
if (evt.data?.runtime !== "ash") return;
|
|
315
|
+
this.unregisterExternalScript(evt.path);
|
|
316
|
+
})];
|
|
317
|
+
}
|
|
318
|
+
async registerExternalScript(path) {
|
|
319
|
+
const afs = this.afsRoot;
|
|
320
|
+
if (!afs?.read) return;
|
|
321
|
+
try {
|
|
322
|
+
const file = await afs.read(path, {});
|
|
323
|
+
const source = String(file.data?.content ?? "");
|
|
324
|
+
if (!source.trim()) return;
|
|
325
|
+
this.externalScripts.set(path, source);
|
|
326
|
+
this.registerTriggers(path, source);
|
|
327
|
+
} catch {}
|
|
328
|
+
}
|
|
329
|
+
unregisterExternalScript(path) {
|
|
330
|
+
this.externalScripts.delete(path);
|
|
331
|
+
this.unregisterTriggers(path);
|
|
332
|
+
}
|
|
333
|
+
/** Stop all cron jobs and unsubscribe all event triggers. Call on provider teardown. */
|
|
334
|
+
destroy() {
|
|
335
|
+
const allNames = new Set([...this.triggerSubs.keys(), ...this.cronJobs.keys()]);
|
|
336
|
+
for (const name of allNames) this.unregisterTriggers(name);
|
|
337
|
+
for (const unsub of this.externalScriptSubs) unsub();
|
|
338
|
+
this.externalScriptSubs = [];
|
|
339
|
+
this.externalScripts.clear();
|
|
340
|
+
}
|
|
341
|
+
async initTriggers() {
|
|
342
|
+
await this.ensureDirs();
|
|
343
|
+
for (const name of await this.getAllScriptNames()) {
|
|
344
|
+
const script = await this.getScript(name);
|
|
345
|
+
if (script) this.registerTriggers(name, script.source);
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
/**
|
|
349
|
+
* Register EventBus subscriptions and cron schedules for all triggers in a script.
|
|
350
|
+
* Replaces any previous subscriptions/schedules for the same script name.
|
|
351
|
+
*/
|
|
352
|
+
registerTriggers(scriptName, source) {
|
|
353
|
+
this.unregisterTriggers(scriptName);
|
|
354
|
+
const result = compileSource(source);
|
|
355
|
+
if (!result.program) return;
|
|
356
|
+
const afs = this.afsRoot;
|
|
357
|
+
const unsubs = [];
|
|
358
|
+
const crons = [];
|
|
359
|
+
for (const unit of result.program.units) {
|
|
360
|
+
if (unit.kind !== "job" || !unit.trigger) continue;
|
|
361
|
+
const jobName = unit.name;
|
|
362
|
+
const trigger = unit.trigger;
|
|
363
|
+
if (trigger.kind === "event") {
|
|
364
|
+
if (!afs?.subscribe) continue;
|
|
365
|
+
const filter = { path: trigger.path };
|
|
366
|
+
const unsub = afs.subscribe(filter, (evt) => {
|
|
367
|
+
this.executeTriggerJob(scriptName, jobName, evt);
|
|
368
|
+
});
|
|
369
|
+
unsubs.push(unsub);
|
|
370
|
+
} else if (trigger.kind === "cron") try {
|
|
371
|
+
new CronPattern(trigger.expression);
|
|
372
|
+
const cron = new Cron(trigger.expression, { protect: true }, () => {
|
|
373
|
+
const syntheticEvent = {
|
|
374
|
+
type: "cron",
|
|
375
|
+
path: `/cron/${scriptName}/${jobName}`,
|
|
376
|
+
source: "ash-cron",
|
|
377
|
+
timestamp: Date.now()
|
|
378
|
+
};
|
|
379
|
+
this.executeTriggerJob(scriptName, jobName, syntheticEvent);
|
|
380
|
+
});
|
|
381
|
+
crons.push(cron);
|
|
382
|
+
} catch (err) {
|
|
383
|
+
console.error(`[ASH cron] Invalid cron expression "${trigger.expression}" in ${scriptName}/${jobName}:`, err);
|
|
384
|
+
}
|
|
385
|
+
}
|
|
386
|
+
if (unsubs.length > 0) this.triggerSubs.set(scriptName, unsubs);
|
|
387
|
+
if (crons.length > 0) this.cronJobs.set(scriptName, crons);
|
|
388
|
+
}
|
|
389
|
+
/** Unsubscribe all event triggers and stop all cron jobs for a given script. */
|
|
390
|
+
unregisterTriggers(scriptName) {
|
|
391
|
+
const unsubs = this.triggerSubs.get(scriptName);
|
|
392
|
+
if (unsubs) {
|
|
393
|
+
for (const unsub of unsubs) unsub();
|
|
394
|
+
this.triggerSubs.delete(scriptName);
|
|
395
|
+
}
|
|
396
|
+
const crons = this.cronJobs.get(scriptName);
|
|
397
|
+
if (crons) {
|
|
398
|
+
for (const cron of crons) cron.stop();
|
|
399
|
+
this.cronJobs.delete(scriptName);
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
/**
|
|
403
|
+
* Execute a single triggered job reactively in response to an AFS event.
|
|
404
|
+
* Re-compiles the script, builds a bridged world context, executes the
|
|
405
|
+
* target job with event data as the initial stream, and writes results
|
|
406
|
+
* back to AFS.
|
|
407
|
+
*/
|
|
408
|
+
async executeTriggerJob(scriptName, jobName, event) {
|
|
409
|
+
try {
|
|
410
|
+
let source;
|
|
411
|
+
const script = await this.getScript(scriptName);
|
|
412
|
+
if (script) source = script.source;
|
|
413
|
+
else if (this.externalScripts.has(scriptName)) source = this.externalScripts.get(scriptName);
|
|
414
|
+
else if (this.afsRoot?.read) try {
|
|
415
|
+
const file = await this.afsRoot.read(scriptName, {});
|
|
416
|
+
source = String(file.data?.content ?? "");
|
|
417
|
+
} catch {}
|
|
418
|
+
if (!source) return;
|
|
419
|
+
const result = compileSource(source);
|
|
420
|
+
if (!result.program) return;
|
|
421
|
+
const job = result.program.units.find((u) => u.kind === "job" && u.name === jobName);
|
|
422
|
+
if (!job || job.kind !== "job") return;
|
|
423
|
+
const { world, written } = await this.createAsyncWorld();
|
|
424
|
+
const ctx = {
|
|
425
|
+
world: this.afsRoot ? {
|
|
426
|
+
read: async (path) => {
|
|
427
|
+
const local = await world.read(path);
|
|
428
|
+
if (local.length > 0) return local;
|
|
429
|
+
if (this.afsRoot?.list) {
|
|
430
|
+
const listResult = await this.afsRoot.list(path, {});
|
|
431
|
+
if (listResult.data.length > 0) {
|
|
432
|
+
const needsEnrich = listResult.data.some((e) => e.content == null);
|
|
433
|
+
let entries = listResult.data;
|
|
434
|
+
if (needsEnrich && this.afsRoot.read) {
|
|
435
|
+
const afsRead = this.afsRoot.read.bind(this.afsRoot);
|
|
436
|
+
entries = await Promise.all(listResult.data.map(async (e) => {
|
|
437
|
+
if (e.content != null) return e;
|
|
438
|
+
try {
|
|
439
|
+
const r = await afsRead(e.path, {});
|
|
440
|
+
return r.data ? {
|
|
441
|
+
...e,
|
|
442
|
+
content: r.data.content
|
|
443
|
+
} : e;
|
|
444
|
+
} catch {
|
|
445
|
+
return e;
|
|
446
|
+
}
|
|
447
|
+
}));
|
|
448
|
+
}
|
|
449
|
+
return entries.map((entry) => ({
|
|
450
|
+
path: entry.path,
|
|
451
|
+
name: entry.path.split("/").filter(Boolean).pop(),
|
|
452
|
+
...entry.meta,
|
|
453
|
+
...normalizeContent(entry.content)
|
|
454
|
+
}));
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
if (this.afsRoot?.read) try {
|
|
458
|
+
const readResult = await this.afsRoot.read(path, {});
|
|
459
|
+
if (readResult.data) {
|
|
460
|
+
const entry = readResult.data;
|
|
461
|
+
return [{
|
|
462
|
+
path: entry.path ?? path,
|
|
463
|
+
name: (entry.path ?? path).split("/").filter(Boolean).pop(),
|
|
464
|
+
...entry.meta,
|
|
465
|
+
...normalizeContent(entry.content)
|
|
466
|
+
}];
|
|
467
|
+
}
|
|
468
|
+
} catch {}
|
|
469
|
+
return [];
|
|
470
|
+
},
|
|
471
|
+
write: (path, data) => {
|
|
472
|
+
world.write(path, data);
|
|
473
|
+
},
|
|
474
|
+
publish: (topic, data) => {
|
|
475
|
+
world.publish(topic, data);
|
|
476
|
+
},
|
|
477
|
+
exec: world.exec
|
|
478
|
+
} : world,
|
|
479
|
+
caps: new Set(["*"]),
|
|
480
|
+
logger: { log() {} },
|
|
481
|
+
output: { output() {} }
|
|
482
|
+
};
|
|
483
|
+
const initialStream = [{ ...event }];
|
|
484
|
+
const jobResult = await job.execute(ctx, initialStream);
|
|
485
|
+
if (jobResult.status !== "ok") console.error(`[ASH trigger] ${scriptName}/${jobName} status=${jobResult.status}`, jobResult.errors);
|
|
486
|
+
const afs = this.afsRoot;
|
|
487
|
+
if (afs?.write) for (const [path, data] of Object.entries(written)) {
|
|
488
|
+
const content = JSON.stringify(data);
|
|
489
|
+
await afs.write(path, { content });
|
|
490
|
+
}
|
|
491
|
+
} catch (err) {
|
|
492
|
+
console.error(`[ASH trigger] ${scriptName}/${jobName} threw:`, err);
|
|
493
|
+
}
|
|
494
|
+
}
|
|
495
|
+
async listRoot(_ctx) {
|
|
496
|
+
return { data: [{
|
|
497
|
+
id: "/scripts",
|
|
498
|
+
path: "/scripts",
|
|
499
|
+
summary: "Stored ASH scripts",
|
|
500
|
+
meta: {
|
|
501
|
+
kind: "ash:directory",
|
|
502
|
+
type: "directory",
|
|
503
|
+
childrenCount: await this.getScriptCount()
|
|
504
|
+
}
|
|
505
|
+
}, {
|
|
506
|
+
id: "/cookbook",
|
|
507
|
+
path: "/cookbook",
|
|
508
|
+
summary: "ASH best practices and quick reference",
|
|
509
|
+
meta: {
|
|
510
|
+
kind: "ash:directory",
|
|
511
|
+
type: "directory",
|
|
512
|
+
childrenCount: 2
|
|
513
|
+
}
|
|
514
|
+
}] };
|
|
515
|
+
}
|
|
516
|
+
async listScripts(_ctx) {
|
|
517
|
+
const entries = [];
|
|
518
|
+
for (const name of await this.getAllScriptNames()) {
|
|
519
|
+
const script = await this.getScript(name);
|
|
520
|
+
if (!script) continue;
|
|
521
|
+
entries.push(this.buildScriptEntry(name, script));
|
|
522
|
+
}
|
|
523
|
+
return { data: entries };
|
|
524
|
+
}
|
|
525
|
+
async listScript(ctx) {
|
|
526
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
527
|
+
if (!await this.hasScript(name)) throw new AFSNotFoundError(ctx.path);
|
|
528
|
+
return { data: [] };
|
|
529
|
+
}
|
|
530
|
+
async listRootActions(_ctx) {
|
|
531
|
+
return { data: [
|
|
532
|
+
{
|
|
533
|
+
id: "run",
|
|
534
|
+
path: "/.actions/run",
|
|
535
|
+
summary: "Execute inline ASH code",
|
|
536
|
+
meta: {
|
|
537
|
+
kind: "afs:executable",
|
|
538
|
+
kinds: ["afs:executable", "afs:node"],
|
|
539
|
+
name: "run",
|
|
540
|
+
description: "Execute inline ASH pipeline code",
|
|
541
|
+
inputSchema: {
|
|
542
|
+
type: "object",
|
|
543
|
+
properties: {
|
|
544
|
+
source: {
|
|
545
|
+
type: "string",
|
|
546
|
+
description: "ASH pipeline source code. Read /.meta/.ash-reference for syntax."
|
|
547
|
+
},
|
|
548
|
+
caps: {
|
|
549
|
+
type: "array",
|
|
550
|
+
items: { type: "string" },
|
|
551
|
+
description: "Capability paths. Required for write-back to AFS (default: no write-back without caps)"
|
|
552
|
+
}
|
|
553
|
+
},
|
|
554
|
+
required: ["source"]
|
|
555
|
+
}
|
|
556
|
+
}
|
|
557
|
+
},
|
|
558
|
+
{
|
|
559
|
+
id: "validate",
|
|
560
|
+
path: "/.actions/validate",
|
|
561
|
+
summary: "Validate ASH source code",
|
|
562
|
+
meta: {
|
|
563
|
+
kind: "afs:executable",
|
|
564
|
+
kinds: ["afs:executable", "afs:node"],
|
|
565
|
+
name: "validate",
|
|
566
|
+
description: "Compile and type-check ASH source, return diagnostics",
|
|
567
|
+
inputSchema: {
|
|
568
|
+
type: "object",
|
|
569
|
+
properties: { source: {
|
|
570
|
+
type: "string",
|
|
571
|
+
description: "ASH pipeline source code to validate. Read /.meta/.ash-reference for syntax."
|
|
572
|
+
} },
|
|
573
|
+
required: ["source"]
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
},
|
|
577
|
+
{
|
|
578
|
+
id: "agent-run",
|
|
579
|
+
path: "/.actions/agent-run",
|
|
580
|
+
summary: "Run a single-task agentic loop",
|
|
581
|
+
meta: {
|
|
582
|
+
kind: "afs:executable",
|
|
583
|
+
kinds: ["afs:executable", "afs:node"],
|
|
584
|
+
name: "agent-run",
|
|
585
|
+
description: "Execute a multi-round agentic loop: LLM inference → tool_call validation → ASH execution → result feedback. Stops when the LLM returns text or budget exhausts.",
|
|
586
|
+
inputSchema: {
|
|
587
|
+
type: "object",
|
|
588
|
+
properties: {
|
|
589
|
+
task: {
|
|
590
|
+
type: "string",
|
|
591
|
+
description: "The task for the agent to accomplish"
|
|
592
|
+
},
|
|
593
|
+
model: {
|
|
594
|
+
type: "string",
|
|
595
|
+
description: "AigneHub model path, e.g. /modules/aignehub/defaults/chat"
|
|
596
|
+
},
|
|
597
|
+
tools: {
|
|
598
|
+
oneOf: [{
|
|
599
|
+
type: "array",
|
|
600
|
+
items: {
|
|
601
|
+
type: "object",
|
|
602
|
+
properties: {
|
|
603
|
+
path: {
|
|
604
|
+
type: "string",
|
|
605
|
+
description: "AFS path pattern. Use * for single-level glob, ** for multi-level glob"
|
|
606
|
+
},
|
|
607
|
+
ops: {
|
|
608
|
+
type: "array",
|
|
609
|
+
items: {
|
|
610
|
+
type: "string",
|
|
611
|
+
enum: [
|
|
612
|
+
"read",
|
|
613
|
+
"list",
|
|
614
|
+
"exec",
|
|
615
|
+
"search",
|
|
616
|
+
"write",
|
|
617
|
+
"stat",
|
|
618
|
+
"explain"
|
|
619
|
+
]
|
|
620
|
+
}
|
|
621
|
+
},
|
|
622
|
+
exclude_actions: {
|
|
623
|
+
type: "array",
|
|
624
|
+
items: { type: "string" },
|
|
625
|
+
description: "Action names to exclude from exec"
|
|
626
|
+
},
|
|
627
|
+
maxDepth: {
|
|
628
|
+
type: "number",
|
|
629
|
+
description: "Max directory depth for ** glob (default: 0, i.e. ** with no maxDepth matches nothing)"
|
|
630
|
+
}
|
|
631
|
+
},
|
|
632
|
+
required: ["path", "ops"]
|
|
633
|
+
}
|
|
634
|
+
}, {
|
|
635
|
+
type: "string",
|
|
636
|
+
enum: ["none"],
|
|
637
|
+
description: "Use 'none' for no tools (LLM-only)"
|
|
638
|
+
}],
|
|
639
|
+
description: "AFS capabilities the agent may use, or 'none' for LLM-only mode"
|
|
640
|
+
},
|
|
641
|
+
budget: {
|
|
642
|
+
type: "object",
|
|
643
|
+
properties: {
|
|
644
|
+
max_rounds: {
|
|
645
|
+
type: "number",
|
|
646
|
+
description: "Max LLM rounds (default 20)"
|
|
647
|
+
},
|
|
648
|
+
actions_per_round: {
|
|
649
|
+
type: "number",
|
|
650
|
+
description: "Max tool calls per round (default 10)"
|
|
651
|
+
},
|
|
652
|
+
total_tokens: {
|
|
653
|
+
type: "number",
|
|
654
|
+
description: "Token budget across all rounds"
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
},
|
|
658
|
+
system: {
|
|
659
|
+
type: "string",
|
|
660
|
+
description: "System prompt for the LLM"
|
|
661
|
+
}
|
|
662
|
+
},
|
|
663
|
+
required: ["task", "model"]
|
|
664
|
+
}
|
|
665
|
+
}
|
|
666
|
+
}
|
|
667
|
+
] };
|
|
668
|
+
}
|
|
669
|
+
async listScriptActions(ctx) {
|
|
670
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
671
|
+
const script = await this.getScript(name);
|
|
672
|
+
if (!script) throw new AFSNotFoundError(ctx.path);
|
|
673
|
+
const inputSchema = this.buildScriptExecInputSchema(script.source);
|
|
674
|
+
return { data: [{
|
|
675
|
+
id: "exec",
|
|
676
|
+
path: joinURL(joinURL("/scripts", ctx.params.scriptName, ".actions"), "exec"),
|
|
677
|
+
summary: `Execute script ${name}`,
|
|
678
|
+
meta: {
|
|
679
|
+
kind: "afs:executable",
|
|
680
|
+
kinds: ["afs:executable", "afs:node"],
|
|
681
|
+
name: "exec",
|
|
682
|
+
description: `Execute the ${name} ASH script`,
|
|
683
|
+
inputSchema
|
|
684
|
+
}
|
|
685
|
+
}] };
|
|
686
|
+
}
|
|
687
|
+
async readRoot(_ctx) {
|
|
688
|
+
return {
|
|
689
|
+
id: "/",
|
|
690
|
+
path: "/",
|
|
691
|
+
meta: {
|
|
692
|
+
type: "directory",
|
|
693
|
+
childrenCount: 2
|
|
694
|
+
},
|
|
695
|
+
actions: [{
|
|
696
|
+
name: "run",
|
|
697
|
+
description: "Execute inline ASH code"
|
|
698
|
+
}, {
|
|
699
|
+
name: "validate",
|
|
700
|
+
description: "Validate ASH source code"
|
|
701
|
+
}]
|
|
702
|
+
};
|
|
703
|
+
}
|
|
704
|
+
async readScriptsDir(_ctx) {
|
|
705
|
+
return {
|
|
706
|
+
id: "/scripts",
|
|
707
|
+
path: "/scripts",
|
|
708
|
+
summary: "Stored ASH scripts",
|
|
709
|
+
meta: {
|
|
710
|
+
kind: "ash:directory",
|
|
711
|
+
type: "directory",
|
|
712
|
+
childrenCount: await this.getScriptCount()
|
|
713
|
+
}
|
|
714
|
+
};
|
|
715
|
+
}
|
|
716
|
+
async readScript(ctx) {
|
|
717
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
718
|
+
const script = await this.getScript(name);
|
|
719
|
+
if (!script) throw new AFSNotFoundError(ctx.path, `Script not found: ${name}`);
|
|
720
|
+
return this.buildScriptEntry(name, script);
|
|
721
|
+
}
|
|
722
|
+
static COOKBOOK_FILES = ["patterns.md", "quickref.md"];
|
|
723
|
+
async listCookbook(_ctx) {
|
|
724
|
+
return { data: AFSAsh.COOKBOOK_FILES.map((file) => ({
|
|
725
|
+
id: joinURL("/cookbook", file),
|
|
726
|
+
path: joinURL("/cookbook", file),
|
|
727
|
+
summary: file === "patterns.md" ? "agent-run patterns and pitfalls" : "Quick reference card",
|
|
728
|
+
meta: { kind: "ash:documentation" }
|
|
729
|
+
})) };
|
|
730
|
+
}
|
|
731
|
+
async readCookbookDir(_ctx) {
|
|
732
|
+
return {
|
|
733
|
+
id: "/cookbook",
|
|
734
|
+
path: "/cookbook",
|
|
735
|
+
meta: {
|
|
736
|
+
kind: "ash:directory",
|
|
737
|
+
type: "directory",
|
|
738
|
+
childrenCount: AFSAsh.COOKBOOK_FILES.length
|
|
739
|
+
}
|
|
740
|
+
};
|
|
741
|
+
}
|
|
742
|
+
async listCookbookFile(_ctx) {
|
|
743
|
+
return { data: [] };
|
|
744
|
+
}
|
|
745
|
+
async readCookbook(ctx) {
|
|
746
|
+
const file = ctx.params.file;
|
|
747
|
+
if (!AFSAsh.COOKBOOK_FILES.includes(file)) throw new AFSNotFoundError(ctx.path);
|
|
748
|
+
return {
|
|
749
|
+
id: joinURL("/cookbook", file),
|
|
750
|
+
path: joinURL("/cookbook", file),
|
|
751
|
+
content: readDoc(joinURL("cookbook", file)),
|
|
752
|
+
meta: { kind: "ash:documentation" }
|
|
753
|
+
};
|
|
754
|
+
}
|
|
755
|
+
async readAgentMd(_ctx) {
|
|
756
|
+
return {
|
|
757
|
+
id: "/.meta/agent.md",
|
|
758
|
+
path: "/.meta/agent.md",
|
|
759
|
+
content: readDoc("agent.md"),
|
|
760
|
+
meta: {
|
|
761
|
+
kind: "ash:documentation",
|
|
762
|
+
description: "ASH agent usage guide"
|
|
763
|
+
}
|
|
764
|
+
};
|
|
765
|
+
}
|
|
766
|
+
async readAshReference(_ctx) {
|
|
767
|
+
return {
|
|
768
|
+
id: "/.meta/.ash-reference",
|
|
769
|
+
path: "/.meta/.ash-reference",
|
|
770
|
+
content: ASH_REFERENCE,
|
|
771
|
+
meta: {
|
|
772
|
+
kind: "ash:reference",
|
|
773
|
+
description: "ASH language reference for writing valid ASH pipelines"
|
|
774
|
+
}
|
|
775
|
+
};
|
|
776
|
+
}
|
|
777
|
+
async readCapabilities(_ctx) {
|
|
778
|
+
return {
|
|
779
|
+
id: "/.meta/.capabilities",
|
|
780
|
+
path: "/.meta/.capabilities",
|
|
781
|
+
content: {
|
|
782
|
+
schemaVersion: 1,
|
|
783
|
+
provider: this.name,
|
|
784
|
+
description: `${this.description}. Read /.meta/.ash-reference for ASH syntax.`,
|
|
785
|
+
tools: [],
|
|
786
|
+
operations: this.getOperationsDeclaration(),
|
|
787
|
+
actions: [{
|
|
788
|
+
description: "Root-level ASH actions",
|
|
789
|
+
catalog: [{
|
|
790
|
+
name: "run",
|
|
791
|
+
description: "Execute inline ASH code"
|
|
792
|
+
}, {
|
|
793
|
+
name: "validate",
|
|
794
|
+
description: "Validate ASH source code"
|
|
795
|
+
}],
|
|
796
|
+
discovery: { pathTemplate: "/.actions" }
|
|
797
|
+
}, {
|
|
798
|
+
kind: "ash:script",
|
|
799
|
+
description: "Script-level actions",
|
|
800
|
+
catalog: [{
|
|
801
|
+
name: "exec",
|
|
802
|
+
description: "Execute the script"
|
|
803
|
+
}],
|
|
804
|
+
discovery: { pathTemplate: "/scripts/:scriptName/.actions" }
|
|
805
|
+
}]
|
|
806
|
+
},
|
|
807
|
+
meta: {
|
|
808
|
+
kind: "afs:capabilities",
|
|
809
|
+
description: "ASH provider capabilities manifest",
|
|
810
|
+
operations: [
|
|
811
|
+
"list",
|
|
812
|
+
"read",
|
|
813
|
+
"write",
|
|
814
|
+
"delete",
|
|
815
|
+
"exec",
|
|
816
|
+
"stat",
|
|
817
|
+
"explain",
|
|
818
|
+
"search"
|
|
819
|
+
]
|
|
820
|
+
}
|
|
821
|
+
};
|
|
822
|
+
}
|
|
823
|
+
async readRootMeta(_ctx) {
|
|
824
|
+
return {
|
|
825
|
+
id: "/.meta",
|
|
826
|
+
path: "/.meta",
|
|
827
|
+
content: {
|
|
828
|
+
type: "directory",
|
|
829
|
+
childrenCount: 2
|
|
830
|
+
},
|
|
831
|
+
meta: {
|
|
832
|
+
type: "directory",
|
|
833
|
+
childrenCount: 2
|
|
834
|
+
}
|
|
835
|
+
};
|
|
836
|
+
}
|
|
837
|
+
async readCookbookMeta(_ctx) {
|
|
838
|
+
return {
|
|
839
|
+
id: "/cookbook/.meta",
|
|
840
|
+
path: "/cookbook/.meta",
|
|
841
|
+
content: {
|
|
842
|
+
kind: "ash:directory",
|
|
843
|
+
type: "directory",
|
|
844
|
+
childrenCount: AFSAsh.COOKBOOK_FILES.length
|
|
845
|
+
},
|
|
846
|
+
meta: {
|
|
847
|
+
kind: "ash:directory",
|
|
848
|
+
type: "directory",
|
|
849
|
+
childrenCount: AFSAsh.COOKBOOK_FILES.length
|
|
850
|
+
}
|
|
851
|
+
};
|
|
852
|
+
}
|
|
853
|
+
async readCookbookFileMeta(ctx) {
|
|
854
|
+
return {
|
|
855
|
+
id: joinURL("/cookbook", ctx.params.file, ".meta"),
|
|
856
|
+
path: joinURL("/cookbook", ctx.params.file, ".meta"),
|
|
857
|
+
content: { kind: "ash:documentation" },
|
|
858
|
+
meta: { kind: "ash:documentation" }
|
|
859
|
+
};
|
|
860
|
+
}
|
|
861
|
+
async readScriptsMeta(_ctx) {
|
|
862
|
+
return {
|
|
863
|
+
id: "/scripts/.meta",
|
|
864
|
+
path: "/scripts/.meta",
|
|
865
|
+
content: {
|
|
866
|
+
type: "directory",
|
|
867
|
+
childrenCount: await this.getScriptCount()
|
|
868
|
+
},
|
|
869
|
+
meta: {
|
|
870
|
+
type: "directory",
|
|
871
|
+
childrenCount: await this.getScriptCount()
|
|
872
|
+
}
|
|
873
|
+
};
|
|
874
|
+
}
|
|
875
|
+
async readScriptMeta(ctx) {
|
|
876
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
877
|
+
const script = await this.getScript(name);
|
|
878
|
+
if (!script) throw new AFSNotFoundError(ctx.path, `Script not found: ${name}`);
|
|
879
|
+
const entry = this.buildScriptEntry(name, script);
|
|
880
|
+
return {
|
|
881
|
+
id: joinURL("/scripts", ctx.params.scriptName, ".meta"),
|
|
882
|
+
path: joinURL("/scripts", ctx.params.scriptName, ".meta"),
|
|
883
|
+
content: entry.meta ?? {},
|
|
884
|
+
meta: entry.meta ?? {}
|
|
885
|
+
};
|
|
886
|
+
}
|
|
887
|
+
async writeScript(ctx, entry) {
|
|
888
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
889
|
+
const now = /* @__PURE__ */ new Date();
|
|
890
|
+
const existing = await this.getScript(name);
|
|
891
|
+
const rawContent = typeof entry === "string" ? entry : entry.content;
|
|
892
|
+
if (rawContent === void 0 && existing) {
|
|
893
|
+
const stored$1 = {
|
|
894
|
+
source: existing.source,
|
|
895
|
+
createdAt: existing.createdAt,
|
|
896
|
+
updatedAt: now
|
|
897
|
+
};
|
|
898
|
+
await this.setScript(name, stored$1);
|
|
899
|
+
return {
|
|
900
|
+
data: this.buildScriptEntry(name, stored$1),
|
|
901
|
+
message: "Script updated"
|
|
902
|
+
};
|
|
903
|
+
}
|
|
904
|
+
const source = typeof rawContent === "string" ? rawContent : JSON.stringify(rawContent);
|
|
905
|
+
const stored = {
|
|
906
|
+
source,
|
|
907
|
+
createdAt: existing?.createdAt ?? now,
|
|
908
|
+
updatedAt: now
|
|
909
|
+
};
|
|
910
|
+
await this.setScript(name, stored);
|
|
911
|
+
this.registerTriggers(name, source);
|
|
912
|
+
return {
|
|
913
|
+
data: this.buildScriptEntry(name, stored),
|
|
914
|
+
message: existing ? "Script updated" : "Script created"
|
|
915
|
+
};
|
|
916
|
+
}
|
|
917
|
+
async deleteScript(ctx) {
|
|
918
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
919
|
+
if (!await this.hasScript(name)) throw new AFSNotFoundError(ctx.path, `Script not found: ${name}`);
|
|
920
|
+
this.unregisterTriggers(name);
|
|
921
|
+
await this.removeScript(name);
|
|
922
|
+
return { message: `Script '${name}' deleted` };
|
|
923
|
+
}
|
|
924
|
+
// Catch-all delete handler: nothing outside the explicit routes is
// deletable, so any other path is reported as not found.
async deleteCatchAll(ctx) {
	throw new AFSNotFoundError(ctx.path);
}
|
|
927
|
+
async execScript(ctx, args) {
|
|
928
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
929
|
+
const script = await this.getScript(name);
|
|
930
|
+
if (!script) return {
|
|
931
|
+
success: false,
|
|
932
|
+
data: { error: `Script not found: ${name}` }
|
|
933
|
+
};
|
|
934
|
+
return this.runAsh(script.source, args);
|
|
935
|
+
}
|
|
936
|
+
// Direct-exec route alias: behaves exactly like the action-based
// execScript handler.
async execScriptDirect(ctx, args) {
	return this.execScript(ctx, args);
}
|
|
939
|
+
// Dispatch root-level actions: `run` (execute inline ASH, optionally a
// single named job), `validate` (compile-only check), and `agent-run`.
// Unknown action names throw.
async execRootAction(ctx, args) {
	const actionName = ctx.params.action;
	if (actionName === "run") {
		const source = args.source;
		if (typeof source !== "string") throw new Error("action 'run' requires 'source' parameter as string");
		const jobName = args.job;
		// A job name routes to runJob, forwarding the runtime AFS override.
		if (jobName) {
			const runtimeAFS$1 = args._runtime_afs;
			return this.runJob(source, jobName, args, { afsOverride: runtimeAFS$1 });
		}
		// Write-back to AFS is only enabled when caps were supplied.
		const hasCaps = Array.isArray(args.caps) && args.caps.length > 0;
		const runtimeAFS = args._runtime_afs;
		// Strip the internal _runtime_afs plumbing key before passing args on.
		const cleanArgs = { ...args };
		delete cleanArgs._runtime_afs;
		return this.runAsh(source, cleanArgs, {
			allowWriteBack: hasCaps,
			afsOverride: runtimeAFS
		});
	}
	if (actionName === "validate") {
		const source = args.source;
		if (typeof source !== "string") throw new Error("action 'validate' requires 'source' parameter as string");
		const result = compileSource(source);
		// Warnings do not affect validity; only non-warning diagnostics do.
		const errors = result.diagnostics.filter((d) => d.severity !== "warning");
		const warnings = result.diagnostics.filter((d) => d.severity === "warning");
		return {
			success: true,
			data: {
				valid: errors.length === 0,
				errors,
				warnings,
				jobCount: result.program?.jobs.length ?? 0,
				// sourceHash lets callers pin the exact source for a later run
				// (see the TOCTOU check in runAsh).
				sourceHash: result.sourceHash
			}
		};
	}
	if (actionName === "agent-run") return this.executeAgentRun(args);
	throw new Error(`Unknown action: ${actionName}`);
}
|
|
978
|
+
async statRoot(_ctx) {
|
|
979
|
+
return { data: {
|
|
980
|
+
id: "/",
|
|
981
|
+
path: "/",
|
|
982
|
+
meta: {
|
|
983
|
+
type: "directory",
|
|
984
|
+
childrenCount: 1,
|
|
985
|
+
scriptCount: await this.getScriptCount()
|
|
986
|
+
}
|
|
987
|
+
} };
|
|
988
|
+
}
|
|
989
|
+
async statScriptsDir(_ctx) {
|
|
990
|
+
return { data: {
|
|
991
|
+
id: "/scripts",
|
|
992
|
+
path: "/scripts",
|
|
993
|
+
meta: {
|
|
994
|
+
type: "directory",
|
|
995
|
+
childrenCount: await this.getScriptCount()
|
|
996
|
+
}
|
|
997
|
+
} };
|
|
998
|
+
}
|
|
999
|
+
async statScript(ctx) {
|
|
1000
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
1001
|
+
const script = await this.getScript(name);
|
|
1002
|
+
if (!script) throw new AFSNotFoundError(ctx.path, `Script not found: ${name}`);
|
|
1003
|
+
return { data: {
|
|
1004
|
+
id: joinURL("/scripts", `${name}.ash`),
|
|
1005
|
+
path: joinURL("/scripts", `${name}.ash`),
|
|
1006
|
+
createdAt: script.createdAt,
|
|
1007
|
+
updatedAt: script.updatedAt,
|
|
1008
|
+
meta: {
|
|
1009
|
+
type: "file",
|
|
1010
|
+
kind: "ash:script",
|
|
1011
|
+
scriptName: name,
|
|
1012
|
+
size: new TextEncoder().encode(script.source).length
|
|
1013
|
+
}
|
|
1014
|
+
} };
|
|
1015
|
+
}
|
|
1016
|
+
// Root `explain` handler: returns the bundled agent guide as markdown.
async explainRoot(_ctx) {
	return {
		format: "markdown",
		content: readDoc("agent.md")
	};
}
|
|
1022
|
+
async explainScript(ctx) {
|
|
1023
|
+
const name = ctx.params.scriptName.replace(/\.ash$/, "");
|
|
1024
|
+
const script = await this.getScript(name);
|
|
1025
|
+
if (!script) throw new AFSNotFoundError(ctx.path, `Script not found: ${name}`);
|
|
1026
|
+
const result = compileSource(script.source);
|
|
1027
|
+
const lines = [];
|
|
1028
|
+
lines.push(`# Script: ${name}.ash`);
|
|
1029
|
+
lines.push("");
|
|
1030
|
+
lines.push("## Source");
|
|
1031
|
+
lines.push("");
|
|
1032
|
+
lines.push("```ash");
|
|
1033
|
+
lines.push(script.source);
|
|
1034
|
+
lines.push("```");
|
|
1035
|
+
lines.push("");
|
|
1036
|
+
lines.push("## Analysis");
|
|
1037
|
+
lines.push("");
|
|
1038
|
+
lines.push(`- **Jobs**: ${result.program?.jobs.length ?? 0}`);
|
|
1039
|
+
lines.push(`- **Diagnostics**: ${result.diagnostics.length}`);
|
|
1040
|
+
lines.push(`- **Size**: ${new TextEncoder().encode(script.source).length} bytes`);
|
|
1041
|
+
lines.push("");
|
|
1042
|
+
return {
|
|
1043
|
+
format: "markdown",
|
|
1044
|
+
content: lines.join("\n")
|
|
1045
|
+
};
|
|
1046
|
+
}
|
|
1047
|
+
async searchScripts(_ctx, query) {
|
|
1048
|
+
const results = [];
|
|
1049
|
+
const lowerQuery = query.toLowerCase();
|
|
1050
|
+
for (const name of await this.getAllScriptNames()) {
|
|
1051
|
+
const script = await this.getScript(name);
|
|
1052
|
+
if (!script) continue;
|
|
1053
|
+
if (name.toLowerCase().includes(lowerQuery) || script.source.toLowerCase().includes(lowerQuery)) results.push(this.buildScriptEntry(name, script));
|
|
1054
|
+
}
|
|
1055
|
+
return { data: results };
|
|
1056
|
+
}
|
|
1057
|
+
buildScriptEntry(name, script) {
|
|
1058
|
+
const inputSchema = this.buildScriptExecInputSchema(script.source);
|
|
1059
|
+
return {
|
|
1060
|
+
id: joinURL("/scripts", `${name}.ash`),
|
|
1061
|
+
path: joinURL("/scripts", `${name}.ash`),
|
|
1062
|
+
content: script.source,
|
|
1063
|
+
createdAt: script.createdAt,
|
|
1064
|
+
updatedAt: script.updatedAt,
|
|
1065
|
+
meta: {
|
|
1066
|
+
type: "file",
|
|
1067
|
+
kind: "ash:script",
|
|
1068
|
+
kinds: [
|
|
1069
|
+
"ash:script",
|
|
1070
|
+
"afs:executable",
|
|
1071
|
+
"afs:node"
|
|
1072
|
+
],
|
|
1073
|
+
scriptName: name,
|
|
1074
|
+
size: new TextEncoder().encode(script.source).length
|
|
1075
|
+
},
|
|
1076
|
+
actions: [{
|
|
1077
|
+
name: "exec",
|
|
1078
|
+
description: `Execute script ${name}`,
|
|
1079
|
+
inputSchema
|
|
1080
|
+
}]
|
|
1081
|
+
};
|
|
1082
|
+
}
|
|
1083
|
+
buildScriptExecInputSchema(source) {
|
|
1084
|
+
const properties = { caps: {
|
|
1085
|
+
type: "array",
|
|
1086
|
+
items: { type: "string" },
|
|
1087
|
+
description: "Capability paths. Required for write-back to AFS (default: no write-back without caps)"
|
|
1088
|
+
} };
|
|
1089
|
+
const result = compileSource(source);
|
|
1090
|
+
if (result.program) for (const [key, value] of result.program.params) properties[key] = {
|
|
1091
|
+
type: typeof value === "number" ? "number" : "string",
|
|
1092
|
+
description: `Override ASH param '${key}' at execution time`
|
|
1093
|
+
};
|
|
1094
|
+
return {
|
|
1095
|
+
type: "object",
|
|
1096
|
+
properties
|
|
1097
|
+
};
|
|
1098
|
+
}
|
|
1099
|
+
/**
 * Create an async-capable world that bridges ASH to AFS.
 * Since ASH's WorldInterface is synchronous but AFS is async,
 * we collect operations and replay them, or use a pre-loaded data approach.
 *
 * Returns the world plus bookkeeping maps (`written`, `published`,
 * `writeCounts`), a `preload` hook, and `flushPersist` which awaits all
 * fire-and-forget persistData calls queued by `write`.
 */
async createAsyncWorld(afsOverride) {
	// Seed the in-memory store with previously persisted data (shallow copy).
	const dataStore = { ...await this.loadPersistedData() };
	const written = {};
	const published = {};
	const writeCounts = {};
	const afsRoot = afsOverride ?? this.afsRoot;
	const selfPrefix = `/${this.name}/`;
	const allowRootActions = this.allowRootActions;
	// Persistence is fire-and-forget; failures are deliberately swallowed
	// and the promises are collected so flushPersist can await them.
	const pendingPersist = [];
	const persistData = (path, data) => {
		pendingPersist.push(this.persistData(path, data).catch(() => {}));
	};
	return {
		world: {
			read(path) {
				return dataStore[path] ?? [];
			},
			write(path, data) {
				// Protect the provider's own namespaces from script writes.
				if (path.startsWith(selfPrefix) || path.startsWith("/scripts") || path.startsWith("/.actions") || path.startsWith("/.meta")) throw new Error(`Write blocked: '${path}' is a protected ASH provider path`);
				// Track repeated writes so runAsh can warn about clobbering.
				writeCounts[path] = (writeCounts[path] ?? 0) + 1;
				written[path] = data;
				dataStore[path] = data;
				persistData(path, data);
			},
			publish(topic, data) {
				published[topic] = data;
			},
			async exec(path, input, params) {
				// Allow explicitly whitelisted root actions (e.g. on this
				// provider's own mount) but block any other self-referencing
				// exec, which would recurse back into this provider.
				const rootActionMatch = /^\/\.actions\/([^/]+)$/.exec(path) ?? (new RegExp(`^${selfPrefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\.actions\\/([^/]+)$`)).exec(path);
				if (!(rootActionMatch !== null && allowRootActions.has(rootActionMatch[1])) && (path.startsWith(selfPrefix) || path.startsWith("/.actions") || path.startsWith("/scripts"))) throw new Error(`Exec blocked: self-referencing call to '${path}' would cause recursion`);
				if (afsRoot?.exec) {
					const baseParams = params ?? {};
					// Only object records are forwarded as exec arguments.
					const records = input.filter((r) => typeof r === "object" && r !== null);
					if (records.length <= 1) {
						const mergedArgs = {
							...records[0] ?? {},
							...baseParams
						};
						if (afsOverride) mergedArgs._runtime_afs = afsOverride;
						const result = await afsRoot.exec(path, mergedArgs, {});
						return Array.isArray(result.data) ? result.data : [result.data];
					}
					// Multiple records: exec once per record, flattening
					// array results into a single output stream.
					const results = [];
					for (const record of records) {
						const mergedArgs = {
							...record,
							...baseParams
						};
						if (afsOverride) mergedArgs._runtime_afs = afsOverride;
						const result = await afsRoot.exec(path, mergedArgs, {});
						if (Array.isArray(result.data)) results.push(...result.data);
						else results.push(result.data);
					}
					return results;
				}
				// No AFS exec available: pass the input stream through unchanged.
				return input;
			},
			async input(prompt) {
				// Delegate interactive prompts to a UI provider if mounted;
				// a missing /ui provider yields an empty answer.
				if (afsRoot?.exec) try {
					const result = await afsRoot.exec("/ui/.actions/prompt", {
						message: prompt,
						type: "text"
					}, {});
					return String(result.data?.response ?? "");
				} catch (err) {
					if (err instanceof AFSNotFoundError) return "";
					throw err;
				}
				return "";
			}
		},
		written,
		published,
		writeCounts,
		preload(path, data) {
			dataStore[path] = data;
		},
		async flushPersist() {
			await Promise.all(pendingPersist);
		}
	};
}
|
|
1186
|
+
deriveTools(agentDir, extraTools, channelPath) {
|
|
1187
|
+
const base = [{
|
|
1188
|
+
path: `${agentDir}/**`,
|
|
1189
|
+
ops: [
|
|
1190
|
+
"read",
|
|
1191
|
+
"list",
|
|
1192
|
+
"write",
|
|
1193
|
+
"search",
|
|
1194
|
+
"stat",
|
|
1195
|
+
"exec"
|
|
1196
|
+
],
|
|
1197
|
+
maxDepth: 10
|
|
1198
|
+
}, {
|
|
1199
|
+
path: "/modules/ash/.actions/*",
|
|
1200
|
+
ops: ["exec"]
|
|
1201
|
+
}];
|
|
1202
|
+
if (channelPath) base.push({
|
|
1203
|
+
path: `${channelPath}/**`,
|
|
1204
|
+
ops: ["exec"],
|
|
1205
|
+
maxDepth: 5
|
|
1206
|
+
});
|
|
1207
|
+
return [...base, ...extraTools];
|
|
1208
|
+
}
|
|
1209
|
+
// Run an agent loop against the mounted AFS. Resolves the effective AFS
// (runtime override vs global root), optionally bootstraps a program AFS
// from `agent_dir`/program.yaml, derives the toolset, then drives the
// dynamically imported runAgentLoop with LLM/AFS/history adapters.
async executeAgentRun(args) {
	const runtimeAFSOverride = args._runtime_afs;
	// Strip the internal plumbing key before using args further.
	const cleanArgs = { ...args };
	delete cleanArgs._runtime_afs;
	const task = cleanArgs.task;
	if (typeof task !== "string" || !task) throw new Error("agent-run requires 'task' parameter as string");
	const model = cleanArgs.model;
	if (typeof model !== "string" || !model) throw new Error("agent-run requires 'model' parameter as string");
	const rawTools = cleanArgs.tools;
	// "none"/undefined means an empty toolset; anything else must be an array.
	let tools = rawTools === "none" || rawTools === void 0 ? [] : rawTools;
	if (!Array.isArray(tools)) throw new Error("agent-run 'tools' must be an array or 'none'");
	const globalAFS = this.afsRoot;
	if (!globalAFS && !runtimeAFSOverride) throw new Error("agent-run requires AFS root to be mounted");
	const agentDir = cleanArgs.agent_dir;
	let targetAFS = runtimeAFSOverride ?? globalAFS;
	// Case 1: runtime AFS override + agent dir — derive tools from its
	// program manifest if one exists (best-effort, errors ignored).
	if (runtimeAFSOverride && agentDir) try {
		const yamlResult = await runtimeAFSOverride.read(joinURL(agentDir, "program.yaml"), {});
		const yamlContent = String(yamlResult.data?.content ?? "");
		if (yamlContent.trim()) tools = deriveToolsFromProgram(parseProgramManifest(yamlContent));
	} catch {}
	// Case 2: global AFS + agent dir — probe for program.yaml; if present,
	// build a scoped program AFS, otherwise fall back to default tools.
	// NOTE(review): program.yaml is read twice here (probe + content) —
	// looks like the probe result could be reused; confirm before changing.
	else if (agentDir && globalAFS) {
		let isProgram = false;
		try {
			await globalAFS.read(joinURL(agentDir, "program.yaml"), {});
			isProgram = true;
		} catch {}
		if (isProgram) try {
			const pyResult = await globalAFS.read(joinURL(agentDir, "program.yaml"), {});
			const dataPath = joinURL("/.data", parseProgramManifest(String(pyResult.data?.content ?? "")).id);
			// Ensure the program's data directory exists (best-effort).
			try {
				await globalAFS.write(joinURL(dataPath, ".keep"), { content: "" });
			} catch {}
			const { afs: runtimeAFS, manifest } = await createProgramAFS(agentDir, dataPath, globalAFS);
			targetAFS = runtimeAFS;
			tools = deriveToolsFromProgram(manifest);
		} catch (err) {
			return {
				success: false,
				error: {
					code: "PROGRAM_INIT_ERROR",
					message: sanitizeErrorPath(err instanceof Error ? err.message : String(err))
				}
			};
		}
		else {
			const extraTools = cleanArgs.extra_tools ?? tools;
			const channelPath = cleanArgs.channel_path;
			tools = this.deriveTools(agentDir, extraTools, channelPath);
		}
	}
	const afs = targetAFS;
	// Agent loop is loaded lazily to keep it out of the main bundle.
	const { runAgentLoop } = await import("./agent-run-CXJQ0jPV.mjs");
	let enrichedTask = task;
	if (typeof cleanArgs.text === "string" && cleanArgs.text) enrichedTask += `\n\nUser message: ${cleanArgs.text}`;
	const result = await runAgentLoop({
		task: enrichedTask,
		model,
		tools,
		budget: cleanArgs.budget,
		system: cleanArgs.system,
		session: cleanArgs.session
	}, {
		// Adapter: chat-completion via the model provider mounted in AFS.
		callLLM: async (llmArgs) => {
			if (!afs.exec) return {
				success: false,
				data: { error: "exec not supported" }
			};
			// Bare model names resolve under the default aignehub providers.
			let resolvedModel = model;
			if (!resolvedModel.startsWith("/")) resolvedModel = `/modules/aignehub/providers/${resolvedModel}`;
			const modelPath = `${resolvedModel}/.actions/chat`;
			// Translate OpenAI-style message fields to the provider's shape:
			// assistant->agent role, tool_call_id->toolCallId, and JSON-parse
			// stringified tool-call arguments (left as-is on parse failure).
			const translated = { ...llmArgs };
			if (Array.isArray(translated.messages)) translated.messages = translated.messages.map((msg) => {
				const m = { ...msg };
				if (m.role === "assistant") m.role = "agent";
				if (m.role === "tool" && m.tool_call_id && !m.toolCallId) {
					m.toolCallId = m.tool_call_id;
					delete m.tool_call_id;
				}
				if (Array.isArray(m.toolCalls)) m.toolCalls = m.toolCalls.map((tc) => {
					const fn = tc.function;
					if (fn && typeof fn.arguments === "string") try {
						return {
							...tc,
							function: {
								...fn,
								arguments: JSON.parse(fn.arguments)
							}
						};
					} catch {
						return tc;
					}
					return tc;
				});
				return m;
			});
			const raw = await afs.exec(modelPath, translated, {});
			// Flatten a nested provider result into {text, toolCalls, tokens}.
			if (raw.success && raw.data) {
				const result$1 = raw.data.result;
				if (result$1) {
					const usage = result$1.usage;
					raw.data = {
						text: result$1.text,
						toolCalls: result$1.toolCalls,
						inputTokens: usage?.inputTokens,
						outputTokens: usage?.outputTokens
					};
				}
			}
			return raw;
		},
		// Adapter: run inline ASH without persisting /.results writes.
		runAsh: async (source, runArgs) => {
			return this.runAsh(source, runArgs, {
				returnWrittenData: true,
				skipWritePrefix: "/.results/"
			});
		},
		// Adapter: direct AFS operations exposed to the agent loop.
		callAFS: async (op, path, opArgs) => {
			switch (op) {
				case "read": return {
					success: true,
					data: (await afs.read(path, opArgs ?? {})).data
				};
				case "list": return {
					success: true,
					data: (await afs.list(path, opArgs ?? {})).data
				};
				case "search":
					if (!afs.search) return {
						success: false,
						data: { error: "search not supported" }
					};
					return {
						success: true,
						data: { results: (await afs.search(path, opArgs?.query ?? "", opArgs ?? {})).data }
					};
				case "write":
					if (!afs.write) return {
						success: false,
						data: { error: "write not supported" }
					};
					return {
						success: true,
						data: { written: (await afs.write(path, opArgs ?? {})).data }
					};
				case "stat":
					if (!afs.stat) return {
						success: false,
						data: { error: "stat not supported" }
					};
					return {
						success: true,
						data: { stat: (await afs.stat(path)).data }
					};
				case "explain":
					if (!afs.explain) return {
						success: false,
						data: { error: "explain not supported" }
					};
					return {
						success: true,
						data: { content: (await afs.explain(path)).content }
					};
				case "exec": {
					if (!afs.exec) return {
						success: false,
						data: { error: "exec not supported" }
					};
					// Accept either {args: {...}} or a flat argument object.
					const execArgs = opArgs?.args && typeof opArgs.args === "object" && !Array.isArray(opArgs.args) ? opArgs.args : opArgs ?? {};
					return afs.exec(path, execArgs, {});
				}
				default: return {
					success: false,
					data: { error: `Unsupported direct op: ${op}` }
				};
			}
		},
		// Adapter: session history stored as JSONL; unreadable lines and a
		// missing file both degrade to an empty history.
		loadHistory: async (sessionPath) => {
			const historyPath = `${sessionPath}/history.jsonl`;
			try {
				const result$1 = await afs.read(historyPath, {});
				const content = String(result$1.data?.content ?? "");
				if (!content.trim()) return [];
				return content.trim().split("\n").flatMap((line) => {
					try {
						return [JSON.parse(line)];
					} catch {
						return [];
					}
				});
			} catch {
				return [];
			}
		},
		saveHistory: async (sessionPath, messages) => {
			const historyPath = `${sessionPath}/history.jsonl`;
			const content = `${messages.map((m) => JSON.stringify(m)).join("\n")}\n`;
			await afs.write(historyPath, { content });
		}
	});
	return {
		success: result.status === "completed",
		data: result
	};
}
|
|
1413
|
+
/**
 * Execute a single named job from an ASH source.
 * Used by ProgramManager to run trigger jobs with event data.
 *
 * Compiles the source (with param overrides from `args`), locates the job
 * by name, runs it against a bridged world (local data first, then AFS
 * list/read fallback), and replays collected writes back to AFS.
 */
async runJob(source, jobName, args, options) {
	const result = compileSource(this.applyParamOverridesToSource(source, args));
	if (!result.program) return {
		success: false,
		data: {
			error: "Compilation failed",
			diagnostics: result.diagnostics
		}
	};
	const job = result.program.units.find((u) => u.kind === "job" && u.name === jobName);
	if (!job || job.kind !== "job") return {
		success: false,
		data: { error: `Job "${jobName}" not found in source` }
	};
	const afsOverride = options?.afsOverride;
	// NOTE(review): flushPersist from createAsyncWorld is not awaited here
	// (unlike runAsh) — confirm whether trigger jobs should flush too.
	const { world, written } = await this.createAsyncWorld(afsOverride);
	const effectiveAFS = afsOverride ?? this.afsRoot;
	const ctx = {
		// Bridge world: prefer locally stored data; otherwise fall back to
		// AFS list (enriching entries missing content) and then AFS read.
		world: effectiveAFS ? {
			read: async (path) => {
				const local = await world.read(path);
				if (local.length > 0) return local;
				if (effectiveAFS.list) {
					const listResult = await effectiveAFS.list(path, {});
					if (listResult.data.length > 0) {
						const needsEnrich = listResult.data.some((e) => e.content == null);
						let entries = listResult.data;
						if (needsEnrich && effectiveAFS.read) {
							const afsRead = effectiveAFS.read.bind(effectiveAFS);
							entries = await Promise.all(listResult.data.map(async (e) => {
								if (e.content != null) return e;
								try {
									const r = await afsRead(e.path, {});
									return r.data ? {
										...e,
										content: r.data.content
									} : e;
								} catch {
									return e;
								}
							}));
						}
						return entries.map((entry) => ({
							path: entry.path,
							name: entry.path.split("/").filter(Boolean).pop(),
							...entry.meta,
							...normalizeContent(entry.content)
						}));
					}
				}
				if (effectiveAFS.read) try {
					const readResult = await effectiveAFS.read(path, {});
					if (readResult.data) {
						const entry = readResult.data;
						return [{
							path: entry.path ?? path,
							name: (entry.path ?? path).split("/").filter(Boolean).pop(),
							...entry.meta,
							...normalizeContent(entry.content)
						}];
					}
				} catch {}
				return [];
			},
			write: (path, data) => {
				world.write(path, data);
			},
			publish: (topic, data) => {
				world.publish(topic, data);
			},
			exec: world.exec
		} : world,
		// Trigger jobs run with all capabilities; logging/output discarded.
		caps: new Set(["*"]),
		logger: { log() {} },
		output: { output() {} }
	};
	// Seed the job's input stream with a copy of the trigger event, if any.
	const event = args.event;
	const initialStream = event ? [{ ...event }] : [];
	const jobResult = await job.execute(ctx, initialStream);
	// Replay collected writes to AFS as serialized JSON content.
	const afs = effectiveAFS;
	if (afs?.write) for (const [path, data] of Object.entries(written)) {
		const content = JSON.stringify(data);
		await afs.write(path, { content });
	}
	if (jobResult.status !== "ok") return {
		success: false,
		data: {
			error: `Job "${jobName}" failed`,
			status: jobResult.status,
			errors: jobResult.errors
		}
	};
	return {
		success: true,
		data: {
			job: jobName,
			recordCount: jobResult.recordCount
		}
	};
}
|
|
1517
|
+
// Compile and run a whole ASH program: executes all non-trigger,
// non-route-target jobs, then (caps permitting) writes collected
// writes/publishes back to AFS. Returns job results, outputs, warnings,
// and the source hash; success requires all jobs ok and no errors.
async runAsh(source, args, options) {
	const result = compileSource(this.applyParamOverridesToSource(source, args));
	// Any non-warning diagnostic (or a missing program) aborts the run.
	if (result.diagnostics.filter((d) => d.severity !== "warning").length > 0 || !result.program) return {
		success: false,
		data: {
			error: "Compilation failed",
			diagnostics: result.diagnostics.map((d) => ({
				...d,
				message: sanitizeErrorPath(d.message)
			})),
			sourceHash: result.sourceHash
		}
	};
	// TOCTOU guard: callers who validated earlier can pin the exact source.
	const expectedHash = args.expectedHash;
	if (typeof expectedHash === "string" && expectedHash !== result.sourceHash) return {
		success: false,
		data: {
			error: "Source hash mismatch — code was modified between validate and run (TOCTOU)",
			expectedHash,
			actualHash: result.sourceHash
		}
	};
	// Caps default to everything when no valid string array was supplied.
	const capsArg = args.caps;
	const caps = Array.isArray(capsArg) && capsArg.every((c) => typeof c === "string") ? new Set(capsArg) : new Set(["*"]);
	const afsOverride = options?.afsOverride;
	const { world, written, published, writeCounts, flushPersist } = await this.createAsyncWorld(afsOverride);
	const effectiveAFS = afsOverride ?? this.afsRoot;
	// Bridge world: local data first, then AFS list (enriching entries
	// that lack content) and finally AFS read as fallback.
	const bridgedWorld = effectiveAFS ? {
		read: async (path) => {
			const local = await world.read(path);
			if (local.length > 0) return local;
			if (effectiveAFS) {
				if (effectiveAFS.list) {
					const listResult = await effectiveAFS.list(path, {});
					if (listResult.data.length > 0) {
						const needsEnrich = listResult.data.some((e) => e.content == null);
						let entries = listResult.data;
						if (needsEnrich && effectiveAFS.read) {
							const afsRead = effectiveAFS.read.bind(effectiveAFS);
							entries = await Promise.all(listResult.data.map(async (e) => {
								if (e.content != null) return e;
								try {
									const r = await afsRead(e.path, {});
									return r.data ? {
										...e,
										content: r.data.content
									} : e;
								} catch {
									return e;
								}
							}));
						}
						return entries.map((entry) => ({
							path: entry.path,
							name: entry.path.split("/").filter(Boolean).pop(),
							...entry.meta,
							...normalizeContent(entry.content)
						}));
					}
				}
				if (effectiveAFS.read) try {
					const readResult = await effectiveAFS.read(path, {});
					if (readResult.data) {
						const entry = readResult.data;
						return [{
							path: entry.path ?? path,
							name: (entry.path ?? path).split("/").filter(Boolean).pop(),
							...entry.meta,
							...normalizeContent(entry.content)
						}];
					}
				} catch {}
			}
			return [];
		},
		write: (path, data) => {
			world.write(path, data);
		},
		publish: (topic, data) => {
			world.publish(topic, data);
		},
		exec: world.exec
	} : world;
	// NOTE(review): `logs` is collected by the logger but never returned
	// in the result — confirm whether that is intentional.
	const logs = [];
	const logger = { log(stage, action, detail) {
		logs.push({
			stage,
			action,
			detail
		});
	} };
	const outputs = [];
	const ctx = {
		world: bridgedWorld,
		caps,
		logger,
		output: { output(event) {
			outputs.push(event.content);
		} }
	};
	// Run every unit; jobs that are route targets or trigger-driven are
	// skipped here (they run via routing/triggers instead).
	const jobResults = [];
	for (const unit of result.program.units) if (unit.kind === "job") {
		if (result.program.routeTargets?.has(unit.name)) continue;
		if (unit.trigger) continue;
		jobResults.push(await unit.execute(ctx));
	} else await unit.execute(ctx);
	// Strip per-stage details from the returned job results.
	const sanitizedJobs = jobResults.map(({ stages, ...rest }) => rest);
	const allOk = jobResults.every((r) => r.status === "ok");
	const allErrors = jobResults.flatMap((r) => r.errors).map(sanitizeErrorPath);
	const afs = afsOverride ?? this.afsRoot;
	const skipPrefix = options?.skipWritePrefix;
	// Write-back (writes and publishes) unless explicitly disabled; paths
	// under skipWritePrefix are excluded.
	if (afs?.write && options?.allowWriteBack !== false) {
		for (const [path, data] of Object.entries(written)) {
			if (skipPrefix && path.startsWith(skipPrefix)) continue;
			try {
				await afs.write(path, { content: JSON.stringify(data) });
			} catch {
				allErrors.push("Failed to write to AFS");
			}
		}
		for (const [topic, data] of Object.entries(published)) try {
			await afs.write(topic, { content: JSON.stringify(data) });
		} catch {
			allErrors.push("Failed to publish to AFS");
		}
	}
	// Multiple writes to one path lose all but the last — surface as errors.
	const overwrittenPaths = Object.entries(writeCounts).filter(([, count]) => count > 1).map(([path, count]) => sanitizeErrorPath(`Path '${path}' was written ${count} times — only last write preserved`));
	if (overwrittenPaths.length > 0) allErrors.push(...overwrittenPaths);
	// Wait for all fire-and-forget persistence started during execution.
	await flushPersist();
	return {
		success: allOk && allErrors.length === 0 && overwrittenPaths.length === 0,
		data: {
			jobs: sanitizedJobs,
			written: Object.keys(written),
			published: Object.keys(published),
			outputs,
			errors: allErrors.length > 0 ? allErrors : void 0,
			warnings: result.diagnostics.filter((d) => d.severity === "warning").map((d) => ({
				...d,
				message: sanitizeErrorPath(d.message)
			})),
			sourceHash: result.sourceHash,
			...options?.returnWrittenData ? { writtenData: written } : {}
		}
	};
}
|
|
1663
|
+
applyParamOverridesToSource(source, args) {
|
|
1664
|
+
const lines = source.split("\n");
|
|
1665
|
+
let changed = false;
|
|
1666
|
+
const replaced = lines.map((line) => {
|
|
1667
|
+
const match = /^(\s*param\s+([A-Za-z_][A-Za-z0-9_]*)\s*=\s*)(.+)$/.exec(line);
|
|
1668
|
+
if (!match) return line;
|
|
1669
|
+
const [, prefix, name] = match;
|
|
1670
|
+
if (!name) return line;
|
|
1671
|
+
const value = args[name];
|
|
1672
|
+
if (typeof value !== "string" && typeof value !== "number") return line;
|
|
1673
|
+
changed = true;
|
|
1674
|
+
return `${prefix}${typeof value === "string" ? JSON.stringify(value) : String(value)}`;
|
|
1675
|
+
});
|
|
1676
|
+
return changed ? replaced.join("\n") : source;
|
|
1677
|
+
}
|
|
1678
|
+
};
|
|
1679
|
+
// Generated decorator lowering: each __decorate call applies one AFS route
// decorator (List/Read/Meta/Write/Delete/Exec/Actions/Stat/Explain/Search,
// imported from @aigne/afs/provider) to the matching AFSAsh.prototype method.
// NOTE(review): calls run in source order; the decorator factories are
// invoked here, then applied by __decorate — do not reorder.

// Directory/entry listings.
__decorate([List("/")], AFSAsh.prototype, "listRoot", null);
__decorate([List("/scripts")], AFSAsh.prototype, "listScripts", null);
__decorate([List("/scripts/:scriptName")], AFSAsh.prototype, "listScript", null);

// Action discovery (what operations a path supports).
__decorate([Actions("/")], AFSAsh.prototype, "listRootActions", null);
__decorate([Actions("/scripts/:scriptName")], AFSAsh.prototype, "listScriptActions", null);

// Reads for root and stored scripts.
__decorate([Read("/")], AFSAsh.prototype, "readRoot", null);
__decorate([Read("/scripts")], AFSAsh.prototype, "readScriptsDir", null);
__decorate([Read("/scripts/:scriptName")], AFSAsh.prototype, "readScript", null);

// Bundled cookbook documentation.
__decorate([List("/cookbook")], AFSAsh.prototype, "listCookbook", null);
__decorate([Read("/cookbook")], AFSAsh.prototype, "readCookbookDir", null);
__decorate([List("/cookbook/:file")], AFSAsh.prototype, "listCookbookFile", null);
__decorate([Read("/cookbook/:file")], AFSAsh.prototype, "readCookbook", null);

// Agent-facing metadata documents.
__decorate([Read("/.meta/agent.md")], AFSAsh.prototype, "readAgentMd", null);
__decorate([Read("/.meta/.ash-reference")], AFSAsh.prototype, "readAshReference", null);
__decorate([Read("/.meta/.capabilities")], AFSAsh.prototype, "readCapabilities", null);

// Meta (attribute) endpoints.
__decorate([Meta("/")], AFSAsh.prototype, "readRootMeta", null);
__decorate([Meta("/cookbook")], AFSAsh.prototype, "readCookbookMeta", null);
__decorate([Meta("/cookbook/:file")], AFSAsh.prototype, "readCookbookFileMeta", null);
__decorate([Meta("/scripts")], AFSAsh.prototype, "readScriptsMeta", null);
__decorate([Meta("/scripts/:scriptName")], AFSAsh.prototype, "readScriptMeta", null);

// Mutations: script create/update and deletion (deleteCatchAll handles all
// other paths via the /:path* wildcard).
__decorate([Write("/scripts/:scriptName")], AFSAsh.prototype, "writeScript", null);
__decorate([Delete("/scripts/:scriptName")], AFSAsh.prototype, "deleteScript", null);
__decorate([Delete("/:path*")], AFSAsh.prototype, "deleteCatchAll", null);

// Execution: named "exec" action, direct Exec on a script, and root actions.
__decorate([Actions.Exec("/scripts/:scriptName", "exec")], AFSAsh.prototype, "execScript", null);
__decorate([Exec("/scripts/:scriptName")], AFSAsh.prototype, "execScriptDirect", null);
__decorate([Actions.Exec("/")], AFSAsh.prototype, "execRootAction", null);

// Stat (existence/type) endpoints.
__decorate([Stat("/")], AFSAsh.prototype, "statRoot", null);
__decorate([Stat("/scripts")], AFSAsh.prototype, "statScriptsDir", null);
__decorate([Stat("/scripts/:scriptName")], AFSAsh.prototype, "statScript", null);

// Explain (human-readable description) and search.
__decorate([Explain("/")], AFSAsh.prototype, "explainRoot", null);
__decorate([Explain("/scripts/:scriptName")], AFSAsh.prototype, "explainScript", null);
__decorate([Search("/:path*")], AFSAsh.prototype, "searchScripts", null);
|
|
1711
|
+
// Default export alias for AFSAsh (bundler-generated; `var` is kept because
// generated bundles may rely on var hoisting across regions — TODO confirm
// before tightening to const).
var src_default = AFSAsh;

//#endregion
// Public module surface: the provider class (named + default) and the
// tool-derivation helper.
export { AFSAsh, src_default as default, deriveToolsFromProgram };
//# sourceMappingURL=index.mjs.map
|