@agentuity/cli 0.0.63 → 0.0.65
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cmd/build/ast.d.ts.map +1 -1
- package/dist/cmd/build/ast.js +70 -4
- package/dist/cmd/build/ast.js.map +1 -1
- package/dist/cmd/build/ast.test.js +186 -1
- package/dist/cmd/build/ast.test.js.map +1 -1
- package/dist/cmd/build/bundler.d.ts +2 -24
- package/dist/cmd/build/bundler.d.ts.map +1 -1
- package/dist/cmd/build/bundler.js +24 -41
- package/dist/cmd/build/bundler.js.map +1 -1
- package/dist/cmd/build/index.js +22 -23
- package/dist/cmd/build/index.js.map +1 -1
- package/dist/cmd/build/plugin.d.ts.map +1 -1
- package/dist/cmd/build/plugin.js +5 -4
- package/dist/cmd/build/plugin.js.map +1 -1
- package/dist/cmd/build/workbench-templates.d.ts +1 -1
- package/dist/cmd/build/workbench-templates.d.ts.map +1 -1
- package/dist/cmd/build/workbench-templates.js +6 -19
- package/dist/cmd/build/workbench-templates.js.map +1 -1
- package/dist/cmd/cloud/apikey/create.d.ts.map +1 -1
- package/dist/cmd/cloud/apikey/create.js +7 -16
- package/dist/cmd/cloud/apikey/create.js.map +1 -1
- package/dist/cmd/cloud/db/create.d.ts.map +1 -1
- package/dist/cmd/cloud/db/create.js +3 -2
- package/dist/cmd/cloud/db/create.js.map +1 -1
- package/dist/cmd/cloud/db/get.d.ts.map +1 -1
- package/dist/cmd/cloud/db/get.js +72 -3
- package/dist/cmd/cloud/db/get.js.map +1 -1
- package/dist/cmd/cloud/db/index.d.ts.map +1 -1
- package/dist/cmd/cloud/db/index.js +9 -1
- package/dist/cmd/cloud/db/index.js.map +1 -1
- package/dist/cmd/cloud/db/logs.d.ts +2 -0
- package/dist/cmd/cloud/db/logs.d.ts.map +1 -0
- package/dist/cmd/cloud/db/logs.js +150 -0
- package/dist/cmd/cloud/db/logs.js.map +1 -0
- package/dist/cmd/cloud/db/sql.d.ts.map +1 -1
- package/dist/cmd/cloud/db/sql.js +16 -51
- package/dist/cmd/cloud/db/sql.js.map +1 -1
- package/dist/cmd/cloud/deploy.d.ts.map +1 -1
- package/dist/cmd/cloud/deploy.js +3 -2
- package/dist/cmd/cloud/deploy.js.map +1 -1
- package/dist/cmd/cloud/deployment/list.d.ts.map +1 -1
- package/dist/cmd/cloud/deployment/list.js +1 -1
- package/dist/cmd/cloud/deployment/list.js.map +1 -1
- package/dist/cmd/cloud/deployment/logs.d.ts.map +1 -1
- package/dist/cmd/cloud/deployment/logs.js +1 -1
- package/dist/cmd/cloud/deployment/logs.js.map +1 -1
- package/dist/cmd/cloud/deployment/remove.js +1 -1
- package/dist/cmd/cloud/deployment/remove.js.map +1 -1
- package/dist/cmd/cloud/deployment/rollback.js +1 -1
- package/dist/cmd/cloud/deployment/rollback.js.map +1 -1
- package/dist/cmd/cloud/deployment/show.js +1 -1
- package/dist/cmd/cloud/deployment/show.js.map +1 -1
- package/dist/cmd/cloud/deployment/undeploy.js +1 -1
- package/dist/cmd/cloud/deployment/undeploy.js.map +1 -1
- package/dist/cmd/cloud/keyvalue/util.js +1 -1
- package/dist/cmd/cloud/keyvalue/util.js.map +1 -1
- package/dist/cmd/cloud/objectstore/util.js +1 -1
- package/dist/cmd/cloud/objectstore/util.js.map +1 -1
- package/dist/cmd/cloud/session/get.d.ts.map +1 -1
- package/dist/cmd/cloud/session/get.js +1 -1
- package/dist/cmd/cloud/session/get.js.map +1 -1
- package/dist/cmd/cloud/session/list.d.ts.map +1 -1
- package/dist/cmd/cloud/session/list.js +1 -1
- package/dist/cmd/cloud/session/list.js.map +1 -1
- package/dist/cmd/cloud/session/logs.d.ts.map +1 -1
- package/dist/cmd/cloud/session/logs.js +1 -1
- package/dist/cmd/cloud/session/logs.js.map +1 -1
- package/dist/cmd/cloud/stream/util.d.ts.map +1 -1
- package/dist/cmd/cloud/stream/util.js +2 -1
- package/dist/cmd/cloud/stream/util.js.map +1 -1
- package/dist/cmd/cloud/thread/delete.d.ts.map +1 -1
- package/dist/cmd/cloud/thread/delete.js +1 -1
- package/dist/cmd/cloud/thread/delete.js.map +1 -1
- package/dist/cmd/cloud/thread/get.d.ts.map +1 -1
- package/dist/cmd/cloud/thread/get.js +1 -1
- package/dist/cmd/cloud/thread/get.js.map +1 -1
- package/dist/cmd/cloud/thread/list.d.ts.map +1 -1
- package/dist/cmd/cloud/thread/list.js +1 -1
- package/dist/cmd/cloud/thread/list.js.map +1 -1
- package/dist/cmd/cloud/vector/util.js +1 -1
- package/dist/cmd/cloud/vector/util.js.map +1 -1
- package/dist/cmd/dev/index.d.ts.map +1 -1
- package/dist/cmd/dev/index.js +34 -1
- package/dist/cmd/dev/index.js.map +1 -1
- package/dist/cmd/profile/current.d.ts +3 -0
- package/dist/cmd/profile/current.d.ts.map +1 -0
- package/dist/cmd/profile/current.js +27 -0
- package/dist/cmd/profile/current.js.map +1 -0
- package/dist/cmd/profile/index.d.ts.map +1 -1
- package/dist/cmd/profile/index.js +9 -1
- package/dist/cmd/profile/index.js.map +1 -1
- package/dist/cmd/profile/show.d.ts.map +1 -1
- package/dist/cmd/profile/show.js +0 -1
- package/dist/cmd/profile/show.js.map +1 -1
- package/dist/command-prefix.d.ts.map +1 -1
- package/dist/command-prefix.js +2 -1
- package/dist/command-prefix.js.map +1 -1
- package/dist/config.d.ts +1 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +18 -5
- package/dist/config.js.map +1 -1
- package/dist/download.js +1 -1
- package/dist/download.js.map +1 -1
- package/dist/env-util.d.ts.map +1 -1
- package/dist/env-util.js +3 -0
- package/dist/env-util.js.map +1 -1
- package/dist/schemas/deploy.d.ts +24 -0
- package/dist/schemas/deploy.d.ts.map +1 -0
- package/dist/schemas/deploy.js +26 -0
- package/dist/schemas/deploy.js.map +1 -0
- package/dist/utils/workbench-notify.d.ts +29 -0
- package/dist/utils/workbench-notify.d.ts.map +1 -0
- package/dist/utils/workbench-notify.js +56 -0
- package/dist/utils/workbench-notify.js.map +1 -0
- package/package.json +3 -3
- package/src/cmd/build/ast.test.ts +246 -1
- package/src/cmd/build/ast.ts +88 -4
- package/src/cmd/build/bundler.ts +27 -44
- package/src/cmd/build/index.ts +23 -23
- package/src/cmd/build/plugin.ts +5 -4
- package/src/cmd/build/workbench-templates.ts +6 -21
- package/src/cmd/cloud/apikey/create.ts +7 -15
- package/src/cmd/cloud/db/create.ts +3 -2
- package/src/cmd/cloud/db/get.ts +85 -5
- package/src/cmd/cloud/db/index.ts +9 -1
- package/src/cmd/cloud/db/logs.ts +163 -0
- package/src/cmd/cloud/db/sql.ts +16 -66
- package/src/cmd/cloud/deploy.ts +3 -2
- package/src/cmd/cloud/deployment/list.ts +1 -4
- package/src/cmd/cloud/deployment/logs.ts +1 -4
- package/src/cmd/cloud/deployment/remove.ts +1 -1
- package/src/cmd/cloud/deployment/rollback.ts +1 -1
- package/src/cmd/cloud/deployment/show.ts +1 -1
- package/src/cmd/cloud/deployment/undeploy.ts +1 -1
- package/src/cmd/cloud/keyvalue/util.ts +1 -1
- package/src/cmd/cloud/objectstore/util.ts +1 -1
- package/src/cmd/cloud/session/get.ts +1 -4
- package/src/cmd/cloud/session/list.ts +1 -4
- package/src/cmd/cloud/session/logs.ts +1 -4
- package/src/cmd/cloud/stream/util.ts +4 -1
- package/src/cmd/cloud/thread/delete.ts +1 -4
- package/src/cmd/cloud/thread/get.ts +1 -4
- package/src/cmd/cloud/thread/list.ts +1 -4
- package/src/cmd/cloud/vector/util.ts +1 -1
- package/src/cmd/dev/index.ts +40 -1
- package/src/cmd/profile/current.ts +31 -0
- package/src/cmd/profile/index.ts +9 -1
- package/src/cmd/profile/show.ts +0 -1
- package/src/command-prefix.ts +4 -1
- package/src/config.ts +20 -5
- package/src/download.ts +1 -1
- package/src/env-util.ts +3 -0
- package/src/schemas/deploy.ts +28 -0
- package/src/utils/workbench-notify.ts +67 -0
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Utility to send notifications directly to workbench clients via WebSocket
|
|
3
|
+
*/
|
|
4
|
+
export interface WorkbenchNotifyOptions {
|
|
5
|
+
port?: number;
|
|
6
|
+
message: 'restarting' | 'alive';
|
|
7
|
+
}
|
|
8
|
+
/**
|
|
9
|
+
* Send a notification directly to workbench clients via WebSocket
|
|
10
|
+
*
|
|
11
|
+
* @param options - Configuration for the notification
|
|
12
|
+
* @returns Promise that resolves when notification is sent
|
|
13
|
+
*
|
|
14
|
+
* @example
|
|
15
|
+
* ```typescript
|
|
16
|
+
* // Notify clients that server is restarting
|
|
17
|
+
* await notifyWorkbenchClients({
|
|
18
|
+
* baseUrl: 'ws://localhost:3500',
|
|
19
|
+
* message: 'restarting'
|
|
20
|
+
* });
|
|
21
|
+
*
|
|
22
|
+
* // Notify clients that server is alive
|
|
23
|
+
* await notifyWorkbenchClients({
|
|
24
|
+
* message: 'alive'
|
|
25
|
+
* });
|
|
26
|
+
* ```
|
|
27
|
+
*/
|
|
28
|
+
export declare function notifyWorkbenchClients(options: WorkbenchNotifyOptions): Promise<void>;
|
|
29
|
+
//# sourceMappingURL=workbench-notify.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"workbench-notify.d.ts","sourceRoot":"","sources":["../../src/utils/workbench-notify.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,sBAAsB;IACtC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC;CAChC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAsB,sBAAsB,CAAC,OAAO,EAAE,sBAAsB,GAAG,OAAO,CAAC,IAAI,CAAC,CAqC3F"}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Utility to send notifications directly to workbench clients via WebSocket
|
|
3
|
+
*/
|
|
4
|
+
/**
|
|
5
|
+
* Send a notification directly to workbench clients via WebSocket
|
|
6
|
+
*
|
|
7
|
+
* @param options - Configuration for the notification
|
|
8
|
+
* @returns Promise that resolves when notification is sent
|
|
9
|
+
*
|
|
10
|
+
* @example
|
|
11
|
+
* ```typescript
|
|
12
|
+
* // Notify clients that server is restarting
|
|
13
|
+
* await notifyWorkbenchClients({
|
|
14
|
+
* baseUrl: 'ws://localhost:3500',
|
|
15
|
+
* message: 'restarting'
|
|
16
|
+
* });
|
|
17
|
+
*
|
|
18
|
+
* // Notify clients that server is alive
|
|
19
|
+
* await notifyWorkbenchClients({
|
|
20
|
+
* message: 'alive'
|
|
21
|
+
* });
|
|
22
|
+
* ```
|
|
23
|
+
*/
|
|
24
|
+
export async function notifyWorkbenchClients(options) {
|
|
25
|
+
const { port = 3500, message } = options;
|
|
26
|
+
const wsUrl = new URL(`ws://localhost:${port}`);
|
|
27
|
+
return new Promise((resolve) => {
|
|
28
|
+
try {
|
|
29
|
+
wsUrl.pathname = '/_agentuity/workbench/ws';
|
|
30
|
+
const ws = new WebSocket(wsUrl.toString());
|
|
31
|
+
// Set a timeout to avoid hanging
|
|
32
|
+
const timeout = setTimeout(() => {
|
|
33
|
+
ws.close();
|
|
34
|
+
resolve();
|
|
35
|
+
}, 2000);
|
|
36
|
+
ws.onopen = () => {
|
|
37
|
+
ws.send(message);
|
|
38
|
+
ws.close();
|
|
39
|
+
};
|
|
40
|
+
ws.onclose = () => {
|
|
41
|
+
clearTimeout(timeout);
|
|
42
|
+
resolve();
|
|
43
|
+
};
|
|
44
|
+
ws.onerror = () => {
|
|
45
|
+
clearTimeout(timeout);
|
|
46
|
+
ws.close();
|
|
47
|
+
resolve();
|
|
48
|
+
};
|
|
49
|
+
}
|
|
50
|
+
catch (_error) {
|
|
51
|
+
// Silently fail - this ensures the CLI doesn't fail if the app server isn't running
|
|
52
|
+
resolve();
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
//# sourceMappingURL=workbench-notify.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"workbench-notify.js","sourceRoot":"","sources":["../../src/utils/workbench-notify.ts"],"names":[],"mappings":"AAAA;;GAEG;AAOH;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,CAAC,KAAK,UAAU,sBAAsB,CAAC,OAA+B;IAC3E,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;IAEzC,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,kBAAkB,IAAI,EAAE,CAAC,CAAC;IAEhD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;QAC9B,IAAI,CAAC;YACJ,KAAK,CAAC,QAAQ,GAAG,0BAA0B,CAAC;YAE5C,MAAM,EAAE,GAAG,IAAI,SAAS,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;YAE3C,iCAAiC;YACjC,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE;gBAC/B,EAAE,CAAC,KAAK,EAAE,CAAC;gBACX,OAAO,EAAE,CAAC;YACX,CAAC,EAAE,IAAI,CAAC,CAAC;YAET,EAAE,CAAC,MAAM,GAAG,GAAG,EAAE;gBAChB,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACjB,EAAE,CAAC,KAAK,EAAE,CAAC;YACZ,CAAC,CAAC;YAEF,EAAE,CAAC,OAAO,GAAG,GAAG,EAAE;gBACjB,YAAY,CAAC,OAAO,CAAC,CAAC;gBACtB,OAAO,EAAE,CAAC;YACX,CAAC,CAAC;YAEF,EAAE,CAAC,OAAO,GAAG,GAAG,EAAE;gBACjB,YAAY,CAAC,OAAO,CAAC,CAAC;gBACtB,EAAE,CAAC,KAAK,EAAE,CAAC;gBACX,OAAO,EAAE,CAAC;YACX,CAAC,CAAC;QACH,CAAC;QAAC,OAAO,MAAM,EAAE,CAAC;YACjB,oFAAoF;YACpF,OAAO,EAAE,CAAC;QACX,CAAC;IACF,CAAC,CAAC,CAAC;AACJ,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@agentuity/cli",
|
|
3
|
-
"version": "0.0.63",
|
|
3
|
+
"version": "0.0.65",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"author": "Agentuity employees and contributors",
|
|
6
6
|
"type": "module",
|
|
@@ -35,8 +35,8 @@
|
|
|
35
35
|
"prepublishOnly": "bun run clean && bun run build"
|
|
36
36
|
},
|
|
37
37
|
"dependencies": {
|
|
38
|
-
"@agentuity/core": "0.0.63",
|
|
39
|
-
"@agentuity/server": "0.0.63",
|
|
38
|
+
"@agentuity/core": "0.0.65",
|
|
39
|
+
"@agentuity/server": "0.0.65",
|
|
40
40
|
"@datasert/cronjs-parser": "^1.4.0",
|
|
41
41
|
"@terascope/fetch-github-release": "^2.2.1",
|
|
42
42
|
"acorn-loose": "^8.5.2",
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { describe, test, expect } from 'bun:test';
|
|
2
|
-
import { parseRoute } from './ast';
|
|
2
|
+
import { parseRoute, parseAgentMetadata } from './ast';
|
|
3
3
|
import { writeFileSync, mkdirSync, rmSync } from 'node:fs';
|
|
4
4
|
import { join } from 'node:path';
|
|
5
5
|
|
|
@@ -171,3 +171,248 @@ export default router;
|
|
|
171
171
|
cleanup();
|
|
172
172
|
});
|
|
173
173
|
});
|
|
174
|
+
|
|
175
|
+
describe('parseAgentMetadata - Schema Code Extraction', () => {
|
|
176
|
+
const setup = () => {
|
|
177
|
+
rmSync(TEST_DIR, { recursive: true, force: true });
|
|
178
|
+
mkdirSync(TEST_DIR, { recursive: true });
|
|
179
|
+
};
|
|
180
|
+
|
|
181
|
+
const cleanup = () => {
|
|
182
|
+
rmSync(TEST_DIR, { recursive: true, force: true });
|
|
183
|
+
};
|
|
184
|
+
|
|
185
|
+
test('should extract input and output schema code', async () => {
|
|
186
|
+
setup();
|
|
187
|
+
const agentFile = join(TEST_DIR, 'agent.ts');
|
|
188
|
+
const code = `
|
|
189
|
+
import { createAgent } from '@agentuity/runtime';
|
|
190
|
+
import { z } from 'zod';
|
|
191
|
+
|
|
192
|
+
const agent = createAgent({
|
|
193
|
+
metadata: {
|
|
194
|
+
name: 'test-agent',
|
|
195
|
+
description: 'Test agent',
|
|
196
|
+
},
|
|
197
|
+
schema: {
|
|
198
|
+
input: z.object({
|
|
199
|
+
name: z.string(),
|
|
200
|
+
age: z.number(),
|
|
201
|
+
}),
|
|
202
|
+
output: z.object({
|
|
203
|
+
result: z.string(),
|
|
204
|
+
}),
|
|
205
|
+
},
|
|
206
|
+
handler: async (ctx, input) => {
|
|
207
|
+
return { result: 'success' };
|
|
208
|
+
},
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
export default agent;
|
|
212
|
+
`;
|
|
213
|
+
writeFileSync(agentFile, code);
|
|
214
|
+
|
|
215
|
+
const transpiler = new Bun.Transpiler({ loader: 'ts', target: 'bun' });
|
|
216
|
+
const contents = transpiler.transformSync(code);
|
|
217
|
+
const [, metadata] = await parseAgentMetadata(
|
|
218
|
+
TEST_DIR,
|
|
219
|
+
agentFile,
|
|
220
|
+
contents,
|
|
221
|
+
'proj_1',
|
|
222
|
+
'dep_1'
|
|
223
|
+
);
|
|
224
|
+
|
|
225
|
+
expect(metadata.has('inputSchemaCode')).toBe(true);
|
|
226
|
+
expect(metadata.has('outputSchemaCode')).toBe(true);
|
|
227
|
+
|
|
228
|
+
const inputSchemaCode = metadata.get('inputSchemaCode');
|
|
229
|
+
const outputSchemaCode = metadata.get('outputSchemaCode');
|
|
230
|
+
|
|
231
|
+
expect(inputSchemaCode).toContain('z.object');
|
|
232
|
+
expect(inputSchemaCode).toContain('name');
|
|
233
|
+
expect(inputSchemaCode).toContain('age');
|
|
234
|
+
|
|
235
|
+
expect(outputSchemaCode).toContain('z.object');
|
|
236
|
+
expect(outputSchemaCode).toContain('result');
|
|
237
|
+
|
|
238
|
+
cleanup();
|
|
239
|
+
});
|
|
240
|
+
|
|
241
|
+
test('should extract only input schema code when output is missing', async () => {
|
|
242
|
+
setup();
|
|
243
|
+
const agentFile = join(TEST_DIR, 'agent.ts');
|
|
244
|
+
const code = `
|
|
245
|
+
import { createAgent } from '@agentuity/runtime';
|
|
246
|
+
import { z } from 'zod';
|
|
247
|
+
|
|
248
|
+
const agent = createAgent({
|
|
249
|
+
metadata: {
|
|
250
|
+
name: 'test-agent',
|
|
251
|
+
},
|
|
252
|
+
schema: {
|
|
253
|
+
input: z.string(),
|
|
254
|
+
},
|
|
255
|
+
handler: async (ctx, input) => {
|
|
256
|
+
return 'success';
|
|
257
|
+
},
|
|
258
|
+
});
|
|
259
|
+
|
|
260
|
+
export default agent;
|
|
261
|
+
`;
|
|
262
|
+
writeFileSync(agentFile, code);
|
|
263
|
+
|
|
264
|
+
const transpiler = new Bun.Transpiler({ loader: 'ts', target: 'bun' });
|
|
265
|
+
const contents = transpiler.transformSync(code);
|
|
266
|
+
const [, metadata] = await parseAgentMetadata(
|
|
267
|
+
TEST_DIR,
|
|
268
|
+
agentFile,
|
|
269
|
+
contents,
|
|
270
|
+
'proj_1',
|
|
271
|
+
'dep_1'
|
|
272
|
+
);
|
|
273
|
+
|
|
274
|
+
expect(metadata.has('inputSchemaCode')).toBe(true);
|
|
275
|
+
expect(metadata.has('outputSchemaCode')).toBe(false);
|
|
276
|
+
|
|
277
|
+
const inputSchemaCode = metadata.get('inputSchemaCode');
|
|
278
|
+
expect(inputSchemaCode).toContain('z.string');
|
|
279
|
+
|
|
280
|
+
cleanup();
|
|
281
|
+
});
|
|
282
|
+
|
|
283
|
+
test('should extract only output schema code when input is missing', async () => {
|
|
284
|
+
setup();
|
|
285
|
+
const agentFile = join(TEST_DIR, 'agent.ts');
|
|
286
|
+
const code = `
|
|
287
|
+
import { createAgent } from '@agentuity/runtime';
|
|
288
|
+
import { z } from 'zod';
|
|
289
|
+
|
|
290
|
+
const agent = createAgent({
|
|
291
|
+
metadata: {
|
|
292
|
+
name: 'test-agent',
|
|
293
|
+
},
|
|
294
|
+
schema: {
|
|
295
|
+
output: z.array(z.string()),
|
|
296
|
+
},
|
|
297
|
+
handler: async (ctx) => {
|
|
298
|
+
return ['item1', 'item2'];
|
|
299
|
+
},
|
|
300
|
+
});
|
|
301
|
+
|
|
302
|
+
export default agent;
|
|
303
|
+
`;
|
|
304
|
+
writeFileSync(agentFile, code);
|
|
305
|
+
|
|
306
|
+
const transpiler = new Bun.Transpiler({ loader: 'ts', target: 'bun' });
|
|
307
|
+
const contents = transpiler.transformSync(code);
|
|
308
|
+
const [, metadata] = await parseAgentMetadata(
|
|
309
|
+
TEST_DIR,
|
|
310
|
+
agentFile,
|
|
311
|
+
contents,
|
|
312
|
+
'proj_1',
|
|
313
|
+
'dep_1'
|
|
314
|
+
);
|
|
315
|
+
|
|
316
|
+
expect(metadata.has('inputSchemaCode')).toBe(false);
|
|
317
|
+
expect(metadata.has('outputSchemaCode')).toBe(true);
|
|
318
|
+
|
|
319
|
+
const outputSchemaCode = metadata.get('outputSchemaCode');
|
|
320
|
+
expect(outputSchemaCode).toContain('z.array');
|
|
321
|
+
expect(outputSchemaCode).toContain('z.string');
|
|
322
|
+
|
|
323
|
+
cleanup();
|
|
324
|
+
});
|
|
325
|
+
|
|
326
|
+
test('should handle complex nested schemas', async () => {
|
|
327
|
+
setup();
|
|
328
|
+
const agentFile = join(TEST_DIR, 'agent.ts');
|
|
329
|
+
const code = `
|
|
330
|
+
import { createAgent } from '@agentuity/runtime';
|
|
331
|
+
import { z } from 'zod';
|
|
332
|
+
|
|
333
|
+
const agent = createAgent({
|
|
334
|
+
metadata: {
|
|
335
|
+
name: 'test-agent',
|
|
336
|
+
},
|
|
337
|
+
schema: {
|
|
338
|
+
input: z.object({
|
|
339
|
+
user: z.object({
|
|
340
|
+
name: z.string(),
|
|
341
|
+
email: z.string().email(),
|
|
342
|
+
}),
|
|
343
|
+
tags: z.array(z.string()),
|
|
344
|
+
}),
|
|
345
|
+
output: z.union([
|
|
346
|
+
z.object({ success: z.boolean() }),
|
|
347
|
+
z.object({ error: z.string() }),
|
|
348
|
+
]),
|
|
349
|
+
},
|
|
350
|
+
handler: async (ctx, input) => {
|
|
351
|
+
return { success: true };
|
|
352
|
+
},
|
|
353
|
+
});
|
|
354
|
+
|
|
355
|
+
export default agent;
|
|
356
|
+
`;
|
|
357
|
+
writeFileSync(agentFile, code);
|
|
358
|
+
|
|
359
|
+
const transpiler = new Bun.Transpiler({ loader: 'ts', target: 'bun' });
|
|
360
|
+
const contents = transpiler.transformSync(code);
|
|
361
|
+
const [, metadata] = await parseAgentMetadata(
|
|
362
|
+
TEST_DIR,
|
|
363
|
+
agentFile,
|
|
364
|
+
contents,
|
|
365
|
+
'proj_1',
|
|
366
|
+
'dep_1'
|
|
367
|
+
);
|
|
368
|
+
|
|
369
|
+
expect(metadata.has('inputSchemaCode')).toBe(true);
|
|
370
|
+
expect(metadata.has('outputSchemaCode')).toBe(true);
|
|
371
|
+
|
|
372
|
+
const inputSchemaCode = metadata.get('inputSchemaCode');
|
|
373
|
+
const outputSchemaCode = metadata.get('outputSchemaCode');
|
|
374
|
+
|
|
375
|
+
expect(inputSchemaCode).toContain('z.object');
|
|
376
|
+
expect(inputSchemaCode).toContain('user');
|
|
377
|
+
expect(inputSchemaCode).toContain('tags');
|
|
378
|
+
|
|
379
|
+
expect(outputSchemaCode).toContain('z.union');
|
|
380
|
+
|
|
381
|
+
cleanup();
|
|
382
|
+
});
|
|
383
|
+
|
|
384
|
+
test('should handle agent without schema property', async () => {
|
|
385
|
+
setup();
|
|
386
|
+
const agentFile = join(TEST_DIR, 'agent.ts');
|
|
387
|
+
const code = `
|
|
388
|
+
import { createAgent } from '@agentuity/runtime';
|
|
389
|
+
|
|
390
|
+
const agent = createAgent({
|
|
391
|
+
metadata: {
|
|
392
|
+
name: 'test-agent',
|
|
393
|
+
},
|
|
394
|
+
handler: async (ctx) => {
|
|
395
|
+
return 'success';
|
|
396
|
+
},
|
|
397
|
+
});
|
|
398
|
+
|
|
399
|
+
export default agent;
|
|
400
|
+
`;
|
|
401
|
+
writeFileSync(agentFile, code);
|
|
402
|
+
|
|
403
|
+
const transpiler = new Bun.Transpiler({ loader: 'ts', target: 'bun' });
|
|
404
|
+
const contents = transpiler.transformSync(code);
|
|
405
|
+
const [, metadata] = await parseAgentMetadata(
|
|
406
|
+
TEST_DIR,
|
|
407
|
+
agentFile,
|
|
408
|
+
contents,
|
|
409
|
+
'proj_1',
|
|
410
|
+
'dep_1'
|
|
411
|
+
);
|
|
412
|
+
|
|
413
|
+
expect(metadata.has('inputSchemaCode')).toBe(false);
|
|
414
|
+
expect(metadata.has('outputSchemaCode')).toBe(false);
|
|
415
|
+
|
|
416
|
+
cleanup();
|
|
417
|
+
});
|
|
418
|
+
});
|
package/src/cmd/build/ast.ts
CHANGED
|
@@ -174,6 +174,49 @@ function generateStableEvalId(projectId: string, agentId: string, name: string):
|
|
|
174
174
|
return `evalid_${hashSHA1(projectId, agentId, name)}`.substring(0, 64);
|
|
175
175
|
}
|
|
176
176
|
|
|
177
|
+
/**
|
|
178
|
+
* Extract schema code from createAgent call arguments
|
|
179
|
+
* Returns input and output schema code as strings
|
|
180
|
+
*/
|
|
181
|
+
function extractSchemaCode(callargexp: ASTObjectExpression): {
|
|
182
|
+
inputSchemaCode?: string;
|
|
183
|
+
outputSchemaCode?: string;
|
|
184
|
+
} {
|
|
185
|
+
let schemaObj: ASTObjectExpression | undefined;
|
|
186
|
+
|
|
187
|
+
// Find the schema property
|
|
188
|
+
for (const prop of callargexp.properties) {
|
|
189
|
+
if (prop.key.type === 'Identifier' && prop.key.name === 'schema') {
|
|
190
|
+
if (prop.value.type === 'ObjectExpression') {
|
|
191
|
+
schemaObj = prop.value as ASTObjectExpression;
|
|
192
|
+
break;
|
|
193
|
+
}
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
if (!schemaObj) {
|
|
198
|
+
return {};
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
let inputSchemaCode: string | undefined;
|
|
202
|
+
let outputSchemaCode: string | undefined;
|
|
203
|
+
|
|
204
|
+
// Extract input and output schema code
|
|
205
|
+
for (const prop of schemaObj.properties) {
|
|
206
|
+
if (prop.key.type === 'Identifier') {
|
|
207
|
+
if (prop.key.name === 'input' && prop.value) {
|
|
208
|
+
// Generate source code from AST node
|
|
209
|
+
inputSchemaCode = generate(prop.value);
|
|
210
|
+
} else if (prop.key.name === 'output' && prop.value) {
|
|
211
|
+
// Generate source code from AST node
|
|
212
|
+
outputSchemaCode = generate(prop.value);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
return { inputSchemaCode, outputSchemaCode };
|
|
218
|
+
}
|
|
219
|
+
|
|
177
220
|
type AcornParseResultType = ReturnType<typeof acornLoose.parse>;
|
|
178
221
|
|
|
179
222
|
const MetadataError = StructuredError('MetatadataNameMissingError')<{
|
|
@@ -189,7 +232,9 @@ function augmentAgentMetadataNode(
|
|
|
189
232
|
version: string,
|
|
190
233
|
ast: AcornParseResultType,
|
|
191
234
|
propvalue: ASTObjectExpression,
|
|
192
|
-
filename: string
|
|
235
|
+
filename: string,
|
|
236
|
+
inputSchemaCode?: string,
|
|
237
|
+
outputSchemaCode?: string
|
|
193
238
|
): [string, Map<string, string>] {
|
|
194
239
|
const metadata = parseObjectExpressionToMap(propvalue);
|
|
195
240
|
if (!metadata.has('name')) {
|
|
@@ -218,6 +263,12 @@ function augmentAgentMetadataNode(
|
|
|
218
263
|
metadata.set('id', id);
|
|
219
264
|
metadata.set('agentId', agentId);
|
|
220
265
|
metadata.set('description', description);
|
|
266
|
+
if (inputSchemaCode) {
|
|
267
|
+
metadata.set('inputSchemaCode', inputSchemaCode);
|
|
268
|
+
}
|
|
269
|
+
if (outputSchemaCode) {
|
|
270
|
+
metadata.set('outputSchemaCode', outputSchemaCode);
|
|
271
|
+
}
|
|
221
272
|
propvalue.properties.push(
|
|
222
273
|
createObjectPropertyNode('id', id),
|
|
223
274
|
createObjectPropertyNode('agentId', agentId),
|
|
@@ -226,6 +277,12 @@ function augmentAgentMetadataNode(
|
|
|
226
277
|
createObjectPropertyNode('filename', rel),
|
|
227
278
|
createObjectPropertyNode('description', description)
|
|
228
279
|
);
|
|
280
|
+
if (inputSchemaCode) {
|
|
281
|
+
propvalue.properties.push(createObjectPropertyNode('inputSchemaCode', inputSchemaCode));
|
|
282
|
+
}
|
|
283
|
+
if (outputSchemaCode) {
|
|
284
|
+
propvalue.properties.push(createObjectPropertyNode('outputSchemaCode', outputSchemaCode));
|
|
285
|
+
}
|
|
229
286
|
|
|
230
287
|
const newsource = generate(ast);
|
|
231
288
|
|
|
@@ -626,6 +683,7 @@ export async function parseAgentMetadata(
|
|
|
626
683
|
const id = getAgentId(projectId, deploymentId, rel, version);
|
|
627
684
|
|
|
628
685
|
let result: [string, Map<string, string>] | undefined;
|
|
686
|
+
let schemaCodeExtracted = false;
|
|
629
687
|
|
|
630
688
|
for (const body of ast.body) {
|
|
631
689
|
if (body.type === 'ExportDefaultDeclaration') {
|
|
@@ -634,6 +692,17 @@ export async function parseAgentMetadata(
|
|
|
634
692
|
if (call.callee.name === 'createAgent') {
|
|
635
693
|
for (const callarg of call.arguments) {
|
|
636
694
|
const callargexp = callarg as ASTObjectExpression;
|
|
695
|
+
|
|
696
|
+
// Extract schema code before processing metadata
|
|
697
|
+
let inputSchemaCode: string | undefined;
|
|
698
|
+
let outputSchemaCode: string | undefined;
|
|
699
|
+
if (!schemaCodeExtracted) {
|
|
700
|
+
const schemaCode = extractSchemaCode(callargexp);
|
|
701
|
+
inputSchemaCode = schemaCode.inputSchemaCode;
|
|
702
|
+
outputSchemaCode = schemaCode.outputSchemaCode;
|
|
703
|
+
schemaCodeExtracted = true;
|
|
704
|
+
}
|
|
705
|
+
|
|
637
706
|
for (const prop of callargexp.properties) {
|
|
638
707
|
if (prop.key.type === 'Identifier' && prop.key.name === 'metadata') {
|
|
639
708
|
result = augmentAgentMetadataNode(
|
|
@@ -644,7 +713,9 @@ export async function parseAgentMetadata(
|
|
|
644
713
|
version,
|
|
645
714
|
ast,
|
|
646
715
|
prop.value as ASTObjectExpression,
|
|
647
|
-
filename
|
|
716
|
+
filename,
|
|
717
|
+
inputSchemaCode,
|
|
718
|
+
outputSchemaCode
|
|
648
719
|
);
|
|
649
720
|
break;
|
|
650
721
|
}
|
|
@@ -689,6 +760,17 @@ export async function parseAgentMetadata(
|
|
|
689
760
|
if (call.callee.name === 'createAgent') {
|
|
690
761
|
for (const callarg of call.arguments) {
|
|
691
762
|
const callargexp = callarg as ASTObjectExpression;
|
|
763
|
+
|
|
764
|
+
// Extract schema code before processing metadata
|
|
765
|
+
let inputSchemaCode: string | undefined;
|
|
766
|
+
let outputSchemaCode: string | undefined;
|
|
767
|
+
if (!schemaCodeExtracted) {
|
|
768
|
+
const schemaCode = extractSchemaCode(callargexp);
|
|
769
|
+
inputSchemaCode = schemaCode.inputSchemaCode;
|
|
770
|
+
outputSchemaCode = schemaCode.outputSchemaCode;
|
|
771
|
+
schemaCodeExtracted = true;
|
|
772
|
+
}
|
|
773
|
+
|
|
692
774
|
for (const prop of callargexp.properties) {
|
|
693
775
|
if (
|
|
694
776
|
prop.key.type === 'Identifier' &&
|
|
@@ -702,7 +784,9 @@ export async function parseAgentMetadata(
|
|
|
702
784
|
version,
|
|
703
785
|
ast,
|
|
704
786
|
prop.value as ASTObjectExpression,
|
|
705
|
-
filename
|
|
787
|
+
filename,
|
|
788
|
+
inputSchemaCode,
|
|
789
|
+
outputSchemaCode
|
|
706
790
|
);
|
|
707
791
|
break;
|
|
708
792
|
}
|
|
@@ -1572,7 +1656,7 @@ export function analyzeWorkbench(content: string): WorkbenchAnalysis {
|
|
|
1572
1656
|
|
|
1573
1657
|
// Set default config if workbench is used but no config was parsed
|
|
1574
1658
|
if (hasImport && hasUsage && !config) {
|
|
1575
|
-
config = { route: '/workbench' };
|
|
1659
|
+
config = { route: '/workbench', headers: {}, port: 3500 };
|
|
1576
1660
|
}
|
|
1577
1661
|
|
|
1578
1662
|
return {
|
package/src/cmd/build/bundler.ts
CHANGED
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
import { $ } from 'bun';
|
|
2
|
-
import { z } from 'zod';
|
|
3
2
|
import { join, relative, resolve, dirname, basename } from 'node:path';
|
|
4
3
|
import { cpSync, existsSync, mkdirSync, rmSync } from 'node:fs';
|
|
5
4
|
import gitParseUrl from 'git-url-parse';
|
|
@@ -12,34 +11,11 @@ import { createLogger } from '@agentuity/server';
|
|
|
12
11
|
import type { LogLevel } from '../../types';
|
|
13
12
|
import { generateWorkbenchMainTsx, generateWorkbenchIndexHtml } from './workbench-templates';
|
|
14
13
|
import { analyzeWorkbench } from './ast';
|
|
15
|
-
import {
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
.default(['latest'])
|
|
21
|
-
.optional()
|
|
22
|
-
.describe('One or more tags to add to the deployment'),
|
|
23
|
-
logsUrl: z.url().optional().describe('The url to the CI build logs'),
|
|
24
|
-
trigger: z
|
|
25
|
-
.enum(['cli', 'workflow', 'webhook'])
|
|
26
|
-
.default('cli')
|
|
27
|
-
.optional()
|
|
28
|
-
.describe('The trigger that caused the build'),
|
|
29
|
-
commitUrl: z.url().optional().describe('The url to the CI commit'),
|
|
30
|
-
message: z.string().optional().describe('The message to associate with this deployment'),
|
|
31
|
-
provider: z.string().optional().describe('The CI provider name (attempts to autodetect)'),
|
|
32
|
-
event: z
|
|
33
|
-
.enum(['pull_request', 'push', 'manual', 'workflow'])
|
|
34
|
-
.default('manual')
|
|
35
|
-
.optional()
|
|
36
|
-
.describe('The event that triggered the deployment'),
|
|
37
|
-
pullRequestNumber: z.number().optional().describe('the pull request number'),
|
|
38
|
-
pullRequestCommentId: z.string().optional().describe('the pull request comment id'),
|
|
39
|
-
pullRequestURL: z.url().optional().describe('the pull request url'),
|
|
40
|
-
});
|
|
41
|
-
|
|
42
|
-
type DeployOptions = z.infer<typeof DeployOptionsSchema>;
|
|
14
|
+
import { StructuredError } from '@agentuity/core';
|
|
15
|
+
import { DeployOptionsSchema, type DeployOptions } from '../../schemas/deploy';
|
|
16
|
+
|
|
17
|
+
// Re-export for backward compatibility
|
|
18
|
+
export { DeployOptionsSchema };
|
|
43
19
|
|
|
44
20
|
export interface BundleOptions extends DeployOptions {
|
|
45
21
|
rootDir: string;
|
|
@@ -138,6 +114,15 @@ export async function bundle({
|
|
|
138
114
|
mkdirSync(join(outDir, 'chunk'), { recursive: true });
|
|
139
115
|
mkdirSync(join(outDir, 'asset'), { recursive: true });
|
|
140
116
|
|
|
117
|
+
// Pre-create all nested source directories in output
|
|
118
|
+
// This is needed because Bun.build with naming.entry preserves structure
|
|
119
|
+
// but doesn't create nested directories automatically
|
|
120
|
+
for (const entrypoint of appEntrypoints) {
|
|
121
|
+
const relPath = relative(rootDir, dirname(entrypoint));
|
|
122
|
+
const outputSubdir = join(outDir, relPath);
|
|
123
|
+
mkdirSync(outputSubdir, { recursive: true });
|
|
124
|
+
}
|
|
125
|
+
|
|
141
126
|
const pkgFile = Bun.file(join(rootDir, 'package.json'));
|
|
142
127
|
const pkgContents = JSON.parse(await pkgFile.text());
|
|
143
128
|
const isProd = !dev;
|
|
@@ -270,10 +255,15 @@ export async function bundle({
|
|
|
270
255
|
);
|
|
271
256
|
|
|
272
257
|
if (webEntrypoints.length) {
|
|
258
|
+
const webOutDir = join(outDir, 'web');
|
|
259
|
+
mkdirSync(webOutDir, { recursive: true });
|
|
260
|
+
mkdirSync(join(webOutDir, 'chunk'), { recursive: true });
|
|
261
|
+
mkdirSync(join(webOutDir, 'asset'), { recursive: true });
|
|
262
|
+
|
|
273
263
|
const config: Bun.BuildConfig = {
|
|
274
264
|
entrypoints: webEntrypoints,
|
|
275
265
|
root: webDir,
|
|
276
|
-
outdir:
|
|
266
|
+
outdir: webOutDir,
|
|
277
267
|
define,
|
|
278
268
|
sourcemap: dev ? 'inline' : 'linked',
|
|
279
269
|
env: 'AGENTUITY_PUBLIC_*',
|
|
@@ -330,15 +320,9 @@ export async function bundle({
|
|
|
330
320
|
const analysis = analyzeWorkbench(appContent);
|
|
331
321
|
|
|
332
322
|
if (analysis.hasWorkbench) {
|
|
333
|
-
//
|
|
334
|
-
const
|
|
335
|
-
|
|
336
|
-
const configWithPort = { ...config, port: port || 3500 };
|
|
337
|
-
const encodedConfig = encodeWorkbenchConfig(configWithPort);
|
|
338
|
-
const workbenchDefine = {
|
|
339
|
-
...define,
|
|
340
|
-
AGENTUITY_WORKBENCH_CONFIG_INLINE: JSON.stringify(encodedConfig),
|
|
341
|
-
};
|
|
323
|
+
// Create workbench config with proper defaults
|
|
324
|
+
const defaultConfig = { route: '/workbench', headers: {}, port: port || 3500 };
|
|
325
|
+
const config = { ...defaultConfig, ...analysis.config };
|
|
342
326
|
const logger = createLogger((process.env.AGENTUITY_LOG_LEVEL as LogLevel) || 'info');
|
|
343
327
|
try {
|
|
344
328
|
// Generate workbench files on the fly instead of using files from package
|
|
@@ -356,14 +340,13 @@ export async function bundle({
|
|
|
356
340
|
const workbenchBuildConfig: Bun.BuildConfig = {
|
|
357
341
|
entrypoints: [workbenchIndexFile],
|
|
358
342
|
outdir: join(outDir, 'workbench'),
|
|
359
|
-
define: workbenchDefine,
|
|
360
343
|
sourcemap: dev ? 'inline' : 'linked',
|
|
361
|
-
plugins: [AgentuityBundler],
|
|
344
|
+
plugins: [AgentuityBundler], // i dont think we need this plugin here
|
|
362
345
|
target: 'browser',
|
|
363
346
|
format: 'esm',
|
|
364
347
|
banner: `// Generated file. DO NOT EDIT`,
|
|
365
|
-
minify:
|
|
366
|
-
splitting:
|
|
348
|
+
minify: !dev, // Disable minification in dev to avoid module resolution issues
|
|
349
|
+
splitting: !dev, // Disable code splitting in dev to avoid relative import resolution issues
|
|
367
350
|
packages: 'bundle',
|
|
368
351
|
naming: {
|
|
369
352
|
entry: '[dir]/[name].[ext]',
|
|
@@ -390,7 +373,7 @@ export async function bundle({
|
|
|
390
373
|
}
|
|
391
374
|
// Clean up temp directory even on failure
|
|
392
375
|
rmSync(tempWorkbenchDir, { recursive: true, force: true });
|
|
393
|
-
|
|
376
|
+
logger.fatal('Workbench bundling failed');
|
|
394
377
|
}
|
|
395
378
|
} catch (error) {
|
|
396
379
|
logger.error('Failed to bundle workbench:', error);
|