@ekairos/structure 1.21.53-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clearDataset.tool.d.ts +14 -0
- package/dist/clearDataset.tool.d.ts.map +1 -0
- package/dist/clearDataset.tool.js +26 -0
- package/dist/clearDataset.tool.js.map +1 -0
- package/dist/completeObject.tool.d.ts +23 -0
- package/dist/completeObject.tool.d.ts.map +1 -0
- package/dist/completeObject.tool.js +80 -0
- package/dist/completeObject.tool.js.map +1 -0
- package/dist/completeRows.tool.d.ts +20 -0
- package/dist/completeRows.tool.d.ts.map +1 -0
- package/dist/completeRows.tool.js +134 -0
- package/dist/completeRows.tool.js.map +1 -0
- package/dist/dataset/steps.d.ts +142 -0
- package/dist/dataset/steps.d.ts.map +1 -0
- package/dist/dataset/steps.js +166 -0
- package/dist/dataset/steps.js.map +1 -0
- package/dist/datasetFiles.d.ts +6 -0
- package/dist/datasetFiles.d.ts.map +1 -0
- package/dist/datasetFiles.js +12 -0
- package/dist/datasetFiles.js.map +1 -0
- package/dist/domain.d.ts +2 -0
- package/dist/domain.d.ts.map +1 -0
- package/dist/domain.js +2 -0
- package/dist/domain.js.map +1 -0
- package/dist/executeCommand.tool.d.ts +35 -0
- package/dist/executeCommand.tool.d.ts.map +1 -0
- package/dist/executeCommand.tool.js +106 -0
- package/dist/executeCommand.tool.js.map +1 -0
- package/dist/file/steps.d.ts +8 -0
- package/dist/file/steps.d.ts.map +1 -0
- package/dist/file/steps.js +20 -0
- package/dist/file/steps.js.map +1 -0
- package/dist/generateSchema.tool.d.ts +22 -0
- package/dist/generateSchema.tool.d.ts.map +1 -0
- package/dist/generateSchema.tool.js +61 -0
- package/dist/generateSchema.tool.js.map +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/dist/prompts.d.ts +18 -0
- package/dist/prompts.d.ts.map +1 -0
- package/dist/prompts.js +62 -0
- package/dist/prompts.js.map +1 -0
- package/dist/sandbox/steps.d.ts +74 -0
- package/dist/sandbox/steps.d.ts.map +1 -0
- package/dist/sandbox/steps.js +104 -0
- package/dist/sandbox/steps.js.map +1 -0
- package/dist/schema.d.ts +2 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +33 -0
- package/dist/schema.js.map +1 -0
- package/dist/service.d.ts +41 -0
- package/dist/service.d.ts.map +1 -0
- package/dist/service.js +179 -0
- package/dist/service.js.map +1 -0
- package/dist/steps/commitFromEvents.step.d.ts +13 -0
- package/dist/steps/commitFromEvents.step.d.ts.map +1 -0
- package/dist/steps/commitFromEvents.step.js +82 -0
- package/dist/steps/commitFromEvents.step.js.map +1 -0
- package/dist/steps/persistObjectFromStory.step.d.ts +7 -0
- package/dist/steps/persistObjectFromStory.step.d.ts.map +1 -0
- package/dist/steps/persistObjectFromStory.step.js +90 -0
- package/dist/steps/persistObjectFromStory.step.js.map +1 -0
- package/dist/structure.d.ts +34 -0
- package/dist/structure.d.ts.map +1 -0
- package/dist/structure.js +443 -0
- package/dist/structure.js.map +1 -0
- package/dist/types/runtime.d.ts +56 -0
- package/dist/types/runtime.d.ts.map +1 -0
- package/dist/types/runtime.js +2 -0
- package/dist/types/runtime.js.map +1 -0
- package/package.json +49 -0

package/dist/dataset/steps.js
ADDED

@@ -0,0 +1,166 @@
+import { resolveStoryRuntime } from "@ekairos/story/runtime";
+export async function structureGetOrCreateContextStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const ctx = await runtime.store.getOrCreateContext({ key: params.contextKey });
+        return { ok: true, data: ctx };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureGetContextStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const ctx = await runtime.store.getContext({ key: params.contextKey });
+        if (!ctx)
+            return { ok: false, error: "Context not found" };
+        return { ok: true, data: ctx };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureUpdateContextContentStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const updated = await runtime.store.updateContextContent({ key: params.contextKey }, params.content);
+        return { ok: true, data: updated };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structurePatchContextContentStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const existing = await runtime.store.getOrCreateContext({ key: params.contextKey });
+        const existingContent = (existing?.content ?? {});
+        const existingStructure = (existingContent?.structure ?? {});
+        const patchStructure = (params.patch?.structure ?? {});
+        const next = {
+            ...existingContent,
+            ...params.patch,
+            structure: { ...existingStructure, ...patchStructure },
+        };
+        const updated = await runtime.store.updateContextContent({ key: params.contextKey }, next);
+        return { ok: true, data: updated };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureUploadRowsOutputJsonlStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const db = runtime.db;
+        const storagePath = `/structure/${params.structureId}/output.jsonl`;
+        const fileBuffer = Buffer.from(params.contentBase64 ?? "", "base64");
+        const uploadResult = await db.storage.uploadFile(storagePath, fileBuffer, {
+            contentType: "application/x-ndjson",
+            contentDisposition: "output.jsonl",
+        });
+        const fileId = uploadResult?.data?.id;
+        if (!fileId)
+            return { ok: false, error: "Failed to upload file to storage" };
+        return { ok: true, data: { fileId, storagePath } };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureLinkRowsOutputFileToContextStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const store = runtime.store;
+        const db = runtime.db;
+        const ctx = await store.getOrCreateContext({ key: params.contextKey });
+        const ctxId = ctx?.id;
+        if (!ctxId)
+            return { ok: false, error: "Context not found" };
+        await db.transact([db.tx.context_contexts[ctxId].link({ structure_output_file: params.fileId })]);
+        return { ok: true };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureUnlinkRowsOutputFileFromContextStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const store = runtime.store;
+        const db = runtime.db;
+        const ctx = await store.getOrCreateContext({ key: params.contextKey });
+        const ctxId = ctx?.id;
+        if (!ctxId)
+            return { ok: false, error: "Context not found" };
+        await db.transact([db.tx.context_contexts[ctxId].unlink({ structure_output_file: params.fileId })]);
+        return { ok: true };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureGetContextWithRowsOutputFileStep(params) {
+    "use step";
+    try {
+        const runtime = await resolveStoryRuntime(params.env);
+        const db = runtime.db;
+        const query = (await db.query({
+            context_contexts: {
+                $: { where: { key: params.contextKey }, limit: 1 },
+                structure_output_file: {},
+            },
+        }));
+        const row = query.context_contexts?.[0];
+        if (!row)
+            return { ok: false, error: "Context not found" };
+        return { ok: true, data: row };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+export async function structureReadRowsOutputJsonlStep(params) {
+    "use step";
+    try {
+        const contextKey = `structure:${params.structureId}`;
+        const runtime = await resolveStoryRuntime(params.env);
+        const db = runtime.db;
+        const query = (await db.query({
+            context_contexts: {
+                $: { where: { key: contextKey }, limit: 1 },
+                structure_output_file: {},
+            },
+        }));
+        const ctx = query.context_contexts?.[0];
+        if (!ctx)
+            return { ok: false, error: "Context not found" };
+        const linked = Array.isArray(ctx?.structure_output_file) ? ctx.structure_output_file[0] : ctx.structure_output_file;
+        const url = linked?.url;
+        if (!url)
+            return { ok: false, error: "Rows output file not found" };
+        const fileBuffer = await fetch(url).then((r) => r.arrayBuffer());
+        return { ok: true, data: { contentBase64: Buffer.from(fileBuffer).toString("base64") } };
+    }
+    catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
+        return { ok: false, error: message };
+    }
+}
+//# sourceMappingURL=steps.js.map

package/dist/dataset/steps.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"steps.js","sourceRoot":"","sources":["../../src/dataset/steps.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAqB,MAAM,wBAAwB,CAAA;AA8B/E,MAAM,CAAC,KAAK,UAAU,+BAA+B,CAAC,MAGrD;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,GAAG,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC9E,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,CAAA;IAChC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,uBAAuB,CAAC,MAG7C;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,GAAG,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,GAAG;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAA;QAC1D,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,CAAA;IAChC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iCAAiC,CAAC,MAIvD;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,oBAAoB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAA;QACpG,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAA;IACpC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,gCAAgC,CAAC,MAItD;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QACnF,MAAM,eAAe,GAAG,CAAC,QAAQ,EAAE,OAAO,IAAI,EAAE,CAA4B,CAAA;QAC5E,MAAM,iBAAiB,GAAG,CAAC,eAAe,EAAE,SAAS,IAAI,EAAE,CAA4B,CAAA;QACvF,MAAM,cAAc,GAAG,CAAE,MAAM,CAAC,KAAiC,EAAE,SAAS,IAAI,EAAE,CAA4B,CAAA;QAE9G,MAAM,IAAI,GAAG;YACX,GAAG,eAAe;YAClB,GAAG,MAAM,CAAC,KAAK;YACf,SAAS,EAAE,EAAE,GAAG,iBAAiB,EAAE,GAAG,cAAc,EAAE;SACvD,CAAA;QACD,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,oBAAoB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,EAAE,IAAW,CAAC,CAAA;QACjG,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAA;IACpC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,kCAAkC,CAAC,MAIxD;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;QACrB,MAAM,WAAW,GAAG,cAAc,MAAM,CAAC,WAAW,eAAe,CAAA;QACnE,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;QACpE,MAAM,YAAY,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE,UAAU,EAAE;YACxE,WAAW,EAAE,sBAAsB;YACnC,kBAAkB,EAAE,cAAc;SACnC,CAAC,CAAA;QACF,MAAM,MAAM,GAAG,YAAY,EAAE,IAAI,EAAE,EAAE,CAAA;QACrC,IAAI,CAAC,MAAM;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,kCAAkC,EAAE,CAAA;QAC5E,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,CAAA;IACpD,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,C
AAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,wCAAwC,CAAC,MAI9D;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAA;QAC3B,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;QACrB,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,kBAAkB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QACtE,MAAM,KAAK,GAAG,GAAG,EAAE,EAAE,CAAA;QACrB,IAAI,CAAC,KAAK;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAA;QAE5D,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,qBAAqB,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QACjG,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;IACrB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,4CAA4C,CAAC,MAIlE;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAA;QAC3B,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;QACrB,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,kBAAkB,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QACtE,MAAM,KAAK,GAAG,GAAG,EAAE,EAAE,CAAA;QACrB,IAAI,CAAC,KAAK;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAA;QAE5D,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,EAAE,qBAAqB,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QACnG,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;IACrB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,yCAAyC,CAAC,MAG/D;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;QACrB,MAAM,KAAK,GAAG,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC;YAC5B,gBAAgB,EAAE;gBAChB,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,MAAM,CAAC,UAAU,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAClD,qBAAqB,EAAE,EAAE;aAC1B;SACF,CAAC,CAAQ,CAAA;QACV,MAAM,GAAG,GAAG,KAAK,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAA;QACvC,IAAI,CAAC,GAAG;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAA;QAC1D,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,CAAA;IAChC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,gCAAgC,CAAC,MAGtD;IACC,UAAU,CAAA;IACV,IAAI,CAAC;QACH,MAAM,UAAU,GAAG,aAAa,MAAM,CAAC,WAAW,EAAE,CAAA;QACpD,MAAM,OAAO,GAAiB,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QACnE,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;QACrB,MAAM,KAAK,GAAG,CAAC,MAAM,EAAE,CAAC,KAAK,CAAC;YAC5B,gBAAgB,EAAE;gBAChB,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,UAAU,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3C,qBAAqB,EAAE,EAAE;aAC1B;SACF,CAAC,CAAQ,CAAA;QACV,MAAM,GAAG,GAAG,KAAK,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAA;QACvC,IAAI,CAAC,GAAG;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAA;QAC1D,MAAM,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,EAAE,qBAAqB,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,qBAAqB,CAAA;QACnH,MAAM,GAAG,GAAG,MAAM,EAAE,GAAG,CAAA;QACvB,IAAI,CAAC,GAAG;YAAE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,4BAA4B,EAAE,CAAA;QAEnE,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAA
E,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAA;QAChE,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,aAAa,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAA;IAC1F,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;QACtE,OAAO,EAAE,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;IACtC,CAAC;AACH,CAAC","sourcesContent":["import { resolveStoryRuntime, type StoryRuntime } from \"@ekairos/story/runtime\"\n\nexport type StructureContextContent = {\n /**\n * IMPORTANT:\n * Story engine persists its own runtime state into `context.content`.\n * To avoid clobbering, all structure metadata is namespaced under `structure`.\n */\n structure?: {\n kind?: \"ekairos.structure\"\n version?: number\n structureId?: string\n orgId?: string\n createdAt?: number\n updatedAt?: number\n mode?: \"auto\" | \"schema\"\n output?: \"rows\" | \"object\"\n instructions?: string\n sources?: any[]\n outputSchema?: { title?: string; description?: string; schema: any }\n state?: string\n metrics?: { calculatedTotalRows?: number; actualGeneratedRowCount?: number }\n outputs?: {\n rows?: { format: \"jsonl\"; fileId: string; storagePath: string; rowCount?: number }\n object?: { value: any }\n }\n error?: { message: string }\n }\n}\n\nexport async function structureGetOrCreateContextStep(params: {\n env: any\n contextKey: string\n}): Promise<{ ok: true; data: any } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const ctx = await runtime.store.getOrCreateContext({ key: params.contextKey })\n return { ok: true, data: ctx }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureGetContextStep(params: {\n env: any\n contextKey: string\n}): Promise<{ ok: true; data: any } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const ctx = await runtime.store.getContext({ key: params.contextKey })\n if (!ctx) return { ok: false, error: \"Context not found\" }\n return { ok: true, data: ctx }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureUpdateContextContentStep(params: {\n env: any\n contextKey: string\n content: StructureContextContent\n}): Promise<{ ok: true; data: any } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const updated = await runtime.store.updateContextContent({ key: params.contextKey }, params.content)\n return { ok: true, data: updated }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structurePatchContextContentStep(params: {\n env: any\n contextKey: string\n patch: Partial<StructureContextContent>\n}): Promise<{ ok: true; data: any } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const existing = await runtime.store.getOrCreateContext({ key: params.contextKey })\n const existingContent = (existing?.content ?? {}) as Record<string, unknown>\n const existingStructure = (existingContent?.structure ?? 
{}) as Record<string, unknown>\n const patchStructure = ((params.patch as StructureContextContent)?.structure ?? {}) as Record<string, unknown>\n\n const next = {\n ...existingContent,\n ...params.patch,\n structure: { ...existingStructure, ...patchStructure },\n }\n const updated = await runtime.store.updateContextContent({ key: params.contextKey }, next as any)\n return { ok: true, data: updated }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureUploadRowsOutputJsonlStep(params: {\n env: any\n structureId: string\n contentBase64: string\n}): Promise<{ ok: true; data: { fileId: string; storagePath: string } } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const db = runtime.db\n const storagePath = `/structure/${params.structureId}/output.jsonl`\n const fileBuffer = Buffer.from(params.contentBase64 ?? \"\", \"base64\")\n const uploadResult = await db.storage.uploadFile(storagePath, fileBuffer, {\n contentType: \"application/x-ndjson\",\n contentDisposition: \"output.jsonl\",\n })\n const fileId = uploadResult?.data?.id\n if (!fileId) return { ok: false, error: \"Failed to upload file to storage\" }\n return { ok: true, data: { fileId, storagePath } }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureLinkRowsOutputFileToContextStep(params: {\n env: any\n contextKey: string\n fileId: string\n}): Promise<{ ok: true } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const store = runtime.store\n const db = runtime.db\n const ctx = await store.getOrCreateContext({ key: params.contextKey })\n const ctxId = ctx?.id\n if (!ctxId) return { ok: false, error: \"Context not found\" }\n\n await db.transact([db.tx.context_contexts[ctxId].link({ structure_output_file: params.fileId })])\n return { ok: true }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureUnlinkRowsOutputFileFromContextStep(params: {\n env: any\n contextKey: string\n fileId: string\n}): Promise<{ ok: true } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const store = runtime.store\n const db = runtime.db\n const ctx = await store.getOrCreateContext({ key: params.contextKey })\n const ctxId = ctx?.id\n if (!ctxId) return { ok: false, error: \"Context not found\" }\n\n await db.transact([db.tx.context_contexts[ctxId].unlink({ structure_output_file: params.fileId })])\n return { ok: true }\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureGetContextWithRowsOutputFileStep(params: {\n env: any\n contextKey: string\n}): Promise<{ ok: true; data: any } | { ok: false; error: string }> {\n \"use step\"\n try {\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const db = runtime.db\n const query = (await db.query({\n context_contexts: {\n $: { where: { key: params.contextKey }, limit: 1 },\n structure_output_file: {},\n },\n })) as any\n const row = query.context_contexts?.[0]\n if (!row) return { ok: false, error: \"Context not found\" }\n return { ok: true, data: row }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\nexport async function structureReadRowsOutputJsonlStep(params: {\n env: any\n structureId: string\n}): Promise<{ ok: true; data: { contentBase64: string } } | { ok: false; error: string }> {\n \"use step\"\n try {\n const contextKey = `structure:${params.structureId}`\n const runtime: StoryRuntime = await resolveStoryRuntime(params.env)\n const db = runtime.db\n const query = (await db.query({\n context_contexts: {\n $: { where: { key: contextKey }, limit: 1 },\n structure_output_file: {},\n },\n })) as any\n const ctx = query.context_contexts?.[0]\n if (!ctx) return { ok: false, error: \"Context not found\" }\n const linked = Array.isArray(ctx?.structure_output_file) ? ctx.structure_output_file[0] : ctx.structure_output_file\n const url = linked?.url\n if (!url) return { ok: false, error: \"Rows output file not found\" }\n\n const fileBuffer = await fetch(url).then((r) => r.arrayBuffer())\n return { ok: true, data: { contentBase64: Buffer.from(fileBuffer).toString(\"base64\") } }\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n return { ok: false, error: message }\n }\n}\n\n"]}
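
Every step in dataset/steps.js follows the same contract: resolve the story runtime from params.env, perform one store or db operation, and return a discriminated { ok: true, data } | { ok: false, error } result instead of throwing. A minimal caller sketch (the deep import path, the env value, and the state value are assumptions; this diff does not show which modules package.json actually exports):

import { structurePatchContextContentStep } from "@ekairos/structure/dist/dataset/steps";

async function markStructureFailed(env: unknown, structureId: string, message: string) {
    // Context keys follow the `structure:<structureId>` convention used by structureReadRowsOutputJsonlStep.
    const result = await structurePatchContextContentStep({
        env,
        contextKey: `structure:${structureId}`,
        // Only the `structure` namespace is merged, so story-engine state in context.content is preserved.
        patch: { structure: { state: "failed", error: { message }, updatedAt: Date.now() } },
    });
    if (!result.ok) {
        // Failures are normalized to a string message inside the step.
        console.error("patch failed:", result.error);
        return false;
    }
    return true;
}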

package/dist/datasetFiles.d.ts
ADDED

@@ -0,0 +1,6 @@
+export declare const DATASET_OUTPUT_FILE_NAME = "output.jsonl";
+export declare const DATASET_OUTPUT_SCHEMA_FILE_NAME = "output_schema.json";
+export declare function getDatasetWorkstation(datasetId: string): string;
+export declare function getDatasetOutputPath(datasetId: string): string;
+export declare function getDatasetOutputSchemaPath(datasetId: string): string;
+//# sourceMappingURL=datasetFiles.d.ts.map

package/dist/datasetFiles.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"datasetFiles.d.ts","sourceRoot":"","sources":["../src/datasetFiles.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,wBAAwB,iBAAiB,CAAA;AACtD,eAAO,MAAM,+BAA+B,uBAAuB,CAAA;AAEnE,wBAAgB,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAE/D;AAED,wBAAgB,oBAAoB,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAE9D;AAED,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAEpE"}

package/dist/datasetFiles.js
ADDED

@@ -0,0 +1,12 @@
+export const DATASET_OUTPUT_FILE_NAME = "output.jsonl";
+export const DATASET_OUTPUT_SCHEMA_FILE_NAME = "output_schema.json";
+export function getDatasetWorkstation(datasetId) {
+    return `/vercel/sandbox/datasets/${datasetId}`;
+}
+export function getDatasetOutputPath(datasetId) {
+    return `${getDatasetWorkstation(datasetId)}/${DATASET_OUTPUT_FILE_NAME}`;
+}
+export function getDatasetOutputSchemaPath(datasetId) {
+    return `${getDatasetWorkstation(datasetId)}/${DATASET_OUTPUT_SCHEMA_FILE_NAME}`;
+}
+//# sourceMappingURL=datasetFiles.js.map

package/dist/datasetFiles.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"datasetFiles.js","sourceRoot":"","sources":["../src/datasetFiles.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,wBAAwB,GAAG,cAAc,CAAA;AACtD,MAAM,CAAC,MAAM,+BAA+B,GAAG,oBAAoB,CAAA;AAEnE,MAAM,UAAU,qBAAqB,CAAC,SAAiB;IACrD,OAAO,4BAA4B,SAAS,EAAE,CAAA;AAChD,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,SAAiB;IACpD,OAAO,GAAG,qBAAqB,CAAC,SAAS,CAAC,IAAI,wBAAwB,EAAE,CAAA;AAC1E,CAAC;AAED,MAAM,UAAU,0BAA0B,CAAC,SAAiB;IAC1D,OAAO,GAAG,qBAAqB,CAAC,SAAS,CAAC,IAAI,+BAA+B,EAAE,CAAA;AACjF,CAAC","sourcesContent":["export const DATASET_OUTPUT_FILE_NAME = \"output.jsonl\"\nexport const DATASET_OUTPUT_SCHEMA_FILE_NAME = \"output_schema.json\"\n\nexport function getDatasetWorkstation(datasetId: string): string {\n return `/vercel/sandbox/datasets/${datasetId}`\n}\n\nexport function getDatasetOutputPath(datasetId: string): string {\n return `${getDatasetWorkstation(datasetId)}/${DATASET_OUTPUT_FILE_NAME}`\n}\n\nexport function getDatasetOutputSchemaPath(datasetId: string): string {\n return `${getDatasetWorkstation(datasetId)}/${DATASET_OUTPUT_SCHEMA_FILE_NAME}`\n}\n\n"]}
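
The datasetFiles helpers are plain string builders around the sandbox workstation layout; index.js re-exports them, so (assuming package.json points at dist/index.js) they should be importable from the package root. The dataset id below is a placeholder:

import { getDatasetWorkstation, getDatasetOutputPath, getDatasetOutputSchemaPath } from "@ekairos/structure";

getDatasetWorkstation("ds_123");      // "/vercel/sandbox/datasets/ds_123"
getDatasetOutputPath("ds_123");       // "/vercel/sandbox/datasets/ds_123/output.jsonl"
getDatasetOutputSchemaPath("ds_123"); // "/vercel/sandbox/datasets/ds_123/output_schema.json"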

package/dist/domain.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"domain.d.ts","sourceRoot":"","sources":["../src/domain.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,UAAU,CAAA"}

package/dist/domain.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"domain.js","sourceRoot":"","sources":["../src/domain.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,UAAU,CAAA","sourcesContent":["export { structureDomain } from \"./schema\"\n\n"]}

package/dist/executeCommand.tool.d.ts
ADDED

@@ -0,0 +1,35 @@
+interface ExecuteCommandToolParams {
+    datasetId: string;
+    sandboxId: string;
+    env?: any;
+}
+export declare function createExecuteCommandTool({ datasetId, sandboxId, env }: ExecuteCommandToolParams): import("ai").Tool<{
+    pythonCode: string;
+    scriptName: string;
+}, {
+    success: boolean;
+    exitCode: number;
+    stdout: string;
+    stderr: string;
+    scriptPath: string;
+    error: string;
+    stdoutTruncated: boolean;
+    stderrTruncated: boolean;
+    stdoutOriginalLength: number;
+    stderrOriginalLength: number;
+    message?: undefined;
+} | {
+    success: boolean;
+    exitCode: number;
+    stdout: string;
+    stderr: string;
+    scriptPath: string;
+    message: string;
+    stdoutTruncated: boolean;
+    stderrTruncated: boolean;
+    stdoutOriginalLength: number;
+    stderrOriginalLength: number;
+    error?: undefined;
+}>;
+export {};
+//# sourceMappingURL=executeCommand.tool.d.ts.map

package/dist/executeCommand.tool.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"executeCommand.tool.d.ts","sourceRoot":"","sources":["../src/executeCommand.tool.ts"],"names":[],"mappings":"AAQA,UAAU,wBAAwB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,GAAG,CAAC,EAAE,GAAG,CAAA;CACV;AAiBD,wBAAgB,wBAAwB,CAAC,EAAE,SAAS,EAAE,SAAS,EAAE,GAAG,EAAE,EAAE,wBAAwB;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiG/F"}

package/dist/executeCommand.tool.js
ADDED

@@ -0,0 +1,106 @@
+import { tool } from "ai";
+import { z } from "zod/v4";
+import { runDatasetSandboxCommandStep, writeDatasetSandboxTextFileStep } from "./sandbox/steps";
+import { getDatasetWorkstation } from "./datasetFiles";
+const MAX_STDOUT_CHARS = 20000;
+const MAX_STDERR_CHARS = 5000;
+function normalizeScriptName(scriptName) {
+    // Keep the AI-provided scriptName, but:
+    // - strip a trailing ".py" if it was included
+    // - avoid extra dots/spaces in filenames
+    const raw = String(scriptName ?? "").trim();
+    const noExt = raw.toLowerCase().endsWith(".py") ? raw.slice(0, -3) : raw;
+    const cleaned = noExt
+        .replace(/\\/g, "_")
+        .replace(/\//g, "_")
+        .replace(/\s+/g, "_")
+        .replace(/\./g, "_")
+        .replace(/[^a-zA-Z0-9_-]/g, "");
+    return cleaned.length > 0 ? cleaned : "script";
+}
+export function createExecuteCommandTool({ datasetId, sandboxId, env }) {
+    return tool({
+        description: "Execute Python scripts in the sandbox. Always saves script to a file before executing. The tool's output is EXACTLY the script's stdout and includes the script file path for traceability. CRITICAL: Print concise, human-readable summaries only; do NOT print raw large data. For big results, write artifacts to files in the workstation and print their file paths. Always include progress/result prints.",
+        inputSchema: z.object({
+            pythonCode: z
+                .string()
+                .describe("Python code to execute. Saved to a file before running. MANDATORY: Use print() to report progress and final results. Keep prints concise; avoid dumping rows/JSON."),
+            scriptName: z
+                .string()
+                .describe("Name for the script file in snake_case (e.g., 'inspect_file', 'parse_csv', 'generate_output'). The file will be saved as <scriptName>.py in the workstation."),
+        }),
+        execute: async ({ pythonCode, scriptName }) => {
+            console.log("[ekairos/structure] executeCommand.tool execute begin");
+            console.log("[ekairos/structure] executeCommand.tool execute datasetId", datasetId);
+            console.log("[ekairos/structure] executeCommand.tool execute scriptName", scriptName);
+            const workstation = getDatasetWorkstation(datasetId);
+            const scriptNameWithExt = `${normalizeScriptName(scriptName)}.py`;
+            const scriptFile = `${workstation}/${scriptNameWithExt}`;
+            console.log("[ekairos/structure] executeCommand.tool execute writingScript");
+            await writeDatasetSandboxTextFileStep({
+                env,
+                sandboxId,
+                path: scriptFile,
+                text: pythonCode,
+            });
+            console.log("[ekairos/structure] executeCommand.tool execute runningPython");
+            const result = await runDatasetSandboxCommandStep({
+                env,
+                sandboxId,
+                cmd: "python",
+                args: [scriptFile],
+            });
+            const stdout = result.stdout || "";
+            const stderr = result.stderr || "";
+            const exitCode = result.exitCode;
+            const isStdoutTruncated = stdout.length > MAX_STDOUT_CHARS;
+            const isStderrTruncated = stderr.length > MAX_STDERR_CHARS;
+            const stdoutCapped = isStdoutTruncated ? stdout.slice(0, MAX_STDOUT_CHARS) : stdout;
+            const stderrCapped = isStderrTruncated ? stderr.slice(0, MAX_STDERR_CHARS) : stderr;
+            if (exitCode !== 0) {
+                console.log("[ekairos/structure] executeCommand.tool execute failedExitCode");
+                return {
+                    success: false,
+                    exitCode,
+                    stdout: stdoutCapped,
+                    stderr: stderrCapped,
+                    scriptPath: scriptFile,
+                    error: "Command failed",
+                    stdoutTruncated: isStdoutTruncated,
+                    stderrTruncated: isStderrTruncated,
+                    stdoutOriginalLength: stdout.length,
+                    stderrOriginalLength: stderr.length,
+                };
+            }
+            if (stderr && (stderr.includes("Traceback") || stderr.toLowerCase().includes("error"))) {
+                console.log("[ekairos/structure] executeCommand.tool execute pythonErrorDetected");
+                return {
+                    success: false,
+                    exitCode,
+                    stdout: stdoutCapped,
+                    stderr: stderrCapped,
+                    scriptPath: scriptFile,
+                    error: "Python error detected",
+                    stdoutTruncated: isStdoutTruncated,
+                    stderrTruncated: isStderrTruncated,
+                    stdoutOriginalLength: stdout.length,
+                    stderrOriginalLength: stderr.length,
+                };
+            }
+            console.log("[ekairos/structure] executeCommand.tool execute ok");
+            return {
+                success: true,
+                exitCode,
+                stdout: stdoutCapped,
+                stderr: stderrCapped,
+                scriptPath: scriptFile,
+                message: "Command executed successfully",
+                stdoutTruncated: isStdoutTruncated,
+                stderrTruncated: isStderrTruncated,
+                stdoutOriginalLength: stdout.length,
+                stderrOriginalLength: stderr.length,
+            };
+        },
+    });
+}
+//# sourceMappingURL=executeCommand.tool.js.map

package/dist/executeCommand.tool.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"executeCommand.tool.js","sourceRoot":"","sources":["../src/executeCommand.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,IAAI,CAAA;AACzB,OAAO,EAAE,CAAC,EAAE,MAAM,QAAQ,CAAA;AAC1B,OAAO,EAAE,4BAA4B,EAAE,+BAA+B,EAAE,MAAM,iBAAiB,CAAA;AAC/F,OAAO,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAA;AAEtD,MAAM,gBAAgB,GAAG,KAAK,CAAA;AAC9B,MAAM,gBAAgB,GAAG,IAAI,CAAA;AAQ7B,SAAS,mBAAmB,CAAC,UAAkB;IAC7C,wCAAwC;IACxC,8CAA8C;IAC9C,yCAAyC;IACzC,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,IAAI,EAAE,CAAA;IAC3C,MAAM,KAAK,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAA;IACxE,MAAM,OAAO,GAAG,KAAK;SAClB,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC;SACnB,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC;SACnB,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC;SACnB,OAAO,CAAC,iBAAiB,EAAE,EAAE,CAAC,CAAA;IACjC,OAAO,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAA;AAChD,CAAC;AAED,MAAM,UAAU,wBAAwB,CAAC,EAAE,SAAS,EAAE,SAAS,EAAE,GAAG,EAA4B;IAC9F,OAAO,IAAI,CAAC;QACV,WAAW,EACT,kZAAkZ;QACpZ,WAAW,EAAE,CAAC,CAAC,MAAM,CAAC;YACpB,UAAU,EAAE,CAAC;iBACV,MAAM,EAAE;iBACR,QAAQ,CACP,oKAAoK,CACrK;YACH,UAAU,EAAE,CAAC;iBACV,MAAM,EAAE;iBACR,QAAQ,CACP,8JAA8J,CAC/J;SACJ,CAAC;QACF,OAAO,EAAE,KAAK,EAAE,EAAE,UAAU,EAAE,UAAU,EAA8C,EAAE,EAAE;YACxF,OAAO,CAAC,GAAG,CAAC,uDAAuD,CAAC,CAAA;YACpE,OAAO,CAAC,GAAG,CAAC,2DAA2D,EAAE,SAAS,CAAC,CAAA;YACnF,OAAO,CAAC,GAAG,CAAC,4DAA4D,EAAE,UAAU,CAAC,CAAA;YACrF,MAAM,WAAW,GAAG,qBAAqB,CAAC,SAAS,CAAC,CAAA;YACpD,MAAM,iBAAiB,GAAG,GAAG,mBAAmB,CAAC,UAAU,CAAC,KAAK,CAAA;YACjE,MAAM,UAAU,GAAG,GAAG,WAAW,IAAI,iBAAiB,EAAE,CAAA;YAExD,OAAO,CAAC,GAAG,CAAC,+DAA+D,CAAC,CAAA;YAC5E,MAAM,+BAA+B,CAAC;gBACpC,GAAG;gBACH,SAAS;gBACT,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,UAAU;aACjB,CAAC,CAAA;YAEF,OAAO,CAAC,GAAG,CAAC,+DAA+D,CAAC,CAAA;YAC5E,MAAM,MAAM,GAAG,MAAM,4BAA4B,CAAC;gBAChD,GAAG;gBACH,SAAS;gBACT,GAAG,EAAE,QAAQ;gBACb,IAAI,EAAE,CAAC,UAAU,CAAC;aACnB,CAAC,CAAA;YAEF,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;YAClC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;YAClC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAA;YAEhC,MAAM,iBAAiB,GAAG,MAAM,CAAC,MAAM,GAAG,gBAAgB,CAAA;YAC1D,MAAM,iBAAiB,GAAG,MAAM,CAAC,MAAM,GAAG,gBAAgB,CAAA;YAE1D,MAAM,YAAY,GAAG,iBAAiB,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,CAAC,CAAC,MAAM,CAAA;YACnF,MAAM,YAAY,GAAG,iBAAiB,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,CAAC,CAAC,MAAM,CAAA;YAEnF,IAAI,QAAQ,KAAK,CAAC,EAAE,CAAC;gBACnB,OAAO,CAAC,GAAG,CAAC,gEAAgE,CAAC,CAAA;gBAC7E,OAAO;oBACL,OAAO,EAAE,KAAK;oBACd,QAAQ;oBACR,MAAM,EAAE,YAAY;oBACpB,MAAM,EAAE,YAAY;oBACpB,UAAU,EAAE,UAAU;oBACtB,KAAK,EAAE,gBAAgB;oBACvB,eAAe,EAAE,iBAAiB;oBAClC,eAAe,EAAE,iBAAiB;oBAClC,oBAAoB,EAAE,MAAM,CAAC,MAAM;oBACnC,oBAAoB,EAAE,MAAM,CAAC,MAAM;iBACpC,CAAA;YACH,CAAC;YAED,IAAI,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,CAAC,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC;gBACvF,OAAO,CAAC,GAAG,CAAC,qEAAqE,CAAC,CAAA;gBAClF,OAAO;oBACL,OAAO,EAAE,KAAK;oBACd,QAAQ;oBACR,MAAM,EAAE,YAAY;oBACpB,MAAM,EAAE,YAAY;oBACpB,UAAU,EAAE,UAAU;oBACtB,KAAK,EAAE,uBAAuB;oBAC9B,eAAe,EAAE,iBAAiB;oBAClC,eAAe,EAAE,iBAAiB;oBAClC,oBAAoB,EAAE,MAAM,CAAC,MAAM;oBACnC,oBAAoB,EAAE,MAAM,CAAC,MAAM;iBACpC,CAAA;YACH,CAAC;YAED,OAAO,CAAC,GAAG,CAAC,oDAAoD,CAAC,CAAA;YACjE,OAAO;gBACL,OAAO,EAAE,IAAI;gBACb,QAAQ;gBACR,MAAM,EAAE,YAAY;gBACpB,MAAM,EAAE,YAAY;gBACpB,UAAU,EAAE,UAAU;gBACtB,OAAO,EAAE,+BAA+B;gBACxC,eAAe,EAAE,iBAAiB;gBAClC,eAAe,EAAE,iBAAiB;gBAClC,oBAAoB,EAAE,MAAM,CAAC,MAAM;gBACnC,oBAAoB,EAAE,MAAM,CAAC,MAAM;aACpC,CAAA;QACH,CAAC;KACF,CAAC,CAAA;AACJ,CAAC","sourcesContent
":["import { tool } from \"ai\"\nimport { z } from \"zod/v4\"\nimport { runDatasetSandboxCommandStep, writeDatasetSandboxTextFileStep } from \"./sandbox/steps\"\nimport { getDatasetWorkstation } from \"./datasetFiles\"\n\nconst MAX_STDOUT_CHARS = 20000\nconst MAX_STDERR_CHARS = 5000\n\ninterface ExecuteCommandToolParams {\n datasetId: string\n sandboxId: string\n env?: any\n}\n\nfunction normalizeScriptName(scriptName: string): string {\n // Keep the AI-provided scriptName, but:\n // - strip a trailing \".py\" if it was included\n // - avoid extra dots/spaces in filenames\n const raw = String(scriptName ?? \"\").trim()\n const noExt = raw.toLowerCase().endsWith(\".py\") ? raw.slice(0, -3) : raw\n const cleaned = noExt\n .replace(/\\\\/g, \"_\")\n .replace(/\\//g, \"_\")\n .replace(/\\s+/g, \"_\")\n .replace(/\\./g, \"_\")\n .replace(/[^a-zA-Z0-9_-]/g, \"\")\n return cleaned.length > 0 ? cleaned : \"script\"\n}\n\nexport function createExecuteCommandTool({ datasetId, sandboxId, env }: ExecuteCommandToolParams) {\n return tool({\n description:\n \"Execute Python scripts in the sandbox. Always saves script to a file before executing. The tool's output is EXACTLY the script's stdout and includes the script file path for traceability. CRITICAL: Print concise, human-readable summaries only; do NOT print raw large data. For big results, write artifacts to files in the workstation and print their file paths. Always include progress/result prints.\",\n inputSchema: z.object({\n pythonCode: z\n .string()\n .describe(\n \"Python code to execute. Saved to a file before running. MANDATORY: Use print() to report progress and final results. Keep prints concise; avoid dumping rows/JSON.\",\n ),\n scriptName: z\n .string()\n .describe(\n \"Name for the script file in snake_case (e.g., 'inspect_file', 'parse_csv', 'generate_output'). The file will be saved as <scriptName>.py in the workstation.\",\n ),\n }),\n execute: async ({ pythonCode, scriptName }: { pythonCode: string; scriptName: string }) => {\n console.log(\"[ekairos/structure] executeCommand.tool execute begin\")\n console.log(\"[ekairos/structure] executeCommand.tool execute datasetId\", datasetId)\n console.log(\"[ekairos/structure] executeCommand.tool execute scriptName\", scriptName)\n const workstation = getDatasetWorkstation(datasetId)\n const scriptNameWithExt = `${normalizeScriptName(scriptName)}.py`\n const scriptFile = `${workstation}/${scriptNameWithExt}`\n\n console.log(\"[ekairos/structure] executeCommand.tool execute writingScript\")\n await writeDatasetSandboxTextFileStep({\n env,\n sandboxId,\n path: scriptFile,\n text: pythonCode,\n })\n\n console.log(\"[ekairos/structure] executeCommand.tool execute runningPython\")\n const result = await runDatasetSandboxCommandStep({\n env,\n sandboxId,\n cmd: \"python\",\n args: [scriptFile],\n })\n\n const stdout = result.stdout || \"\"\n const stderr = result.stderr || \"\"\n const exitCode = result.exitCode\n\n const isStdoutTruncated = stdout.length > MAX_STDOUT_CHARS\n const isStderrTruncated = stderr.length > MAX_STDERR_CHARS\n\n const stdoutCapped = isStdoutTruncated ? stdout.slice(0, MAX_STDOUT_CHARS) : stdout\n const stderrCapped = isStderrTruncated ? 
stderr.slice(0, MAX_STDERR_CHARS) : stderr\n\n if (exitCode !== 0) {\n console.log(\"[ekairos/structure] executeCommand.tool execute failedExitCode\")\n return {\n success: false,\n exitCode,\n stdout: stdoutCapped,\n stderr: stderrCapped,\n scriptPath: scriptFile,\n error: \"Command failed\",\n stdoutTruncated: isStdoutTruncated,\n stderrTruncated: isStderrTruncated,\n stdoutOriginalLength: stdout.length,\n stderrOriginalLength: stderr.length,\n }\n }\n\n if (stderr && (stderr.includes(\"Traceback\") || stderr.toLowerCase().includes(\"error\"))) {\n console.log(\"[ekairos/structure] executeCommand.tool execute pythonErrorDetected\")\n return {\n success: false,\n exitCode,\n stdout: stdoutCapped,\n stderr: stderrCapped,\n scriptPath: scriptFile,\n error: \"Python error detected\",\n stdoutTruncated: isStdoutTruncated,\n stderrTruncated: isStderrTruncated,\n stdoutOriginalLength: stdout.length,\n stderrOriginalLength: stderr.length,\n }\n }\n\n console.log(\"[ekairos/structure] executeCommand.tool execute ok\")\n return {\n success: true,\n exitCode,\n stdout: stdoutCapped,\n stderr: stderrCapped,\n scriptPath: scriptFile,\n message: \"Command executed successfully\",\n stdoutTruncated: isStdoutTruncated,\n stderrTruncated: isStderrTruncated,\n stdoutOriginalLength: stdout.length,\n stderrOriginalLength: stderr.length,\n }\n },\n })\n}\n\n"]}
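
createExecuteCommandTool is an AI SDK tool factory: the model supplies pythonCode and scriptName, the tool writes the normalized script into the dataset workstation, runs it with python, and returns capped stdout/stderr plus truncation bookkeeping. A hedged wiring sketch follows; the import paths, model, ids, and env are placeholders, and this diff does not show how structure.js actually registers the tool:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // any AI SDK provider; assumed here
import { createExecuteCommandTool } from "@ekairos/structure/dist/executeCommand.tool";

async function runInspection(env: unknown) {
    const datasetId = "ds_123";  // placeholder
    const sandboxId = "sbx_456"; // placeholder
    return generateText({
        model: openai("gpt-4o"),
        tools: { executeCommand: createExecuteCommandTool({ datasetId, sandboxId, env }) },
        prompt: "Inspect the source files in the workstation and print a short column summary.",
    });
}

Script names are sanitized before the file is written: "parse csv.py" becomes parse_csv.py, and "../etc/passwd" becomes ___etc_passwd.py inside /vercel/sandbox/datasets/ds_123.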

package/dist/file/steps.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"steps.d.ts","sourceRoot":"","sources":["../../src/file/steps.ts"],"names":[],"mappings":"AAGA,wBAAsB,mBAAmB,CAAC,MAAM,EAAE;IAChD,GAAG,EAAE,GAAG,CAAA;IACR,MAAM,EAAE,MAAM,CAAA;CACf,GAAG,OAAO,CAAC;IAAE,aAAa,EAAE,MAAM,CAAC;IAAC,kBAAkB,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,CAgBlE"}

package/dist/file/steps.js
ADDED

@@ -0,0 +1,20 @@
+import { resolveStoryRuntime } from "@ekairos/story/runtime";
+import { DatasetService } from "../service";
+export async function readInstantFileStep(params) {
+    "use step";
+    const runtime = (await resolveStoryRuntime(params.env));
+    const db = runtime.db;
+    const service = new DatasetService(db);
+    const file = await service.getFileById(params.fileId);
+    const fileRow = file?.$files?.[0];
+    const url = fileRow?.url;
+    if (!url) {
+        throw new Error("File not found or URL missing");
+    }
+    const fileBuffer = await fetch(url).then((response) => response.arrayBuffer());
+    return {
+        contentBase64: Buffer.from(fileBuffer).toString("base64"),
+        contentDisposition: fileRow["content-disposition"],
+    };
+}
+//# sourceMappingURL=steps.js.map

package/dist/file/steps.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"steps.js","sourceRoot":"","sources":["../../src/file/steps.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAA;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,YAAY,CAAA;AAE3C,MAAM,CAAC,KAAK,UAAU,mBAAmB,CAAC,MAGzC;IACC,UAAU,CAAA;IACV,MAAM,OAAO,GAAG,CAAC,MAAM,mBAAmB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAoB,CAAA;IAC1E,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAA;IACrB,MAAM,OAAO,GAAG,IAAI,cAAc,CAAC,EAAE,CAAC,CAAA;IACtC,MAAM,IAAI,GAAG,MAAM,OAAO,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;IACrD,MAAM,OAAO,GAAI,IAAuE,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;IACrG,MAAM,GAAG,GAAG,OAAO,EAAE,GAAG,CAAA;IACxB,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;IAClD,CAAC;IACD,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IAC9E,OAAO;QACL,aAAa,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QACzD,kBAAkB,EAAG,OAAmC,CAAC,qBAAqB,CAAuB;KACtG,CAAA;AACH,CAAC","sourcesContent":["import { resolveStoryRuntime } from \"@ekairos/story/runtime\"\nimport { DatasetService } from \"../service\"\n\nexport async function readInstantFileStep(params: {\n env: any\n fileId: string\n}): Promise<{ contentBase64: string; contentDisposition?: string }> {\n \"use step\"\n const runtime = (await resolveStoryRuntime(params.env)) as { db: unknown }\n const db = runtime.db\n const service = new DatasetService(db)\n const file = await service.getFileById(params.fileId)\n const fileRow = (file as { $files?: Array<{ url?: string } & Record<string, unknown>> })?.$files?.[0]\n const url = fileRow?.url\n if (!url) {\n throw new Error(\"File not found or URL missing\")\n }\n const fileBuffer = await fetch(url).then((response) => response.arrayBuffer())\n return {\n contentBase64: Buffer.from(fileBuffer).toString(\"base64\"),\n contentDisposition: (fileRow as Record<string, unknown>)[\"content-disposition\"] as string | undefined,\n }\n}\n\n"]}
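
Unlike the dataset steps, readInstantFileStep throws on a missing file or URL rather than returning { ok: false }; callers get the content as base64 plus the stored content-disposition. A small decoding sketch (env and the file id are placeholders, and the import path is assumed):

import { readInstantFileStep } from "@ekairos/structure/dist/file/steps";

async function readFileAsText(env: unknown, fileId: string): Promise<string> {
    const { contentBase64, contentDisposition } = await readInstantFileStep({ env, fileId });
    console.log("decoded", contentDisposition ?? "file with no content-disposition");
    return Buffer.from(contentBase64, "base64").toString("utf8");
}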

package/dist/generateSchema.tool.d.ts
ADDED

@@ -0,0 +1,22 @@
+interface GenerateSchemaToolParams {
+    datasetId: string;
+    sandboxId: string;
+    env?: any;
+}
+export declare function createGenerateSchemaTool({ datasetId, sandboxId, env }: GenerateSchemaToolParams): import("ai").Tool<{
+    schemaTitle: string;
+    schemaDescription: string;
+    schemaJson: string;
+}, {
+    success: boolean;
+    error: string;
+    message?: undefined;
+    schemaPath?: undefined;
+} | {
+    success: boolean;
+    message: string;
+    schemaPath: string;
+    error?: undefined;
+}>;
+export {};
+//# sourceMappingURL=generateSchema.tool.d.ts.map

package/dist/generateSchema.tool.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"generateSchema.tool.d.ts","sourceRoot":"","sources":["../src/generateSchema.tool.ts"],"names":[],"mappings":"AAKA,UAAU,wBAAwB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,GAAG,CAAC,EAAE,GAAG,CAAA;CACV;AAED,wBAAgB,wBAAwB,CAAC,EAAE,SAAS,EAAE,SAAS,EAAE,GAAG,EAAE,EAAE,wBAAwB;;;;;;;;;;;;;;GAwD/F"}

package/dist/generateSchema.tool.js
ADDED

@@ -0,0 +1,61 @@
+import { tool } from "ai";
+import { z } from "zod/v4";
+import { writeDatasetSandboxTextFileStep } from "./sandbox/steps";
+import { getDatasetOutputSchemaPath } from "./datasetFiles";
+export function createGenerateSchemaTool({ datasetId, sandboxId, env }) {
+    return tool({
+        description: "Write the output JSON Schema to the sandbox so the story loop can lift it into context state. IMPORTANT: Field names must be lowerCamelCase. For rows output, schema should describe a single record. For object output, schema should describe the whole object.",
+        inputSchema: z.object({
+            schemaTitle: z.string().describe("Short schema title"),
+            schemaDescription: z.string().describe("Short schema description"),
+            schemaJson: z.string().describe("A JSON Schema string"),
+        }),
+        execute: async ({ schemaTitle, schemaDescription, schemaJson }) => {
+            console.log("[ekairos/structure] generateSchema.tool execute begin");
+            console.log("[ekairos/structure] generateSchema.tool execute datasetId", datasetId);
+            console.log("[ekairos/structure] generateSchema.tool execute sandboxId", sandboxId);
+            console.log("[ekairos/structure] generateSchema.tool execute schemaTitle", schemaTitle);
+            console.log("[ekairos/structure] generateSchema.tool execute schemaJsonLength", String(schemaJson ?? "").length);
+            const schemaData = {
+                title: schemaTitle,
+                description: schemaDescription,
+                schema: (() => {
+                    try {
+                        return JSON.parse(schemaJson);
+                    }
+                    catch (e) {
+                        const message = e instanceof Error ? e.message : String(e);
+                        console.log("[ekairos/structure] generateSchema.tool execute invalidSchemaJson");
+                        console.log("[ekairos/structure] generateSchema.tool execute invalidSchemaJsonMessage", message);
+                        console.log("[ekairos/structure] generateSchema.tool execute invalidSchemaJsonPreview", String(schemaJson ?? "").slice(0, 300));
+                        throw e;
+                    }
+                })(),
+            };
+            const schemaPath = getDatasetOutputSchemaPath(datasetId);
+            try {
+                console.log("[ekairos/structure] generateSchema.tool execute writingSchemaPath", schemaPath);
+                await writeDatasetSandboxTextFileStep({
+                    env,
+                    sandboxId,
+                    path: schemaPath,
+                    text: JSON.stringify(schemaData, null, 2),
+                });
+            }
+            catch (e) {
+                console.log("[ekairos/structure] generateSchema.tool execute failed");
+                const message = e instanceof Error ? e.message : String(e);
+                console.log("[ekairos/structure] generateSchema.tool execute failedMessage", message);
+                if (e instanceof Error && e.stack) {
+                    console.log("[ekairos/structure] generateSchema.tool execute failedStack", e.stack);
+                }
+                console.log("[ekairos/structure] generateSchema.tool execute failedSchemaPath", schemaPath);
+                console.log("[ekairos/structure] generateSchema.tool execute failedHasEnv", Boolean(env));
+                return { success: false, error: message };
+            }
+            console.log("[ekairos/structure] generateSchema.tool execute ok");
+            return { success: true, message: "Schema written", schemaPath };
+        },
+    });
+}
+//# sourceMappingURL=generateSchema.tool.js.map

package/dist/generateSchema.tool.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"generateSchema.tool.js","sourceRoot":"","sources":["../src/generateSchema.tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,IAAI,CAAA;AACzB,OAAO,EAAE,CAAC,EAAE,MAAM,QAAQ,CAAA;AAC1B,OAAO,EAAE,+BAA+B,EAAE,MAAM,iBAAiB,CAAA;AACjE,OAAO,EAAE,0BAA0B,EAAE,MAAM,gBAAgB,CAAA;AAQ3D,MAAM,UAAU,wBAAwB,CAAC,EAAE,SAAS,EAAE,SAAS,EAAE,GAAG,EAA4B;IAC9F,OAAO,IAAI,CAAC;QACV,WAAW,EACT,mQAAmQ;QACrQ,WAAW,EAAE,CAAC,CAAC,MAAM,CAAC;YACpB,WAAW,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,oBAAoB,CAAC;YACtD,iBAAiB,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,0BAA0B,CAAC;YAClE,UAAU,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,sBAAsB,CAAC;SACxD,CAAC;QACF,OAAO,EAAE,KAAK,EAAE,EAAE,WAAW,EAAE,iBAAiB,EAAE,UAAU,EAA0E,EAAE,EAAE;YACxI,OAAO,CAAC,GAAG,CAAC,uDAAuD,CAAC,CAAA;YACpE,OAAO,CAAC,GAAG,CAAC,2DAA2D,EAAE,SAAS,CAAC,CAAA;YACnF,OAAO,CAAC,GAAG,CAAC,2DAA2D,EAAE,SAAS,CAAC,CAAA;YACnF,OAAO,CAAC,GAAG,CAAC,6DAA6D,EAAE,WAAW,CAAC,CAAA;YACvF,OAAO,CAAC,GAAG,CAAC,kEAAkE,EAAE,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAA;YAChH,MAAM,UAAU,GAAG;gBACjB,KAAK,EAAE,WAAW;gBAClB,WAAW,EAAE,iBAAiB;gBAC9B,MAAM,EAAE,CAAC,GAAG,EAAE;oBACZ,IAAI,CAAC;wBACH,OAAO,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA;oBAC/B,CAAC;oBAAC,OAAO,CAAC,EAAE,CAAC;wBACX,MAAM,OAAO,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;wBAC1D,OAAO,CAAC,GAAG,CAAC,mEAAmE,CAAC,CAAA;wBAChF,OAAO,CAAC,GAAG,CAAC,0EAA0E,EAAE,OAAO,CAAC,CAAA;wBAChG,OAAO,CAAC,GAAG,CAAC,0EAA0E,EAAE,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;wBAC/H,MAAM,CAAC,CAAA;oBACT,CAAC;gBACH,CAAC,CAAC,EAAE;aACL,CAAA;YAED,MAAM,UAAU,GAAG,0BAA0B,CAAC,SAAS,CAAC,CAAA;YACxD,IAAI,CAAC;gBACH,OAAO,CAAC,GAAG,CAAC,mEAAmE,EAAE,UAAU,CAAC,CAAA;gBAC5F,MAAM,+BAA+B,CAAC;oBACpC,GAAG;oBACH,SAAS;oBACT,IAAI,EAAE,UAAU;oBAChB,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,IAAI,EAAE,CAAC,CAAC;iBAC1C,CAAC,CAAA;YACJ,CAAC;YAAC,OAAO,CAAC,EAAE,CAAC;gBACX,OAAO,CAAC,GAAG,CAAC,wDAAwD,CAAC,CAAA;gBACrE,MAAM,OAAO,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;gBAC1D,OAAO,CAAC,GAAG,CAAC,+DAA+D,EAAE,OAAO,CAAC,CAAA;gBACrF,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,CAAC,KAAK,EAAE,CAAC;oBAClC,OAAO,CAAC,GAAG,CAAC,6DAA6D,EAAE,CAAC,CAAC,KAAK,CAAC,CAAA;gBACrF,CAAC;gBACD,OAAO,CAAC,GAAG,CAAC,kEAAkE,EAAE,UAAU,CAAC,CAAA;gBAC3F,OAAO,CAAC,GAAG,CAAC,8DAA8D,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAA;gBACzF,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAA;YAC3C,CAAC;YAED,OAAO,CAAC,GAAG,CAAC,oDAAoD,CAAC,CAAA;YACjE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,CAAA;QACjE,CAAC;KACF,CAAC,CAAA;AACJ,CAAC","sourcesContent":["import { tool } from \"ai\"\nimport { z } from \"zod/v4\"\nimport { writeDatasetSandboxTextFileStep } from \"./sandbox/steps\"\nimport { getDatasetOutputSchemaPath } from \"./datasetFiles\"\n\ninterface GenerateSchemaToolParams {\n datasetId: string\n sandboxId: string\n env?: any\n}\n\nexport function createGenerateSchemaTool({ datasetId, sandboxId, env }: GenerateSchemaToolParams) {\n return tool({\n description:\n \"Write the output JSON Schema to the sandbox so the story loop can lift it into context state. IMPORTANT: Field names must be lowerCamelCase. For rows output, schema should describe a single record. 
For object output, schema should describe the whole object.\",\n inputSchema: z.object({\n schemaTitle: z.string().describe(\"Short schema title\"),\n schemaDescription: z.string().describe(\"Short schema description\"),\n schemaJson: z.string().describe(\"A JSON Schema string\"),\n }),\n execute: async ({ schemaTitle, schemaDescription, schemaJson }: { schemaTitle: string; schemaDescription: string; schemaJson: string }) => {\n console.log(\"[ekairos/structure] generateSchema.tool execute begin\")\n console.log(\"[ekairos/structure] generateSchema.tool execute datasetId\", datasetId)\n console.log(\"[ekairos/structure] generateSchema.tool execute sandboxId\", sandboxId)\n console.log(\"[ekairos/structure] generateSchema.tool execute schemaTitle\", schemaTitle)\n console.log(\"[ekairos/structure] generateSchema.tool execute schemaJsonLength\", String(schemaJson ?? \"\").length)\n const schemaData = {\n title: schemaTitle,\n description: schemaDescription,\n schema: (() => {\n try {\n return JSON.parse(schemaJson)\n } catch (e) {\n const message = e instanceof Error ? e.message : String(e)\n console.log(\"[ekairos/structure] generateSchema.tool execute invalidSchemaJson\")\n console.log(\"[ekairos/structure] generateSchema.tool execute invalidSchemaJsonMessage\", message)\n console.log(\"[ekairos/structure] generateSchema.tool execute invalidSchemaJsonPreview\", String(schemaJson ?? \"\").slice(0, 300))\n throw e\n }\n })(),\n }\n\n const schemaPath = getDatasetOutputSchemaPath(datasetId)\n try {\n console.log(\"[ekairos/structure] generateSchema.tool execute writingSchemaPath\", schemaPath)\n await writeDatasetSandboxTextFileStep({\n env,\n sandboxId,\n path: schemaPath,\n text: JSON.stringify(schemaData, null, 2),\n })\n } catch (e) {\n console.log(\"[ekairos/structure] generateSchema.tool execute failed\")\n const message = e instanceof Error ? e.message : String(e)\n console.log(\"[ekairos/structure] generateSchema.tool execute failedMessage\", message)\n if (e instanceof Error && e.stack) {\n console.log(\"[ekairos/structure] generateSchema.tool execute failedStack\", e.stack)\n }\n console.log(\"[ekairos/structure] generateSchema.tool execute failedSchemaPath\", schemaPath)\n console.log(\"[ekairos/structure] generateSchema.tool execute failedHasEnv\", Boolean(env))\n return { success: false, error: message }\n }\n\n console.log(\"[ekairos/structure] generateSchema.tool execute ok\")\n return { success: true, message: \"Schema written\", schemaPath }\n },\n })\n}\n\n"]}
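
The schemaJson argument is parsed with JSON.parse and wrapped as { title, description, schema } before being written to <workstation>/output_schema.json, so it must arrive as a valid JSON Schema string. An illustrative payload for rows output: one schema describing a single record, with lowerCamelCase field names (the fields themselves are invented for the example):

const schemaJson = JSON.stringify({
    type: "object",
    properties: {
        companyName: { type: "string" },
        employeeCount: { type: "integer" },
        foundedYear: { type: "integer" },
    },
    required: ["companyName"],
    additionalProperties: false,
});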

package/dist/index.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,UAAU,CAAA;AACxB,cAAc,WAAW,CAAA;AACzB,cAAc,gBAAgB,CAAA"}

package/dist/index.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,UAAU,CAAA;AACxB,cAAc,WAAW,CAAA;AACzB,cAAc,gBAAgB,CAAA","sourcesContent":["export * from \"./structure\"\nexport * from \"./schema\"\nexport * from \"./service\"\nexport * from \"./datasetFiles\"\n\n"]}

package/dist/prompts.d.ts
ADDED

@@ -0,0 +1,18 @@
+export type PreparedSource = {
+    kind: "file" | "dataset" | "text";
+    id: string;
+    path: string;
+    name?: string;
+    mimeType?: string;
+};
+export type StructurePromptContext = {
+    datasetId: string;
+    mode: "auto" | "schema";
+    output: "rows" | "object";
+    outputSchema?: any;
+    sources: PreparedSource[];
+    workstation: string;
+    outputPath: string;
+};
+export declare function buildStructurePrompt(ctx: StructurePromptContext): string;
+//# sourceMappingURL=prompts.d.ts.map

package/dist/prompts.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"prompts.d.ts","sourceRoot":"","sources":["../src/prompts.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,CAAA;IACjC,EAAE,EAAE,MAAM,CAAA;IACV,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,CAAC,EAAE,MAAM,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,sBAAsB,GAAG;IACnC,SAAS,EAAE,MAAM,CAAA;IACjB,IAAI,EAAE,MAAM,GAAG,QAAQ,CAAA;IACvB,MAAM,EAAE,MAAM,GAAG,QAAQ,CAAA;IACzB,YAAY,CAAC,EAAE,GAAG,CAAA;IAClB,OAAO,EAAE,cAAc,EAAE,CAAA;IACzB,WAAW,EAAE,MAAM,CAAA;IACnB,UAAU,EAAE,MAAM,CAAA;CACnB,CAAA;AAED,wBAAgB,oBAAoB,CAAC,GAAG,EAAE,sBAAsB,GAAG,MAAM,CA4DxE"}
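
prompts.d.ts pins down the inputs buildStructurePrompt needs; the prompt text itself lives in prompts.js, which this diff does not expand. A hedged sketch of assembling the context (the import paths and every value are illustrative):

import { buildStructurePrompt, type StructurePromptContext } from "@ekairos/structure/dist/prompts";
import { getDatasetWorkstation, getDatasetOutputPath } from "@ekairos/structure";

const datasetId = "ds_123"; // placeholder
const ctx: StructurePromptContext = {
    datasetId,
    mode: "auto",
    output: "rows",
    sources: [
        // where a source file was staged in the sandbox; this location is assumed
        { kind: "file", id: "file_1", path: `${getDatasetWorkstation(datasetId)}/sources/input.csv`, name: "input.csv", mimeType: "text/csv" },
    ],
    workstation: getDatasetWorkstation(datasetId),
    outputPath: getDatasetOutputPath(datasetId),
};
const prompt = buildStructurePrompt(ctx);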