@ekairos/dataset 1.21.43-beta.0 → 1.21.53-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clearDataset.tool.d.ts +3 -5
- package/dist/clearDataset.tool.d.ts.map +1 -1
- package/dist/clearDataset.tool.js +8 -5
- package/dist/clearDataset.tool.js.map +1 -1
- package/dist/completeDataset.tool.d.ts +3 -5
- package/dist/completeDataset.tool.d.ts.map +1 -1
- package/dist/completeDataset.tool.js +22 -68
- package/dist/completeDataset.tool.js.map +1 -1
- package/dist/dataset/steps.d.ts +39 -0
- package/dist/dataset/steps.d.ts.map +1 -0
- package/dist/dataset/steps.js +77 -0
- package/dist/dataset/steps.js.map +1 -0
- package/dist/executeCommand.tool.d.ts +3 -5
- package/dist/executeCommand.tool.d.ts.map +1 -1
- package/dist/executeCommand.tool.js +17 -10
- package/dist/executeCommand.tool.js.map +1 -1
- package/dist/file/file-dataset.agent.d.ts +35 -36
- package/dist/file/file-dataset.agent.d.ts.map +1 -1
- package/dist/file/file-dataset.agent.js +141 -191
- package/dist/file/file-dataset.agent.js.map +1 -1
- package/dist/file/filepreview.d.ts +2 -3
- package/dist/file/filepreview.d.ts.map +1 -1
- package/dist/file/filepreview.js +36 -27
- package/dist/file/filepreview.js.map +1 -1
- package/dist/file/generateSchema.tool.d.ts +2 -5
- package/dist/file/generateSchema.tool.d.ts.map +1 -1
- package/dist/file/generateSchema.tool.js +4 -2
- package/dist/file/generateSchema.tool.js.map +1 -1
- package/dist/file/prompts.d.ts +2 -2
- package/dist/file/prompts.d.ts.map +1 -1
- package/dist/file/prompts.js.map +1 -1
- package/dist/file/steps.d.ts +9 -0
- package/dist/file/steps.d.ts.map +1 -0
- package/dist/file/steps.js +23 -0
- package/dist/file/steps.js.map +1 -0
- package/dist/sandbox/steps.d.ts +47 -0
- package/dist/sandbox/steps.d.ts.map +1 -0
- package/dist/sandbox/steps.js +94 -0
- package/dist/sandbox/steps.js.map +1 -0
- package/dist/sandbox.steps.d.ts +2 -0
- package/dist/sandbox.steps.d.ts.map +1 -0
- package/dist/sandbox.steps.js +18 -0
- package/dist/sandbox.steps.js.map +1 -0
- package/dist/schema.d.ts +5 -3
- package/dist/schema.d.ts.map +1 -1
- package/dist/schema.js +7 -3
- package/dist/schema.js.map +1 -1
- package/dist/service.d.ts +2 -4
- package/dist/service.d.ts.map +1 -1
- package/dist/service.js +2 -29
- package/dist/service.js.map +1 -1
- package/dist/transform/filepreview.d.ts +1 -2
- package/dist/transform/filepreview.d.ts.map +1 -1
- package/dist/transform/filepreview.js +61 -54
- package/dist/transform/filepreview.js.map +1 -1
- package/dist/transform/transform-dataset.agent.d.ts +19 -37
- package/dist/transform/transform-dataset.agent.d.ts.map +1 -1
- package/dist/transform/transform-dataset.agent.js +116 -224
- package/dist/transform/transform-dataset.agent.js.map +1 -1
- package/package.json +4 -4
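
The headline change: the `TransformDatasetAgent` class and the `createTransformDatasetAgent` factory are removed, replaced by a story-based `createTransformDatasetStory` factory that returns `{ datasetId, story, transform }`, where `transform` drives a run end to end. A minimal consumer-side sketch, inferred from the compiled dist output in the diff below — the root export path, the `env` shape, and all types are assumptions, not published typings:

// Sketch only: surface inferred from dist/transform/transform-dataset.agent.js.
// Assumptions: createTransformDatasetStory is re-exported from the package root,
// and `env` is whatever runtime environment @ekairos/story expects.
import { createTransformDatasetStory } from "@ekairos/dataset";

declare const env: Record<string, unknown>; // runtime bindings (assumption)

const result = createTransformDatasetStory({
  sourceDatasetIds: ["<source-dataset-id>"],         // one or more dataset ids
  outputSchema: { type: "object" },                  // schema the output rows must match
  instructions: "Keep only rows with status=active", // optional, prepended as USER INSTRUCTIONS
  // optional: datasetId, sandboxId, model (defaults to "openai/gpt-5")
});

// transform() sends a single trigger event and lets the story loop until the
// completeDataset tool has executed (see shouldContinue in the diff below).
const { datasetId } = await result.transform(env);
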
package/dist/transform/transform-dataset.agent.js
CHANGED

@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.…
-exports.createTransformDatasetAgent = createTransformDatasetAgent;
+exports.createTransformDatasetStory = createTransformDatasetStory;
 const story_1 = require("@ekairos/story");
 const completeDataset_tool_1 = require("../completeDataset.tool");
 const executeCommand_tool_1 = require("../executeCommand.tool");
@@ -9,265 +8,158 @@ const clearDataset_tool_1 = require("../clearDataset.tool");
 const prompts_1 = require("./prompts");
 const datasetFiles_1 = require("../datasetFiles");
 const admin_1 = require("@instantdb/admin");
-const service_1 = require("../service");
 const filepreview_1 = require("./filepreview");
-…
+const steps_1 = require("../dataset/steps");
+const steps_2 = require("../sandbox/steps");
+async function ensureSourcesInSandbox(env, sandboxId, datasetId, sourceDatasetIds, state) {
     if (state.initialized) {
         return { sourcePaths: state.sourcePaths, outputPath: (0, datasetFiles_1.getDatasetOutputPath)(datasetId) };
     }
     const workstation = (0, datasetFiles_1.getDatasetWorkstation)(datasetId);
-    await sandbox.runCommand({
-        cmd: "mkdir",
-        args: ["-p", workstation],
-    });
+    await (0, steps_2.runDatasetSandboxCommandStep)({ env, sandboxId, cmd: "mkdir", args: ["-p", workstation] });
     const sourcePaths = [];
     for (const sourceDatasetId of sourceDatasetIds) {
-        const existingSourcePath = (0, datasetFiles_1.getDatasetOutputPath)(sourceDatasetId);
-        const sourceExists = await sandbox.runCommand({
-            cmd: "test",
-            args: ["-f", existingSourcePath],
-        });
-        if (sourceExists.exitCode === 0) {
-            sourcePaths.push({ datasetId: sourceDatasetId, path: existingSourcePath });
-            continue;
-        }
-        const storagePath = `/dataset/${sourceDatasetId}/output.jsonl`;
-        const fileQuery = await db.query({
-            $files: {
-                $: {
-                    where: { path: storagePath },
-                    limit: 1,
-                },
-            },
-        });
-        const fileRecord = Array.isArray(fileQuery.$files) ? fileQuery.$files[0] : undefined;
-        if (!fileRecord || !fileRecord.url) {
-            throw new Error(`Source dataset output not found for datasetId=${sourceDatasetId}`);
-        }
-        const fileBuffer = await fetch(fileRecord.url).then((r) => r.arrayBuffer());
         const sourcePath = `${workstation}/source_${sourceDatasetId}.jsonl`;
-        await …
-        …
-        },
-        …
+        const source = await (0, steps_1.datasetReadOutputJsonlStep)({ env, datasetId: sourceDatasetId });
+        await (0, steps_2.writeDatasetSandboxFilesStep)({
+            env,
+            sandboxId,
+            files: [{ path: sourcePath, contentBase64: source.contentBase64 }],
+        });
        sourcePaths.push({ datasetId: sourceDatasetId, path: sourcePath });
     }
     state.sourcePaths = sourcePaths;
     state.initialized = true;
     return { sourcePaths, outputPath: (0, datasetFiles_1.getDatasetOutputPath)(datasetId) };
 }
-…
-        return previous;
+function createTransformDatasetStoryDefinition(params) {
+    const datasetId = params.datasetId ?? (0, admin_1.id)();
+    const model = params.model ?? "openai/gpt-5";
+    const story = (0, story_1.createStory)("dataset.transform")
+        .context(async (stored, env) => {
+        const previous = stored?.content ?? {};
+        const sandboxState = previous?.sandboxState ?? { initialized: false, sourcePaths: [] };
+        const existingSandboxId = previous?.sandboxId ?? params.sandboxId ?? "";
+        let sandboxId = existingSandboxId;
+        if (!sandboxId) {
+            const created = await (0, steps_2.createDatasetSandboxStep)({ env, runtime: "python3.13", timeoutMs: 10 * 60 * 1000 });
+            sandboxId = created.sandboxId;
         }
-        const { sourcePaths, outputPath } = await ensureSourcesInSandbox(…
+        const { sourcePaths, outputPath } = await ensureSourcesInSandbox(env, sandboxId, datasetId, params.sourceDatasetIds, sandboxState);
         const sourcePreviews = [];
-        for (const …
+        for (const sp of sourcePaths) {
             try {
-                const preview = await (0, filepreview_1.generateSourcePreview)(…
-                sourcePreviews.push({ datasetId: …
+                const preview = await (0, filepreview_1.generateSourcePreview)(env, sandboxId, sp.path, datasetId);
+                sourcePreviews.push({ datasetId: sp.datasetId, preview });
             }
-            catch …
-            …
+            catch {
+                // optional
             }
         }
-        …
-            schema: outputSchema,
-            status: "schema_complete",
-        });
-        }
-        catch (error) {
-            const message = error instanceof Error ? error.message : String(error);
-            console.error(`[TransformDatasetAgent ${datasetId}] Failed to persist output schema:`, message);
-        }
-        return {
+        // Persist output schema on the dataset record (so completeDataset validates against it)
+        await (0, steps_1.datasetUpdateSchemaStep)({
+            env,
             datasetId,
-            …
+            schema: params.outputSchema,
+            status: "schema_complete",
+        });
+        const promptContext = {
+            datasetId,
+            sourceDatasetIds: params.sourceDatasetIds,
+            outputSchema: params.outputSchema,
+            sandboxConfig: { sourcePaths, outputPath },
             sourcePreviews: sourcePreviews.length > 0 ? sourcePreviews : undefined,
             errors: [],
-            iterationCount: 0,
-            instructions,
         };
-        …
-        const …
-        …
+        const basePrompt = (0, prompts_1.buildTransformDatasetPrompt)(promptContext);
+        const userInstructions = String(params.instructions ?? "").trim();
+        const system = userInstructions
+            ? [
+                "## USER INSTRUCTIONS",
+                "The following instructions were provided by the user. Apply them in addition to (and with higher priority than) the default instructions.",
+                "",
+                userInstructions,
+                "",
+                basePrompt,
+            ].join("\n")
+            : basePrompt;
+        return {
+            ...previous,
+            datasetId,
+            sandboxId,
+            sandboxState,
+            system,
+            sandboxConfig: { sourcePaths, outputPath },
         };
-        let basePrompt = (0, prompts_1.buildTransformDatasetPrompt)(promptContext);
-        if (ctx.instructions) {
-            basePrompt += `\n\n## ADDITIONAL CONTEXT AND INSTRUCTIONS\n\n${ctx.instructions}`;
-        }
-        return basePrompt;
     })
-        .…
-        …
+        .narrative(async (stored) => {
+        return String(stored?.content?.system ?? "");
+    })
+        .actions(async (stored, env) => {
+        const sandboxId = stored?.content?.sandboxId ?? params.sandboxId ?? "";
         return {
             executeCommand: (0, executeCommand_tool_1.createExecuteCommandTool)({
-                …
+                datasetId,
+                sandboxId,
+                env,
             }),
             completeDataset: (0, completeDataset_tool_1.createCompleteDatasetTool)({
-                …
+                datasetId,
+                sandboxId,
+                env,
             }),
             clearDataset: (0, clearDataset_tool_1.createClearDatasetTool)({
-                …
+                datasetId,
+                sandboxId,
+                env,
             }),
         };
     })
-        .…
-        .…
+        .shouldContinue(({ reactionEvent }) => {
+        return !(0, story_1.didToolExecute)(reactionEvent, "completeDataset");
+    })
+        .model(model)
+        .build();
+    return { datasetId, story };
+}
+function createTransformDatasetStory(params) {
+    const { datasetId, story } = createTransformDatasetStoryDefinition({
+        sourceDatasetIds: params.sourceDatasetIds,
+        outputSchema: params.outputSchema,
+        instructions: params.instructions,
+        datasetId: params.datasetId,
+        model: params.model,
+        sandboxId: params.sandboxId,
+    });
     return {
-        build: () => builder.build(),
         datasetId,
-        …
+        async transform(env, prompt) {
+            const datasetCountText = params.sourceDatasetIds.length === 1
+                ? "the source dataset"
+                : `${params.sourceDatasetIds.length} source datasets`;
+            const triggerEvent = {
+                id: (0, admin_1.id)(),
+                type: story_1.USER_MESSAGE_TYPE,
+                channel: story_1.WEB_CHANNEL,
+                createdAt: new Date().toISOString(),
+                content: {
+                    parts: [
+                        {
+                            type: "text",
+                            text: prompt ??
+                                `Transform ${datasetCountText} into a new dataset matching the provided output schema`,
+                        },
+                    ],
+                },
+            };
+            await story.react(triggerEvent, {
+                env,
+                context: { key: `dataset:${datasetId}` },
+                options: { silent: true, preventClose: true, sendFinish: false, maxIterations: 20, maxModelSteps: 5 },
+            });
+            return { datasetId };
+        },
+        story,
     };
 }
-class TransformDatasetAgent {
-    constructor(params) {
-        this.sourceDatasetIds = Array.isArray(params.sourceDatasetIds) ? params.sourceDatasetIds : [params.sourceDatasetIds];
-        this.outputSchema = params.outputSchema;
-        this.sandbox = params.sandbox;
-        this.db = params.db;
-        this.service = new service_1.DatasetService(params.db);
-        this.agentService = new story_1.AgentService(params.db);
-        this.instructions = params.instructions;
-    }
-    async getDataset() {
-        const agentResult = createTransformDatasetAgent({
-            sourceDatasetIds: this.sourceDatasetIds,
-            outputSchema: this.outputSchema,
-            sandbox: this.sandbox,
-            instructions: this.instructions,
-            db: this.db,
-        });
-        const agent = agentResult.build();
-        const datasetId = agentResult.datasetId;
-        const service = agentResult.service;
-        const datasetCountText = this.sourceDatasetIds.length === 1
-            ? "the source dataset"
-            : `${this.sourceDatasetIds.length} source datasets`;
-        const userEvent = {
-            id: (0, admin_1.id)(),
-            type: story_1.USER_MESSAGE_TYPE,
-            channel: story_1.WEB_CHANNEL,
-            content: {
-                parts: [
-                    {
-                        type: "text",
-                        text: `Transform ${datasetCountText} into a new dataset matching the provided output schema`,
-                    },
-                ],
-            },
-            createdAt: new Date().toISOString(),
-        };
-        const reaction = await agent.progressStream(userEvent, null);
-        const stream = reaction.stream;
-        const streamResult = await this.agentService.readEventStream(stream);
-        if (streamResult.persistedEvent?.status !== "completed") {
-            throw new Error(`Dataset transformation failed with status: ${streamResult.persistedEvent?.status}`);
-        }
-        const datasetResult = await service.getDatasetById(datasetId);
-        if (!datasetResult.ok) {
-            throw new Error(datasetResult.error);
-        }
-        const dataset = datasetResult.data;
-        return {
-            id: dataset.id,
-            status: dataset.status,
-            title: dataset.title,
-            schema: dataset.schema,
-            analysis: dataset.analysis,
-            calculatedTotalRows: dataset.calculatedTotalRows,
-            actualGeneratedRowCount: dataset.actualGeneratedRowCount,
-            createdAt: dataset.createdAt,
-            updatedAt: dataset.updatedAt,
-        };
-    }
-    async followUp(datasetId, feedback) {
-        const agentResult = createTransformDatasetAgent({
-            sourceDatasetIds: this.sourceDatasetIds,
-            outputSchema: this.outputSchema,
-            sandbox: this.sandbox,
-            instructions: this.instructions,
-            db: this.db,
-        });
-        const agent = agentResult.build();
-        const service = agentResult.service;
-        const userEvent = {
-            id: (0, admin_1.id)(),
-            type: story_1.USER_MESSAGE_TYPE,
-            channel: story_1.WEB_CHANNEL,
-            content: {
-                parts: [
-                    {
-                        type: "text",
-                        text: feedback,
-                    },
-                ],
-            },
-            createdAt: new Date().toISOString(),
-        };
-        const contextResult = await this.service.getContextByDatasetId(datasetId);
-        if (!contextResult.ok) {
-            throw new Error(contextResult.error);
-        }
-        const contextId = contextResult.data.id;
-        const reaction = await agent.progressStream(userEvent, { id: contextId });
-        const stream = reaction.stream;
-        const streamResult = await this.agentService.readEventStream(stream);
-        if (streamResult.persistedEvent?.status !== "completed") {
-            throw new Error(`Dataset transformation iteration failed with status: ${streamResult.persistedEvent?.status}`);
-        }
-        const datasetResult = await service.getDatasetById(datasetId);
-        if (!datasetResult.ok) {
-            throw new Error(datasetResult.error);
-        }
-        const dataset = datasetResult.data;
-        return {
-            id: dataset.id,
-            status: dataset.status,
-            title: dataset.title,
-            schema: dataset.schema,
-            analysis: dataset.analysis,
-            calculatedTotalRows: dataset.calculatedTotalRows,
-            actualGeneratedRowCount: dataset.actualGeneratedRowCount,
-            createdAt: dataset.createdAt,
-            updatedAt: dataset.updatedAt,
-        };
-    }
-}
-exports.TransformDatasetAgent = TransformDatasetAgent;
 //# sourceMappingURL=transform-dataset.agent.js.map
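
Inside `ensureSourcesInSandbox`, the direct `@vercel/sandbox` calls (`sandbox.runCommand`) and raw `db.query` lookups against `$files` give way to step helpers from the new `dataset/steps` and `sandbox/steps` modules. A hedged sketch of the same flow using those helpers — the deep-import paths are assumptions read off the dist layout, and only the call shapes are taken from the diff above:

// Mirrors ensureSourcesInSandbox from the diff above. Import paths are
// assumptions based on the dist layout, not documented entry points.
import { datasetReadOutputJsonlStep } from "@ekairos/dataset/dist/dataset/steps";
import {
  createDatasetSandboxStep,
  runDatasetSandboxCommandStep,
  writeDatasetSandboxFilesStep,
} from "@ekairos/dataset/dist/sandbox/steps";

async function stageSource(env: unknown, sourceDatasetId: string): Promise<string> {
  const workdir = "/tmp/work"; // hypothetical; the real code derives a per-dataset workstation path
  // Provision a sandbox; the diff uses a python3.13 runtime with a 10-minute timeout.
  const { sandboxId } = await createDatasetSandboxStep({
    env,
    runtime: "python3.13",
    timeoutMs: 10 * 60 * 1000,
  });
  await runDatasetSandboxCommandStep({ env, sandboxId, cmd: "mkdir", args: ["-p", workdir] });
  // Source rows now arrive base64-encoded instead of being fetched from $files by URL.
  const source = await datasetReadOutputJsonlStep({ env, datasetId: sourceDatasetId });
  await writeDatasetSandboxFilesStep({
    env,
    sandboxId,
    files: [{ path: `${workdir}/source_${sourceDatasetId}.jsonl`, contentBase64: source.contentBase64 }],
  });
  return sandboxId;
}
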
package/dist/transform/transform-dataset.agent.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"transform-dataset.agent.js","sourceRoot":"","sources":["../../src/transform/transform-dataset.agent.ts"],"names":[],"mappings":"…
+{"version":3,"file":"transform-dataset.agent.js","sourceRoot":"","sources":["../../src/transform/transform-dataset.agent.ts"],"names":[],"mappings":"…"}
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@ekairos/dataset",
-  "version": "1.21.43-beta.0",
+  "version": "1.21.53-beta.0",
   "description": "Pulzar Dataset Tools",
   "type": "commonjs",
   "main": "dist/index.js",
@@ -38,11 +38,11 @@
     "typecheck": "tsc --noEmit"
   },
   "dependencies": {
-    "@ekairos/story": "^1.21.…
-    "@ekairos/domain": "^1.21.…
+    "@ekairos/story": "^1.21.53-beta.0",
+    "@ekairos/domain": "^1.21.53-beta.0",
+    "@ekairos/sandbox": "^1.21.50-beta.0",
     "@instantdb/admin": "^0.22.13",
     "@instantdb/core": "^0.22.13",
-    "@vercel/sandbox": "^0.0.23",
     "ai": "^5.0.44",
     "ajv": "^8.17.1",
     "xmlbuilder2": "^3.1.1",