@ekairos/structure 1.21.53-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/clearDataset.tool.d.ts +14 -0
  2. package/dist/clearDataset.tool.d.ts.map +1 -0
  3. package/dist/clearDataset.tool.js +26 -0
  4. package/dist/clearDataset.tool.js.map +1 -0
  5. package/dist/completeObject.tool.d.ts +23 -0
  6. package/dist/completeObject.tool.d.ts.map +1 -0
  7. package/dist/completeObject.tool.js +80 -0
  8. package/dist/completeObject.tool.js.map +1 -0
  9. package/dist/completeRows.tool.d.ts +20 -0
  10. package/dist/completeRows.tool.d.ts.map +1 -0
  11. package/dist/completeRows.tool.js +134 -0
  12. package/dist/completeRows.tool.js.map +1 -0
  13. package/dist/dataset/steps.d.ts +142 -0
  14. package/dist/dataset/steps.d.ts.map +1 -0
  15. package/dist/dataset/steps.js +166 -0
  16. package/dist/dataset/steps.js.map +1 -0
  17. package/dist/datasetFiles.d.ts +6 -0
  18. package/dist/datasetFiles.d.ts.map +1 -0
  19. package/dist/datasetFiles.js +12 -0
  20. package/dist/datasetFiles.js.map +1 -0
  21. package/dist/domain.d.ts +2 -0
  22. package/dist/domain.d.ts.map +1 -0
  23. package/dist/domain.js +2 -0
  24. package/dist/domain.js.map +1 -0
  25. package/dist/executeCommand.tool.d.ts +35 -0
  26. package/dist/executeCommand.tool.d.ts.map +1 -0
  27. package/dist/executeCommand.tool.js +106 -0
  28. package/dist/executeCommand.tool.js.map +1 -0
  29. package/dist/file/steps.d.ts +8 -0
  30. package/dist/file/steps.d.ts.map +1 -0
  31. package/dist/file/steps.js +20 -0
  32. package/dist/file/steps.js.map +1 -0
  33. package/dist/generateSchema.tool.d.ts +22 -0
  34. package/dist/generateSchema.tool.d.ts.map +1 -0
  35. package/dist/generateSchema.tool.js +61 -0
  36. package/dist/generateSchema.tool.js.map +1 -0
  37. package/dist/index.d.ts +5 -0
  38. package/dist/index.d.ts.map +1 -0
  39. package/dist/index.js +5 -0
  40. package/dist/index.js.map +1 -0
  41. package/dist/prompts.d.ts +18 -0
  42. package/dist/prompts.d.ts.map +1 -0
  43. package/dist/prompts.js +62 -0
  44. package/dist/prompts.js.map +1 -0
  45. package/dist/sandbox/steps.d.ts +74 -0
  46. package/dist/sandbox/steps.d.ts.map +1 -0
  47. package/dist/sandbox/steps.js +104 -0
  48. package/dist/sandbox/steps.js.map +1 -0
  49. package/dist/schema.d.ts +2 -0
  50. package/dist/schema.d.ts.map +1 -0
  51. package/dist/schema.js +33 -0
  52. package/dist/schema.js.map +1 -0
  53. package/dist/service.d.ts +41 -0
  54. package/dist/service.d.ts.map +1 -0
  55. package/dist/service.js +179 -0
  56. package/dist/service.js.map +1 -0
  57. package/dist/steps/commitFromEvents.step.d.ts +13 -0
  58. package/dist/steps/commitFromEvents.step.d.ts.map +1 -0
  59. package/dist/steps/commitFromEvents.step.js +82 -0
  60. package/dist/steps/commitFromEvents.step.js.map +1 -0
  61. package/dist/steps/persistObjectFromStory.step.d.ts +7 -0
  62. package/dist/steps/persistObjectFromStory.step.d.ts.map +1 -0
  63. package/dist/steps/persistObjectFromStory.step.js +90 -0
  64. package/dist/steps/persistObjectFromStory.step.js.map +1 -0
  65. package/dist/structure.d.ts +34 -0
  66. package/dist/structure.d.ts.map +1 -0
  67. package/dist/structure.js +443 -0
  68. package/dist/structure.js.map +1 -0
  69. package/dist/types/runtime.d.ts +56 -0
  70. package/dist/types/runtime.d.ts.map +1 -0
  71. package/dist/types/runtime.js +2 -0
  72. package/dist/types/runtime.js.map +1 -0
  73. package/package.json +49 -0
package/dist/prompts.js ADDED
@@ -0,0 +1,62 @@
+ export function buildStructurePrompt(ctx) {
+     const goalText = ctx.output === "rows"
+         ? "Generate a JSONL dataset (output.jsonl) where each line is a JSON object: {\"type\":\"row\",\"data\":{...}}."
+         : "Generate a single JSON object result that matches the output schema (when provided).";
+     const lines = [];
+     lines.push("## ROLE");
+     lines.push("You are an AI system that produces structured outputs from mixed sources (files, text, and existing datasets).");
+     lines.push("");
+     lines.push("## GOAL");
+     lines.push(goalText);
+     lines.push("");
+     lines.push("## CONTEXT");
+     lines.push(`DatasetId: ${ctx.datasetId}`);
+     lines.push("");
+     lines.push("## SOURCES");
+     for (const s of ctx.sources) {
+         lines.push(`- kind: ${s.kind}`);
+         lines.push(`  id: ${s.id}`);
+         lines.push(`  path: ${s.path}`);
+         if (s.name)
+             lines.push(`  name: ${s.name}`);
+         if (s.mimeType)
+             lines.push(`  mimeType: ${s.mimeType}`);
+     }
+     lines.push("");
+     lines.push("## SANDBOX");
+     lines.push(`Workstation: ${ctx.workstation}`);
+     lines.push(`OutputPath: ${ctx.outputPath}`);
+     lines.push("");
+     if (ctx.mode === "schema" && ctx.outputSchema) {
+         lines.push("## OUTPUT SCHEMA (JSON Schema)");
+         lines.push(JSON.stringify(ctx.outputSchema, null, 2));
+         lines.push("");
+     }
+     lines.push("## INSTRUCTIONS");
+     if (ctx.mode === "auto") {
+         lines.push("1) Inspect the Sources. If needed, use executeCommand to open/read files and explore structure (do not guess). Keep stdout concise.");
+         lines.push("2) Propose an output JSON Schema (lowerCamelCase field names). You may add derived fields if helpful, but justify them. Then call generateSchema.");
+         if (ctx.output === "rows") {
+             lines.push("3) Use executeCommand to read the sources and write output.jsonl at OutputPath. Each line must be {\"type\":\"row\",\"data\":{...}}. Keep prints concise.");
+             lines.push("4) Call complete to validate and persist the output.jsonl to Instant Storage and mark the dataset completed.");
+         }
+         else {
+             lines.push("3) Produce the final JSON object. If needed, use executeCommand to compute it. Then you MUST call complete. Prefer resultJson (inline JSON) for small objects; use resultPath only if the object is large.");
+         }
+     }
+     else {
+         if (ctx.output === "rows") {
+             lines.push("1) Use executeCommand to read the sources and write output.jsonl at OutputPath. Each line must be {\"type\":\"row\",\"data\":{...}}. Keep prints concise.");
+             lines.push("2) Call complete to validate and persist the output.jsonl to Instant Storage and mark the dataset completed.");
+         }
+         else {
+             lines.push("1) Produce the final JSON object. If needed, use executeCommand to compute it. Then you MUST call complete. Prefer resultJson (inline JSON) for small objects; use resultPath only if the object is large.");
+         }
+     }
+     lines.push("");
+     lines.push("## RULES");
+     lines.push("- Field names must be lowerCamelCase.");
+     lines.push("- Do not leak secrets. Do not print large raw datasets to stdout.");
+     return lines.join("\n");
+ }
+ //# sourceMappingURL=prompts.js.map
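
For orientation, a minimal sketch of how the exported builder might be driven. The import path and all context values are invented for illustration; only the context shape is taken from the compiled code above.

    // Hypothetical entry point — the package's public exports are not shown in this diff.
    import { buildStructurePrompt } from "@ekairos/structure";

    const prompt = buildStructurePrompt({
        datasetId: "ds_123",   // invented id
        mode: "auto",          // "auto" | "schema"
        output: "rows",        // "rows" | "object"
        sources: [
            { kind: "file", id: "f_1", path: "/workspace/sources/input.csv", name: "input.csv", mimeType: "text/csv" },
        ],
        workstation: "/workspace",
        outputPath: "/workspace/output.jsonl",
    });
    // prompt is a single string: "## ROLE\n...\n## RULES\n...", sections joined with "\n".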
package/dist/sandbox/steps.d.ts ADDED
@@ -0,0 +1,74 @@
+ export type DatasetSandboxId = string;
+ export type CreateDatasetSandboxParams = {
+     runtime?: string;
+     timeoutMs?: number;
+     ports?: number[];
+     resources?: {
+         vcpus?: number;
+     };
+     purpose?: string;
+     params?: Record<string, any>;
+ };
+ export type DatasetSandboxRunCommandResult = {
+     exitCode: number;
+     stdout: string;
+     stderr: string;
+ };
+ export declare function createDatasetSandboxStep(params: {
+     env: any;
+ } & CreateDatasetSandboxParams): Promise<{
+     sandboxId: DatasetSandboxId;
+ }>;
+ export declare function runDatasetSandboxCommandStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+     cmd: string;
+     args?: string[];
+ }): Promise<DatasetSandboxRunCommandResult>;
+ export declare function writeDatasetSandboxFilesStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+     files: Array<{
+         path: string;
+         contentBase64: string;
+     }>;
+ }): Promise<void>;
+ /**
+  * Workflow-safe helper:
+  * Keep base64 encoding inside the step runtime (Node),
+  * so the workflow runtime never needs `Buffer`.
+  *
+  * Input/Output are serializable.
+  */
+ export declare function writeDatasetSandboxTextFileStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+     path: string;
+     text: string;
+ }): Promise<void>;
+ export declare function readDatasetSandboxFileStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+     path: string;
+ }): Promise<{
+     contentBase64: string;
+ }>;
+ /**
+  * Workflow-safe helper:
+  * Decode base64 -> utf-8 inside the step runtime (Node),
+  * so the workflow runtime never needs `Buffer`.
+  *
+  * Input/Output are serializable.
+  */
+ export declare function readDatasetSandboxTextFileStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+     path: string;
+ }): Promise<{
+     text: string;
+ }>;
+ export declare function stopDatasetSandboxStep(params: {
+     env: any;
+     sandboxId: DatasetSandboxId;
+ }): Promise<void>;
+ //# sourceMappingURL=steps.d.ts.map
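
The two `TextFile` helpers exist so workflow code never touches `Buffer` directly: callers pass plain strings, and the base64 round-trip happens inside the step runtime. A sketch of the equivalence, with the sandbox id and paths invented:

    declare const env: any; // runtime environment, provided by the surrounding workflow

    const text = "id,name\n1,Ada\n";

    // By hand, via the raw files step (requires Node's Buffer in the caller):
    const contentBase64 = Buffer.from(text, "utf-8").toString("base64");
    await writeDatasetSandboxFilesStep({
        env,
        sandboxId: "sb_1",
        files: [{ path: "/workspace/input.csv", contentBase64 }],
    });

    // Or in one call, keeping the caller Buffer-free:
    await writeDatasetSandboxTextFileStep({ env, sandboxId: "sb_1", path: "/workspace/input.csv", text });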
package/dist/sandbox/steps.js ADDED
@@ -0,0 +1,104 @@
+ import { resolveStoryRuntime } from "@ekairos/story/runtime";
+ export async function createDatasetSandboxStep(params) {
+     "use step";
+     console.log("[ekairos/structure] sandbox.step createSandbox begin");
+     console.log("[ekairos/structure] sandbox.step createSandbox runtime", params.runtime);
+     const db = (await resolveStoryRuntime(params.env)).db;
+     const { SandboxService } = (await import("@ekairos/sandbox"));
+     const service = new SandboxService(db);
+     const created = await service.createSandbox(params);
+     if (!created.ok)
+         throw new Error(created.error);
+     console.log("[ekairos/structure] sandbox.step createSandbox ok");
+     console.log("[ekairos/structure] sandbox.step createSandbox sandboxId", created.data.sandboxId);
+     return { sandboxId: created.data.sandboxId };
+ }
+ export async function runDatasetSandboxCommandStep(params) {
+     "use step";
+     console.log("[ekairos/structure] sandbox.step runCommand begin");
+     console.log("[ekairos/structure] sandbox.step runCommand sandboxId", params.sandboxId);
+     console.log("[ekairos/structure] sandbox.step runCommand cmd", params.cmd);
+     const db = (await resolveStoryRuntime(params.env)).db;
+     const { SandboxService } = (await import("@ekairos/sandbox"));
+     const service = new SandboxService(db);
+     const result = await service.runCommand(params.sandboxId, params.cmd, params.args ?? []);
+     if (!result.ok)
+         throw new Error(result.error);
+     const normalized = {
+         exitCode: result.data.exitCode ?? (result.data.success ? 0 : 1),
+         stdout: result.data.output ?? "",
+         stderr: result.data.error ?? "",
+     };
+     console.log("[ekairos/structure] sandbox.step runCommand ok");
+     console.log("[ekairos/structure] sandbox.step runCommand exitCode", normalized.exitCode);
+     return normalized;
+ }
+ export async function writeDatasetSandboxFilesStep(params) {
+     "use step";
+     console.log("[ekairos/structure] sandbox.step writeFiles begin");
+     console.log("[ekairos/structure] sandbox.step writeFiles sandboxId", params.sandboxId);
+     console.log("[ekairos/structure] sandbox.step writeFiles paths", params.files.map((f) => f.path));
+     const db = (await resolveStoryRuntime(params.env)).db;
+     const { SandboxService } = (await import("@ekairos/sandbox"));
+     const service = new SandboxService(db);
+     const result = await service.writeFiles(params.sandboxId, params.files);
+     if (!result.ok)
+         throw new Error(result.error);
+     console.log("[ekairos/structure] sandbox.step writeFiles ok");
+ }
+ /**
+  * Workflow-safe helper:
+  * Keep base64 encoding inside the step runtime (Node),
+  * so the workflow runtime never needs `Buffer`.
+  *
+  * Input/Output are serializable.
+  */
+ export async function writeDatasetSandboxTextFileStep(params) {
+     "use step";
+     const contentBase64 = Buffer.from(String(params.text ?? ""), "utf-8").toString("base64");
+     await writeDatasetSandboxFilesStep({
+         env: params.env,
+         sandboxId: params.sandboxId,
+         files: [{ path: params.path, contentBase64 }],
+     });
+ }
+ export async function readDatasetSandboxFileStep(params) {
+     "use step";
+     console.log("[ekairos/structure] sandbox.step readFile begin");
+     console.log("[ekairos/structure] sandbox.step readFile sandboxId", params.sandboxId);
+     console.log("[ekairos/structure] sandbox.step readFile path", params.path);
+     const db = (await resolveStoryRuntime(params.env)).db;
+     const { SandboxService } = (await import("@ekairos/sandbox"));
+     const service = new SandboxService(db);
+     const result = await service.readFile(params.sandboxId, params.path);
+     if (!result.ok)
+         throw new Error(result.error);
+     console.log("[ekairos/structure] sandbox.step readFile ok");
+     return result.data;
+ }
+ /**
+  * Workflow-safe helper:
+  * Decode base64 -> utf-8 inside the step runtime (Node),
+  * so the workflow runtime never needs `Buffer`.
+  *
+  * Input/Output are serializable.
+  */
+ export async function readDatasetSandboxTextFileStep(params) {
+     "use step";
+     const res = await readDatasetSandboxFileStep(params);
+     const text = Buffer.from(res.contentBase64 ?? "", "base64").toString("utf-8");
+     return { text };
+ }
+ export async function stopDatasetSandboxStep(params) {
+     "use step";
+     console.log("[ekairos/structure] sandbox.step stopSandbox begin");
+     console.log("[ekairos/structure] sandbox.step stopSandbox sandboxId", params.sandboxId);
+     const db = (await resolveStoryRuntime(params.env)).db;
+     const { SandboxService } = (await import("@ekairos/sandbox"));
+     const service = new SandboxService(db);
+     const result = await service.stopSandbox(params.sandboxId);
+     if (!result.ok)
+         throw new Error(result.error);
+     console.log("[ekairos/structure] sandbox.step stopSandbox ok");
+ }
+ //# sourceMappingURL=steps.js.map
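
Composed, these steps form the usual create → write → run → read → stop lifecycle. A sketch with a `try/finally` so the sandbox is always released; the runtime name, paths, and script are invented:

    declare const env: any;

    const { sandboxId } = await createDatasetSandboxStep({ env, runtime: "node", timeoutMs: 600_000 });
    try {
        await writeDatasetSandboxTextFileStep({ env, sandboxId, path: "/workspace/job.mjs", text: "console.log('ok')" });
        const run = await runDatasetSandboxCommandStep({ env, sandboxId, cmd: "node", args: ["/workspace/job.mjs"] });
        if (run.exitCode !== 0)
            throw new Error(run.stderr || "command failed");
        const { text } = await readDatasetSandboxTextFileStep({ env, sandboxId, path: "/workspace/output.jsonl" });
        console.log(text.slice(0, 200));
    } finally {
        await stopDatasetSandboxStep({ env, sandboxId }); // release even on failure
    }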
package/dist/schema.d.ts ADDED
@@ -0,0 +1,2 @@
+ export declare const structureDomain: any;
+ //# sourceMappingURL=schema.d.ts.map
package/dist/schema.js ADDED
@@ -0,0 +1,33 @@
+ import { i } from "@instantdb/core";
+ import { domain } from "@ekairos/domain";
+ import { storyDomain } from "@ekairos/story";
+ const entities = {
+     // Keep $files compatible with Instant's base file fields used by structure flows.
+     $files: i.entity({
+         path: i.string().optional().indexed(),
+         url: i.string().optional(),
+         name: i.string().optional(),
+         contentType: i.string().optional(),
+         size: i.number().optional(),
+         createdAt: i.number().optional().indexed(),
+         updatedAt: i.number().optional().indexed(),
+         "content-disposition": i.string().optional(),
+     }),
+ };
+ const links = {
+     /**
+      * Structure output link (rows):
+      *
+      * - `context_contexts.structure_output_file` points to the `$files` record for `output.jsonl`.
+      * - Reverse label is prefixed to avoid collisions across domains.
+      */
+     structureContextOutputFile: {
+         forward: { on: "context_contexts", has: "one", label: "structure_output_file" },
+         reverse: { on: "$files", has: "many", label: "structure_contexts" },
+     },
+ };
+ const rooms = {};
+ export const structureDomain = domain("structure")
+     .includes(storyDomain)
+     .schema({ entities, links, rooms });
+ //# sourceMappingURL=schema.js.map
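
The `structureContextOutputFile` link is what lets a context row and its `output.jsonl` file record come back in one nested query; this is the same shape `DatasetService` uses below. A sketch against that query shape, with the key value invented:

    declare const db: any; // InstantDB handle

    const res = await db.query({
        context_contexts: {
            $: { where: { key: "structure:ds_123" }, limit: 1 },
            structure_output_file: {}, // follows the forward link into $files
        },
    });
    const file = res.context_contexts?.[0]?.structure_output_file;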
package/dist/service.d.ts ADDED
@@ -0,0 +1,41 @@
+ export type ServiceResult<T = any> = {
+     ok: true;
+     data: T;
+ } | {
+     ok: false;
+     error: string;
+ };
+ /**
+  * Back-compat helper for reading structure outputs outside the workflow runtime.
+  *
+  * IMPORTANT: The source of truth is `context_contexts` (Story context) keyed by `structure:<id>`.
+  */
+ export declare class DatasetService {
+     private readonly db;
+     constructor(db: any);
+     private contextKey;
+     getDatasetById(datasetId: string): Promise<ServiceResult<any>>;
+     getFileById(fileId: string): Promise<any>;
+     readRecordsFromFile(datasetId: string): Promise<ServiceResult<AsyncGenerator<any, void, unknown>>>;
+     /**
+      * Back-compat: create a new structure context keyed by `structure:<id>`.
+      * This is not used by the primary `structure()` API, which creates contexts via Story runtime.
+      */
+     createDataset(params: {
+         id?: string;
+     }): Promise<ServiceResult<{
+         datasetId: string;
+     }>>;
+     uploadDatasetOutputFile(params: {
+         datasetId: string;
+         fileBuffer: Buffer;
+     }): Promise<ServiceResult<{
+         fileId: string;
+         storagePath: string;
+     }>>;
+     linkFileToDataset(params: {
+         datasetId: string;
+         fileId: string;
+     }): Promise<ServiceResult<void>>;
+ }
+ //# sourceMappingURL=service.d.ts.map
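
`ServiceResult` is a plain discriminated union, so call sites narrow on `ok` instead of wrapping everything in try/catch. A sketch (the id is invented):

    declare const service: DatasetService;

    const result = await service.getDatasetById("ds_123");
    if (!result.ok) {
        console.error(result.error); // narrowed to { ok: false; error: string }
    } else {
        console.log(result.data);    // narrowed to { ok: true; data: any }
    }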
package/dist/service.js ADDED
@@ -0,0 +1,179 @@
+ /**
+  * Back-compat helper for reading structure outputs outside the workflow runtime.
+  *
+  * IMPORTANT: The source of truth is `context_contexts` (Story context) keyed by `structure:<id>`.
+  */
+ export class DatasetService {
+     constructor(db) {
+         this.db = db;
+     }
+     contextKey(structureId) {
+         return `structure:${structureId}`;
+     }
+     async getDatasetById(datasetId) {
+         try {
+             const key = this.contextKey(datasetId);
+             const res = await this.db.query({
+                 context_contexts: {
+                     $: { where: { key }, limit: 1 },
+                     structure_output_file: {},
+                 },
+             });
+             const ctx = res.context_contexts?.[0];
+             if (!ctx)
+                 return { ok: false, error: "Context not found" };
+             return { ok: true, data: ctx };
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : String(error);
+             return { ok: false, error: message };
+         }
+     }
+     async getFileById(fileId) {
+         const fileQuery = await this.db.query({
+             $files: {
+                 $: {
+                     where: { id: fileId },
+                     limit: 1,
+                 },
+             },
+         });
+         return fileQuery;
+     }
+     async readRecordsFromFile(datasetId) {
+         try {
+             const key = this.contextKey(datasetId);
+             const res = await this.db.query({
+                 context_contexts: {
+                     $: { where: { key }, limit: 1 },
+                     structure_output_file: {},
+                 },
+             });
+             const ctx = res.context_contexts?.[0];
+             const linked = Array.isArray(ctx?.structure_output_file) ? ctx.structure_output_file[0] : ctx?.structure_output_file;
+             const url = linked?.url;
+             if (!url)
+                 return { ok: false, error: "Rows output file not found" };
+             async function* createGenerator(fileUrl) {
+                 const response = await fetch(fileUrl);
+                 if (!response.ok || !response.body) {
+                     throw new Error("Failed to download rows output file");
+                 }
+                 const reader = response.body.getReader();
+                 const decoder = new TextDecoder("utf-8");
+                 let buffer = "";
+                 while (true) {
+                     const { value, done } = await reader.read();
+                     if (done)
+                         break;
+                     buffer += decoder.decode(value, { stream: true });
+                     let newlineIndex = buffer.indexOf("\n");
+                     while (newlineIndex !== -1) {
+                         const line = buffer.slice(0, newlineIndex);
+                         buffer = buffer.slice(newlineIndex + 1);
+                         const trimmed = line.trim();
+                         if (trimmed) {
+                             try {
+                                 const record = JSON.parse(trimmed);
+                                 yield record;
+                             }
+                             catch {
+                                 // skip invalid
+                             }
+                         }
+                         newlineIndex = buffer.indexOf("\n");
+                     }
+                 }
+                 const remaining = buffer.trim();
+                 if (remaining) {
+                     try {
+                         yield JSON.parse(remaining);
+                     }
+                     catch {
+                         // skip invalid
+                     }
+                 }
+             }
+             return { ok: true, data: createGenerator(url) };
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : String(error);
+             return { ok: false, error: message };
+         }
+     }
+     /**
+      * Back-compat: create a new structure context keyed by `structure:<id>`.
+      * This is not used by the primary `structure()` API, which creates contexts via Story runtime.
+      */
+     async createDataset(params) {
+         try {
+             const datasetId = params.id ?? createUuidV4();
+             const key = this.contextKey(datasetId);
+             const existing = await this.db.query({
+                 context_contexts: { $: { where: { key }, limit: 1 } },
+             });
+             const ctx = existing.context_contexts?.[0];
+             if (ctx)
+                 return { ok: true, data: { datasetId } };
+             await this.db.transact([
+                 this.db.tx.context_contexts[createUuidV4()].create({
+                     createdAt: new Date(),
+                     content: {},
+                     key,
+                     status: "open",
+                 }),
+             ]);
+             return { ok: true, data: { datasetId } };
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : String(error);
+             return { ok: false, error: message };
+         }
+     }
+     async uploadDatasetOutputFile(params) {
+         try {
+             const storagePath = `/structure/${params.datasetId}/output.jsonl`;
+             const uploadResult = await this.db.storage.uploadFile(storagePath, params.fileBuffer, {
+                 contentType: "application/x-ndjson",
+                 contentDisposition: "output.jsonl",
+             });
+             const fileId = uploadResult?.data?.id;
+             if (!fileId)
+                 return { ok: false, error: "Failed to upload file to storage" };
+             const linkResult = await this.linkFileToDataset({ datasetId: params.datasetId, fileId });
+             if (!linkResult.ok)
+                 return linkResult;
+             return { ok: true, data: { fileId, storagePath } };
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : String(error);
+             return { ok: false, error: message };
+         }
+     }
+     async linkFileToDataset(params) {
+         try {
+             const key = this.contextKey(params.datasetId);
+             const res = await this.db.query({
+                 context_contexts: { $: { where: { key }, limit: 1 } },
+             });
+             const ctx = res?.context_contexts?.[0];
+             const ctxId = ctx?.id;
+             if (!ctxId)
+                 return { ok: false, error: "Context not found" };
+             await this.db.transact([this.db.tx.context_contexts[ctxId].link({ structure_output_file: params.fileId })]);
+             return { ok: true, data: undefined };
+         }
+         catch (error) {
+             const message = error instanceof Error ? error.message : String(error);
+             return { ok: false, error: message };
+         }
+     }
+ }
+ function createUuidV4() {
+     return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
+         const r = (Math.random() * 16) | 0;
+         const v = c === "x" ? r : (r & 0x3) | 0x8;
+         return v.toString(16);
+     });
+ }
+ //# sourceMappingURL=service.js.map
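
`readRecordsFromFile` streams the linked `output.jsonl` and yields one parsed object per line, so the natural consumption pattern is `for await`. A sketch, assuming an InstantDB-style `db` handle (the id is invented):

    declare const db: any;

    const service = new DatasetService(db);
    const rows = await service.readRecordsFromFile("ds_123");
    if (rows.ok) {
        for await (const record of rows.data) {
            // each record is one JSONL line, e.g. { type: "row", data: { ... } }
            console.log(record);
        }
    } else {
        console.error(rows.error);
    }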
package/dist/steps/commitFromEvents.step.d.ts ADDED
@@ -0,0 +1,13 @@
+ export declare function structureCommitFromEventsStep(params: {
+     env: any;
+     structureId: string;
+ }): Promise<{
+     ok: true;
+     data: {
+         committed: boolean;
+     };
+ } | {
+     ok: false;
+     error: string;
+ }>;
+ //# sourceMappingURL=commitFromEvents.step.d.ts.map
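
The return type follows the same ok/error union convention as `ServiceResult`. A call-site sketch (env and structureId are stand-ins):

    declare const env: any;

    const res = await structureCommitFromEventsStep({ env, structureId: "ds_123" });
    if (res.ok) {
        console.log("committed:", res.data.committed);
    } else {
        console.error(res.error);
    }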