@ekairos/story 1.21.30-beta.0 → 1.21.32-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +174 -0
- package/dist/agent.builder.d.ts +52 -0
- package/dist/agent.builder.d.ts.map +1 -0
- package/dist/agent.builder.js +110 -0
- package/dist/agent.builder.js.map +1 -0
- package/dist/agent.d.ts +2 -119
- package/dist/agent.d.ts.map +1 -1
- package/dist/agent.engine.d.ts +75 -0
- package/dist/agent.engine.d.ts.map +1 -0
- package/dist/agent.engine.js +455 -0
- package/dist/agent.engine.js.map +1 -0
- package/dist/agent.js +8 -607
- package/dist/agent.js.map +1 -1
- package/dist/ekairos.config.d.ts +21 -0
- package/dist/ekairos.config.d.ts.map +1 -0
- package/dist/ekairos.config.js +26 -0
- package/dist/ekairos.config.js.map +1 -0
- package/dist/events.d.ts +11 -7
- package/dist/events.d.ts.map +1 -1
- package/dist/events.js +37 -210
- package/dist/events.js.map +1 -1
- package/dist/index.d.ts +4 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +7 -25
- package/dist/index.js.map +1 -1
- package/dist/legacy.story.d.ts +5 -0
- package/dist/legacy.story.d.ts.map +1 -0
- package/dist/legacy.story.js +15 -0
- package/dist/legacy.story.js.map +1 -0
- package/dist/runtime.d.ts +12 -0
- package/dist/runtime.js +12 -0
- package/dist/schema-document.d.ts +0 -1
- package/dist/schema-document.js +14 -18
- package/dist/schema-document.js.map +1 -1
- package/dist/schema.d.ts +0 -1
- package/dist/schema.js +22 -26
- package/dist/schema.js.map +1 -1
- package/dist/steps/do-story-stream-step.d.ts +29 -0
- package/dist/steps/do-story-stream-step.d.ts.map +1 -0
- package/dist/steps/do-story-stream-step.js +89 -0
- package/dist/steps/do-story-stream-step.js.map +1 -0
- package/dist/steps/index.d.ts +1 -3
- package/dist/steps/index.d.ts.map +1 -1
- package/dist/steps/index.js +3 -17
- package/dist/steps/index.js.map +1 -1
- package/dist/steps/store.steps.d.ts +43 -0
- package/dist/steps/store.steps.d.ts.map +1 -0
- package/dist/steps/store.steps.js +123 -0
- package/dist/steps/store.steps.js.map +1 -0
- package/dist/steps/story.steps.d.ts +35 -0
- package/dist/steps/story.steps.d.ts.map +1 -0
- package/dist/steps/story.steps.js +59 -0
- package/dist/steps/story.steps.js.map +1 -0
- package/dist/steps/stream.steps.d.ts +28 -0
- package/dist/steps/stream.steps.d.ts.map +1 -0
- package/dist/steps/stream.steps.js +75 -0
- package/dist/steps/stream.steps.js.map +1 -0
- package/dist/stores/instant.document-parser.d.ts +5 -0
- package/dist/stores/instant.document-parser.d.ts.map +1 -0
- package/dist/stores/instant.document-parser.js +116 -0
- package/dist/stores/instant.document-parser.js.map +1 -0
- package/dist/stores/instant.documents.d.ts +16 -0
- package/dist/stores/instant.documents.js +108 -0
- package/dist/stores/instant.store.d.ts +40 -0
- package/dist/stores/instant.store.d.ts.map +1 -0
- package/dist/stores/instant.store.js +207 -0
- package/dist/stores/instant.store.js.map +1 -0
- package/dist/story.builder.d.ts +116 -0
- package/dist/story.builder.d.ts.map +1 -0
- package/dist/story.builder.js +130 -0
- package/dist/story.builder.js.map +1 -0
- package/dist/story.config.d.ts +43 -0
- package/dist/story.config.d.ts.map +1 -0
- package/dist/story.config.js +57 -0
- package/dist/story.config.js.map +1 -0
- package/dist/story.d.ts +2 -50
- package/dist/story.d.ts.map +1 -1
- package/dist/story.engine.d.ts +174 -0
- package/dist/story.engine.d.ts.map +1 -0
- package/dist/story.engine.js +283 -0
- package/dist/story.engine.js.map +1 -0
- package/dist/story.js +6 -55
- package/dist/story.js.map +1 -1
- package/dist/story.legacy.d.ts +12 -0
- package/dist/story.legacy.d.ts.map +1 -0
- package/dist/story.legacy.js +15 -0
- package/dist/story.legacy.js.map +1 -0
- package/dist/story.registry.d.ts +21 -0
- package/dist/story.registry.d.ts.map +1 -0
- package/dist/story.registry.js +30 -0
- package/dist/story.registry.js.map +1 -0
- package/dist/story.store.d.ts +59 -0
- package/dist/story.store.d.ts.map +1 -0
- package/dist/story.store.js +1 -0
- package/dist/story.store.js.map +1 -0
- package/dist/story.streams.d.ts +55 -0
- package/dist/story.streams.d.ts.map +1 -0
- package/dist/story.streams.js +99 -0
- package/dist/story.streams.js.map +1 -0
- package/dist/story.toolcalls.d.ts +60 -0
- package/dist/story.toolcalls.d.ts.map +1 -0
- package/dist/story.toolcalls.js +73 -0
- package/dist/story.toolcalls.js.map +1 -0
- package/dist/tools-to-model-tools.d.ts +19 -0
- package/dist/tools-to-model-tools.js +21 -0
- package/dist/workflow.d.ts +20 -0
- package/dist/workflow.js +27 -0
- package/package.json +15 -4
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"store.steps.js","sourceRoot":"","sources":["../../src/steps/store.steps.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,mBAAmB,EAAyB,MAAM,iBAAiB,CAAA;AAG5E,MAAM,CAAC,KAAK,UAAU,UAAU;IAC9B,UAAU,CAAA;IACV,kDAAkD;IAClD,MAAM,IAAI,GAAI,UAAU,CAAC,MAAc,EAAE,UAAU,EAAE,EAAE,CAAA;IACvD,IAAI,IAAI;QAAE,OAAO,IAAI,CAAA;IAErB,WAAW;IACX,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAA;AAC/D,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,kBAAkB,CACtC,GAAqB,EACrB,iBAA2C;IAE3C,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAEhD,2EAA2E;IAC3E,IAAI,CAAC,iBAAiB,EAAE,CAAC;QACvB,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,kBAAkB,CAAI,IAAI,CAAC,CAAA;QACvD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,CAAA;IACjC,CAAC;IAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,UAAU,CAAI,iBAAiB,CAAC,CAAA;IAC7D,IAAI,QAAQ;QAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,EAAE,CAAA;IAExD,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,kBAAkB,CAAI,iBAAiB,CAAC,CAAA;IACpE,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,CAAA;AAC1C,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,UAAU,CAC9B,GAAqB,EACrB,iBAAoC;IAEpC,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,UAAU,CAAI,iBAAiB,CAAC,CAAA;AACrD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACxC,GAAqB,EACrB,iBAAoC,EACpC,OAAU;IAEV,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,oBAAoB,CAAI,iBAAiB,EAAE,OAAO,CAAC,CAAA;AACxE,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACvC,GAAqB,EACrB,iBAAoC,EACpC,MAAuC;IAEvC,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,MAAM,KAAK,CAAC,mBAAmB,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;AAC5D,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,GAAqB,EACrB,iBAAoC,EACpC,KAAmB;IAEnB,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,SAAS,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAA;AACxD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,GAAqB,EACrB,OAAe,EACf,KAAmB;IAEnB,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,
EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,WAAW,CAAC,OAAO,EAAE,KAAK,CAAC,CAAA;AAChD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,GAAqB,EACrB,iBAAoC;IAEpC,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAA;AACjD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,GAAqB,EACrB,iBAAoC,EACpC,cAAsB,EACtB,eAAuB;IAEvB,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,eAAe,CAAC,iBAAiB,EAAE,cAAc,EAAE,eAAe,CAAC,CAAA;AACxF,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,GAAqB,EACrB,iBAAoC,EACpC,WAAmB,EACnB,MAA8B;IAE9B,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,MAAM,KAAK,CAAC,iBAAiB,CAAC,iBAAiB,EAAE,WAAW,EAAE,MAAM,CAAC,CAAA;AACvE,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,GAAqB,EACrB,MAAsB;IAEtB,UAAU,CAAA;IACV,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,mBAAmB,CAAC,GAAG,CAAC,CAAA;IAChD,OAAO,MAAM,KAAK,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAA;AAClD,CAAC","sourcesContent":["import type { ModelMessage } from \"ai\"\r\n\r\nimport { resolveStoryRuntime, type StoryEnvironment } from \"../story.config\"\r\nimport type { ContextEvent, ContextIdentifier, StoredContext } from \"../story.store\"\r\n\r\nexport async function generateId(): Promise<string> {\r\n \"use step\"\r\n // Use crypto.randomUUID when available (Node 18+)\r\n const uuid = (globalThis.crypto as any)?.randomUUID?.()\r\n if (uuid) return uuid\r\n\r\n // Fallback\r\n return `${Date.now()}-${Math.random().toString(16).slice(2)}`\r\n}\r\n\r\nexport async function getOrCreateContext<C>(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier | null,\r\n): Promise<{ context: StoredContext<C>; isNew: boolean }> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n\r\n // Detect creation explicitly so the engine can run onContextCreated hooks.\r\n if (!contextIdentifier) {\r\n const context = await store.getOrCreateContext<C>(null)\r\n return { context, 
isNew: true }\r\n }\r\n\r\n const existing = await store.getContext<C>(contextIdentifier)\r\n if (existing) return { context: existing, isNew: false }\r\n\r\n const created = await store.getOrCreateContext<C>(contextIdentifier)\r\n return { context: created, isNew: true }\r\n}\r\n\r\nexport async function getContext<C>(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n): Promise<StoredContext<C> | null> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.getContext<C>(contextIdentifier)\r\n}\r\n\r\nexport async function updateContextContent<C>(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n content: C,\r\n): Promise<StoredContext<C>> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.updateContextContent<C>(contextIdentifier, content)\r\n}\r\n\r\nexport async function updateContextStatus(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n status: \"open\" | \"streaming\" | \"closed\",\r\n): Promise<void> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n await store.updateContextStatus(contextIdentifier, status)\r\n}\r\n\r\nexport async function saveEvent(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n event: ContextEvent,\r\n): Promise<ContextEvent> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.saveEvent(contextIdentifier, event)\r\n}\r\n\r\nexport async function updateEvent(\r\n env: StoryEnvironment,\r\n eventId: string,\r\n event: ContextEvent,\r\n): Promise<ContextEvent> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.updateEvent(eventId, event)\r\n}\r\n\r\nexport async function getEvents(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n): Promise<ContextEvent[]> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n 
return await store.getEvents(contextIdentifier)\r\n}\r\n\r\nexport async function createExecution(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n triggerEventId: string,\r\n reactionEventId: string,\r\n): Promise<{ id: string }> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.createExecution(contextIdentifier, triggerEventId, reactionEventId)\r\n}\r\n\r\nexport async function completeExecution(\r\n env: StoryEnvironment,\r\n contextIdentifier: ContextIdentifier,\r\n executionId: string,\r\n status: \"completed\" | \"failed\",\r\n): Promise<void> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n await store.completeExecution(contextIdentifier, executionId, status)\r\n}\r\n\r\nexport async function eventsToModelMessages(\r\n env: StoryEnvironment,\r\n events: ContextEvent[],\r\n): Promise<ModelMessage[]> {\r\n \"use step\"\r\n const { store } = await resolveStoryRuntime(env)\r\n return await store.eventsToModelMessages(events)\r\n}\r\n\r\n\r\n\r\n"]}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import type { ContextEvent, ContextIdentifier } from "../service";
|
|
2
|
+
import type { ProgressStreamOptions, Story } from "../story.engine";
|
|
3
|
+
/**
|
|
4
|
+
* A Workflow-compatible writable stream (e.g. `getWritable()` from useworkflow).
|
|
5
|
+
*
|
|
6
|
+
* The Workflow DevKit streams are standard Web Streams and can be passed around
|
|
7
|
+
* (serialized as references) by the workflow runtime.
|
|
8
|
+
*/
|
|
9
|
+
export type WorkflowWritableStream<TChunk = any> = WritableStream<TChunk>;
|
|
10
|
+
/**
|
|
11
|
+
* Creates Workflow step wrappers for a Story instance.
|
|
12
|
+
*
|
|
13
|
+
* Why:
|
|
14
|
+
* - Workflow functions must be deterministic.
|
|
15
|
+
* - All side-effects (LLM calls, DB calls, tool execution, stream IO) should happen in steps.
|
|
16
|
+
*
|
|
17
|
+
* Usage:
|
|
18
|
+
* - Inside a Workflow step, call `steps.progressToWritable(...)` and pass the workflow's writable stream.
|
|
19
|
+
*/
|
|
20
|
+
export declare function createStorySteps<Context>(story: Story<Context>): {
|
|
21
|
+
/**
|
|
22
|
+
* Runs `story.progressStream(...)` and pipes the resulting UI stream into the provided Workflow writable.
|
|
23
|
+
*
|
|
24
|
+
* This is the recommended integration point with Workflow DevKit streaming:
|
|
25
|
+
* - Workflow owns the stream.
|
|
26
|
+
* - Story writes chunks into it (via piping).
|
|
27
|
+
*/
|
|
28
|
+
progressToWritable(params: {
|
|
29
|
+
incomingEvent: ContextEvent;
|
|
30
|
+
contextIdentifier: ContextIdentifier | null;
|
|
31
|
+
options?: ProgressStreamOptions;
|
|
32
|
+
writable: WorkflowWritableStream<any>;
|
|
33
|
+
}): Promise<any>;
|
|
34
|
+
};
|
|
35
|
+
//# sourceMappingURL=story.steps.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"story.steps.d.ts","sourceRoot":"","sources":["../../src/steps/story.steps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AACjE,OAAO,KAAK,EAAE,qBAAqB,EAAE,KAAK,EAAE,MAAM,iBAAiB,CAAA;AAEnE;;;;;GAKG;AACH,MAAM,MAAM,sBAAsB,CAAC,MAAM,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,CAAC,CAAA;AA6BzE;;;;;;;;;GASG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC;IAE3D;;;;;;OAMG;+BAC8B;QAC/B,aAAa,EAAE,YAAY,CAAA;QAC3B,iBAAiB,EAAE,iBAAiB,GAAG,IAAI,CAAA;QAC3C,OAAO,CAAC,EAAE,qBAAqB,CAAA;QAC/B,QAAQ,EAAE,sBAAsB,CAAC,GAAG,CAAC,CAAA;KACtC;EAgBJ"}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createStorySteps = createStorySteps;
|
|
4
|
+
async function pipeReadableToWritable(readable, writable) {
|
|
5
|
+
const reader = readable.getReader();
|
|
6
|
+
const writer = writable.getWriter();
|
|
7
|
+
try {
|
|
8
|
+
while (true) {
|
|
9
|
+
const { done, value } = await reader.read();
|
|
10
|
+
if (done)
|
|
11
|
+
break;
|
|
12
|
+
await writer.write(value);
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
finally {
|
|
16
|
+
try {
|
|
17
|
+
writer.releaseLock();
|
|
18
|
+
}
|
|
19
|
+
catch {
|
|
20
|
+
// noop
|
|
21
|
+
}
|
|
22
|
+
try {
|
|
23
|
+
reader.releaseLock();
|
|
24
|
+
}
|
|
25
|
+
catch {
|
|
26
|
+
// noop
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
/**
|
|
31
|
+
* Creates Workflow step wrappers for a Story instance.
|
|
32
|
+
*
|
|
33
|
+
* Why:
|
|
34
|
+
* - Workflow functions must be deterministic.
|
|
35
|
+
* - All side-effects (LLM calls, DB calls, tool execution, stream IO) should happen in steps.
|
|
36
|
+
*
|
|
37
|
+
* Usage:
|
|
38
|
+
* - Inside a Workflow step, call `steps.progressToWritable(...)` and pass the workflow's writable stream.
|
|
39
|
+
*/
|
|
40
|
+
function createStorySteps(story) {
|
|
41
|
+
return {
|
|
42
|
+
/**
|
|
43
|
+
* Runs `story.progressStream(...)` and pipes the resulting UI stream into the provided Workflow writable.
|
|
44
|
+
*
|
|
45
|
+
* This is the recommended integration point with Workflow DevKit streaming:
|
|
46
|
+
* - Workflow owns the stream.
|
|
47
|
+
* - Story writes chunks into it (via piping).
|
|
48
|
+
*/
|
|
49
|
+
async progressToWritable(params) {
|
|
50
|
+
"use step";
|
|
51
|
+
const result = await story.progressStream(params.incomingEvent, params.contextIdentifier, params.options);
|
|
52
|
+
await pipeReadableToWritable(result.stream, params.writable);
|
|
53
|
+
// Return metadata (no stream) because Workflow already exposes the stream to clients.
|
|
54
|
+
const { stream: _stream, ...meta } = result;
|
|
55
|
+
return meta;
|
|
56
|
+
},
|
|
57
|
+
};
|
|
58
|
+
}
|
|
59
|
+
//# sourceMappingURL=story.steps.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"story.steps.js","sourceRoot":"","sources":["../../src/steps/story.steps.ts"],"names":[],"mappings":";;AAgDA,4CA8BC;AAnED,KAAK,UAAU,sBAAsB,CACnC,QAAgC,EAChC,QAAwC;IAExC,MAAM,MAAM,GAAG,QAAQ,CAAC,SAAS,EAAE,CAAA;IACnC,MAAM,MAAM,GAAG,QAAQ,CAAC,SAAS,EAAE,CAAA;IAEnC,IAAI,CAAC;QACH,OAAO,IAAI,EAAE,CAAC;YACZ,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAA;YAC3C,IAAI,IAAI;gBAAE,MAAK;YACf,MAAM,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;QAC3B,CAAC;IACH,CAAC;YAAS,CAAC;QACT,IAAI,CAAC;YACH,MAAM,CAAC,WAAW,EAAE,CAAA;QACtB,CAAC;QAAC,MAAM,CAAC;YACP,OAAO;QACT,CAAC;QACD,IAAI,CAAC;YACH,MAAM,CAAC,WAAW,EAAE,CAAA;QACtB,CAAC;QAAC,MAAM,CAAC;YACP,OAAO;QACT,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;;;;GASG;AACH,SAAgB,gBAAgB,CAAU,KAAqB;IAC7D,OAAO;QACL;;;;;;WAMG;QACH,KAAK,CAAC,kBAAkB,CAAC,MAKxB;YACC,UAAU,CAAA;YAEV,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,cAAc,CACvC,MAAM,CAAC,aAAa,EACpB,MAAM,CAAC,iBAAiB,EACxB,MAAM,CAAC,OAAO,CACf,CAAA;YAED,MAAM,sBAAsB,CAAC,MAAM,CAAC,MAAa,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEnE,sFAAsF;YACtF,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,GAAG,MAAa,CAAA;YAClD,OAAO,IAAI,CAAA;QACb,CAAC;KACF,CAAA;AACH,CAAC","sourcesContent":["import type { ContextEvent, ContextIdentifier } from \"../service\"\r\nimport type { ProgressStreamOptions, Story } from \"../story.engine\"\r\n\r\n/**\r\n * A Workflow-compatible writable stream (e.g. 
`getWritable()` from useworkflow).\r\n *\r\n * The Workflow DevKit streams are standard Web Streams and can be passed around\r\n * (serialized as references) by the workflow runtime.\r\n */\r\nexport type WorkflowWritableStream<TChunk = any> = WritableStream<TChunk>\r\n\r\nasync function pipeReadableToWritable<TChunk>(\r\n readable: ReadableStream<TChunk>,\r\n writable: WorkflowWritableStream<TChunk>,\r\n): Promise<void> {\r\n const reader = readable.getReader()\r\n const writer = writable.getWriter()\r\n\r\n try {\r\n while (true) {\r\n const { done, value } = await reader.read()\r\n if (done) break\r\n await writer.write(value)\r\n }\r\n } finally {\r\n try {\r\n writer.releaseLock()\r\n } catch {\r\n // noop\r\n }\r\n try {\r\n reader.releaseLock()\r\n } catch {\r\n // noop\r\n }\r\n }\r\n}\r\n\r\n/**\r\n * Creates Workflow step wrappers for a Story instance.\r\n *\r\n * Why:\r\n * - Workflow functions must be deterministic.\r\n * - All side-effects (LLM calls, DB calls, tool execution, stream IO) should happen in steps.\r\n *\r\n * Usage:\r\n * - Inside a Workflow step, call `steps.progressToWritable(...)` and pass the workflow's writable stream.\r\n */\r\nexport function createStorySteps<Context>(story: Story<Context>) {\r\n return {\r\n /**\r\n * Runs `story.progressStream(...)` and pipes the resulting UI stream into the provided Workflow writable.\r\n *\r\n * This is the recommended integration point with Workflow DevKit streaming:\r\n * - Workflow owns the stream.\r\n * - Story writes chunks into it (via piping).\r\n */\r\n async progressToWritable(params: {\r\n incomingEvent: ContextEvent\r\n contextIdentifier: ContextIdentifier | null\r\n options?: ProgressStreamOptions\r\n writable: WorkflowWritableStream<any>\r\n }) {\r\n \"use step\"\r\n\r\n const result = await story.progressStream(\r\n params.incomingEvent,\r\n params.contextIdentifier,\r\n params.options,\r\n )\r\n\r\n await pipeReadableToWritable(result.stream as any, params.writable)\r\n\r\n // 
Return metadata (no stream) because Workflow already exposes the stream to clients.\r\n const { stream: _stream, ...meta } = result as any\r\n return meta\r\n },\r\n }\r\n}\r\n\r\n\r\n"]}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
export declare function writeContextSubstate(params: {
|
|
2
|
+
/**
|
|
3
|
+
* Ephemeral substate key for the UI (story engine internal state).
|
|
4
|
+
*
|
|
5
|
+
* - Provide a string key like "actions" to set it
|
|
6
|
+
* - Provide null to clear it
|
|
7
|
+
*/
|
|
8
|
+
key: string | null;
|
|
9
|
+
transient?: boolean;
|
|
10
|
+
}): Promise<void>;
|
|
11
|
+
export declare function writeContextIdChunk(params: {
|
|
12
|
+
contextId: string;
|
|
13
|
+
}): Promise<void>;
|
|
14
|
+
export declare function writeToolOutputs(params: {
|
|
15
|
+
results: Array<{
|
|
16
|
+
toolCallId: string;
|
|
17
|
+
success: true;
|
|
18
|
+
output: unknown;
|
|
19
|
+
} | {
|
|
20
|
+
toolCallId: string;
|
|
21
|
+
success: false;
|
|
22
|
+
errorText: string;
|
|
23
|
+
}>;
|
|
24
|
+
}): Promise<void>;
|
|
25
|
+
export declare function closeStoryStream(params: {
|
|
26
|
+
preventClose?: boolean;
|
|
27
|
+
sendFinish?: boolean;
|
|
28
|
+
}): Promise<void>;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream.steps.d.ts","sourceRoot":"","sources":["../../src/steps/stream.steps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,wBAAsB,mBAAmB,CAAC,MAAM,EAAE;IAChD,QAAQ,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;IACzC,SAAS,EAAE,MAAM,CAAC;CACnB,iBAaA;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE;IAC7C,QAAQ,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;IACzC,OAAO,EAAE,KAAK,CACV;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,IAAI,CAAC;QAAC,MAAM,EAAE,OAAO,CAAA;KAAE,GACtD;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,KAAK,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAC5D,CAAC;CACH,iBAuBA;AAED,wBAAsB,gBAAgB,CAAC,MAAM,EAAE;IAC7C,QAAQ,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;IACzC,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,UAAU,CAAC,EAAE,OAAO,CAAC;CACtB,iBAkBA"}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { getWritable } from "workflow";
|
|
2
|
+
export async function writeContextSubstate(params) {
|
|
3
|
+
"use step";
|
|
4
|
+
const writable = getWritable();
|
|
5
|
+
const writer = writable.getWriter();
|
|
6
|
+
try {
|
|
7
|
+
await writer.write({
|
|
8
|
+
type: "data-context-substate",
|
|
9
|
+
data: { key: params.key },
|
|
10
|
+
transient: params.transient ?? true,
|
|
11
|
+
});
|
|
12
|
+
}
|
|
13
|
+
finally {
|
|
14
|
+
writer.releaseLock();
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
export async function writeContextIdChunk(params) {
|
|
18
|
+
"use step";
|
|
19
|
+
const writable = getWritable();
|
|
20
|
+
const writer = writable.getWriter();
|
|
21
|
+
try {
|
|
22
|
+
await writer.write({
|
|
23
|
+
type: "data-context-id",
|
|
24
|
+
id: params.contextId,
|
|
25
|
+
data: { contextId: params.contextId },
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
finally {
|
|
29
|
+
writer.releaseLock();
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
export async function writeToolOutputs(params) {
|
|
33
|
+
"use step";
|
|
34
|
+
const writable = getWritable();
|
|
35
|
+
const writer = writable.getWriter();
|
|
36
|
+
try {
|
|
37
|
+
for (const r of params.results) {
|
|
38
|
+
if (r.success) {
|
|
39
|
+
await writer.write({
|
|
40
|
+
type: "tool-output-available",
|
|
41
|
+
toolCallId: r.toolCallId,
|
|
42
|
+
output: r.output,
|
|
43
|
+
});
|
|
44
|
+
}
|
|
45
|
+
else {
|
|
46
|
+
await writer.write({
|
|
47
|
+
type: "tool-output-error",
|
|
48
|
+
toolCallId: r.toolCallId,
|
|
49
|
+
errorText: r.errorText,
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
finally {
|
|
55
|
+
writer.releaseLock();
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
export async function closeStoryStream(params) {
|
|
59
|
+
"use step";
|
|
60
|
+
const sendFinish = params.sendFinish ?? true;
|
|
61
|
+
const preventClose = params.preventClose ?? false;
|
|
62
|
+
const writable = getWritable();
|
|
63
|
+
if (sendFinish) {
|
|
64
|
+
const writer = writable.getWriter();
|
|
65
|
+
try {
|
|
66
|
+
await writer.write({ type: "finish" });
|
|
67
|
+
}
|
|
68
|
+
finally {
|
|
69
|
+
writer.releaseLock();
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
if (!preventClose) {
|
|
73
|
+
await writable.close();
|
|
74
|
+
}
|
|
75
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream.steps.js","sourceRoot":"","sources":["../../src/steps/stream.steps.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,KAAK,UAAU,mBAAmB,CAAC,MAGzC;IACC,UAAU,CAAC;IAEX,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAC;IAC3C,IAAI,CAAC;QACH,MAAM,MAAM,CAAC,KAAK,CAAC;YACjB,IAAI,EAAE,iBAAiB;YACvB,EAAE,EAAE,MAAM,CAAC,SAAS;YACpB,IAAI,EAAE,EAAE,SAAS,EAAE,MAAM,CAAC,SAAS,EAAE;SAC/B,CAAC,CAAC;IACZ,CAAC;YAAS,CAAC;QACT,MAAM,CAAC,WAAW,EAAE,CAAC;IACvB,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,gBAAgB,CAAC,MAMtC;IACC,UAAU,CAAC;IAEX,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAC;IAC3C,IAAI,CAAC;QACH,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;YAC/B,IAAI,CAAC,CAAC,OAAO,EAAE,CAAC;gBACd,MAAM,MAAM,CAAC,KAAK,CAAC;oBACjB,IAAI,EAAE,uBAAuB;oBAC7B,UAAU,EAAE,CAAC,CAAC,UAAU;oBACxB,MAAM,EAAE,CAAC,CAAC,MAAa;iBACjB,CAAC,CAAC;YACZ,CAAC;iBAAM,CAAC;gBACN,MAAM,MAAM,CAAC,KAAK,CAAC;oBACjB,IAAI,EAAE,mBAAmB;oBACzB,UAAU,EAAE,CAAC,CAAC,UAAU;oBACxB,SAAS,EAAE,CAAC,CAAC,SAAS;iBAChB,CAAC,CAAC;YACZ,CAAC;QACH,CAAC;IACH,CAAC;YAAS,CAAC;QACT,MAAM,CAAC,WAAW,EAAE,CAAC;IACvB,CAAC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,gBAAgB,CAAC,MAItC;IACC,UAAU,CAAC;IAEX,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,IAAI,IAAI,CAAC;IAC7C,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,IAAI,KAAK,CAAC;IAElD,IAAI,UAAU,EAAE,CAAC;QACf,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAC;QAC3C,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAS,CAAC,CAAC;QAChD,CAAC;gBAAS,CAAC;YACT,MAAM,CAAC,WAAW,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;IAED,IAAI,CAAC,YAAY,EAAE,CAAC;QAClB,MAAM,MAAM,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;IAChC,CAAC;AACH,CAAC","sourcesContent":["import type { UIMessageChunk } from \"ai\";\r\n\r\nexport async function writeContextIdChunk(params: {\r\n writable: WritableStream<UIMessageChunk>;\r\n contextId: string;\r\n}) {\r\n \"use step\";\r\n\r\n const writer = params.writable.getWriter();\r\n try {\r\n await writer.write({\r\n type: \"data-context-id\",\r\n id: params.contextId,\r\n data: { contextId: params.contextId },\r\n } as any);\r\n } finally 
{\r\n writer.releaseLock();\r\n }\r\n}\r\n\r\nexport async function writeToolOutputs(params: {\r\n writable: WritableStream<UIMessageChunk>;\r\n results: Array<\r\n | { toolCallId: string; success: true; output: unknown }\r\n | { toolCallId: string; success: false; errorText: string }\r\n >;\r\n}) {\r\n \"use step\";\r\n\r\n const writer = params.writable.getWriter();\r\n try {\r\n for (const r of params.results) {\r\n if (r.success) {\r\n await writer.write({\r\n type: \"tool-output-available\",\r\n toolCallId: r.toolCallId,\r\n output: r.output as any,\r\n } as any);\r\n } else {\r\n await writer.write({\r\n type: \"tool-output-error\",\r\n toolCallId: r.toolCallId,\r\n errorText: r.errorText,\r\n } as any);\r\n }\r\n }\r\n } finally {\r\n writer.releaseLock();\r\n }\r\n}\r\n\r\nexport async function closeStoryStream(params: {\r\n writable: WritableStream<UIMessageChunk>;\r\n preventClose?: boolean;\r\n sendFinish?: boolean;\r\n}) {\r\n \"use step\";\r\n\r\n const sendFinish = params.sendFinish ?? true;\r\n const preventClose = params.preventClose ?? false;\r\n\r\n if (sendFinish) {\r\n const writer = params.writable.getWriter();\r\n try {\r\n await writer.write({ type: \"finish\" } as any);\r\n } finally {\r\n writer.releaseLock();\r\n }\r\n }\r\n\r\n if (!preventClose) {\r\n await params.writable.close();\r\n }\r\n}\r\n\r\n"]}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
|
|
3
|
+
* Returns the created documentId.
|
|
4
|
+
*/
|
|
5
|
+
export declare function parseAndStoreDocument(db: any, buffer: Buffer, fileName: string, path: string, fileId: string): Promise<string>;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"instant.document-parser.d.ts","sourceRoot":"","sources":["../../src/stores/instant.document-parser.ts"],"names":[],"mappings":"AAsGA;;;GAGG;AACH,wBAAsB,qBAAqB,CACzC,EAAE,EAAE,GAAG,EACP,MAAM,EAAE,MAAM,EACd,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,MAAM,CAAC,CAsDjB"}
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { id } from "@instantdb/admin";
|
|
2
|
+
const LLAMA_CLOUD_BASE_URL = "https://api.cloud.llamaindex.ai/api/v1";
|
|
3
|
+
async function uploadToLlamaCloud(buffer, fileName) {
|
|
4
|
+
const formData = new FormData();
|
|
5
|
+
const uint8Array = new Uint8Array(buffer);
|
|
6
|
+
const blob = new Blob([uint8Array], { type: "application/pdf" });
|
|
7
|
+
formData.append("file", blob, fileName);
|
|
8
|
+
formData.append("parse_mode", "parse_page_with_llm");
|
|
9
|
+
formData.append("high_res_ocr", "true");
|
|
10
|
+
formData.append("adaptive_long_table", "true");
|
|
11
|
+
formData.append("outlined_table_extraction", "true");
|
|
12
|
+
formData.append("output_tables_as_HTML", "true");
|
|
13
|
+
const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/upload`, {
|
|
14
|
+
method: "POST",
|
|
15
|
+
headers: {
|
|
16
|
+
Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
|
|
17
|
+
},
|
|
18
|
+
body: formData,
|
|
19
|
+
});
|
|
20
|
+
if (!response.ok) {
|
|
21
|
+
const errorText = await response.text();
|
|
22
|
+
throw new Error(`LlamaCloud upload failed: ${response.status} ${errorText}`);
|
|
23
|
+
}
|
|
24
|
+
const result = (await response.json());
|
|
25
|
+
return result.id;
|
|
26
|
+
}
|
|
27
|
+
async function getJobStatus(jobId) {
|
|
28
|
+
const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}`, {
|
|
29
|
+
method: "GET",
|
|
30
|
+
headers: {
|
|
31
|
+
Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
|
|
32
|
+
},
|
|
33
|
+
});
|
|
34
|
+
if (!response.ok) {
|
|
35
|
+
const errorText = await response.text();
|
|
36
|
+
throw new Error(`LlamaCloud status fetch failed: ${response.status} ${errorText}`);
|
|
37
|
+
}
|
|
38
|
+
return (await response.json());
|
|
39
|
+
}
|
|
40
|
+
async function getParseResult(jobId) {
|
|
41
|
+
const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}/result/markdown`, {
|
|
42
|
+
method: "GET",
|
|
43
|
+
headers: {
|
|
44
|
+
Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
|
|
45
|
+
},
|
|
46
|
+
});
|
|
47
|
+
if (!response.ok) {
|
|
48
|
+
const errorText = await response.text();
|
|
49
|
+
throw new Error(`LlamaCloud result fetch failed: ${response.status} ${errorText}`);
|
|
50
|
+
}
|
|
51
|
+
return (await response.json());
|
|
52
|
+
}
|
|
53
|
+
async function waitForProcessing(jobId, maxAttempts = 60) {
|
|
54
|
+
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
|
55
|
+
const statusResponse = await getJobStatus(jobId);
|
|
56
|
+
if (statusResponse.status === "SUCCESS" || statusResponse.status === "COMPLETED") {
|
|
57
|
+
return await getParseResult(jobId);
|
|
58
|
+
}
|
|
59
|
+
if (statusResponse.status === "ERROR" || statusResponse.status === "FAILED") {
|
|
60
|
+
throw new Error(`LlamaCloud processing failed with status: ${statusResponse.status}`);
|
|
61
|
+
}
|
|
62
|
+
await new Promise((resolve) => setTimeout(resolve, 2000));
|
|
63
|
+
}
|
|
64
|
+
throw new Error("LlamaCloud processing timeout");
|
|
65
|
+
}
|
|
66
|
+
/**
 * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
 * Returns the created documentId. When a document is already linked to the
 * given file, its id is returned and no parsing happens (idempotent).
 */
export async function parseAndStoreDocument(db, buffer, fileName, path, fileId) {
    // Idempotency check: reuse a document previously linked to this file.
    const existing = await db.query({
        document_documents: {
            $: {
                where: { "file.id": fileId },
            },
            file: {},
        },
    });
    const existingDocs = existing.document_documents;
    if (existingDocs && existingDocs.length > 0) {
        return existingDocs[0].id;
    }
    const jobId = await uploadToLlamaCloud(buffer, fileName);
    const result = await waitForProcessing(jobId);
    // Collect whole-document markdown (when present) followed by any
    // per-page entries. NOTE(review): when both are present the content is
    // stored twice (full markdown plus pages) — presumably intentional.
    const pages = [];
    if (result.markdown) {
        pages.push({ id: id(), text: result.markdown });
    }
    for (const page of result.pages ?? []) {
        pages.push({ id: id(), text: page.text });
    }
    if (pages.length === 0) {
        throw new Error("No content extracted from document");
    }
    const documentId = id();
    await db.transact([
        db.tx.document_documents[documentId].update({
            content: { pages },
            name: fileName,
            // Hard-coded; upload side also assumes PDF input.
            mimeType: "application/pdf",
            createdAt: new Date(),
            path,
        }),
        db.tx.document_documents[documentId].link({ file: fileId }),
    ]);
    return documentId;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"instant.document-parser.js","sourceRoot":"","sources":["../../src/stores/instant.document-parser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,kBAAkB,CAAC;AAEtC,MAAM,oBAAoB,GAAG,wCAAwC,CAAC;AAqBtE,KAAK,UAAU,kBAAkB,CAAC,MAAc,EAAE,QAAgB;IAChE,MAAM,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;IAEhC,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,MAAM,CAAC,CAAC;IAC1C,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC,CAAC,UAAU,CAAC,EAAE,EAAE,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAC;IACjE,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IACxC,QAAQ,CAAC,MAAM,CAAC,YAAY,EAAE,qBAAqB,CAAC,CAAC;IACrD,QAAQ,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;IACxC,QAAQ,CAAC,MAAM,CAAC,qBAAqB,EAAE,MAAM,CAAC,CAAC;IAC/C,QAAQ,CAAC,MAAM,CAAC,2BAA2B,EAAE,MAAM,CAAC,CAAC;IACrD,QAAQ,CAAC,MAAM,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,oBAAoB,iBAAiB,EAAE;QACrE,MAAM,EAAE,MAAM;QACd,OAAO,EAAE;YACP,aAAa,EAAE,UAAU,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE;SAC3D;QACD,IAAI,EAAE,QAAQ;KACf,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QACxC,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAC,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC;IAC/E,CAAC;IAED,MAAM,MAAM,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAwC,CAAC;IAC9E,OAAO,MAAM,CAAC,EAAE,CAAC;AACnB,CAAC;AAED,KAAK,UAAU,YAAY,CAAC,KAAa;IACvC,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,oBAAoB,gBAAgB,KAAK,EAAE,EAAE;QAC3E,MAAM,EAAE,KAAK;QACb,OAAO,EAAE;YACP,aAAa,EAAE,UAAU,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE;SAC3D;KACF,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QACxC,MAAM,IAAI,KAAK,CAAC,mCAAmC,QAAQ,CAAC,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC;IACrF,CAAC;IAED,OAAO,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAwC,CAAC;AACxE,CAAC;AAED,KAAK,UAAU,cAAc,CAAC,KAAa;IACzC,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,oBAAoB,gBAAgB,KAAK,kBAAkB,EAAE;QAC3F,MAAM,EAAE,KAAK;QACb,OAAO,EAAE;YACP,aAAa,EAAE,UAAU,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE;SAC3D;KACF,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QACxC,MA
AM,IAAI,KAAK,CAAC,mCAAmC,QAAQ,CAAC,MAAM,IAAI,SAAS,EAAE,CAAC,CAAC;IACrF,CAAC;IAED,OAAO,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAwC,CAAC;AACxE,CAAC;AAED,KAAK,UAAU,iBAAiB,CAAC,KAAa,EAAE,cAAsB,EAAE;IACtE,KAAK,IAAI,OAAO,GAAG,CAAC,EAAE,OAAO,GAAG,WAAW,EAAE,OAAO,EAAE,EAAE,CAAC;QACvD,MAAM,cAAc,GAAG,MAAM,YAAY,CAAC,KAAK,CAAC,CAAC;QAEjD,IAAI,cAAc,CAAC,MAAM,KAAK,SAAS,IAAI,cAAc,CAAC,MAAM,KAAK,WAAW,EAAE,CAAC;YACjF,OAAO,MAAM,cAAc,CAAC,KAAK,CAAC,CAAC;QACrC,CAAC;QAED,IAAI,cAAc,CAAC,MAAM,KAAK,OAAO,IAAI,cAAc,CAAC,MAAM,KAAK,QAAQ,EAAE,CAAC;YAC5E,MAAM,IAAI,KAAK,CAAC,6CAA6C,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC;QACxF,CAAC;QAED,MAAM,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC;IAC5D,CAAC;IAED,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;AACnD,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,EAAO,EACP,MAAc,EACd,QAAgB,EAChB,IAAY,EACZ,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC;QACtC,kBAAkB,EAAE;YAClB,CAAC,EAAE;gBACD,KAAK,EAAE,EAAE,SAAS,EAAE,MAAM,EAAE;aAC7B;YACD,IAAI,EAAE,EAAE;SACT;KACF,CAAC,CAAC;IAEH,IAAI,gBAAgB,CAAC,kBAAkB,IAAI,gBAAgB,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC1F,OAAO,gBAAgB,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACnD,CAAC;IAED,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACzD,MAAM,MAAM,GAAG,MAAM,iBAAiB,CAAC,KAAK,CAAC,CAAC;IAE9C,MAAM,KAAK,GAAmB,EAAE,CAAC;IAEjC,IAAI,MAAM,CAAC,QAAQ,EAAE,CAAC;QACpB,KAAK,CAAC,IAAI,CAAC;YACT,EAAE,EAAE,EAAE,EAAE;YACR,IAAI,EAAE,MAAM,CAAC,QAAQ;SACtB,CAAC,CAAC;IACL,CAAC;IAED,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC5C,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;YAChC,KAAK,CAAC,IAAI,CAAC;gBACT,EAAE,EAAE,EAAE,EAAE;gBACR,IAAI,EAAE,IAAI,CAAC,IAAI;aAChB,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;IACxD,CAAC;IAED,MAAM,UAAU,GAAG,EAAE,EAAE,CAAC;IACxB,MAAM,EAAE,CAAC,QAAQ,CAAC;QAChB,EAAE,CAAC,EAAE,CAAC,kBAAkB,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC;YAC1C,OAAO,EAAE,EAAE,KAAK,EAAE;YAClB,IAAI,EAAE,QAAQ;YACd,QAAQ,EAAE,iBAAiB;
YAC3B,SAAS,EAAE,IAAI,IAAI,EAAE;YACrB,IAAI;SACL,CAAC;QACF,EAAE,CAAC,EAAE,CAAC,kBAAkB,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC;YACxC,IAAI,EAAE,MAAM;SACb,CAAC;KACH,CAAC,CAAC;IAEH,OAAO,UAAU,CAAC;AACpB,CAAC","sourcesContent":["import { id } from \"@instantdb/admin\";\r\n\r\nconst LLAMA_CLOUD_BASE_URL = \"https://api.cloud.llamaindex.ai/api/v1\";\r\n\r\ninterface DocumentPage {\r\n id: string;\r\n text: string;\r\n}\r\n\r\ninterface LlamaParseUploadResponse {\r\n id: string;\r\n status: string;\r\n}\r\n\r\ninterface LlamaParseStatusResponse {\r\n status: string;\r\n}\r\n\r\ninterface LlamaParseResultResponse {\r\n markdown?: string;\r\n pages?: Array<{ page: number; text: string }>;\r\n}\r\n\r\nasync function uploadToLlamaCloud(buffer: Buffer, fileName: string): Promise<string> {\r\n const formData = new FormData();\r\n\r\n const uint8Array = new Uint8Array(buffer);\r\n const blob = new Blob([uint8Array], { type: \"application/pdf\" });\r\n formData.append(\"file\", blob, fileName);\r\n formData.append(\"parse_mode\", \"parse_page_with_llm\");\r\n formData.append(\"high_res_ocr\", \"true\");\r\n formData.append(\"adaptive_long_table\", \"true\");\r\n formData.append(\"outlined_table_extraction\", \"true\");\r\n formData.append(\"output_tables_as_HTML\", \"true\");\r\n\r\n const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/upload`, {\r\n method: \"POST\",\r\n headers: {\r\n Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,\r\n },\r\n body: formData,\r\n });\r\n\r\n if (!response.ok) {\r\n const errorText = await response.text();\r\n throw new Error(`LlamaCloud upload failed: ${response.status} ${errorText}`);\r\n }\r\n\r\n const result = (await response.json()) as unknown as LlamaParseUploadResponse;\r\n return result.id;\r\n}\r\n\r\nasync function getJobStatus(jobId: string): Promise<LlamaParseStatusResponse> {\r\n const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}`, {\r\n method: \"GET\",\r\n headers: {\r\n Authorization: `Bearer 
${process.env.LLAMA_CLOUD_API_KEY}`,\r\n },\r\n });\r\n\r\n if (!response.ok) {\r\n const errorText = await response.text();\r\n throw new Error(`LlamaCloud status fetch failed: ${response.status} ${errorText}`);\r\n }\r\n\r\n return (await response.json()) as unknown as LlamaParseStatusResponse;\r\n}\r\n\r\nasync function getParseResult(jobId: string): Promise<LlamaParseResultResponse> {\r\n const response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}/result/markdown`, {\r\n method: \"GET\",\r\n headers: {\r\n Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,\r\n },\r\n });\r\n\r\n if (!response.ok) {\r\n const errorText = await response.text();\r\n throw new Error(`LlamaCloud result fetch failed: ${response.status} ${errorText}`);\r\n }\r\n\r\n return (await response.json()) as unknown as LlamaParseResultResponse;\r\n}\r\n\r\nasync function waitForProcessing(jobId: string, maxAttempts: number = 60): Promise<LlamaParseResultResponse> {\r\n for (let attempt = 0; attempt < maxAttempts; attempt++) {\r\n const statusResponse = await getJobStatus(jobId);\r\n\r\n if (statusResponse.status === \"SUCCESS\" || statusResponse.status === \"COMPLETED\") {\r\n return await getParseResult(jobId);\r\n }\r\n\r\n if (statusResponse.status === \"ERROR\" || statusResponse.status === \"FAILED\") {\r\n throw new Error(`LlamaCloud processing failed with status: ${statusResponse.status}`);\r\n }\r\n\r\n await new Promise((resolve) => setTimeout(resolve, 2000));\r\n }\r\n\r\n throw new Error(\"LlamaCloud processing timeout\");\r\n}\r\n\r\n/**\r\n * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).\r\n * Returns the created documentId.\r\n */\r\nexport async function parseAndStoreDocument(\r\n db: any,\r\n buffer: Buffer,\r\n fileName: string,\r\n path: string,\r\n fileId: string,\r\n): Promise<string> {\r\n const existingDocument = await db.query({\r\n document_documents: {\r\n $: {\r\n where: { \"file.id\": fileId 
},\r\n },\r\n file: {},\r\n },\r\n });\r\n\r\n if (existingDocument.document_documents && existingDocument.document_documents.length > 0) {\r\n return existingDocument.document_documents[0].id;\r\n }\r\n\r\n const jobId = await uploadToLlamaCloud(buffer, fileName);\r\n const result = await waitForProcessing(jobId);\r\n\r\n const pages: DocumentPage[] = [];\r\n\r\n if (result.markdown) {\r\n pages.push({\r\n id: id(),\r\n text: result.markdown,\r\n });\r\n }\r\n\r\n if (result.pages && result.pages.length > 0) {\r\n for (const page of result.pages) {\r\n pages.push({\r\n id: id(),\r\n text: page.text,\r\n });\r\n }\r\n }\r\n\r\n if (pages.length === 0) {\r\n throw new Error(\"No content extracted from document\");\r\n }\r\n\r\n const documentId = id();\r\n await db.transact([\r\n db.tx.document_documents[documentId].update({\r\n content: { pages },\r\n name: fileName,\r\n mimeType: \"application/pdf\",\r\n createdAt: new Date(),\r\n path,\r\n }),\r\n db.tx.document_documents[documentId].link({\r\n file: fileId,\r\n }),\r\n ]);\r\n\r\n return documentId;\r\n}\r\n\r\n\r\n"]}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { ContextEvent } from "../story.store";
|
|
2
|
+
/**
 * Flattens a parsed document record into a single string, each page preceded
 * by a "--- <prefix> N ---" separator; returns "" when the record has no pages.
 */
export declare function coerceDocumentTextPages(documentRecord: any, opts?: {
    /** Separator label used between pages. Defaults to "Page". */
    pageLabelPrefix?: string;
}): string;
/**
 * Expands context events: after each event carrying an Instant file part,
 * appends a derived event containing the parsed document's extracted text.
 */
export declare function expandEventsWithInstantDocuments(params: {
    /** InstantDB admin client used for file/document lookups. */
    db: any;
    events: ContextEvent[];
    /**
     * Hard limit to avoid huge model inputs. Defaults to 120k chars of extracted text.
     */
    maxChars?: number;
    /**
     * Event type used for derived document text. Defaults to "document.parsed".
     */
    derivedEventType?: string;
}): Promise<ContextEvent[]>;
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import { parseAndStoreDocument } from "./instant.document-parser";
|
|
2
|
+
// A part counts as a file part when it is an object that either is
// explicitly typed "file" or carries Instant provider metadata.
function isFilePart(part) {
    if (!part || typeof part !== "object") {
        return false;
    }
    return part.type === "file" || Boolean(part?.providerMetadata?.instant);
}
|
|
7
|
+
/**
 * Flattens a parsed document record into one string, with each page preceded
 * by a "\n\n--- <label> N ---\n\n" separator. Pages without a string `text`
 * contribute an empty body. Returns "" when the record has no pages.
 */
export function coerceDocumentTextPages(documentRecord, opts) {
    const pages = documentRecord?.content?.pages;
    if (!Array.isArray(pages) || pages.length === 0) {
        return "";
    }
    const label = opts?.pageLabelPrefix ?? "Page";
    let out = "";
    pages.forEach((page, idx) => {
        const text = typeof page?.text === "string" ? page.text : "";
        out += `\n\n--- ${label} ${idx + 1} ---\n\n${text}`;
    });
    return out;
}
|
|
19
|
+
/**
 * Looks up an Instant $files record (with its linked document) by file id,
 * falling back to a path lookup when no id is given. Returns null when
 * neither identifier is provided or no record matches.
 */
async function resolveInstantFileRecord(db, params) {
    const fileId = params.fileId ? String(params.fileId) : null;
    const filePath = params.path ? String(params.path) : null;
    if (!fileId && !filePath) {
        return null;
    }
    // Id lookup wins; path is only used when no id was supplied.
    const where = fileId ? { id: fileId } : { path: filePath };
    const result = await db.query({
        $files: { $: { where, limit: 1 }, document: {} },
    });
    return result?.$files?.[0] ?? null;
}
|
|
35
|
+
/**
 * Ensures a parsed document exists for the given Instant file record.
 * Returns the already-linked document when present; otherwise downloads the
 * file from its http(s) URL, runs it through LlamaParse via
 * parseAndStoreDocument, and returns the freshly stored document record.
 * Returns null when the file has no fetchable http(s) URL.
 */
async function ensureDocumentParsedForFile(db, params) {
    const { fileRecord, part } = params;
    // The `document` link may be materialized as an array or a single object.
    const linked = Array.isArray(fileRecord?.document)
        ? fileRecord.document?.[0]
        : fileRecord.document;
    if (linked?.id) {
        return linked;
    }
    const fileUrl = typeof fileRecord?.url === "string" ? fileRecord.url : "";
    const isHttp = fileUrl.startsWith("http://") || fileUrl.startsWith("https://");
    if (!isHttp) {
        return null;
    }
    const resp = await fetch(fileUrl);
    if (!resp.ok) {
        throw new Error(`Failed to fetch file for parsing: HTTP ${resp.status}`);
    }
    const buffer = Buffer.from(await resp.arrayBuffer());
    const name = (typeof part?.filename === "string" && part.filename) ||
        (typeof fileRecord?.path === "string" && fileRecord.path) ||
        "file";
    const path = (typeof fileRecord?.path === "string" && fileRecord.path) ||
        `/agent/${Date.now()}-${name}`;
    const documentId = await parseAndStoreDocument(db, buffer, name, path, String(fileRecord.id));
    // Re-query so the caller receives the stored record with its file link.
    const dq = await db.query({
        document_documents: { $: { where: { id: documentId }, limit: 1 }, file: {} },
    });
    return dq?.document_documents?.[0] ?? null;
}
|
|
63
|
+
/**
 * Walks a list of context events and, for every Instant file part found,
 * appends a derived event (type defaults to "document.parsed") containing
 * the document's extracted text, truncated to `maxChars` (default 120000).
 * Source events are kept in order, each immediately followed by its derived
 * events; parts without a resolvable file or extractable text are skipped.
 */
export async function expandEventsWithInstantDocuments(params) {
    const db = params.db;
    const maxChars = typeof params.maxChars === "number" ? params.maxChars : 120000;
    const derivedEventType = params.derivedEventType ?? "document.parsed";
    const expanded = [];
    for (const event of params.events) {
        expanded.push(event);
        const parts = event?.content?.parts;
        if (!Array.isArray(parts) || parts.length === 0) {
            continue;
        }
        for (const part of parts) {
            if (!isFilePart(part)) {
                continue;
            }
            const instantMeta = part?.providerMetadata?.instant ?? {};
            const fileId = instantMeta?.fileId ? String(instantMeta.fileId) : undefined;
            const filePath = instantMeta?.path ? String(instantMeta.path) : undefined;
            const fileRecord = await resolveInstantFileRecord(db, { fileId, path: filePath });
            if (!fileRecord?.id) {
                continue;
            }
            const documentRecord = await ensureDocumentParsedForFile(db, { fileRecord, part });
            const pageText = coerceDocumentTextPages(documentRecord);
            if (!pageText) {
                continue;
            }
            // Cap extracted text so a huge document cannot blow up model input.
            let clipped = pageText;
            if (pageText.length > maxChars) {
                clipped = `${pageText.slice(0, maxChars)}\n\n[truncated: maxChars=${maxChars}]`;
            }
            expanded.push({
                id: `derived:${event.id}:${String(fileRecord.id)}`,
                type: derivedEventType,
                channel: event.channel ?? "web",
                createdAt: new Date().toISOString(),
                content: {
                    parts: [
                        {
                            type: "text",
                            text: `Parsed document available.\nFile: "${String(fileRecord.path || part.filename || "file")}"\nProvider: llamacloud`,
                        },
                        { type: "text", text: `Document transcription:${clipped}` },
                    ],
                },
            });
        }
    }
    return expanded;
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import type { ModelMessage } from "ai";
|
|
2
|
+
import type { ContextEvent, ContextIdentifier, ContextStatus, StoredContext, StoryStore } from "../story.store";
|
|
3
|
+
export { parseAndStoreDocument } from "./instant.document-parser";
|
|
4
|
+
export { coerceDocumentTextPages, expandEventsWithInstantDocuments, } from "./instant.documents";
|
|
5
|
+
/**
 * InstantDB-backed StoryStore.
 *
 * This is intentionally kept behind the store boundary so the core story engine
 * can remain database-agnostic.
 */
export type InstantStoreDb = any;
export declare class InstantStore implements StoryStore {
    private db;
    constructor(db: InstantStoreDb);
    /** Returns the context for the identifier, creating it when absent (or when the identifier is null). */
    getOrCreateContext<C>(contextIdentifier: ContextIdentifier | null): Promise<StoredContext<C>>;
    private createContext;
    /** Fetches a context by identifier; resolves null when it does not exist. */
    getContext<C>(contextIdentifier: ContextIdentifier): Promise<StoredContext<C> | null>;
    /** Replaces the context's content payload and returns the updated record. */
    updateContextContent<C>(contextIdentifier: ContextIdentifier, content: C): Promise<StoredContext<C>>;
    /** Updates only the context's status field. */
    updateContextStatus(contextIdentifier: ContextIdentifier, status: ContextStatus): Promise<void>;
    /** Persists a new event under the given context. */
    saveEvent(contextIdentifier: ContextIdentifier, event: ContextEvent): Promise<ContextEvent>;
    /** Overwrites an existing event by id. */
    updateEvent(eventId: string, event: ContextEvent): Promise<ContextEvent>;
    /** Fetches a single event by id; resolves null when not found. */
    getEvent(eventId: string): Promise<ContextEvent | null>;
    /** Lists all events belonging to the context. */
    getEvents(contextIdentifier: ContextIdentifier): Promise<ContextEvent[]>;
    /** Records a new execution linking a trigger event to its reaction event. */
    createExecution(contextIdentifier: ContextIdentifier, triggerEventId: string, reactionEventId: string): Promise<{
        id: string;
    }>;
    /** Marks an execution as completed or failed. */
    completeExecution(contextIdentifier: ContextIdentifier, executionId: string, status: "completed" | "failed"): Promise<void>;
    /** Converts stored context events into model messages for the `ai` SDK. */
    eventsToModelMessages(events: ContextEvent[]): Promise<ModelMessage[]>;
}
/**
 * Helper to create a StoryRuntimeResolver that returns an InstantStore.
 *
 * This keeps the app-level `ekairos.ts` extremely small.
 */
export declare function createInstantStoreRuntime(params: {
    /** Resolves the per-organization InstantDB client (sync or async). */
    getDb: (orgId: string) => Promise<InstantStoreDb> | InstantStoreDb;
    /** Optional extractor for the org id from the runtime env. */
    getOrgId?: (env: Record<string, unknown>) => string;
}): (env: Record<string, unknown>) => Promise<{
    store: InstantStore;
}>;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"instant.store.d.ts","sourceRoot":"","sources":["../../src/stores/instant.store.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,IAAI,CAAA;AACtC,OAAO,KAAK,EACV,YAAY,EACZ,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,UAAU,EACX,MAAM,gBAAgB,CAAA;AACvB,OAAO,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAA;AAEjE;;;;;GAKG;AACH,MAAM,MAAM,cAAc,GAAG,GAAG,CAAA;AAEhC,qBAAa,YAAa,YAAW,UAAU;IAC7C,OAAO,CAAC,EAAE,CAAK;gBAEH,EAAE,EAAE,cAAc;IAIxB,kBAAkB,CAAC,CAAC,EACxB,iBAAiB,EAAE,iBAAiB,GAAG,IAAI,GAC1C,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;YAgBd,aAAa;IA8BrB,UAAU,CAAC,CAAC,EAChB,iBAAiB,EAAE,iBAAiB,GACnC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IA0B7B,oBAAoB,CAAC,CAAC,EAC1B,iBAAiB,EAAE,iBAAiB,EACpC,OAAO,EAAE,CAAC,GACT,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAgBtB,mBAAmB,CACvB,iBAAiB,EAAE,iBAAiB,EACpC,MAAM,EAAE,aAAa,GACpB,OAAO,CAAC,IAAI,CAAC;IAqBV,SAAS,CACb,iBAAiB,EAAE,iBAAiB,EACpC,KAAK,EAAE,YAAY,GAClB,OAAO,CAAC,YAAY,CAAC;IAqBlB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;IAOxE,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC;IASvD,SAAS,CAAC,iBAAiB,EAAE,iBAAiB,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC;IAiBxE,eAAe,CACnB,iBAAiB,EAAE,iBAAiB,EACpC,cAAc,EAAE,MAAM,EACtB,eAAe,EAAE,MAAM,GACtB,OAAO,CAAC;QAAE,EAAE,EAAE,MAAM,CAAA;KAAE,CAAC;IA0BpB,iBAAiB,CACrB,iBAAiB,EAAE,iBAAiB,EACpC,WAAW,EAAE,MAAM,EACnB,MAAM,EAAE,WAAW,GAAG,QAAQ,GAC7B,OAAO,CAAC,IAAI,CAAC;IAcV,qBAAqB,CAAC,MAAM,EAAE,YAAY,EAAE,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC;CAG7E;AAED;;;;GAIG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE;IAChD,KAAK,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,CAAA;IAClE,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,MAAM,CAAA;CACpD,IAGe,KAAK,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;;GAgB3C"}
|