@ekairos/story 1.21.41-beta.0 → 1.21.43-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ekairos.config.js +1 -16
- package/dist/index.d.ts +6 -6
- package/dist/index.js +5 -5
- package/dist/runtime.d.ts +1 -2
- package/dist/runtime.js +1 -2
- package/dist/steps/reaction.steps.d.ts +25 -0
- package/dist/steps/reaction.steps.js +135 -0
- package/dist/steps/store.steps.d.ts +13 -28
- package/dist/steps/store.steps.js +20 -56
- package/dist/steps/stream.steps.d.ts +7 -0
- package/dist/steps/stream.steps.js +15 -0
- package/dist/stores/instant.document-parser.d.ts +1 -1
- package/dist/stores/instant.document-parser.js +175 -39
- package/dist/stores/instant.documents.js +82 -6
- package/dist/stores/instant.store.d.ts +2 -0
- package/dist/stores/instant.store.js +13 -0
- package/dist/story.builder.d.ts +4 -4
- package/dist/story.builder.js +2 -2
- package/dist/story.config.d.ts +0 -2
- package/dist/story.config.js +46 -39
- package/dist/story.d.ts +2 -2
- package/dist/story.engine.d.ts +2 -2
- package/dist/story.engine.js +19 -17
- package/dist/story.js +2 -2
- package/package.json +2 -2
package/dist/ekairos.config.js
CHANGED
@@ -1,15 +1,4 @@
 import { configureStoryRuntime, configureStoryRuntimeBootstrap, isStoryRuntimeConfigured, } from "./story.config";
-const GLOBAL_EKAIROS_CONFIG = Symbol.for("@ekairos/story.ekairosConfig");
-function setGlobalEkairosConfig(config) {
-try {
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-;
-globalThis[GLOBAL_EKAIROS_CONFIG] = config;
-}
-catch {
-// ignore
-}
-}
 /**
 * Creates a small "framework-style" config object that can be executed in the step runtime.
 *
@@ -18,7 +7,7 @@ function setGlobalEkairosConfig(config) {
 */
 export function createEkairosConfig(params) {
 const stories = params.stories ?? [];
-
+return {
 stories,
 runtime: params.runtime,
 setup() {
@@ -34,8 +23,4 @@ export function createEkairosConfig(params) {
 s.register();
 },
 };
-// Register globally (process-level) so libraries/steps can access it transparently.
-// Note: This does NOT call `setup()` (no runtime side-effects).
-setGlobalEkairosConfig(config);
-return config;
 }
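Note on the change above: createEkairosConfig() no longer registers the returned config on globalThis, so nothing is wired up until the app runs setup() in the step runtime. A minimal sketch of the app-level ekairos.ts convention this diff refers to later (the buildStore helper and its import are hypothetical; only createEkairosConfig, the { stories, runtime } params, and setup() come from this package):

// ekairos.ts — hypothetical app module, sketched from the createEkairosConfig() shape above.
import { createEkairosConfig } from "@ekairos/story/runtime";
import type { StoryStore } from "@ekairos/story";

// Placeholder for however the app constructs its StoryStore (e.g. an InstantStore over an admin db).
declare function buildStore(): Promise<StoryStore>;

export const ekairosConfig = createEkairosConfig({
    stories: [],
    // Assumed to follow the StoryRuntimeResolver shape from story.config.d.ts: (env) => { store }.
    runtime: async () => ({ store: await buildStore() }),
});

// As of this version the config is not stored globally on creation, so the step runtime
// must evaluate this module and call setup() explicitly (see story.config.js below).
ekairosConfig.setup();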
package/dist/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
-export { story, createStory, type StoryConfig, type StoryInstance, type StoryOptions, type StoryStreamOptions, Story, type RegistrableStoryBuilder, } from "./story";
-export type { StoryStore, ContextIdentifier, StoredContext, ContextEvent, } from "./story.store";
-export { registerStory, getStory, getStoryFactory, hasStory, listStories, type StoryKey, } from "./story.registry";
-export { storyDomain } from "./schema";
-export { didToolExecute } from "./story.toolcalls";
-export { USER_MESSAGE_TYPE, ASSISTANT_MESSAGE_TYPE, SYSTEM_MESSAGE_TYPE, WEB_CHANNEL, AGENT_CHANNEL, EMAIL_CHANNEL, createUserEventFromUIMessages, createAssistantEventFromUIMessages, convertToUIMessage, convertEventToModelMessages, convertEventsToModelMessages, convertModelMessageToEvent, type ResponseMessage, } from "./events";
+export { story, createStory, type StoryConfig, type StoryInstance, type StoryOptions, type StoryStreamOptions, Story, type RegistrableStoryBuilder, } from "./story.js";
+export type { StoryStore, ContextIdentifier, StoredContext, ContextEvent, } from "./story.store.js";
+export { registerStory, getStory, getStoryFactory, hasStory, listStories, type StoryKey, } from "./story.registry.js";
+export { storyDomain } from "./schema.js";
+export { didToolExecute } from "./story.toolcalls.js";
+export { USER_MESSAGE_TYPE, ASSISTANT_MESSAGE_TYPE, SYSTEM_MESSAGE_TYPE, WEB_CHANNEL, AGENT_CHANNEL, EMAIL_CHANNEL, createUserEventFromUIMessages, createAssistantEventFromUIMessages, convertToUIMessage, convertEventToModelMessages, convertEventsToModelMessages, convertModelMessageToEvent, type ResponseMessage, } from "./events.js";
package/dist/index.js
CHANGED
@@ -1,7 +1,7 @@
 export {
 // Story API
-story, createStory, Story, } from "./story";
-export { registerStory, getStory, getStoryFactory, hasStory, listStories, } from "./story.registry";
-export { storyDomain } from "./schema";
-export { didToolExecute } from "./story.toolcalls";
-export { USER_MESSAGE_TYPE, ASSISTANT_MESSAGE_TYPE, SYSTEM_MESSAGE_TYPE, WEB_CHANNEL, AGENT_CHANNEL, EMAIL_CHANNEL, createUserEventFromUIMessages, createAssistantEventFromUIMessages, convertToUIMessage, convertEventToModelMessages, convertEventsToModelMessages, convertModelMessageToEvent, } from "./events";
+story, createStory, Story, } from "./story.js";
+export { registerStory, getStory, getStoryFactory, hasStory, listStories, } from "./story.registry.js";
+export { storyDomain } from "./schema.js";
+export { didToolExecute } from "./story.toolcalls.js";
+export { USER_MESSAGE_TYPE, ASSISTANT_MESSAGE_TYPE, SYSTEM_MESSAGE_TYPE, WEB_CHANNEL, AGENT_CHANNEL, EMAIL_CHANNEL, createUserEventFromUIMessages, createAssistantEventFromUIMessages, convertToUIMessage, convertEventToModelMessages, convertEventsToModelMessages, convertModelMessageToEvent, } from "./events.js";
package/dist/runtime.d.ts
CHANGED
@@ -8,6 +8,5 @@
 * - Do NOT import this entrypoint from client/browser code.
 * - Keep `@ekairos/story` main entrypoint safe to import from schema/domain modules.
 */
-export { configureStoryRuntime, configureStoryRuntimeBootstrap,
+export { configureStoryRuntime, configureStoryRuntimeBootstrap, isStoryRuntimeConfigured, resolveStoryRuntime, type StoryEnvironment, type StoryRuntime, type StoryRuntimeResolver, } from "./story.config";
 export { createEkairosConfig, type EkairosConfig, type RegistrableStory, } from "./ekairos.config";
-export { withEkairosRuntime } from "./next";
package/dist/runtime.js
CHANGED
@@ -8,6 +8,5 @@
 * - Do NOT import this entrypoint from client/browser code.
 * - Keep `@ekairos/story` main entrypoint safe to import from schema/domain modules.
 */
-export { configureStoryRuntime, configureStoryRuntimeBootstrap,
+export { configureStoryRuntime, configureStoryRuntimeBootstrap, isStoryRuntimeConfigured, resolveStoryRuntime, } from "./story.config";
 export { createEkairosConfig, } from "./ekairos.config";
-export { withEkairosRuntime } from "./next";
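The runtime entrypoint above is the server-only surface for this wiring. As a hedged sketch of the lower-level hook it now re-exports in full (buildStore is again a placeholder, not a package API):

// server-only module — never import this from client/browser code, per the header comment above
import { configureStoryRuntime, type StoryRuntime } from "@ekairos/story/runtime";
import type { StoryStore } from "@ekairos/story";

declare function buildStore(): Promise<StoryStore>; // hypothetical app-specific store setup

configureStoryRuntime(async (): Promise<StoryRuntime> => ({
    store: await buildStore(),
}));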
package/dist/steps/reaction.steps.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { type ModelMessage } from "ai";
+import type { StoryEnvironment } from "../story.config";
+import type { ContextEvent, ContextIdentifier } from "../story.store";
+import type { SerializableToolForModel } from "../tools-to-model-tools";
+/**
+* Executes a full "reaction" inside a single workflow step:
+* - load events from store
+* - convert events to model messages
+* - run the streaming model call and emit chunks
+* - extract tool calls from the resulting assistant event
+*/
+export declare function executeReaction(params: {
+env: StoryEnvironment;
+contextIdentifier: ContextIdentifier;
+model: any;
+system: string;
+tools: Record<string, SerializableToolForModel>;
+eventId: string;
+maxSteps: number;
+sendStart?: boolean;
+}): Promise<{
+assistantEvent: ContextEvent;
+toolCalls: any[];
+messagesForModel: ModelMessage[];
+}>;
package/dist/steps/reaction.steps.js
ADDED
@@ -0,0 +1,135 @@
+import { jsonSchema, gateway, smoothStream, stepCountIs, streamText, } from "ai";
+import { getWritable } from "workflow";
+import { resolveStoryRuntime } from "../story.config";
+import { extractToolCallsFromParts } from "../story.toolcalls";
+function safeErrorJson(error) {
+const seen = new WeakSet();
+const redactKey = (k) => /token|authorization|cookie|secret|api[_-]?key|password/i.test(k);
+const err = error;
+const payload = {
+name: err?.name,
+message: err?.message,
+status: err?.status,
+body: err?.body,
+data: err?.data,
+stack: err?.stack,
+};
+try {
+return JSON.stringify(payload, (k, v) => {
+if (redactKey(k))
+return "[redacted]";
+if (typeof v === "string" && v.length > 5000)
+return "[truncated-string]";
+if (typeof v === "object" && v !== null) {
+if (seen.has(v))
+return "[circular]";
+seen.add(v);
+}
+return v;
+});
+}
+catch {
+return JSON.stringify({ message: String(err?.message ?? "error") });
+}
+}
+/**
+* Executes a full "reaction" inside a single workflow step:
+* - load events from store
+* - convert events to model messages
+* - run the streaming model call and emit chunks
+* - extract tool calls from the resulting assistant event
+*/
+export async function executeReaction(params) {
+"use step";
+const { store } = await resolveStoryRuntime(params.env);
+console.log("executeReaction: begin");
+let events;
+try {
+console.log("executeReaction: store.getEvents begin");
+events = await store.getEvents(params.contextIdentifier);
+console.log("executeReaction: store.getEvents ok");
+}
+catch (error) {
+console.error("executeReaction: store.getEvents failed");
+throw error;
+}
+let messagesForModel;
+try {
+console.log("executeReaction: store.eventsToModelMessages begin");
+messagesForModel = (await store.eventsToModelMessages(events));
+console.log("executeReaction: store.eventsToModelMessages ok");
+}
+catch (error) {
+console.error("executeReaction: store.eventsToModelMessages failed", safeErrorJson(error));
+throw error;
+}
+const writable = getWritable();
+// Match DurableAgent-style model init behavior:
+const resolvedModel = typeof params.model === "string"
+? gateway(params.model)
+: typeof params.model === "function"
+? await params.model()
+: params.model;
+// Wrap plain JSON Schema objects so the AI SDK doesn't attempt Zod conversion at runtime.
+const toolsForStreamText = {};
+for (const [name, t] of Object.entries(params.tools)) {
+toolsForStreamText[name] = {
+description: t?.description,
+inputSchema: jsonSchema(t.inputSchema),
+};
+}
+console.log("executeReaction: streamText begin");
+const result = streamText({
+model: resolvedModel,
+system: params.system,
+messages: messagesForModel,
+tools: toolsForStreamText,
+toolChoice: "required",
+stopWhen: stepCountIs(params.maxSteps),
+experimental_transform: smoothStream({ delayInMs: 30, chunking: "word" }),
+});
+console.log("executeReaction: streamText ok");
+// Ensure the underlying stream is consumed (AI SDK requirement)
+result.consumeStream();
+let resolveFinish;
+let rejectFinish;
+const finishPromise = new Promise((resolve, reject) => {
+resolveFinish = resolve;
+rejectFinish = reject;
+});
+const uiStream = result
+.toUIMessageStream({
+sendStart: Boolean(params.sendStart),
+generateMessageId: () => params.eventId,
+messageMetadata() {
+return { eventId: params.eventId };
+},
+onFinish: ({ messages }) => {
+const lastMessage = messages[messages.length - 1];
+const event = {
+id: params.eventId,
+type: "assistant.message",
+channel: "web",
+createdAt: new Date().toISOString(),
+content: { parts: lastMessage?.parts ?? [] },
+};
+resolveFinish(event);
+},
+onError: (e) => {
+rejectFinish(e);
+return e instanceof Error ? e.message : String(e);
+},
+})
+// Filter out per-step finish boundary. Workflow will emit a single finish.
+.pipeThrough(new TransformStream({
+transform(chunk, controller) {
+if (chunk.type === "finish")
+return;
+controller.enqueue(chunk);
+},
+}));
+await uiStream.pipeTo(writable, { preventClose: true });
+const assistantEvent = await finishPromise;
+const toolCalls = extractToolCallsFromParts(assistantEvent?.content?.parts);
+return { assistantEvent, toolCalls, messagesForModel };
+}
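The part of the new step most relevant to callers is how tools cross the step boundary: they arrive as plain, serializable JSON Schema objects and are only wrapped for the AI SDK inside the step, as the loop above shows. A small sketch of that wrapping with a made-up get_weather tool (the tool itself is illustrative, not part of this package):

import { jsonSchema } from "ai";

// Serializable tool shape: safe to pass as an argument into a "use step" function.
const serializableTools: Record<string, { description: string; inputSchema: any }> = {
    get_weather: {
        description: "Look up the current weather for a city",
        inputSchema: {
            type: "object",
            properties: { city: { type: "string" } },
            required: ["city"],
        },
    },
};

// Inside the step: wrap each plain schema so streamText() treats it as JSON Schema
// instead of attempting a Zod conversion at runtime.
const toolsForStreamText = Object.fromEntries(
    Object.entries(serializableTools).map(([name, t]) => [
        name,
        { description: t.description, inputSchema: jsonSchema(t.inputSchema) },
    ]),
);

The wrapped map is then passed to streamText() together with toolChoice: "required" and stopWhen: stepCountIs(maxSteps), exactly as in the step above.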
package/dist/steps/store.steps.d.ts
CHANGED
@@ -1,43 +1,28 @@
-import type { ModelMessage } from "ai";
 import { type StoryEnvironment } from "../story.config";
 import type { ContextEvent, ContextIdentifier, StoredContext } from "../story.store";
-export declare function generateId(): Promise<string>;
-export declare function getOrCreateContext<C>(env: StoryEnvironment, contextIdentifier: ContextIdentifier | null): Promise<{
-context: StoredContext<C>;
-isNew: boolean;
-}>;
 /**
-*
+* Initializes/ensures the story context exists and emits a single `data-context-id` chunk.
 *
-*
-* - `getOrCreateContext(...)` and `writeContextIdChunk(...)` are semantically coupled.
-* - Keeping them in a single step reduces step invocations per run (cheaper) without changing behavior.
+* This is the "context init" boundary for the story engine.
 */
-export declare function
+export declare function initializeContext<C>(env: StoryEnvironment, contextIdentifier: ContextIdentifier | null): Promise<{
 context: StoredContext<C>;
 isNew: boolean;
 }>;
-export declare function getContext<C>(env: StoryEnvironment, contextIdentifier: ContextIdentifier): Promise<StoredContext<C> | null>;
-/**
-* Loads the state needed for a single Story loop iteration.
-*
-* This is a "read aggregation" step: it groups read-only store calls into a single workflow step
-* invocation to reduce step overhead (cheaper) without changing behavior.
-*/
-export declare function loadTurnState<C>(params: {
-env: StoryEnvironment;
-contextIdentifier: ContextIdentifier;
-}): Promise<{
-context: StoredContext<C> | null;
-events: ContextEvent[];
-}>;
 export declare function updateContextContent<C>(env: StoryEnvironment, contextIdentifier: ContextIdentifier, content: C): Promise<StoredContext<C>>;
 export declare function updateContextStatus(env: StoryEnvironment, contextIdentifier: ContextIdentifier, status: "open" | "streaming" | "closed"): Promise<void>;
-export declare function
+export declare function saveTriggerEvent(env: StoryEnvironment, contextIdentifier: ContextIdentifier, event: ContextEvent): Promise<ContextEvent>;
+export declare function saveReactionEvent(env: StoryEnvironment, contextIdentifier: ContextIdentifier, event: ContextEvent): Promise<ContextEvent>;
 export declare function updateEvent(env: StoryEnvironment, eventId: string, event: ContextEvent): Promise<ContextEvent>;
-export declare function getEvents(env: StoryEnvironment, contextIdentifier: ContextIdentifier): Promise<ContextEvent[]>;
 export declare function createExecution(env: StoryEnvironment, contextIdentifier: ContextIdentifier, triggerEventId: string, reactionEventId: string): Promise<{
 id: string;
 }>;
+export declare function createReactionEvent(params: {
+env: StoryEnvironment;
+contextIdentifier: ContextIdentifier;
+triggerEventId: string;
+}): Promise<{
+reactionEventId: string;
+executionId: string;
+}>;
 export declare function completeExecution(env: StoryEnvironment, contextIdentifier: ContextIdentifier, executionId: string, status: "completed" | "failed"): Promise<void>;
-export declare function eventsToModelMessages(env: StoryEnvironment, events: ContextEvent[]): Promise<ModelMessage[]>;
package/dist/steps/store.steps.js
CHANGED
@@ -1,36 +1,11 @@
 import { getWritable } from "workflow";
 import { resolveStoryRuntime } from "../story.config";
-export async function generateId() {
-"use step";
-// Use crypto.randomUUID when available (Node 18+)
-const uuid = globalThis.crypto?.randomUUID?.();
-if (uuid)
-return uuid;
-// Fallback
-return `${Date.now()}-${Math.random().toString(16).slice(2)}`;
-}
-export async function getOrCreateContext(env, contextIdentifier) {
-"use step";
-const { store } = await resolveStoryRuntime(env);
-// Detect creation explicitly so the engine can run onContextCreated hooks.
-if (!contextIdentifier) {
-const context = await store.getOrCreateContext(null);
-return { context, isNew: true };
-}
-const existing = await store.getContext(contextIdentifier);
-if (existing)
-return { context: existing, isNew: false };
-const created = await store.getOrCreateContext(contextIdentifier);
-return { context: created, isNew: true };
-}
 /**
-*
+* Initializes/ensures the story context exists and emits a single `data-context-id` chunk.
 *
-*
-* - `getOrCreateContext(...)` and `writeContextIdChunk(...)` are semantically coupled.
-* - Keeping them in a single step reduces step invocations per run (cheaper) without changing behavior.
+* This is the "context init" boundary for the story engine.
 */
-export async function
+export async function initializeContext(env, contextIdentifier) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
 // Detect creation explicitly so the engine can run onContextCreated hooks.
@@ -63,24 +38,6 @@ export async function ensureContextAndEmitContextId(env, contextIdentifier) {
 }
 return result;
 }
-export async function getContext(env, contextIdentifier) {
-"use step";
-const { store } = await resolveStoryRuntime(env);
-return await store.getContext(contextIdentifier);
-}
-/**
-* Loads the state needed for a single Story loop iteration.
-*
-* This is a "read aggregation" step: it groups read-only store calls into a single workflow step
-* invocation to reduce step overhead (cheaper) without changing behavior.
-*/
-export async function loadTurnState(params) {
-"use step";
-const { store } = await resolveStoryRuntime(params.env);
-const context = await store.getContext(params.contextIdentifier);
-const events = await store.getEvents(params.contextIdentifier);
-return { context, events };
-}
 export async function updateContextContent(env, contextIdentifier, content) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
@@ -91,33 +48,40 @@ export async function updateContextStatus(env, contextIdentifier, status) {
 const { store } = await resolveStoryRuntime(env);
 await store.updateContextStatus(contextIdentifier, status);
 }
-export async function
+export async function saveTriggerEvent(env, contextIdentifier, event) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
 return await store.saveEvent(contextIdentifier, event);
 }
-export async function
+export async function saveReactionEvent(env, contextIdentifier, event) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
-return await store.
+return await store.saveEvent(contextIdentifier, event);
 }
-export async function
+export async function updateEvent(env, eventId, event) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
-return await store.
+return await store.updateEvent(eventId, event);
 }
 export async function createExecution(env, contextIdentifier, triggerEventId, reactionEventId) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
 return await store.createExecution(contextIdentifier, triggerEventId, reactionEventId);
 }
-export async function
+export async function createReactionEvent(params) {
 "use step";
-const { store } = await resolveStoryRuntime(env);
-
+const { store } = await resolveStoryRuntime(params.env);
+// Generate a new reaction event id inside the step boundary.
+const uuid = globalThis.crypto?.randomUUID?.();
+const reactionEventId = typeof uuid === "string"
+? uuid
+: `${Date.now()}-${Math.random().toString(16).slice(2)}`;
+await store.updateContextStatus(params.contextIdentifier, "streaming");
+const execution = await store.createExecution(params.contextIdentifier, params.triggerEventId, reactionEventId);
+return { reactionEventId, executionId: execution.id };
 }
-export async function
+export async function completeExecution(env, contextIdentifier, executionId, status) {
 "use step";
 const { store } = await resolveStoryRuntime(env);
-
+await store.completeExecution(contextIdentifier, executionId, status);
 }
package/dist/steps/stream.steps.d.ts
CHANGED
@@ -11,6 +11,13 @@ export declare function writeContextSubstate(params: {
 export declare function writeContextIdChunk(params: {
 contextId: string;
 }): Promise<void>;
+export declare function writeStoryPing(params: {
+/**
+* Simple ping event to validate that the workflow stream is alive.
+* This is intentionally generic so clients can ignore it safely.
+*/
+label?: string;
+}): Promise<void>;
 export declare function writeToolOutputs(params: {
 results: Array<{
 toolCallId: string;
package/dist/steps/stream.steps.js
CHANGED
@@ -29,6 +29,21 @@ export async function writeContextIdChunk(params) {
 writer.releaseLock();
 }
 }
+export async function writeStoryPing(params) {
+"use step";
+const writable = getWritable();
+const writer = writable.getWriter();
+try {
+await writer.write({
+type: "data-story-ping",
+data: { label: params.label ?? "story-ping" },
+transient: true,
+});
+}
+finally {
+writer.releaseLock();
+}
+}
 export async function writeToolOutputs(params) {
 "use step";
 const writable = getWritable();
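On the consumer side the new ping is just another data chunk on the UI message stream: type "data-story-ping", a small { label } payload, and transient: true. A hedged sketch of a client reader that skips it (only the chunk type and payload come from this diff; the reader itself is illustrative):

type StoryUIChunk = { type: string; data?: unknown; transient?: boolean };

async function consumeStoryStream(
    stream: ReadableStream<StoryUIChunk>,
    onChunk: (chunk: StoryUIChunk) => void,
) {
    const reader = stream.getReader();
    for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        // Liveness signal written right after the reaction event is created; safe to drop.
        if (value.type === "data-story-ping") continue;
        onChunk(value);
    }
}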
package/dist/stores/instant.document-parser.d.ts
CHANGED
@@ -2,4 +2,4 @@
 * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
 * Returns the created documentId.
 */
-export declare function parseAndStoreDocument(db: any, buffer: Buffer, fileName: string,
+export declare function parseAndStoreDocument(db: any, buffer: Buffer, fileName: string, fileId: string): Promise<string>;
package/dist/stores/instant.document-parser.js
CHANGED
@@ -1,7 +1,75 @@
 import { id } from "@instantdb/admin";
 const LLAMA_CLOUD_BASE_URL = "https://api.cloud.llamaindex.ai/api/v1";
+function safeErrorJson(error) {
+const seen = new WeakSet();
+const redactKey = (k) => /token|authorization|cookie|secret|api[_-]?key|password/i.test(k);
+const err = error;
+const payload = {
+name: err?.name,
+message: err?.message,
+status: err?.status,
+body: err?.body,
+data: err?.data,
+stack: err?.stack,
+};
+try {
+return JSON.stringify(payload, (k, v) => {
+if (redactKey(k))
+return "[redacted]";
+if (typeof v === "string" && v.length > 5000)
+return "[truncated-string]";
+if (typeof v === "object" && v !== null) {
+if (seen.has(v))
+return "[circular]";
+seen.add(v);
+}
+return v;
+});
+}
+catch {
+return JSON.stringify({ message: String(err?.message ?? "error") });
+}
+}
+async function probeInstantDocumentSchema(db) {
+// Best-effort probes to pinpoint missing schema pieces WITHOUT logging dynamic payloads.
+// Each line is a static string.
+try {
+console.log("Instant schema probe: document_documents entity query begin");
+await db.query({ document_documents: { $: { limit: 1 } } });
+console.log("Instant schema probe: document_documents entity query ok");
+}
+catch {
+console.error("Instant schema probe: document_documents entity query failed");
+}
+try {
+console.log("Instant schema probe: document_documents.file link query begin");
+await db.query({ document_documents: { $: { limit: 1 }, file: {} } });
+console.log("Instant schema probe: document_documents.file link query ok");
+}
+catch {
+console.error("Instant schema probe: document_documents.file link query failed");
+}
+try {
+console.log("Instant schema probe: $files entity query begin");
+await db.query({ $files: { $: { limit: 1 } } });
+console.log("Instant schema probe: $files entity query ok");
+}
+catch {
+console.error("Instant schema probe: $files entity query failed");
+}
+try {
+console.log("Instant schema probe: $files.document link query begin");
+await db.query({ $files: { $: { limit: 1 }, document: {} } });
+console.log("Instant schema probe: $files.document link query ok");
+}
+catch {
+console.error("Instant schema probe: $files.document link query failed");
+}
+}
 async function uploadToLlamaCloud(buffer, fileName) {
+console.log("LlamaCloud: upload begin");
 const formData = new FormData();
+console.log("LlamaCloud: upload build form-data begin");
 const uint8Array = new Uint8Array(buffer);
 const blob = new Blob([uint8Array], { type: "application/pdf" });
 formData.append("file", blob, fileName);
@@ -10,77 +78,132 @@ async function uploadToLlamaCloud(buffer, fileName) {
 formData.append("adaptive_long_table", "true");
 formData.append("outlined_table_extraction", "true");
 formData.append("output_tables_as_HTML", "true");
-
-
-
-
-}
-
-
+console.log("LlamaCloud: upload build form-data ok");
+console.log("LlamaCloud: upload fetch begin");
+let response;
+try {
+response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/upload`, {
+method: "POST",
+headers: {
+Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+},
+body: formData,
+});
+}
+catch (error) {
+console.log("LlamaCloud: upload fetch threw", safeErrorJson(error));
+throw error;
+}
+console.log("LlamaCloud: upload fetch ok");
 if (!response.ok) {
+console.log("LlamaCloud: upload failed");
 const errorText = await response.text();
 throw new Error(`LlamaCloud upload failed: ${response.status} ${errorText}`);
 }
+console.log("LlamaCloud: upload ok");
 const result = (await response.json());
 return result.id;
 }
 async function getJobStatus(jobId) {
-
-
-
-
-}
-
+console.log("LlamaCloud: status fetch begin");
+console.log("LlamaCloud: status fetch request begin");
+let response;
+try {
+response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}`, {
+method: "GET",
+headers: {
+Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+},
+});
+}
+catch (error) {
+console.log("LlamaCloud: status fetch threw", safeErrorJson(error));
+throw error;
+}
+console.log("LlamaCloud: status fetch request ok");
 if (!response.ok) {
+console.log("LlamaCloud: status fetch failed");
 const errorText = await response.text();
 throw new Error(`LlamaCloud status fetch failed: ${response.status} ${errorText}`);
 }
+console.log("LlamaCloud: status fetch ok");
 return (await response.json());
 }
 async function getParseResult(jobId) {
-
-
-
-
-}
-
+console.log("LlamaCloud: result fetch begin");
+console.log("LlamaCloud: result fetch request begin");
+let response;
+try {
+response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}/result/markdown`, {
+method: "GET",
+headers: {
+Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+},
+});
+}
+catch (error) {
+console.log("LlamaCloud: result fetch threw", safeErrorJson(error));
+throw error;
+}
+console.log("LlamaCloud: result fetch request ok");
 if (!response.ok) {
+console.log("LlamaCloud: result fetch failed");
 const errorText = await response.text();
 throw new Error(`LlamaCloud result fetch failed: ${response.status} ${errorText}`);
 }
+console.log("LlamaCloud: result fetch ok");
 return (await response.json());
 }
 async function waitForProcessing(jobId, maxAttempts = 60) {
+console.log("LlamaCloud: waitForProcessing begin");
 for (let attempt = 0; attempt < maxAttempts; attempt++) {
+console.log("LlamaCloud: waitForProcessing poll");
 const statusResponse = await getJobStatus(jobId);
 if (statusResponse.status === "SUCCESS" || statusResponse.status === "COMPLETED") {
+console.log("LlamaCloud: waitForProcessing completed");
 return await getParseResult(jobId);
 }
 if (statusResponse.status === "ERROR" || statusResponse.status === "FAILED") {
+console.log("LlamaCloud: waitForProcessing failed");
 throw new Error(`LlamaCloud processing failed with status: ${statusResponse.status}`);
 }
 await new Promise((resolve) => setTimeout(resolve, 2000));
 }
+console.log("LlamaCloud: waitForProcessing timeout");
 throw new Error("LlamaCloud processing timeout");
 }
 /**
 * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
 * Returns the created documentId.
 */
-export async function parseAndStoreDocument(db, buffer, fileName,
-
-
-
-
+export async function parseAndStoreDocument(db, buffer, fileName, fileId) {
+console.log("parseAndStoreDocument: begin");
+console.log("parseAndStoreDocument: query existing begin");
+let existingDocument;
+try {
+existingDocument = await db.query({
+document_documents: {
+$: {
+where: { "file.id": fileId },
+},
+file: {},
 },
-
-
-
+});
+}
+catch (error) {
+console.error("parseAndStoreDocument: query existing failed", safeErrorJson(error));
+throw error;
+}
+console.log("parseAndStoreDocument: query existing ok");
 if (existingDocument.document_documents && existingDocument.document_documents.length > 0) {
+console.log("parseAndStoreDocument: existing document found");
 return existingDocument.document_documents[0].id;
 }
+console.log("parseAndStoreDocument: no existing document; start upload");
 const jobId = await uploadToLlamaCloud(buffer, fileName);
+console.log("parseAndStoreDocument: upload ok; waitForProcessing begin");
 const result = await waitForProcessing(jobId);
+console.log("parseAndStoreDocument: waitForProcessing ok; build pages begin");
 const pages = [];
 if (result.markdown) {
 pages.push({
@@ -97,20 +220,33 @@ export async function parseAndStoreDocument(db, buffer, fileName, path, fileId)
 }
 }
 if (pages.length === 0) {
+console.log("parseAndStoreDocument: no content extracted");
 throw new Error("No content extracted from document");
 }
 const documentId = id();
-
-
-
-
-
-
-
-
-
-
-
-
+console.log("parseAndStoreDocument: transact begin");
+try {
+console.log("parseAndStoreDocument: transact update document_documents begin");
+console.log("parseAndStoreDocument: transact link document->file begin");
+await db.transact([
+db.tx.document_documents[documentId].update({
+content: { pages },
+name: fileName,
+mimeType: "application/pdf",
+createdAt: new Date(),
+}),
+db.tx.document_documents[documentId].link({
+file: fileId,
+}),
+]);
+}
+catch (error) {
+console.error("parseAndStoreDocument: transact failed", safeErrorJson(error));
+// Diagnose missing schema attributes/links (static logs only).
+await probeInstantDocumentSchema(db);
+throw error;
+}
+console.log("parseAndStoreDocument: transact ok");
+console.log("parseAndStoreDocument: end");
 return documentId;
 }
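A usage sketch for the updated parseAndStoreDocument signature (the old path parameter is gone; the InstantDB $files id is now the fourth argument). The flow mirrors what ensureDocumentParsedForFile does in instant.documents.js; the db handle and file record shape are assumptions, and LLAMA_CLOUD_API_KEY must be set in the environment:

// Declared here only for illustration; matches the signature in instant.document-parser.d.ts.
declare function parseAndStoreDocument(
    db: any,
    buffer: Buffer,
    fileName: string,
    fileId: string,
): Promise<string>;

async function parseUploadedPdf(db: any, file: { id: string; url: string; path?: string }) {
    const resp = await fetch(file.url);
    if (!resp.ok) throw new Error(`Failed to fetch file for parsing: HTTP ${resp.status}`);
    const buffer = Buffer.from(await resp.arrayBuffer());
    // Re-running with the same fileId is effectively a no-op: the function first looks for a
    // document_documents row already linked to that file and returns its id if found.
    return await parseAndStoreDocument(db, buffer, file.path ?? "file.pdf", file.id);
}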
package/dist/stores/instant.documents.js
CHANGED
@@ -4,6 +4,14 @@ function isFilePart(part) {
 typeof part === "object" &&
 (part.type === "file" || part?.providerMetadata?.instant));
 }
+function formatAttachmentSummary(part) {
+const instant = part?.providerMetadata?.instant ?? {};
+const fileId = typeof instant?.fileId === "string" ? instant.fileId : "";
+const filename = typeof part?.filename === "string" ? part.filename : "";
+const mediaType = typeof part?.mediaType === "string" ? part.mediaType : "";
+// Keep it compact; no URLs (can be signed/sensitive).
+return `fileId="${fileId}" filename="${filename}" mediaType="${mediaType}"`;
+}
 export function coerceDocumentTextPages(documentRecord, opts) {
 const pages = documentRecord?.content?.pages;
 if (!Array.isArray(pages) || pages.length === 0)
@@ -17,75 +25,139 @@ export function coerceDocumentTextPages(documentRecord, opts) {
 .join("");
 }
 async function resolveInstantFileRecord(db, params) {
+console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord begin");
 const fileId = params.fileId ? String(params.fileId) : null;
 const filePath = params.path ? String(params.path) : null;
 if (!fileId && !filePath)
 return null;
 if (fileId) {
+console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by id");
 const q = await db.query({
 $files: { $: { where: { id: fileId }, limit: 1 }, document: {} },
 });
+console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by id ok");
 return q?.$files?.[0] ?? null;
 }
+console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by path");
 const q = await db.query({
 $files: { $: { where: { path: filePath }, limit: 1 }, document: {} },
 });
+console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by path ok");
 return q?.$files?.[0] ?? null;
 }
 async function ensureDocumentParsedForFile(db, params) {
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile begin");
 const fileRecord = params.fileRecord;
 const part = params.part;
 let documentRecord = Array.isArray(fileRecord?.document)
 ? fileRecord.document?.[0]
 : fileRecord.document;
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile check existing document link");
 if (documentRecord?.id)
 return documentRecord;
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile no existing doc; fetch file url");
 const fileUrl = typeof fileRecord?.url === "string" ? fileRecord.url : "";
 if (!fileUrl.startsWith("http://") && !fileUrl.startsWith("https://")) {
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile invalid file url");
 return null;
 }
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile fetch begin");
 const resp = await fetch(fileUrl);
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile fetch ok");
 if (!resp.ok)
 throw new Error(`Failed to fetch file for parsing: HTTP ${resp.status}`);
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile buffer begin");
 const buffer = Buffer.from(await resp.arrayBuffer());
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile buffer ok");
 const name = (typeof part?.filename === "string" && part.filename) ||
 (typeof fileRecord?.path === "string" && fileRecord.path) ||
 "file";
-
-
-const
+// NOTE: Do not invent fallback paths. If the file doesn't have a stable `path`,
+// we don't fabricate one.
+const path = typeof fileRecord?.path === "string" ? fileRecord.path : undefined;
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile parseAndStoreDocument begin");
+const documentId = await parseAndStoreDocument(db, buffer, name, String(fileRecord.id));
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile parseAndStoreDocument ok");
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile query document_documents begin");
 const dq = await db.query({
 document_documents: { $: { where: { id: documentId }, limit: 1 }, file: {} },
 });
+console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile query document_documents ok");
 documentRecord = dq?.document_documents?.[0] ?? null;
 return documentRecord;
 }
 export async function expandEventsWithInstantDocuments(params) {
+console.log("expandEventsWithInstantDocuments: begin");
 const db = params.db;
 const maxChars = typeof params.maxChars === "number" ? params.maxChars : 120000;
 const derivedEventType = params.derivedEventType ?? "document.parsed";
 const out = [];
+console.log("expandEventsWithInstantDocuments: loop events begin");
 for (const event of params.events) {
-out.push(event);
 const parts = event?.content?.parts;
-if (!Array.isArray(parts) || parts.length === 0)
+if (!Array.isArray(parts) || parts.length === 0) {
+out.push(event);
 continue;
+}
+console.log("expandEventsWithInstantDocuments: inspect event parts");
+const hadFileParts = parts.some((p) => isFilePart(p));
+if (hadFileParts) {
+// Do not forward file parts to the model (gateways may not support some media types).
+// The derived `document.parsed` event contains the extracted text.
+const filtered = parts.filter((p) => !isFilePart(p));
+const attachmentSummaries = parts
+.filter((p) => isFilePart(p))
+.map((p) => formatAttachmentSummary(p))
+.join("\n");
+const attachmentInfoText = attachmentSummaries
+? `Attachment info:\n${attachmentSummaries}`
+: "Attachment info: (unavailable)";
+const sanitized = {
+...event,
+content: {
+...event?.content,
+parts: [
+...filtered,
+{
+type: "text",
+text: "[Attachment omitted from model input. Parsed content will follow in a document.parsed event.]\n" +
+attachmentInfoText,
+},
+],
+},
+};
+out.push(sanitized);
+}
+else {
+out.push(event);
+}
 for (const part of parts) {
 if (!isFilePart(part))
 continue;
+console.log("expandEventsWithInstantDocuments: file part detected");
 const instantMeta = part?.providerMetadata?.instant ?? {};
 const fileId = instantMeta?.fileId ? String(instantMeta.fileId) : undefined;
 const filePath = instantMeta?.path ? String(instantMeta.path) : undefined;
+console.log("expandEventsWithInstantDocuments: resolve file record begin");
 const fileRecord = await resolveInstantFileRecord(db, { fileId, path: filePath });
+console.log("expandEventsWithInstantDocuments: resolve file record ok");
 if (!fileRecord?.id)
 continue;
+console.log("expandEventsWithInstantDocuments: ensure document parsed begin");
 const documentRecord = await ensureDocumentParsedForFile(db, { fileRecord, part });
+console.log("expandEventsWithInstantDocuments: ensure document parsed ok");
+console.log("expandEventsWithInstantDocuments: coerce document pages begin");
 const pageText = coerceDocumentTextPages(documentRecord);
+console.log("expandEventsWithInstantDocuments: coerce document pages ok");
 if (!pageText)
 continue;
+console.log("expandEventsWithInstantDocuments: clip extracted text begin");
 const clipped = pageText.length > maxChars
 ? `${pageText.slice(0, maxChars)}\n\n[truncated: maxChars=${maxChars}]`
 : pageText;
+console.log("expandEventsWithInstantDocuments: clip extracted text ok");
+console.log("expandEventsWithInstantDocuments: create derived document.parsed event");
+const derivedAttachmentInfo = `Attachment info:\n${formatAttachmentSummary(part)}`;
 const derived = {
 id: `derived:${event.id}:${String(fileRecord.id)}`,
 type: derivedEventType,
@@ -95,14 +167,18 @@ export async function expandEventsWithInstantDocuments(params) {
 parts: [
 {
 type: "text",
-text:
+text: "Parsed document available.\n" +
+derivedAttachmentInfo +
+"\nProvider: llamacloud",
 },
 { type: "text", text: `Document transcription:${clipped}` },
 ],
 },
 };
 out.push(derived);
+console.log("expandEventsWithInstantDocuments: derived event appended");
 }
 }
+console.log("expandEventsWithInstantDocuments: end");
 return out;
 }
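To make the expansion behavior concrete, a hypothetical before/after (every id and filename below is invented for illustration):

const userEvent = {
    id: "evt-1", // hypothetical
    type: "user.message",
    content: {
        parts: [
            { type: "text", text: "Please review this contract" },
            {
                type: "file",
                filename: "contract.pdf",
                mediaType: "application/pdf",
                providerMetadata: { instant: { fileId: "file-123" } }, // hypothetical id
            },
        ],
    },
};
// expandEventsWithInstantDocuments({ db, events: [userEvent] }) now yields two events:
// 1. evt-1 with the file part removed and a text part reading
//    "[Attachment omitted from model input. Parsed content will follow in a document.parsed event.]"
//    followed by the compact attachment summary (fileId / filename / mediaType, no URLs).
// 2. A derived event with id "derived:evt-1:file-123" and type "document.parsed" whose text parts
//    carry the attachment summary and the (maxChars-clipped) document transcription.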
package/dist/stores/instant.store.d.ts
CHANGED
@@ -12,6 +12,8 @@ export type InstantStoreDb = any;
 export declare class InstantStore implements StoryStore {
 private db;
 constructor(db: InstantStoreDb);
+private debugEventExpansionEnabled;
+private debugLog;
 getOrCreateContext<C>(contextIdentifier: ContextIdentifier | null): Promise<StoredContext<C>>;
 private createContext;
 getContext<C>(contextIdentifier: ContextIdentifier): Promise<StoredContext<C> | null>;
package/dist/stores/instant.store.js
CHANGED
@@ -7,6 +7,15 @@ export class InstantStore {
 constructor(db) {
 this.db = db;
 }
+debugEventExpansionEnabled() {
+return process.env.EKAIROS_DEBUG_EVENT_EXPANSION === "1";
+}
+debugLog(message) {
+if (!this.debugEventExpansionEnabled())
+return;
+// CRITICAL: static strings only (no dynamic values in logs).
+console.log(message);
+}
 async getOrCreateContext(contextIdentifier) {
 if (!contextIdentifier) {
 return this.createContext();
@@ -172,14 +181,18 @@ export class InstantStore {
 await this.db.transact(txs);
 }
 async eventsToModelMessages(events) {
+this.debugLog("InstantStore.eventsToModelMessages: begin");
 // Default behavior for Instant-backed stories:
 // - Expand file parts into derived `document.parsed` events (persisting parsed content into document_documents)
 // - Then convert expanded events to model messages
+this.debugLog("InstantStore.eventsToModelMessages: expandEventsWithInstantDocuments begin");
 const expanded = await expandEventsWithInstantDocuments({
 db: this.db,
 events,
 derivedEventType: "document.parsed",
 });
+this.debugLog("InstantStore.eventsToModelMessages: expandEventsWithInstantDocuments ok");
+this.debugLog("InstantStore.eventsToModelMessages: convertEventsToModelMessages begin");
 return await convertEventsToModelMessages(expanded);
 }
 }
package/dist/story.builder.d.ts
CHANGED
|
@@ -1,8 +1,8 @@
 import type { Tool } from "ai";
-import type { StoryEnvironment } from "./story.config";
-import { Story, type StoryModelInit, type StoryOptions, type ShouldContinue, type StoryShouldContinueArgs, type StoryReactParams } from "./story.engine";
-import type { ContextEvent, StoredContext } from "./story.store";
-import { type StoryKey } from "./story.registry";
+import type { StoryEnvironment } from "./story.config.js";
+import { Story, type StoryModelInit, type StoryOptions, type ShouldContinue, type StoryShouldContinueArgs, type StoryReactParams } from "./story.engine.js";
+import type { ContextEvent, StoredContext } from "./story.store.js";
+import { type StoryKey } from "./story.registry.js";
 export interface StoryConfig<Context, Env extends StoryEnvironment = StoryEnvironment> {
 context: (context: StoredContext<Context>, env: Env) => Promise<Context> | Context;
 /**
package/dist/story.builder.js
CHANGED
|
@@ -1,5 +1,5 @@
-import { Story, } from "./story.engine";
-import { registerStory } from "./story.registry";
+import { Story, } from "./story.engine.js";
+import { registerStory } from "./story.registry.js";
 export function story(config) {
 class FunctionalStory extends Story {
 constructor() {
package/dist/story.config.d.ts
CHANGED
|
@@ -1,5 +1,4 @@
 import type { StoryStore } from "./story.store";
-import type { EkairosConfig } from "./ekairos.config";
 /**
 * ## story.config.ts
 *
@@ -17,7 +16,6 @@ export type StoryRuntime = {
 store: StoryStore;
 };
 export type StoryRuntimeResolver<Env extends StoryEnvironment = StoryEnvironment> = (env: Env) => Promise<StoryRuntime> | StoryRuntime;
-export declare function getEkairosConfig(): EkairosConfig | null;
 /**
 * Optional global bootstrap hook for step runtimes.
 *
package/dist/story.config.js
CHANGED
|
@@ -1,37 +1,12 @@
+import { pathToFileURL } from "node:url";
+import { join } from "node:path";
 let runtimeResolver = null;
-const GLOBAL_RUNTIME_RESOLVER = Symbol.for("@ekairos/story.runtimeResolver");
-const GLOBAL_RUNTIME_BOOTSTRAP = Symbol.for("@ekairos/story.runtimeBootstrap");
-const GLOBAL_EKAIROS_CONFIG = Symbol.for("@ekairos/story.ekairosConfig");
-function getGlobal(key) {
-try {
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const v = globalThis?.[key];
-return v ?? null;
-}
-catch {
-return null;
-}
-}
-function setGlobal(key, value) {
-try {
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-;
-globalThis[key] = value;
-}
-catch {
-// ignore
-}
-}
 function getRuntimeResolver() {
-return runtimeResolver
-}
-export function getEkairosConfig() {
-return getGlobal(GLOBAL_EKAIROS_CONFIG);
+return runtimeResolver;
 }
 let runtimeBootstrap = null;
 export function configureStoryRuntimeBootstrap(bootstrap) {
 runtimeBootstrap = bootstrap;
-setGlobal(GLOBAL_RUNTIME_BOOTSTRAP, bootstrap);
 }
 /**
 * Configure the story runtime resolver (global).
@@ -41,7 +16,6 @@ export function configureStoryRuntimeBootstrap(bootstrap) {
 */
 export function configureStoryRuntime(resolver) {
 runtimeResolver = resolver;
-setGlobal(GLOBAL_RUNTIME_RESOLVER, runtimeResolver);
 }
 export function isStoryRuntimeConfigured() {
 return Boolean(runtimeResolver);
@@ -49,15 +23,45 @@ export function isStoryRuntimeConfigured() {
 export async function resolveStoryRuntime(env) {
 if (!getRuntimeResolver()) {
 // Best-effort: allow the step runtime to self-bootstrap once.
-
-
-
-//
-//
+if (runtimeBootstrap) {
+await runtimeBootstrap();
+}
+// Convention bootstrap (portable, runtime-resolvable):
+// If the host app provides an `ekairos.bootstrap.js` at the project root, we can load it
+// from the step runtime using a file URL. This avoids relying on bundler-only aliases.
 if (!getRuntimeResolver()) {
-const
-if (
-
+const cwd = typeof process !== "undefined" && process.cwd ? process.cwd() : null;
+if (cwd) {
+const candidates = [
+"ekairos.bootstrap.js",
+"ekairos.bootstrap.cjs",
+"ekairos.bootstrap.mjs",
+];
+for (const filename of candidates) {
+try {
+await import(pathToFileURL(join(cwd, filename)).href);
+break;
+}
+catch {
+// ignore
+}
+}
+}
+}
+// Convention bootstrap (Next.js / monorepo apps):
+// If the app exposes `src/ekairos.ts` and uses the `@/` alias, loading that module will
+// run `ekairosConfig.setup()` which configures the resolver + bootstrap hook.
+//
+// This is intentionally ONLY attempted when runtime is missing, and is safe as long as
+// `story.config` is not part of client bundles (see `@ekairos/story/runtime`).
+if (!getRuntimeResolver()) {
+try {
+// @ts-expect-error - optional, app-provided convention module
+await import("@/ekairos");
+}
+catch {
+// ignore: module missing / alias not configured
+}
 }
 // If bootstrap succeeded, proceed.
 const resolver = getRuntimeResolver();
@@ -66,8 +70,11 @@ export async function resolveStoryRuntime(env) {
 throw new Error([
 "Story runtime is not configured.",
 "",
-"
-"
+"Convention:",
+"- Create an app-level `ekairos.ts` that exports `ekairosConfig = createEkairosConfig({ runtime })`",
+"- Ensure `ekairosConfig.setup()` runs in the step runtime (module load / worker boot).",
+"",
+"If you already have that file, ensure it is evaluated in the step runtime before calling story store steps.",
 ].join("\n"));
 }
 return await getRuntimeResolver()(env);
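A sketch of the project-root bootstrap convention described above (ekairos.bootstrap.js / .cjs / .mjs). The import path to the app's config module is an assumption; the only contract resolveStoryRuntime implies is that evaluating this file leaves the runtime resolver configured, for example by running ekairosConfig.setup():

// ekairos.bootstrap.mjs at the project root (any of the three candidate filenames above works).
// The path below is a guess — point it at wherever the app's ekairos module is emitted.
import { ekairosConfig } from "./src/ekairos.js";

ekairosConfig.setup();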
package/dist/story.d.ts
CHANGED
|
@@ -1,2 +1,2 @@
-export { Story, type StoryOptions, type StoryStreamOptions, type ShouldContinue, type StoryShouldContinueArgs, } from "./story.engine";
-export { story, createStory, type StoryConfig, type StoryInstance, type RegistrableStoryBuilder, } from "./story.builder";
+export { Story, type StoryOptions, type StoryStreamOptions, type ShouldContinue, type StoryShouldContinueArgs, } from "./story.engine.js";
+export { story, createStory, type StoryConfig, type StoryInstance, type RegistrableStoryBuilder, } from "./story.builder.js";
package/dist/story.engine.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
 import type { Tool } from "ai";
-import type { StoryEnvironment } from "./story.config";
-import type { ContextEvent, ContextIdentifier, StoredContext } from "./story.store";
+import type { StoryEnvironment } from "./story.config.js";
+import type { ContextEvent, ContextIdentifier, StoredContext } from "./story.store.js";
 export interface StoryOptions<Context = any, Env extends StoryEnvironment = StoryEnvironment> {
 onContextCreated?: (args: {
 env: Env;
package/dist/story.engine.js
CHANGED
|
@@ -1,8 +1,8 @@
-import { applyToolExecutionResultToParts } from "./story.toolcalls";
-import {
-import { toolsToModelTools } from "./tools-to-model-tools";
-import { closeStoryStream, writeContextSubstate, writeToolOutputs } from "./steps/stream.steps";
-import { completeExecution,
+import { applyToolExecutionResultToParts } from "./story.toolcalls.js";
+import { executeReaction } from "./steps/reaction.steps.js";
+import { toolsToModelTools } from "./tools-to-model-tools.js";
+import { closeStoryStream, writeContextSubstate, writeStoryPing, writeToolOutputs } from "./steps/stream.steps.js";
+import { completeExecution, createReactionEvent, initializeContext, saveReactionEvent, saveTriggerEvent, updateContextContent, updateContextStatus, updateEvent, } from "./steps/store.steps.js";
 export class Story {
 constructor(opts = {}) {
 this.opts = opts;
@@ -59,7 +59,7 @@ export class Story {
 const preventClose = params.options?.preventClose ?? false;
 const sendFinish = params.options?.sendFinish ?? true;
 // 1) Ensure context exists (step)
-const ctxResult = await
+const ctxResult = await initializeContext(params.env, params.contextIdentifier);
 const currentContext = ctxResult.context;
 const contextSelector = params.contextIdentifier?.id
 ? { id: String(params.contextIdentifier.id) }
@@ -70,12 +70,16 @@ export class Story {
 await this.opts.onContextCreated?.({ env: params.env, context: currentContext });
 }
 // 2) Persist trigger event + create execution shell (steps)
-const persistedTriggerEvent = await
+const persistedTriggerEvent = await saveTriggerEvent(params.env, contextSelector, triggerEvent);
 const triggerEventId = persistedTriggerEvent.id;
-const reactionEventId = await
-
-
-
+const { reactionEventId, executionId } = await createReactionEvent({
+env: params.env,
+contextIdentifier: contextSelector,
+triggerEventId,
+});
+// Emit a simple ping chunk early so clients can validate that streaming works end-to-end.
+// This should be ignored safely by clients that don't care about it.
+await writeStoryPing({ label: "story-start" });
 let reactionEvent = null;
 // Latest persisted context state for this run (we keep it in memory; store is updated via steps).
 let updatedContext = currentContext;
@@ -95,7 +99,6 @@ export class Story {
 };
 try {
 for (let iter = 0; iter < maxIterations; iter++) {
-const events = await getEvents(params.env, contextSelector);
 // Normalize/initialize context (workflow-level; may call steps if needed)
 const nextContent = await this.initialize(updatedContext, params.env);
 updatedContext = await updateContextContent(params.env, contextSelector, nextContent);
@@ -105,12 +108,11 @@ export class Story {
 // IMPORTANT: step args must be serializable.
 // Match DurableAgent behavior: convert tool input schemas to plain JSON Schema in workflow context.
 const toolsForModel = toolsToModelTools(toolsAll);
-const
-
-
+const { assistantEvent, toolCalls, messagesForModel } = await executeReaction({
+env: params.env,
+contextIdentifier: contextSelector,
 model: this.getModel(updatedContext, params.env),
 system: systemPrompt,
-messages: messagesForModel,
 tools: toolsForModel,
 eventId: reactionEventId,
 maxSteps: maxModelSteps,
@@ -119,7 +121,7 @@ export class Story {
 });
 // Persist/append the assistant event for this iteration
 if (!reactionEvent) {
-reactionEvent = await
+reactionEvent = await saveReactionEvent(params.env, contextSelector, {
 ...assistantEvent,
 status: "pending",
 });
package/dist/story.js
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@ekairos/story",
-"version": "1.21.
+"version": "1.21.43-beta.0",
 "description": "Pulzar Story - Workflow-based AI Stories",
 "type": "module",
 "main": "dist/index.js",
@@ -48,7 +48,7 @@
 },
 "dependencies": {
 "@ai-sdk/openai": "^2.0.52",
-"@ekairos/domain": "^1.21.
+"@ekairos/domain": "^1.21.43-beta.0",
 "@instantdb/admin": "^0.22.13",
 "@instantdb/core": "^0.22.13",
 "@vercel/sandbox": "^0.0.23",