@ekairos/story 1.21.41-beta.0 → 1.21.52-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ekairos.config.js +1 -16
- package/dist/index.d.ts +6 -6
- package/dist/index.js +5 -5
- package/dist/runtime.d.ts +1 -2
- package/dist/runtime.js +1 -2
- package/dist/steps/reaction.steps.d.ts +26 -0
- package/dist/steps/reaction.steps.js +137 -0
- package/dist/steps/store.steps.d.ts +15 -28
- package/dist/steps/store.steps.js +35 -67
- package/dist/steps/stream.steps.d.ts +7 -0
- package/dist/steps/stream.steps.js +15 -0
- package/dist/stores/instant.document-parser.d.ts +1 -1
- package/dist/stores/instant.document-parser.js +175 -39
- package/dist/stores/instant.documents.js +82 -6
- package/dist/stores/instant.store.d.ts +2 -0
- package/dist/stores/instant.store.js +16 -1
- package/dist/story.builder.d.ts +4 -4
- package/dist/story.builder.js +2 -2
- package/dist/story.config.d.ts +1 -2
- package/dist/story.config.js +31 -39
- package/dist/story.d.ts +2 -2
- package/dist/story.engine.d.ts +10 -2
- package/dist/story.engine.js +99 -29
- package/dist/story.js +2 -2
- package/package.json +16 -2
@@ -1,7 +1,75 @@
 import { id } from "@instantdb/admin";
 const LLAMA_CLOUD_BASE_URL = "https://api.cloud.llamaindex.ai/api/v1";
+function safeErrorJson(error) {
+    const seen = new WeakSet();
+    const redactKey = (k) => /token|authorization|cookie|secret|api[_-]?key|password/i.test(k);
+    const err = error;
+    const payload = {
+        name: err?.name,
+        message: err?.message,
+        status: err?.status,
+        body: err?.body,
+        data: err?.data,
+        stack: err?.stack,
+    };
+    try {
+        return JSON.stringify(payload, (k, v) => {
+            if (redactKey(k))
+                return "[redacted]";
+            if (typeof v === "string" && v.length > 5000)
+                return "[truncated-string]";
+            if (typeof v === "object" && v !== null) {
+                if (seen.has(v))
+                    return "[circular]";
+                seen.add(v);
+            }
+            return v;
+        });
+    }
+    catch {
+        return JSON.stringify({ message: String(err?.message ?? "error") });
+    }
+}
+async function probeInstantDocumentSchema(db) {
+    // Best-effort probes to pinpoint missing schema pieces WITHOUT logging dynamic payloads.
+    // Each line is a static string.
+    try {
+        console.log("Instant schema probe: document_documents entity query begin");
+        await db.query({ document_documents: { $: { limit: 1 } } });
+        console.log("Instant schema probe: document_documents entity query ok");
+    }
+    catch {
+        console.error("Instant schema probe: document_documents entity query failed");
+    }
+    try {
+        console.log("Instant schema probe: document_documents.file link query begin");
+        await db.query({ document_documents: { $: { limit: 1 }, file: {} } });
+        console.log("Instant schema probe: document_documents.file link query ok");
+    }
+    catch {
+        console.error("Instant schema probe: document_documents.file link query failed");
+    }
+    try {
+        console.log("Instant schema probe: $files entity query begin");
+        await db.query({ $files: { $: { limit: 1 } } });
+        console.log("Instant schema probe: $files entity query ok");
+    }
+    catch {
+        console.error("Instant schema probe: $files entity query failed");
+    }
+    try {
+        console.log("Instant schema probe: $files.document link query begin");
+        await db.query({ $files: { $: { limit: 1 }, document: {} } });
+        console.log("Instant schema probe: $files.document link query ok");
+    }
+    catch {
+        console.error("Instant schema probe: $files.document link query failed");
+    }
+}
 async function uploadToLlamaCloud(buffer, fileName) {
+    console.log("LlamaCloud: upload begin");
     const formData = new FormData();
+    console.log("LlamaCloud: upload build form-data begin");
     const uint8Array = new Uint8Array(buffer);
     const blob = new Blob([uint8Array], { type: "application/pdf" });
     formData.append("file", blob, fileName);
@@ -10,77 +78,132 @@ async function uploadToLlamaCloud(buffer, fileName) {
     formData.append("adaptive_long_table", "true");
     formData.append("outlined_table_extraction", "true");
     formData.append("output_tables_as_HTML", "true");
-
-
-
-
-}
-
-
+    console.log("LlamaCloud: upload build form-data ok");
+    console.log("LlamaCloud: upload fetch begin");
+    let response;
+    try {
+        response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/upload`, {
+            method: "POST",
+            headers: {
+                Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+            },
+            body: formData,
+        });
+    }
+    catch (error) {
+        console.log("LlamaCloud: upload fetch threw", safeErrorJson(error));
+        throw error;
+    }
+    console.log("LlamaCloud: upload fetch ok");
     if (!response.ok) {
+        console.log("LlamaCloud: upload failed");
         const errorText = await response.text();
         throw new Error(`LlamaCloud upload failed: ${response.status} ${errorText}`);
     }
+    console.log("LlamaCloud: upload ok");
     const result = (await response.json());
     return result.id;
 }
 async function getJobStatus(jobId) {
-
-
-
-
-}
-
+    console.log("LlamaCloud: status fetch begin");
+    console.log("LlamaCloud: status fetch request begin");
+    let response;
+    try {
+        response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}`, {
+            method: "GET",
+            headers: {
+                Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+            },
+        });
+    }
+    catch (error) {
+        console.log("LlamaCloud: status fetch threw", safeErrorJson(error));
+        throw error;
+    }
+    console.log("LlamaCloud: status fetch request ok");
     if (!response.ok) {
+        console.log("LlamaCloud: status fetch failed");
         const errorText = await response.text();
         throw new Error(`LlamaCloud status fetch failed: ${response.status} ${errorText}`);
     }
+    console.log("LlamaCloud: status fetch ok");
     return (await response.json());
 }
 async function getParseResult(jobId) {
-
-
-
-
-}
-
+    console.log("LlamaCloud: result fetch begin");
+    console.log("LlamaCloud: result fetch request begin");
+    let response;
+    try {
+        response = await fetch(`${LLAMA_CLOUD_BASE_URL}/parsing/job/${jobId}/result/markdown`, {
+            method: "GET",
+            headers: {
+                Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+            },
+        });
+    }
+    catch (error) {
+        console.log("LlamaCloud: result fetch threw", safeErrorJson(error));
+        throw error;
+    }
+    console.log("LlamaCloud: result fetch request ok");
     if (!response.ok) {
+        console.log("LlamaCloud: result fetch failed");
         const errorText = await response.text();
         throw new Error(`LlamaCloud result fetch failed: ${response.status} ${errorText}`);
     }
+    console.log("LlamaCloud: result fetch ok");
     return (await response.json());
 }
 async function waitForProcessing(jobId, maxAttempts = 60) {
+    console.log("LlamaCloud: waitForProcessing begin");
     for (let attempt = 0; attempt < maxAttempts; attempt++) {
+        console.log("LlamaCloud: waitForProcessing poll");
         const statusResponse = await getJobStatus(jobId);
         if (statusResponse.status === "SUCCESS" || statusResponse.status === "COMPLETED") {
+            console.log("LlamaCloud: waitForProcessing completed");
             return await getParseResult(jobId);
         }
         if (statusResponse.status === "ERROR" || statusResponse.status === "FAILED") {
+            console.log("LlamaCloud: waitForProcessing failed");
             throw new Error(`LlamaCloud processing failed with status: ${statusResponse.status}`);
         }
         await new Promise((resolve) => setTimeout(resolve, 2000));
     }
+    console.log("LlamaCloud: waitForProcessing timeout");
     throw new Error("LlamaCloud processing timeout");
 }
 /**
  * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
  * Returns the created documentId.
  */
-export async function parseAndStoreDocument(db, buffer, fileName,
-
-
-
-
+export async function parseAndStoreDocument(db, buffer, fileName, fileId) {
+    console.log("parseAndStoreDocument: begin");
+    console.log("parseAndStoreDocument: query existing begin");
+    let existingDocument;
+    try {
+        existingDocument = await db.query({
+            document_documents: {
+                $: {
+                    where: { "file.id": fileId },
+                },
+                file: {},
             },
-
-
-
+        });
+    }
+    catch (error) {
+        console.error("parseAndStoreDocument: query existing failed", safeErrorJson(error));
+        throw error;
+    }
+    console.log("parseAndStoreDocument: query existing ok");
     if (existingDocument.document_documents && existingDocument.document_documents.length > 0) {
+        console.log("parseAndStoreDocument: existing document found");
         return existingDocument.document_documents[0].id;
     }
+    console.log("parseAndStoreDocument: no existing document; start upload");
     const jobId = await uploadToLlamaCloud(buffer, fileName);
+    console.log("parseAndStoreDocument: upload ok; waitForProcessing begin");
     const result = await waitForProcessing(jobId);
+    console.log("parseAndStoreDocument: waitForProcessing ok; build pages begin");
     const pages = [];
     if (result.markdown) {
         pages.push({
@@ -97,20 +220,33 @@ export async function parseAndStoreDocument(db, buffer, fileName, path, fileId)
         }
     }
     if (pages.length === 0) {
+        console.log("parseAndStoreDocument: no content extracted");
         throw new Error("No content extracted from document");
     }
     const documentId = id();
-
-
-
-
-
-
-
-
-
-
-
-
+    console.log("parseAndStoreDocument: transact begin");
+    try {
+        console.log("parseAndStoreDocument: transact update document_documents begin");
+        console.log("parseAndStoreDocument: transact link document->file begin");
+        await db.transact([
+            db.tx.document_documents[documentId].update({
+                content: { pages },
+                name: fileName,
+                mimeType: "application/pdf",
+                createdAt: new Date(),
+            }),
+            db.tx.document_documents[documentId].link({
+                file: fileId,
+            }),
+        ]);
+    }
+    catch (error) {
+        console.error("parseAndStoreDocument: transact failed", safeErrorJson(error));
+        // Diagnose missing schema attributes/links (static logs only).
+        await probeInstantDocumentSchema(db);
+        throw error;
+    }
+    console.log("parseAndStoreDocument: transact ok");
+    console.log("parseAndStoreDocument: end");
     return documentId;
 }
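The hunks above change the exported signature to `parseAndStoreDocument(db, buffer, fileName, fileId)` and make the call idempotent per file: if a `document_documents` row is already linked to the given `$files` id, its id is returned without re-parsing. A minimal caller sketch, assuming `LLAMA_CLOUD_API_KEY` and Instant admin credentials are set in the environment; the import specifier and the file id below are illustrative, not part of the diff:

    // sketch.mjs — parse a PDF once and link it to an existing $files record
    import { readFile } from "node:fs/promises";
    import { init } from "@instantdb/admin";
    // Illustrative specifier; the actual export path inside @ekairos/story may differ.
    import { parseAndStoreDocument } from "@ekairos/story/dist/stores/instant.document-parser.js";

    const db = init({
        appId: process.env.INSTANT_APP_ID,
        adminToken: process.env.INSTANT_ADMIN_TOKEN,
    });

    const buffer = await readFile("./report.pdf");
    // "file_abc" stands in for the id of an existing $files record to link to.
    const documentId = await parseAndStoreDocument(db, buffer, "report.pdf", "file_abc");
    console.log("documentId:", documentId);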
@@ -4,6 +4,14 @@ function isFilePart(part) {
     typeof part === "object" &&
     (part.type === "file" || part?.providerMetadata?.instant));
 }
+function formatAttachmentSummary(part) {
+    const instant = part?.providerMetadata?.instant ?? {};
+    const fileId = typeof instant?.fileId === "string" ? instant.fileId : "";
+    const filename = typeof part?.filename === "string" ? part.filename : "";
+    const mediaType = typeof part?.mediaType === "string" ? part.mediaType : "";
+    // Keep it compact; no URLs (can be signed/sensitive).
+    return `fileId="${fileId}" filename="${filename}" mediaType="${mediaType}"`;
+}
 export function coerceDocumentTextPages(documentRecord, opts) {
     const pages = documentRecord?.content?.pages;
     if (!Array.isArray(pages) || pages.length === 0)
@@ -17,75 +25,139 @@ export function coerceDocumentTextPages(documentRecord, opts) {
         .join("");
 }
 async function resolveInstantFileRecord(db, params) {
+    console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord begin");
     const fileId = params.fileId ? String(params.fileId) : null;
     const filePath = params.path ? String(params.path) : null;
     if (!fileId && !filePath)
         return null;
     if (fileId) {
+        console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by id");
         const q = await db.query({
             $files: { $: { where: { id: fileId }, limit: 1 }, document: {} },
         });
+        console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by id ok");
         return q?.$files?.[0] ?? null;
     }
+    console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by path");
     const q = await db.query({
         $files: { $: { where: { path: filePath }, limit: 1 }, document: {} },
     });
+    console.log("expandEventsWithInstantDocuments: resolveInstantFileRecord query by path ok");
     return q?.$files?.[0] ?? null;
 }
 async function ensureDocumentParsedForFile(db, params) {
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile begin");
     const fileRecord = params.fileRecord;
     const part = params.part;
     let documentRecord = Array.isArray(fileRecord?.document)
         ? fileRecord.document?.[0]
         : fileRecord.document;
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile check existing document link");
     if (documentRecord?.id)
         return documentRecord;
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile no existing doc; fetch file url");
     const fileUrl = typeof fileRecord?.url === "string" ? fileRecord.url : "";
     if (!fileUrl.startsWith("http://") && !fileUrl.startsWith("https://")) {
+        console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile invalid file url");
        return null;
     }
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile fetch begin");
     const resp = await fetch(fileUrl);
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile fetch ok");
     if (!resp.ok)
         throw new Error(`Failed to fetch file for parsing: HTTP ${resp.status}`);
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile buffer begin");
     const buffer = Buffer.from(await resp.arrayBuffer());
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile buffer ok");
     const name = (typeof part?.filename === "string" && part.filename) ||
         (typeof fileRecord?.path === "string" && fileRecord.path) ||
         "file";
-
-
-    const
+    // NOTE: Do not invent fallback paths. If the file doesn't have a stable `path`,
+    // we don't fabricate one.
+    const path = typeof fileRecord?.path === "string" ? fileRecord.path : undefined;
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile parseAndStoreDocument begin");
+    const documentId = await parseAndStoreDocument(db, buffer, name, String(fileRecord.id));
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile parseAndStoreDocument ok");
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile query document_documents begin");
     const dq = await db.query({
         document_documents: { $: { where: { id: documentId }, limit: 1 }, file: {} },
     });
+    console.log("expandEventsWithInstantDocuments: ensureDocumentParsedForFile query document_documents ok");
     documentRecord = dq?.document_documents?.[0] ?? null;
     return documentRecord;
 }
 export async function expandEventsWithInstantDocuments(params) {
+    console.log("expandEventsWithInstantDocuments: begin");
     const db = params.db;
     const maxChars = typeof params.maxChars === "number" ? params.maxChars : 120000;
     const derivedEventType = params.derivedEventType ?? "document.parsed";
     const out = [];
+    console.log("expandEventsWithInstantDocuments: loop events begin");
     for (const event of params.events) {
-        out.push(event);
         const parts = event?.content?.parts;
-        if (!Array.isArray(parts) || parts.length === 0)
+        if (!Array.isArray(parts) || parts.length === 0) {
+            out.push(event);
             continue;
+        }
+        console.log("expandEventsWithInstantDocuments: inspect event parts");
+        const hadFileParts = parts.some((p) => isFilePart(p));
+        if (hadFileParts) {
+            // Do not forward file parts to the model (gateways may not support some media types).
+            // The derived `document.parsed` event contains the extracted text.
+            const filtered = parts.filter((p) => !isFilePart(p));
+            const attachmentSummaries = parts
+                .filter((p) => isFilePart(p))
+                .map((p) => formatAttachmentSummary(p))
+                .join("\n");
+            const attachmentInfoText = attachmentSummaries
+                ? `Attachment info:\n${attachmentSummaries}`
+                : "Attachment info: (unavailable)";
+            const sanitized = {
+                ...event,
+                content: {
+                    ...event?.content,
+                    parts: [
+                        ...filtered,
+                        {
+                            type: "text",
+                            text: "[Attachment omitted from model input. Parsed content will follow in a document.parsed event.]\n" +
+                                attachmentInfoText,
+                        },
+                    ],
+                },
+            };
+            out.push(sanitized);
+        }
+        else {
+            out.push(event);
+        }
         for (const part of parts) {
             if (!isFilePart(part))
                 continue;
+            console.log("expandEventsWithInstantDocuments: file part detected");
             const instantMeta = part?.providerMetadata?.instant ?? {};
             const fileId = instantMeta?.fileId ? String(instantMeta.fileId) : undefined;
             const filePath = instantMeta?.path ? String(instantMeta.path) : undefined;
+            console.log("expandEventsWithInstantDocuments: resolve file record begin");
             const fileRecord = await resolveInstantFileRecord(db, { fileId, path: filePath });
+            console.log("expandEventsWithInstantDocuments: resolve file record ok");
             if (!fileRecord?.id)
                 continue;
+            console.log("expandEventsWithInstantDocuments: ensure document parsed begin");
             const documentRecord = await ensureDocumentParsedForFile(db, { fileRecord, part });
+            console.log("expandEventsWithInstantDocuments: ensure document parsed ok");
+            console.log("expandEventsWithInstantDocuments: coerce document pages begin");
             const pageText = coerceDocumentTextPages(documentRecord);
+            console.log("expandEventsWithInstantDocuments: coerce document pages ok");
             if (!pageText)
                 continue;
+            console.log("expandEventsWithInstantDocuments: clip extracted text begin");
             const clipped = pageText.length > maxChars
                 ? `${pageText.slice(0, maxChars)}\n\n[truncated: maxChars=${maxChars}]`
                 : pageText;
+            console.log("expandEventsWithInstantDocuments: clip extracted text ok");
+            console.log("expandEventsWithInstantDocuments: create derived document.parsed event");
+            const derivedAttachmentInfo = `Attachment info:\n${formatAttachmentSummary(part)}`;
             const derived = {
                 id: `derived:${event.id}:${String(fileRecord.id)}`,
                 type: derivedEventType,
@@ -95,14 +167,18 @@ export async function expandEventsWithInstantDocuments(params) {
                     parts: [
                         {
                             type: "text",
-                            text:
+                            text: "Parsed document available.\n" +
+                                derivedAttachmentInfo +
+                                "\nProvider: llamacloud",
                         },
                         { type: "text", text: `Document transcription:${clipped}` },
                     ],
                 },
             };
             out.push(derived);
+            console.log("expandEventsWithInstantDocuments: derived event appended");
         }
     }
+    console.log("expandEventsWithInstantDocuments: end");
     return out;
 }
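As the added comments in `expandEventsWithInstantDocuments` explain, file parts are no longer forwarded to the model: the original event is pushed in sanitized form (file parts removed, a placeholder text part appended), and the extracted text arrives in a separate derived event. A sketch of the two output objects for one input event carrying a single PDF part; only fields visible in the diff are shown and the values are illustrative:

    // Sanitized copy of the incoming event (file part stripped, placeholder appended).
    const sanitized = {
        content: {
            parts: [
                { type: "text", text: "please summarize the attached file" },
                {
                    type: "text",
                    text: "[Attachment omitted from model input. Parsed content will follow in a document.parsed event.]\n" +
                        'Attachment info:\nfileId="file_abc" filename="report.pdf" mediaType="application/pdf"',
                },
            ],
        },
    };

    // Derived event appended right after it, carrying the parsed text.
    const derived = {
        id: "derived:evt_123:file_abc",
        type: "document.parsed",
        content: {
            parts: [
                { type: "text", text: "Parsed document available.\nAttachment info:\n...\nProvider: llamacloud" },
                { type: "text", text: "Document transcription:<markdown, clipped to maxChars>" },
            ],
        },
    };

Because the derived id is built from the event id and the file id, re-expanding the same history produces the same derived ids.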
@@ -12,6 +12,8 @@ export type InstantStoreDb = any;
 export declare class InstantStore implements StoryStore {
     private db;
     constructor(db: InstantStoreDb);
+    private debugEventExpansionEnabled;
+    private debugLog;
     getOrCreateContext<C>(contextIdentifier: ContextIdentifier | null): Promise<StoredContext<C>>;
     private createContext;
     getContext<C>(contextIdentifier: ContextIdentifier): Promise<StoredContext<C> | null>;
@@ -7,6 +7,15 @@ export class InstantStore {
     constructor(db) {
         this.db = db;
     }
+    debugEventExpansionEnabled() {
+        return process.env.EKAIROS_DEBUG_EVENT_EXPANSION === "1";
+    }
+    debugLog(message) {
+        if (!this.debugEventExpansionEnabled())
+            return;
+        // CRITICAL: static strings only (no dynamic values in logs).
+        console.log(message);
+    }
     async getOrCreateContext(contextIdentifier) {
         if (!contextIdentifier) {
             return this.createContext();
@@ -126,7 +135,9 @@ export class InstantStore {
     async getEvents(contextIdentifier) {
         const contextWhere = contextIdentifier.id
             ? { context: contextIdentifier.id }
-            :
+            : // IMPORTANT: `lookup("key", ...)` is valid in transactions, but not in query filters for links.
+                // Use nested where on the linked context's indexed `key` instead.
+                { "context.key": contextIdentifier.key };
         const res = await this.db.query({
             context_events: {
                 $: {
@@ -172,14 +183,18 @@ export class InstantStore {
         await this.db.transact(txs);
     }
     async eventsToModelMessages(events) {
+        this.debugLog("InstantStore.eventsToModelMessages: begin");
         // Default behavior for Instant-backed stories:
         // - Expand file parts into derived `document.parsed` events (persisting parsed content into document_documents)
         // - Then convert expanded events to model messages
+        this.debugLog("InstantStore.eventsToModelMessages: expandEventsWithInstantDocuments begin");
         const expanded = await expandEventsWithInstantDocuments({
             db: this.db,
             events,
             derivedEventType: "document.parsed",
         });
+        this.debugLog("InstantStore.eventsToModelMessages: expandEventsWithInstantDocuments ok");
+        this.debugLog("InstantStore.eventsToModelMessages: convertEventsToModelMessages begin");
         return await convertEventsToModelMessages(expanded);
     }
 }
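The new `debugLog` helper in `InstantStore` only prints when `EKAIROS_DEBUG_EVENT_EXPANSION=1`, and it is restricted to static strings by design. A quick local smoke test of the trace (a sketch; the `InstantStore` import specifier and the event shape are assumptions, not defined by this diff):

    import { init } from "@instantdb/admin";
    // Illustrative specifier; adjust to wherever the package exposes InstantStore.
    import { InstantStore } from "@ekairos/story/dist/stores/instant.store.js";

    process.env.EKAIROS_DEBUG_EVENT_EXPANSION = "1"; // enables the static trace lines

    const db = init({
        appId: process.env.INSTANT_APP_ID,
        adminToken: process.env.INSTANT_ADMIN_TOKEN,
    });
    const store = new InstantStore(db);

    // Logs "InstantStore.eventsToModelMessages: begin", then the expansion steps.
    const messages = await store.eventsToModelMessages([
        { id: "evt_1", type: "user.message", content: { parts: [{ type: "text", text: "hi" }] } },
    ]);
    console.log(messages.length);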
package/dist/story.builder.d.ts CHANGED
@@ -1,8 +1,8 @@
 import type { Tool } from "ai";
-import type { StoryEnvironment } from "./story.config";
-import { Story, type StoryModelInit, type StoryOptions, type ShouldContinue, type StoryShouldContinueArgs, type StoryReactParams } from "./story.engine";
-import type { ContextEvent, StoredContext } from "./story.store";
-import { type StoryKey } from "./story.registry";
+import type { StoryEnvironment } from "./story.config.js";
+import { Story, type StoryModelInit, type StoryOptions, type ShouldContinue, type StoryShouldContinueArgs, type StoryReactParams } from "./story.engine.js";
+import type { ContextEvent, StoredContext } from "./story.store.js";
+import { type StoryKey } from "./story.registry.js";
 export interface StoryConfig<Context, Env extends StoryEnvironment = StoryEnvironment> {
     context: (context: StoredContext<Context>, env: Env) => Promise<Context> | Context;
     /**
package/dist/story.builder.js CHANGED
@@ -1,5 +1,5 @@
-import { Story, } from "./story.engine";
-import { registerStory } from "./story.registry";
+import { Story, } from "./story.engine.js";
+import { registerStory } from "./story.registry.js";
 export function story(config) {
     class FunctionalStory extends Story {
         constructor() {
package/dist/story.config.d.ts CHANGED
@@ -1,5 +1,4 @@
 import type { StoryStore } from "./story.store";
-import type { EkairosConfig } from "./ekairos.config";
 /**
  * ## story.config.ts
  *
@@ -15,9 +14,9 @@ import type { EkairosConfig } from "./ekairos.config";
 export type StoryEnvironment = Record<string, unknown>;
 export type StoryRuntime = {
     store: StoryStore;
+    db: any;
 };
 export type StoryRuntimeResolver<Env extends StoryEnvironment = StoryEnvironment> = (env: Env) => Promise<StoryRuntime> | StoryRuntime;
-export declare function getEkairosConfig(): EkairosConfig | null;
 /**
  * Optional global bootstrap hook for step runtimes.
  *
package/dist/story.config.js CHANGED
@@ -1,37 +1,12 @@
+import { pathToFileURL } from "node:url";
+import { join } from "node:path";
 let runtimeResolver = null;
-const GLOBAL_RUNTIME_RESOLVER = Symbol.for("@ekairos/story.runtimeResolver");
-const GLOBAL_RUNTIME_BOOTSTRAP = Symbol.for("@ekairos/story.runtimeBootstrap");
-const GLOBAL_EKAIROS_CONFIG = Symbol.for("@ekairos/story.ekairosConfig");
-function getGlobal(key) {
-    try {
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        const v = globalThis?.[key];
-        return v ?? null;
-    }
-    catch {
-        return null;
-    }
-}
-function setGlobal(key, value) {
-    try {
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        ;
-        globalThis[key] = value;
-    }
-    catch {
-        // ignore
-    }
-}
 function getRuntimeResolver() {
-    return runtimeResolver
-}
-export function getEkairosConfig() {
-    return getGlobal(GLOBAL_EKAIROS_CONFIG);
+    return runtimeResolver;
 }
 let runtimeBootstrap = null;
 export function configureStoryRuntimeBootstrap(bootstrap) {
     runtimeBootstrap = bootstrap;
-    setGlobal(GLOBAL_RUNTIME_BOOTSTRAP, bootstrap);
 }
 /**
  * Configure the story runtime resolver (global).
@@ -41,7 +16,6 @@ export function configureStoryRuntimeBootstrap(bootstrap) {
  */
 export function configureStoryRuntime(resolver) {
     runtimeResolver = resolver;
-    setGlobal(GLOBAL_RUNTIME_RESOLVER, runtimeResolver);
 }
 export function isStoryRuntimeConfigured() {
     return Boolean(runtimeResolver);
@@ -49,15 +23,30 @@ export function isStoryRuntimeConfigured() {
 export async function resolveStoryRuntime(env) {
     if (!getRuntimeResolver()) {
         // Best-effort: allow the step runtime to self-bootstrap once.
-
-
-
-        //
-        //
+        if (runtimeBootstrap) {
+            await runtimeBootstrap();
+        }
+        // Convention bootstrap (portable, runtime-resolvable):
+        // If the host app provides an `ekairos.bootstrap.js` at the project root, we can load it
+        // from the step runtime using a file URL. This avoids relying on bundler-only aliases.
         if (!getRuntimeResolver()) {
-            const
-            if (
-
+            const cwd = typeof process !== "undefined" && process.cwd ? process.cwd() : null;
+            if (cwd) {
+                const candidates = [
+                    "ekairos.bootstrap.js",
+                    "ekairos.bootstrap.cjs",
+                    "ekairos.bootstrap.mjs",
+                ];
+                for (const filename of candidates) {
+                    try {
+                        await import(pathToFileURL(join(cwd, filename)).href);
+                        break;
+                    }
+                    catch {
+                        // ignore
+                    }
+                }
+            }
         }
     }
     // If bootstrap succeeded, proceed.
     const resolver = getRuntimeResolver();
@@ -66,8 +55,11 @@ export async function resolveStoryRuntime(env) {
         throw new Error([
             "Story runtime is not configured.",
             "",
-            "
-            "
+            "Convention:",
+            "- Create an app-level `ekairos.ts` that exports `ekairosConfig = createEkairosConfig({ runtime })`",
+            "- Ensure `ekairosConfig.setup()` runs in the step runtime (module load / worker boot).",
+            "",
+            "If you already have that file, ensure it is evaluated in the step runtime before calling story store steps.",
         ].join("\n"));
     }
     return await getRuntimeResolver()(env);
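The rewritten `resolveStoryRuntime` drops the global-symbol indirection: when no resolver is configured it first awaits the registered bootstrap hook, then falls back to importing `ekairos.bootstrap.{js,cjs,mjs}` from the process working directory. A minimal bootstrap sketch that satisfies that convention; the Instant wiring and import specifiers are assumptions, and any module that ends up calling `configureStoryRuntime` works:

    // ekairos.bootstrap.js — placed at the project root so the fallback import finds it
    import { init } from "@instantdb/admin";
    // Illustrative specifiers into the package dist; real export paths may differ.
    import { configureStoryRuntime } from "@ekairos/story/dist/story.config.js";
    import { InstantStore } from "@ekairos/story/dist/stores/instant.store.js";

    configureStoryRuntime(() => {
        const db = init({
            appId: process.env.INSTANT_APP_ID,
            adminToken: process.env.INSTANT_ADMIN_TOKEN,
        });
        // StoryRuntime now carries both the store and the raw db (see story.config.d.ts above).
        return { store: new InstantStore(db), db };
    });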
package/dist/story.d.ts CHANGED
@@ -1,2 +1,2 @@
-export { Story, type StoryOptions, type StoryStreamOptions, type ShouldContinue, type StoryShouldContinueArgs, } from "./story.engine";
-export { story, createStory, type StoryConfig, type StoryInstance, type RegistrableStoryBuilder, } from "./story.builder";
+export { Story, type StoryOptions, type StoryStreamOptions, type ShouldContinue, type StoryShouldContinueArgs, } from "./story.engine.js";
+export { story, createStory, type StoryConfig, type StoryInstance, type RegistrableStoryBuilder, } from "./story.builder.js";