@ekairos/events 1.22.4-beta.development.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +115 -0
- package/dist/codex.d.ts +95 -0
- package/dist/codex.js +91 -0
- package/dist/context.builder.d.ts +62 -0
- package/dist/context.builder.js +143 -0
- package/dist/context.config.d.ts +9 -0
- package/dist/context.config.js +30 -0
- package/dist/context.contract.d.ts +47 -0
- package/dist/context.contract.js +132 -0
- package/dist/context.d.ts +4 -0
- package/dist/context.durable.d.ts +5 -0
- package/dist/context.durable.js +13 -0
- package/dist/context.engine.d.ts +216 -0
- package/dist/context.engine.js +1098 -0
- package/dist/context.events.d.ts +55 -0
- package/dist/context.events.js +431 -0
- package/dist/context.hooks.d.ts +21 -0
- package/dist/context.hooks.js +31 -0
- package/dist/context.js +3 -0
- package/dist/context.parts.d.ts +241 -0
- package/dist/context.parts.js +360 -0
- package/dist/context.reactor.d.ts +3 -0
- package/dist/context.reactor.js +2 -0
- package/dist/context.registry.d.ts +13 -0
- package/dist/context.registry.js +30 -0
- package/dist/context.skill.d.ts +9 -0
- package/dist/context.skill.js +1 -0
- package/dist/context.step-stream.d.ts +26 -0
- package/dist/context.step-stream.js +59 -0
- package/dist/context.store.d.ts +85 -0
- package/dist/context.store.js +1 -0
- package/dist/context.stream.d.ts +148 -0
- package/dist/context.stream.js +141 -0
- package/dist/context.toolcalls.d.ts +60 -0
- package/dist/context.toolcalls.js +117 -0
- package/dist/env.d.ts +3 -0
- package/dist/env.js +53 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.js +11 -0
- package/dist/mcp.d.ts +1 -0
- package/dist/mcp.js +1 -0
- package/dist/mirror.d.ts +41 -0
- package/dist/mirror.js +1 -0
- package/dist/oidc.d.ts +7 -0
- package/dist/oidc.js +25 -0
- package/dist/polyfills/dom-events.d.ts +1 -0
- package/dist/polyfills/dom-events.js +89 -0
- package/dist/react.d.ts +42 -0
- package/dist/react.js +88 -0
- package/dist/reactors/ai-sdk.chunk-map.d.ts +12 -0
- package/dist/reactors/ai-sdk.chunk-map.js +143 -0
- package/dist/reactors/ai-sdk.reactor.d.ts +33 -0
- package/dist/reactors/ai-sdk.reactor.js +65 -0
- package/dist/reactors/ai-sdk.step.d.ts +48 -0
- package/dist/reactors/ai-sdk.step.js +343 -0
- package/dist/reactors/scripted.reactor.d.ts +17 -0
- package/dist/reactors/scripted.reactor.js +51 -0
- package/dist/reactors/types.d.ts +52 -0
- package/dist/reactors/types.js +1 -0
- package/dist/runtime.d.ts +19 -0
- package/dist/runtime.js +26 -0
- package/dist/runtime.step.d.ts +9 -0
- package/dist/runtime.step.js +7 -0
- package/dist/schema.d.ts +2 -0
- package/dist/schema.js +191 -0
- package/dist/steps/do-context-stream-step.d.ts +34 -0
- package/dist/steps/do-context-stream-step.js +96 -0
- package/dist/steps/mirror.steps.d.ts +6 -0
- package/dist/steps/mirror.steps.js +48 -0
- package/dist/steps/store.steps.d.ts +96 -0
- package/dist/steps/store.steps.js +595 -0
- package/dist/steps/stream.steps.d.ts +86 -0
- package/dist/steps/stream.steps.js +270 -0
- package/dist/steps/trace.steps.d.ts +38 -0
- package/dist/steps/trace.steps.js +270 -0
- package/dist/stores/instant.document-parser.d.ts +6 -0
- package/dist/stores/instant.document-parser.js +210 -0
- package/dist/stores/instant.documents.d.ts +16 -0
- package/dist/stores/instant.documents.js +152 -0
- package/dist/stores/instant.store.d.ts +66 -0
- package/dist/stores/instant.store.js +575 -0
- package/dist/tools-to-model-tools.d.ts +19 -0
- package/dist/tools-to-model-tools.js +21 -0
- package/package.json +142 -0
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
import { contextStreamByteLength, parseContextStepStreamChunk, } from "../context.step-stream.js";
|
|
2
|
+
export async function writeContextEvents(params) {
|
|
3
|
+
"use step";
|
|
4
|
+
const writable = params.writable;
|
|
5
|
+
if (!writable || !params.events.length)
|
|
6
|
+
return;
|
|
7
|
+
const writer = writable.getWriter();
|
|
8
|
+
try {
|
|
9
|
+
for (const event of params.events) {
|
|
10
|
+
await writer.write({
|
|
11
|
+
type: `data-${String(event.type)}`,
|
|
12
|
+
data: event,
|
|
13
|
+
});
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
finally {
|
|
17
|
+
writer.releaseLock();
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
export async function closeContextStream(params) {
|
|
21
|
+
"use step";
|
|
22
|
+
const sendFinish = params.sendFinish ?? true;
|
|
23
|
+
const preventClose = params.preventClose ?? false;
|
|
24
|
+
const writable = params.writable;
|
|
25
|
+
if (!writable)
|
|
26
|
+
return;
|
|
27
|
+
if (sendFinish) {
|
|
28
|
+
const writer = writable.getWriter();
|
|
29
|
+
try {
|
|
30
|
+
await writer.write({ type: "finish" });
|
|
31
|
+
}
|
|
32
|
+
finally {
|
|
33
|
+
writer.releaseLock();
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
if (!preventClose) {
|
|
37
|
+
await writable.close();
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
// Coerces an arbitrary value to an object: non-null objects pass through
// unchanged, everything else (null, primitives, undefined) becomes `{}`.
function asRecord(value) {
    if (value !== null && typeof value === "object") {
        return value;
    }
    return {};
}
|
|
43
|
+
// Coerces an arbitrary value to a trimmed string; non-strings become "".
function asString(value) {
    if (typeof value !== "string") {
        return "";
    }
    return value.trim();
}
|
|
46
|
+
function createUnsetStreamLinkTx(db, executionId, label, streamId) {
|
|
47
|
+
try {
|
|
48
|
+
return db.tx.event_executions[executionId].unlink({ [label]: streamId });
|
|
49
|
+
}
|
|
50
|
+
catch {
|
|
51
|
+
return null;
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
/**
 * Derives the stable stream client id for a context step: `event-step:<stepId>`.
 * Throws when the step id is missing or blank after trimming.
 */
export function createContextStepStreamClientId(stepId) {
    const trimmed = String(stepId ?? "").trim();
    if (trimmed.length === 0) {
        throw new Error("createContextStepStreamClientId: stepId is required.");
    }
    return `event-step:${trimmed}`;
}
|
|
61
|
+
/**
 * Opens a persisted InstantDB write stream for a context step.
 *
 * Creates the write stream (keyed by an explicit clientId or one derived from
 * the stepId), then records the stream pointers in one transaction: the step
 * row gets streamId/clientId/startedAt (finish/abort fields reset) plus a
 * `stream` link, and the execution row gets active/last pointers plus links.
 *
 * @returns A session `{ stream, streamId, clientId, executionId, stepId }`
 *          consumed by close/abortPersistedContextStepStream.
 * @throws  When the runtime's db does not expose `streams.createWriteStream`.
 */
export async function createPersistedContextStepStream(params) {
    "use step";
    const { getContextRuntime } = await import("../runtime.js");
    const runtime = await getContextRuntime(params.env);
    const db = runtime?.db;
    if (!db?.streams?.createWriteStream) {
        throw new Error("InstantDB streams are not available on the configured runtime. Upgrade @instantdb/admin to a streams-capable version.");
    }
    const clientId = asString(params.clientId) || createContextStepStreamClientId(params.stepId);
    const startedAt = new Date();
    const writeStream = db.streams.createWriteStream({ clientId });
    const streamId = await writeStream.streamId();
    const stepTx = db.tx.event_steps[params.stepId]
        .update({
            streamId,
            streamClientId: clientId,
            streamStartedAt: startedAt,
            streamFinishedAt: null,
            streamAbortReason: null,
            updatedAt: new Date(),
        })
        .link({ stream: streamId });
    const executionTx = db.tx.event_executions[params.executionId]
        .update({
            activeStreamId: streamId,
            activeStreamClientId: clientId,
            lastStreamId: streamId,
            lastStreamClientId: clientId,
            updatedAt: new Date(),
        })
        .link({ activeStream: streamId, lastStream: streamId });
    await db.transact([stepTx, executionTx]);
    return {
        stream: writeStream,
        streamId,
        clientId,
        executionId: params.executionId,
        stepId: params.stepId,
    };
}
|
|
104
|
+
/**
 * Shared teardown for a persisted step stream session.
 *
 * First finishes the underlying writer (`abort` with a reason, or `close`),
 * always releasing the lock, then records the result: the step row gets
 * streamFinishedAt and (for aborts) the abort reason; the execution row has
 * its active pointers cleared and its last pointers set to this session, and
 * the `activeStream` link is unlinked best-effort.
 *
 * NOTE(review): `db` is read via `runtime?.db` but used unguarded below —
 * presumably a session only exists when createPersistedContextStepStream
 * succeeded (which requires db); confirm before relying on it.
 */
async function finalizePersistedContextStepStream(params) {
    "use step";
    const { getContextRuntime } = await import("../runtime.js");
    const runtime = await getContextRuntime(params.env);
    const db = runtime?.db;
    const { session } = params;
    const aborting = params.mode === "abort";
    const abortReason = aborting ? params.abortReason ?? "aborted" : null;
    const writer = session.stream.getWriter();
    try {
        if (aborting) {
            await writer.abort(abortReason);
        }
        else {
            await writer.close();
        }
    }
    finally {
        writer.releaseLock();
    }
    const now = new Date();
    const txs = [
        db.tx.event_steps[session.stepId].update({
            streamFinishedAt: now,
            streamAbortReason: abortReason,
            updatedAt: now,
        }),
        db.tx.event_executions[session.executionId].update({
            activeStreamId: null,
            activeStreamClientId: null,
            lastStreamId: session.streamId,
            lastStreamClientId: session.clientId,
            updatedAt: now,
        }),
    ];
    const unsetActive = createUnsetStreamLinkTx(db, session.executionId, "activeStream", session.streamId);
    if (unsetActive)
        txs.push(unsetActive);
    await db.transact(txs);
}
|
|
141
|
+
/** Cleanly closes a persisted step stream session (writer close + pointer cleanup). */
export async function closePersistedContextStepStream(params) {
    const { env, session } = params;
    return await finalizePersistedContextStepStream({ env, session, mode: "close" });
}
|
|
148
|
+
/** Aborts a persisted step stream session, recording `params.reason` as the abort reason. */
export async function abortPersistedContextStepStream(params) {
    const { env, session, reason } = params;
    return await finalizePersistedContextStepStream({
        env,
        session,
        mode: "abort",
        abortReason: reason,
    });
}
|
|
156
|
+
/**
 * Reads a persisted step stream back as parsed chunks.
 *
 * Opens a read stream (by clientId and/or streamId, from an optional byte
 * offset), splits the raw text on newlines with a carry-over buffer for
 * partial lines, parses each non-blank line via parseContextStepStreamChunk,
 * and invokes `onChunk` per parsed chunk. A trailing partial line (no final
 * newline) is parsed as well.
 *
 * @returns `{ chunks, byteOffset }` — all parsed chunks plus the byte offset
 *          reached (startOffset + byte length of everything consumed).
 * @throws  When streams are unavailable or neither id is provided.
 */
export async function readPersistedContextStepStream(params) {
    if (!params.db?.streams?.createReadStream) {
        throw new Error("InstantDB streams are not available on the provided runtime.");
    }
    const clientId = asString(params.clientId);
    const streamId = asString(params.streamId);
    if (!clientId && !streamId) {
        throw new Error("readPersistedContextStepStream requires clientId or streamId.");
    }
    const startOffset = typeof params.byteOffset === "number" && Number.isFinite(params.byteOffset)
        ? Math.max(0, params.byteOffset)
        : 0;
    const stream = params.db.streams.createReadStream({
        clientId: clientId || undefined,
        streamId: streamId || undefined,
        byteOffset: startOffset,
    });
    const chunks = [];
    // Parse one complete line and hand it to the caller's callback.
    const handleLine = async (line) => {
        const parsed = parseContextStepStreamChunk(line);
        chunks.push(parsed);
        await params.onChunk?.(parsed);
    };
    let byteOffset = startOffset;
    let pending = "";
    for await (const rawChunk of stream) {
        const encoded = typeof rawChunk === "string" ? rawChunk : String(rawChunk ?? "");
        if (!encoded)
            continue;
        byteOffset += contextStreamByteLength(encoded);
        pending += encoded;
        const pieces = pending.split("\n");
        // Last piece may be a partial line; keep it for the next iteration.
        pending = pieces.pop() ?? "";
        for (const piece of pieces) {
            const line = piece.trim();
            if (line) {
                await handleLine(line);
            }
        }
    }
    const tail = pending.trim();
    if (tail) {
        await handleLine(tail);
    }
    return { chunks, byteOffset };
}
|
|
204
|
+
/**
 * Resolves the stream pointer for a context's current execution.
 *
 * Queries the context row (with its `currentExecution` link) and returns a
 * pointer preferring the active stream, then the last stream, then an empty
 * "none" pointer. Returns null when the context has no current execution.
 */
export async function resolveContextExecutionStreamPointer(params) {
    const snapshot = await params.db.query({
        event_contexts: {
            $: {
                where: { id: params.contextId },
                limit: 1,
            },
            currentExecution: {},
        },
    });
    const contextRow = Array.isArray(snapshot?.event_contexts)
        ? snapshot.event_contexts[0]
        : null;
    const executionRow = asRecord(contextRow?.currentExecution);
    const executionId = asString(executionRow.id);
    if (!executionId)
        return null;
    const status = asString(executionRow.status) || null;
    // All three outcomes share this shape; only source/ids differ.
    const buildPointer = (source, pointerClientId, pointerStreamId) => ({
        executionId,
        status,
        source,
        clientId: pointerClientId || null,
        streamId: pointerStreamId || null,
    });
    const activeClientId = asString(executionRow.activeStreamClientId);
    const activeId = asString(executionRow.activeStreamId);
    if (activeClientId || activeId) {
        return buildPointer("active", activeClientId, activeId);
    }
    const lastClientId = asString(executionRow.lastStreamClientId);
    const lastId = asString(executionRow.lastStreamId);
    if (lastClientId || lastId) {
        return buildPointer("last", lastClientId, lastId);
    }
    return buildPointer("none", "", "");
}
|
|
251
|
+
/**
 * Polls resolveContextExecutionStreamPointer until a pointer with a clientId
 * or streamId appears, or the timeout elapses (default 15s, poll every 125ms,
 * poll interval floored at 10ms). Returns null on timeout.
 */
export async function waitForContextExecutionStreamPointer(params) {
    const clampOrDefault = (value, fallback, min) =>
        typeof value === "number" && Number.isFinite(value) ? Math.max(min, value) : fallback;
    const timeoutMs = clampOrDefault(params.timeoutMs, 15000, 0);
    const pollMs = clampOrDefault(params.pollMs, 125, 10);
    const deadline = Date.now() + timeoutMs;
    while (Date.now() <= deadline) {
        const pointer = await resolveContextExecutionStreamPointer({
            db: params.db,
            contextId: params.contextId,
        });
        if (pointer && (pointer.clientId || pointer.streamId)) {
            return pointer;
        }
        await new Promise((resolve) => setTimeout(resolve, pollMs));
    }
    return null;
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import "../polyfills/dom-events.js";
import type { ContextEnvironment } from "../context.config.js";
import type { TraceEventKind } from "../context.contract.js";
/**
 * One trace event to persist via writeContextTraceEvents.
 * Only workflowRunId, eventId and eventKind are required; everything else is
 * optional correlation/usage metadata.
 */
export type ContextTraceEventWrite = {
    /** Workflow run this event belongs to; events are grouped per run. */
    workflowRunId: string;
    /** Event id; combined with workflowRunId it identifies the event (`<runId>:<eventId>`). */
    eventId: string;
    eventKind: TraceEventKind;
    /** Ordering within the run; assigned from the run's existing event count when omitted. */
    seq?: number;
    /** Event timestamp as a string (presumably ISO-8601 — it is passed to `new Date(...)`). */
    eventAt?: string;
    contextKey?: string;
    /** Span correlation (span/parent span ids for step events). */
    spanId?: string;
    parentSpanId?: string;
    /** Entity correlation ids (context, execution, step, context event, tool call). */
    contextId?: string;
    executionId?: string;
    stepId?: string;
    contextEventId?: string;
    toolCallId?: string;
    /** Part-level correlation for streamed message parts. */
    partKey?: string;
    partIdx?: number;
    isDeleted?: boolean;
    /** Model/usage accounting fields, recorded verbatim. */
    aiProvider?: string;
    aiModel?: string;
    promptTokens?: number;
    promptTokensCached?: number;
    promptTokensUncached?: number;
    completionTokens?: number;
    totalTokens?: number;
    latencyMs?: number;
    cacheCostUsd?: number;
    computeCostUsd?: number;
    costUsd?: number;
    /** Arbitrary event payload, stored as-is. */
    payload?: unknown;
    testId?: string;
};
/**
 * Persists trace events locally (InstantDB) and forwards them to the
 * ekairos-core ingest endpoint. Best-effort unless strict tracing is enabled.
 */
export declare function writeContextTraceEvents(params: {
    env: ContextEnvironment;
    events: ContextTraceEventWrite[];
}): Promise<void>;
|
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
import "../polyfills/dom-events.js";
|
|
2
|
+
import { lookup } from "@instantdb/admin";
|
|
3
|
+
// Resolves the trace ingest base URL from the first non-empty env var
// (EKAIROS_CORE_BASE_URL, then EKAIROS_TRACES_BASE_URL, then
// EKAIROS_BASE_URL), stripping a single trailing slash. Throws when none set.
function requireBaseUrl() {
    const candidates = [
        process.env.EKAIROS_CORE_BASE_URL,
        process.env.EKAIROS_TRACES_BASE_URL,
        process.env.EKAIROS_BASE_URL,
    ];
    const baseUrl = candidates.find((value) => !!value);
    if (!baseUrl) {
        throw new Error("[context/trace] Missing EKAIROS_CORE_BASE_URL (or EKAIROS_TRACES_BASE_URL)");
    }
    return baseUrl.replace(/\/$/, "");
}
|
|
12
|
+
// Returns the Clerk org API key used to authenticate against ekairos-core.
// Throws when EKAIROS_CLERK_API_KEY is unset or empty.
function requireToken() {
    // Preferred: Clerk org API key (opaque token) for ekairos-core.
    const apiKey = process.env.EKAIROS_CLERK_API_KEY;
    if (!apiKey) {
        throw new Error("[context/trace] Missing EKAIROS_CLERK_API_KEY");
    }
    return apiKey;
}
|
|
19
|
+
// Module-level cache for the exchanged trace JWT ({ token, expMs }); null
// until the first successful auth exchange in getTraceAuthHeader.
let jwtCache = null;
|
|
20
|
+
// Extracts the `exp` claim from a JWT and returns it in epoch milliseconds.
// Returns null for non-JWT strings, undecodable payloads, a missing/non-number
// exp, or exp === 0.
function parseJwtExpMs(token) {
    const segments = token.split(".");
    if (segments.length !== 3)
        return null;
    try {
        // base64url -> base64 before decoding the payload segment.
        const base64 = segments[1].replace(/-/g, "+").replace(/_/g, "/");
        const payload = JSON.parse(Buffer.from(base64, "base64").toString("utf-8"));
        const exp = typeof payload?.exp === "number" ? payload.exp : null;
        return exp ? exp * 1000 : null;
    }
    catch {
        return null;
    }
}
|
|
33
|
+
/**
 * Builds the Authorization header for trace ingest.
 *
 * Exchanges the API key for a short-lived JWT via POST
 * `<baseUrl>/api/context/traces/auth` and caches it at module level, reusing
 * it while it still has at least 60s of validity. On any exchange failure
 * (network error, non-OK response, missing token) it falls back to
 * `Bearer <apiKey>`.
 */
async function getTraceAuthHeader(baseUrl, projectId) {
    const apiKey = requireToken();
    const now = Date.now();
    if (jwtCache && jwtCache.expMs - 60000 > now) {
        return `Bearer ${jwtCache.token}`;
    }
    try {
        const response = await fetch(`${baseUrl}/api/context/traces/auth`, {
            method: "POST",
            headers: {
                "content-type": "application/json",
                authorization: `Bearer ${apiKey}`,
            },
            body: JSON.stringify({ projectId }),
        });
        if (response.ok) {
            const json = (await response.json());
            const token = typeof json?.token === "string" ? json.token : "";
            if (token) {
                // Default to a 1h lifetime when the token carries no exp claim.
                const expMs = parseJwtExpMs(token) ?? now + 60 * 60 * 1000;
                jwtCache = { token, expMs };
                return `Bearer ${token}`;
            }
        }
    }
    catch {
        // fall back to API key below
    }
    return `Bearer ${apiKey}`;
}
|
|
63
|
+
async function readProjectId() {
|
|
64
|
+
try {
|
|
65
|
+
const { getRuntimeProjectId } = await import("@ekairos/domain/runtime");
|
|
66
|
+
const fromConfig = String(getRuntimeProjectId?.() || "").trim();
|
|
67
|
+
if (fromConfig)
|
|
68
|
+
return fromConfig;
|
|
69
|
+
}
|
|
70
|
+
catch {
|
|
71
|
+
// ignore and fall back to env
|
|
72
|
+
}
|
|
73
|
+
const fallback = typeof process !== "undefined" && process.env
|
|
74
|
+
? String(process.env.EKAIROS_PROJECT_ID || "").trim()
|
|
75
|
+
: "";
|
|
76
|
+
return fallback;
|
|
77
|
+
}
|
|
78
|
+
/**
 * Persists trace events and forwards them to ekairos-core.
 *
 * Two phases:
 *   1. Local persistence into InstantDB (event_trace_events / event_trace_spans /
 *      event_trace_runs), keyed by `<runId>:<eventId>` so re-ingest upserts.
 *      Missing `seq` values are assigned by continuing from the run's stored
 *      eventsCount; the assigned seq is written back onto the event object
 *      (mutates the caller's array elements) so the remote ingest sees it too.
 *   2. Remote ingest via POST `<baseUrl>/api/context/traces/ingest`.
 *
 * Both phases are best-effort: failures are swallowed unless strict mode is
 * enabled (env.traces.strict or EKAIROS_TRACES_STRICT=1).
 */
export async function writeContextTraceEvents(params) {
    if (!params.events?.length)
        return;
    const envTrace = params.env?.traces;
    // Tracing must NEVER break workflows by default.
    // Use EKAIROS_TRACES_STRICT=1 if you want to fail hard.
    const strict = envTrace?.strict === true || process.env.EKAIROS_TRACES_STRICT === "1";
    // 1) Local trace persistence (InstantDB source of truth).
    try {
        const { getContextRuntime } = await import("../runtime.js");
        const runtime = await getContextRuntime(params.env);
        const db = runtime?.db;
        if (db) {
            const now = new Date();
            const orgId = typeof params.env?.orgId === "string"
                ? String(params.env.orgId)
                : "";
            const projectId = await readProjectId();
            // Group events per workflow run; events without a run id are skipped.
            const byRun = new Map();
            for (const ev of params.events) {
                const runId = String(ev.workflowRunId || "");
                if (!runId)
                    continue;
                if (!byRun.has(runId))
                    byRun.set(runId, []);
                byRun.get(runId).push(ev);
            }
            // Seed per-run seq counters from the run's persisted eventsCount so
            // auto-assigned seqs continue the existing sequence.
            const seqByRun = new Map();
            const existingCountByRun = new Map();
            for (const [runId] of byRun) {
                let existingCount = 0;
                try {
                    const q = await db.query({
                        event_trace_runs: {
                            $: { where: { workflowRunId: runId }, limit: 1 },
                        },
                    });
                    const row = q?.event_trace_runs?.[0];
                    existingCount = Number(row?.eventsCount ?? 0) || 0;
                }
                catch {
                    // ignore
                }
                existingCountByRun.set(runId, existingCount);
                seqByRun.set(runId, existingCount);
            }
            const txs = [];
            const spanTxs = [];
            for (const ev of params.events) {
                const runId = String(ev.workflowRunId || "");
                if (!runId)
                    continue;
                // Upsert key: run id + event id (lookup-based upsert below).
                const key = `${runId}:${String(ev.eventId || "")}`;
                // NOTE(review): this guard is always false — `key` is built with a
                // literal ":" so includes(":") is always true; the continue is
                // unreachable. Presumably meant to validate eventId instead.
                if (!key.includes(":"))
                    continue;
                const eventAt = typeof ev.eventAt === "string" && ev.eventAt
                    ? new Date(ev.eventAt)
                    : undefined;
                let seq = Number.isFinite(Number(ev.seq)) ? Number(ev.seq) : undefined;
                if (typeof seq !== "number") {
                    const current = seqByRun.get(runId) ?? 0;
                    const next = current + 1;
                    seqByRun.set(runId, next);
                    seq = next;
                }
                // Write the resolved seq back so the remote ingest payload matches.
                ev.seq = seq;
                txs.push(db.tx.event_trace_events[lookup("key", key)].update({
                    key,
                    workflowRunId: runId,
                    seq,
                    eventId: String(ev.eventId || ""),
                    eventKind: String(ev.eventKind || ""),
                    eventAt: eventAt ?? undefined,
                    ingestedAt: now,
                    orgId: orgId || undefined,
                    projectId: projectId || undefined,
                    contextKey: ev.contextKey,
                    spanId: ev.spanId,
                    parentSpanId: ev.parentSpanId,
                    contextId: ev.contextId,
                    executionId: ev.executionId,
                    stepId: ev.stepId,
                    contextEventId: ev.contextEventId,
                    toolCallId: ev.toolCallId,
                    partKey: ev.partKey,
                    partIdx: ev.partIdx,
                    isDeleted: ev.isDeleted === true,
                    aiProvider: ev.aiProvider,
                    aiModel: ev.aiModel,
                    promptTokens: ev.promptTokens,
                    promptTokensCached: ev.promptTokensCached,
                    promptTokensUncached: ev.promptTokensUncached,
                    completionTokens: ev.completionTokens,
                    totalTokens: ev.totalTokens,
                    latencyMs: ev.latencyMs,
                    cacheCostUsd: ev.cacheCostUsd,
                    computeCostUsd: ev.computeCostUsd,
                    costUsd: ev.costUsd,
                    payload: ev.payload,
                }));
                // Step events additionally upsert a zero-duration span row.
                if (ev.eventKind === "context.step" || ev.eventKind === "workflow.step") {
                    const spanId = String(ev.stepId || ev.eventId || key);
                    spanTxs.push(db.tx.event_trace_spans[lookup("spanId", spanId)].update({
                        spanId,
                        parentSpanId: ev.parentSpanId,
                        workflowRunId: runId,
                        executionId: ev.executionId,
                        stepId: ev.stepId,
                        kind: ev.eventKind,
                        name: ev.eventKind,
                        status: "completed",
                        startedAt: eventAt ?? now,
                        endedAt: eventAt ?? now,
                        durationMs: 0,
                        payload: ev.payload,
                    }));
                }
            }
            if (txs.length) {
                await db.transact(txs);
            }
            if (spanTxs.length) {
                await db.transact(spanTxs);
            }
            // Roll up per-run aggregates (first/last event time, event count).
            for (const [runId, events] of byRun) {
                const eventDates = events
                    .map((e) => typeof e.eventAt === "string" && e.eventAt
                    ? new Date(e.eventAt)
                    : now)
                    .filter((d) => !Number.isNaN(d.getTime()));
                const firstEventAt = eventDates.length
                    ? new Date(Math.min(...eventDates.map((d) => d.getTime())))
                    : now;
                const lastEventAt = eventDates.length
                    ? new Date(Math.max(...eventDates.map((d) => d.getTime())))
                    : now;
                const existingCount = existingCountByRun.get(runId) ?? 0;
                await db.transact([
                    db.tx.event_trace_runs[lookup("workflowRunId", runId)].update({
                        workflowRunId: runId,
                        orgId: orgId || undefined,
                        projectId: projectId || undefined,
                        firstEventAt,
                        lastEventAt,
                        lastIngestedAt: now,
                        eventsCount: existingCount + events.length,
                    }),
                ]);
            }
        }
    }
    catch (e) {
        if (strict)
            throw e;
    }
    // 2) Remote ingest to ekairos-core (best-effort unless strict).
    let baseUrl = "";
    try {
        baseUrl = envTrace?.baseUrl ? String(envTrace.baseUrl).replace(/\/$/, "") : requireBaseUrl();
    }
    catch (e) {
        if (strict)
            throw e;
        return;
    }
    const projectId = envTrace?.projectId ? String(envTrace.projectId).trim() : await readProjectId();
    if (!projectId) {
        if (strict)
            throw new Error("[context/trace] Missing projectId (ekairosConfig or EKAIROS_PROJECT_ID)");
        return;
    }
    // An explicit env apiKey bypasses the JWT exchange in getTraceAuthHeader.
    const authHeader = envTrace?.apiKey
        ? `Bearer ${String(envTrace.apiKey).trim()}`
        : await getTraceAuthHeader(baseUrl, projectId);
    const res = await fetch(`${baseUrl}/api/context/traces/ingest`, {
        method: "POST",
        headers: {
            "content-type": "application/json",
            authorization: authHeader,
        },
        body: JSON.stringify({ projectId, events: params.events }),
    });
    if (!res.ok) {
        const text = await res.text().catch(() => "");
        if (strict) {
            throw new Error(`[context/trace] ekairos-core ingest failed (${res.status}): ${text}`);
        }
        if (process.env.PLAYWRIGHT_TEST === "1") {
            // eslint-disable-next-line no-console
            console.warn(`[context/trace] ingest failed (${res.status}): ${text}`);
        }
        return;
    }
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import "../polyfills/dom-events.js";
/**
 * Parses a document with LlamaParse and stores it in InstantDB (document_documents + link to file).
 * Returns the created documentId.
 *
 * @param db - InstantDB admin client (untyped `any` in this declaration).
 * @param buffer - Raw file bytes to parse.
 * @param fileName - Name of the file being parsed.
 * @param fileId - Id of the stored file entity — presumably the file the new document row is linked to; confirm in the implementation.
 * @returns The id of the created document row.
 */
export declare function parseAndStoreDocument(db: any, buffer: Buffer, fileName: string, fileId: string): Promise<string>;
|