@fallom/trace 0.1.11 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +215 -178
- package/dist/chunk-2BP4H4AD.mjs +3012 -0
- package/dist/chunk-7P6ASYW6.mjs +9 -0
- package/dist/chunk-K7HYYE4Y.mjs +2930 -0
- package/dist/chunk-KAZ5NEU2.mjs +2237 -0
- package/dist/chunk-KMA4IPED.mjs +252 -0
- package/dist/chunk-W6M2RQ3W.mjs +251 -0
- package/dist/index.d.mts +210 -248
- package/dist/index.d.ts +210 -248
- package/dist/index.js +947 -776
- package/dist/index.mjs +746 -576
- package/dist/models-2Y6DRQPS.mjs +9 -0
- package/dist/models-BUHMMTWK.mjs +9 -0
- package/dist/models-JIO5LVMB.mjs +8 -0
- package/dist/models-JKMOBZUO.mjs +8 -0
- package/dist/prompts-XSZHTCX7.mjs +15 -0
- package/package.json +1 -1
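The largest change in dist/index.js (diffed below) replaces the prompt-centric API (get(promptKey), getAB(abTestKey, sessionId)) with a config/model API: models.get(configKey, sessionId, options) deterministically assigns a model variant by hashing the session ID (MD5, reduced mod 1,000,000) against cumulative variant weights, and accepts a fallback model plus an optional pinned version. A minimal usage sketch, inferred from the compiled output below; the import style, config key, and model names are assumptions, not documented API:

  // Sketch inferred from the 0.2.0 compiled diff below -- not official docs.
  import { models } from "@fallom/trace";

  // Reads FALLOM_API_KEY / FALLOM_CONFIGS_URL when options are omitted.
  models.init({ apiKey: process.env.FALLOM_API_KEY });

  // The same sessionId always maps to the same variant; if the config (or a
  // pinned version) is missing, the hypothetical fallback model is returned.
  const model = await models.get("chat-config", "session-123", {
    fallback: "gpt-4o-mini",
  });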
package/dist/index.js
CHANGED
@@ -20,249 +20,248 @@ var __copyProps = (to, from, except, desc) => {
 };
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
-// src/
-var
-__export(
-clearPromptContext: () => clearPromptContext,
+// src/models.ts
+var models_exports = {};
+__export(models_exports, {
 get: () => get,
-
-getPromptContext: () => getPromptContext,
-init: () => init
+init: () => init2
 });
-function
-if (
-console.log(`[Fallom
+function log4(msg) {
+if (debugMode2) {
+console.log(`[Fallom] ${msg}`);
 }
 }
-function
-
-
-
-if (!
+function init2(options = {}) {
+apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
+baseUrl2 = options.baseUrl || process.env.FALLOM_CONFIGS_URL || process.env.FALLOM_BASE_URL || "https://configs.fallom.com";
+initialized2 = true;
+if (!apiKey2) {
 return;
 }
-
+fetchConfigs().catch(() => {
 });
 if (!syncInterval) {
 syncInterval = setInterval(() => {
-
+fetchConfigs().catch(() => {
 });
 }, 3e4);
 syncInterval.unref();
 }
 }
 function ensureInit() {
-if (!
+if (!initialized2) {
 try {
-
+init2();
 } catch {
 }
 }
 }
-async function
-
-
-
-
+async function fetchConfigs(timeout = SYNC_TIMEOUT) {
+if (!apiKey2) {
+log4("_fetchConfigs: No API key, skipping");
+return;
+}
 try {
+log4(`Fetching configs from ${baseUrl2}/configs`);
 const controller = new AbortController();
 const timeoutId = setTimeout(() => controller.abort(), timeout);
-const resp = await fetch(`${
-headers: { Authorization: `Bearer ${
+const resp = await fetch(`${baseUrl2}/configs`, {
+headers: { Authorization: `Bearer ${apiKey2}` },
 signal: controller.signal
 });
 clearTimeout(timeoutId);
+log4(`Response status: ${resp.status}`);
 if (resp.ok) {
 const data = await resp.json();
-
-
-
+const configs = data.configs || [];
+log4(`Got ${configs.length} configs: ${configs.map((c) => c.key)}`);
+for (const c of configs) {
+const key = c.key;
+const version = c.version || 1;
+log4(`Config '${key}' v${version}: ${JSON.stringify(c.variants)}`);
+if (!configCache.has(key)) {
+configCache.set(key, { versions: /* @__PURE__ */ new Map(), latest: null });
 }
-const cached =
-cached.versions.set(
-
-userTemplate: p.user_template
-});
-cached.current = p.version;
+const cached = configCache.get(key);
+cached.versions.set(version, c);
+cached.latest = version;
 }
+} else {
+log4(`Fetch failed: ${resp.statusText}`);
 }
-} catch {
+} catch (e) {
+log4(`Fetch exception: ${e}`);
 }
 }
-async function
-if (!
+async function fetchSpecificVersion(configKey, version, timeout = SYNC_TIMEOUT) {
+if (!apiKey2) return null;
 try {
 const controller = new AbortController();
 const timeoutId = setTimeout(() => controller.abort(), timeout);
-const resp = await fetch(
-
-
-
+const resp = await fetch(
+`${baseUrl2}/configs/${configKey}/version/${version}`,
+{
+headers: { Authorization: `Bearer ${apiKey2}` },
+signal: controller.signal
+}
+);
 clearTimeout(timeoutId);
 if (resp.ok) {
-const
-
-
-promptABCache.set(t.key, { versions: /* @__PURE__ */ new Map(), current: null });
-}
-const cached = promptABCache.get(t.key);
-cached.versions.set(t.version, { variants: t.variants });
-cached.current = t.version;
+const config = await resp.json();
+if (!configCache.has(configKey)) {
+configCache.set(configKey, { versions: /* @__PURE__ */ new Map(), latest: null });
 }
+configCache.get(configKey).versions.set(version, config);
+return config;
 }
 } catch {
 }
+return null;
 }
-function
-
-
-const key = varName.trim();
-return key in variables ? String(variables[key]) : match;
-});
-}
-function setPromptContext(ctx) {
-promptContext = ctx;
-}
-function getPromptContext() {
-const ctx = promptContext;
-promptContext = null;
-return ctx;
-}
-async function get(promptKey, options = {}) {
-const { variables, version, debug = false } = options;
-debugMode = debug;
-ensureInit();
-log(`get() called: promptKey=${promptKey}`);
-let promptData = promptCache.get(promptKey);
-if (!promptData) {
-log("Not in cache, fetching...");
-await fetchPrompts(SYNC_TIMEOUT);
-promptData = promptCache.get(promptKey);
-}
-if (!promptData) {
-throw new Error(
-`Prompt '${promptKey}' not found. Check that it exists in your Fallom dashboard.`
-);
-}
-const targetVersion = version ?? promptData.current;
-const content = promptData.versions.get(targetVersion);
-if (!content) {
-throw new Error(
-`Prompt '${promptKey}' version ${targetVersion} not found.`
-);
-}
-const system = replaceVariables(content.systemPrompt, variables);
-const user = replaceVariables(content.userTemplate, variables);
-setPromptContext({
-promptKey,
-promptVersion: targetVersion
-});
-log(`\u2705 Got prompt: ${promptKey} v${targetVersion}`);
-return {
-key: promptKey,
-version: targetVersion,
-system,
-user
-};
-}
-async function getAB(abTestKey, sessionId, options = {}) {
-const { variables, debug = false } = options;
-debugMode = debug;
+async function get(configKey, sessionId, options = {}) {
+const { version, fallback, debug = false } = options;
+debugMode2 = debug;
 ensureInit();
-
-
-
-
-
-
-
-if (!abData) {
-throw new Error(
-`Prompt A/B test '${abTestKey}' not found. Check that it exists in your Fallom dashboard.`
-);
-}
-const currentVersion = abData.current;
-const versionData = abData.versions.get(currentVersion);
-if (!versionData) {
-throw new Error(`Prompt A/B test '${abTestKey}' has no current version.`);
-}
-const { variants } = versionData;
-log(`A/B test '${abTestKey}' has ${variants?.length ?? 0} variants`);
-log(`Version data: ${JSON.stringify(versionData, null, 2)}`);
-if (!variants || variants.length === 0) {
-throw new Error(
-`Prompt A/B test '${abTestKey}' has no variants configured.`
+log4(
+`get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`
+);
+try {
+let configData = configCache.get(configKey);
+log4(
+`Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`
 );
-
-
-
-
-
-
-
-cumulative += variants[i].weight * 1e4;
-if (hashVal < cumulative) {
-selectedVariant = variants[i];
-selectedIndex = i;
-break;
+if (!configData) {
+log4("Not in cache, fetching...");
+await fetchConfigs(SYNC_TIMEOUT);
+configData = configCache.get(configKey);
+log4(
+`After fetch, cache lookup: ${configData ? "found" : "still not found"}`
+);
 }
-
-
-
-
-
-
-
-
-
-
-
+if (!configData) {
+log4(`Config not found, using fallback: ${fallback}`);
+if (fallback) {
+console.warn(
+`[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`
+);
+return returnModel(configKey, sessionId, fallback, 0);
+}
+throw new Error(
+`Config '${configKey}' not found. Check that it exists in your Fallom dashboard.`
+);
+}
+let config;
+let targetVersion;
+if (version !== void 0) {
+config = configData.versions.get(version);
+if (!config) {
+config = await fetchSpecificVersion(configKey, version, SYNC_TIMEOUT) || void 0;
+}
+if (!config) {
+if (fallback) {
+console.warn(
+`[Fallom WARNING] Config '${configKey}' version ${version} not found, using fallback: ${fallback}`
+);
+return returnModel(configKey, sessionId, fallback, 0);
+}
+throw new Error(`Config '${configKey}' version ${version} not found.`);
+}
+targetVersion = version;
+} else {
+targetVersion = configData.latest;
+config = configData.versions.get(targetVersion);
+if (!config) {
+if (fallback) {
+console.warn(
+`[Fallom WARNING] Config '${configKey}' has no cached version, using fallback: ${fallback}`
+);
+return returnModel(configKey, sessionId, fallback, 0);
+}
+throw new Error(`Config '${configKey}' has no cached version.`);
+}
+}
+const variantsRaw = config.variants;
+const configVersion = config.version || targetVersion;
+const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
+log4(
+`Config found! Version: ${configVersion}, Variants: ${JSON.stringify(
+variants
+)}`
 );
+const hashBytes = (0, import_crypto.createHash)("md5").update(sessionId).digest();
+const hashVal = hashBytes.readUInt32BE(0) % 1e6;
+log4(`Session hash: ${hashVal} (out of 1,000,000)`);
+let cumulative = 0;
+let assignedModel = variants[variants.length - 1].model;
+for (const v of variants) {
+const oldCumulative = cumulative;
+cumulative += v.weight * 1e4;
+log4(
+`Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`
+);
+if (hashVal < cumulative) {
+assignedModel = v.model;
+break;
+}
+}
+log4(`\u2705 Assigned model: ${assignedModel}`);
+return returnModel(configKey, sessionId, assignedModel, configVersion);
+} catch (e) {
+if (e instanceof Error && e.message.includes("not found")) {
+throw e;
+}
+if (fallback) {
+console.warn(
+`[Fallom WARNING] Error getting model for '${configKey}': ${e}. Using fallback: ${fallback}`
+);
+return returnModel(configKey, sessionId, fallback, 0);
+}
+throw e;
 }
-
-
-if (
-
-
-);
+}
+function returnModel(configKey, sessionId, model, version) {
+if (version > 0) {
+recordSession(configKey, version, sessionId, model).catch(() => {
+});
 }
-
-const user = replaceVariables(content.userTemplate, variables);
-setPromptContext({
-promptKey,
-promptVersion: targetVersion,
-abTestKey,
-variantIndex: selectedIndex
-});
-log(
-`\u2705 Got prompt from A/B: ${promptKey} v${targetVersion} (variant ${selectedIndex})`
-);
-return {
-key: promptKey,
-version: targetVersion,
-system,
-user,
-abTestKey,
-variantIndex: selectedIndex
-};
+return model;
 }
-function
-
+async function recordSession(configKey, version, sessionId, model) {
+if (!apiKey2) return;
+try {
+const controller = new AbortController();
+const timeoutId = setTimeout(() => controller.abort(), RECORD_TIMEOUT);
+await fetch(`${baseUrl2}/sessions`, {
+method: "POST",
+headers: {
+Authorization: `Bearer ${apiKey2}`,
+"Content-Type": "application/json"
+},
+body: JSON.stringify({
+config_key: configKey,
+config_version: version,
+session_id: sessionId,
+assigned_model: model
+}),
+signal: controller.signal
+});
+clearTimeout(timeoutId);
+} catch {
+}
 }
-var import_crypto,
-var
-"src/
+var import_crypto, apiKey2, baseUrl2, initialized2, syncInterval, debugMode2, configCache, SYNC_TIMEOUT, RECORD_TIMEOUT;
+var init_models = __esm({
+"src/models.ts"() {
 "use strict";
 import_crypto = require("crypto");
-
-
-
+apiKey2 = null;
+baseUrl2 = "https://configs.fallom.com";
+initialized2 = false;
 syncInterval = null;
-
-
-promptABCache = /* @__PURE__ */ new Map();
-promptContext = null;
+debugMode2 = false;
+configCache = /* @__PURE__ */ new Map();
 SYNC_TIMEOUT = 2e3;
+RECORD_TIMEOUT = 1e3;
 }
 });
 
@@ -270,11 +269,13 @@ var init_prompts = __esm({
 var index_exports = {};
 __export(index_exports, {
 FallomExporter: () => FallomExporter,
+FallomSession: () => FallomSession,
 clearMastraPrompt: () => clearMastraPrompt,
 default: () => index_default,
 init: () => init4,
 models: () => models_exports,
 prompts: () => prompts_exports,
+session: () => session,
 setMastraPrompt: () => setMastraPrompt,
 setMastraPromptAB: () => setMastraPromptAB,
 trace: () => trace_exports
@@ -284,19 +285,13 @@ module.exports = __toCommonJS(index_exports);
 // src/trace.ts
 var trace_exports = {};
 __export(trace_exports, {
-
-
-
-
-setSession: () => setSession,
-shutdown: () => shutdown,
-span: () => span,
-wrapAISDK: () => wrapAISDK,
-wrapAnthropic: () => wrapAnthropic,
-wrapGoogleAI: () => wrapGoogleAI,
-wrapMastraAgent: () => wrapMastraAgent,
-wrapOpenAI: () => wrapOpenAI
+FallomSession: () => FallomSession,
+init: () => init,
+session: () => session,
+shutdown: () => shutdown
 });
+
+// src/trace/core.ts
 var import_async_hooks = require("async_hooks");
 var import_sdk_node = require("@opentelemetry/sdk-node");
 var import_exporter_trace_otlp_http = require("@opentelemetry/exporter-trace-otlp-http");
@@ -898,40 +893,39 @@ var Resource = (
 })()
 );
 
-// src/trace.ts
-var
-var
-var
-var
-var
+// src/trace/core.ts
+var traceContextStorage = new import_async_hooks.AsyncLocalStorage();
+var fallbackTraceContext = null;
+var apiKey = null;
+var baseUrl = "https://traces.fallom.com";
+var initialized = false;
 var captureContent = true;
-var
+var debugMode = false;
 var sdk = null;
-function
-if (
+function log(...args) {
+if (debugMode) console.log("[Fallom]", ...args);
+}
+function getTraceContextStorage() {
+return traceContextStorage;
+}
+function getFallbackTraceContext() {
+return fallbackTraceContext;
+}
+function isInitialized() {
+return initialized;
+}
+function shouldCaptureContent() {
+return captureContent;
+}
+function isDebugMode() {
+return debugMode;
 }
 var fallomSpanProcessor = {
-onStart(
-
-const ctx = sessionStorage.getStore() || fallbackSession;
-if (ctx) {
-span2.setAttribute("fallom.config_key", ctx.configKey);
-span2.setAttribute("fallom.session_id", ctx.sessionId);
-if (ctx.customerId) {
-span2.setAttribute("fallom.customer_id", ctx.customerId);
-}
-log2(
-" Added session context:",
-ctx.configKey,
-ctx.sessionId,
-ctx.customerId
-);
-} else {
-log2(" No session context available");
-}
+onStart(span, _parentContext) {
+log("\u{1F4CD} Span started:", span.name || "unknown");
 },
-onEnd(
-
+onEnd(span) {
+log("\u2705 Span ended:", span.name, "duration:", span.duration);
 },
 shutdown() {
 return Promise.resolve();
@@ -940,47 +934,6 @@ var fallomSpanProcessor = {
 return Promise.resolve();
 }
 };
-async function init2(options = {}) {
-if (initialized2) return;
-debugMode2 = options.debug ?? false;
-log2("\u{1F680} Initializing Fallom tracing...");
-apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
-baseUrl2 = options.baseUrl || process.env.FALLOM_TRACES_URL || process.env.FALLOM_BASE_URL || "https://traces.fallom.com";
-const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
-if (envCapture === "false" || envCapture === "0" || envCapture === "no") {
-captureContent = false;
-} else {
-captureContent = options.captureContent ?? true;
-}
-if (!apiKey2) {
-throw new Error(
-"No API key provided. Set FALLOM_API_KEY environment variable or pass apiKey parameter."
-);
-}
-initialized2 = true;
-log2("\u{1F4E1} Exporter URL:", `${baseUrl2}/v1/traces`);
-const exporter = new import_exporter_trace_otlp_http.OTLPTraceExporter({
-url: `${baseUrl2}/v1/traces`,
-headers: {
-Authorization: `Bearer ${apiKey2}`
-}
-});
-const instrumentations = await getInstrumentations();
-log2("\u{1F527} Loaded instrumentations:", instrumentations.length);
-sdk = new import_sdk_node.NodeSDK({
-resource: new Resource({
-"service.name": "fallom-traced-app"
-}),
-traceExporter: exporter,
-spanProcessor: fallomSpanProcessor,
-instrumentations
-});
-sdk.start();
-log2("\u2705 SDK started");
-process.on("SIGTERM", () => {
-sdk?.shutdown().catch(console.error);
-});
-}
 async function getInstrumentations() {
 const instrumentations = [];
 await tryAddInstrumentation(
@@ -1028,82 +981,97 @@ async function tryAddInstrumentation(instrumentations, pkg, className) {
 instrumentations.push(
 new InstrumentationClass({ traceContent: captureContent })
 );
-
+log(` \u2705 Loaded ${pkg}`);
 } else {
-
+log(
 ` \u26A0\uFE0F ${pkg} loaded but ${className} not found. Available:`,
 Object.keys(mod)
 );
-}
-} catch
-
-}
-}
-function setSession(configKey, sessionId, customerId) {
-const store = sessionStorage.getStore();
-if (store) {
-store.configKey = configKey;
-store.sessionId = sessionId;
-store.customerId = customerId;
-}
-fallbackSession = { configKey, sessionId, customerId };
-}
-function runWithSession(configKey, sessionId, customerIdOrFn, fn) {
-if (typeof customerIdOrFn === "function") {
-return sessionStorage.run({ configKey, sessionId }, customerIdOrFn);
+}
+} catch {
+log(` \u274C ${pkg} not installed`);
 }
-return sessionStorage.run(
-{ configKey, sessionId, customerId: customerIdOrFn },
-fn
-);
-}
-function getSession() {
-return sessionStorage.getStore() || fallbackSession || void 0;
 }
-function
-
-
-
-
-
+async function init(options = {}) {
+if (initialized) return;
+debugMode = options.debug ?? false;
+log("\u{1F680} Initializing Fallom tracing...");
+apiKey = options.apiKey || process.env.FALLOM_API_KEY || null;
+baseUrl = options.baseUrl || process.env.FALLOM_TRACES_URL || process.env.FALLOM_BASE_URL || "https://traces.fallom.com";
+const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
+if (envCapture === "false" || envCapture === "0" || envCapture === "no") {
+captureContent = false;
+} else {
+captureContent = options.captureContent ?? true;
 }
-
-const configKey = options.configKey || ctx?.configKey;
-const sessionId = options.sessionId || ctx?.sessionId;
-if (!configKey || !sessionId) {
+if (!apiKey) {
 throw new Error(
-"No
+"No API key provided. Set FALLOM_API_KEY environment variable or pass apiKey parameter."
 );
 }
-
+initialized = true;
+log("\u{1F4E1} Exporter URL:", `${baseUrl}/v1/traces`);
+const exporter = new import_exporter_trace_otlp_http.OTLPTraceExporter({
+url: `${baseUrl}/v1/traces`,
+headers: {
+Authorization: `Bearer ${apiKey}`
+}
+});
+const instrumentations = await getInstrumentations();
+log("\u{1F527} Loaded instrumentations:", instrumentations.length);
+sdk = new import_sdk_node.NodeSDK({
+resource: new Resource({
+"service.name": "fallom-traced-app"
+}),
+traceExporter: exporter,
+spanProcessor: fallomSpanProcessor,
+instrumentations
+});
+sdk.start();
+log("\u2705 SDK started");
+process.on("SIGTERM", () => {
+sdk?.shutdown().catch(console.error);
 });
 }
-async function
+async function shutdown() {
+if (sdk) {
+await sdk.shutdown();
+initialized = false;
+}
+}
+async function sendTrace(trace) {
+const url = `${baseUrl}/v1/traces`;
+log("\u{1F4E4} Sending trace to:", url);
+log(" Session:", trace.session_id, "Config:", trace.config_key);
 try {
 const controller = new AbortController();
 const timeoutId = setTimeout(() => controller.abort(), 5e3);
-await fetch(
+const response = await fetch(url, {
 method: "POST",
 headers: {
-Authorization: `Bearer ${
+Authorization: `Bearer ${apiKey}`,
 "Content-Type": "application/json"
 },
-body: JSON.stringify(
-config_key: configKey,
-session_id: sessionId,
-data
-}),
+body: JSON.stringify(trace),
 signal: controller.signal
 });
 clearTimeout(timeoutId);
-
+if (!response.ok) {
+const text = await response.text();
+log("\u274C Trace send failed:", response.status, text);
+} else {
+log("\u2705 Trace sent:", trace.name, trace.model);
+}
+} catch (err) {
+log("\u274C Trace send error:", err instanceof Error ? err.message : err);
 }
 }
-
-
-
-
-
+
+// src/trace/utils.ts
+function generateHexId(length) {
+const bytes = new Uint8Array(length / 2);
+crypto.getRandomValues(bytes);
+return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join("");
 }
 function messagesToOtelAttributes(messages, completion, model, responseId) {
 const attrs = {};
@@ -1131,70 +1099,39 @@ function messagesToOtelAttributes(messages, completion, model, responseId) {
 }
 return attrs;
 }
-
-
-
-return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join("");
-}
-var traceContextStorage = new import_async_hooks.AsyncLocalStorage();
-var fallbackTraceContext = null;
-async function sendTrace(trace) {
-const url = `${baseUrl2}/v1/traces`;
-log2("\u{1F4E4} Sending trace to:", url);
-log2(" Session:", trace.session_id, "Config:", trace.config_key);
-try {
-const controller = new AbortController();
-const timeoutId = setTimeout(() => controller.abort(), 5e3);
-const response = await fetch(url, {
-method: "POST",
-headers: {
-Authorization: `Bearer ${apiKey2}`,
-"Content-Type": "application/json"
-},
-body: JSON.stringify(trace),
-signal: controller.signal
-});
-clearTimeout(timeoutId);
-if (!response.ok) {
-const text = await response.text();
-log2("\u274C Trace send failed:", response.status, text);
-} else {
-log2("\u2705 Trace sent:", trace.name, trace.model);
-}
-} catch (err) {
-log2("\u274C Trace send error:", err instanceof Error ? err.message : err);
-}
-}
-function wrapOpenAI(client) {
+
+// src/trace/wrappers/openai.ts
+function wrapOpenAI(client, sessionCtx) {
 const originalCreate = client.chat.completions.create.bind(
 client.chat.completions
 );
+const ctx = sessionCtx;
 client.chat.completions.create = async function(...args) {
-
-if (!ctx || !initialized2) {
+if (!isInitialized()) {
 return originalCreate(...args);
 }
-
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
-const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 try {
 const response = await originalCreate(...args);
 const endTime = Date.now();
-const attributes =
+const attributes = captureContent2 ? messagesToOtelAttributes(
 params?.messages,
 response?.choices?.[0]?.message,
 response?.model || params?.model,
 response?.id
-) :
+) : {};
+if (response?.usage) {
+attributes["fallom.raw.usage"] = JSON.stringify(response.usage);
+}
+if (response?.choices?.[0]?.finish_reason) {
+attributes["gen_ai.response.finish_reason"] = response.choices[0].finish_reason;
+}
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1212,17 +1149,13 @@ function wrapOpenAI(client) {
 prompt_tokens: response?.usage?.prompt_tokens,
 completion_tokens: response?.usage?.completion_tokens,
 total_tokens: response?.usage?.total_tokens,
-attributes
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes: Object.keys(attributes).length > 0 ? attributes : void 0
 }).catch(() => {
 });
 return response;
 } catch (error) {
 const endTime = Date.now();
-const attributes =
+const attributes = captureContent2 ? messagesToOtelAttributes(
 params?.messages,
 void 0,
 params?.model,
@@ -1246,11 +1179,7 @@ function wrapOpenAI(client) {
 duration_ms: endTime - startTime,
 status: "ERROR",
 error_message: error?.message,
-attributes
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes
 }).catch(() => {
 });
 throw error;
@@ -1258,37 +1187,40 @@ function wrapOpenAI(client) {
 };
 return client;
 }
-
+
+// src/trace/wrappers/anthropic.ts
+function wrapAnthropic(client, sessionCtx) {
 const originalCreate = client.messages.create.bind(client.messages);
+const ctx = sessionCtx;
 client.messages.create = async function(...args) {
-
-if (!ctx || !initialized2) {
+if (!isInitialized()) {
 return originalCreate(...args);
 }
-
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
-const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 try {
 const response = await originalCreate(...args);
 const endTime = Date.now();
-const attributes =
+const attributes = captureContent2 ? messagesToOtelAttributes(
 params?.messages,
 { role: "assistant", content: response?.content?.[0]?.text || "" },
 response?.model || params?.model,
 response?.id
-) :
-if (
+) : {};
+if (params?.system) {
 attributes["gen_ai.system_prompt"] = params.system;
 }
+if (response?.usage) {
+attributes["fallom.raw.usage"] = JSON.stringify(response.usage);
+}
+if (response?.stop_reason) {
+attributes["gen_ai.response.finish_reason"] = response.stop_reason;
+}
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1306,17 +1238,13 @@ function wrapAnthropic(client) {
 prompt_tokens: response?.usage?.input_tokens,
 completion_tokens: response?.usage?.output_tokens,
 total_tokens: (response?.usage?.input_tokens || 0) + (response?.usage?.output_tokens || 0),
-attributes
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes: Object.keys(attributes).length > 0 ? attributes : void 0
 }).catch(() => {
 });
 return response;
 } catch (error) {
 const endTime = Date.now();
-const attributes =
+const attributes = captureContent2 ? messagesToOtelAttributes(
 params?.messages,
 void 0,
 params?.model,
@@ -1343,11 +1271,7 @@ function wrapAnthropic(client) {
 duration_ms: endTime - startTime,
 status: "ERROR",
 error_message: error?.message,
-attributes
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes
 }).catch(() => {
 });
 throw error;
@@ -1355,24 +1279,21 @@ function wrapAnthropic(client) {
 };
 return client;
 }
-
+
+// src/trace/wrappers/google-ai.ts
+function wrapGoogleAI(model, sessionCtx) {
 const originalGenerate = model.generateContent.bind(model);
+const ctx = sessionCtx;
 model.generateContent = async function(...args) {
-
-if (!ctx || !initialized2) {
+if (!isInitialized()) {
 return originalGenerate(...args);
 }
-
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
-const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 try {
 const response = await originalGenerate(...args);
 const endTime = Date.now();
@@ -1380,7 +1301,7 @@ function wrapGoogleAI(model) {
 const usage = result?.usageMetadata;
 const modelName = model?.model || "gemini";
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelName;
 attributes["gen_ai.response.model"] = modelName;
 const input = args[0];
@@ -1399,6 +1320,13 @@ function wrapGoogleAI(model) {
 attributes["gen_ai.completion.0.content"] = outputText;
 }
 }
+if (usage) {
+attributes["fallom.raw.usage"] = JSON.stringify(usage);
+}
+const candidate = result?.candidates?.[0];
+if (candidate?.finishReason) {
+attributes["gen_ai.response.finish_reason"] = candidate.finishReason;
+}
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1416,11 +1344,7 @@ function wrapGoogleAI(model) {
 prompt_tokens: usage?.promptTokenCount,
 completion_tokens: usage?.candidatesTokenCount,
 total_tokens: usage?.totalTokenCount,
-attributes:
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes: Object.keys(attributes).length > 0 ? attributes : void 0
 }).catch(() => {
 });
 return response;
@@ -1428,7 +1352,7 @@ function wrapGoogleAI(model) {
 const endTime = Date.now();
 const modelName = model?.model || "gemini";
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelName;
 attributes["error.message"] = error?.message;
 const input = args[0];
@@ -1452,11 +1376,7 @@ function wrapGoogleAI(model) {
 duration_ms: endTime - startTime,
 status: "ERROR",
 error_message: error?.message,
-attributes:
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes: captureContent2 ? attributes : void 0
 }).catch(() => {
 });
 throw error;
@@ -1464,39 +1384,70 @@ function wrapGoogleAI(model) {
 };
 return model;
 }
-
-
-
-
-
-
-
-
+
+// src/trace/wrappers/vercel-ai/utils.ts
+function extractUsageFromResult(result, directUsage) {
+let usage = directUsage ?? result?.usage;
+const isValidNumber = (v) => v !== null && v !== void 0 && !Number.isNaN(v);
+let promptTokens = isValidNumber(usage?.promptTokens) ? usage.promptTokens : void 0;
+let completionTokens = isValidNumber(usage?.completionTokens) ? usage.completionTokens : void 0;
+let totalTokens = isValidNumber(usage?.totalTokens) ? usage.totalTokens : void 0;
+let cost;
+const orUsage = result?.experimental_providerMetadata?.openrouter?.usage;
+if (orUsage) {
+if (promptTokens === void 0 && isValidNumber(orUsage.promptTokens)) {
+promptTokens = orUsage.promptTokens;
+}
+if (completionTokens === void 0 && isValidNumber(orUsage.completionTokens)) {
+completionTokens = orUsage.completionTokens;
+}
+if (totalTokens === void 0 && isValidNumber(orUsage.totalTokens)) {
+totalTokens = orUsage.totalTokens;
+}
+if (isValidNumber(orUsage.cost)) {
+cost = orUsage.cost;
+}
+}
+if (totalTokens === void 0 && (promptTokens !== void 0 || completionTokens !== void 0)) {
+totalTokens = (promptTokens ?? 0) + (completionTokens ?? 0);
+}
+return { promptTokens, completionTokens, totalTokens, cost };
 }
-
+
+// src/trace/wrappers/vercel-ai/generate-text.ts
+function createGenerateTextWrapper(aiModule, sessionCtx, debug = false) {
+const ctx = sessionCtx;
 return async (...args) => {
-
-if (!ctx || !initialized2) {
+if (!isInitialized()) {
 return aiModule.generateText(...args);
 }
-
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
-const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 try {
 const result = await aiModule.generateText(...args);
 const endTime = Date.now();
+if (debug || isDebugMode()) {
+console.log(
+"\n\u{1F50D} [Fallom Debug] generateText result keys:",
+Object.keys(result || {})
+);
+console.log(
+"\u{1F50D} [Fallom Debug] result.usage:",
+JSON.stringify(result?.usage, null, 2)
+);
+console.log(
+"\u{1F50D} [Fallom Debug] result.experimental_providerMetadata:",
+JSON.stringify(result?.experimental_providerMetadata, null, 2)
+);
+}
 const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelId;
 attributes["gen_ai.response.model"] = modelId;
 if (params?.prompt) {
@@ -1517,6 +1468,18 @@ function createGenerateTextWrapper(aiModule) {
 attributes["gen_ai.response.id"] = result.response.id;
 }
 }
+if (result?.usage) {
+attributes["fallom.raw.usage"] = JSON.stringify(result.usage);
+}
+if (result?.experimental_providerMetadata) {
+attributes["fallom.raw.providerMetadata"] = JSON.stringify(
+result.experimental_providerMetadata
+);
+}
+if (result?.finishReason) {
+attributes["gen_ai.response.finish_reason"] = result.finishReason;
+}
+const usage = extractUsageFromResult(result);
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1531,14 +1494,10 @@ function createGenerateTextWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "OK",
-prompt_tokens:
-completion_tokens:
-total_tokens:
-attributes:
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+prompt_tokens: usage.promptTokens,
+completion_tokens: usage.completionTokens,
+total_tokens: usage.totalTokens,
+attributes: captureContent2 ? attributes : void 0
 }).catch(() => {
 });
 return result;
@@ -1559,44 +1518,58 @@ function createGenerateTextWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "ERROR",
-error_message: error?.message
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+error_message: error?.message
 }).catch(() => {
 });
 throw error;
 }
 };
 }
-
+
+// src/trace/wrappers/vercel-ai/stream-text.ts
+function log2(...args) {
+if (isDebugMode()) console.log("[Fallom]", ...args);
+}
+function createStreamTextWrapper(aiModule, sessionCtx, debug = false) {
+const ctx = sessionCtx;
 return async (...args) => {
-const ctx = sessionStorage.getStore() || fallbackSession;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 const result = await aiModule.streamText(...args);
-if (!
+if (!isInitialized()) {
 return result;
 }
-const traceCtx =
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 let firstTokenTime = null;
 const modelId = params?.model?.modelId || String(params?.model || "unknown");
-let promptCtx = null;
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
 if (result?.usage) {
-result.usage.then((
+result.usage.then(async (rawUsage) => {
 const endTime = Date.now();
-
+if (debug || isDebugMode()) {
+console.log(
+"\n\u{1F50D} [Fallom Debug] streamText usage:",
+JSON.stringify(rawUsage, null, 2)
+);
+}
+log2("\u{1F4CA} streamText usage:", JSON.stringify(rawUsage, null, 2));
+let providerMetadata = result?.experimental_providerMetadata;
+if (providerMetadata && typeof providerMetadata.then === "function") {
+try {
+providerMetadata = await providerMetadata;
+} catch {
+providerMetadata = void 0;
+}
+}
+const usage = extractUsageFromResult(
+{ experimental_providerMetadata: providerMetadata },
+rawUsage
+);
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelId;
 if (params?.prompt) {
 attributes["gen_ai.prompt.0.role"] = "user";
@@ -1606,6 +1579,12 @@ function createStreamTextWrapper(aiModule) {
 if (firstTokenTime) {
 attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
 }
+if (rawUsage) {
+attributes["fallom.raw.usage"] = JSON.stringify(rawUsage);
+}
+if (providerMetadata) {
+attributes["fallom.raw.providerMetadata"] = JSON.stringify(providerMetadata);
+}
 const tracePayload = {
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1620,15 +1599,11 @@ function createStreamTextWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "OK",
-prompt_tokens: usage
-completion_tokens: usage
-total_tokens: usage
+prompt_tokens: usage.promptTokens,
+completion_tokens: usage.completionTokens,
+total_tokens: usage.totalTokens,
 time_to_first_token_ms: firstTokenTime ? firstTokenTime - startTime : void 0,
-attributes:
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+attributes: captureContent2 ? attributes : void 0
 };
 sendTrace(tracePayload).catch(() => {
 });
@@ -1649,11 +1624,7 @@ function createStreamTextWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "ERROR",
-error_message: error?.message
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+error_message: error?.message
 }).catch(() => {
 });
 });
@@ -1681,30 +1652,37 @@ function createStreamTextWrapper(aiModule) {
 return result;
 };
 }
-
+
+// src/trace/wrappers/vercel-ai/generate-object.ts
+function createGenerateObjectWrapper(aiModule, sessionCtx, debug = false) {
+const ctx = sessionCtx;
 return async (...args) => {
-
-if (!ctx || !initialized2) {
+if (!isInitialized()) {
 return aiModule.generateObject(...args);
 }
-
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
-const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 try {
 const result = await aiModule.generateObject(...args);
 const endTime = Date.now();
+if (debug || isDebugMode()) {
+console.log(
+"\n\u{1F50D} [Fallom Debug] generateObject result keys:",
+Object.keys(result || {})
+);
+console.log(
+"\u{1F50D} [Fallom Debug] result.usage:",
+JSON.stringify(result?.usage, null, 2)
+);
+}
 const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelId;
 attributes["gen_ai.response.model"] = modelId;
 if (result?.object) {
@@ -1714,6 +1692,18 @@ function createGenerateObjectWrapper(aiModule) {
 );
 }
 }
+if (result?.usage) {
+attributes["fallom.raw.usage"] = JSON.stringify(result.usage);
+}
+if (result?.experimental_providerMetadata) {
+attributes["fallom.raw.providerMetadata"] = JSON.stringify(
+result.experimental_providerMetadata
+);
+}
+if (result?.finishReason) {
+attributes["gen_ai.response.finish_reason"] = result.finishReason;
+}
+const usage = extractUsageFromResult(result);
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1728,14 +1718,10 @@ function createGenerateObjectWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "OK",
-prompt_tokens:
-completion_tokens:
-total_tokens:
-attributes:
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+prompt_tokens: usage.promptTokens,
+completion_tokens: usage.completionTokens,
+total_tokens: usage.totalTokens,
+attributes: captureContent2 ? attributes : void 0
 }).catch(() => {
 });
 return result;
@@ -1756,50 +1742,70 @@ function createGenerateObjectWrapper(aiModule) {
 end_time: new Date(endTime).toISOString(),
 duration_ms: endTime - startTime,
 status: "ERROR",
-error_message: error?.message
-prompt_key: promptCtx?.promptKey,
-prompt_version: promptCtx?.promptVersion,
-prompt_ab_test_key: promptCtx?.abTestKey,
-prompt_variant_index: promptCtx?.variantIndex
+error_message: error?.message
 }).catch(() => {
 });
 throw error;
 }
 };
 }
-
+
+// src/trace/wrappers/vercel-ai/stream-object.ts
+function log3(...args) {
+if (isDebugMode()) console.log("[Fallom]", ...args);
+}
+function createStreamObjectWrapper(aiModule, sessionCtx, debug = false) {
+const ctx = sessionCtx;
 return async (...args) => {
-const ctx = sessionStorage.getStore() || fallbackSession;
 const params = args[0] || {};
 const startTime = Date.now();
+const captureContent2 = shouldCaptureContent();
 const result = await aiModule.streamObject(...args);
-
-if (!
+log3("\u{1F50D} streamObject result keys:", Object.keys(result || {}));
+if (!isInitialized()) {
 return result;
 }
-const traceCtx =
+const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
 const traceId = traceCtx?.traceId || generateHexId(32);
 const spanId = generateHexId(16);
 const parentSpanId = traceCtx?.parentSpanId;
 let firstTokenTime = null;
 const modelId = params?.model?.modelId || String(params?.model || "unknown");
-let promptCtx = null;
-try {
-const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
-promptCtx = getPromptContext2();
-} catch {
-}
 if (result?.usage) {
-result.usage.then((
+result.usage.then(async (rawUsage) => {
 const endTime = Date.now();
-
+if (debug || isDebugMode()) {
+console.log(
+"\n\u{1F50D} [Fallom Debug] streamObject usage:",
+JSON.stringify(rawUsage, null, 2)
+);
+}
+log3("\u{1F4CA} streamObject usage:", JSON.stringify(rawUsage, null, 2));
+let providerMetadata = result?.experimental_providerMetadata;
+if (providerMetadata && typeof providerMetadata.then === "function") {
+try {
+providerMetadata = await providerMetadata;
+} catch {
+providerMetadata = void 0;
+}
+}
+const usage = extractUsageFromResult(
+{ experimental_providerMetadata: providerMetadata },
+rawUsage
+);
 const attributes = {};
-if (
+if (captureContent2) {
 attributes["gen_ai.request.model"] = modelId;
 }
 if (firstTokenTime) {
 attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
 }
+if (rawUsage) {
+attributes["fallom.raw.usage"] = JSON.stringify(rawUsage);
+}
+if (providerMetadata) {
+attributes["fallom.raw.providerMetadata"] = JSON.stringify(providerMetadata);
+}
 sendTrace({
 config_key: ctx.configKey,
 session_id: ctx.sessionId,
@@ -1814,14 +1820,10 @@ function createStreamObjectWrapper(aiModule) {
|
|
|
1814
1820
|
end_time: new Date(endTime).toISOString(),
|
|
1815
1821
|
duration_ms: endTime - startTime,
|
|
1816
1822
|
status: "OK",
|
|
1817
|
-
prompt_tokens: usage
|
|
1818
|
-
completion_tokens: usage
|
|
1819
|
-
total_tokens: usage
|
|
1820
|
-
attributes:
|
|
1821
|
-
prompt_key: promptCtx?.promptKey,
|
|
1822
|
-
prompt_version: promptCtx?.promptVersion,
|
|
1823
|
-
prompt_ab_test_key: promptCtx?.abTestKey,
|
|
1824
|
-
prompt_variant_index: promptCtx?.variantIndex
|
|
1823
|
+
prompt_tokens: usage.promptTokens,
|
|
1824
|
+
completion_tokens: usage.completionTokens,
|
|
1825
|
+
total_tokens: usage.totalTokens,
|
|
1826
|
+
attributes: captureContent2 ? attributes : void 0
|
|
1825
1827
|
}).catch(() => {
|
|
1826
1828
|
});
|
|
1827
1829
|
}).catch((error) => {
|
|
@@ -1840,11 +1842,7 @@ function createStreamObjectWrapper(aiModule) {
|
|
|
1840
1842
|
end_time: new Date(endTime).toISOString(),
|
|
1841
1843
|
duration_ms: endTime - startTime,
|
|
1842
1844
|
status: "ERROR",
|
|
1843
|
-
error_message: error?.message
|
|
1844
|
-
prompt_key: promptCtx?.promptKey,
|
|
1845
|
-
prompt_version: promptCtx?.promptVersion,
|
|
1846
|
-
prompt_ab_test_key: promptCtx?.abTestKey,
|
|
1847
|
-
prompt_variant_index: promptCtx?.variantIndex
|
|
1845
|
+
error_message: error?.message
|
|
1848
1846
|
}).catch(() => {
|
|
1849
1847
|
});
|
|
1850
1848
|
});
|
|
@@ -1855,7 +1853,7 @@ function createStreamObjectWrapper(aiModule) {
|
|
|
1855
1853
|
for await (const chunk of originalStream) {
|
|
1856
1854
|
if (!firstTokenTime) {
|
|
1857
1855
|
firstTokenTime = Date.now();
|
|
1858
|
-
|
|
1856
|
+
log3("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
|
|
1859
1857
|
}
|
|
1860
1858
|
yield chunk;
|
|
1861
1859
|
}
|
|
@@ -1872,20 +1870,27 @@ function createStreamObjectWrapper(aiModule) {
|
|
|
1872
1870
|
return result;
|
|
1873
1871
|
};
|
|
1874
1872
|
}
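The streamObject wrapper above leans on the Vercel AI SDK resolving usage lazily: result.usage is a promise that settles once the stream completes, and experimental_providerMetadata may itself be a promise, so the wrapper awaits both before handing them to extractUsageFromResult. A minimal consumer-side sketch of that shape (illustrative only; myModel and mySchema are placeholders, not part of this package):

import { streamObject } from "ai";

const result = await streamObject({ model: myModel, schema: mySchema, prompt: "..." });
for await (const part of result.partialObjectStream) {
  // partial objects arrive here while the model streams
}
const usage = await result.usage;                        // settles after the last chunk
const meta = await result.experimental_providerMetadata; // may also be a promise
console.log(usage?.totalTokens, meta);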
|
|
1875
|
-
|
|
1873
|
+
|
|
1874
|
+
// src/trace/wrappers/vercel-ai/index.ts
|
|
1875
|
+
function wrapAISDK(ai, sessionCtx, options) {
|
|
1876
|
+
const debug = options?.debug ?? false;
|
|
1877
|
+
return {
|
|
1878
|
+
generateText: createGenerateTextWrapper(ai, sessionCtx, debug),
|
|
1879
|
+
streamText: createStreamTextWrapper(ai, sessionCtx, debug),
|
|
1880
|
+
generateObject: ai.generateObject ? createGenerateObjectWrapper(ai, sessionCtx, debug) : void 0,
|
|
1881
|
+
streamObject: ai.streamObject ? createStreamObjectWrapper(ai, sessionCtx, debug) : void 0
|
|
1882
|
+
};
|
|
1883
|
+
}
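wrapAISDK now takes the session context plus an optional { debug } flag, and only returns generateObject / streamObject wrappers when the installed ai module actually exposes those functions. A minimal sketch of calling it through a session (the "@fallom/trace" import specifier and myModel are assumptions; the method names come from this diff):

import * as ai from "ai";
import { session } from "@fallom/trace";

const s = session({ configKey: "my-config", sessionId: "session-123" });
const { generateText, generateObject } = s.wrapAISDK(ai, { debug: true });

// generateObject / streamObject are undefined on older "ai" versions that lack them.
const { text } = await generateText({ model: myModel, prompt: "Hello!" }); // myModel: placeholder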
|
|
1884
|
+
|
|
1885
|
+
// src/trace/wrappers/mastra.ts
|
|
1886
|
+
function wrapMastraAgent(agent, sessionCtx) {
|
|
1876
1887
|
const originalGenerate = agent.generate.bind(agent);
|
|
1877
1888
|
const agentName = agent.name || "MastraAgent";
|
|
1889
|
+
const ctx = sessionCtx;
|
|
1878
1890
|
agent.generate = async function(...args) {
|
|
1879
|
-
|
|
1880
|
-
if (!ctx || !initialized2) {
|
|
1891
|
+
if (!isInitialized()) {
|
|
1881
1892
|
return originalGenerate(...args);
|
|
1882
1893
|
}
|
|
1883
|
-
let promptCtx = null;
|
|
1884
|
-
try {
|
|
1885
|
-
const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
|
|
1886
|
-
promptCtx = getPromptContext2();
|
|
1887
|
-
} catch {
|
|
1888
|
-
}
|
|
1889
1894
|
const traceId = generateHexId(32);
|
|
1890
1895
|
const spanId = generateHexId(16);
|
|
1891
1896
|
const startTime = Date.now();
|
|
@@ -1957,11 +1962,7 @@ function wrapMastraAgent(agent) {
|
|
|
1957
1962
|
prompt_tokens: result?.usage?.promptTokens,
|
|
1958
1963
|
completion_tokens: result?.usage?.completionTokens,
|
|
1959
1964
|
total_tokens: result?.usage?.totalTokens,
|
|
1960
|
-
attributes
|
|
1961
|
-
prompt_key: promptCtx?.promptKey,
|
|
1962
|
-
prompt_version: promptCtx?.promptVersion,
|
|
1963
|
-
prompt_ab_test_key: promptCtx?.abTestKey,
|
|
1964
|
-
prompt_variant_index: promptCtx?.variantIndex
|
|
1965
|
+
attributes
|
|
1965
1966
|
};
|
|
1966
1967
|
sendTrace(traceData).catch(() => {
|
|
1967
1968
|
});
|
|
@@ -1980,11 +1981,7 @@ function wrapMastraAgent(agent) {
|
|
|
1980
1981
|
end_time: new Date(endTime).toISOString(),
|
|
1981
1982
|
duration_ms: endTime - startTime,
|
|
1982
1983
|
status: "ERROR",
|
|
1983
|
-
error_message: error instanceof Error ? error.message : String(error)
|
|
1984
|
-
prompt_key: promptCtx?.promptKey,
|
|
1985
|
-
prompt_version: promptCtx?.promptVersion,
|
|
1986
|
-
prompt_ab_test_key: promptCtx?.abTestKey,
|
|
1987
|
-
prompt_variant_index: promptCtx?.variantIndex
|
|
1984
|
+
error_message: error instanceof Error ? error.message : String(error)
|
|
1988
1985
|
};
|
|
1989
1986
|
sendTrace(traceData).catch(() => {
|
|
1990
1987
|
});
|
|
@@ -1994,38 +1991,216 @@ function wrapMastraAgent(agent) {
|
|
|
1994
1991
|
return agent;
|
|
1995
1992
|
}
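wrapMastraAgent now receives the session context explicitly and patches agent.generate so each call emits one trace span, falling straight through to the original method when tracing is not initialized. A sketch, assuming an existing Mastra Agent instance named myMastraAgent:

import { session } from "@fallom/trace";

const s = session({ configKey: "support-bot", sessionId: "session-123" });
const agent = s.wrapMastraAgent(myMastraAgent); // patches generate() in place and returns the agent
const reply = await agent.generate("How do I reset my password?");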
|
|
1996
1993
|
|
|
1997
|
-
// src/
|
|
1998
|
-
var
|
|
1999
|
-
|
|
1994
|
+
// src/trace/session.ts
|
|
1995
|
+
var FallomSession = class {
|
|
1996
|
+
constructor(options) {
|
|
1997
|
+
this.ctx = {
|
|
1998
|
+
configKey: options.configKey,
|
|
1999
|
+
sessionId: options.sessionId,
|
|
2000
|
+
customerId: options.customerId
|
|
2001
|
+
};
|
|
2002
|
+
}
|
|
2003
|
+
/** Get the session context. */
|
|
2004
|
+
getContext() {
|
|
2005
|
+
return { ...this.ctx };
|
|
2006
|
+
}
|
|
2007
|
+
/**
|
|
2008
|
+
* Get model assignment for this session (A/B testing).
|
|
2009
|
+
*/
|
|
2010
|
+
async getModel(configKeyOrOptions, options) {
|
|
2011
|
+
let configKey;
|
|
2012
|
+
let opts;
|
|
2013
|
+
if (typeof configKeyOrOptions === "string") {
|
|
2014
|
+
configKey = configKeyOrOptions;
|
|
2015
|
+
opts = options || {};
|
|
2016
|
+
} else {
|
|
2017
|
+
configKey = this.ctx.configKey;
|
|
2018
|
+
opts = configKeyOrOptions || {};
|
|
2019
|
+
}
|
|
2020
|
+
const { get: get3 } = await Promise.resolve().then(() => (init_models(), models_exports));
|
|
2021
|
+
return get3(configKey, this.ctx.sessionId, opts);
|
|
2022
|
+
}
|
|
2023
|
+
/**
|
|
2024
|
+
* Wrap a Vercel AI SDK model to trace all calls.
|
|
2025
|
+
*/
|
|
2026
|
+
traceModel(model) {
|
|
2027
|
+
const ctx = this.ctx;
|
|
2028
|
+
const tracedModel = Object.create(model);
|
|
2029
|
+
if (model.doGenerate) {
|
|
2030
|
+
const originalDoGenerate = model.doGenerate.bind(model);
|
|
2031
|
+
tracedModel.doGenerate = async function(...args) {
|
|
2032
|
+
if (!isInitialized()) return originalDoGenerate(...args);
|
|
2033
|
+
const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
|
|
2034
|
+
const traceId = traceCtx?.traceId || generateHexId(32);
|
|
2035
|
+
const spanId = generateHexId(16);
|
|
2036
|
+
const startTime = Date.now();
|
|
2037
|
+
try {
|
|
2038
|
+
const result = await originalDoGenerate(...args);
|
|
2039
|
+
const endTime = Date.now();
|
|
2040
|
+
const modelId = model.modelId || "unknown";
|
|
2041
|
+
const usage = result?.usage || result?.rawResponse?.usage;
|
|
2042
|
+
sendTrace({
|
|
2043
|
+
config_key: ctx.configKey,
|
|
2044
|
+
session_id: ctx.sessionId,
|
|
2045
|
+
customer_id: ctx.customerId,
|
|
2046
|
+
trace_id: traceId,
|
|
2047
|
+
span_id: spanId,
|
|
2048
|
+
parent_span_id: traceCtx?.parentSpanId,
|
|
2049
|
+
name: "generateText",
|
|
2050
|
+
kind: "llm",
|
|
2051
|
+
model: modelId,
|
|
2052
|
+
start_time: new Date(startTime).toISOString(),
|
|
2053
|
+
end_time: new Date(endTime).toISOString(),
|
|
2054
|
+
duration_ms: endTime - startTime,
|
|
2055
|
+
status: "OK",
|
|
2056
|
+
prompt_tokens: usage?.promptTokens,
|
|
2057
|
+
completion_tokens: usage?.completionTokens,
|
|
2058
|
+
total_tokens: usage?.totalTokens,
|
|
2059
|
+
attributes: shouldCaptureContent() && usage ? { "fallom.raw.usage": JSON.stringify(usage) } : void 0
|
|
2060
|
+
}).catch(() => {
|
|
2061
|
+
});
|
|
2062
|
+
return result;
|
|
2063
|
+
} catch (error) {
|
|
2064
|
+
const endTime = Date.now();
|
|
2065
|
+
sendTrace({
|
|
2066
|
+
config_key: ctx.configKey,
|
|
2067
|
+
session_id: ctx.sessionId,
|
|
2068
|
+
customer_id: ctx.customerId,
|
|
2069
|
+
trace_id: traceId,
|
|
2070
|
+
span_id: spanId,
|
|
2071
|
+
parent_span_id: traceCtx?.parentSpanId,
|
|
2072
|
+
name: "generateText",
|
|
2073
|
+
kind: "llm",
|
|
2074
|
+
model: model.modelId || "unknown",
|
|
2075
|
+
start_time: new Date(startTime).toISOString(),
|
|
2076
|
+
end_time: new Date(endTime).toISOString(),
|
|
2077
|
+
duration_ms: endTime - startTime,
|
|
2078
|
+
status: "ERROR",
|
|
2079
|
+
error_message: error instanceof Error ? error.message : String(error)
|
|
2080
|
+
}).catch(() => {
|
|
2081
|
+
});
|
|
2082
|
+
throw error;
|
|
2083
|
+
}
|
|
2084
|
+
};
|
|
2085
|
+
}
|
|
2086
|
+
if (model.doStream) {
|
|
2087
|
+
const originalDoStream = model.doStream.bind(model);
|
|
2088
|
+
tracedModel.doStream = async function(...args) {
|
|
2089
|
+
if (!isInitialized()) return originalDoStream(...args);
|
|
2090
|
+
const traceCtx = getTraceContextStorage().getStore() || getFallbackTraceContext();
|
|
2091
|
+
const traceId = traceCtx?.traceId || generateHexId(32);
|
|
2092
|
+
const spanId = generateHexId(16);
|
|
2093
|
+
const startTime = Date.now();
|
|
2094
|
+
const modelId = model.modelId || "unknown";
|
|
2095
|
+
try {
|
|
2096
|
+
const result = await originalDoStream(...args);
|
|
2097
|
+
sendTrace({
|
|
2098
|
+
config_key: ctx.configKey,
|
|
2099
|
+
session_id: ctx.sessionId,
|
|
2100
|
+
customer_id: ctx.customerId,
|
|
2101
|
+
trace_id: traceId,
|
|
2102
|
+
span_id: spanId,
|
|
2103
|
+
parent_span_id: traceCtx?.parentSpanId,
|
|
2104
|
+
name: "streamText",
|
|
2105
|
+
kind: "llm",
|
|
2106
|
+
model: modelId,
|
|
2107
|
+
start_time: new Date(startTime).toISOString(),
|
|
2108
|
+
end_time: new Date(Date.now()).toISOString(),
|
|
2109
|
+
duration_ms: Date.now() - startTime,
|
|
2110
|
+
status: "OK",
|
|
2111
|
+
is_streaming: true
|
|
2112
|
+
}).catch(() => {
|
|
2113
|
+
});
|
|
2114
|
+
return result;
|
|
2115
|
+
} catch (error) {
|
|
2116
|
+
sendTrace({
|
|
2117
|
+
config_key: ctx.configKey,
|
|
2118
|
+
session_id: ctx.sessionId,
|
|
2119
|
+
customer_id: ctx.customerId,
|
|
2120
|
+
trace_id: traceId,
|
|
2121
|
+
span_id: spanId,
|
|
2122
|
+
parent_span_id: traceCtx?.parentSpanId,
|
|
2123
|
+
name: "streamText",
|
|
2124
|
+
kind: "llm",
|
|
2125
|
+
model: modelId,
|
|
2126
|
+
start_time: new Date(startTime).toISOString(),
|
|
2127
|
+
end_time: new Date(Date.now()).toISOString(),
|
|
2128
|
+
duration_ms: Date.now() - startTime,
|
|
2129
|
+
status: "ERROR",
|
|
2130
|
+
error_message: error instanceof Error ? error.message : String(error),
|
|
2131
|
+
is_streaming: true
|
|
2132
|
+
}).catch(() => {
|
|
2133
|
+
});
|
|
2134
|
+
throw error;
|
|
2135
|
+
}
|
|
2136
|
+
};
|
|
2137
|
+
}
|
|
2138
|
+
return tracedModel;
|
|
2139
|
+
}
|
|
2140
|
+
/** Wrap OpenAI client. Delegates to shared wrapper. */
|
|
2141
|
+
wrapOpenAI(client) {
|
|
2142
|
+
return wrapOpenAI(client, this.ctx);
|
|
2143
|
+
}
|
|
2144
|
+
/** Wrap Anthropic client. Delegates to shared wrapper. */
|
|
2145
|
+
wrapAnthropic(client) {
|
|
2146
|
+
return wrapAnthropic(client, this.ctx);
|
|
2147
|
+
}
|
|
2148
|
+
/** Wrap Google AI model. Delegates to shared wrapper. */
|
|
2149
|
+
wrapGoogleAI(model) {
|
|
2150
|
+
return wrapGoogleAI(model, this.ctx);
|
|
2151
|
+
}
|
|
2152
|
+
/** Wrap Vercel AI SDK. Delegates to shared wrapper. */
|
|
2153
|
+
wrapAISDK(ai, options) {
|
|
2154
|
+
return wrapAISDK(ai, this.ctx, options);
|
|
2155
|
+
}
|
|
2156
|
+
/** Wrap Mastra agent. Delegates to shared wrapper. */
|
|
2157
|
+
wrapMastraAgent(agent) {
|
|
2158
|
+
return wrapMastraAgent(agent, this.ctx);
|
|
2159
|
+
}
|
|
2160
|
+
};
|
|
2161
|
+
function session(options) {
|
|
2162
|
+
return new FallomSession(options);
|
|
2163
|
+
}
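FallomSession bundles configKey / sessionId / customerId and exposes the model-assignment and wrapper helpers as methods; session() is a thin factory around it. A sketch of the session-scoped API (method names from this diff; someAiSdkModel is a placeholder, and getModel's extra options are passed through to models.get):

import { init, session } from "@fallom/trace";

await init({ apiKey: process.env.FALLOM_API_KEY });

const s = session({
  configKey: "chat-backend",
  sessionId: "user-42-session-7",
  customerId: "user-42",
});

const assigned = await s.getModel();          // model assignment for this session's config
const traced = s.traceModel(someAiSdkModel);  // wraps doGenerate / doStream with trace spans
console.log(s.getContext(), assigned);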
|
|
2164
|
+
|
|
2165
|
+
// src/index.ts
|
|
2166
|
+
init_models();
|
|
2167
|
+
|
|
2168
|
+
// src/prompts.ts
|
|
2169
|
+
var prompts_exports = {};
|
|
2170
|
+
__export(prompts_exports, {
|
|
2171
|
+
clearPromptContext: () => clearPromptContext,
|
|
2000
2172
|
get: () => get2,
|
|
2173
|
+
getAB: () => getAB,
|
|
2174
|
+
getPromptContext: () => getPromptContext,
|
|
2001
2175
|
init: () => init3
|
|
2002
2176
|
});
|
|
2003
2177
|
var import_crypto2 = require("crypto");
|
|
2004
2178
|
var apiKey3 = null;
|
|
2005
|
-
var baseUrl3 = "https://
|
|
2179
|
+
var baseUrl3 = "https://prompts.fallom.com";
|
|
2006
2180
|
var initialized3 = false;
|
|
2007
2181
|
var syncInterval2 = null;
|
|
2008
2182
|
var debugMode3 = false;
|
|
2009
|
-
var
|
|
2183
|
+
var promptCache = /* @__PURE__ */ new Map();
|
|
2184
|
+
var promptABCache = /* @__PURE__ */ new Map();
|
|
2185
|
+
var promptContext = null;
|
|
2010
2186
|
var SYNC_TIMEOUT2 = 2e3;
|
|
2011
|
-
|
|
2012
|
-
function log3(msg) {
|
|
2187
|
+
function log5(msg) {
|
|
2013
2188
|
if (debugMode3) {
|
|
2014
|
-
console.log(`[Fallom] ${msg}`);
|
|
2189
|
+
console.log(`[Fallom Prompts] ${msg}`);
|
|
2015
2190
|
}
|
|
2016
2191
|
}
|
|
2017
2192
|
function init3(options = {}) {
|
|
2018
2193
|
apiKey3 = options.apiKey || process.env.FALLOM_API_KEY || null;
|
|
2019
|
-
baseUrl3 = options.baseUrl || process.env.
|
|
2194
|
+
baseUrl3 = options.baseUrl || process.env.FALLOM_PROMPTS_URL || process.env.FALLOM_BASE_URL || "https://prompts.fallom.com";
|
|
2020
2195
|
initialized3 = true;
|
|
2021
2196
|
if (!apiKey3) {
|
|
2022
2197
|
return;
|
|
2023
2198
|
}
|
|
2024
|
-
|
|
2199
|
+
fetchAll().catch(() => {
|
|
2025
2200
|
});
|
|
2026
2201
|
if (!syncInterval2) {
|
|
2027
2202
|
syncInterval2 = setInterval(() => {
|
|
2028
|
-
|
|
2203
|
+
fetchAll().catch(() => {
|
|
2029
2204
|
});
|
|
2030
2205
|
}, 3e4);
|
|
2031
2206
|
syncInterval2.unref();
|
|
@@ -2039,231 +2214,221 @@ function ensureInit2() {
|
|
|
2039
2214
|
}
|
|
2040
2215
|
}
|
|
2041
2216
|
}
|
|
2042
|
-
async function
|
|
2043
|
-
|
|
2044
|
-
|
|
2045
|
-
|
|
2217
|
+
async function fetchAll() {
|
|
2218
|
+
await Promise.all([fetchPrompts(), fetchPromptABTests()]);
|
|
2219
|
+
}
|
|
2220
|
+
async function fetchPrompts(timeout = SYNC_TIMEOUT2) {
|
|
2221
|
+
if (!apiKey3) return;
|
|
2222
|
+
try {
|
|
2223
|
+
const controller = new AbortController();
|
|
2224
|
+
const timeoutId = setTimeout(() => controller.abort(), timeout);
|
|
2225
|
+
const resp = await fetch(`${baseUrl3}/prompts`, {
|
|
2226
|
+
headers: { Authorization: `Bearer ${apiKey3}` },
|
|
2227
|
+
signal: controller.signal
|
|
2228
|
+
});
|
|
2229
|
+
clearTimeout(timeoutId);
|
|
2230
|
+
if (resp.ok) {
|
|
2231
|
+
const data = await resp.json();
|
|
2232
|
+
for (const p of data.prompts || []) {
|
|
2233
|
+
if (!promptCache.has(p.key)) {
|
|
2234
|
+
promptCache.set(p.key, { versions: /* @__PURE__ */ new Map(), current: null });
|
|
2235
|
+
}
|
|
2236
|
+
const cached = promptCache.get(p.key);
|
|
2237
|
+
cached.versions.set(p.version, {
|
|
2238
|
+
systemPrompt: p.system_prompt,
|
|
2239
|
+
userTemplate: p.user_template
|
|
2240
|
+
});
|
|
2241
|
+
cached.current = p.version;
|
|
2242
|
+
}
|
|
2243
|
+
}
|
|
2244
|
+
} catch {
|
|
2046
2245
|
}
|
|
2246
|
+
}
|
|
2247
|
+
async function fetchPromptABTests(timeout = SYNC_TIMEOUT2) {
|
|
2248
|
+
if (!apiKey3) return;
|
|
2047
2249
|
try {
|
|
2048
|
-
log3(`Fetching configs from ${baseUrl3}/configs`);
|
|
2049
2250
|
const controller = new AbortController();
|
|
2050
2251
|
const timeoutId = setTimeout(() => controller.abort(), timeout);
|
|
2051
|
-
const resp = await fetch(`${baseUrl3}/
|
|
2252
|
+
const resp = await fetch(`${baseUrl3}/prompt-ab-tests`, {
|
|
2052
2253
|
headers: { Authorization: `Bearer ${apiKey3}` },
|
|
2053
2254
|
signal: controller.signal
|
|
2054
2255
|
});
|
|
2055
2256
|
clearTimeout(timeoutId);
|
|
2056
|
-
log3(`Response status: ${resp.status}`);
|
|
2057
2257
|
if (resp.ok) {
|
|
2058
2258
|
const data = await resp.json();
|
|
2059
|
-
const
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
const key = c.key;
|
|
2063
|
-
const version = c.version || 1;
|
|
2064
|
-
log3(`Config '${key}' v${version}: ${JSON.stringify(c.variants)}`);
|
|
2065
|
-
if (!configCache.has(key)) {
|
|
2066
|
-
configCache.set(key, { versions: /* @__PURE__ */ new Map(), latest: null });
|
|
2259
|
+
for (const t of data.prompt_ab_tests || []) {
|
|
2260
|
+
if (!promptABCache.has(t.key)) {
|
|
2261
|
+
promptABCache.set(t.key, { versions: /* @__PURE__ */ new Map(), current: null });
|
|
2067
2262
|
}
|
|
2068
|
-
const cached =
|
|
2069
|
-
cached.versions.set(version,
|
|
2070
|
-
cached.
|
|
2263
|
+
const cached = promptABCache.get(t.key);
|
|
2264
|
+
cached.versions.set(t.version, { variants: t.variants });
|
|
2265
|
+
cached.current = t.version;
|
|
2071
2266
|
}
|
|
2072
|
-
} else {
|
|
2073
|
-
log3(`Fetch failed: ${resp.statusText}`);
|
|
2074
2267
|
}
|
|
2075
|
-
} catch
|
|
2076
|
-
log3(`Fetch exception: ${e}`);
|
|
2268
|
+
} catch {
|
|
2077
2269
|
}
|
|
2078
2270
|
}
|
|
2079
|
-
|
|
2080
|
-
if (!
|
|
2081
|
-
|
|
2082
|
-
const
|
|
2083
|
-
|
|
2084
|
-
|
|
2085
|
-
|
|
2086
|
-
|
|
2087
|
-
|
|
2088
|
-
|
|
2089
|
-
|
|
2271
|
+
function replaceVariables(template, variables) {
|
|
2272
|
+
if (!variables) return template;
|
|
2273
|
+
return template.replace(/\{\{(\s*\w+\s*)\}\}/g, (match, varName) => {
|
|
2274
|
+
const key = varName.trim();
|
|
2275
|
+
return key in variables ? String(variables[key]) : match;
|
|
2276
|
+
});
|
|
2277
|
+
}
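replaceVariables performs simple {{ name }} substitution: known variables are stringified in place, and unknown placeholders are returned unchanged rather than throwing. For example (illustrative values):

// "Hello {{name}}, your plan is {{plan}}." with { name: "Ada" }
// -> "Hello Ada, your plan is {{plan}}."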
|
|
2278
|
+
function setPromptContext(ctx) {
|
|
2279
|
+
promptContext = ctx;
|
|
2280
|
+
}
|
|
2281
|
+
function getPromptContext() {
|
|
2282
|
+
const ctx = promptContext;
|
|
2283
|
+
promptContext = null;
|
|
2284
|
+
return ctx;
|
|
2285
|
+
}
|
|
2286
|
+
async function get2(promptKey, options = {}) {
|
|
2287
|
+
const { variables, version, debug = false } = options;
|
|
2288
|
+
debugMode3 = debug;
|
|
2289
|
+
ensureInit2();
|
|
2290
|
+
log5(`get() called: promptKey=${promptKey}`);
|
|
2291
|
+
let promptData = promptCache.get(promptKey);
|
|
2292
|
+
if (!promptData) {
|
|
2293
|
+
log5("Not in cache, fetching...");
|
|
2294
|
+
await fetchPrompts(SYNC_TIMEOUT2);
|
|
2295
|
+
promptData = promptCache.get(promptKey);
|
|
2296
|
+
}
|
|
2297
|
+
if (!promptData) {
|
|
2298
|
+
throw new Error(
|
|
2299
|
+
`Prompt '${promptKey}' not found. Check that it exists in your Fallom dashboard.`
|
|
2090
2300
|
);
|
|
2091
|
-
clearTimeout(timeoutId);
|
|
2092
|
-
if (resp.ok) {
|
|
2093
|
-
const config = await resp.json();
|
|
2094
|
-
if (!configCache.has(configKey)) {
|
|
2095
|
-
configCache.set(configKey, { versions: /* @__PURE__ */ new Map(), latest: null });
|
|
2096
|
-
}
|
|
2097
|
-
configCache.get(configKey).versions.set(version, config);
|
|
2098
|
-
return config;
|
|
2099
|
-
}
|
|
2100
|
-
} catch {
|
|
2101
2301
|
}
|
|
2102
|
-
|
|
2302
|
+
const targetVersion = version ?? promptData.current;
|
|
2303
|
+
const content = promptData.versions.get(targetVersion);
|
|
2304
|
+
if (!content) {
|
|
2305
|
+
throw new Error(
|
|
2306
|
+
`Prompt '${promptKey}' version ${targetVersion} not found.`
|
|
2307
|
+
);
|
|
2308
|
+
}
|
|
2309
|
+
const system = replaceVariables(content.systemPrompt, variables);
|
|
2310
|
+
const user = replaceVariables(content.userTemplate, variables);
|
|
2311
|
+
setPromptContext({
|
|
2312
|
+
promptKey,
|
|
2313
|
+
promptVersion: targetVersion
|
|
2314
|
+
});
|
|
2315
|
+
log5(`\u2705 Got prompt: ${promptKey} v${targetVersion}`);
|
|
2316
|
+
return {
|
|
2317
|
+
key: promptKey,
|
|
2318
|
+
version: targetVersion,
|
|
2319
|
+
system,
|
|
2320
|
+
user
|
|
2321
|
+
};
|
|
2103
2322
|
}
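prompts.get() resolves a prompt by key (the latest cached version unless version is pinned), substitutes variables into both the system prompt and user template, and records the prompt context so the next trace can be tagged with prompt_key / prompt_version. A usage sketch (the "@fallom/trace" import specifier is assumed; option and return fields come from this diff):

import { prompts } from "@fallom/trace";

const p = await prompts.get("onboarding-email", {
  variables: { name: "Ada", product: "Fallom" },
  // version: 3,  // optional — defaults to the current version in the cache
});
console.log(p.key, p.version);
console.log(p.system); // system prompt with variables substituted
console.log(p.user);   // user template with variables substituted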
|
|
2104
|
-
async function
|
|
2105
|
-
const {
|
|
2323
|
+
async function getAB(abTestKey, sessionId, options = {}) {
|
|
2324
|
+
const { variables, debug = false } = options;
|
|
2106
2325
|
debugMode3 = debug;
|
|
2107
2326
|
ensureInit2();
|
|
2108
|
-
|
|
2109
|
-
|
|
2110
|
-
)
|
|
2111
|
-
|
|
2112
|
-
|
|
2113
|
-
|
|
2114
|
-
|
|
2327
|
+
log5(`getAB() called: abTestKey=${abTestKey}, sessionId=${sessionId}`);
|
|
2328
|
+
let abData = promptABCache.get(abTestKey);
|
|
2329
|
+
if (!abData) {
|
|
2330
|
+
log5("Not in cache, fetching...");
|
|
2331
|
+
await fetchPromptABTests(SYNC_TIMEOUT2);
|
|
2332
|
+
abData = promptABCache.get(abTestKey);
|
|
2333
|
+
}
|
|
2334
|
+
if (!abData) {
|
|
2335
|
+
throw new Error(
|
|
2336
|
+
`Prompt A/B test '${abTestKey}' not found. Check that it exists in your Fallom dashboard.`
|
|
2115
2337
|
);
|
|
2116
|
-
|
|
2117
|
-
|
|
2118
|
-
|
|
2119
|
-
|
|
2120
|
-
|
|
2121
|
-
|
|
2122
|
-
|
|
2123
|
-
|
|
2124
|
-
|
|
2125
|
-
|
|
2126
|
-
|
|
2127
|
-
|
|
2128
|
-
`[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`
|
|
2129
|
-
);
|
|
2130
|
-
return returnWithTrace(configKey, sessionId, fallback, 0);
|
|
2131
|
-
}
|
|
2132
|
-
throw new Error(
|
|
2133
|
-
`Config '${configKey}' not found. Check that it exists in your Fallom dashboard.`
|
|
2134
|
-
);
|
|
2135
|
-
}
|
|
2136
|
-
let config;
|
|
2137
|
-
let targetVersion;
|
|
2138
|
-
if (version !== void 0) {
|
|
2139
|
-
config = configData.versions.get(version);
|
|
2140
|
-
if (!config) {
|
|
2141
|
-
config = await fetchSpecificVersion(configKey, version, SYNC_TIMEOUT2) || void 0;
|
|
2142
|
-
}
|
|
2143
|
-
if (!config) {
|
|
2144
|
-
if (fallback) {
|
|
2145
|
-
console.warn(
|
|
2146
|
-
`[Fallom WARNING] Config '${configKey}' version ${version} not found, using fallback: ${fallback}`
|
|
2147
|
-
);
|
|
2148
|
-
return returnWithTrace(configKey, sessionId, fallback, 0);
|
|
2149
|
-
}
|
|
2150
|
-
throw new Error(`Config '${configKey}' version ${version} not found.`);
|
|
2151
|
-
}
|
|
2152
|
-
targetVersion = version;
|
|
2153
|
-
} else {
|
|
2154
|
-
targetVersion = configData.latest;
|
|
2155
|
-
config = configData.versions.get(targetVersion);
|
|
2156
|
-
if (!config) {
|
|
2157
|
-
if (fallback) {
|
|
2158
|
-
console.warn(
|
|
2159
|
-
`[Fallom WARNING] Config '${configKey}' has no cached version, using fallback: ${fallback}`
|
|
2160
|
-
);
|
|
2161
|
-
return returnWithTrace(configKey, sessionId, fallback, 0);
|
|
2162
|
-
}
|
|
2163
|
-
throw new Error(`Config '${configKey}' has no cached version.`);
|
|
2164
|
-
}
|
|
2165
|
-
}
|
|
2166
|
-
const variantsRaw = config.variants;
|
|
2167
|
-
const configVersion = config.version || targetVersion;
|
|
2168
|
-
const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
|
|
2169
|
-
log3(
|
|
2170
|
-
`Config found! Version: ${configVersion}, Variants: ${JSON.stringify(
|
|
2171
|
-
variants
|
|
2172
|
-
)}`
|
|
2338
|
+
}
|
|
2339
|
+
const currentVersion = abData.current;
|
|
2340
|
+
const versionData = abData.versions.get(currentVersion);
|
|
2341
|
+
if (!versionData) {
|
|
2342
|
+
throw new Error(`Prompt A/B test '${abTestKey}' has no current version.`);
|
|
2343
|
+
}
|
|
2344
|
+
const { variants } = versionData;
|
|
2345
|
+
log5(`A/B test '${abTestKey}' has ${variants?.length ?? 0} variants`);
|
|
2346
|
+
log5(`Version data: ${JSON.stringify(versionData, null, 2)}`);
|
|
2347
|
+
if (!variants || variants.length === 0) {
|
|
2348
|
+
throw new Error(
|
|
2349
|
+
`Prompt A/B test '${abTestKey}' has no variants configured.`
|
|
2173
2350
|
);
|
|
2174
|
-
|
|
2175
|
-
|
|
2176
|
-
|
|
2177
|
-
|
|
2178
|
-
|
|
2179
|
-
|
|
2180
|
-
|
|
2181
|
-
|
|
2182
|
-
|
|
2183
|
-
|
|
2184
|
-
|
|
2185
|
-
|
|
2186
|
-
assignedModel = v.model;
|
|
2187
|
-
break;
|
|
2188
|
-
}
|
|
2189
|
-
}
|
|
2190
|
-
log3(`\u2705 Assigned model: ${assignedModel}`);
|
|
2191
|
-
return returnWithTrace(configKey, sessionId, assignedModel, configVersion);
|
|
2192
|
-
} catch (e) {
|
|
2193
|
-
if (e instanceof Error && e.message.includes("not found")) {
|
|
2194
|
-
throw e;
|
|
2195
|
-
}
|
|
2196
|
-
if (fallback) {
|
|
2197
|
-
console.warn(
|
|
2198
|
-
`[Fallom WARNING] Error getting model for '${configKey}': ${e}. Using fallback: ${fallback}`
|
|
2199
|
-
);
|
|
2200
|
-
return returnWithTrace(configKey, sessionId, fallback, 0);
|
|
2351
|
+
}
|
|
2352
|
+
const hashBytes = (0, import_crypto2.createHash)("md5").update(sessionId).digest();
|
|
2353
|
+
const hashVal = hashBytes.readUInt32BE(0) % 1e6;
|
|
2354
|
+
let cumulative = 0;
|
|
2355
|
+
let selectedVariant = variants[variants.length - 1];
|
|
2356
|
+
let selectedIndex = variants.length - 1;
|
|
2357
|
+
for (let i = 0; i < variants.length; i++) {
|
|
2358
|
+
cumulative += variants[i].weight * 1e4;
|
|
2359
|
+
if (hashVal < cumulative) {
|
|
2360
|
+
selectedVariant = variants[i];
|
|
2361
|
+
selectedIndex = i;
|
|
2362
|
+
break;
|
|
2201
2363
|
}
|
|
2202
|
-
throw e;
|
|
2203
2364
|
}
|
|
2204
|
-
|
|
2205
|
-
|
|
2206
|
-
|
|
2207
|
-
|
|
2208
|
-
|
|
2365
|
+
const promptKey = selectedVariant.prompt_key;
|
|
2366
|
+
const promptVersion = selectedVariant.prompt_version;
|
|
2367
|
+
let promptData = promptCache.get(promptKey);
|
|
2368
|
+
if (!promptData) {
|
|
2369
|
+
await fetchPrompts(SYNC_TIMEOUT2);
|
|
2370
|
+
promptData = promptCache.get(promptKey);
|
|
2209
2371
|
}
|
|
2210
|
-
if (
|
|
2211
|
-
|
|
2212
|
-
|
|
2372
|
+
if (!promptData) {
|
|
2373
|
+
throw new Error(
|
|
2374
|
+
`Prompt '${promptKey}' (from A/B test '${abTestKey}') not found.`
|
|
2375
|
+
);
|
|
2213
2376
|
}
|
|
2214
|
-
|
|
2215
|
-
|
|
2216
|
-
|
|
2217
|
-
|
|
2218
|
-
|
|
2219
|
-
|
|
2220
|
-
const timeoutId = setTimeout(() => controller.abort(), RECORD_TIMEOUT);
|
|
2221
|
-
await fetch(`${baseUrl3}/sessions`, {
|
|
2222
|
-
method: "POST",
|
|
2223
|
-
headers: {
|
|
2224
|
-
Authorization: `Bearer ${apiKey3}`,
|
|
2225
|
-
"Content-Type": "application/json"
|
|
2226
|
-
},
|
|
2227
|
-
body: JSON.stringify({
|
|
2228
|
-
config_key: configKey,
|
|
2229
|
-
config_version: version,
|
|
2230
|
-
session_id: sessionId,
|
|
2231
|
-
assigned_model: model
|
|
2232
|
-
}),
|
|
2233
|
-
signal: controller.signal
|
|
2234
|
-
});
|
|
2235
|
-
clearTimeout(timeoutId);
|
|
2236
|
-
} catch {
|
|
2377
|
+
const targetVersion = promptVersion ?? promptData.current;
|
|
2378
|
+
const content = promptData.versions.get(targetVersion);
|
|
2379
|
+
if (!content) {
|
|
2380
|
+
throw new Error(
|
|
2381
|
+
`Prompt '${promptKey}' version ${targetVersion} not found.`
|
|
2382
|
+
);
|
|
2237
2383
|
}
|
|
2384
|
+
const system = replaceVariables(content.systemPrompt, variables);
|
|
2385
|
+
const user = replaceVariables(content.userTemplate, variables);
|
|
2386
|
+
setPromptContext({
|
|
2387
|
+
promptKey,
|
|
2388
|
+
promptVersion: targetVersion,
|
|
2389
|
+
abTestKey,
|
|
2390
|
+
variantIndex: selectedIndex
|
|
2391
|
+
});
|
|
2392
|
+
log5(
|
|
2393
|
+
`\u2705 Got prompt from A/B: ${promptKey} v${targetVersion} (variant ${selectedIndex})`
|
|
2394
|
+
);
|
|
2395
|
+
return {
|
|
2396
|
+
key: promptKey,
|
|
2397
|
+
version: targetVersion,
|
|
2398
|
+
system,
|
|
2399
|
+
user,
|
|
2400
|
+
abTestKey,
|
|
2401
|
+
variantIndex: selectedIndex
|
|
2402
|
+
};
|
|
2403
|
+
}
|
|
2404
|
+
function clearPromptContext() {
|
|
2405
|
+
promptContext = null;
|
|
2238
2406
|
}
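getAB() assigns a prompt variant deterministically: the session id is MD5-hashed into the range [0, 1e6) and walked against cumulative variant weights (weight * 1e4, i.e. weights read as percentages — with weights 60/40, hash values below 600000 pick variant 0), so a given session always sees the same variant; the selected variant's prompt is then resolved exactly like get(). A sketch (import specifier assumed; fields from this diff):

import { prompts } from "@fallom/trace";

const p = await prompts.getAB("welcome-copy-test", "user-42-session-7", {
  variables: { name: "Ada" },
});
// Re-running with the same abTestKey + sessionId returns the same variant.
console.log(p.abTestKey, p.variantIndex, p.key, p.version);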
|
|
2239
|
-
|
|
2240
|
-
// src/index.ts
|
|
2241
|
-
init_prompts();
|
|
2242
2407
|
|
|
2243
2408
|
// src/init.ts
|
|
2244
|
-
|
|
2409
|
+
init_models();
|
|
2245
2410
|
async function init4(options = {}) {
|
|
2246
2411
|
const tracesUrl = options.tracesUrl || process.env.FALLOM_TRACES_URL || "https://traces.fallom.com";
|
|
2247
2412
|
const configsUrl = options.configsUrl || process.env.FALLOM_CONFIGS_URL || "https://configs.fallom.com";
|
|
2248
2413
|
const promptsUrl = options.promptsUrl || process.env.FALLOM_PROMPTS_URL || "https://prompts.fallom.com";
|
|
2249
|
-
await
|
|
2414
|
+
await init({
|
|
2250
2415
|
apiKey: options.apiKey,
|
|
2251
2416
|
baseUrl: tracesUrl,
|
|
2252
2417
|
captureContent: options.captureContent,
|
|
2253
2418
|
debug: options.debug
|
|
2254
2419
|
});
|
|
2255
|
-
|
|
2420
|
+
init2({
|
|
2256
2421
|
apiKey: options.apiKey,
|
|
2257
2422
|
baseUrl: configsUrl
|
|
2258
2423
|
});
|
|
2259
|
-
|
|
2424
|
+
init3({
|
|
2260
2425
|
apiKey: options.apiKey,
|
|
2261
2426
|
baseUrl: promptsUrl
|
|
2262
2427
|
});
|
|
2263
2428
|
}
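The top-level init() now fans out to the trace, models and prompts modules, each with its own base URL that can be overridden per call or via FALLOM_TRACES_URL / FALLOM_CONFIGS_URL / FALLOM_PROMPTS_URL. A sketch with every option spelled out (the URLs shown are the hosted defaults from this diff):

import { init } from "@fallom/trace";

await init({
  apiKey: process.env.FALLOM_API_KEY,
  tracesUrl: "https://traces.fallom.com",
  configsUrl: "https://configs.fallom.com",
  promptsUrl: "https://prompts.fallom.com",
  captureContent: true,
  debug: false,
});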
|
|
2264
2429
|
|
|
2265
2430
|
// src/mastra.ts
|
|
2266
|
-
var
|
|
2431
|
+
var import_core12 = require("@opentelemetry/core");
|
|
2267
2432
|
var promptContext2 = {};
|
|
2268
2433
|
function setMastraPrompt(promptKey, version) {
|
|
2269
2434
|
promptContext2 = {
|
|
@@ -2290,9 +2455,13 @@ var FallomExporter = class {
|
|
|
2290
2455
|
this.apiKey = options.apiKey ?? process.env.FALLOM_API_KEY ?? "";
|
|
2291
2456
|
this.baseUrl = options.baseUrl ?? "https://traces.fallom.com";
|
|
2292
2457
|
this.debug = options.debug ?? false;
|
|
2293
|
-
|
|
2294
|
-
|
|
2295
|
-
|
|
2458
|
+
this.session = options.session;
|
|
2459
|
+
if (this.debug) {
|
|
2460
|
+
console.log("[FallomExporter] Constructor called");
|
|
2461
|
+
console.log("[FallomExporter] API key present:", !!this.apiKey);
|
|
2462
|
+
console.log("[FallomExporter] Base URL:", this.baseUrl);
|
|
2463
|
+
console.log("[FallomExporter] Session:", this.session);
|
|
2464
|
+
}
|
|
2296
2465
|
if (!this.apiKey) {
|
|
2297
2466
|
console.warn(
|
|
2298
2467
|
"[FallomExporter] No API key provided. Set FALLOM_API_KEY env var or pass apiKey option."
|
|
@@ -2309,15 +2478,15 @@ var FallomExporter = class {
|
|
|
2309
2478
|
*/
|
|
2310
2479
|
export(spans, resultCallback) {
|
|
2311
2480
|
if (spans.length === 0) {
|
|
2312
|
-
resultCallback({ code:
|
|
2481
|
+
resultCallback({ code: import_core12.ExportResultCode.SUCCESS });
|
|
2313
2482
|
return;
|
|
2314
2483
|
}
|
|
2315
2484
|
this.log(`Exporting ${spans.length} spans...`);
|
|
2316
2485
|
if (this.debug) {
|
|
2317
|
-
for (const
|
|
2318
|
-
this.log(` - ${
|
|
2486
|
+
for (const span of spans) {
|
|
2487
|
+
this.log(` - ${span.name}`, {
|
|
2319
2488
|
attributes: Object.fromEntries(
|
|
2320
|
-
Object.entries(
|
|
2489
|
+
Object.entries(span.attributes).filter(
|
|
2321
2490
|
([k]) => k.startsWith("gen_ai") || k.startsWith("llm")
|
|
2322
2491
|
)
|
|
2323
2492
|
)
|
|
@@ -2326,11 +2495,11 @@ var FallomExporter = class {
|
|
|
2326
2495
|
}
|
|
2327
2496
|
const exportPromise = this.sendSpans(spans).then(() => {
|
|
2328
2497
|
this.log("Export successful");
|
|
2329
|
-
resultCallback({ code:
|
|
2498
|
+
resultCallback({ code: import_core12.ExportResultCode.SUCCESS });
|
|
2330
2499
|
}).catch((error) => {
|
|
2331
2500
|
console.error("[FallomExporter] Export failed:", error);
|
|
2332
2501
|
resultCallback({
|
|
2333
|
-
code:
|
|
2502
|
+
code: import_core12.ExportResultCode.FAILED,
|
|
2334
2503
|
error: error instanceof Error ? error : new Error(String(error))
|
|
2335
2504
|
});
|
|
2336
2505
|
});
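FallomExporter no longer reads a module-level session: the session context is passed as a constructor option and forwarded as X-Fallom-Config-Key / X-Fallom-Session-Id / X-Fallom-Customer-Id headers on the OTLP request. A sketch of constructing it (option and header names from this diff; registering the exporter with an OpenTelemetry provider is application-specific and omitted):

import { FallomExporter, session } from "@fallom/trace";

const s = session({ configKey: "mastra-app", sessionId: "session-123", customerId: "user-42" });
const exporter = new FallomExporter({
  apiKey: process.env.FALLOM_API_KEY,
  session: s.getContext(), // { configKey, sessionId, customerId }
  debug: true,
});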
|
|
@@ -2353,20 +2522,19 @@ var FallomExporter = class {
|
|
|
2353
2522
|
* Send spans to Fallom's OTLP endpoint.
|
|
2354
2523
|
*/
|
|
2355
2524
|
async sendSpans(spans) {
|
|
2356
|
-
const session = getSession();
|
|
2357
2525
|
const resourceSpans = this.spansToOtlpJson(spans);
|
|
2358
2526
|
const headers = {
|
|
2359
2527
|
"Content-Type": "application/json",
|
|
2360
2528
|
Authorization: `Bearer ${this.apiKey}`
|
|
2361
2529
|
};
|
|
2362
|
-
if (session?.configKey) {
|
|
2363
|
-
headers["X-Fallom-Config-Key"] = session.configKey;
|
|
2530
|
+
if (this.session?.configKey) {
|
|
2531
|
+
headers["X-Fallom-Config-Key"] = this.session.configKey;
|
|
2364
2532
|
}
|
|
2365
|
-
if (session?.sessionId) {
|
|
2366
|
-
headers["X-Fallom-Session-Id"] = session.sessionId;
|
|
2533
|
+
if (this.session?.sessionId) {
|
|
2534
|
+
headers["X-Fallom-Session-Id"] = this.session.sessionId;
|
|
2367
2535
|
}
|
|
2368
|
-
if (session?.customerId) {
|
|
2369
|
-
headers["X-Fallom-Customer-Id"] = session.customerId;
|
|
2536
|
+
if (this.session?.customerId) {
|
|
2537
|
+
headers["X-Fallom-Customer-Id"] = this.session.customerId;
|
|
2370
2538
|
}
|
|
2371
2539
|
if (promptContext2.promptKey) {
|
|
2372
2540
|
headers["X-Fallom-Prompt-Key"] = promptContext2.promptKey;
|
|
@@ -2403,12 +2571,12 @@ var FallomExporter = class {
|
|
|
2403
2571
|
*/
|
|
2404
2572
|
spansToOtlpJson(spans) {
|
|
2405
2573
|
const resourceMap = /* @__PURE__ */ new Map();
|
|
2406
|
-
for (const
|
|
2407
|
-
const resourceKey = JSON.stringify(
|
|
2574
|
+
for (const span of spans) {
|
|
2575
|
+
const resourceKey = JSON.stringify(span.resource.attributes);
|
|
2408
2576
|
if (!resourceMap.has(resourceKey)) {
|
|
2409
2577
|
resourceMap.set(resourceKey, []);
|
|
2410
2578
|
}
|
|
2411
|
-
resourceMap.get(resourceKey).push(
|
|
2579
|
+
resourceMap.get(resourceKey).push(span);
|
|
2412
2580
|
}
|
|
2413
2581
|
const resourceSpans = [];
|
|
2414
2582
|
for (const [_resourceKey, resourceSpanList] of resourceMap) {
|
|
@@ -2423,7 +2591,7 @@ var FallomExporter = class {
|
|
|
2423
2591
|
name: firstSpan.instrumentationLibrary.name,
|
|
2424
2592
|
version: firstSpan.instrumentationLibrary.version
|
|
2425
2593
|
},
|
|
2426
|
-
spans: resourceSpanList.map((
|
|
2594
|
+
spans: resourceSpanList.map((span) => this.spanToOtlp(span))
|
|
2427
2595
|
}
|
|
2428
2596
|
]
|
|
2429
2597
|
});
|
|
@@ -2433,21 +2601,21 @@ var FallomExporter = class {
|
|
|
2433
2601
|
/**
|
|
2434
2602
|
* Convert a single span to OTLP format.
|
|
2435
2603
|
*/
|
|
2436
|
-
spanToOtlp(
|
|
2604
|
+
spanToOtlp(span) {
|
|
2437
2605
|
return {
|
|
2438
|
-
traceId:
|
|
2439
|
-
spanId:
|
|
2440
|
-
parentSpanId:
|
|
2441
|
-
name:
|
|
2442
|
-
kind:
|
|
2443
|
-
startTimeUnixNano: this.hrTimeToNanos(
|
|
2444
|
-
endTimeUnixNano: this.hrTimeToNanos(
|
|
2445
|
-
attributes: this.attributesToOtlp(
|
|
2606
|
+
traceId: span.spanContext().traceId,
|
|
2607
|
+
spanId: span.spanContext().spanId,
|
|
2608
|
+
parentSpanId: span.parentSpanId,
|
|
2609
|
+
name: span.name,
|
|
2610
|
+
kind: span.kind,
|
|
2611
|
+
startTimeUnixNano: this.hrTimeToNanos(span.startTime),
|
|
2612
|
+
endTimeUnixNano: this.hrTimeToNanos(span.endTime),
|
|
2613
|
+
attributes: this.attributesToOtlp(span.attributes),
|
|
2446
2614
|
status: {
|
|
2447
|
-
code:
|
|
2448
|
-
message:
|
|
2615
|
+
code: span.status.code,
|
|
2616
|
+
message: span.status.message
|
|
2449
2617
|
},
|
|
2450
|
-
events:
|
|
2618
|
+
events: span.events.map((event) => ({
|
|
2451
2619
|
timeUnixNano: this.hrTimeToNanos(event.time),
|
|
2452
2620
|
name: event.name,
|
|
2453
2621
|
attributes: this.attributesToOtlp(event.attributes || {})
|
|
@@ -2498,20 +2666,23 @@ var FallomExporter = class {
|
|
|
2498
2666
|
};
|
|
2499
2667
|
|
|
2500
2668
|
// src/index.ts
|
|
2501
|
-
|
|
2669
|
+
init_models();
|
|
2502
2670
|
var index_default = {
|
|
2503
2671
|
init: init4,
|
|
2504
2672
|
trace: trace_exports,
|
|
2505
2673
|
models: models_exports,
|
|
2506
|
-
prompts: prompts_exports
|
|
2674
|
+
prompts: prompts_exports,
|
|
2675
|
+
session
|
|
2507
2676
|
};
|
|
2508
2677
|
// Annotate the CommonJS export names for ESM import in node:
|
|
2509
2678
|
0 && (module.exports = {
|
|
2510
2679
|
FallomExporter,
|
|
2680
|
+
FallomSession,
|
|
2511
2681
|
clearMastraPrompt,
|
|
2512
2682
|
init,
|
|
2513
2683
|
models,
|
|
2514
2684
|
prompts,
|
|
2685
|
+
session,
|
|
2515
2686
|
setMastraPrompt,
|
|
2516
2687
|
setMastraPromptAB,
|
|
2517
2688
|
trace
|