@fallom/trace 0.1.6 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/chunk-6MSTRIK4.mjs +255 -0
- package/dist/chunk-H2EACSBT.mjs +255 -0
- package/dist/index.d.mts +189 -6
- package/dist/index.d.ts +189 -6
- package/dist/index.js +853 -67
- package/dist/index.mjs +844 -69
- package/dist/prompts-VAN5E3L4.mjs +14 -0
- package/dist/prompts-ZSLS4DHO.mjs +14 -0
- package/package.json +3 -1
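
The headline changes in this range are the new wrapAISDK and wrapMastraAgent wrappers on the trace module, per-service base URLs (traces/configs/prompts subdomains), and a new FallomExporter class for OpenTelemetry span export (see the index.mjs diff below). A minimal consumption sketch, inferred from the bundled code in this diff rather than from package documentation; the setSession argument shape and the AI SDK provider import are assumptions:

  // Sketch only: wiring inferred from this diff, not from official docs.
  import fallom, { trace } from "@fallom/trace";
  import * as ai from "ai";                  // Vercel AI SDK (assumed to be installed)
  import { openai } from "@ai-sdk/openai";   // assumed provider package

  await fallom.init({ apiKey: process.env.FALLOM_API_KEY, debug: true });
  trace.setSession({ configKey: "my-config", sessionId: "session-123" }); // argument shape assumed

  // wrapAISDK returns traced versions of generateText / streamText / generateObject / streamObject.
  const { generateText } = trace.wrapAISDK(ai);
  const { text } = await generateText({
    model: openai("gpt-4o-mini"),
    prompt: "Say hello",
  });
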
package/dist/index.mjs
CHANGED
@@ -2,7 +2,7 @@ import {
 __export,
 init,
 prompts_exports
-} from "./chunk-
+} from "./chunk-6MSTRIK4.mjs";

 // src/trace.ts
 var trace_exports = {};
@@ -14,8 +14,10 @@ __export(trace_exports, {
 setSession: () => setSession,
 shutdown: () => shutdown,
 span: () => span,
+wrapAISDK: () => wrapAISDK,
 wrapAnthropic: () => wrapAnthropic,
 wrapGoogleAI: () => wrapGoogleAI,
+wrapMastraAgent: () => wrapMastraAgent,
 wrapOpenAI: () => wrapOpenAI
 });
 import { AsyncLocalStorage } from "async_hooks";
@@ -25,7 +27,7 @@ import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
 // node_modules/@opentelemetry/resources/build/esm/Resource.js
 import { diag } from "@opentelemetry/api";

-// node_modules/@opentelemetry/semantic-conventions/build/esm/resource/SemanticResourceAttributes.js
+// node_modules/@opentelemetry/resources/node_modules/@opentelemetry/semantic-conventions/build/esm/resource/SemanticResourceAttributes.js
 var SemanticResourceAttributes = {
 /**
 * Name of the cloud provider.
@@ -405,35 +407,9 @@ var SemanticResourceAttributes = {
 */
 WEBENGINE_DESCRIPTION: "webengine.description"
 };
-var TelemetrySdkLanguageValues = {
-/** cpp. */
-CPP: "cpp",
-/** dotnet. */
-DOTNET: "dotnet",
-/** erlang. */
-ERLANG: "erlang",
-/** go. */
-GO: "go",
-/** java. */
-JAVA: "java",
-/** nodejs. */
-NODEJS: "nodejs",
-/** php. */
-PHP: "php",
-/** python. */
-PYTHON: "python",
-/** ruby. */
-RUBY: "ruby",
-/** webjs. */
-WEBJS: "webjs"
-};
-
-// node_modules/@opentelemetry/core/build/esm/version.js
-var VERSION = "1.19.0";

-// node_modules/@opentelemetry/
-
-var SDK_INFO = (_a = {}, _a[SemanticResourceAttributes.TELEMETRY_SDK_NAME] = "opentelemetry", _a[SemanticResourceAttributes.PROCESS_RUNTIME_NAME] = "node", _a[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] = TelemetrySdkLanguageValues.NODEJS, _a[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] = VERSION, _a);
+// node_modules/@opentelemetry/resources/build/esm/Resource.js
+import { SDK_INFO } from "@opentelemetry/core";

 // node_modules/@opentelemetry/resources/build/esm/platform/node/default-service-name.js
 function defaultServiceName() {
@@ -570,10 +546,10 @@ var Resource = (
 (function() {
 function Resource2(attributes, asyncAttributesPromise) {
 var _this = this;
-var
+var _a;
 this._attributes = attributes;
 this.asyncAttributesPending = asyncAttributesPromise != null;
-this._syncAttributes = (
+this._syncAttributes = (_a = this._attributes) !== null && _a !== void 0 ? _a : {};
 this._asyncAttributesPromise = asyncAttributesPromise === null || asyncAttributesPromise === void 0 ? void 0 : asyncAttributesPromise.then(function(asyncAttributes) {
 _this._attributes = Object.assign({}, _this._attributes, asyncAttributes);
 _this.asyncAttributesPending = false;
@@ -588,30 +564,30 @@ var Resource = (
 return Resource2.EMPTY;
 };
 Resource2.default = function() {
-var
-return new Resource2((
+var _a;
+return new Resource2((_a = {}, _a[SemanticResourceAttributes.SERVICE_NAME] = defaultServiceName(), _a[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] = SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], _a[SemanticResourceAttributes.TELEMETRY_SDK_NAME] = SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME], _a[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] = SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], _a));
 };
 Object.defineProperty(Resource2.prototype, "attributes", {
 get: function() {
-var
+var _a;
 if (this.asyncAttributesPending) {
 diag.error("Accessing resource attributes before async attributes settled");
 }
-return (
+return (_a = this._attributes) !== null && _a !== void 0 ? _a : {};
 },
 enumerable: false,
 configurable: true
 });
 Resource2.prototype.waitForAsyncAttributes = function() {
 return __awaiter(this, void 0, void 0, function() {
-return __generator(this, function(
-switch (
+return __generator(this, function(_a) {
+switch (_a.label) {
 case 0:
 if (!this.asyncAttributesPending) return [3, 2];
 return [4, this._asyncAttributesPromise];
 case 1:
-
-
+_a.sent();
+_a.label = 2;
 case 2:
 return [
 2
@@ -623,19 +599,19 @@ var Resource = (
 };
 Resource2.prototype.merge = function(other) {
 var _this = this;
-var
+var _a;
 if (!other)
 return this;
-var mergedSyncAttributes = __assign(__assign({}, this._syncAttributes), (
+var mergedSyncAttributes = __assign(__assign({}, this._syncAttributes), (_a = other._syncAttributes) !== null && _a !== void 0 ? _a : other.attributes);
 if (!this._asyncAttributesPromise && !other._asyncAttributesPromise) {
 return new Resource2(mergedSyncAttributes);
 }
 var mergedAttributesPromise = Promise.all([
 this._asyncAttributesPromise,
 other._asyncAttributesPromise
-]).then(function(
+]).then(function(_a2) {
 var _b;
-var _c = __read(
+var _c = __read(_a2, 2), thisAsyncAttributes = _c[0], otherAsyncAttributes = _c[1];
 return __assign(__assign(__assign(__assign({}, _this._syncAttributes), thisAsyncAttributes), (_b = other._syncAttributes) !== null && _b !== void 0 ? _b : other.attributes), otherAsyncAttributes);
 });
 return new Resource2(mergedSyncAttributes, mergedAttributesPromise);
@@ -649,7 +625,7 @@ var Resource = (
 var sessionStorage = new AsyncLocalStorage();
 var fallbackSession = null;
 var apiKey = null;
-var baseUrl = "https://
+var baseUrl = "https://traces.fallom.com";
 var initialized = false;
 var captureContent = true;
 var debugMode = false;
@@ -692,7 +668,7 @@ async function init2(options = {}) {
 debugMode = options.debug ?? false;
 log("\u{1F680} Initializing Fallom tracing...");
 apiKey = options.apiKey || process.env.FALLOM_API_KEY || null;
-baseUrl = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+baseUrl = options.baseUrl || process.env.FALLOM_TRACES_URL || process.env.FALLOM_BASE_URL || "https://traces.fallom.com";
 const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
 if (envCapture === "false" || envCapture === "0" || envCapture === "no") {
 captureContent = false;
@@ -864,12 +840,12 @@ function messagesToOtelAttributes(messages, completion, model, responseId) {
 if (messages) {
 messages.forEach((msg, i) => {
 attrs[`gen_ai.prompt.${i}.role`] = msg.role;
-attrs[`gen_ai.prompt.${i}.content`] = msg.content;
+attrs[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
 });
 }
 if (completion) {
 attrs["gen_ai.completion.0.role"] = completion.role;
-attrs["gen_ai.completion.0.content"] = completion.content;
+attrs["gen_ai.completion.0.content"] = typeof completion.content === "string" ? completion.content : JSON.stringify(completion.content);
 if (completion.tool_calls) {
 attrs["gen_ai.completion.0.tool_calls"] = JSON.stringify(
 completion.tool_calls
@@ -886,10 +862,13 @@ function generateHexId(length) {
 var traceContextStorage = new AsyncLocalStorage();
 var fallbackTraceContext = null;
 async function sendTrace(trace) {
+const url = `${baseUrl}/v1/traces`;
+log("\u{1F4E4} Sending trace to:", url);
+log(" Session:", trace.session_id, "Config:", trace.config_key);
 try {
 const controller = new AbortController();
 const timeoutId = setTimeout(() => controller.abort(), 5e3);
-await fetch(
+const response = await fetch(url, {
 method: "POST",
 headers: {
 Authorization: `Bearer ${apiKey}`,
@@ -899,8 +878,14 @@ async function sendTrace(trace) {
 signal: controller.signal
 });
 clearTimeout(timeoutId);
-
-
+if (!response.ok) {
+const text = await response.text();
+log("\u274C Trace send failed:", response.status, text);
+} else {
+log("\u2705 Trace sent:", trace.name, trace.model);
+}
+} catch (err) {
+log("\u274C Trace send error:", err instanceof Error ? err.message : err);
 }
 }
 function wrapOpenAI(client) {
@@ -914,7 +899,7 @@ function wrapOpenAI(client) {
 }
 let promptCtx = null;
 try {
-const { getPromptContext } = await import("./prompts-
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
 promptCtx = getPromptContext();
 } catch {
 }
@@ -1005,7 +990,7 @@ function wrapAnthropic(client) {
 }
 let promptCtx = null;
 try {
-const { getPromptContext } = await import("./prompts-
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
 promptCtx = getPromptContext();
 } catch {
 }
@@ -1102,7 +1087,7 @@ function wrapGoogleAI(model) {
 }
 let promptCtx = null;
 try {
-const { getPromptContext } = await import("./prompts-
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
 promptCtx = getPromptContext();
 } catch {
 }
@@ -1202,6 +1187,535 @@ function wrapGoogleAI(model) {
 };
 return model;
 }
+function wrapAISDK(ai) {
+const aiModule = ai;
+return {
+generateText: createGenerateTextWrapper(aiModule),
+streamText: createStreamTextWrapper(aiModule),
+generateObject: aiModule.generateObject ? createGenerateObjectWrapper(aiModule) : void 0,
+streamObject: aiModule.streamObject ? createStreamObjectWrapper(aiModule) : void 0
+};
+}
+function createGenerateTextWrapper(aiModule) {
+return async (...args) => {
+const ctx = sessionStorage.getStore() || fallbackSession;
+if (!ctx || !initialized) {
+return aiModule.generateText(...args);
+}
+let promptCtx = null;
+try {
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+promptCtx = getPromptContext();
+} catch {
+}
+const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceId = traceCtx?.traceId || generateHexId(32);
+const spanId = generateHexId(16);
+const parentSpanId = traceCtx?.parentSpanId;
+const params = args[0] || {};
+const startTime = Date.now();
+try {
+const result = await aiModule.generateText(...args);
+const endTime = Date.now();
+const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+const attributes = {};
+if (captureContent) {
+attributes["gen_ai.request.model"] = modelId;
+attributes["gen_ai.response.model"] = modelId;
+if (params?.prompt) {
+attributes["gen_ai.prompt.0.role"] = "user";
+attributes["gen_ai.prompt.0.content"] = params.prompt;
+}
+if (params?.messages) {
+params.messages.forEach((msg, i) => {
+attributes[`gen_ai.prompt.${i}.role`] = msg.role;
+attributes[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+});
+}
+if (result?.text) {
+attributes["gen_ai.completion.0.role"] = "assistant";
+attributes["gen_ai.completion.0.content"] = result.text;
+}
+if (result?.response?.id) {
+attributes["gen_ai.response.id"] = result.response.id;
+}
+}
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "generateText",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "OK",
+prompt_tokens: result?.usage?.promptTokens,
+completion_tokens: result?.usage?.completionTokens,
+total_tokens: result?.usage?.totalTokens,
+attributes: captureContent ? attributes : void 0,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+return result;
+} catch (error) {
+const endTime = Date.now();
+const modelId = params?.model?.modelId || String(params?.model || "unknown");
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "generateText",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "ERROR",
+error_message: error?.message,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+throw error;
+}
+};
+}
+function createStreamTextWrapper(aiModule) {
+return async (...args) => {
+const ctx = sessionStorage.getStore() || fallbackSession;
+const params = args[0] || {};
+const startTime = Date.now();
+const result = await aiModule.streamText(...args);
+if (!ctx || !initialized) {
+return result;
+}
+const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceId = traceCtx?.traceId || generateHexId(32);
+const spanId = generateHexId(16);
+const parentSpanId = traceCtx?.parentSpanId;
+let firstTokenTime = null;
+const modelId = params?.model?.modelId || String(params?.model || "unknown");
+let promptCtx = null;
+try {
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+promptCtx = getPromptContext();
+} catch {
+}
+if (result?.usage) {
+result.usage.then((usage) => {
+const endTime = Date.now();
+log("\u{1F4CA} streamText usage:", JSON.stringify(usage, null, 2));
+const attributes = {};
+if (captureContent) {
+attributes["gen_ai.request.model"] = modelId;
+if (params?.prompt) {
+attributes["gen_ai.prompt.0.role"] = "user";
+attributes["gen_ai.prompt.0.content"] = params.prompt;
+}
+}
+if (firstTokenTime) {
+attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+}
+const tracePayload = {
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "streamText",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "OK",
+prompt_tokens: usage?.promptTokens,
+completion_tokens: usage?.completionTokens,
+total_tokens: usage?.totalTokens,
+time_to_first_token_ms: firstTokenTime ? firstTokenTime - startTime : void 0,
+attributes: captureContent ? attributes : void 0,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+};
+sendTrace(tracePayload).catch(() => {
+});
+}).catch((error) => {
+const endTime = Date.now();
+log("\u274C streamText error:", error?.message);
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "streamText",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "ERROR",
+error_message: error?.message,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+});
+}
+if (result?.textStream) {
+const originalTextStream = result.textStream;
+const wrappedTextStream = (async function* () {
+for await (const chunk of originalTextStream) {
+if (!firstTokenTime) {
+firstTokenTime = Date.now();
+log("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+}
+yield chunk;
+}
+})();
+return new Proxy(result, {
+get(target, prop) {
+if (prop === "textStream") {
+return wrappedTextStream;
+}
+return target[prop];
+}
+});
+}
+return result;
+};
+}
+function createGenerateObjectWrapper(aiModule) {
+return async (...args) => {
+const ctx = sessionStorage.getStore() || fallbackSession;
+if (!ctx || !initialized) {
+return aiModule.generateObject(...args);
+}
+let promptCtx = null;
+try {
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+promptCtx = getPromptContext();
+} catch {
+}
+const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceId = traceCtx?.traceId || generateHexId(32);
+const spanId = generateHexId(16);
+const parentSpanId = traceCtx?.parentSpanId;
+const params = args[0] || {};
+const startTime = Date.now();
+try {
+const result = await aiModule.generateObject(...args);
+const endTime = Date.now();
+const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+const attributes = {};
+if (captureContent) {
+attributes["gen_ai.request.model"] = modelId;
+attributes["gen_ai.response.model"] = modelId;
+if (result?.object) {
+attributes["gen_ai.completion.0.role"] = "assistant";
+attributes["gen_ai.completion.0.content"] = JSON.stringify(
+result.object
+);
+}
+}
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "generateObject",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "OK",
+prompt_tokens: result?.usage?.promptTokens,
+completion_tokens: result?.usage?.completionTokens,
+total_tokens: result?.usage?.totalTokens,
+attributes: captureContent ? attributes : void 0,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+return result;
+} catch (error) {
+const endTime = Date.now();
+const modelId = params?.model?.modelId || String(params?.model || "unknown");
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "generateObject",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "ERROR",
+error_message: error?.message,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+throw error;
+}
+};
+}
+function createStreamObjectWrapper(aiModule) {
+return async (...args) => {
+const ctx = sessionStorage.getStore() || fallbackSession;
+const params = args[0] || {};
+const startTime = Date.now();
+const result = await aiModule.streamObject(...args);
+log("\u{1F50D} streamObject result keys:", Object.keys(result || {}));
+if (!ctx || !initialized) {
+return result;
+}
+const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+const traceId = traceCtx?.traceId || generateHexId(32);
+const spanId = generateHexId(16);
+const parentSpanId = traceCtx?.parentSpanId;
+let firstTokenTime = null;
+const modelId = params?.model?.modelId || String(params?.model || "unknown");
+let promptCtx = null;
+try {
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+promptCtx = getPromptContext();
+} catch {
+}
+if (result?.usage) {
+result.usage.then((usage) => {
+const endTime = Date.now();
+log("\u{1F4CA} streamObject usage:", JSON.stringify(usage, null, 2));
+const attributes = {};
+if (captureContent) {
+attributes["gen_ai.request.model"] = modelId;
+}
+if (firstTokenTime) {
+attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+}
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "streamObject",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "OK",
+prompt_tokens: usage?.promptTokens,
+completion_tokens: usage?.completionTokens,
+total_tokens: usage?.totalTokens,
+attributes: captureContent ? attributes : void 0,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+}).catch((error) => {
+const endTime = Date.now();
+sendTrace({
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+parent_span_id: parentSpanId,
+name: "streamObject",
+kind: "llm",
+model: modelId,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "ERROR",
+error_message: error?.message,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+}).catch(() => {
+});
+});
+}
+if (result?.partialObjectStream) {
+const originalStream = result.partialObjectStream;
+const wrappedStream = (async function* () {
+for await (const chunk of originalStream) {
+if (!firstTokenTime) {
+firstTokenTime = Date.now();
+log("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+}
+yield chunk;
+}
+})();
+return new Proxy(result, {
+get(target, prop) {
+if (prop === "partialObjectStream") {
+return wrappedStream;
+}
+return target[prop];
+}
+});
+}
+return result;
+};
+}
+function wrapMastraAgent(agent) {
+const originalGenerate = agent.generate.bind(agent);
+const agentName = agent.name || "MastraAgent";
+agent.generate = async function(...args) {
+const ctx = sessionStorage.getStore() || fallbackSession;
+if (!ctx || !initialized) {
+return originalGenerate(...args);
+}
+let promptCtx = null;
+try {
+const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+promptCtx = getPromptContext();
+} catch {
+}
+const traceId = generateHexId(32);
+const spanId = generateHexId(16);
+const startTime = Date.now();
+const messages = args[0] || [];
+try {
+const result = await originalGenerate(...args);
+const endTime = Date.now();
+const model = result?.model?.modelId || "unknown";
+const toolCalls = [];
+if (result?.steps?.length) {
+for (const step of result.steps) {
+if (step.toolCalls?.length) {
+for (let i = 0; i < step.toolCalls.length; i++) {
+const tc = step.toolCalls[i];
+const tr = step.toolResults?.[i];
+toolCalls.push({
+name: tc.toolName,
+arguments: tc.args,
+result: tr?.result
+});
+}
+}
+}
+}
+const attributes = {
+"gen_ai.system": "Mastra",
+"gen_ai.request.model": model,
+"gen_ai.response.model": model,
+"fallom.source": "mastra-agent",
+"llm.request.type": "chat"
+};
+if (Array.isArray(messages)) {
+messages.forEach((msg, i) => {
+attributes[`gen_ai.prompt.${i}.role`] = msg.role || "user";
+attributes[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+});
+}
+if (result?.text) {
+attributes["gen_ai.completion.0.role"] = "assistant";
+attributes["gen_ai.completion.0.content"] = result.text;
+attributes["gen_ai.completion.0.finish_reason"] = "stop";
+}
+if (toolCalls.length > 0) {
+attributes["fallom.tool_calls"] = JSON.stringify(toolCalls);
+toolCalls.forEach((tc, i) => {
+attributes[`gen_ai.completion.0.tool_calls.${i}.name`] = tc.name;
+attributes[`gen_ai.completion.0.tool_calls.${i}.type`] = "function";
+attributes[`gen_ai.completion.0.tool_calls.${i}.arguments`] = JSON.stringify(tc.arguments);
+});
+}
+if (result?.usage) {
+attributes["gen_ai.usage.prompt_tokens"] = result.usage.promptTokens;
+attributes["gen_ai.usage.completion_tokens"] = result.usage.completionTokens;
+attributes["llm.usage.total_tokens"] = result.usage.totalTokens;
+}
+const traceData = {
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+name: `mastra.${agentName}.generate`,
+kind: "client",
+model,
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "OK",
+prompt_tokens: result?.usage?.promptTokens,
+completion_tokens: result?.usage?.completionTokens,
+total_tokens: result?.usage?.totalTokens,
+attributes,
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+};
+sendTrace(traceData).catch(() => {
+});
+return result;
+} catch (error) {
+const endTime = Date.now();
+const traceData = {
+config_key: ctx.configKey,
+session_id: ctx.sessionId,
+customer_id: ctx.customerId,
+trace_id: traceId,
+span_id: spanId,
+name: `mastra.${agentName}.generate`,
+kind: "client",
+start_time: new Date(startTime).toISOString(),
+end_time: new Date(endTime).toISOString(),
+duration_ms: endTime - startTime,
+status: "ERROR",
+error_message: error instanceof Error ? error.message : String(error),
+prompt_key: promptCtx?.promptKey,
+prompt_version: promptCtx?.promptVersion,
+prompt_ab_test_key: promptCtx?.abTestKey,
+prompt_variant_index: promptCtx?.variantIndex
+};
+sendTrace(traceData).catch(() => {
+});
+throw error;
+}
+};
+return agent;
+}

 // src/models.ts
 var models_exports = {};
@@ -1211,7 +1725,7 @@ __export(models_exports, {
 });
 import { createHash } from "crypto";
 var apiKey2 = null;
-var baseUrl2 = "https://
+var baseUrl2 = "https://configs.fallom.com";
 var initialized2 = false;
 var syncInterval = null;
 var debugMode2 = false;
@@ -1225,7 +1739,7 @@ function log2(msg) {
 }
 function init3(options = {}) {
 apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
-baseUrl2 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+baseUrl2 = options.baseUrl || process.env.FALLOM_CONFIGS_URL || process.env.FALLOM_BASE_URL || "https://configs.fallom.com";
 initialized2 = true;
 if (!apiKey2) {
 return;
@@ -1314,20 +1828,28 @@ async function get(configKey, sessionId, options = {}) {
 const { version, fallback, debug = false } = options;
 debugMode2 = debug;
 ensureInit();
-log2(
+log2(
+`get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`
+);
 try {
 let configData = configCache.get(configKey);
-log2(
+log2(
+`Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`
+);
 if (!configData) {
 log2("Not in cache, fetching...");
 await fetchConfigs(SYNC_TIMEOUT);
 configData = configCache.get(configKey);
-log2(
+log2(
+`After fetch, cache lookup: ${configData ? "found" : "still not found"}`
+);
 }
 if (!configData) {
 log2(`Config not found, using fallback: ${fallback}`);
 if (fallback) {
-console.warn(
+console.warn(
+`[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`
+);
 return returnWithTrace(configKey, sessionId, fallback, 0);
 }
 throw new Error(
@@ -1343,7 +1865,9 @@ async function get(configKey, sessionId, options = {}) {
 }
 if (!config) {
 if (fallback) {
-console.warn(
+console.warn(
+`[Fallom WARNING] Config '${configKey}' version ${version} not found, using fallback: ${fallback}`
+);
 return returnWithTrace(configKey, sessionId, fallback, 0);
 }
 throw new Error(`Config '${configKey}' version ${version} not found.`);
@@ -1354,7 +1878,9 @@ async function get(configKey, sessionId, options = {}) {
 config = configData.versions.get(targetVersion);
 if (!config) {
 if (fallback) {
-console.warn(
+console.warn(
+`[Fallom WARNING] Config '${configKey}' has no cached version, using fallback: ${fallback}`
+);
 return returnWithTrace(configKey, sessionId, fallback, 0);
 }
 throw new Error(`Config '${configKey}' has no cached version.`);
@@ -1363,7 +1889,11 @@ async function get(configKey, sessionId, options = {}) {
 const variantsRaw = config.variants;
 const configVersion = config.version || targetVersion;
 const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
-log2(
+log2(
+`Config found! Version: ${configVersion}, Variants: ${JSON.stringify(
+variants
+)}`
+);
 const hashBytes = createHash("md5").update(sessionId).digest();
 const hashVal = hashBytes.readUInt32BE(0) % 1e6;
 log2(`Session hash: ${hashVal} (out of 1,000,000)`);
@@ -1372,7 +1902,9 @@ async function get(configKey, sessionId, options = {}) {
 for (const v of variants) {
 const oldCumulative = cumulative;
 cumulative += v.weight * 1e4;
-log2(
+log2(
+`Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`
+);
 if (hashVal < cumulative) {
 assignedModel = v.model;
 break;
@@ -1385,7 +1917,9 @@ async function get(configKey, sessionId, options = {}) {
 throw e;
 }
 if (fallback) {
-console.warn(
+console.warn(
+`[Fallom WARNING] Error getting model for '${configKey}': ${e}. Using fallback: ${fallback}`
+);
 return returnWithTrace(configKey, sessionId, fallback, 0);
 }
 throw e;
@@ -1428,23 +1962,260 @@ async function recordSession(configKey, version, sessionId, model) {

 // src/init.ts
 async function init4(options = {}) {
-const
+const tracesUrl = options.tracesUrl || process.env.FALLOM_TRACES_URL || "https://traces.fallom.com";
+const configsUrl = options.configsUrl || process.env.FALLOM_CONFIGS_URL || "https://configs.fallom.com";
+const promptsUrl = options.promptsUrl || process.env.FALLOM_PROMPTS_URL || "https://prompts.fallom.com";
 await init2({
 apiKey: options.apiKey,
-baseUrl:
+baseUrl: tracesUrl,
 captureContent: options.captureContent,
 debug: options.debug
 });
 init3({
 apiKey: options.apiKey,
-baseUrl:
+baseUrl: configsUrl
 });
 init({
 apiKey: options.apiKey,
-baseUrl:
+baseUrl: promptsUrl
 });
 }

+// src/mastra.ts
+import { ExportResultCode } from "@opentelemetry/core";
+var promptContext = {};
+function setMastraPrompt(promptKey, version) {
+promptContext = {
+promptKey,
+promptVersion: version,
+promptAbTestKey: void 0,
+promptVariantIndex: void 0
+};
+}
+function setMastraPromptAB(abTestKey, variantIndex) {
+promptContext = {
+promptKey: void 0,
+promptVersion: void 0,
+promptAbTestKey: abTestKey,
+promptVariantIndex: variantIndex
+};
+}
+function clearMastraPrompt() {
+promptContext = {};
+}
+var FallomExporter = class {
+constructor(options = {}) {
+this.pendingExports = [];
+this.apiKey = options.apiKey ?? process.env.FALLOM_API_KEY ?? "";
+this.baseUrl = options.baseUrl ?? "https://traces.fallom.com";
+this.debug = options.debug ?? false;
+console.log("[FallomExporter] Constructor called, debug:", this.debug);
+console.log("[FallomExporter] API key present:", !!this.apiKey);
+console.log("[FallomExporter] Base URL:", this.baseUrl);
+if (!this.apiKey) {
+console.warn(
+"[FallomExporter] No API key provided. Set FALLOM_API_KEY env var or pass apiKey option."
+);
+}
+}
+log(...args) {
+if (this.debug) {
+console.log("[FallomExporter]", ...args);
+}
+}
+/**
+* Export spans to Fallom.
+*/
+export(spans, resultCallback) {
+if (spans.length === 0) {
+resultCallback({ code: ExportResultCode.SUCCESS });
+return;
+}
+this.log(`Exporting ${spans.length} spans...`);
+if (this.debug) {
+for (const span2 of spans) {
+this.log(` - ${span2.name}`, {
+attributes: Object.fromEntries(
+Object.entries(span2.attributes).filter(
+([k]) => k.startsWith("gen_ai") || k.startsWith("llm")
+)
+)
+});
+}
+}
+const exportPromise = this.sendSpans(spans).then(() => {
+this.log("Export successful");
+resultCallback({ code: ExportResultCode.SUCCESS });
+}).catch((error) => {
+console.error("[FallomExporter] Export failed:", error);
+resultCallback({
+code: ExportResultCode.FAILED,
+error: error instanceof Error ? error : new Error(String(error))
+});
+});
+this.pendingExports.push(exportPromise);
+}
+/**
+* Shutdown the exporter, waiting for pending exports.
+*/
+async shutdown() {
+await Promise.all(this.pendingExports);
+this.pendingExports = [];
+}
+/**
+* Force flush pending exports.
+*/
+async forceFlush() {
+await Promise.all(this.pendingExports);
+}
+/**
+* Send spans to Fallom's OTLP endpoint.
+*/
+async sendSpans(spans) {
+const session = getSession();
+const resourceSpans = this.spansToOtlpJson(spans);
+const headers = {
+"Content-Type": "application/json",
+Authorization: `Bearer ${this.apiKey}`
+};
+if (session?.configKey) {
+headers["X-Fallom-Config-Key"] = session.configKey;
+}
+if (session?.sessionId) {
+headers["X-Fallom-Session-Id"] = session.sessionId;
+}
+if (session?.customerId) {
+headers["X-Fallom-Customer-Id"] = session.customerId;
+}
+if (promptContext.promptKey) {
+headers["X-Fallom-Prompt-Key"] = promptContext.promptKey;
+}
+if (promptContext.promptVersion !== void 0) {
+headers["X-Fallom-Prompt-Version"] = String(promptContext.promptVersion);
+}
+if (promptContext.promptAbTestKey) {
+headers["X-Fallom-Prompt-AB-Test"] = promptContext.promptAbTestKey;
+}
+if (promptContext.promptVariantIndex !== void 0) {
+headers["X-Fallom-Prompt-Variant"] = String(
+promptContext.promptVariantIndex
+);
+}
+const endpoint = `${this.baseUrl}/v1/traces`;
+this.log("Sending to", endpoint);
+this.log("Headers:", {
+...headers,
+Authorization: "Bearer ***"
+});
+const response = await fetch(endpoint, {
+method: "POST",
+headers,
+body: JSON.stringify({ resourceSpans })
+});
+if (!response.ok) {
+const text = await response.text();
+throw new Error(`Failed to export: ${response.status} ${text}`);
+}
+}
+/**
+* Convert OpenTelemetry spans to OTLP JSON format.
+*/
+spansToOtlpJson(spans) {
+const resourceMap = /* @__PURE__ */ new Map();
+for (const span2 of spans) {
+const resourceKey = JSON.stringify(span2.resource.attributes);
+if (!resourceMap.has(resourceKey)) {
+resourceMap.set(resourceKey, []);
+}
+resourceMap.get(resourceKey).push(span2);
+}
+const resourceSpans = [];
+for (const [_resourceKey, resourceSpanList] of resourceMap) {
+const firstSpan = resourceSpanList[0];
+resourceSpans.push({
+resource: {
+attributes: this.attributesToOtlp(firstSpan.resource.attributes)
+},
+scopeSpans: [
+{
+scope: {
+name: firstSpan.instrumentationLibrary.name,
+version: firstSpan.instrumentationLibrary.version
+},
+spans: resourceSpanList.map((span2) => this.spanToOtlp(span2))
+}
+]
+});
+}
+return resourceSpans;
+}
+/**
+* Convert a single span to OTLP format.
+*/
+spanToOtlp(span2) {
+return {
+traceId: span2.spanContext().traceId,
+spanId: span2.spanContext().spanId,
+parentSpanId: span2.parentSpanId,
+name: span2.name,
+kind: span2.kind,
+startTimeUnixNano: this.hrTimeToNanos(span2.startTime),
+endTimeUnixNano: this.hrTimeToNanos(span2.endTime),
+attributes: this.attributesToOtlp(span2.attributes),
+status: {
+code: span2.status.code,
+message: span2.status.message
+},
+events: span2.events.map((event) => ({
+timeUnixNano: this.hrTimeToNanos(event.time),
+name: event.name,
+attributes: this.attributesToOtlp(event.attributes || {})
+}))
+};
+}
+/**
+* Convert attributes to OTLP format.
+*/
+attributesToOtlp(attrs) {
+return Object.entries(attrs).map(([key, value]) => ({
+key,
+value: this.valueToOtlp(value)
+}));
+}
+/**
+* Convert a value to OTLP AnyValue format.
+*/
+valueToOtlp(value) {
+if (typeof value === "string") {
+return { stringValue: value };
+}
+if (typeof value === "number") {
+if (Number.isInteger(value)) {
+return { intValue: value };
+}
+return { doubleValue: value };
+}
+if (typeof value === "boolean") {
+return { boolValue: value };
+}
+if (Array.isArray(value)) {
+return {
+arrayValue: {
+values: value.map((v) => this.valueToOtlp(v))
+}
+};
+}
+return { stringValue: String(value) };
+}
+/**
+* Convert HrTime to nanoseconds string.
+*/
+hrTimeToNanos(hrTime) {
+const [seconds, nanos] = hrTime;
+return String(BigInt(seconds) * BigInt(1e9) + BigInt(nanos));
+}
+};
+
 // src/index.ts
 var index_default = {
 init: init4,
@@ -1453,9 +2224,13 @@ var index_default = {
 prompts: prompts_exports
 };
 export {
+FallomExporter,
+clearMastraPrompt,
 index_default as default,
 init4 as init,
 models_exports as models,
 prompts_exports as prompts,
+setMastraPrompt,
+setMastraPromptAB,
 trace_exports as trace
 };
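
For the Mastra integration added in src/mastra.ts above, FallomExporter is a custom OpenTelemetry SpanExporter that posts OTLP JSON to the traces endpoint, while setMastraPrompt / setMastraPromptAB / clearMastraPrompt attach prompt metadata as request headers. A rough wiring sketch; the Mastra-side telemetry options shown here (a "custom" export type taking an exporter instance) are assumptions, not part of this diff:

  // Sketch only: the Mastra configuration shape is assumed, not shown in this diff.
  import { Mastra } from "@mastra/core";
  import { FallomExporter, setMastraPrompt } from "@fallom/trace";

  const mastra = new Mastra({
    telemetry: {
      serviceName: "my-agent",
      enabled: true,
      export: { type: "custom", exporter: new FallomExporter({ debug: true }) },
    },
  });

  // Tag subsequently exported spans with the prompt that produced them.
  setMastraPrompt("support-bot-system-prompt", 3);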