@fallom/trace 0.1.6 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/chunk-6MSTRIK4.mjs +255 -0
- package/dist/chunk-H2EACSBT.mjs +255 -0
- package/dist/index.d.mts +189 -6
- package/dist/index.d.ts +189 -6
- package/dist/index.js +853 -67
- package/dist/index.mjs +844 -69
- package/dist/prompts-VAN5E3L4.mjs +14 -0
- package/dist/prompts-ZSLS4DHO.mjs +14 -0
- package/package.json +3 -1
package/dist/index.js
CHANGED
@@ -36,7 +36,7 @@ function log(msg) {
 }
 function init(options = {}) {
   apiKey = options.apiKey || process.env.FALLOM_API_KEY || null;
-  baseUrl = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+  baseUrl = options.baseUrl || process.env.FALLOM_PROMPTS_URL || process.env.FALLOM_BASE_URL || "https://prompts.fallom.com";
   initialized = true;
   if (!apiKey) {
     return;
@@ -187,6 +187,13 @@ async function getAB(abTestKey, sessionId, options = {}) {
     throw new Error(`Prompt A/B test '${abTestKey}' has no current version.`);
   }
   const { variants } = versionData;
+  log(`A/B test '${abTestKey}' has ${variants?.length ?? 0} variants`);
+  log(`Version data: ${JSON.stringify(versionData, null, 2)}`);
+  if (!variants || variants.length === 0) {
+    throw new Error(
+      `Prompt A/B test '${abTestKey}' has no variants configured.`
+    );
+  }
   const hashBytes = (0, import_crypto.createHash)("md5").update(sessionId).digest();
   const hashVal = hashBytes.readUInt32BE(0) % 1e6;
   let cumulative = 0;
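
The new guard above rejects A/B tests with an empty variant list before the session is bucketed. The bucketing that follows is deterministic per session: the MD5 hash of the session id is reduced to a number in [0, 1e6) and compared against cumulative variant weights. A standalone sketch of that logic (weights assumed to be percentages summing to 100):

  // Illustrative sketch, not part of the diff.
  import { createHash } from "crypto";
  function pickVariantIndex(sessionId: string, variants: { weight: number }[]): number {
    const hashVal = createHash("md5").update(sessionId).digest().readUInt32BE(0) % 1e6;
    let cumulative = 0;
    for (let i = 0; i < variants.length; i++) {
      cumulative += variants[i].weight * 1e4; // percentage scaled onto the 0..1e6 range
      if (hashVal < cumulative) return i;     // the same session id always maps to the same variant
    }
    return variants.length - 1;               // guard against rounding gaps
  }
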
@@ -248,7 +255,7 @@ var init_prompts = __esm({
     "use strict";
     import_crypto = require("crypto");
     apiKey = null;
-    baseUrl = "https://
+    baseUrl = "https://prompts.fallom.com";
     initialized = false;
     syncInterval = null;
     debugMode = false;
@@ -262,10 +269,14 @@ var init_prompts = __esm({
 // src/index.ts
 var index_exports = {};
 __export(index_exports, {
+  FallomExporter: () => FallomExporter,
+  clearMastraPrompt: () => clearMastraPrompt,
   default: () => index_default,
   init: () => init4,
   models: () => models_exports,
   prompts: () => prompts_exports,
+  setMastraPrompt: () => setMastraPrompt,
+  setMastraPromptAB: () => setMastraPromptAB,
   trace: () => trace_exports
 });
 module.exports = __toCommonJS(index_exports);
@@ -280,8 +291,10 @@ __export(trace_exports, {
   setSession: () => setSession,
   shutdown: () => shutdown,
   span: () => span,
+  wrapAISDK: () => wrapAISDK,
   wrapAnthropic: () => wrapAnthropic,
   wrapGoogleAI: () => wrapGoogleAI,
+  wrapMastraAgent: () => wrapMastraAgent,
   wrapOpenAI: () => wrapOpenAI
 });
 var import_async_hooks = require("async_hooks");
@@ -291,7 +304,7 @@ var import_exporter_trace_otlp_http = require("@opentelemetry/exporter-trace-otl
 // node_modules/@opentelemetry/resources/build/esm/Resource.js
 var import_api = require("@opentelemetry/api");
 
-// node_modules/@opentelemetry/semantic-conventions/build/esm/resource/SemanticResourceAttributes.js
+// node_modules/@opentelemetry/resources/node_modules/@opentelemetry/semantic-conventions/build/esm/resource/SemanticResourceAttributes.js
 var SemanticResourceAttributes = {
   /**
    * Name of the cloud provider.
@@ -671,35 +684,9 @@ var SemanticResourceAttributes = {
    */
   WEBENGINE_DESCRIPTION: "webengine.description"
 };
-var TelemetrySdkLanguageValues = {
-  /** cpp. */
-  CPP: "cpp",
-  /** dotnet. */
-  DOTNET: "dotnet",
-  /** erlang. */
-  ERLANG: "erlang",
-  /** go. */
-  GO: "go",
-  /** java. */
-  JAVA: "java",
-  /** nodejs. */
-  NODEJS: "nodejs",
-  /** php. */
-  PHP: "php",
-  /** python. */
-  PYTHON: "python",
-  /** ruby. */
-  RUBY: "ruby",
-  /** webjs. */
-  WEBJS: "webjs"
-};
-
-// node_modules/@opentelemetry/core/build/esm/version.js
-var VERSION = "1.19.0";
 
-// node_modules/@opentelemetry/
-var
-var SDK_INFO = (_a = {}, _a[SemanticResourceAttributes.TELEMETRY_SDK_NAME] = "opentelemetry", _a[SemanticResourceAttributes.PROCESS_RUNTIME_NAME] = "node", _a[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] = TelemetrySdkLanguageValues.NODEJS, _a[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] = VERSION, _a);
+// node_modules/@opentelemetry/resources/build/esm/Resource.js
+var import_core = require("@opentelemetry/core");
 
 // node_modules/@opentelemetry/resources/build/esm/platform/node/default-service-name.js
 function defaultServiceName() {
@@ -836,10 +823,10 @@ var Resource = (
   (function() {
     function Resource2(attributes, asyncAttributesPromise) {
       var _this = this;
-      var
+      var _a;
       this._attributes = attributes;
       this.asyncAttributesPending = asyncAttributesPromise != null;
-      this._syncAttributes = (
+      this._syncAttributes = (_a = this._attributes) !== null && _a !== void 0 ? _a : {};
       this._asyncAttributesPromise = asyncAttributesPromise === null || asyncAttributesPromise === void 0 ? void 0 : asyncAttributesPromise.then(function(asyncAttributes) {
         _this._attributes = Object.assign({}, _this._attributes, asyncAttributes);
         _this.asyncAttributesPending = false;
@@ -854,30 +841,30 @@ var Resource = (
       return Resource2.EMPTY;
     };
     Resource2.default = function() {
-      var
-      return new Resource2((
+      var _a;
+      return new Resource2((_a = {}, _a[SemanticResourceAttributes.SERVICE_NAME] = defaultServiceName(), _a[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE] = import_core.SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE], _a[SemanticResourceAttributes.TELEMETRY_SDK_NAME] = import_core.SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_NAME], _a[SemanticResourceAttributes.TELEMETRY_SDK_VERSION] = import_core.SDK_INFO[SemanticResourceAttributes.TELEMETRY_SDK_VERSION], _a));
     };
     Object.defineProperty(Resource2.prototype, "attributes", {
       get: function() {
-        var
+        var _a;
         if (this.asyncAttributesPending) {
           import_api.diag.error("Accessing resource attributes before async attributes settled");
         }
-        return (
+        return (_a = this._attributes) !== null && _a !== void 0 ? _a : {};
       },
       enumerable: false,
       configurable: true
     });
     Resource2.prototype.waitForAsyncAttributes = function() {
       return __awaiter(this, void 0, void 0, function() {
-        return __generator(this, function(
-        switch (
+        return __generator(this, function(_a) {
+          switch (_a.label) {
             case 0:
               if (!this.asyncAttributesPending) return [3, 2];
               return [4, this._asyncAttributesPromise];
             case 1:
-
-
+              _a.sent();
+              _a.label = 2;
             case 2:
               return [
                 2
@@ -889,19 +876,19 @@ var Resource = (
     };
     Resource2.prototype.merge = function(other) {
       var _this = this;
-      var
+      var _a;
       if (!other)
         return this;
-      var mergedSyncAttributes = __assign(__assign({}, this._syncAttributes), (
+      var mergedSyncAttributes = __assign(__assign({}, this._syncAttributes), (_a = other._syncAttributes) !== null && _a !== void 0 ? _a : other.attributes);
       if (!this._asyncAttributesPromise && !other._asyncAttributesPromise) {
         return new Resource2(mergedSyncAttributes);
       }
       var mergedAttributesPromise = Promise.all([
         this._asyncAttributesPromise,
         other._asyncAttributesPromise
-      ]).then(function(
+      ]).then(function(_a2) {
         var _b;
-        var _c = __read(
+        var _c = __read(_a2, 2), thisAsyncAttributes = _c[0], otherAsyncAttributes = _c[1];
         return __assign(__assign(__assign(__assign({}, _this._syncAttributes), thisAsyncAttributes), (_b = other._syncAttributes) !== null && _b !== void 0 ? _b : other.attributes), otherAsyncAttributes);
       });
       return new Resource2(mergedSyncAttributes, mergedAttributesPromise);
@@ -915,7 +902,7 @@ var Resource = (
 var sessionStorage = new import_async_hooks.AsyncLocalStorage();
 var fallbackSession = null;
 var apiKey2 = null;
-var baseUrl2 = "https://
+var baseUrl2 = "https://traces.fallom.com";
 var initialized2 = false;
 var captureContent = true;
 var debugMode2 = false;
@@ -958,7 +945,7 @@ async function init2(options = {}) {
   debugMode2 = options.debug ?? false;
   log2("\u{1F680} Initializing Fallom tracing...");
   apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
-  baseUrl2 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+  baseUrl2 = options.baseUrl || process.env.FALLOM_TRACES_URL || process.env.FALLOM_BASE_URL || "https://traces.fallom.com";
   const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
   if (envCapture === "false" || envCapture === "0" || envCapture === "no") {
     captureContent = false;
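
With this change each subsystem resolves its own endpoint: an explicit baseUrl option wins, then the service-specific variable (FALLOM_TRACES_URL here, FALLOM_PROMPTS_URL and FALLOM_CONFIGS_URL in the other modules), then the legacy FALLOM_BASE_URL, then the hosted default. A sketch of overriding all three (hostnames are placeholders):

  // Illustrative sketch, not part of the diff.
  process.env.FALLOM_TRACES_URL = "https://traces.example.internal";
  process.env.FALLOM_CONFIGS_URL = "https://configs.example.internal";
  process.env.FALLOM_PROMPTS_URL = "https://prompts.example.internal";
  // FALLOM_BASE_URL remains honored as a lower-priority fallback for all three modules.
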
@@ -1130,12 +1117,12 @@ function messagesToOtelAttributes(messages, completion, model, responseId) {
   if (messages) {
     messages.forEach((msg, i) => {
       attrs[`gen_ai.prompt.${i}.role`] = msg.role;
-      attrs[`gen_ai.prompt.${i}.content`] = msg.content;
+      attrs[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
     });
   }
   if (completion) {
     attrs["gen_ai.completion.0.role"] = completion.role;
-    attrs["gen_ai.completion.0.content"] = completion.content;
+    attrs["gen_ai.completion.0.content"] = typeof completion.content === "string" ? completion.content : JSON.stringify(completion.content);
     if (completion.tool_calls) {
       attrs["gen_ai.completion.0.tool_calls"] = JSON.stringify(
         completion.tool_calls
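
Prompt and completion content is now serialized whenever it is not a plain string, so multi-part content (for example text-plus-image arrays) lands in the gen_ai.* attributes as JSON instead of "[object Object]". A minimal sketch of the effect:

  // Illustrative sketch, not part of the diff.
  const msg = { role: "user", content: [{ type: "text", text: "hi" }] };
  const attr = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
  // attr === '[{"type":"text","text":"hi"}]'
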
@@ -1152,10 +1139,13 @@ function generateHexId(length) {
 var traceContextStorage = new import_async_hooks.AsyncLocalStorage();
 var fallbackTraceContext = null;
 async function sendTrace(trace) {
+  const url = `${baseUrl2}/v1/traces`;
+  log2("\u{1F4E4} Sending trace to:", url);
+  log2(" Session:", trace.session_id, "Config:", trace.config_key);
   try {
     const controller = new AbortController();
     const timeoutId = setTimeout(() => controller.abort(), 5e3);
-    await fetch(
+    const response = await fetch(url, {
       method: "POST",
       headers: {
         Authorization: `Bearer ${apiKey2}`,
@@ -1165,8 +1155,14 @@ async function sendTrace(trace) {
       signal: controller.signal
     });
     clearTimeout(timeoutId);
-
-
+    if (!response.ok) {
+      const text = await response.text();
+      log2("\u274C Trace send failed:", response.status, text);
+    } else {
+      log2("\u2705 Trace sent:", trace.name, trace.model);
+    }
+  } catch (err) {
+    log2("\u274C Trace send error:", err instanceof Error ? err.message : err);
   }
 }
 function wrapOpenAI(client) {
@@ -1468,6 +1464,535 @@ function wrapGoogleAI(model) {
   };
   return model;
 }
+function wrapAISDK(ai) {
+  const aiModule = ai;
+  return {
+    generateText: createGenerateTextWrapper(aiModule),
+    streamText: createStreamTextWrapper(aiModule),
+    generateObject: aiModule.generateObject ? createGenerateObjectWrapper(aiModule) : void 0,
+    streamObject: aiModule.streamObject ? createStreamObjectWrapper(aiModule) : void 0
+  };
+}
+function createGenerateTextWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    if (!ctx || !initialized2) {
+      return aiModule.generateText(...args);
+    }
+    let promptCtx = null;
+    try {
+      const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
+      promptCtx = getPromptContext2();
+    } catch {
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    try {
+      const result = await aiModule.generateText(...args);
+      const endTime = Date.now();
+      const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelId;
+        attributes["gen_ai.response.model"] = modelId;
+        if (params?.prompt) {
+          attributes["gen_ai.prompt.0.role"] = "user";
+          attributes["gen_ai.prompt.0.content"] = params.prompt;
+        }
+        if (params?.messages) {
+          params.messages.forEach((msg, i) => {
+            attributes[`gen_ai.prompt.${i}.role`] = msg.role;
+            attributes[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+          });
+        }
+        if (result?.text) {
+          attributes["gen_ai.completion.0.role"] = "assistant";
+          attributes["gen_ai.completion.0.content"] = result.text;
+        }
+        if (result?.response?.id) {
+          attributes["gen_ai.response.id"] = result.response.id;
+        }
+      }
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateText",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "OK",
+        prompt_tokens: result?.usage?.promptTokens,
+        completion_tokens: result?.usage?.completionTokens,
+        total_tokens: result?.usage?.totalTokens,
+        attributes: captureContent ? attributes : void 0,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      return result;
+    } catch (error) {
+      const endTime = Date.now();
+      const modelId = params?.model?.modelId || String(params?.model || "unknown");
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateText",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "ERROR",
+        error_message: error?.message,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      throw error;
+    }
+  };
+}
+function createStreamTextWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    const result = await aiModule.streamText(...args);
+    if (!ctx || !initialized2) {
+      return result;
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    let firstTokenTime = null;
+    const modelId = params?.model?.modelId || String(params?.model || "unknown");
+    let promptCtx = null;
+    try {
+      const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
+      promptCtx = getPromptContext2();
+    } catch {
+    }
+    if (result?.usage) {
+      result.usage.then((usage) => {
+        const endTime = Date.now();
+        log2("\u{1F4CA} streamText usage:", JSON.stringify(usage, null, 2));
+        const attributes = {};
+        if (captureContent) {
+          attributes["gen_ai.request.model"] = modelId;
+          if (params?.prompt) {
+            attributes["gen_ai.prompt.0.role"] = "user";
+            attributes["gen_ai.prompt.0.content"] = params.prompt;
+          }
+        }
+        if (firstTokenTime) {
+          attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+        }
+        const tracePayload = {
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamText",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "OK",
+          prompt_tokens: usage?.promptTokens,
+          completion_tokens: usage?.completionTokens,
+          total_tokens: usage?.totalTokens,
+          time_to_first_token_ms: firstTokenTime ? firstTokenTime - startTime : void 0,
+          attributes: captureContent ? attributes : void 0,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        };
+        sendTrace(tracePayload).catch(() => {
+        });
+      }).catch((error) => {
+        const endTime = Date.now();
+        log2("\u274C streamText error:", error?.message);
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamText",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "ERROR",
+          error_message: error?.message,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      });
+    }
+    if (result?.textStream) {
+      const originalTextStream = result.textStream;
+      const wrappedTextStream = (async function* () {
+        for await (const chunk of originalTextStream) {
+          if (!firstTokenTime) {
+            firstTokenTime = Date.now();
+            log2("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+          }
+          yield chunk;
+        }
+      })();
+      return new Proxy(result, {
+        get(target, prop) {
+          if (prop === "textStream") {
+            return wrappedTextStream;
+          }
+          return target[prop];
+        }
+      });
+    }
+    return result;
+  };
+}
+function createGenerateObjectWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    if (!ctx || !initialized2) {
+      return aiModule.generateObject(...args);
+    }
+    let promptCtx = null;
+    try {
+      const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
+      promptCtx = getPromptContext2();
+    } catch {
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    try {
+      const result = await aiModule.generateObject(...args);
+      const endTime = Date.now();
+      const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelId;
+        attributes["gen_ai.response.model"] = modelId;
+        if (result?.object) {
+          attributes["gen_ai.completion.0.role"] = "assistant";
+          attributes["gen_ai.completion.0.content"] = JSON.stringify(
+            result.object
+          );
+        }
+      }
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateObject",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "OK",
+        prompt_tokens: result?.usage?.promptTokens,
+        completion_tokens: result?.usage?.completionTokens,
+        total_tokens: result?.usage?.totalTokens,
+        attributes: captureContent ? attributes : void 0,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      return result;
+    } catch (error) {
+      const endTime = Date.now();
+      const modelId = params?.model?.modelId || String(params?.model || "unknown");
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateObject",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "ERROR",
+        error_message: error?.message,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      throw error;
+    }
+  };
+}
+function createStreamObjectWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    const result = await aiModule.streamObject(...args);
+    log2("\u{1F50D} streamObject result keys:", Object.keys(result || {}));
+    if (!ctx || !initialized2) {
+      return result;
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    let firstTokenTime = null;
+    const modelId = params?.model?.modelId || String(params?.model || "unknown");
+    let promptCtx = null;
+    try {
+      const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
+      promptCtx = getPromptContext2();
+    } catch {
+    }
+    if (result?.usage) {
+      result.usage.then((usage) => {
+        const endTime = Date.now();
+        log2("\u{1F4CA} streamObject usage:", JSON.stringify(usage, null, 2));
+        const attributes = {};
+        if (captureContent) {
+          attributes["gen_ai.request.model"] = modelId;
+        }
+        if (firstTokenTime) {
+          attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+        }
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamObject",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "OK",
+          prompt_tokens: usage?.promptTokens,
+          completion_tokens: usage?.completionTokens,
+          total_tokens: usage?.totalTokens,
+          attributes: captureContent ? attributes : void 0,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      }).catch((error) => {
+        const endTime = Date.now();
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamObject",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "ERROR",
+          error_message: error?.message,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      });
+    }
+    if (result?.partialObjectStream) {
+      const originalStream = result.partialObjectStream;
+      const wrappedStream = (async function* () {
+        for await (const chunk of originalStream) {
+          if (!firstTokenTime) {
+            firstTokenTime = Date.now();
+            log2("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+          }
+          yield chunk;
+        }
+      })();
+      return new Proxy(result, {
+        get(target, prop) {
+          if (prop === "partialObjectStream") {
+            return wrappedStream;
+          }
+          return target[prop];
+        }
+      });
+    }
+    return result;
+  };
+}
+function wrapMastraAgent(agent) {
+  const originalGenerate = agent.generate.bind(agent);
+  const agentName = agent.name || "MastraAgent";
+  agent.generate = async function(...args) {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    if (!ctx || !initialized2) {
+      return originalGenerate(...args);
+    }
+    let promptCtx = null;
+    try {
+      const { getPromptContext: getPromptContext2 } = await Promise.resolve().then(() => (init_prompts(), prompts_exports));
+      promptCtx = getPromptContext2();
+    } catch {
+    }
+    const traceId = generateHexId(32);
+    const spanId = generateHexId(16);
+    const startTime = Date.now();
+    const messages = args[0] || [];
+    try {
+      const result = await originalGenerate(...args);
+      const endTime = Date.now();
+      const model = result?.model?.modelId || "unknown";
+      const toolCalls = [];
+      if (result?.steps?.length) {
+        for (const step of result.steps) {
+          if (step.toolCalls?.length) {
+            for (let i = 0; i < step.toolCalls.length; i++) {
+              const tc = step.toolCalls[i];
+              const tr = step.toolResults?.[i];
+              toolCalls.push({
+                name: tc.toolName,
+                arguments: tc.args,
+                result: tr?.result
+              });
+            }
+          }
+        }
+      }
+      const attributes = {
+        "gen_ai.system": "Mastra",
+        "gen_ai.request.model": model,
+        "gen_ai.response.model": model,
+        "fallom.source": "mastra-agent",
+        "llm.request.type": "chat"
+      };
+      if (Array.isArray(messages)) {
+        messages.forEach((msg, i) => {
+          attributes[`gen_ai.prompt.${i}.role`] = msg.role || "user";
+          attributes[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+        });
+      }
+      if (result?.text) {
+        attributes["gen_ai.completion.0.role"] = "assistant";
+        attributes["gen_ai.completion.0.content"] = result.text;
+        attributes["gen_ai.completion.0.finish_reason"] = "stop";
+      }
+      if (toolCalls.length > 0) {
+        attributes["fallom.tool_calls"] = JSON.stringify(toolCalls);
+        toolCalls.forEach((tc, i) => {
+          attributes[`gen_ai.completion.0.tool_calls.${i}.name`] = tc.name;
+          attributes[`gen_ai.completion.0.tool_calls.${i}.type`] = "function";
+          attributes[`gen_ai.completion.0.tool_calls.${i}.arguments`] = JSON.stringify(tc.arguments);
+        });
+      }
+      if (result?.usage) {
+        attributes["gen_ai.usage.prompt_tokens"] = result.usage.promptTokens;
+        attributes["gen_ai.usage.completion_tokens"] = result.usage.completionTokens;
+        attributes["llm.usage.total_tokens"] = result.usage.totalTokens;
+      }
+      const traceData = {
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        name: `mastra.${agentName}.generate`,
+        kind: "client",
+        model,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "OK",
+        prompt_tokens: result?.usage?.promptTokens,
+        completion_tokens: result?.usage?.completionTokens,
+        total_tokens: result?.usage?.totalTokens,
+        attributes,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      };
+      sendTrace(traceData).catch(() => {
+      });
+      return result;
+    } catch (error) {
+      const endTime = Date.now();
+      const traceData = {
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        name: `mastra.${agentName}.generate`,
+        kind: "client",
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "ERROR",
+        error_message: error instanceof Error ? error.message : String(error),
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      };
+      sendTrace(traceData).catch(() => {
+      });
+      throw error;
+    }
+  };
+  return agent;
+}
 
 // src/models.ts
 var models_exports = {};
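
The block above adds wrapAISDK, which proxies the Vercel AI SDK's generateText/streamText/generateObject/streamObject and reports a span per call, and wrapMastraAgent, which patches a Mastra agent's generate method and captures tool calls and token usage. A usage sketch, assuming the ai package and an OpenAI provider are installed; the setSession field names are inferred from the wrapper code and may differ from the actual API:

  // Illustrative sketch, not part of the diff.
  import * as ai from "ai";
  import { openai } from "@ai-sdk/openai";
  import * as fallom from "@fallom/trace";

  await fallom.init({ apiKey: process.env.FALLOM_API_KEY });
  fallom.trace.setSession({ configKey: "my-config", sessionId: "session-123" }); // assumed shape

  const { generateText } = fallom.trace.wrapAISDK(ai);
  const { text } = await generateText({ model: openai("gpt-4o-mini"), prompt: "Say hello" });
  // The call is forwarded to ai.generateText; a span with tokens, latency and
  // (optionally) prompt/completion content is sent to Fallom in the background.
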
@@ -1477,7 +2002,7 @@ __export(models_exports, {
 });
 var import_crypto2 = require("crypto");
 var apiKey3 = null;
-var baseUrl3 = "https://
+var baseUrl3 = "https://configs.fallom.com";
 var initialized3 = false;
 var syncInterval2 = null;
 var debugMode3 = false;
@@ -1491,7 +2016,7 @@ function log3(msg) {
 }
 function init3(options = {}) {
   apiKey3 = options.apiKey || process.env.FALLOM_API_KEY || null;
-  baseUrl3 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+  baseUrl3 = options.baseUrl || process.env.FALLOM_CONFIGS_URL || process.env.FALLOM_BASE_URL || "https://configs.fallom.com";
   initialized3 = true;
   if (!apiKey3) {
     return;
@@ -1580,20 +2105,28 @@ async function get2(configKey, sessionId, options = {}) {
   const { version, fallback, debug = false } = options;
   debugMode3 = debug;
   ensureInit2();
-  log3(
+  log3(
+    `get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`
+  );
   try {
     let configData = configCache.get(configKey);
-    log3(
+    log3(
+      `Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`
+    );
     if (!configData) {
       log3("Not in cache, fetching...");
       await fetchConfigs(SYNC_TIMEOUT2);
       configData = configCache.get(configKey);
-      log3(
+      log3(
+        `After fetch, cache lookup: ${configData ? "found" : "still not found"}`
+      );
     }
     if (!configData) {
       log3(`Config not found, using fallback: ${fallback}`);
       if (fallback) {
-        console.warn(
+        console.warn(
+          `[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`
+        );
         return returnWithTrace(configKey, sessionId, fallback, 0);
       }
       throw new Error(
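
Each fallback path in get2 now emits a descriptive [Fallom WARNING] instead of a bare console.warn call. Assuming the internal get2 is exposed as models.get (the export list sits outside these hunks), a call that exercises the fallback looks roughly like:

  // Illustrative sketch, not part of the diff; API shape assumed.
  const assignment = await fallom.models.get("checkout-llm", sessionId, {
    fallback: "gpt-4o-mini", // used, with a warning, when the config or version is missing
    debug: true,             // surfaces the verbose log3 output added in this version
  });
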
@@ -1609,7 +2142,9 @@ async function get2(configKey, sessionId, options = {}) {
     }
     if (!config) {
       if (fallback) {
-        console.warn(
+        console.warn(
+          `[Fallom WARNING] Config '${configKey}' version ${version} not found, using fallback: ${fallback}`
+        );
         return returnWithTrace(configKey, sessionId, fallback, 0);
       }
       throw new Error(`Config '${configKey}' version ${version} not found.`);
@@ -1620,7 +2155,9 @@ async function get2(configKey, sessionId, options = {}) {
       config = configData.versions.get(targetVersion);
       if (!config) {
         if (fallback) {
-          console.warn(
+          console.warn(
+            `[Fallom WARNING] Config '${configKey}' has no cached version, using fallback: ${fallback}`
+          );
           return returnWithTrace(configKey, sessionId, fallback, 0);
         }
         throw new Error(`Config '${configKey}' has no cached version.`);
@@ -1629,7 +2166,11 @@ async function get2(configKey, sessionId, options = {}) {
     const variantsRaw = config.variants;
     const configVersion = config.version || targetVersion;
     const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
-    log3(
+    log3(
+      `Config found! Version: ${configVersion}, Variants: ${JSON.stringify(
+        variants
+      )}`
+    );
     const hashBytes = (0, import_crypto2.createHash)("md5").update(sessionId).digest();
     const hashVal = hashBytes.readUInt32BE(0) % 1e6;
     log3(`Session hash: ${hashVal} (out of 1,000,000)`);
@@ -1638,7 +2179,9 @@ async function get2(configKey, sessionId, options = {}) {
     for (const v of variants) {
       const oldCumulative = cumulative;
       cumulative += v.weight * 1e4;
-      log3(
+      log3(
+        `Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`
+      );
       if (hashVal < cumulative) {
         assignedModel = v.model;
         break;
@@ -1651,7 +2194,9 @@ async function get2(configKey, sessionId, options = {}) {
       throw e;
     }
     if (fallback) {
-      console.warn(
+      console.warn(
+        `[Fallom WARNING] Error getting model for '${configKey}': ${e}. Using fallback: ${fallback}`
+      );
       return returnWithTrace(configKey, sessionId, fallback, 0);
     }
     throw e;
@@ -1698,23 +2243,260 @@ init_prompts();
 // src/init.ts
 init_prompts();
 async function init4(options = {}) {
-  const
+  const tracesUrl = options.tracesUrl || process.env.FALLOM_TRACES_URL || "https://traces.fallom.com";
+  const configsUrl = options.configsUrl || process.env.FALLOM_CONFIGS_URL || "https://configs.fallom.com";
+  const promptsUrl = options.promptsUrl || process.env.FALLOM_PROMPTS_URL || "https://prompts.fallom.com";
   await init2({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: tracesUrl,
     captureContent: options.captureContent,
     debug: options.debug
   });
   init3({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: configsUrl
   });
   init({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: promptsUrl
   });
 }
 
+// src/mastra.ts
+var import_core2 = require("@opentelemetry/core");
+var promptContext2 = {};
+function setMastraPrompt(promptKey, version) {
+  promptContext2 = {
+    promptKey,
+    promptVersion: version,
+    promptAbTestKey: void 0,
+    promptVariantIndex: void 0
+  };
+}
+function setMastraPromptAB(abTestKey, variantIndex) {
+  promptContext2 = {
+    promptKey: void 0,
+    promptVersion: void 0,
+    promptAbTestKey: abTestKey,
+    promptVariantIndex: variantIndex
+  };
+}
+function clearMastraPrompt() {
+  promptContext2 = {};
+}
+var FallomExporter = class {
+  constructor(options = {}) {
+    this.pendingExports = [];
+    this.apiKey = options.apiKey ?? process.env.FALLOM_API_KEY ?? "";
+    this.baseUrl = options.baseUrl ?? "https://traces.fallom.com";
+    this.debug = options.debug ?? false;
+    console.log("[FallomExporter] Constructor called, debug:", this.debug);
+    console.log("[FallomExporter] API key present:", !!this.apiKey);
+    console.log("[FallomExporter] Base URL:", this.baseUrl);
+    if (!this.apiKey) {
+      console.warn(
+        "[FallomExporter] No API key provided. Set FALLOM_API_KEY env var or pass apiKey option."
+      );
+    }
+  }
+  log(...args) {
+    if (this.debug) {
+      console.log("[FallomExporter]", ...args);
+    }
+  }
+  /**
+   * Export spans to Fallom.
+   */
+  export(spans, resultCallback) {
+    if (spans.length === 0) {
+      resultCallback({ code: import_core2.ExportResultCode.SUCCESS });
+      return;
+    }
+    this.log(`Exporting ${spans.length} spans...`);
+    if (this.debug) {
+      for (const span2 of spans) {
+        this.log(` - ${span2.name}`, {
+          attributes: Object.fromEntries(
+            Object.entries(span2.attributes).filter(
+              ([k]) => k.startsWith("gen_ai") || k.startsWith("llm")
+            )
+          )
+        });
+      }
+    }
+    const exportPromise = this.sendSpans(spans).then(() => {
+      this.log("Export successful");
+      resultCallback({ code: import_core2.ExportResultCode.SUCCESS });
+    }).catch((error) => {
+      console.error("[FallomExporter] Export failed:", error);
+      resultCallback({
+        code: import_core2.ExportResultCode.FAILED,
+        error: error instanceof Error ? error : new Error(String(error))
+      });
+    });
+    this.pendingExports.push(exportPromise);
+  }
+  /**
+   * Shutdown the exporter, waiting for pending exports.
+   */
+  async shutdown() {
+    await Promise.all(this.pendingExports);
+    this.pendingExports = [];
+  }
+  /**
+   * Force flush pending exports.
+   */
+  async forceFlush() {
+    await Promise.all(this.pendingExports);
+  }
+  /**
+   * Send spans to Fallom's OTLP endpoint.
+   */
+  async sendSpans(spans) {
+    const session = getSession();
+    const resourceSpans = this.spansToOtlpJson(spans);
+    const headers = {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${this.apiKey}`
+    };
+    if (session?.configKey) {
+      headers["X-Fallom-Config-Key"] = session.configKey;
+    }
+    if (session?.sessionId) {
+      headers["X-Fallom-Session-Id"] = session.sessionId;
+    }
+    if (session?.customerId) {
+      headers["X-Fallom-Customer-Id"] = session.customerId;
+    }
+    if (promptContext2.promptKey) {
+      headers["X-Fallom-Prompt-Key"] = promptContext2.promptKey;
+    }
+    if (promptContext2.promptVersion !== void 0) {
+      headers["X-Fallom-Prompt-Version"] = String(promptContext2.promptVersion);
+    }
+    if (promptContext2.promptAbTestKey) {
+      headers["X-Fallom-Prompt-AB-Test"] = promptContext2.promptAbTestKey;
+    }
+    if (promptContext2.promptVariantIndex !== void 0) {
+      headers["X-Fallom-Prompt-Variant"] = String(
+        promptContext2.promptVariantIndex
+      );
+    }
+    const endpoint = `${this.baseUrl}/v1/traces`;
+    this.log("Sending to", endpoint);
+    this.log("Headers:", {
+      ...headers,
+      Authorization: "Bearer ***"
+    });
+    const response = await fetch(endpoint, {
+      method: "POST",
+      headers,
+      body: JSON.stringify({ resourceSpans })
+    });
+    if (!response.ok) {
+      const text = await response.text();
+      throw new Error(`Failed to export: ${response.status} ${text}`);
+    }
+  }
+  /**
+   * Convert OpenTelemetry spans to OTLP JSON format.
+   */
+  spansToOtlpJson(spans) {
+    const resourceMap = /* @__PURE__ */ new Map();
+    for (const span2 of spans) {
+      const resourceKey = JSON.stringify(span2.resource.attributes);
+      if (!resourceMap.has(resourceKey)) {
+        resourceMap.set(resourceKey, []);
+      }
+      resourceMap.get(resourceKey).push(span2);
+    }
+    const resourceSpans = [];
+    for (const [_resourceKey, resourceSpanList] of resourceMap) {
+      const firstSpan = resourceSpanList[0];
+      resourceSpans.push({
+        resource: {
+          attributes: this.attributesToOtlp(firstSpan.resource.attributes)
+        },
+        scopeSpans: [
+          {
+            scope: {
+              name: firstSpan.instrumentationLibrary.name,
+              version: firstSpan.instrumentationLibrary.version
+            },
+            spans: resourceSpanList.map((span2) => this.spanToOtlp(span2))
+          }
+        ]
+      });
+    }
+    return resourceSpans;
+  }
+  /**
+   * Convert a single span to OTLP format.
+   */
+  spanToOtlp(span2) {
+    return {
+      traceId: span2.spanContext().traceId,
+      spanId: span2.spanContext().spanId,
+      parentSpanId: span2.parentSpanId,
+      name: span2.name,
+      kind: span2.kind,
+      startTimeUnixNano: this.hrTimeToNanos(span2.startTime),
+      endTimeUnixNano: this.hrTimeToNanos(span2.endTime),
+      attributes: this.attributesToOtlp(span2.attributes),
+      status: {
+        code: span2.status.code,
+        message: span2.status.message
+      },
+      events: span2.events.map((event) => ({
+        timeUnixNano: this.hrTimeToNanos(event.time),
+        name: event.name,
+        attributes: this.attributesToOtlp(event.attributes || {})
+      }))
+    };
+  }
+  /**
+   * Convert attributes to OTLP format.
+   */
+  attributesToOtlp(attrs) {
+    return Object.entries(attrs).map(([key, value]) => ({
+      key,
+      value: this.valueToOtlp(value)
+    }));
+  }
+  /**
+   * Convert a value to OTLP AnyValue format.
+   */
+  valueToOtlp(value) {
+    if (typeof value === "string") {
+      return { stringValue: value };
+    }
+    if (typeof value === "number") {
+      if (Number.isInteger(value)) {
+        return { intValue: value };
+      }
+      return { doubleValue: value };
+    }
+    if (typeof value === "boolean") {
+      return { boolValue: value };
+    }
+    if (Array.isArray(value)) {
+      return {
+        arrayValue: {
+          values: value.map((v) => this.valueToOtlp(v))
+        }
+      };
+    }
+    return { stringValue: String(value) };
+  }
+  /**
+   * Convert HrTime to nanoseconds string.
+   */
+  hrTimeToNanos(hrTime) {
+    const [seconds, nanos] = hrTime;
+    return String(BigInt(seconds) * BigInt(1e9) + BigInt(nanos));
+  }
+};
+
 // src/index.ts
 init_prompts();
 var index_default = {
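
The new src/mastra.ts chunk ships FallomExporter, a minimal OpenTelemetry span exporter that POSTs OTLP JSON to <baseUrl>/v1/traces with Fallom session and prompt headers, plus setMastraPrompt/setMastraPromptAB/clearMastraPrompt to tag those exports. A wiring sketch, assuming Mastra's telemetry config accepts a custom exporter:

  // Illustrative sketch, not part of the diff; Mastra config shape assumed.
  import { Mastra } from "@mastra/core";
  import { FallomExporter, setMastraPrompt } from "@fallom/trace";

  const mastra = new Mastra({
    telemetry: {
      serviceName: "my-agent-app",
      enabled: true,
      export: { type: "custom", exporter: new FallomExporter({ debug: true }) },
    },
  });

  setMastraPrompt("support-triage", 3); // hypothetical prompt key and version
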
@@ -1725,8 +2507,12 @@ var index_default = {
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  FallomExporter,
+  clearMastraPrompt,
   init,
   models,
   prompts,
+  setMastraPrompt,
+  setMastraPromptAB,
   trace
 });