@fallom/trace 0.1.5 → 0.1.10
This diff shows the changes between two publicly released versions of the package, as published to a supported registry, and is provided for informational purposes only.
- package/README.md +3 -1
- package/dist/chunk-6MSTRIK4.mjs +255 -0
- package/dist/chunk-H2EACSBT.mjs +255 -0
- package/dist/index.d.mts +53 -5
- package/dist/index.d.ts +53 -5
- package/dist/index.js +614 -30
- package/dist/index.mjs +609 -32
- package/dist/prompts-VAN5E3L4.mjs +14 -0
- package/dist/prompts-ZSLS4DHO.mjs +14 -0
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -2,7 +2,7 @@ import {
   __export,
   init,
   prompts_exports
-} from "./chunk-
+} from "./chunk-6MSTRIK4.mjs";
 
 // src/trace.ts
 var trace_exports = {};
@@ -14,6 +14,7 @@ __export(trace_exports, {
   setSession: () => setSession,
   shutdown: () => shutdown,
   span: () => span,
+  wrapAISDK: () => wrapAISDK,
   wrapAnthropic: () => wrapAnthropic,
   wrapGoogleAI: () => wrapGoogleAI,
   wrapOpenAI: () => wrapOpenAI
@@ -649,7 +650,7 @@ var Resource = (
 var sessionStorage = new AsyncLocalStorage();
 var fallbackSession = null;
 var apiKey = null;
-var baseUrl = "https://
+var baseUrl = "https://traces.fallom.com";
 var initialized = false;
 var captureContent = true;
 var debugMode = false;
@@ -692,7 +693,7 @@ async function init2(options = {}) {
   debugMode = options.debug ?? false;
   log("\u{1F680} Initializing Fallom tracing...");
   apiKey = options.apiKey || process.env.FALLOM_API_KEY || null;
-  baseUrl = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+  baseUrl = options.baseUrl || process.env.FALLOM_TRACES_URL || process.env.FALLOM_BASE_URL || "https://traces.fallom.com";
   const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
   if (envCapture === "false" || envCapture === "0" || envCapture === "no") {
     captureContent = false;
@@ -852,11 +853,47 @@ async function shutdown() {
     initialized = false;
   }
 }
+function messagesToOtelAttributes(messages, completion, model, responseId) {
+  const attrs = {};
+  if (model) {
+    attrs["gen_ai.request.model"] = model;
+    attrs["gen_ai.response.model"] = model;
+  }
+  if (responseId) {
+    attrs["gen_ai.response.id"] = responseId;
+  }
+  if (messages) {
+    messages.forEach((msg, i) => {
+      attrs[`gen_ai.prompt.${i}.role`] = msg.role;
+      attrs[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+    });
+  }
+  if (completion) {
+    attrs["gen_ai.completion.0.role"] = completion.role;
+    attrs["gen_ai.completion.0.content"] = typeof completion.content === "string" ? completion.content : JSON.stringify(completion.content);
+    if (completion.tool_calls) {
+      attrs["gen_ai.completion.0.tool_calls"] = JSON.stringify(
+        completion.tool_calls
+      );
+    }
+  }
+  return attrs;
+}
+function generateHexId(length) {
+  const bytes = new Uint8Array(length / 2);
+  crypto.getRandomValues(bytes);
+  return Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join("");
+}
+var traceContextStorage = new AsyncLocalStorage();
+var fallbackTraceContext = null;
 async function sendTrace(trace) {
+  const url = `${baseUrl}/v1/traces`;
+  log("\u{1F4E4} Sending trace to:", url);
+  log(" Session:", trace.session_id, "Config:", trace.config_key);
   try {
     const controller = new AbortController();
     const timeoutId = setTimeout(() => controller.abort(), 5e3);
-    await fetch(
+    const response = await fetch(url, {
       method: "POST",
       headers: {
         Authorization: `Bearer ${apiKey}`,
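
The new `messagesToOtelAttributes` helper replaces the old flat `output` field with OpenTelemetry-style `gen_ai.*` attributes, and `generateHexId` produces W3C-trace-context-sized ids. A minimal sketch of the mapping; the helpers are internal to this bundle, so the call below is illustrative, not public API:

```js
// Illustrative call to the internal helper defined above (not exported):
const attrs = messagesToOtelAttributes(
  [{ role: "user", content: "Hi" }],        // messages
  { role: "assistant", content: "Hello!" }, // completion
  "gpt-4o-mini",                            // model
  "resp_123"                                // responseId
);
// attrs === {
//   "gen_ai.request.model": "gpt-4o-mini",
//   "gen_ai.response.model": "gpt-4o-mini",
//   "gen_ai.response.id": "resp_123",
//   "gen_ai.prompt.0.role": "user",
//   "gen_ai.prompt.0.content": "Hi",
//   "gen_ai.completion.0.role": "assistant",
//   "gen_ai.completion.0.content": "Hello!"
// }

// generateHexId(32) yields a 32-char trace id and generateHexId(16) a
// 16-char span id, matching W3C trace-context widths.
```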
@@ -866,8 +903,14 @@ async function sendTrace(trace) {
       signal: controller.signal
     });
     clearTimeout(timeoutId);
-
-
+    if (!response.ok) {
+      const text = await response.text();
+      log("\u274C Trace send failed:", response.status, text);
+    } else {
+      log("\u2705 Trace sent:", trace.name, trace.model);
+    }
+  } catch (err) {
+    log("\u274C Trace send error:", err instanceof Error ? err.message : err);
   }
 }
 function wrapOpenAI(client) {
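
Taken together, `sendTrace` now POSTs a payload like the following to `${baseUrl}/v1/traces`. Field names are taken from this diff; the values are illustrative:

```js
// Representative trace payload assembled by the wrappers that follow:
const examplePayload = {
  config_key: "chat-config",
  session_id: "sess_42",
  customer_id: "cust_7",
  trace_id: "4bf92f3577b34da6a3ce929d0e0e4736", // 32 hex chars
  span_id: "00f067aa0ba902b7",                  // 16 hex chars
  parent_span_id: undefined,                    // set when a parent span exists
  name: "chat.completions.create",
  kind: "llm",
  model: "gpt-4o-mini",
  start_time: "2025-01-01T00:00:00.000Z",
  end_time: "2025-01-01T00:00:01.234Z",
  duration_ms: 1234,
  status: "OK",
  prompt_tokens: 12,
  completion_tokens: 34,
  total_tokens: 46,
  attributes: { "gen_ai.prompt.0.role": "user" /* ... */ }
};
```

The request carries `Authorization: Bearer <apiKey>` and is aborted after 5 seconds; a non-2xx response is logged in debug mode and swallowed, so tracing can never take down the host application.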
@@ -881,20 +924,34 @@ function wrapOpenAI(client) {
     }
     let promptCtx = null;
     try {
-      const { getPromptContext } = await import("./prompts-
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
       promptCtx = getPromptContext();
     } catch {
     }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
     const params = args[0] || {};
     const startTime = Date.now();
     try {
       const response = await originalCreate(...args);
       const endTime = Date.now();
+      const attributes = captureContent ? messagesToOtelAttributes(
+        params?.messages,
+        response?.choices?.[0]?.message,
+        response?.model || params?.model,
+        response?.id
+      ) : void 0;
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "chat.completions.create",
+        kind: "llm",
         model: response?.model || params?.model,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
@@ -903,8 +960,7 @@ function wrapOpenAI(client) {
         prompt_tokens: response?.usage?.prompt_tokens,
         completion_tokens: response?.usage?.completion_tokens,
         total_tokens: response?.usage?.total_tokens,
-
-        output: captureContent ? response?.choices?.[0]?.message?.content : void 0,
+        attributes,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
@@ -914,17 +970,31 @@ function wrapOpenAI(client) {
       return response;
     } catch (error) {
       const endTime = Date.now();
+      const attributes = captureContent ? messagesToOtelAttributes(
+        params?.messages,
+        void 0,
+        params?.model,
+        void 0
+      ) : void 0;
+      if (attributes) {
+        attributes["error.message"] = error?.message;
+      }
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "chat.completions.create",
+        kind: "llm",
         model: params?.model,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
         duration_ms: endTime - startTime,
         status: "ERROR",
         error_message: error?.message,
+        attributes,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
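
A usage sketch for the updated OpenAI wrapper. The import path and the `setSession` shape are assumptions based on this bundle's exports, not confirmed public API:

```js
import OpenAI from "openai";
// Assumed entry point; adjust to the package's documented import.
import { init, setSession, wrapOpenAI } from "@fallom/trace";

await init({ apiKey: process.env.FALLOM_API_KEY, debug: true });
// Assumed session shape: the wrapper reads ctx.configKey / ctx.sessionId / ctx.customerId.
setSession({ configKey: "chat-config", sessionId: "sess_42" });

const openai = wrapOpenAI(new OpenAI());
const res = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "Hi" }]
});
// Each call now also records trace_id / span_id / parent_span_id, kind: "llm",
// and gen_ai.* prompt/completion attributes in place of the old flat `output`.
```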
@@ -945,20 +1015,37 @@ function wrapAnthropic(client) {
     }
     let promptCtx = null;
     try {
-      const { getPromptContext } = await import("./prompts-
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
       promptCtx = getPromptContext();
     } catch {
     }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
     const params = args[0] || {};
     const startTime = Date.now();
     try {
       const response = await originalCreate(...args);
       const endTime = Date.now();
+      const attributes = captureContent ? messagesToOtelAttributes(
+        params?.messages,
+        { role: "assistant", content: response?.content?.[0]?.text || "" },
+        response?.model || params?.model,
+        response?.id
+      ) : void 0;
+      if (attributes && params?.system) {
+        attributes["gen_ai.system_prompt"] = params.system;
+      }
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "messages.create",
+        kind: "llm",
         model: response?.model || params?.model,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
@@ -967,8 +1054,7 @@ function wrapAnthropic(client) {
         prompt_tokens: response?.usage?.input_tokens,
         completion_tokens: response?.usage?.output_tokens,
         total_tokens: (response?.usage?.input_tokens || 0) + (response?.usage?.output_tokens || 0),
-
-        output: captureContent ? response?.content?.[0]?.text : void 0,
+        attributes,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
@@ -978,17 +1064,34 @@ function wrapAnthropic(client) {
       return response;
     } catch (error) {
       const endTime = Date.now();
+      const attributes = captureContent ? messagesToOtelAttributes(
+        params?.messages,
+        void 0,
+        params?.model,
+        void 0
+      ) : void 0;
+      if (attributes) {
+        attributes["error.message"] = error?.message;
+        if (params?.system) {
+          attributes["gen_ai.system_prompt"] = params.system;
+        }
+      }
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "messages.create",
+        kind: "llm",
         model: params?.model,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
         duration_ms: endTime - startTime,
         status: "ERROR",
         error_message: error?.message,
+        attributes,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
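
The Anthropic wrapper gains the same trace-context fields and additionally records `params.system` as `gen_ai.system_prompt` on both the success and error paths. A sketch, with imports assumed as above:

```js
import Anthropic from "@anthropic-ai/sdk";
import { wrapAnthropic } from "@fallom/trace"; // assumed entry point

const anthropic = wrapAnthropic(new Anthropic());
const msg = await anthropic.messages.create({
  model: "claude-3-5-sonnet-latest",
  max_tokens: 256,
  system: "Answer in one sentence.", // captured as gen_ai.system_prompt
  messages: [{ role: "user", content: "Why is the sky blue?" }]
});
// The first text block becomes gen_ai.completion.0.content; input/output
// token counts map to prompt_tokens / completion_tokens.
```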
@@ -1009,22 +1112,51 @@ function wrapGoogleAI(model) {
     }
     let promptCtx = null;
     try {
-      const { getPromptContext } = await import("./prompts-
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
      promptCtx = getPromptContext();
     } catch {
     }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
     const startTime = Date.now();
     try {
       const response = await originalGenerate(...args);
       const endTime = Date.now();
       const result = response?.response;
       const usage = result?.usageMetadata;
+      const modelName = model?.model || "gemini";
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelName;
+        attributes["gen_ai.response.model"] = modelName;
+        const input = args[0];
+        if (typeof input === "string") {
+          attributes["gen_ai.prompt.0.role"] = "user";
+          attributes["gen_ai.prompt.0.content"] = input;
+        } else if (input?.contents) {
+          input.contents.forEach((content, i) => {
+            attributes[`gen_ai.prompt.${i}.role`] = content.role || "user";
+            attributes[`gen_ai.prompt.${i}.content`] = content.parts?.[0]?.text || JSON.stringify(content.parts);
+          });
+        }
+        const outputText = result?.text?.();
+        if (outputText) {
+          attributes["gen_ai.completion.0.role"] = "assistant";
+          attributes["gen_ai.completion.0.content"] = outputText;
+        }
+      }
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "generateContent",
-
+        kind: "llm",
+        model: modelName,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
         duration_ms: endTime - startTime,
@@ -1032,8 +1164,7 @@ function wrapGoogleAI(model) {
         prompt_tokens: usage?.promptTokenCount,
         completion_tokens: usage?.candidatesTokenCount,
         total_tokens: usage?.totalTokenCount,
-
-        output: captureContent ? result?.text?.() : void 0,
+        attributes: captureContent ? attributes : void 0,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
@@ -1043,17 +1174,33 @@ function wrapGoogleAI(model) {
       return response;
     } catch (error) {
       const endTime = Date.now();
+      const modelName = model?.model || "gemini";
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelName;
+        attributes["error.message"] = error?.message;
+        const input = args[0];
+        if (typeof input === "string") {
+          attributes["gen_ai.prompt.0.role"] = "user";
+          attributes["gen_ai.prompt.0.content"] = input;
+        }
+      }
       sendTrace({
         config_key: ctx.configKey,
         session_id: ctx.sessionId,
         customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
         name: "generateContent",
-
+        kind: "llm",
+        model: modelName,
         start_time: new Date(startTime).toISOString(),
         end_time: new Date(endTime).toISOString(),
         duration_ms: endTime - startTime,
         status: "ERROR",
         error_message: error?.message,
+        attributes: captureContent ? attributes : void 0,
         prompt_key: promptCtx?.promptKey,
         prompt_version: promptCtx?.promptVersion,
         prompt_ab_test_key: promptCtx?.abTestKey,
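
For Google AI, the wrapper now reports a concrete model name (`model.model`, falling back to `"gemini"`) and flattens both string prompts and `{ contents: [...] }` inputs. Sketch, with imports assumed:

```js
import { GoogleGenerativeAI } from "@google/generative-ai";
import { wrapGoogleAI } from "@fallom/trace"; // assumed entry point

const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY);
const model = wrapGoogleAI(genAI.getGenerativeModel({ model: "gemini-1.5-flash" }));

const result = await model.generateContent("Summarize OpenTelemetry in one line.");
// The string prompt becomes gen_ai.prompt.0.{role,content};
// result.response.text() becomes gen_ai.completion.0.content, and
// usageMetadata supplies the token counts.
```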
@@ -1065,6 +1212,414 @@ function wrapGoogleAI(model) {
   };
   return model;
 }
+function wrapAISDK(ai) {
+  const aiModule = ai;
+  return {
+    generateText: createGenerateTextWrapper(aiModule),
+    streamText: createStreamTextWrapper(aiModule),
+    generateObject: aiModule.generateObject ? createGenerateObjectWrapper(aiModule) : void 0,
+    streamObject: aiModule.streamObject ? createStreamObjectWrapper(aiModule) : void 0
+  };
+}
+function createGenerateTextWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    if (!ctx || !initialized) {
+      return aiModule.generateText(...args);
+    }
+    let promptCtx = null;
+    try {
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+      promptCtx = getPromptContext();
+    } catch {
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    try {
+      const result = await aiModule.generateText(...args);
+      const endTime = Date.now();
+      const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelId;
+        attributes["gen_ai.response.model"] = modelId;
+        if (params?.prompt) {
+          attributes["gen_ai.prompt.0.role"] = "user";
+          attributes["gen_ai.prompt.0.content"] = params.prompt;
+        }
+        if (params?.messages) {
+          params.messages.forEach((msg, i) => {
+            attributes[`gen_ai.prompt.${i}.role`] = msg.role;
+            attributes[`gen_ai.prompt.${i}.content`] = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+          });
+        }
+        if (result?.text) {
+          attributes["gen_ai.completion.0.role"] = "assistant";
+          attributes["gen_ai.completion.0.content"] = result.text;
+        }
+        if (result?.response?.id) {
+          attributes["gen_ai.response.id"] = result.response.id;
+        }
+      }
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateText",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "OK",
+        prompt_tokens: result?.usage?.promptTokens,
+        completion_tokens: result?.usage?.completionTokens,
+        total_tokens: result?.usage?.totalTokens,
+        attributes: captureContent ? attributes : void 0,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      return result;
+    } catch (error) {
+      const endTime = Date.now();
+      const modelId = params?.model?.modelId || String(params?.model || "unknown");
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateText",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "ERROR",
+        error_message: error?.message,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      throw error;
+    }
+  };
+}
+function createStreamTextWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    const result = await aiModule.streamText(...args);
+    if (!ctx || !initialized) {
+      return result;
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    let firstTokenTime = null;
+    const modelId = params?.model?.modelId || String(params?.model || "unknown");
+    let promptCtx = null;
+    try {
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+      promptCtx = getPromptContext();
+    } catch {
+    }
+    if (result?.usage) {
+      result.usage.then((usage) => {
+        const endTime = Date.now();
+        log("\u{1F4CA} streamText usage:", JSON.stringify(usage, null, 2));
+        const attributes = {};
+        if (captureContent) {
+          attributes["gen_ai.request.model"] = modelId;
+          if (params?.prompt) {
+            attributes["gen_ai.prompt.0.role"] = "user";
+            attributes["gen_ai.prompt.0.content"] = params.prompt;
+          }
+        }
+        if (firstTokenTime) {
+          attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+        }
+        const tracePayload = {
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamText",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "OK",
+          prompt_tokens: usage?.promptTokens,
+          completion_tokens: usage?.completionTokens,
+          total_tokens: usage?.totalTokens,
+          time_to_first_token_ms: firstTokenTime ? firstTokenTime - startTime : void 0,
+          attributes: captureContent ? attributes : void 0,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        };
+        sendTrace(tracePayload).catch(() => {
+        });
+      }).catch((error) => {
+        const endTime = Date.now();
+        log("\u274C streamText error:", error?.message);
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamText",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "ERROR",
+          error_message: error?.message,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      });
+    }
+    if (result?.textStream) {
+      const originalTextStream = result.textStream;
+      const wrappedTextStream = (async function* () {
+        for await (const chunk of originalTextStream) {
+          if (!firstTokenTime) {
+            firstTokenTime = Date.now();
+            log("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+          }
+          yield chunk;
+        }
+      })();
+      return new Proxy(result, {
+        get(target, prop) {
+          if (prop === "textStream") {
+            return wrappedTextStream;
+          }
+          return target[prop];
+        }
+      });
+    }
+    return result;
+  };
+}
+function createGenerateObjectWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    if (!ctx || !initialized) {
+      return aiModule.generateObject(...args);
+    }
+    let promptCtx = null;
+    try {
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+      promptCtx = getPromptContext();
+    } catch {
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    try {
+      const result = await aiModule.generateObject(...args);
+      const endTime = Date.now();
+      const modelId = result?.response?.modelId || params?.model?.modelId || String(params?.model || "unknown");
+      const attributes = {};
+      if (captureContent) {
+        attributes["gen_ai.request.model"] = modelId;
+        attributes["gen_ai.response.model"] = modelId;
+        if (result?.object) {
+          attributes["gen_ai.completion.0.role"] = "assistant";
+          attributes["gen_ai.completion.0.content"] = JSON.stringify(
+            result.object
+          );
+        }
+      }
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateObject",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "OK",
+        prompt_tokens: result?.usage?.promptTokens,
+        completion_tokens: result?.usage?.completionTokens,
+        total_tokens: result?.usage?.totalTokens,
+        attributes: captureContent ? attributes : void 0,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      return result;
+    } catch (error) {
+      const endTime = Date.now();
+      const modelId = params?.model?.modelId || String(params?.model || "unknown");
+      sendTrace({
+        config_key: ctx.configKey,
+        session_id: ctx.sessionId,
+        customer_id: ctx.customerId,
+        trace_id: traceId,
+        span_id: spanId,
+        parent_span_id: parentSpanId,
+        name: "generateObject",
+        kind: "llm",
+        model: modelId,
+        start_time: new Date(startTime).toISOString(),
+        end_time: new Date(endTime).toISOString(),
+        duration_ms: endTime - startTime,
+        status: "ERROR",
+        error_message: error?.message,
+        prompt_key: promptCtx?.promptKey,
+        prompt_version: promptCtx?.promptVersion,
+        prompt_ab_test_key: promptCtx?.abTestKey,
+        prompt_variant_index: promptCtx?.variantIndex
+      }).catch(() => {
+      });
+      throw error;
+    }
+  };
+}
+function createStreamObjectWrapper(aiModule) {
+  return async (...args) => {
+    const ctx = sessionStorage.getStore() || fallbackSession;
+    const params = args[0] || {};
+    const startTime = Date.now();
+    const result = await aiModule.streamObject(...args);
+    log("\u{1F50D} streamObject result keys:", Object.keys(result || {}));
+    if (!ctx || !initialized) {
+      return result;
+    }
+    const traceCtx = traceContextStorage.getStore() || fallbackTraceContext;
+    const traceId = traceCtx?.traceId || generateHexId(32);
+    const spanId = generateHexId(16);
+    const parentSpanId = traceCtx?.parentSpanId;
+    let firstTokenTime = null;
+    const modelId = params?.model?.modelId || String(params?.model || "unknown");
+    let promptCtx = null;
+    try {
+      const { getPromptContext } = await import("./prompts-VAN5E3L4.mjs");
+      promptCtx = getPromptContext();
+    } catch {
+    }
+    if (result?.usage) {
+      result.usage.then((usage) => {
+        const endTime = Date.now();
+        log("\u{1F4CA} streamObject usage:", JSON.stringify(usage, null, 2));
+        const attributes = {};
+        if (captureContent) {
+          attributes["gen_ai.request.model"] = modelId;
+        }
+        if (firstTokenTime) {
+          attributes["gen_ai.time_to_first_token_ms"] = firstTokenTime - startTime;
+        }
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamObject",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "OK",
+          prompt_tokens: usage?.promptTokens,
+          completion_tokens: usage?.completionTokens,
+          total_tokens: usage?.totalTokens,
+          attributes: captureContent ? attributes : void 0,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      }).catch((error) => {
+        const endTime = Date.now();
+        sendTrace({
+          config_key: ctx.configKey,
+          session_id: ctx.sessionId,
+          customer_id: ctx.customerId,
+          trace_id: traceId,
+          span_id: spanId,
+          parent_span_id: parentSpanId,
+          name: "streamObject",
+          kind: "llm",
+          model: modelId,
+          start_time: new Date(startTime).toISOString(),
+          end_time: new Date(endTime).toISOString(),
+          duration_ms: endTime - startTime,
+          status: "ERROR",
+          error_message: error?.message,
+          prompt_key: promptCtx?.promptKey,
+          prompt_version: promptCtx?.promptVersion,
+          prompt_ab_test_key: promptCtx?.abTestKey,
+          prompt_variant_index: promptCtx?.variantIndex
+        }).catch(() => {
+        });
+      });
+    }
+    if (result?.partialObjectStream) {
+      const originalStream = result.partialObjectStream;
+      const wrappedStream = (async function* () {
+        for await (const chunk of originalStream) {
+          if (!firstTokenTime) {
+            firstTokenTime = Date.now();
+            log("\u23F1\uFE0F Time to first token:", firstTokenTime - startTime, "ms");
+          }
+          yield chunk;
+        }
+      })();
+      return new Proxy(result, {
+        get(target, prop) {
+          if (prop === "partialObjectStream") {
+            return wrappedStream;
+          }
+          return target[prop];
+        }
+      });
+    }
+    return result;
+  };
+}
 
 // src/models.ts
 var models_exports = {};
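
`wrapAISDK` is the headline addition: it wraps the Vercel AI SDK's `generateText` / `streamText` (and, when present, `generateObject` / `streamObject`). Streaming results come back as a `Proxy` whose re-yielded `textStream` (or `partialObjectStream`) lets the wrapper stamp time-to-first-token, while the trace itself is sent when the SDK's `usage` promise settles. Usage sketch; the `ai` and provider imports are assumptions:

```js
import * as ai from "ai";
import { openai } from "@ai-sdk/openai";
import { wrapAISDK } from "@fallom/trace"; // assumed entry point

const { generateText, streamText } = wrapAISDK(ai);

// Non-streaming: traced with usage, gen_ai.* attributes, and response id.
const { text } = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "Name three OpenTelemetry signals."
});

// Streaming: the first chunk pulled from textStream sets firstTokenTime,
// reported as gen_ai.time_to_first_token_ms / time_to_first_token_ms.
const stream = await streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Count to five."
});
for await (const chunk of stream.textStream) process.stdout.write(chunk);
```

Note that the wrapped `streamText` / `streamObject` never fail a request for tracing reasons: a rejected usage promise records an ERROR trace and is swallowed.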
@@ -1074,7 +1629,7 @@ __export(models_exports, {
 });
 import { createHash } from "crypto";
 var apiKey2 = null;
-var baseUrl2 = "https://
+var baseUrl2 = "https://configs.fallom.com";
 var initialized2 = false;
 var syncInterval = null;
 var debugMode2 = false;
@@ -1088,7 +1643,7 @@ function log2(msg) {
 }
 function init3(options = {}) {
   apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
-  baseUrl2 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://
+  baseUrl2 = options.baseUrl || process.env.FALLOM_CONFIGS_URL || process.env.FALLOM_BASE_URL || "https://configs.fallom.com";
   initialized2 = true;
   if (!apiKey2) {
     return;
@@ -1177,20 +1732,28 @@ async function get(configKey, sessionId, options = {}) {
   const { version, fallback, debug = false } = options;
   debugMode2 = debug;
   ensureInit();
-  log2(
+  log2(
+    `get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`
+  );
   try {
     let configData = configCache.get(configKey);
-    log2(
+    log2(
+      `Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`
+    );
     if (!configData) {
       log2("Not in cache, fetching...");
       await fetchConfigs(SYNC_TIMEOUT);
       configData = configCache.get(configKey);
-      log2(
+      log2(
+        `After fetch, cache lookup: ${configData ? "found" : "still not found"}`
+      );
     }
     if (!configData) {
       log2(`Config not found, using fallback: ${fallback}`);
       if (fallback) {
-        console.warn(
+        console.warn(
+          `[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`
+        );
         return returnWithTrace(configKey, sessionId, fallback, 0);
       }
       throw new Error(
@@ -1206,7 +1769,9 @@ async function get(configKey, sessionId, options = {}) {
     }
     if (!config) {
       if (fallback) {
-        console.warn(
+        console.warn(
+          `[Fallom WARNING] Config '${configKey}' version ${version} not found, using fallback: ${fallback}`
+        );
         return returnWithTrace(configKey, sessionId, fallback, 0);
       }
       throw new Error(`Config '${configKey}' version ${version} not found.`);
@@ -1217,7 +1782,9 @@ async function get(configKey, sessionId, options = {}) {
     config = configData.versions.get(targetVersion);
     if (!config) {
       if (fallback) {
-        console.warn(
+        console.warn(
+          `[Fallom WARNING] Config '${configKey}' has no cached version, using fallback: ${fallback}`
+        );
         return returnWithTrace(configKey, sessionId, fallback, 0);
       }
       throw new Error(`Config '${configKey}' has no cached version.`);
@@ -1226,7 +1793,11 @@ async function get(configKey, sessionId, options = {}) {
     const variantsRaw = config.variants;
     const configVersion = config.version || targetVersion;
     const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
-    log2(
+    log2(
+      `Config found! Version: ${configVersion}, Variants: ${JSON.stringify(
+        variants
+      )}`
+    );
     const hashBytes = createHash("md5").update(sessionId).digest();
     const hashVal = hashBytes.readUInt32BE(0) % 1e6;
     log2(`Session hash: ${hashVal} (out of 1,000,000)`);
@@ -1235,7 +1806,9 @@ async function get(configKey, sessionId, options = {}) {
     for (const v of variants) {
       const oldCumulative = cumulative;
       cumulative += v.weight * 1e4;
-      log2(
+      log2(
+        `Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`
+      );
       if (hashVal < cumulative) {
         assignedModel = v.model;
         break;
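
The variant-selection logging above also documents the assignment math: the first four bytes of `md5(sessionId)` mod 1,000,000 pick a bucket, and each percentage point of weight spans 10,000 buckets (`weight * 1e4`). A self-contained sketch of that logic:

```js
import { createHash } from "crypto";

// Mirrors the deterministic A/B assignment in get() above.
function assignVariant(sessionId, variants /* [{ model, weight }] */) {
  const hashVal =
    createHash("md5").update(sessionId).digest().readUInt32BE(0) % 1e6;
  let cumulative = 0;
  for (const v of variants) {
    cumulative += v.weight * 1e4; // weight is a percentage
    if (hashVal < cumulative) return v.model;
  }
  return void 0; // weights summing to < 100 can leave a session unassigned
}

// With 50/50 weights, buckets 0-499999 map to the first model and
// 500000-999999 to the second; the same sessionId always gets the same model.
assignVariant("sess_42", [
  { model: "gpt-4o", weight: 50 },
  { model: "claude-3-5-sonnet", weight: 50 }
]);
```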
@@ -1248,7 +1821,9 @@ async function get(configKey, sessionId, options = {}) {
       throw e;
     }
     if (fallback) {
-      console.warn(
+      console.warn(
+        `[Fallom WARNING] Error getting model for '${configKey}': ${e}. Using fallback: ${fallback}`
+      );
       return returnWithTrace(configKey, sessionId, fallback, 0);
     }
     throw e;
@@ -1291,20 +1866,22 @@ async function recordSession(configKey, version, sessionId, model) {
 
 // src/init.ts
 async function init4(options = {}) {
-  const 
+  const tracesUrl = options.tracesUrl || process.env.FALLOM_TRACES_URL || "https://traces.fallom.com";
+  const configsUrl = options.configsUrl || process.env.FALLOM_CONFIGS_URL || "https://configs.fallom.com";
+  const promptsUrl = options.promptsUrl || process.env.FALLOM_PROMPTS_URL || "https://prompts.fallom.com";
   await init2({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: tracesUrl,
     captureContent: options.captureContent,
     debug: options.debug
   });
   init3({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: configsUrl
   });
   init({
     apiKey: options.apiKey,
-    baseUrl:
+    baseUrl: promptsUrl
   });
 }
 
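
Finally, the single `baseUrl` option is split into three service endpoints, each with its own env var and default. A configuration sketch; whether this `init` is the package's top-level export is an assumption:

```js
import { init } from "@fallom/trace"; // assumed top-level export of init4

await init({
  apiKey: process.env.FALLOM_API_KEY,
  tracesUrl: "https://traces.fallom.com",   // or FALLOM_TRACES_URL
  configsUrl: "https://configs.fallom.com", // or FALLOM_CONFIGS_URL
  promptsUrl: "https://prompts.fallom.com", // or FALLOM_PROMPTS_URL
  captureContent: true,
  debug: false
});
// Inside the trace and config submodules, the service-specific env vars
// (FALLOM_TRACES_URL / FALLOM_CONFIGS_URL) take precedence over the legacy
// FALLOM_BASE_URL, which remains as a fallback.
```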