@lelemondev/sdk 0.3.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +536 -93
- package/dist/express-Dt5wT6_n.d.mts +67 -0
- package/dist/express-Dt5wT6_n.d.ts +67 -0
- package/dist/express.d.mts +1 -0
- package/dist/express.d.ts +1 -0
- package/dist/express.js +21 -0
- package/dist/express.js.map +1 -0
- package/dist/express.mjs +19 -0
- package/dist/express.mjs.map +1 -0
- package/dist/hono-Dzmu77iW.d.mts +80 -0
- package/dist/hono-Dzmu77iW.d.ts +80 -0
- package/dist/hono.d.mts +1 -0
- package/dist/hono.d.ts +1 -0
- package/dist/hono.js +23 -0
- package/dist/hono.js.map +1 -0
- package/dist/hono.mjs +21 -0
- package/dist/hono.mjs.map +1 -0
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +949 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +949 -3
- package/dist/index.mjs.map +1 -1
- package/dist/integrations.d.mts +4 -0
- package/dist/integrations.d.ts +4 -0
- package/dist/integrations.js +93 -0
- package/dist/integrations.js.map +1 -0
- package/dist/integrations.mjs +88 -0
- package/dist/integrations.mjs.map +1 -0
- package/dist/lambda-CAuiF9dH.d.mts +79 -0
- package/dist/lambda-CAuiF9dH.d.ts +79 -0
- package/dist/lambda.d.mts +1 -0
- package/dist/lambda.d.ts +1 -0
- package/dist/lambda.js +21 -0
- package/dist/lambda.js.map +1 -0
- package/dist/lambda.mjs +19 -0
- package/dist/lambda.mjs.map +1 -0
- package/dist/next-BC9PmEho.d.mts +100 -0
- package/dist/next-BC9PmEho.d.ts +100 -0
- package/dist/next.d.mts +1 -0
- package/dist/next.d.ts +1 -0
- package/dist/next.js +33 -0
- package/dist/next.js.map +1 -0
- package/dist/next.mjs +30 -0
- package/dist/next.mjs.map +1 -0
- package/package.json +59 -11
package/dist/index.mjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
/* @
|
|
1
|
+
/* @lelemondev/sdk - LLM Observability */
|
|
2
2
|
var __defProp = Object.defineProperty;
|
|
3
3
|
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
4
4
|
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
@@ -309,7 +309,7 @@ function canHandle(client) {
|
|
|
309
309
|
const constructorName = client.constructor?.name;
|
|
310
310
|
if (constructorName === "OpenAI") return true;
|
|
311
311
|
const c = client;
|
|
312
|
-
return !!(c.chat && c.completions);
|
|
312
|
+
return !!(c.chat && c.completions) || !!c.responses;
|
|
313
313
|
}
|
|
314
314
|
function wrapChatCreate(originalFn) {
|
|
315
315
|
return async function wrappedChatCreate(...args) {
|
|
@@ -349,6 +349,45 @@ function wrapChatCreate(originalFn) {
|
|
|
349
349
|
}
|
|
350
350
|
};
|
|
351
351
|
}
|
|
352
|
+
function wrapResponsesCreate(originalFn) {
|
|
353
|
+
return async function wrappedResponsesCreate(...args) {
|
|
354
|
+
const startTime = Date.now();
|
|
355
|
+
const request = args[0] || {};
|
|
356
|
+
const isStreaming = request.stream === true;
|
|
357
|
+
try {
|
|
358
|
+
const response = await originalFn(...args);
|
|
359
|
+
if (isStreaming && isAsyncIterable(response)) {
|
|
360
|
+
return wrapResponsesStream(response, request, startTime);
|
|
361
|
+
}
|
|
362
|
+
const durationMs = Date.now() - startTime;
|
|
363
|
+
const extracted = extractResponsesResult(response);
|
|
364
|
+
captureTrace({
|
|
365
|
+
provider: PROVIDER_NAME,
|
|
366
|
+
model: request.model || "unknown",
|
|
367
|
+
input: { instructions: request.instructions, input: request.input },
|
|
368
|
+
output: extracted.output,
|
|
369
|
+
inputTokens: extracted.inputTokens,
|
|
370
|
+
outputTokens: extracted.outputTokens,
|
|
371
|
+
durationMs,
|
|
372
|
+
status: "success",
|
|
373
|
+
streaming: false,
|
|
374
|
+
metadata: extracted.metadata
|
|
375
|
+
});
|
|
376
|
+
return response;
|
|
377
|
+
} catch (error) {
|
|
378
|
+
const durationMs = Date.now() - startTime;
|
|
379
|
+
captureError({
|
|
380
|
+
provider: PROVIDER_NAME,
|
|
381
|
+
model: request.model || "unknown",
|
|
382
|
+
input: { instructions: request.instructions, input: request.input },
|
|
383
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
384
|
+
durationMs,
|
|
385
|
+
streaming: isStreaming
|
|
386
|
+
});
|
|
387
|
+
throw error;
|
|
388
|
+
}
|
|
389
|
+
};
|
|
390
|
+
}
|
|
352
391
|
function wrapCompletionCreate(originalFn) {
|
|
353
392
|
return async function wrappedCompletionCreate(...args) {
|
|
354
393
|
const startTime = Date.now();
|
|
@@ -540,6 +579,85 @@ function extractStreamChunkTokens(chunk) {
|
|
|
540
579
|
return null;
|
|
541
580
|
}
|
|
542
581
|
}
|
|
582
|
+
function extractResponsesResult(response) {
|
|
583
|
+
let output = response.output_text || null;
|
|
584
|
+
if (!output && response.output && Array.isArray(response.output)) {
|
|
585
|
+
const textItems = response.output.filter((item) => {
|
|
586
|
+
const i = item;
|
|
587
|
+
return i.type === "message" || i.type === "text";
|
|
588
|
+
}).map((item) => {
|
|
589
|
+
const i = item;
|
|
590
|
+
if (i.type === "message" && i.content) {
|
|
591
|
+
const content = i.content;
|
|
592
|
+
return content.filter((c) => c.type === "text" || c.type === "output_text").map((c) => c.text || "").join("");
|
|
593
|
+
}
|
|
594
|
+
return i.text || "";
|
|
595
|
+
});
|
|
596
|
+
output = textItems.join("");
|
|
597
|
+
}
|
|
598
|
+
const usage = response.usage || {};
|
|
599
|
+
const inputTokens = isValidNumber(usage.input_tokens) ? usage.input_tokens : 0;
|
|
600
|
+
const outputTokens = isValidNumber(usage.output_tokens) ? usage.output_tokens : 0;
|
|
601
|
+
const metadata = {};
|
|
602
|
+
if (response.id) {
|
|
603
|
+
metadata.responseId = response.id;
|
|
604
|
+
}
|
|
605
|
+
return {
|
|
606
|
+
output,
|
|
607
|
+
inputTokens,
|
|
608
|
+
outputTokens,
|
|
609
|
+
metadata: Object.keys(metadata).length > 0 ? metadata : {}
|
|
610
|
+
};
|
|
611
|
+
}
|
|
612
|
+
async function* wrapResponsesStream(stream, request, startTime) {
|
|
613
|
+
const chunks = [];
|
|
614
|
+
let inputTokens = 0;
|
|
615
|
+
let outputTokens = 0;
|
|
616
|
+
let error = null;
|
|
617
|
+
try {
|
|
618
|
+
for await (const event of stream) {
|
|
619
|
+
const e = event;
|
|
620
|
+
if (e.type === "response.output_text.delta" && e.delta) {
|
|
621
|
+
chunks.push(e.delta);
|
|
622
|
+
}
|
|
623
|
+
if (e.type === "response.done" && e.response) {
|
|
624
|
+
const resp = e.response;
|
|
625
|
+
if (resp.usage) {
|
|
626
|
+
inputTokens = resp.usage.input_tokens || 0;
|
|
627
|
+
outputTokens = resp.usage.output_tokens || 0;
|
|
628
|
+
}
|
|
629
|
+
}
|
|
630
|
+
yield event;
|
|
631
|
+
}
|
|
632
|
+
} catch (err) {
|
|
633
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
634
|
+
throw err;
|
|
635
|
+
} finally {
|
|
636
|
+
const durationMs = Date.now() - startTime;
|
|
637
|
+
if (error) {
|
|
638
|
+
captureError({
|
|
639
|
+
provider: PROVIDER_NAME,
|
|
640
|
+
model: request.model || "unknown",
|
|
641
|
+
input: { instructions: request.instructions, input: request.input },
|
|
642
|
+
error,
|
|
643
|
+
durationMs,
|
|
644
|
+
streaming: true
|
|
645
|
+
});
|
|
646
|
+
} else {
|
|
647
|
+
captureTrace({
|
|
648
|
+
provider: PROVIDER_NAME,
|
|
649
|
+
model: request.model || "unknown",
|
|
650
|
+
input: { instructions: request.instructions, input: request.input },
|
|
651
|
+
output: chunks.join(""),
|
|
652
|
+
inputTokens,
|
|
653
|
+
outputTokens,
|
|
654
|
+
durationMs,
|
|
655
|
+
status: "success",
|
|
656
|
+
streaming: true
|
|
657
|
+
});
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
}
|
|
543
661
|
|
|
544
662
|
// src/providers/anthropic.ts
|
|
545
663
|
var PROVIDER_NAME2 = "anthropic";
|
|
@@ -763,6 +881,802 @@ function extractTokens2(response) {
|
|
|
763
881
|
}
|
|
764
882
|
}
|
|
765
883
|
|
|
884
|
+
// src/providers/bedrock.ts
|
|
885
|
+
var PROVIDER_NAME3 = "bedrock";
|
|
886
|
+
function canHandle3(client) {
|
|
887
|
+
if (!client || typeof client !== "object") return false;
|
|
888
|
+
const constructorName = client.constructor?.name;
|
|
889
|
+
if (constructorName === "BedrockRuntimeClient") return true;
|
|
890
|
+
const c = client;
|
|
891
|
+
if (typeof c.send !== "function") return false;
|
|
892
|
+
if (!c.config || typeof c.config !== "object") return false;
|
|
893
|
+
return "region" in c.config;
|
|
894
|
+
}
|
|
895
|
+
function wrap(client) {
|
|
896
|
+
const bedrockClient = client;
|
|
897
|
+
return new Proxy(bedrockClient, {
|
|
898
|
+
get(target, prop, receiver) {
|
|
899
|
+
const value = Reflect.get(target, prop, receiver);
|
|
900
|
+
if (prop === "send" && typeof value === "function") {
|
|
901
|
+
return wrapSend(value.bind(target));
|
|
902
|
+
}
|
|
903
|
+
return value;
|
|
904
|
+
}
|
|
905
|
+
});
|
|
906
|
+
}
|
|
907
|
+
function wrapSend(originalSend) {
|
|
908
|
+
return async function tracedSend(command) {
|
|
909
|
+
const commandName = command.constructor?.name || "";
|
|
910
|
+
switch (commandName) {
|
|
911
|
+
case "ConverseCommand":
|
|
912
|
+
return handleConverse(originalSend, command);
|
|
913
|
+
case "ConverseStreamCommand":
|
|
914
|
+
return handleConverseStream(originalSend, command);
|
|
915
|
+
case "InvokeModelCommand":
|
|
916
|
+
return handleInvokeModel(originalSend, command);
|
|
917
|
+
case "InvokeModelWithResponseStreamCommand":
|
|
918
|
+
return handleInvokeModelStream(originalSend, command);
|
|
919
|
+
default:
|
|
920
|
+
return originalSend(command);
|
|
921
|
+
}
|
|
922
|
+
};
|
|
923
|
+
}
|
|
924
|
+
async function handleConverse(send, command) {
|
|
925
|
+
const startTime = Date.now();
|
|
926
|
+
const input = command.input;
|
|
927
|
+
try {
|
|
928
|
+
const response = await send(command);
|
|
929
|
+
const durationMs = Date.now() - startTime;
|
|
930
|
+
const extracted = extractConverseOutput(response);
|
|
931
|
+
captureTrace({
|
|
932
|
+
provider: PROVIDER_NAME3,
|
|
933
|
+
model: input.modelId || "unknown",
|
|
934
|
+
input: { system: input.system, messages: input.messages },
|
|
935
|
+
output: extracted.output,
|
|
936
|
+
inputTokens: extracted.inputTokens,
|
|
937
|
+
outputTokens: extracted.outputTokens,
|
|
938
|
+
durationMs,
|
|
939
|
+
status: "success",
|
|
940
|
+
streaming: false,
|
|
941
|
+
metadata: {
|
|
942
|
+
stopReason: response.stopReason,
|
|
943
|
+
hasToolUse: extracted.hasToolUse,
|
|
944
|
+
cacheReadTokens: extracted.cacheReadTokens,
|
|
945
|
+
cacheWriteTokens: extracted.cacheWriteTokens,
|
|
946
|
+
latencyMs: response.metrics?.latencyMs
|
|
947
|
+
}
|
|
948
|
+
});
|
|
949
|
+
return response;
|
|
950
|
+
} catch (error) {
|
|
951
|
+
captureError({
|
|
952
|
+
provider: PROVIDER_NAME3,
|
|
953
|
+
model: input.modelId || "unknown",
|
|
954
|
+
input: { system: input.system, messages: input.messages },
|
|
955
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
956
|
+
durationMs: Date.now() - startTime,
|
|
957
|
+
streaming: false
|
|
958
|
+
});
|
|
959
|
+
throw error;
|
|
960
|
+
}
|
|
961
|
+
}
|
|
962
|
+
async function handleConverseStream(send, command) {
|
|
963
|
+
const startTime = Date.now();
|
|
964
|
+
const input = command.input;
|
|
965
|
+
try {
|
|
966
|
+
const response = await send(command);
|
|
967
|
+
if (response.stream) {
|
|
968
|
+
return {
|
|
969
|
+
...response,
|
|
970
|
+
stream: wrapConverseStream(response.stream, input, startTime)
|
|
971
|
+
};
|
|
972
|
+
}
|
|
973
|
+
return response;
|
|
974
|
+
} catch (error) {
|
|
975
|
+
captureError({
|
|
976
|
+
provider: PROVIDER_NAME3,
|
|
977
|
+
model: input.modelId || "unknown",
|
|
978
|
+
input: { system: input.system, messages: input.messages },
|
|
979
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
980
|
+
durationMs: Date.now() - startTime,
|
|
981
|
+
streaming: true
|
|
982
|
+
});
|
|
983
|
+
throw error;
|
|
984
|
+
}
|
|
985
|
+
}
|
|
986
|
+
async function* wrapConverseStream(stream, input, startTime) {
|
|
987
|
+
const chunks = [];
|
|
988
|
+
let inputTokens = 0;
|
|
989
|
+
let outputTokens = 0;
|
|
990
|
+
let error = null;
|
|
991
|
+
try {
|
|
992
|
+
for await (const event of stream) {
|
|
993
|
+
if (event.contentBlockDelta?.delta?.text) {
|
|
994
|
+
chunks.push(event.contentBlockDelta.delta.text);
|
|
995
|
+
}
|
|
996
|
+
if (event.metadata?.usage) {
|
|
997
|
+
inputTokens = event.metadata.usage.inputTokens || 0;
|
|
998
|
+
outputTokens = event.metadata.usage.outputTokens || 0;
|
|
999
|
+
}
|
|
1000
|
+
yield event;
|
|
1001
|
+
}
|
|
1002
|
+
} catch (err) {
|
|
1003
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1004
|
+
throw err;
|
|
1005
|
+
} finally {
|
|
1006
|
+
const durationMs = Date.now() - startTime;
|
|
1007
|
+
if (error) {
|
|
1008
|
+
captureError({
|
|
1009
|
+
provider: PROVIDER_NAME3,
|
|
1010
|
+
model: input.modelId || "unknown",
|
|
1011
|
+
input: { system: input.system, messages: input.messages },
|
|
1012
|
+
error,
|
|
1013
|
+
durationMs,
|
|
1014
|
+
streaming: true
|
|
1015
|
+
});
|
|
1016
|
+
} else {
|
|
1017
|
+
captureTrace({
|
|
1018
|
+
provider: PROVIDER_NAME3,
|
|
1019
|
+
model: input.modelId || "unknown",
|
|
1020
|
+
input: { system: input.system, messages: input.messages },
|
|
1021
|
+
output: chunks.join(""),
|
|
1022
|
+
inputTokens,
|
|
1023
|
+
outputTokens,
|
|
1024
|
+
durationMs,
|
|
1025
|
+
status: "success",
|
|
1026
|
+
streaming: true
|
|
1027
|
+
});
|
|
1028
|
+
}
|
|
1029
|
+
}
|
|
1030
|
+
}
|
|
1031
|
+
async function handleInvokeModel(send, command) {
|
|
1032
|
+
const startTime = Date.now();
|
|
1033
|
+
const input = command.input;
|
|
1034
|
+
try {
|
|
1035
|
+
const response = await send(command);
|
|
1036
|
+
const durationMs = Date.now() - startTime;
|
|
1037
|
+
const parsed = parseInvokeModelBody(response.body);
|
|
1038
|
+
captureTrace({
|
|
1039
|
+
provider: PROVIDER_NAME3,
|
|
1040
|
+
model: input.modelId || "unknown",
|
|
1041
|
+
input: parseRequestBody(input.body),
|
|
1042
|
+
output: parsed.output,
|
|
1043
|
+
inputTokens: parsed.inputTokens,
|
|
1044
|
+
outputTokens: parsed.outputTokens,
|
|
1045
|
+
durationMs,
|
|
1046
|
+
status: "success",
|
|
1047
|
+
streaming: false
|
|
1048
|
+
});
|
|
1049
|
+
return response;
|
|
1050
|
+
} catch (error) {
|
|
1051
|
+
captureError({
|
|
1052
|
+
provider: PROVIDER_NAME3,
|
|
1053
|
+
model: input.modelId || "unknown",
|
|
1054
|
+
input: parseRequestBody(input.body),
|
|
1055
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1056
|
+
durationMs: Date.now() - startTime,
|
|
1057
|
+
streaming: false
|
|
1058
|
+
});
|
|
1059
|
+
throw error;
|
|
1060
|
+
}
|
|
1061
|
+
}
|
|
1062
|
+
async function handleInvokeModelStream(send, command) {
|
|
1063
|
+
const startTime = Date.now();
|
|
1064
|
+
const input = command.input;
|
|
1065
|
+
try {
|
|
1066
|
+
const response = await send(command);
|
|
1067
|
+
if (response.body) {
|
|
1068
|
+
return {
|
|
1069
|
+
...response,
|
|
1070
|
+
body: wrapInvokeModelStream(response.body, input, startTime)
|
|
1071
|
+
};
|
|
1072
|
+
}
|
|
1073
|
+
return response;
|
|
1074
|
+
} catch (error) {
|
|
1075
|
+
captureError({
|
|
1076
|
+
provider: PROVIDER_NAME3,
|
|
1077
|
+
model: input.modelId || "unknown",
|
|
1078
|
+
input: parseRequestBody(input.body),
|
|
1079
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1080
|
+
durationMs: Date.now() - startTime,
|
|
1081
|
+
streaming: true
|
|
1082
|
+
});
|
|
1083
|
+
throw error;
|
|
1084
|
+
}
|
|
1085
|
+
}
|
|
1086
|
+
async function* wrapInvokeModelStream(stream, input, startTime) {
|
|
1087
|
+
const chunks = [];
|
|
1088
|
+
let inputTokens = 0;
|
|
1089
|
+
let outputTokens = 0;
|
|
1090
|
+
let error = null;
|
|
1091
|
+
try {
|
|
1092
|
+
for await (const event of stream) {
|
|
1093
|
+
if (event.chunk?.bytes) {
|
|
1094
|
+
const parsed = tryParseStreamChunk(event.chunk.bytes);
|
|
1095
|
+
if (parsed) {
|
|
1096
|
+
if (parsed.text) chunks.push(parsed.text);
|
|
1097
|
+
if (parsed.inputTokens) inputTokens = parsed.inputTokens;
|
|
1098
|
+
if (parsed.outputTokens) outputTokens = parsed.outputTokens;
|
|
1099
|
+
}
|
|
1100
|
+
}
|
|
1101
|
+
yield event;
|
|
1102
|
+
}
|
|
1103
|
+
} catch (err) {
|
|
1104
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1105
|
+
throw err;
|
|
1106
|
+
} finally {
|
|
1107
|
+
const durationMs = Date.now() - startTime;
|
|
1108
|
+
if (error) {
|
|
1109
|
+
captureError({
|
|
1110
|
+
provider: PROVIDER_NAME3,
|
|
1111
|
+
model: input.modelId || "unknown",
|
|
1112
|
+
input: parseRequestBody(input.body),
|
|
1113
|
+
error,
|
|
1114
|
+
durationMs,
|
|
1115
|
+
streaming: true
|
|
1116
|
+
});
|
|
1117
|
+
} else {
|
|
1118
|
+
captureTrace({
|
|
1119
|
+
provider: PROVIDER_NAME3,
|
|
1120
|
+
model: input.modelId || "unknown",
|
|
1121
|
+
input: parseRequestBody(input.body),
|
|
1122
|
+
output: chunks.join(""),
|
|
1123
|
+
inputTokens,
|
|
1124
|
+
outputTokens,
|
|
1125
|
+
durationMs,
|
|
1126
|
+
status: "success",
|
|
1127
|
+
streaming: true
|
|
1128
|
+
});
|
|
1129
|
+
}
|
|
1130
|
+
}
|
|
1131
|
+
}
|
|
1132
|
+
function extractConverseOutput(response) {
|
|
1133
|
+
const content = response.output?.message?.content;
|
|
1134
|
+
const hasToolUse = Array.isArray(content) && content.some((c) => c.toolUse);
|
|
1135
|
+
const output = safeExtract(() => {
|
|
1136
|
+
if (!Array.isArray(content)) return null;
|
|
1137
|
+
if (hasToolUse) {
|
|
1138
|
+
return content;
|
|
1139
|
+
}
|
|
1140
|
+
return content.map((c) => c.text || "").join("");
|
|
1141
|
+
}, null);
|
|
1142
|
+
const usage = response.usage || {};
|
|
1143
|
+
return {
|
|
1144
|
+
output,
|
|
1145
|
+
inputTokens: isValidNumber(usage.inputTokens) ? usage.inputTokens : 0,
|
|
1146
|
+
outputTokens: isValidNumber(usage.outputTokens) ? usage.outputTokens : 0,
|
|
1147
|
+
cacheReadTokens: isValidNumber(usage.cacheReadInputTokens) ? usage.cacheReadInputTokens : 0,
|
|
1148
|
+
cacheWriteTokens: isValidNumber(usage.cacheWriteInputTokens) ? usage.cacheWriteInputTokens : 0,
|
|
1149
|
+
hasToolUse
|
|
1150
|
+
};
|
|
1151
|
+
}
|
|
1152
|
+
function parseInvokeModelBody(body) {
|
|
1153
|
+
try {
|
|
1154
|
+
const text = new TextDecoder().decode(body);
|
|
1155
|
+
const parsed = JSON.parse(text);
|
|
1156
|
+
if (parsed.content && Array.isArray(parsed.content)) {
|
|
1157
|
+
const output = parsed.content.filter((c) => c.type === "text").map((c) => c.text || "").join("");
|
|
1158
|
+
return {
|
|
1159
|
+
output,
|
|
1160
|
+
inputTokens: parsed.usage?.input_tokens || 0,
|
|
1161
|
+
outputTokens: parsed.usage?.output_tokens || 0
|
|
1162
|
+
};
|
|
1163
|
+
}
|
|
1164
|
+
if (parsed.results) {
|
|
1165
|
+
return {
|
|
1166
|
+
output: parsed.results[0]?.outputText || parsed.results,
|
|
1167
|
+
inputTokens: parsed.inputTextTokenCount || 0,
|
|
1168
|
+
outputTokens: parsed.results[0]?.tokenCount || 0
|
|
1169
|
+
};
|
|
1170
|
+
}
|
|
1171
|
+
if (parsed.generation) {
|
|
1172
|
+
return {
|
|
1173
|
+
output: parsed.generation,
|
|
1174
|
+
inputTokens: parsed.prompt_token_count || 0,
|
|
1175
|
+
outputTokens: parsed.generation_token_count || 0
|
|
1176
|
+
};
|
|
1177
|
+
}
|
|
1178
|
+
return { output: parsed, inputTokens: 0, outputTokens: 0 };
|
|
1179
|
+
} catch {
|
|
1180
|
+
return { output: null, inputTokens: 0, outputTokens: 0 };
|
|
1181
|
+
}
|
|
1182
|
+
}
|
|
1183
|
+
function parseRequestBody(body) {
|
|
1184
|
+
try {
|
|
1185
|
+
const text = typeof body === "string" ? body : new TextDecoder().decode(body);
|
|
1186
|
+
return JSON.parse(text);
|
|
1187
|
+
} catch {
|
|
1188
|
+
return body;
|
|
1189
|
+
}
|
|
1190
|
+
}
|
|
1191
|
+
function tryParseStreamChunk(bytes) {
|
|
1192
|
+
try {
|
|
1193
|
+
const text = new TextDecoder().decode(bytes);
|
|
1194
|
+
const parsed = JSON.parse(text);
|
|
1195
|
+
if (parsed.type === "content_block_delta" && parsed.delta?.text) {
|
|
1196
|
+
return { text: parsed.delta.text };
|
|
1197
|
+
}
|
|
1198
|
+
if (parsed.type === "message_delta" && parsed.usage) {
|
|
1199
|
+
return { outputTokens: parsed.usage.output_tokens };
|
|
1200
|
+
}
|
|
1201
|
+
if (parsed.type === "message_start" && parsed.message?.usage) {
|
|
1202
|
+
return { inputTokens: parsed.message.usage.input_tokens };
|
|
1203
|
+
}
|
|
1204
|
+
return null;
|
|
1205
|
+
} catch {
|
|
1206
|
+
return null;
|
|
1207
|
+
}
|
|
1208
|
+
}
|
|
1209
|
+
|
|
1210
|
+
// src/providers/gemini.ts
|
|
1211
|
+
var PROVIDER_NAME4 = "gemini";
|
|
1212
|
+
function canHandle4(client) {
|
|
1213
|
+
if (!client || typeof client !== "object") return false;
|
|
1214
|
+
const constructorName = client.constructor?.name;
|
|
1215
|
+
if (constructorName === "GoogleGenerativeAI") return true;
|
|
1216
|
+
if (constructorName === "GoogleGenAI") return true;
|
|
1217
|
+
const c = client;
|
|
1218
|
+
if (typeof c.getGenerativeModel === "function") return true;
|
|
1219
|
+
if (c.models && typeof c.models.generate === "function") {
|
|
1220
|
+
return true;
|
|
1221
|
+
}
|
|
1222
|
+
return false;
|
|
1223
|
+
}
|
|
1224
|
+
function wrap2(client) {
|
|
1225
|
+
const geminiClient = client;
|
|
1226
|
+
return new Proxy(geminiClient, {
|
|
1227
|
+
get(target, prop, receiver) {
|
|
1228
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1229
|
+
if (prop === "getGenerativeModel" && typeof value === "function") {
|
|
1230
|
+
return wrapGetGenerativeModel(value.bind(target));
|
|
1231
|
+
}
|
|
1232
|
+
return value;
|
|
1233
|
+
}
|
|
1234
|
+
});
|
|
1235
|
+
}
|
|
1236
|
+
function wrapGetGenerativeModel(originalFn) {
|
|
1237
|
+
return function wrappedGetGenerativeModel(config) {
|
|
1238
|
+
const model = originalFn(config);
|
|
1239
|
+
return wrapGenerativeModel(model, config.model);
|
|
1240
|
+
};
|
|
1241
|
+
}
|
|
1242
|
+
function wrapGenerativeModel(model, modelName) {
|
|
1243
|
+
return new Proxy(model, {
|
|
1244
|
+
get(target, prop, receiver) {
|
|
1245
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1246
|
+
if (prop === "generateContent" && typeof value === "function") {
|
|
1247
|
+
return wrapGenerateContent(value.bind(target), modelName);
|
|
1248
|
+
}
|
|
1249
|
+
if (prop === "generateContentStream" && typeof value === "function") {
|
|
1250
|
+
return wrapGenerateContentStream(value.bind(target), modelName);
|
|
1251
|
+
}
|
|
1252
|
+
if (prop === "startChat" && typeof value === "function") {
|
|
1253
|
+
return wrapStartChat(value.bind(target), modelName);
|
|
1254
|
+
}
|
|
1255
|
+
return value;
|
|
1256
|
+
}
|
|
1257
|
+
});
|
|
1258
|
+
}
|
|
1259
|
+
function wrapGenerateContent(originalFn, modelName) {
|
|
1260
|
+
return async function wrappedGenerateContent(request) {
|
|
1261
|
+
const startTime = Date.now();
|
|
1262
|
+
const input = extractInput(request);
|
|
1263
|
+
try {
|
|
1264
|
+
const result = await originalFn(request);
|
|
1265
|
+
const durationMs = Date.now() - startTime;
|
|
1266
|
+
const extracted = extractGenerateContentResult(result);
|
|
1267
|
+
captureTrace({
|
|
1268
|
+
provider: PROVIDER_NAME4,
|
|
1269
|
+
model: modelName,
|
|
1270
|
+
input,
|
|
1271
|
+
output: extracted.output,
|
|
1272
|
+
inputTokens: extracted.inputTokens,
|
|
1273
|
+
outputTokens: extracted.outputTokens,
|
|
1274
|
+
durationMs,
|
|
1275
|
+
status: "success",
|
|
1276
|
+
streaming: false,
|
|
1277
|
+
metadata: extracted.metadata
|
|
1278
|
+
});
|
|
1279
|
+
return result;
|
|
1280
|
+
} catch (error) {
|
|
1281
|
+
captureError({
|
|
1282
|
+
provider: PROVIDER_NAME4,
|
|
1283
|
+
model: modelName,
|
|
1284
|
+
input,
|
|
1285
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1286
|
+
durationMs: Date.now() - startTime,
|
|
1287
|
+
streaming: false
|
|
1288
|
+
});
|
|
1289
|
+
throw error;
|
|
1290
|
+
}
|
|
1291
|
+
};
|
|
1292
|
+
}
|
|
1293
|
+
function wrapGenerateContentStream(originalFn, modelName) {
|
|
1294
|
+
return async function wrappedGenerateContentStream(request) {
|
|
1295
|
+
const startTime = Date.now();
|
|
1296
|
+
const input = extractInput(request);
|
|
1297
|
+
try {
|
|
1298
|
+
const result = await originalFn(request);
|
|
1299
|
+
const wrappedStream = wrapStream3(result.stream, modelName, input, startTime);
|
|
1300
|
+
return {
|
|
1301
|
+
...result,
|
|
1302
|
+
stream: wrappedStream
|
|
1303
|
+
};
|
|
1304
|
+
} catch (error) {
|
|
1305
|
+
captureError({
|
|
1306
|
+
provider: PROVIDER_NAME4,
|
|
1307
|
+
model: modelName,
|
|
1308
|
+
input,
|
|
1309
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1310
|
+
durationMs: Date.now() - startTime,
|
|
1311
|
+
streaming: true
|
|
1312
|
+
});
|
|
1313
|
+
throw error;
|
|
1314
|
+
}
|
|
1315
|
+
};
|
|
1316
|
+
}
|
|
1317
|
+
async function* wrapStream3(stream, modelName, input, startTime) {
|
|
1318
|
+
const chunks = [];
|
|
1319
|
+
let inputTokens = 0;
|
|
1320
|
+
let outputTokens = 0;
|
|
1321
|
+
let cachedTokens = 0;
|
|
1322
|
+
let thoughtsTokens = 0;
|
|
1323
|
+
let error = null;
|
|
1324
|
+
try {
|
|
1325
|
+
for await (const chunk of stream) {
|
|
1326
|
+
try {
|
|
1327
|
+
const text = chunk.text();
|
|
1328
|
+
if (text) {
|
|
1329
|
+
chunks.push(text);
|
|
1330
|
+
}
|
|
1331
|
+
} catch {
|
|
1332
|
+
}
|
|
1333
|
+
if (chunk.usageMetadata) {
|
|
1334
|
+
inputTokens = chunk.usageMetadata.promptTokenCount || 0;
|
|
1335
|
+
outputTokens = chunk.usageMetadata.candidatesTokenCount || 0;
|
|
1336
|
+
cachedTokens = chunk.usageMetadata.cachedContentTokenCount || 0;
|
|
1337
|
+
thoughtsTokens = chunk.usageMetadata.thoughtsTokenCount || 0;
|
|
1338
|
+
}
|
|
1339
|
+
yield chunk;
|
|
1340
|
+
}
|
|
1341
|
+
} catch (err) {
|
|
1342
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1343
|
+
throw err;
|
|
1344
|
+
} finally {
|
|
1345
|
+
const durationMs = Date.now() - startTime;
|
|
1346
|
+
if (error) {
|
|
1347
|
+
captureError({
|
|
1348
|
+
provider: PROVIDER_NAME4,
|
|
1349
|
+
model: modelName,
|
|
1350
|
+
input,
|
|
1351
|
+
error,
|
|
1352
|
+
durationMs,
|
|
1353
|
+
streaming: true
|
|
1354
|
+
});
|
|
1355
|
+
} else {
|
|
1356
|
+
captureTrace({
|
|
1357
|
+
provider: PROVIDER_NAME4,
|
|
1358
|
+
model: modelName,
|
|
1359
|
+
input,
|
|
1360
|
+
output: chunks.join(""),
|
|
1361
|
+
inputTokens,
|
|
1362
|
+
outputTokens,
|
|
1363
|
+
durationMs,
|
|
1364
|
+
status: "success",
|
|
1365
|
+
streaming: true,
|
|
1366
|
+
metadata: {
|
|
1367
|
+
cachedTokens: cachedTokens > 0 ? cachedTokens : void 0,
|
|
1368
|
+
thoughtsTokens: thoughtsTokens > 0 ? thoughtsTokens : void 0
|
|
1369
|
+
}
|
|
1370
|
+
});
|
|
1371
|
+
}
|
|
1372
|
+
}
|
|
1373
|
+
}
|
|
1374
|
+
function wrapStartChat(originalFn, modelName) {
|
|
1375
|
+
return function wrappedStartChat(config) {
|
|
1376
|
+
const chat = originalFn(config);
|
|
1377
|
+
return wrapChatSession(chat, modelName);
|
|
1378
|
+
};
|
|
1379
|
+
}
|
|
1380
|
+
function wrapChatSession(chat, modelName) {
|
|
1381
|
+
return new Proxy(chat, {
|
|
1382
|
+
get(target, prop, receiver) {
|
|
1383
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1384
|
+
if (prop === "sendMessage" && typeof value === "function") {
|
|
1385
|
+
return wrapSendMessage(value.bind(target), modelName);
|
|
1386
|
+
}
|
|
1387
|
+
if (prop === "sendMessageStream" && typeof value === "function") {
|
|
1388
|
+
return wrapSendMessageStream(value.bind(target), modelName);
|
|
1389
|
+
}
|
|
1390
|
+
return value;
|
|
1391
|
+
}
|
|
1392
|
+
});
|
|
1393
|
+
}
|
|
1394
|
+
function wrapSendMessage(originalFn, modelName) {
|
|
1395
|
+
return async function wrappedSendMessage(request) {
|
|
1396
|
+
const startTime = Date.now();
|
|
1397
|
+
const input = typeof request === "string" ? request : request;
|
|
1398
|
+
try {
|
|
1399
|
+
const result = await originalFn(request);
|
|
1400
|
+
const durationMs = Date.now() - startTime;
|
|
1401
|
+
const extracted = extractGenerateContentResult(result);
|
|
1402
|
+
captureTrace({
|
|
1403
|
+
provider: PROVIDER_NAME4,
|
|
1404
|
+
model: modelName,
|
|
1405
|
+
input,
|
|
1406
|
+
output: extracted.output,
|
|
1407
|
+
inputTokens: extracted.inputTokens,
|
|
1408
|
+
outputTokens: extracted.outputTokens,
|
|
1409
|
+
durationMs,
|
|
1410
|
+
status: "success",
|
|
1411
|
+
streaming: false,
|
|
1412
|
+
metadata: extracted.metadata
|
|
1413
|
+
});
|
|
1414
|
+
return result;
|
|
1415
|
+
} catch (error) {
|
|
1416
|
+
captureError({
|
|
1417
|
+
provider: PROVIDER_NAME4,
|
|
1418
|
+
model: modelName,
|
|
1419
|
+
input,
|
|
1420
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1421
|
+
durationMs: Date.now() - startTime,
|
|
1422
|
+
streaming: false
|
|
1423
|
+
});
|
|
1424
|
+
throw error;
|
|
1425
|
+
}
|
|
1426
|
+
};
|
|
1427
|
+
}
|
|
1428
|
+
function wrapSendMessageStream(originalFn, modelName) {
|
|
1429
|
+
return async function wrappedSendMessageStream(request) {
|
|
1430
|
+
const startTime = Date.now();
|
|
1431
|
+
const input = typeof request === "string" ? request : request;
|
|
1432
|
+
try {
|
|
1433
|
+
const result = await originalFn(request);
|
|
1434
|
+
const wrappedStream = wrapStream3(result.stream, modelName, input, startTime);
|
|
1435
|
+
return {
|
|
1436
|
+
...result,
|
|
1437
|
+
stream: wrappedStream
|
|
1438
|
+
};
|
|
1439
|
+
} catch (error) {
|
|
1440
|
+
captureError({
|
|
1441
|
+
provider: PROVIDER_NAME4,
|
|
1442
|
+
model: modelName,
|
|
1443
|
+
input,
|
|
1444
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1445
|
+
durationMs: Date.now() - startTime,
|
|
1446
|
+
streaming: true
|
|
1447
|
+
});
|
|
1448
|
+
throw error;
|
|
1449
|
+
}
|
|
1450
|
+
};
|
|
1451
|
+
}
|
|
1452
|
+
function extractInput(request) {
|
|
1453
|
+
if (typeof request === "string") {
|
|
1454
|
+
return request;
|
|
1455
|
+
}
|
|
1456
|
+
if (request.contents) {
|
|
1457
|
+
return request.contents;
|
|
1458
|
+
}
|
|
1459
|
+
return request;
|
|
1460
|
+
}
|
|
1461
|
+
function extractGenerateContentResult(result) {
|
|
1462
|
+
const response = result.response;
|
|
1463
|
+
let output = null;
|
|
1464
|
+
try {
|
|
1465
|
+
output = response.text();
|
|
1466
|
+
} catch {
|
|
1467
|
+
output = safeExtract(() => {
|
|
1468
|
+
const content = response.candidates?.[0]?.content;
|
|
1469
|
+
if (content?.parts) {
|
|
1470
|
+
return content.parts;
|
|
1471
|
+
}
|
|
1472
|
+
return null;
|
|
1473
|
+
}, null);
|
|
1474
|
+
}
|
|
1475
|
+
const usage = response.usageMetadata;
|
|
1476
|
+
const inputTokens = isValidNumber(usage?.promptTokenCount) ? usage.promptTokenCount : 0;
|
|
1477
|
+
const outputTokens = isValidNumber(usage?.candidatesTokenCount) ? usage.candidatesTokenCount : 0;
|
|
1478
|
+
const metadata = {};
|
|
1479
|
+
if (usage?.cachedContentTokenCount && usage.cachedContentTokenCount > 0) {
|
|
1480
|
+
metadata.cachedTokens = usage.cachedContentTokenCount;
|
|
1481
|
+
}
|
|
1482
|
+
if (usage?.thoughtsTokenCount && usage.thoughtsTokenCount > 0) {
|
|
1483
|
+
metadata.thoughtsTokens = usage.thoughtsTokenCount;
|
|
1484
|
+
}
|
|
1485
|
+
const finishReason = response.candidates?.[0]?.finishReason;
|
|
1486
|
+
if (finishReason) {
|
|
1487
|
+
metadata.finishReason = finishReason;
|
|
1488
|
+
}
|
|
1489
|
+
return {
|
|
1490
|
+
output,
|
|
1491
|
+
inputTokens,
|
|
1492
|
+
outputTokens,
|
|
1493
|
+
metadata: Object.keys(metadata).length > 0 ? metadata : {}
|
|
1494
|
+
};
|
|
1495
|
+
}
|
|
1496
|
+
|
|
1497
|
+
// src/providers/openrouter.ts
|
|
1498
|
+
var OPENROUTER_BASE_URL = "openrouter.ai";
|
|
1499
|
+
var PROVIDER_NAME5 = "openrouter";
|
|
1500
|
+
/**
 * Detects whether `client` is an OpenRouter client: it must expose the
 * OpenAI-compatible chat.completions.create surface AND point its baseURL
 * at openrouter.ai (which distinguishes it from a plain OpenAI client).
 *
 * @param {*} client - Candidate SDK client.
 * @returns {boolean} True when this provider wrapper should handle it.
 */
function canHandle5(client) {
  const isObject = Boolean(client) && typeof client === "object";
  if (!isObject) {
    return false;
  }
  const candidate = client;
  const hasCreate = Boolean(candidate.chat?.completions?.create);
  if (!hasCreate) {
    return false;
  }
  return (candidate.baseURL || "").includes(OPENROUTER_BASE_URL);
}
|
|
1507
|
+
/**
 * Wraps an OpenRouter client in a Proxy so that accessing its `chat`
 * namespace returns a traced version; every other property passes
 * through untouched.
 *
 * @param {object} client - OpenRouter (OpenAI-compatible) SDK client.
 * @returns {object} Proxied client with the same surface as the original.
 */
function wrap3(client) {
  const handler = {
    get(target, prop, receiver) {
      const original = Reflect.get(target, prop, receiver);
      // Only wrap when `chat` resolves to a real (non-null) object.
      const isChatNamespace = prop === "chat" && original !== null && original !== undefined && typeof original === "object";
      return isChatNamespace ? wrapChatNamespace(original) : original;
    }
  };
  return new Proxy(client, handler);
}
|
|
1519
|
+
/**
 * Proxies a client's `chat` namespace so that its `completions` resource
 * is wrapped for tracing on access. Falsy namespaces pass through as-is.
 *
 * @param {object|null|undefined} chat - The client's chat namespace.
 * @returns {object|null|undefined} Proxied namespace, or the input when falsy.
 */
function wrapChatNamespace(chat) {
  if (!chat) {
    return chat;
  }
  return new Proxy(chat, {
    get(target, prop, receiver) {
      const member = Reflect.get(target, prop, receiver);
      if (prop !== "completions") {
        return member;
      }
      // Only wrap a real object; anything else is returned unchanged.
      return member && typeof member === "object" ? wrapChatCompletions(member) : member;
    }
  });
}
|
|
1531
|
+
/**
 * Proxies the `completions` resource so its `create` method is replaced
 * with a traced wrapper. `create` is bound to the underlying resource so
 * `this` inside the SDK method still points at the real object.
 *
 * @param {object} completions - The chat.completions resource.
 * @returns {object} Proxied resource with a traced `create`.
 */
function wrapChatCompletions(completions) {
  const handler = {
    get(target, prop, receiver) {
      const member = Reflect.get(target, prop, receiver);
      const isCreateFn = prop === "create" && typeof member === "function";
      return isCreateFn ? wrapChatCreate2(member.bind(target)) : member;
    }
  };
  return new Proxy(completions, handler);
}
|
|
1542
|
+
/**
 * Builds a traced replacement for chat.completions.create: forwards the
 * call, then records a success trace (non-streaming) or re-throws after
 * capturing an error event. Streaming responses are handed to wrapStream4
 * so chunks can be observed as they flow.
 *
 * @param {Function} originalFn - The bound original create method.
 * @returns {Function} Async wrapper with the same call signature.
 */
function wrapChatCreate2(originalFn) {
  return async function wrappedChatCreate(...args) {
    const startedAt = Date.now();
    const request = args[0] || {};
    const wantsStream = request.stream === true;
    try {
      const response = await originalFn(...args);
      // Streaming: delegate trace capture to the async-generator wrapper.
      if (wantsStream && isAsyncIterable3(response)) {
        return wrapStream4(response, request, startedAt);
      }
      const details = extractChatCompletion2(response);
      captureTrace({
        provider: PROVIDER_NAME5,
        model: request.model || details.model || "unknown",
        input: request.messages,
        output: details.output,
        inputTokens: details.tokens?.inputTokens || 0,
        outputTokens: details.tokens?.outputTokens || 0,
        durationMs: Date.now() - startedAt,
        status: "success",
        streaming: false,
        metadata: details.metadata
      });
      return response;
    } catch (error) {
      // Normalize non-Error throwables before reporting, then re-throw
      // the original value so caller-visible behavior is unchanged.
      const normalized = error instanceof Error ? error : new Error(String(error));
      captureError({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        error: normalized,
        durationMs: Date.now() - startedAt,
        streaming: wantsStream
      });
      throw error;
    }
  };
}
|
|
1581
|
+
/**
 * Reports whether a value can be consumed with `for await...of`, i.e. it
 * is non-nullish and carries a callable Symbol.asyncIterator member.
 *
 * @param {*} value - Candidate value.
 * @returns {boolean}
 */
function isAsyncIterable3(value) {
  if (value == null) {
    return false;
  }
  return typeof value[Symbol.asyncIterator] === "function";
}
|
|
1584
|
+
/**
 * Re-yields every chunk of an OpenRouter streaming response while
 * accumulating the delta text, generation id, and token usage, then — once
 * the stream ends — captures either a success trace or an error event.
 *
 * @param {AsyncIterable<object>} stream - The provider's chunk stream.
 * @param {object} request - The original create() request (model, messages).
 * @param {number} startTime - Date.now() timestamp taken before the call.
 * @yields {object} Each chunk, unmodified, in order.
 */
async function* wrapStream4(stream, request, startTime) {
  const chunks = [];
  let tokens = null;
  let generationId = null;
  let error = null;
  try {
    for await (const chunk of stream) {
      const c = chunk;
      // Remember the first chunk id as the generation id.
      if (!generationId && c.id) {
        generationId = c.id;
      }
      // Collect only textual delta content; tool/role-only deltas are skipped.
      const content = c.choices?.[0]?.delta?.content;
      if (content) {
        chunks.push(content);
      }
      // Later usage reports overwrite earlier ones, so the last one wins.
      const chunkTokens = extractStreamChunkTokens2(c);
      if (chunkTokens) {
        tokens = chunkTokens;
      }
      yield chunk;
    }
  } catch (err) {
    // Normalize for reporting, but re-throw the original value untouched.
    error = err instanceof Error ? err : new Error(String(err));
    throw err;
  } finally {
    // NOTE(review): this finally block also runs if the consumer abandons the
    // stream early (generator return), in which case a success trace with
    // partial output is captured — presumably intentional; confirm upstream.
    const durationMs = Date.now() - startTime;
    const output = chunks.join("");
    const metadata = {};
    if (generationId) {
      metadata.generationId = generationId;
    }
    if (error) {
      captureError({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        error,
        durationMs,
        streaming: true
      });
    } else {
      captureTrace({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        output,
        inputTokens: tokens?.inputTokens || 0,
        outputTokens: tokens?.outputTokens || 0,
        durationMs,
        status: "success",
        streaming: true,
        metadata: Object.keys(metadata).length > 0 ? metadata : void 0
      });
    }
  }
}
|
|
1640
|
+
/**
 * Extracts the trace-relevant fields from a non-streaming OpenRouter chat
 * completion: model name, first-choice message text, token usage, and the
 * generation id (when present) as metadata.
 *
 * @param {object} response - Raw chat completion response.
 * @returns {{model: ?string, output: ?string, tokens: ?object, metadata: object}}
 */
function extractChatCompletion2(response) {
  // Record the generation id in metadata only when the response carries one.
  const metadata = response.id ? { generationId: response.id } : {};
  return {
    model: response.model || null,
    output: response.choices?.[0]?.message?.content || null,
    tokens: extractTokens3(response),
    metadata
  };
}
|
|
1655
|
+
/**
 * Reads token usage out of an OpenAI-style `usage` block, coercing missing
 * or invalid counters to 0. Returns null when there is no usage block, or
 * when neither prompt nor completion counts are valid numbers.
 *
 * @param {object} response - Chat completion response.
 * @returns {?{inputTokens: number, outputTokens: number, totalTokens: number}}
 */
function extractTokens3(response) {
  const usage = response.usage;
  if (!usage) {
    return null;
  }
  // Treat usage as absent when neither direction reports a valid number.
  if (!isValidNumber(usage.prompt_tokens) && !isValidNumber(usage.completion_tokens)) {
    return null;
  }
  const asCount = (n) => (isValidNumber(n) ? n : 0);
  return {
    inputTokens: asCount(usage.prompt_tokens),
    outputTokens: asCount(usage.completion_tokens),
    totalTokens: asCount(usage.total_tokens)
  };
}
|
|
1670
|
+
/**
 * Reads token usage from a single stream chunk's `usage` block, when present.
 * totalTokens is always reported as 0 for stream chunks.
 *
 * @param {object} chunk - One streamed chat-completion chunk.
 * @returns {?{inputTokens: number, outputTokens: number, totalTokens: number}}
 */
function extractStreamChunkTokens2(chunk) {
  // NOTE(review): usage presumably arrives only on the final chunk — the
  // caller keeps overwriting with the latest value, so the last wins.
  if (!chunk.usage) {
    return null;
  }
  const { prompt_tokens: promptCount, completion_tokens: completionCount } = chunk.usage;
  return {
    inputTokens: isValidNumber(promptCount) ? promptCount : 0,
    outputTokens: isValidNumber(completionCount) ? completionCount : 0,
    totalTokens: 0
  };
}
|
|
1679
|
+
|
|
766
1680
|
// src/observe.ts
|
|
767
1681
|
function observe(client, options) {
|
|
768
1682
|
if (options) {
|
|
@@ -772,6 +1686,12 @@ function observe(client, options) {
|
|
|
772
1686
|
if (config.disabled) {
|
|
773
1687
|
return client;
|
|
774
1688
|
}
|
|
1689
|
+
if (canHandle5(client)) {
|
|
1690
|
+
if (config.debug) {
|
|
1691
|
+
console.log("[Lelemon] Wrapping OpenRouter client");
|
|
1692
|
+
}
|
|
1693
|
+
return wrap3(client);
|
|
1694
|
+
}
|
|
775
1695
|
if (canHandle(client)) {
|
|
776
1696
|
if (config.debug) {
|
|
777
1697
|
console.log("[Lelemon] Wrapping OpenAI client");
|
|
@@ -784,8 +1704,20 @@ function observe(client, options) {
|
|
|
784
1704
|
}
|
|
785
1705
|
return wrapAnthropic(client);
|
|
786
1706
|
}
|
|
1707
|
+
if (canHandle3(client)) {
|
|
1708
|
+
if (config.debug) {
|
|
1709
|
+
console.log("[Lelemon] Wrapping Bedrock client");
|
|
1710
|
+
}
|
|
1711
|
+
return wrap(client);
|
|
1712
|
+
}
|
|
1713
|
+
if (canHandle4(client)) {
|
|
1714
|
+
if (config.debug) {
|
|
1715
|
+
console.log("[Lelemon] Wrapping Gemini client");
|
|
1716
|
+
}
|
|
1717
|
+
return wrap2(client);
|
|
1718
|
+
}
|
|
787
1719
|
console.warn(
|
|
788
|
-
"[Lelemon] Unknown client type. Tracing not enabled. Supported: OpenAI, Anthropic"
|
|
1720
|
+
"[Lelemon] Unknown client type. Tracing not enabled. Supported: OpenAI, OpenRouter, Anthropic, Bedrock, Gemini"
|
|
789
1721
|
);
|
|
790
1722
|
return client;
|
|
791
1723
|
}
|
|
@@ -797,6 +1729,9 @@ function wrapOpenAI(client) {
|
|
|
797
1729
|
if (prop === "chat" && value && typeof value === "object") {
|
|
798
1730
|
return wrapOpenAIChat(value);
|
|
799
1731
|
}
|
|
1732
|
+
if (prop === "responses" && value && typeof value === "object") {
|
|
1733
|
+
return wrapOpenAIResponses(value);
|
|
1734
|
+
}
|
|
800
1735
|
if (prop === "completions" && value && typeof value === "object") {
|
|
801
1736
|
return wrapOpenAICompletions(value);
|
|
802
1737
|
}
|
|
@@ -829,6 +1764,17 @@ function wrapOpenAIChatCompletions(completions) {
|
|
|
829
1764
|
}
|
|
830
1765
|
});
|
|
831
1766
|
}
|
|
1767
|
+
/**
 * Proxies the OpenAI Responses API namespace so its `create` method is
 * replaced with a traced wrapper; `create` is bound to the underlying
 * resource so `this` inside the SDK still resolves correctly. All other
 * members pass through untouched.
 *
 * @param {object} responses - The client's `responses` namespace.
 * @returns {object} Proxied namespace with a traced `create`.
 */
function wrapOpenAIResponses(responses) {
  const handler = {
    get(target, prop, receiver) {
      const member = Reflect.get(target, prop, receiver);
      const isCreateFn = prop === "create" && typeof member === "function";
      return isCreateFn ? wrapResponsesCreate(member.bind(target)) : member;
    }
  };
  return new Proxy(responses, handler);
}
|
|
832
1778
|
function wrapOpenAICompletions(completions) {
|
|
833
1779
|
return new Proxy(completions, {
|
|
834
1780
|
get(target, prop, receiver) {
|