@lelemondev/sdk 0.3.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +536 -93
- package/dist/express-Dt5wT6_n.d.mts +67 -0
- package/dist/express-Dt5wT6_n.d.ts +67 -0
- package/dist/express.d.mts +1 -0
- package/dist/express.d.ts +1 -0
- package/dist/express.js +21 -0
- package/dist/express.js.map +1 -0
- package/dist/express.mjs +19 -0
- package/dist/express.mjs.map +1 -0
- package/dist/hono-Dzmu77iW.d.mts +80 -0
- package/dist/hono-Dzmu77iW.d.ts +80 -0
- package/dist/hono.d.mts +1 -0
- package/dist/hono.d.ts +1 -0
- package/dist/hono.js +23 -0
- package/dist/hono.js.map +1 -0
- package/dist/hono.mjs +21 -0
- package/dist/hono.mjs.map +1 -0
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +949 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +949 -3
- package/dist/index.mjs.map +1 -1
- package/dist/integrations.d.mts +4 -0
- package/dist/integrations.d.ts +4 -0
- package/dist/integrations.js +93 -0
- package/dist/integrations.js.map +1 -0
- package/dist/integrations.mjs +88 -0
- package/dist/integrations.mjs.map +1 -0
- package/dist/lambda-CAuiF9dH.d.mts +79 -0
- package/dist/lambda-CAuiF9dH.d.ts +79 -0
- package/dist/lambda.d.mts +1 -0
- package/dist/lambda.d.ts +1 -0
- package/dist/lambda.js +21 -0
- package/dist/lambda.js.map +1 -0
- package/dist/lambda.mjs +19 -0
- package/dist/lambda.mjs.map +1 -0
- package/dist/next-BC9PmEho.d.mts +100 -0
- package/dist/next-BC9PmEho.d.ts +100 -0
- package/dist/next.d.mts +1 -0
- package/dist/next.d.ts +1 -0
- package/dist/next.js +33 -0
- package/dist/next.js.map +1 -0
- package/dist/next.mjs +30 -0
- package/dist/next.mjs.map +1 -0
- package/package.json +59 -11
package/dist/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
/* @
|
|
3
|
+
/* @lelemondev/sdk - LLM Observability */
|
|
4
4
|
var __defProp = Object.defineProperty;
|
|
5
5
|
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
6
6
|
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
@@ -311,7 +311,7 @@ function canHandle(client) {
|
|
|
311
311
|
const constructorName = client.constructor?.name;
|
|
312
312
|
if (constructorName === "OpenAI") return true;
|
|
313
313
|
const c = client;
|
|
314
|
-
return !!(c.chat && c.completions);
|
|
314
|
+
return !!(c.chat && c.completions) || !!c.responses;
|
|
315
315
|
}
|
|
316
316
|
function wrapChatCreate(originalFn) {
|
|
317
317
|
return async function wrappedChatCreate(...args) {
|
|
@@ -351,6 +351,45 @@ function wrapChatCreate(originalFn) {
|
|
|
351
351
|
}
|
|
352
352
|
};
|
|
353
353
|
}
|
|
354
|
+
function wrapResponsesCreate(originalFn) {
|
|
355
|
+
return async function wrappedResponsesCreate(...args) {
|
|
356
|
+
const startTime = Date.now();
|
|
357
|
+
const request = args[0] || {};
|
|
358
|
+
const isStreaming = request.stream === true;
|
|
359
|
+
try {
|
|
360
|
+
const response = await originalFn(...args);
|
|
361
|
+
if (isStreaming && isAsyncIterable(response)) {
|
|
362
|
+
return wrapResponsesStream(response, request, startTime);
|
|
363
|
+
}
|
|
364
|
+
const durationMs = Date.now() - startTime;
|
|
365
|
+
const extracted = extractResponsesResult(response);
|
|
366
|
+
captureTrace({
|
|
367
|
+
provider: PROVIDER_NAME,
|
|
368
|
+
model: request.model || "unknown",
|
|
369
|
+
input: { instructions: request.instructions, input: request.input },
|
|
370
|
+
output: extracted.output,
|
|
371
|
+
inputTokens: extracted.inputTokens,
|
|
372
|
+
outputTokens: extracted.outputTokens,
|
|
373
|
+
durationMs,
|
|
374
|
+
status: "success",
|
|
375
|
+
streaming: false,
|
|
376
|
+
metadata: extracted.metadata
|
|
377
|
+
});
|
|
378
|
+
return response;
|
|
379
|
+
} catch (error) {
|
|
380
|
+
const durationMs = Date.now() - startTime;
|
|
381
|
+
captureError({
|
|
382
|
+
provider: PROVIDER_NAME,
|
|
383
|
+
model: request.model || "unknown",
|
|
384
|
+
input: { instructions: request.instructions, input: request.input },
|
|
385
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
386
|
+
durationMs,
|
|
387
|
+
streaming: isStreaming
|
|
388
|
+
});
|
|
389
|
+
throw error;
|
|
390
|
+
}
|
|
391
|
+
};
|
|
392
|
+
}
|
|
354
393
|
function wrapCompletionCreate(originalFn) {
|
|
355
394
|
return async function wrappedCompletionCreate(...args) {
|
|
356
395
|
const startTime = Date.now();
|
|
@@ -542,6 +581,85 @@ function extractStreamChunkTokens(chunk) {
|
|
|
542
581
|
return null;
|
|
543
582
|
}
|
|
544
583
|
}
|
|
584
|
+
function extractResponsesResult(response) {
|
|
585
|
+
let output = response.output_text || null;
|
|
586
|
+
if (!output && response.output && Array.isArray(response.output)) {
|
|
587
|
+
const textItems = response.output.filter((item) => {
|
|
588
|
+
const i = item;
|
|
589
|
+
return i.type === "message" || i.type === "text";
|
|
590
|
+
}).map((item) => {
|
|
591
|
+
const i = item;
|
|
592
|
+
if (i.type === "message" && i.content) {
|
|
593
|
+
const content = i.content;
|
|
594
|
+
return content.filter((c) => c.type === "text" || c.type === "output_text").map((c) => c.text || "").join("");
|
|
595
|
+
}
|
|
596
|
+
return i.text || "";
|
|
597
|
+
});
|
|
598
|
+
output = textItems.join("");
|
|
599
|
+
}
|
|
600
|
+
const usage = response.usage || {};
|
|
601
|
+
const inputTokens = isValidNumber(usage.input_tokens) ? usage.input_tokens : 0;
|
|
602
|
+
const outputTokens = isValidNumber(usage.output_tokens) ? usage.output_tokens : 0;
|
|
603
|
+
const metadata = {};
|
|
604
|
+
if (response.id) {
|
|
605
|
+
metadata.responseId = response.id;
|
|
606
|
+
}
|
|
607
|
+
return {
|
|
608
|
+
output,
|
|
609
|
+
inputTokens,
|
|
610
|
+
outputTokens,
|
|
611
|
+
metadata: Object.keys(metadata).length > 0 ? metadata : {}
|
|
612
|
+
};
|
|
613
|
+
}
|
|
614
|
+
async function* wrapResponsesStream(stream, request, startTime) {
|
|
615
|
+
const chunks = [];
|
|
616
|
+
let inputTokens = 0;
|
|
617
|
+
let outputTokens = 0;
|
|
618
|
+
let error = null;
|
|
619
|
+
try {
|
|
620
|
+
for await (const event of stream) {
|
|
621
|
+
const e = event;
|
|
622
|
+
if (e.type === "response.output_text.delta" && e.delta) {
|
|
623
|
+
chunks.push(e.delta);
|
|
624
|
+
}
|
|
625
|
+
if (e.type === "response.done" && e.response) {
|
|
626
|
+
const resp = e.response;
|
|
627
|
+
if (resp.usage) {
|
|
628
|
+
inputTokens = resp.usage.input_tokens || 0;
|
|
629
|
+
outputTokens = resp.usage.output_tokens || 0;
|
|
630
|
+
}
|
|
631
|
+
}
|
|
632
|
+
yield event;
|
|
633
|
+
}
|
|
634
|
+
} catch (err) {
|
|
635
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
636
|
+
throw err;
|
|
637
|
+
} finally {
|
|
638
|
+
const durationMs = Date.now() - startTime;
|
|
639
|
+
if (error) {
|
|
640
|
+
captureError({
|
|
641
|
+
provider: PROVIDER_NAME,
|
|
642
|
+
model: request.model || "unknown",
|
|
643
|
+
input: { instructions: request.instructions, input: request.input },
|
|
644
|
+
error,
|
|
645
|
+
durationMs,
|
|
646
|
+
streaming: true
|
|
647
|
+
});
|
|
648
|
+
} else {
|
|
649
|
+
captureTrace({
|
|
650
|
+
provider: PROVIDER_NAME,
|
|
651
|
+
model: request.model || "unknown",
|
|
652
|
+
input: { instructions: request.instructions, input: request.input },
|
|
653
|
+
output: chunks.join(""),
|
|
654
|
+
inputTokens,
|
|
655
|
+
outputTokens,
|
|
656
|
+
durationMs,
|
|
657
|
+
status: "success",
|
|
658
|
+
streaming: true
|
|
659
|
+
});
|
|
660
|
+
}
|
|
661
|
+
}
|
|
662
|
+
}
|
|
545
663
|
|
|
546
664
|
// src/providers/anthropic.ts
|
|
547
665
|
var PROVIDER_NAME2 = "anthropic";
|
|
@@ -765,6 +883,802 @@ function extractTokens2(response) {
|
|
|
765
883
|
}
|
|
766
884
|
}
|
|
767
885
|
|
|
886
|
+
// src/providers/bedrock.ts
|
|
887
|
+
var PROVIDER_NAME3 = "bedrock";
|
|
888
|
+
function canHandle3(client) {
|
|
889
|
+
if (!client || typeof client !== "object") return false;
|
|
890
|
+
const constructorName = client.constructor?.name;
|
|
891
|
+
if (constructorName === "BedrockRuntimeClient") return true;
|
|
892
|
+
const c = client;
|
|
893
|
+
if (typeof c.send !== "function") return false;
|
|
894
|
+
if (!c.config || typeof c.config !== "object") return false;
|
|
895
|
+
return "region" in c.config;
|
|
896
|
+
}
|
|
897
|
+
function wrap(client) {
|
|
898
|
+
const bedrockClient = client;
|
|
899
|
+
return new Proxy(bedrockClient, {
|
|
900
|
+
get(target, prop, receiver) {
|
|
901
|
+
const value = Reflect.get(target, prop, receiver);
|
|
902
|
+
if (prop === "send" && typeof value === "function") {
|
|
903
|
+
return wrapSend(value.bind(target));
|
|
904
|
+
}
|
|
905
|
+
return value;
|
|
906
|
+
}
|
|
907
|
+
});
|
|
908
|
+
}
|
|
909
|
+
function wrapSend(originalSend) {
|
|
910
|
+
return async function tracedSend(command) {
|
|
911
|
+
const commandName = command.constructor?.name || "";
|
|
912
|
+
switch (commandName) {
|
|
913
|
+
case "ConverseCommand":
|
|
914
|
+
return handleConverse(originalSend, command);
|
|
915
|
+
case "ConverseStreamCommand":
|
|
916
|
+
return handleConverseStream(originalSend, command);
|
|
917
|
+
case "InvokeModelCommand":
|
|
918
|
+
return handleInvokeModel(originalSend, command);
|
|
919
|
+
case "InvokeModelWithResponseStreamCommand":
|
|
920
|
+
return handleInvokeModelStream(originalSend, command);
|
|
921
|
+
default:
|
|
922
|
+
return originalSend(command);
|
|
923
|
+
}
|
|
924
|
+
};
|
|
925
|
+
}
|
|
926
|
+
async function handleConverse(send, command) {
|
|
927
|
+
const startTime = Date.now();
|
|
928
|
+
const input = command.input;
|
|
929
|
+
try {
|
|
930
|
+
const response = await send(command);
|
|
931
|
+
const durationMs = Date.now() - startTime;
|
|
932
|
+
const extracted = extractConverseOutput(response);
|
|
933
|
+
captureTrace({
|
|
934
|
+
provider: PROVIDER_NAME3,
|
|
935
|
+
model: input.modelId || "unknown",
|
|
936
|
+
input: { system: input.system, messages: input.messages },
|
|
937
|
+
output: extracted.output,
|
|
938
|
+
inputTokens: extracted.inputTokens,
|
|
939
|
+
outputTokens: extracted.outputTokens,
|
|
940
|
+
durationMs,
|
|
941
|
+
status: "success",
|
|
942
|
+
streaming: false,
|
|
943
|
+
metadata: {
|
|
944
|
+
stopReason: response.stopReason,
|
|
945
|
+
hasToolUse: extracted.hasToolUse,
|
|
946
|
+
cacheReadTokens: extracted.cacheReadTokens,
|
|
947
|
+
cacheWriteTokens: extracted.cacheWriteTokens,
|
|
948
|
+
latencyMs: response.metrics?.latencyMs
|
|
949
|
+
}
|
|
950
|
+
});
|
|
951
|
+
return response;
|
|
952
|
+
} catch (error) {
|
|
953
|
+
captureError({
|
|
954
|
+
provider: PROVIDER_NAME3,
|
|
955
|
+
model: input.modelId || "unknown",
|
|
956
|
+
input: { system: input.system, messages: input.messages },
|
|
957
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
958
|
+
durationMs: Date.now() - startTime,
|
|
959
|
+
streaming: false
|
|
960
|
+
});
|
|
961
|
+
throw error;
|
|
962
|
+
}
|
|
963
|
+
}
|
|
964
|
+
async function handleConverseStream(send, command) {
|
|
965
|
+
const startTime = Date.now();
|
|
966
|
+
const input = command.input;
|
|
967
|
+
try {
|
|
968
|
+
const response = await send(command);
|
|
969
|
+
if (response.stream) {
|
|
970
|
+
return {
|
|
971
|
+
...response,
|
|
972
|
+
stream: wrapConverseStream(response.stream, input, startTime)
|
|
973
|
+
};
|
|
974
|
+
}
|
|
975
|
+
return response;
|
|
976
|
+
} catch (error) {
|
|
977
|
+
captureError({
|
|
978
|
+
provider: PROVIDER_NAME3,
|
|
979
|
+
model: input.modelId || "unknown",
|
|
980
|
+
input: { system: input.system, messages: input.messages },
|
|
981
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
982
|
+
durationMs: Date.now() - startTime,
|
|
983
|
+
streaming: true
|
|
984
|
+
});
|
|
985
|
+
throw error;
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
async function* wrapConverseStream(stream, input, startTime) {
|
|
989
|
+
const chunks = [];
|
|
990
|
+
let inputTokens = 0;
|
|
991
|
+
let outputTokens = 0;
|
|
992
|
+
let error = null;
|
|
993
|
+
try {
|
|
994
|
+
for await (const event of stream) {
|
|
995
|
+
if (event.contentBlockDelta?.delta?.text) {
|
|
996
|
+
chunks.push(event.contentBlockDelta.delta.text);
|
|
997
|
+
}
|
|
998
|
+
if (event.metadata?.usage) {
|
|
999
|
+
inputTokens = event.metadata.usage.inputTokens || 0;
|
|
1000
|
+
outputTokens = event.metadata.usage.outputTokens || 0;
|
|
1001
|
+
}
|
|
1002
|
+
yield event;
|
|
1003
|
+
}
|
|
1004
|
+
} catch (err) {
|
|
1005
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1006
|
+
throw err;
|
|
1007
|
+
} finally {
|
|
1008
|
+
const durationMs = Date.now() - startTime;
|
|
1009
|
+
if (error) {
|
|
1010
|
+
captureError({
|
|
1011
|
+
provider: PROVIDER_NAME3,
|
|
1012
|
+
model: input.modelId || "unknown",
|
|
1013
|
+
input: { system: input.system, messages: input.messages },
|
|
1014
|
+
error,
|
|
1015
|
+
durationMs,
|
|
1016
|
+
streaming: true
|
|
1017
|
+
});
|
|
1018
|
+
} else {
|
|
1019
|
+
captureTrace({
|
|
1020
|
+
provider: PROVIDER_NAME3,
|
|
1021
|
+
model: input.modelId || "unknown",
|
|
1022
|
+
input: { system: input.system, messages: input.messages },
|
|
1023
|
+
output: chunks.join(""),
|
|
1024
|
+
inputTokens,
|
|
1025
|
+
outputTokens,
|
|
1026
|
+
durationMs,
|
|
1027
|
+
status: "success",
|
|
1028
|
+
streaming: true
|
|
1029
|
+
});
|
|
1030
|
+
}
|
|
1031
|
+
}
|
|
1032
|
+
}
|
|
1033
|
+
async function handleInvokeModel(send, command) {
|
|
1034
|
+
const startTime = Date.now();
|
|
1035
|
+
const input = command.input;
|
|
1036
|
+
try {
|
|
1037
|
+
const response = await send(command);
|
|
1038
|
+
const durationMs = Date.now() - startTime;
|
|
1039
|
+
const parsed = parseInvokeModelBody(response.body);
|
|
1040
|
+
captureTrace({
|
|
1041
|
+
provider: PROVIDER_NAME3,
|
|
1042
|
+
model: input.modelId || "unknown",
|
|
1043
|
+
input: parseRequestBody(input.body),
|
|
1044
|
+
output: parsed.output,
|
|
1045
|
+
inputTokens: parsed.inputTokens,
|
|
1046
|
+
outputTokens: parsed.outputTokens,
|
|
1047
|
+
durationMs,
|
|
1048
|
+
status: "success",
|
|
1049
|
+
streaming: false
|
|
1050
|
+
});
|
|
1051
|
+
return response;
|
|
1052
|
+
} catch (error) {
|
|
1053
|
+
captureError({
|
|
1054
|
+
provider: PROVIDER_NAME3,
|
|
1055
|
+
model: input.modelId || "unknown",
|
|
1056
|
+
input: parseRequestBody(input.body),
|
|
1057
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1058
|
+
durationMs: Date.now() - startTime,
|
|
1059
|
+
streaming: false
|
|
1060
|
+
});
|
|
1061
|
+
throw error;
|
|
1062
|
+
}
|
|
1063
|
+
}
|
|
1064
|
+
async function handleInvokeModelStream(send, command) {
|
|
1065
|
+
const startTime = Date.now();
|
|
1066
|
+
const input = command.input;
|
|
1067
|
+
try {
|
|
1068
|
+
const response = await send(command);
|
|
1069
|
+
if (response.body) {
|
|
1070
|
+
return {
|
|
1071
|
+
...response,
|
|
1072
|
+
body: wrapInvokeModelStream(response.body, input, startTime)
|
|
1073
|
+
};
|
|
1074
|
+
}
|
|
1075
|
+
return response;
|
|
1076
|
+
} catch (error) {
|
|
1077
|
+
captureError({
|
|
1078
|
+
provider: PROVIDER_NAME3,
|
|
1079
|
+
model: input.modelId || "unknown",
|
|
1080
|
+
input: parseRequestBody(input.body),
|
|
1081
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1082
|
+
durationMs: Date.now() - startTime,
|
|
1083
|
+
streaming: true
|
|
1084
|
+
});
|
|
1085
|
+
throw error;
|
|
1086
|
+
}
|
|
1087
|
+
}
|
|
1088
|
+
async function* wrapInvokeModelStream(stream, input, startTime) {
|
|
1089
|
+
const chunks = [];
|
|
1090
|
+
let inputTokens = 0;
|
|
1091
|
+
let outputTokens = 0;
|
|
1092
|
+
let error = null;
|
|
1093
|
+
try {
|
|
1094
|
+
for await (const event of stream) {
|
|
1095
|
+
if (event.chunk?.bytes) {
|
|
1096
|
+
const parsed = tryParseStreamChunk(event.chunk.bytes);
|
|
1097
|
+
if (parsed) {
|
|
1098
|
+
if (parsed.text) chunks.push(parsed.text);
|
|
1099
|
+
if (parsed.inputTokens) inputTokens = parsed.inputTokens;
|
|
1100
|
+
if (parsed.outputTokens) outputTokens = parsed.outputTokens;
|
|
1101
|
+
}
|
|
1102
|
+
}
|
|
1103
|
+
yield event;
|
|
1104
|
+
}
|
|
1105
|
+
} catch (err) {
|
|
1106
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1107
|
+
throw err;
|
|
1108
|
+
} finally {
|
|
1109
|
+
const durationMs = Date.now() - startTime;
|
|
1110
|
+
if (error) {
|
|
1111
|
+
captureError({
|
|
1112
|
+
provider: PROVIDER_NAME3,
|
|
1113
|
+
model: input.modelId || "unknown",
|
|
1114
|
+
input: parseRequestBody(input.body),
|
|
1115
|
+
error,
|
|
1116
|
+
durationMs,
|
|
1117
|
+
streaming: true
|
|
1118
|
+
});
|
|
1119
|
+
} else {
|
|
1120
|
+
captureTrace({
|
|
1121
|
+
provider: PROVIDER_NAME3,
|
|
1122
|
+
model: input.modelId || "unknown",
|
|
1123
|
+
input: parseRequestBody(input.body),
|
|
1124
|
+
output: chunks.join(""),
|
|
1125
|
+
inputTokens,
|
|
1126
|
+
outputTokens,
|
|
1127
|
+
durationMs,
|
|
1128
|
+
status: "success",
|
|
1129
|
+
streaming: true
|
|
1130
|
+
});
|
|
1131
|
+
}
|
|
1132
|
+
}
|
|
1133
|
+
}
|
|
1134
|
+
function extractConverseOutput(response) {
|
|
1135
|
+
const content = response.output?.message?.content;
|
|
1136
|
+
const hasToolUse = Array.isArray(content) && content.some((c) => c.toolUse);
|
|
1137
|
+
const output = safeExtract(() => {
|
|
1138
|
+
if (!Array.isArray(content)) return null;
|
|
1139
|
+
if (hasToolUse) {
|
|
1140
|
+
return content;
|
|
1141
|
+
}
|
|
1142
|
+
return content.map((c) => c.text || "").join("");
|
|
1143
|
+
}, null);
|
|
1144
|
+
const usage = response.usage || {};
|
|
1145
|
+
return {
|
|
1146
|
+
output,
|
|
1147
|
+
inputTokens: isValidNumber(usage.inputTokens) ? usage.inputTokens : 0,
|
|
1148
|
+
outputTokens: isValidNumber(usage.outputTokens) ? usage.outputTokens : 0,
|
|
1149
|
+
cacheReadTokens: isValidNumber(usage.cacheReadInputTokens) ? usage.cacheReadInputTokens : 0,
|
|
1150
|
+
cacheWriteTokens: isValidNumber(usage.cacheWriteInputTokens) ? usage.cacheWriteInputTokens : 0,
|
|
1151
|
+
hasToolUse
|
|
1152
|
+
};
|
|
1153
|
+
}
|
|
1154
|
+
function parseInvokeModelBody(body) {
|
|
1155
|
+
try {
|
|
1156
|
+
const text = new TextDecoder().decode(body);
|
|
1157
|
+
const parsed = JSON.parse(text);
|
|
1158
|
+
if (parsed.content && Array.isArray(parsed.content)) {
|
|
1159
|
+
const output = parsed.content.filter((c) => c.type === "text").map((c) => c.text || "").join("");
|
|
1160
|
+
return {
|
|
1161
|
+
output,
|
|
1162
|
+
inputTokens: parsed.usage?.input_tokens || 0,
|
|
1163
|
+
outputTokens: parsed.usage?.output_tokens || 0
|
|
1164
|
+
};
|
|
1165
|
+
}
|
|
1166
|
+
if (parsed.results) {
|
|
1167
|
+
return {
|
|
1168
|
+
output: parsed.results[0]?.outputText || parsed.results,
|
|
1169
|
+
inputTokens: parsed.inputTextTokenCount || 0,
|
|
1170
|
+
outputTokens: parsed.results[0]?.tokenCount || 0
|
|
1171
|
+
};
|
|
1172
|
+
}
|
|
1173
|
+
if (parsed.generation) {
|
|
1174
|
+
return {
|
|
1175
|
+
output: parsed.generation,
|
|
1176
|
+
inputTokens: parsed.prompt_token_count || 0,
|
|
1177
|
+
outputTokens: parsed.generation_token_count || 0
|
|
1178
|
+
};
|
|
1179
|
+
}
|
|
1180
|
+
return { output: parsed, inputTokens: 0, outputTokens: 0 };
|
|
1181
|
+
} catch {
|
|
1182
|
+
return { output: null, inputTokens: 0, outputTokens: 0 };
|
|
1183
|
+
}
|
|
1184
|
+
}
|
|
1185
|
+
function parseRequestBody(body) {
|
|
1186
|
+
try {
|
|
1187
|
+
const text = typeof body === "string" ? body : new TextDecoder().decode(body);
|
|
1188
|
+
return JSON.parse(text);
|
|
1189
|
+
} catch {
|
|
1190
|
+
return body;
|
|
1191
|
+
}
|
|
1192
|
+
}
|
|
1193
|
+
function tryParseStreamChunk(bytes) {
|
|
1194
|
+
try {
|
|
1195
|
+
const text = new TextDecoder().decode(bytes);
|
|
1196
|
+
const parsed = JSON.parse(text);
|
|
1197
|
+
if (parsed.type === "content_block_delta" && parsed.delta?.text) {
|
|
1198
|
+
return { text: parsed.delta.text };
|
|
1199
|
+
}
|
|
1200
|
+
if (parsed.type === "message_delta" && parsed.usage) {
|
|
1201
|
+
return { outputTokens: parsed.usage.output_tokens };
|
|
1202
|
+
}
|
|
1203
|
+
if (parsed.type === "message_start" && parsed.message?.usage) {
|
|
1204
|
+
return { inputTokens: parsed.message.usage.input_tokens };
|
|
1205
|
+
}
|
|
1206
|
+
return null;
|
|
1207
|
+
} catch {
|
|
1208
|
+
return null;
|
|
1209
|
+
}
|
|
1210
|
+
}
|
|
1211
|
+
|
|
1212
|
+
// src/providers/gemini.ts
|
|
1213
|
+
var PROVIDER_NAME4 = "gemini";
|
|
1214
|
+
function canHandle4(client) {
|
|
1215
|
+
if (!client || typeof client !== "object") return false;
|
|
1216
|
+
const constructorName = client.constructor?.name;
|
|
1217
|
+
if (constructorName === "GoogleGenerativeAI") return true;
|
|
1218
|
+
if (constructorName === "GoogleGenAI") return true;
|
|
1219
|
+
const c = client;
|
|
1220
|
+
if (typeof c.getGenerativeModel === "function") return true;
|
|
1221
|
+
if (c.models && typeof c.models.generate === "function") {
|
|
1222
|
+
return true;
|
|
1223
|
+
}
|
|
1224
|
+
return false;
|
|
1225
|
+
}
|
|
1226
|
+
function wrap2(client) {
|
|
1227
|
+
const geminiClient = client;
|
|
1228
|
+
return new Proxy(geminiClient, {
|
|
1229
|
+
get(target, prop, receiver) {
|
|
1230
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1231
|
+
if (prop === "getGenerativeModel" && typeof value === "function") {
|
|
1232
|
+
return wrapGetGenerativeModel(value.bind(target));
|
|
1233
|
+
}
|
|
1234
|
+
return value;
|
|
1235
|
+
}
|
|
1236
|
+
});
|
|
1237
|
+
}
|
|
1238
|
+
function wrapGetGenerativeModel(originalFn) {
|
|
1239
|
+
return function wrappedGetGenerativeModel(config) {
|
|
1240
|
+
const model = originalFn(config);
|
|
1241
|
+
return wrapGenerativeModel(model, config.model);
|
|
1242
|
+
};
|
|
1243
|
+
}
|
|
1244
|
+
function wrapGenerativeModel(model, modelName) {
|
|
1245
|
+
return new Proxy(model, {
|
|
1246
|
+
get(target, prop, receiver) {
|
|
1247
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1248
|
+
if (prop === "generateContent" && typeof value === "function") {
|
|
1249
|
+
return wrapGenerateContent(value.bind(target), modelName);
|
|
1250
|
+
}
|
|
1251
|
+
if (prop === "generateContentStream" && typeof value === "function") {
|
|
1252
|
+
return wrapGenerateContentStream(value.bind(target), modelName);
|
|
1253
|
+
}
|
|
1254
|
+
if (prop === "startChat" && typeof value === "function") {
|
|
1255
|
+
return wrapStartChat(value.bind(target), modelName);
|
|
1256
|
+
}
|
|
1257
|
+
return value;
|
|
1258
|
+
}
|
|
1259
|
+
});
|
|
1260
|
+
}
|
|
1261
|
+
function wrapGenerateContent(originalFn, modelName) {
|
|
1262
|
+
return async function wrappedGenerateContent(request) {
|
|
1263
|
+
const startTime = Date.now();
|
|
1264
|
+
const input = extractInput(request);
|
|
1265
|
+
try {
|
|
1266
|
+
const result = await originalFn(request);
|
|
1267
|
+
const durationMs = Date.now() - startTime;
|
|
1268
|
+
const extracted = extractGenerateContentResult(result);
|
|
1269
|
+
captureTrace({
|
|
1270
|
+
provider: PROVIDER_NAME4,
|
|
1271
|
+
model: modelName,
|
|
1272
|
+
input,
|
|
1273
|
+
output: extracted.output,
|
|
1274
|
+
inputTokens: extracted.inputTokens,
|
|
1275
|
+
outputTokens: extracted.outputTokens,
|
|
1276
|
+
durationMs,
|
|
1277
|
+
status: "success",
|
|
1278
|
+
streaming: false,
|
|
1279
|
+
metadata: extracted.metadata
|
|
1280
|
+
});
|
|
1281
|
+
return result;
|
|
1282
|
+
} catch (error) {
|
|
1283
|
+
captureError({
|
|
1284
|
+
provider: PROVIDER_NAME4,
|
|
1285
|
+
model: modelName,
|
|
1286
|
+
input,
|
|
1287
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1288
|
+
durationMs: Date.now() - startTime,
|
|
1289
|
+
streaming: false
|
|
1290
|
+
});
|
|
1291
|
+
throw error;
|
|
1292
|
+
}
|
|
1293
|
+
};
|
|
1294
|
+
}
|
|
1295
|
+
function wrapGenerateContentStream(originalFn, modelName) {
|
|
1296
|
+
return async function wrappedGenerateContentStream(request) {
|
|
1297
|
+
const startTime = Date.now();
|
|
1298
|
+
const input = extractInput(request);
|
|
1299
|
+
try {
|
|
1300
|
+
const result = await originalFn(request);
|
|
1301
|
+
const wrappedStream = wrapStream3(result.stream, modelName, input, startTime);
|
|
1302
|
+
return {
|
|
1303
|
+
...result,
|
|
1304
|
+
stream: wrappedStream
|
|
1305
|
+
};
|
|
1306
|
+
} catch (error) {
|
|
1307
|
+
captureError({
|
|
1308
|
+
provider: PROVIDER_NAME4,
|
|
1309
|
+
model: modelName,
|
|
1310
|
+
input,
|
|
1311
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1312
|
+
durationMs: Date.now() - startTime,
|
|
1313
|
+
streaming: true
|
|
1314
|
+
});
|
|
1315
|
+
throw error;
|
|
1316
|
+
}
|
|
1317
|
+
};
|
|
1318
|
+
}
|
|
1319
|
+
async function* wrapStream3(stream, modelName, input, startTime) {
|
|
1320
|
+
const chunks = [];
|
|
1321
|
+
let inputTokens = 0;
|
|
1322
|
+
let outputTokens = 0;
|
|
1323
|
+
let cachedTokens = 0;
|
|
1324
|
+
let thoughtsTokens = 0;
|
|
1325
|
+
let error = null;
|
|
1326
|
+
try {
|
|
1327
|
+
for await (const chunk of stream) {
|
|
1328
|
+
try {
|
|
1329
|
+
const text = chunk.text();
|
|
1330
|
+
if (text) {
|
|
1331
|
+
chunks.push(text);
|
|
1332
|
+
}
|
|
1333
|
+
} catch {
|
|
1334
|
+
}
|
|
1335
|
+
if (chunk.usageMetadata) {
|
|
1336
|
+
inputTokens = chunk.usageMetadata.promptTokenCount || 0;
|
|
1337
|
+
outputTokens = chunk.usageMetadata.candidatesTokenCount || 0;
|
|
1338
|
+
cachedTokens = chunk.usageMetadata.cachedContentTokenCount || 0;
|
|
1339
|
+
thoughtsTokens = chunk.usageMetadata.thoughtsTokenCount || 0;
|
|
1340
|
+
}
|
|
1341
|
+
yield chunk;
|
|
1342
|
+
}
|
|
1343
|
+
} catch (err) {
|
|
1344
|
+
error = err instanceof Error ? err : new Error(String(err));
|
|
1345
|
+
throw err;
|
|
1346
|
+
} finally {
|
|
1347
|
+
const durationMs = Date.now() - startTime;
|
|
1348
|
+
if (error) {
|
|
1349
|
+
captureError({
|
|
1350
|
+
provider: PROVIDER_NAME4,
|
|
1351
|
+
model: modelName,
|
|
1352
|
+
input,
|
|
1353
|
+
error,
|
|
1354
|
+
durationMs,
|
|
1355
|
+
streaming: true
|
|
1356
|
+
});
|
|
1357
|
+
} else {
|
|
1358
|
+
captureTrace({
|
|
1359
|
+
provider: PROVIDER_NAME4,
|
|
1360
|
+
model: modelName,
|
|
1361
|
+
input,
|
|
1362
|
+
output: chunks.join(""),
|
|
1363
|
+
inputTokens,
|
|
1364
|
+
outputTokens,
|
|
1365
|
+
durationMs,
|
|
1366
|
+
status: "success",
|
|
1367
|
+
streaming: true,
|
|
1368
|
+
metadata: {
|
|
1369
|
+
cachedTokens: cachedTokens > 0 ? cachedTokens : void 0,
|
|
1370
|
+
thoughtsTokens: thoughtsTokens > 0 ? thoughtsTokens : void 0
|
|
1371
|
+
}
|
|
1372
|
+
});
|
|
1373
|
+
}
|
|
1374
|
+
}
|
|
1375
|
+
}
|
|
1376
|
+
function wrapStartChat(originalFn, modelName) {
|
|
1377
|
+
return function wrappedStartChat(config) {
|
|
1378
|
+
const chat = originalFn(config);
|
|
1379
|
+
return wrapChatSession(chat, modelName);
|
|
1380
|
+
};
|
|
1381
|
+
}
|
|
1382
|
+
function wrapChatSession(chat, modelName) {
|
|
1383
|
+
return new Proxy(chat, {
|
|
1384
|
+
get(target, prop, receiver) {
|
|
1385
|
+
const value = Reflect.get(target, prop, receiver);
|
|
1386
|
+
if (prop === "sendMessage" && typeof value === "function") {
|
|
1387
|
+
return wrapSendMessage(value.bind(target), modelName);
|
|
1388
|
+
}
|
|
1389
|
+
if (prop === "sendMessageStream" && typeof value === "function") {
|
|
1390
|
+
return wrapSendMessageStream(value.bind(target), modelName);
|
|
1391
|
+
}
|
|
1392
|
+
return value;
|
|
1393
|
+
}
|
|
1394
|
+
});
|
|
1395
|
+
}
|
|
1396
|
+
function wrapSendMessage(originalFn, modelName) {
|
|
1397
|
+
return async function wrappedSendMessage(request) {
|
|
1398
|
+
const startTime = Date.now();
|
|
1399
|
+
const input = typeof request === "string" ? request : request;
|
|
1400
|
+
try {
|
|
1401
|
+
const result = await originalFn(request);
|
|
1402
|
+
const durationMs = Date.now() - startTime;
|
|
1403
|
+
const extracted = extractGenerateContentResult(result);
|
|
1404
|
+
captureTrace({
|
|
1405
|
+
provider: PROVIDER_NAME4,
|
|
1406
|
+
model: modelName,
|
|
1407
|
+
input,
|
|
1408
|
+
output: extracted.output,
|
|
1409
|
+
inputTokens: extracted.inputTokens,
|
|
1410
|
+
outputTokens: extracted.outputTokens,
|
|
1411
|
+
durationMs,
|
|
1412
|
+
status: "success",
|
|
1413
|
+
streaming: false,
|
|
1414
|
+
metadata: extracted.metadata
|
|
1415
|
+
});
|
|
1416
|
+
return result;
|
|
1417
|
+
} catch (error) {
|
|
1418
|
+
captureError({
|
|
1419
|
+
provider: PROVIDER_NAME4,
|
|
1420
|
+
model: modelName,
|
|
1421
|
+
input,
|
|
1422
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1423
|
+
durationMs: Date.now() - startTime,
|
|
1424
|
+
streaming: false
|
|
1425
|
+
});
|
|
1426
|
+
throw error;
|
|
1427
|
+
}
|
|
1428
|
+
};
|
|
1429
|
+
}
|
|
1430
|
+
function wrapSendMessageStream(originalFn, modelName) {
|
|
1431
|
+
return async function wrappedSendMessageStream(request) {
|
|
1432
|
+
const startTime = Date.now();
|
|
1433
|
+
const input = typeof request === "string" ? request : request;
|
|
1434
|
+
try {
|
|
1435
|
+
const result = await originalFn(request);
|
|
1436
|
+
const wrappedStream = wrapStream3(result.stream, modelName, input, startTime);
|
|
1437
|
+
return {
|
|
1438
|
+
...result,
|
|
1439
|
+
stream: wrappedStream
|
|
1440
|
+
};
|
|
1441
|
+
} catch (error) {
|
|
1442
|
+
captureError({
|
|
1443
|
+
provider: PROVIDER_NAME4,
|
|
1444
|
+
model: modelName,
|
|
1445
|
+
input,
|
|
1446
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
1447
|
+
durationMs: Date.now() - startTime,
|
|
1448
|
+
streaming: true
|
|
1449
|
+
});
|
|
1450
|
+
throw error;
|
|
1451
|
+
}
|
|
1452
|
+
};
|
|
1453
|
+
}
|
|
1454
|
+
function extractInput(request) {
|
|
1455
|
+
if (typeof request === "string") {
|
|
1456
|
+
return request;
|
|
1457
|
+
}
|
|
1458
|
+
if (request.contents) {
|
|
1459
|
+
return request.contents;
|
|
1460
|
+
}
|
|
1461
|
+
return request;
|
|
1462
|
+
}
|
|
1463
|
+
function extractGenerateContentResult(result) {
|
|
1464
|
+
const response = result.response;
|
|
1465
|
+
let output = null;
|
|
1466
|
+
try {
|
|
1467
|
+
output = response.text();
|
|
1468
|
+
} catch {
|
|
1469
|
+
output = safeExtract(() => {
|
|
1470
|
+
const content = response.candidates?.[0]?.content;
|
|
1471
|
+
if (content?.parts) {
|
|
1472
|
+
return content.parts;
|
|
1473
|
+
}
|
|
1474
|
+
return null;
|
|
1475
|
+
}, null);
|
|
1476
|
+
}
|
|
1477
|
+
const usage = response.usageMetadata;
|
|
1478
|
+
const inputTokens = isValidNumber(usage?.promptTokenCount) ? usage.promptTokenCount : 0;
|
|
1479
|
+
const outputTokens = isValidNumber(usage?.candidatesTokenCount) ? usage.candidatesTokenCount : 0;
|
|
1480
|
+
const metadata = {};
|
|
1481
|
+
if (usage?.cachedContentTokenCount && usage.cachedContentTokenCount > 0) {
|
|
1482
|
+
metadata.cachedTokens = usage.cachedContentTokenCount;
|
|
1483
|
+
}
|
|
1484
|
+
if (usage?.thoughtsTokenCount && usage.thoughtsTokenCount > 0) {
|
|
1485
|
+
metadata.thoughtsTokens = usage.thoughtsTokenCount;
|
|
1486
|
+
}
|
|
1487
|
+
const finishReason = response.candidates?.[0]?.finishReason;
|
|
1488
|
+
if (finishReason) {
|
|
1489
|
+
metadata.finishReason = finishReason;
|
|
1490
|
+
}
|
|
1491
|
+
return {
|
|
1492
|
+
output,
|
|
1493
|
+
inputTokens,
|
|
1494
|
+
outputTokens,
|
|
1495
|
+
metadata: Object.keys(metadata).length > 0 ? metadata : {}
|
|
1496
|
+
};
|
|
1497
|
+
}
|
|
1498
|
+
|
|
1499
|
+
// src/providers/openrouter.ts
|
|
1500
|
+
var OPENROUTER_BASE_URL = "openrouter.ai";
|
|
1501
|
+
var PROVIDER_NAME5 = "openrouter";
|
|
1502
|
+
// Returns true when `client` looks like an OpenAI-compatible SDK instance
// that is pointed at the OpenRouter API host.
function canHandle5(client) {
  // Must be a non-null object...
  if (client === null || typeof client !== "object") {
    return false;
  }
  const candidate = client;
  // ...exposing the chat.completions.create surface...
  const hasChatCreate = Boolean(candidate.chat?.completions?.create);
  if (!hasChatCreate) {
    return false;
  }
  // ...and configured with an OpenRouter base URL.
  return (candidate.baseURL || "").includes(OPENROUTER_BASE_URL);
}
|
|
1509
|
+
// Wraps an OpenRouter client in a Proxy so that accesses to its `chat`
// namespace are routed through the tracing wrapper; all other properties
// pass through untouched.
function wrap3(client) {
  const openrouterClient = client;
  const handler = {
    get(target, prop, receiver) {
      const original = Reflect.get(target, prop, receiver);
      const isChatNamespace = prop === "chat" && original !== null && typeof original === "object";
      return isChatNamespace ? wrapChatNamespace(original) : original;
    }
  };
  return new Proxy(openrouterClient, handler);
}
|
|
1521
|
+
// Wraps a client's `chat` namespace so that its `completions` sub-namespace
// is served through the tracing proxy; other members pass through as-is.
function wrapChatNamespace(chat) {
  // Nullish namespaces pass through untouched.
  if (!chat) {
    return chat;
  }
  return new Proxy(chat, {
    get(target, prop, receiver) {
      const resolved = Reflect.get(target, prop, receiver);
      if (prop !== "completions") {
        return resolved;
      }
      const wrappable = resolved !== null && typeof resolved === "object";
      return wrappable ? wrapChatCompletions(resolved) : resolved;
    }
  });
}
|
|
1533
|
+
// Wraps the `chat.completions` object so that its `create` method is replaced
// by the traced variant, bound to the original target so `this` stays correct.
function wrapChatCompletions(completions) {
  return new Proxy(completions, {
    get(target, prop, receiver) {
      const member = Reflect.get(target, prop, receiver);
      const isCreateFn = prop === "create" && typeof member === "function";
      return isCreateFn ? wrapChatCreate2(member.bind(target)) : member;
    }
  });
}
|
|
1544
|
+
// Returns a traced replacement for chat.completions.create. Non-streaming
// calls are timed and recorded via captureTrace; streaming calls delegate to
// wrapStream4; failures are recorded via captureError and rethrown.
function wrapChatCreate2(originalFn) {
  return async function wrappedChatCreate(...args) {
    const startedAt = Date.now();
    const request = args[0] || {};
    const wantsStream = request.stream === true;
    try {
      const response = await originalFn(...args);
      // Streaming responses are traced incrementally by the stream wrapper.
      if (wantsStream && isAsyncIterable3(response)) {
        return wrapStream4(response, request, startedAt);
      }
      const extracted = extractChatCompletion2(response);
      captureTrace({
        provider: PROVIDER_NAME5,
        model: request.model || extracted.model || "unknown",
        input: request.messages,
        output: extracted.output,
        inputTokens: extracted.tokens?.inputTokens || 0,
        outputTokens: extracted.tokens?.outputTokens || 0,
        durationMs: Date.now() - startedAt,
        status: "success",
        streaming: false,
        metadata: extracted.metadata
      });
      return response;
    } catch (error) {
      captureError({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        error: error instanceof Error ? error : new Error(String(error)),
        durationMs: Date.now() - startedAt,
        streaming: wantsStream
      });
      throw error;
    }
  };
}
|
|
1583
|
+
// True when `value` implements the async-iteration protocol.
function isAsyncIterable3(value) {
  if (value == null) {
    return false;
  }
  return typeof value[Symbol.asyncIterator] === "function";
}
|
|
1586
|
+
// Re-yields every chunk of an OpenRouter completion stream to the caller
// while accumulating the text deltas, the latest token usage, and the
// generation id; a single trace (or error) is captured when the stream ends.
async function* wrapStream4(stream, request, startTime) {
  const chunks = [];
  let tokens = null;
  let generationId = null;
  let error = null;
  try {
    for await (const chunk of stream) {
      const c = chunk;
      // The first chunk carrying an id names the whole generation.
      if (!generationId && c.id) {
        generationId = c.id;
      }
      const content = c.choices?.[0]?.delta?.content;
      if (content) {
        chunks.push(content);
      }
      // Keep the most recent usage payload seen; presumably it arrives on a
      // late/final chunk — overwritten on each occurrence.
      const chunkTokens = extractStreamChunkTokens2(c);
      if (chunkTokens) {
        tokens = chunkTokens;
      }
      yield chunk;
    }
  } catch (err) {
    // Remember the failure for the finally block, then propagate it.
    error = err instanceof Error ? err : new Error(String(err));
    throw err;
  } finally {
    // Runs on normal completion, on thrown errors, and when the consumer
    // breaks out of / returns from the stream early.
    // NOTE(review): an abandoned stream is therefore recorded as a "success"
    // trace with partial output — confirm that is intended.
    const durationMs = Date.now() - startTime;
    const output = chunks.join("");
    const metadata = {};
    if (generationId) {
      metadata.generationId = generationId;
    }
    if (error) {
      captureError({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        error,
        durationMs,
        streaming: true
      });
    } else {
      captureTrace({
        provider: PROVIDER_NAME5,
        model: request.model || "unknown",
        input: request.messages,
        output,
        inputTokens: tokens?.inputTokens || 0,
        outputTokens: tokens?.outputTokens || 0,
        durationMs,
        status: "success",
        streaming: true,
        // Unlike the non-streaming path, empty metadata is sent as undefined.
        metadata: Object.keys(metadata).length > 0 ? metadata : void 0
      });
    }
  }
}
|
|
1642
|
+
// Extracts the trace-relevant fields from a non-streaming OpenRouter chat
// completion: model, first choice's message content, token usage (via
// extractTokens3), and the generation id as metadata.
function extractChatCompletion2(response) {
  const model = response.model || null;
  const output = response.choices?.[0]?.message?.content || null;
  const tokens = extractTokens3(response);
  const metadata = {};
  if (response.id) {
    metadata.generationId = response.id;
  }
  // The former `Object.keys(metadata).length > 0 ? metadata : {}` ternary was
  // dead code: an empty `metadata` is already `{}`, so return it directly.
  return {
    model,
    output,
    tokens,
    metadata
  };
}
|
|
1657
|
+
// Converts an OpenAI-style `usage` object into the SDK's token shape.
// Returns null when usage is absent or carries no countable side.
function extractTokens3(response) {
  const usage = response.usage;
  if (!usage) return null;
  const { prompt_tokens: promptTokens, completion_tokens: completionTokens, total_tokens: totalTokens } = usage;
  const hasInput = isValidNumber(promptTokens);
  const hasOutput = isValidNumber(completionTokens);
  // Without at least one valid count there is nothing worth reporting.
  if (!hasInput && !hasOutput) {
    return null;
  }
  return {
    inputTokens: hasInput ? promptTokens : 0,
    outputTokens: hasOutput ? completionTokens : 0,
    totalTokens: isValidNumber(totalTokens) ? totalTokens : 0
  };
}
|
|
1672
|
+
// Reads token usage off a streaming chunk (present on the final chunk when
// usage reporting is enabled). Returns null when the chunk carries no usage.
function extractStreamChunkTokens2(chunk) {
  const usage = chunk.usage;
  if (!usage) return null;
  return {
    inputTokens: isValidNumber(usage.prompt_tokens) ? usage.prompt_tokens : 0,
    outputTokens: isValidNumber(usage.completion_tokens) ? usage.completion_tokens : 0,
    // Previously hard-coded to 0 even when usage.total_tokens was present;
    // now mirrors extractTokens3 for consistency across code paths.
    totalTokens: isValidNumber(usage.total_tokens) ? usage.total_tokens : 0
  };
}
|
|
1681
|
+
|
|
768
1682
|
// src/observe.ts
|
|
769
1683
|
function observe(client, options) {
|
|
770
1684
|
if (options) {
|
|
@@ -774,6 +1688,12 @@ function observe(client, options) {
|
|
|
774
1688
|
if (config.disabled) {
|
|
775
1689
|
return client;
|
|
776
1690
|
}
|
|
1691
|
+
if (canHandle5(client)) {
|
|
1692
|
+
if (config.debug) {
|
|
1693
|
+
console.log("[Lelemon] Wrapping OpenRouter client");
|
|
1694
|
+
}
|
|
1695
|
+
return wrap3(client);
|
|
1696
|
+
}
|
|
777
1697
|
if (canHandle(client)) {
|
|
778
1698
|
if (config.debug) {
|
|
779
1699
|
console.log("[Lelemon] Wrapping OpenAI client");
|
|
@@ -786,8 +1706,20 @@ function observe(client, options) {
|
|
|
786
1706
|
}
|
|
787
1707
|
return wrapAnthropic(client);
|
|
788
1708
|
}
|
|
1709
|
+
if (canHandle3(client)) {
|
|
1710
|
+
if (config.debug) {
|
|
1711
|
+
console.log("[Lelemon] Wrapping Bedrock client");
|
|
1712
|
+
}
|
|
1713
|
+
return wrap(client);
|
|
1714
|
+
}
|
|
1715
|
+
if (canHandle4(client)) {
|
|
1716
|
+
if (config.debug) {
|
|
1717
|
+
console.log("[Lelemon] Wrapping Gemini client");
|
|
1718
|
+
}
|
|
1719
|
+
return wrap2(client);
|
|
1720
|
+
}
|
|
789
1721
|
console.warn(
|
|
790
|
-
"[Lelemon] Unknown client type. Tracing not enabled. Supported: OpenAI, Anthropic"
|
|
1722
|
+
"[Lelemon] Unknown client type. Tracing not enabled. Supported: OpenAI, OpenRouter, Anthropic, Bedrock, Gemini"
|
|
791
1723
|
);
|
|
792
1724
|
return client;
|
|
793
1725
|
}
|
|
@@ -799,6 +1731,9 @@ function wrapOpenAI(client) {
|
|
|
799
1731
|
if (prop === "chat" && value && typeof value === "object") {
|
|
800
1732
|
return wrapOpenAIChat(value);
|
|
801
1733
|
}
|
|
1734
|
+
if (prop === "responses" && value && typeof value === "object") {
|
|
1735
|
+
return wrapOpenAIResponses(value);
|
|
1736
|
+
}
|
|
802
1737
|
if (prop === "completions" && value && typeof value === "object") {
|
|
803
1738
|
return wrapOpenAICompletions(value);
|
|
804
1739
|
}
|
|
@@ -831,6 +1766,17 @@ function wrapOpenAIChatCompletions(completions) {
|
|
|
831
1766
|
}
|
|
832
1767
|
});
|
|
833
1768
|
}
|
|
1769
|
+
// Wraps the OpenAI `responses` namespace so that its `create` method is
// replaced by the traced variant; every other member passes through.
function wrapOpenAIResponses(responses) {
  const handler = {
    get(target, prop, receiver) {
      const member = Reflect.get(target, prop, receiver);
      // Bind to the original target so `create` keeps its expected `this`.
      if (prop === "create" && typeof member === "function") {
        return wrapResponsesCreate(member.bind(target));
      }
      return member;
    }
  };
  return new Proxy(responses, handler);
}
|
|
834
1780
|
function wrapOpenAICompletions(completions) {
|
|
835
1781
|
return new Proxy(completions, {
|
|
836
1782
|
get(target, prop, receiver) {
|