@librechat/agents 3.1.57 → 3.1.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/agents/AgentContext.cjs +326 -62
- package/dist/cjs/agents/AgentContext.cjs.map +1 -1
- package/dist/cjs/common/enum.cjs +13 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/events.cjs +7 -27
- package/dist/cjs/events.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +303 -222
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +4 -4
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
- package/dist/cjs/llm/bedrock/utils/message_inputs.cjs +6 -2
- package/dist/cjs/llm/bedrock/utils/message_inputs.cjs.map +1 -1
- package/dist/cjs/llm/init.cjs +60 -0
- package/dist/cjs/llm/init.cjs.map +1 -0
- package/dist/cjs/llm/invoke.cjs +90 -0
- package/dist/cjs/llm/invoke.cjs.map +1 -0
- package/dist/cjs/llm/openai/index.cjs +2 -0
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/request.cjs +41 -0
- package/dist/cjs/llm/request.cjs.map +1 -0
- package/dist/cjs/main.cjs +40 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/messages/cache.cjs +76 -89
- package/dist/cjs/messages/cache.cjs.map +1 -1
- package/dist/cjs/messages/contextPruning.cjs +156 -0
- package/dist/cjs/messages/contextPruning.cjs.map +1 -0
- package/dist/cjs/messages/contextPruningSettings.cjs +53 -0
- package/dist/cjs/messages/contextPruningSettings.cjs.map +1 -0
- package/dist/cjs/messages/core.cjs +23 -37
- package/dist/cjs/messages/core.cjs.map +1 -1
- package/dist/cjs/messages/format.cjs +156 -11
- package/dist/cjs/messages/format.cjs.map +1 -1
- package/dist/cjs/messages/prune.cjs +1161 -49
- package/dist/cjs/messages/prune.cjs.map +1 -1
- package/dist/cjs/messages/reducer.cjs +87 -0
- package/dist/cjs/messages/reducer.cjs.map +1 -0
- package/dist/cjs/run.cjs +81 -42
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +54 -7
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/summarization/index.cjs +75 -0
- package/dist/cjs/summarization/index.cjs.map +1 -0
- package/dist/cjs/summarization/node.cjs +663 -0
- package/dist/cjs/summarization/node.cjs.map +1 -0
- package/dist/cjs/tools/ToolNode.cjs +16 -8
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/handlers.cjs +2 -0
- package/dist/cjs/tools/handlers.cjs.map +1 -1
- package/dist/cjs/utils/errors.cjs +115 -0
- package/dist/cjs/utils/errors.cjs.map +1 -0
- package/dist/cjs/utils/events.cjs +17 -0
- package/dist/cjs/utils/events.cjs.map +1 -1
- package/dist/cjs/utils/handlers.cjs +16 -0
- package/dist/cjs/utils/handlers.cjs.map +1 -1
- package/dist/cjs/utils/llm.cjs +10 -0
- package/dist/cjs/utils/llm.cjs.map +1 -1
- package/dist/cjs/utils/tokens.cjs +247 -14
- package/dist/cjs/utils/tokens.cjs.map +1 -1
- package/dist/cjs/utils/truncation.cjs +107 -0
- package/dist/cjs/utils/truncation.cjs.map +1 -0
- package/dist/esm/agents/AgentContext.mjs +325 -61
- package/dist/esm/agents/AgentContext.mjs.map +1 -1
- package/dist/esm/common/enum.mjs +13 -0
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/events.mjs +8 -28
- package/dist/esm/events.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +307 -226
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +4 -4
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
- package/dist/esm/llm/bedrock/utils/message_inputs.mjs +6 -2
- package/dist/esm/llm/bedrock/utils/message_inputs.mjs.map +1 -1
- package/dist/esm/llm/init.mjs +58 -0
- package/dist/esm/llm/init.mjs.map +1 -0
- package/dist/esm/llm/invoke.mjs +87 -0
- package/dist/esm/llm/invoke.mjs.map +1 -0
- package/dist/esm/llm/openai/index.mjs +2 -0
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/request.mjs +38 -0
- package/dist/esm/llm/request.mjs.map +1 -0
- package/dist/esm/main.mjs +13 -3
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/messages/cache.mjs +76 -89
- package/dist/esm/messages/cache.mjs.map +1 -1
- package/dist/esm/messages/contextPruning.mjs +154 -0
- package/dist/esm/messages/contextPruning.mjs.map +1 -0
- package/dist/esm/messages/contextPruningSettings.mjs +50 -0
- package/dist/esm/messages/contextPruningSettings.mjs.map +1 -0
- package/dist/esm/messages/core.mjs +23 -37
- package/dist/esm/messages/core.mjs.map +1 -1
- package/dist/esm/messages/format.mjs +156 -11
- package/dist/esm/messages/format.mjs.map +1 -1
- package/dist/esm/messages/prune.mjs +1158 -52
- package/dist/esm/messages/prune.mjs.map +1 -1
- package/dist/esm/messages/reducer.mjs +83 -0
- package/dist/esm/messages/reducer.mjs.map +1 -0
- package/dist/esm/run.mjs +82 -43
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +54 -7
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/summarization/index.mjs +73 -0
- package/dist/esm/summarization/index.mjs.map +1 -0
- package/dist/esm/summarization/node.mjs +659 -0
- package/dist/esm/summarization/node.mjs.map +1 -0
- package/dist/esm/tools/ToolNode.mjs +16 -8
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/handlers.mjs +2 -0
- package/dist/esm/tools/handlers.mjs.map +1 -1
- package/dist/esm/utils/errors.mjs +111 -0
- package/dist/esm/utils/errors.mjs.map +1 -0
- package/dist/esm/utils/events.mjs +17 -1
- package/dist/esm/utils/events.mjs.map +1 -1
- package/dist/esm/utils/handlers.mjs +16 -0
- package/dist/esm/utils/handlers.mjs.map +1 -1
- package/dist/esm/utils/llm.mjs +10 -1
- package/dist/esm/utils/llm.mjs.map +1 -1
- package/dist/esm/utils/tokens.mjs +245 -15
- package/dist/esm/utils/tokens.mjs.map +1 -1
- package/dist/esm/utils/truncation.mjs +102 -0
- package/dist/esm/utils/truncation.mjs.map +1 -0
- package/dist/types/agents/AgentContext.d.ts +124 -6
- package/dist/types/common/enum.d.ts +14 -1
- package/dist/types/graphs/Graph.d.ts +22 -27
- package/dist/types/index.d.ts +5 -0
- package/dist/types/llm/init.d.ts +18 -0
- package/dist/types/llm/invoke.d.ts +48 -0
- package/dist/types/llm/request.d.ts +14 -0
- package/dist/types/messages/contextPruning.d.ts +42 -0
- package/dist/types/messages/contextPruningSettings.d.ts +44 -0
- package/dist/types/messages/core.d.ts +1 -1
- package/dist/types/messages/format.d.ts +17 -1
- package/dist/types/messages/index.d.ts +3 -0
- package/dist/types/messages/prune.d.ts +162 -1
- package/dist/types/messages/reducer.d.ts +18 -0
- package/dist/types/run.d.ts +12 -1
- package/dist/types/summarization/index.d.ts +20 -0
- package/dist/types/summarization/node.d.ts +29 -0
- package/dist/types/tools/ToolNode.d.ts +3 -1
- package/dist/types/types/graph.d.ts +44 -6
- package/dist/types/types/index.d.ts +1 -0
- package/dist/types/types/run.d.ts +30 -0
- package/dist/types/types/stream.d.ts +31 -4
- package/dist/types/types/summarize.d.ts +47 -0
- package/dist/types/types/tools.d.ts +7 -0
- package/dist/types/utils/errors.d.ts +28 -0
- package/dist/types/utils/events.d.ts +13 -0
- package/dist/types/utils/index.d.ts +2 -0
- package/dist/types/utils/llm.d.ts +4 -0
- package/dist/types/utils/tokens.d.ts +14 -1
- package/dist/types/utils/truncation.d.ts +49 -0
- package/package.json +1 -1
- package/src/agents/AgentContext.ts +388 -58
- package/src/agents/__tests__/AgentContext.test.ts +265 -5
- package/src/common/enum.ts +13 -0
- package/src/events.ts +9 -39
- package/src/graphs/Graph.ts +468 -331
- package/src/index.ts +7 -0
- package/src/llm/anthropic/llm.spec.ts +3 -3
- package/src/llm/anthropic/utils/message_inputs.ts +6 -4
- package/src/llm/bedrock/llm.spec.ts +1 -1
- package/src/llm/bedrock/utils/message_inputs.ts +6 -2
- package/src/llm/init.ts +63 -0
- package/src/llm/invoke.ts +144 -0
- package/src/llm/request.ts +55 -0
- package/src/messages/__tests__/observationMasking.test.ts +221 -0
- package/src/messages/cache.ts +77 -102
- package/src/messages/contextPruning.ts +191 -0
- package/src/messages/contextPruningSettings.ts +90 -0
- package/src/messages/core.ts +32 -53
- package/src/messages/ensureThinkingBlock.test.ts +39 -39
- package/src/messages/format.ts +227 -15
- package/src/messages/formatAgentMessages.test.ts +511 -1
- package/src/messages/index.ts +3 -0
- package/src/messages/prune.ts +1548 -62
- package/src/messages/reducer.ts +22 -0
- package/src/run.ts +104 -51
- package/src/scripts/bedrock-merge-test.ts +1 -1
- package/src/scripts/test-thinking-handoff-bedrock.ts +1 -1
- package/src/scripts/test-thinking-handoff.ts +1 -1
- package/src/scripts/thinking-bedrock.ts +1 -1
- package/src/scripts/thinking.ts +1 -1
- package/src/specs/anthropic.simple.test.ts +1 -1
- package/src/specs/multi-agent-summarization.test.ts +396 -0
- package/src/specs/prune.test.ts +1196 -23
- package/src/specs/summarization-unit.test.ts +868 -0
- package/src/specs/summarization.test.ts +3810 -0
- package/src/specs/summarize-prune.test.ts +376 -0
- package/src/specs/thinking-handoff.test.ts +10 -10
- package/src/specs/thinking-prune.test.ts +7 -4
- package/src/specs/token-accounting-e2e.test.ts +1034 -0
- package/src/specs/token-accounting-pipeline.test.ts +882 -0
- package/src/specs/token-distribution-edge-case.test.ts +25 -26
- package/src/splitStream.test.ts +42 -33
- package/src/stream.ts +64 -11
- package/src/summarization/__tests__/aggregator.test.ts +153 -0
- package/src/summarization/__tests__/node.test.ts +708 -0
- package/src/summarization/__tests__/trigger.test.ts +50 -0
- package/src/summarization/index.ts +102 -0
- package/src/summarization/node.ts +982 -0
- package/src/tools/ToolNode.ts +25 -3
- package/src/types/graph.ts +62 -7
- package/src/types/index.ts +1 -0
- package/src/types/run.ts +32 -0
- package/src/types/stream.ts +45 -5
- package/src/types/summarize.ts +58 -0
- package/src/types/tools.ts +7 -0
- package/src/utils/errors.ts +117 -0
- package/src/utils/events.ts +31 -0
- package/src/utils/handlers.ts +18 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/llm.ts +12 -0
- package/src/utils/tokens.ts +336 -18
- package/src/utils/truncation.ts +124 -0
- package/src/scripts/image.ts +0 -180
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
'use strict';

/**
 * Context overflow error detection utilities.
 *
 * Identifies provider-specific error messages that indicate the request
 * exceeded the model's context window. Used by the overflow recovery loop
 * to decide whether to retry with truncation/compaction vs. propagating
 * the error.
 */

/**
 * Exact phrases that definitively indicate a context overflow error.
 * These are returned by various LLM providers when the prompt is too large.
 *
 * NOTE(review): `'max_tokens'` and `'token limit'` are broad — an error such
 * as "max_tokens must be at least 1" would also match. Confirm against the
 * provider error catalogs before relying on the strict check alone.
 */
const CONTEXT_OVERFLOW_PHRASES = [
    'request_too_large',
    'context length exceeded',
    'maximum context length',
    'prompt is too long',
    'exceeds model context window',
    'exceeds the model',
    'too large for model',
    'context_length_exceeded',
    'max_tokens',
    'token limit',
    'input too long',
    'payload too large',
    'content_too_large',
];

/**
 * HTTP status codes and broader hints that suggest context overflow.
 * Used by the less-strict `isLikelyContextOverflowError`.
 */
const CONTEXT_OVERFLOW_HINT_RE = /413|too large|too long|context.*exceed|exceed.*context|token.*limit|limit.*token|prompt.*size|size.*limit|maximum.*length|length.*maximum/i;

/**
 * Patterns that should NOT be treated as context overflow even if they
 * contain words like "limit" or "too large" (rate limits, quota/billing,
 * auth failures).
 */
const FALSE_POSITIVE_RE = /rate.?limit|too many requests|quota|billing|auth|permission|forbidden/i;

/**
 * Extracts a human-readable error message from an unknown error value.
 *
 * Resolution order: '' for null/undefined, the value itself for strings,
 * `.message` for Error instances, then for plain objects `message` →
 * `error` (string) → `error.message`; otherwise a JSON serialization
 * with a `String()` fallback. Always returns a string.
 */
function extractErrorMessage(error) {
    if (error == null) {
        return '';
    }
    if (typeof error === 'string') {
        return error;
    }
    if (error instanceof Error) {
        return error.message;
    }
    if (typeof error === 'object') {
        const record = error;
        if (typeof record.message === 'string') {
            return record.message;
        }
        if (typeof record.error === 'string') {
            return record.error;
        }
        if (typeof record.error === 'object' &&
            record.error != null &&
            typeof record.error.message === 'string') {
            return record.error.message;
        }
    }
    try {
        // JSON.stringify returns `undefined` (not a string) for functions and
        // symbols; fall back to String() so this function always yields a string.
        const serialized = JSON.stringify(error);
        return serialized ?? String(error);
    }
    catch {
        // Circular structures and BigInt values make JSON.stringify throw.
        return String(error);
    }
}

/**
 * Returns true if the error message definitively indicates a context
 * overflow / prompt-too-large error from the provider.
 *
 * This is the strict check: only matches known, unambiguous phrases,
 * after first rejecting known false positives (rate limits, quota, auth).
 * Use this when you want high confidence before taking recovery action.
 */
function isContextOverflowError(errorMessage) {
    if (!errorMessage) {
        return false;
    }
    const lower = errorMessage.toLowerCase();
    if (FALSE_POSITIVE_RE.test(lower)) {
        return false;
    }
    return CONTEXT_OVERFLOW_PHRASES.some((phrase) => lower.includes(phrase));
}

/**
 * Returns true if the error message likely indicates a context overflow.
 * Uses broader heuristic matching (regex) in addition to exact phrases.
 *
 * May produce false positives for unusual error messages. Use this when
 * the cost of a false positive (one extra retry) is acceptable.
 */
function isLikelyContextOverflowError(errorMessage) {
    if (!errorMessage) {
        return false;
    }
    if (isContextOverflowError(errorMessage)) {
        return true;
    }
    const lower = errorMessage.toLowerCase();
    if (FALSE_POSITIVE_RE.test(lower)) {
        return false;
    }
    return CONTEXT_OVERFLOW_HINT_RE.test(lower);
}
// Public API: message extraction plus strict and heuristic overflow detection.
exports.extractErrorMessage = extractErrorMessage;
exports.isContextOverflowError = isContextOverflowError;
exports.isLikelyContextOverflowError = isLikelyContextOverflowError;
//# sourceMappingURL=errors.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.cjs","sources":["../../../src/utils/errors.ts"],"sourcesContent":["/**\n * Context overflow error detection utilities.\n *\n * Identifies provider-specific error messages that indicate the request\n * exceeded the model's context window. Used by the overflow recovery loop\n * to decide whether to retry with truncation/compaction vs. propagating\n * the error.\n */\n\n/**\n * Exact phrases that definitively indicate a context overflow error.\n * These are returned by various LLM providers when the prompt is too large.\n */\nconst CONTEXT_OVERFLOW_PHRASES = [\n 'request_too_large',\n 'context length exceeded',\n 'maximum context length',\n 'prompt is too long',\n 'exceeds model context window',\n 'exceeds the model',\n 'too large for model',\n 'context_length_exceeded',\n 'max_tokens',\n 'token limit',\n 'input too long',\n 'payload too large',\n 'content_too_large',\n] as const;\n\n/**\n * HTTP status codes and broader hints that suggest context overflow.\n * Used by the less-strict `isLikelyContextOverflowError`.\n */\nconst CONTEXT_OVERFLOW_HINT_RE =\n /413|too large|too long|context.*exceed|exceed.*context|token.*limit|limit.*token|prompt.*size|size.*limit|maximum.*length|length.*maximum/i;\n\n/**\n * Patterns that should NOT be treated as context overflow even if they\n * contain words like \"limit\" or \"too large\".\n */\nconst FALSE_POSITIVE_RE =\n /rate.?limit|too many requests|quota|billing|auth|permission|forbidden/i;\n\n/**\n * Extracts a human-readable error message from an unknown error value.\n */\nexport function extractErrorMessage(error: unknown): string {\n if (error == null) {\n return '';\n }\n if (typeof error === 'string') {\n return error;\n }\n if (error instanceof Error) {\n return error.message;\n }\n if (typeof error === 'object') {\n const record = error as Record<string, unknown>;\n if (typeof record.message === 'string') {\n return record.message;\n }\n if (typeof record.error === 'string') {\n return 
record.error;\n }\n if (\n typeof record.error === 'object' &&\n record.error != null &&\n typeof (record.error as Record<string, unknown>).message === 'string'\n ) {\n return (record.error as Record<string, unknown>).message as string;\n }\n }\n try {\n return JSON.stringify(error);\n } catch {\n return String(error);\n }\n}\n\n/**\n * Returns true if the error message definitively indicates a context\n * overflow / prompt-too-large error from the provider.\n *\n * This is the strict check: only matches known, unambiguous phrases.\n * Use this when you want high confidence before taking recovery action.\n */\nexport function isContextOverflowError(errorMessage?: string): boolean {\n if (!errorMessage) {\n return false;\n }\n const lower = errorMessage.toLowerCase();\n if (FALSE_POSITIVE_RE.test(lower)) {\n return false;\n }\n return CONTEXT_OVERFLOW_PHRASES.some((phrase) => lower.includes(phrase));\n}\n\n/**\n * Returns true if the error message likely indicates a context overflow.\n * Uses broader heuristic matching (regex) in addition to exact phrases.\n *\n * May produce false positives for unusual error messages. 
Use this when\n * the cost of a false positive (one extra retry) is acceptable.\n */\nexport function isLikelyContextOverflowError(errorMessage?: string): boolean {\n if (!errorMessage) {\n return false;\n }\n if (isContextOverflowError(errorMessage)) {\n return true;\n }\n const lower = errorMessage.toLowerCase();\n if (FALSE_POSITIVE_RE.test(lower)) {\n return false;\n }\n return CONTEXT_OVERFLOW_HINT_RE.test(lower);\n}\n"],"names":[],"mappings":";;AAAA;;;;;;;AAOG;AAEH;;;AAGG;AACH,MAAM,wBAAwB,GAAG;IAC/B,mBAAmB;IACnB,yBAAyB;IACzB,wBAAwB;IACxB,oBAAoB;IACpB,8BAA8B;IAC9B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,YAAY;IACZ,aAAa;IACb,gBAAgB;IAChB,mBAAmB;IACnB,mBAAmB;CACX;AAEV;;;AAGG;AACH,MAAM,wBAAwB,GAC5B,4IAA4I;AAE9I;;;AAGG;AACH,MAAM,iBAAiB,GACrB,wEAAwE;AAE1E;;AAEG;AACG,SAAU,mBAAmB,CAAC,KAAc,EAAA;AAChD,IAAA,IAAI,KAAK,IAAI,IAAI,EAAE;AACjB,QAAA,OAAO,EAAE;IACX;AACA,IAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AAC7B,QAAA,OAAO,KAAK;IACd;AACA,IAAA,IAAI,KAAK,YAAY,KAAK,EAAE;QAC1B,OAAO,KAAK,CAAC,OAAO;IACtB;AACA,IAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,MAAM,GAAG,KAAgC;AAC/C,QAAA,IAAI,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ,EAAE;YACtC,OAAO,MAAM,CAAC,OAAO;QACvB;AACA,QAAA,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ,EAAE;YACpC,OAAO,MAAM,CAAC,KAAK;QACrB;AACA,QAAA,IACE,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ;YAChC,MAAM,CAAC,KAAK,IAAI,IAAI;YACpB,OAAQ,MAAM,CAAC,KAAiC,CAAC,OAAO,KAAK,QAAQ,EACrE;AACA,YAAA,OAAQ,MAAM,CAAC,KAAiC,CAAC,OAAiB;QACpE;IACF;AACA,IAAA,IAAI;AACF,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B;AAAE,IAAA,MAAM;AACN,QAAA,OAAO,MAAM,CAAC,KAAK,CAAC;IACtB;AACF;AAEA;;;;;;AAMG;AACG,SAAU,sBAAsB,CAAC,YAAqB,EAAA;IAC1D,IAAI,CAAC,YAAY,EAAE;AACjB,QAAA,OAAO,KAAK;IACd;AACA,IAAA,MAAM,KAAK,GAAG,YAAY,CAAC,WAAW,EAAE;AACxC,IAAA,IAAI,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;AACjC,QAAA,OAAO,KAAK;IACd;AACA,IAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;AAC1E;AAEA;;;;;;AAMG;AACG,SAAU,4BAA4B,CAAC,YAAqB,EAAA;IAChE,IAAI,CAAC,YAAY,EAAE;AACjB,QAAA,OAAO,KAAK;IACd;AACA,IAAA,IAAI,sBAAsB,CAAC,YAAY,CAAC,EAAE;AAC
xC,QAAA,OAAO,IAAI;IACb;AACA,IAAA,MAAM,KAAK,GAAG,YAAY,CAAC,WAAW,EAAE;AACxC,IAAA,IAAI,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;AACjC,QAAA,OAAO,KAAK;IACd;AACA,IAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,KAAK,CAAC;AAC7C;;;;;;"}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
3
|
var dispatch = require('@langchain/core/callbacks/dispatch');
|
|
4
|
+
var _enum = require('../common/enum.cjs');
|
|
4
5
|
|
|
5
6
|
/* eslint-disable no-console */
|
|
6
7
|
// src/utils/events.ts
|
|
@@ -26,6 +27,22 @@ async function safeDispatchCustomEvent(event, payload, config) {
|
|
|
26
27
|
console.error('Error dispatching custom event:', e);
|
|
27
28
|
}
|
|
28
29
|
}
|
|
30
|
+
/**
 * Fire-and-forget diagnostic log event.
 * Debug-level logs are gated behind AGENT_DEBUG_LOGGING=true to avoid
 * overhead in production. Info/warn/error always flow through.
 * Pass `force: true` to bypass the env-var gate (e.g. invoke timing).
 */
function emitAgentLog(config, level, scope, message, data, meta, options) {
    if (!config) {
        return;
    }
    const forced = options?.force ?? false;
    const debugGateClosed = process.env.AGENT_DEBUG_LOGGING !== 'true';
    if (level === 'debug' && !forced && debugGateClosed) {
        return;
    }
    const payload = { level, scope, message, data, ...meta };
    // Intentionally not awaited: diagnostics must never block the agent loop.
    void safeDispatchCustomEvent(_enum.GraphEvents.ON_AGENT_LOG, payload, config);
}
|
|
29
45
|
|
|
46
|
+
exports.emitAgentLog = emitAgentLog;
|
|
30
47
|
exports.safeDispatchCustomEvent = safeDispatchCustomEvent;
|
|
31
48
|
//# sourceMappingURL=events.cjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"events.cjs","sources":["../../../src/utils/events.ts"],"sourcesContent":["/* eslint-disable no-console */\n// src/utils/events.ts\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport type { RunnableConfig } from '@langchain/core/runnables';\n\n/**\n * Safely dispatches a custom event and properly awaits it to avoid\n * race conditions where events are dispatched after run cleanup.\n */\nexport async function safeDispatchCustomEvent(\n event: string,\n payload: unknown,\n config?: RunnableConfig\n): Promise<void> {\n try {\n await dispatchCustomEvent(event, payload, config);\n } catch (e) {\n // Check if this is the known EventStreamCallbackHandler error\n if (\n e instanceof Error &&\n e.message.includes('handleCustomEvent: Run ID') &&\n e.message.includes('not found in run map')\n ) {\n // Suppress this specific error - it's expected during parallel execution\n // when EventStreamCallbackHandler loses track of run IDs\n // console.debug('Suppressed error dispatching custom event:', e);\n return;\n }\n // Log other errors\n console.error('Error dispatching custom event:', e);\n }\n}\n"],"names":["dispatchCustomEvent"],"mappings":"
|
|
1
|
+
{"version":3,"file":"events.cjs","sources":["../../../src/utils/events.ts"],"sourcesContent":["/* eslint-disable no-console */\n// src/utils/events.ts\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type { AgentLogEvent } from '@/types/graph';\nimport { GraphEvents } from '@/common';\n\n/**\n * Safely dispatches a custom event and properly awaits it to avoid\n * race conditions where events are dispatched after run cleanup.\n */\nexport async function safeDispatchCustomEvent(\n event: string,\n payload: unknown,\n config?: RunnableConfig\n): Promise<void> {\n try {\n await dispatchCustomEvent(event, payload, config);\n } catch (e) {\n // Check if this is the known EventStreamCallbackHandler error\n if (\n e instanceof Error &&\n e.message.includes('handleCustomEvent: Run ID') &&\n e.message.includes('not found in run map')\n ) {\n // Suppress this specific error - it's expected during parallel execution\n // when EventStreamCallbackHandler loses track of run IDs\n // console.debug('Suppressed error dispatching custom event:', e);\n return;\n }\n // Log other errors\n console.error('Error dispatching custom event:', e);\n }\n}\n\n/**\n * Fire-and-forget diagnostic log event.\n * Debug-level logs are gated behind AGENT_DEBUG_LOGGING=true to avoid\n * overhead in production. Info/warn/error always flow through.\n * Pass `force: true` to bypass the env-var gate (e.g. invoke timing).\n */\nexport function emitAgentLog(\n config: RunnableConfig | undefined,\n level: AgentLogEvent['level'],\n scope: AgentLogEvent['scope'],\n message: string,\n data?: Record<string, unknown>,\n meta?: { runId?: string; agentId?: string },\n options?: { force?: boolean }\n): void {\n if (!config) return;\n if (\n level === 'debug' &&\n !(options?.force ?? 
false) &&\n process.env.AGENT_DEBUG_LOGGING !== 'true'\n )\n return;\n void safeDispatchCustomEvent(\n GraphEvents.ON_AGENT_LOG,\n { level, scope, message, data, ...meta } satisfies AgentLogEvent,\n config\n );\n}\n"],"names":["dispatchCustomEvent","GraphEvents"],"mappings":";;;;;AAAA;AACA;AAMA;;;AAGG;AACI,eAAe,uBAAuB,CAC3C,KAAa,EACb,OAAgB,EAChB,MAAuB,EAAA;AAEvB,IAAA,IAAI;QACF,MAAMA,4BAAmB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC;IACnD;IAAE,OAAO,CAAC,EAAE;;QAEV,IACE,CAAC,YAAY,KAAK;AAClB,YAAA,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,2BAA2B,CAAC;YAC/C,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,sBAAsB,CAAC,EAC1C;;;;YAIA;QACF;;AAEA,QAAA,OAAO,CAAC,KAAK,CAAC,iCAAiC,EAAE,CAAC,CAAC;IACrD;AACF;AAEA;;;;;AAKG;AACG,SAAU,YAAY,CAC1B,MAAkC,EAClC,KAA6B,EAC7B,KAA6B,EAC7B,OAAe,EACf,IAA8B,EAC9B,IAA2C,EAC3C,OAA6B,EAAA;AAE7B,IAAA,IAAI,CAAC,MAAM;QAAE;IACb,IACE,KAAK,KAAK,OAAO;AACjB,QAAA,EAAE,OAAO,EAAE,KAAK,IAAI,KAAK,CAAC;AAC1B,QAAA,OAAO,CAAC,GAAG,CAAC,mBAAmB,KAAK,MAAM;QAE1C;IACF,KAAK,uBAAuB,CAC1BC,iBAAW,CAAC,YAAY,EACxB,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,IAAI,EAA0B,EAChE,MAAM,CACP;AACH;;;;;"}
|
|
@@ -58,6 +58,22 @@ function createHandlers(callbacks) {
|
|
|
58
58
|
callbacks?.onMessageDelta?.(event, data);
|
|
59
59
|
},
|
|
60
60
|
},
|
|
61
|
+
[_enum.GraphEvents.ON_SUMMARIZE_DELTA]: {
|
|
62
|
+
handle: (event, data) => {
|
|
63
|
+
aggregateContent({
|
|
64
|
+
event: event,
|
|
65
|
+
data: data,
|
|
66
|
+
});
|
|
67
|
+
},
|
|
68
|
+
},
|
|
69
|
+
[_enum.GraphEvents.ON_SUMMARIZE_COMPLETE]: {
|
|
70
|
+
handle: (event, data) => {
|
|
71
|
+
aggregateContent({
|
|
72
|
+
event: event,
|
|
73
|
+
data: data,
|
|
74
|
+
});
|
|
75
|
+
},
|
|
76
|
+
},
|
|
61
77
|
};
|
|
62
78
|
return {
|
|
63
79
|
contentParts,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"handlers.cjs","sources":["../../../src/utils/handlers.ts"],"sourcesContent":["/**\n * Multi-Agent Handler Utilities\n *\n * Provides a simple helper to create handlers with content aggregation for multi-agent scripts.\n *\n * Usage:\n * ```typescript\n * const { contentParts, aggregateContent, handlers } = createHandlers();\n *\n * // With callbacks\n * const { contentParts, aggregateContent, handlers } = createHandlers({\n * onRunStep: (event, data) => console.log('Step:', data),\n * onRunStepCompleted: (event, data) => console.log('Completed:', data)\n * });\n * ```\n */\n\nimport { GraphEvents } from '@/common';\nimport { ChatModelStreamHandler, createContentAggregator } from '@/stream';\nimport { ToolEndHandler, ModelEndHandler } from '@/events';\nimport type * as t from '@/types';\n\ninterface HandlerCallbacks {\n onRunStep?: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData) => void;\n onRunStepCompleted?: (\n event: GraphEvents.ON_RUN_STEP_COMPLETED,\n data: t.StreamEventData\n ) => void;\n onRunStepDelta?: (\n event: GraphEvents.ON_RUN_STEP_DELTA,\n data: t.StreamEventData\n ) => void;\n onMessageDelta?: (\n event: GraphEvents.ON_MESSAGE_DELTA,\n data: t.StreamEventData\n ) => void;\n}\n\n/**\n * Creates handlers with content aggregation for multi-agent scripts\n */\nexport function createHandlers(callbacks?: HandlerCallbacks): {\n contentParts: Array<t.MessageContentComplex | undefined>;\n aggregateContent: ReturnType<\n typeof createContentAggregator\n >['aggregateContent'];\n handlers: Record<string, t.EventHandler>;\n} {\n // Set up content aggregator\n const { contentParts, aggregateContent } = createContentAggregator();\n\n // Create the handlers object\n const handlers = {\n [GraphEvents.TOOL_END]: new ToolEndHandler(),\n [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),\n [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),\n\n [GraphEvents.ON_RUN_STEP]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP,\n data: 
t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.RunStep });\n callbacks?.onRunStep?.(event, data);\n },\n },\n\n [GraphEvents.ON_RUN_STEP_COMPLETED]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP_COMPLETED,\n data: t.StreamEventData\n ): void => {\n aggregateContent({\n event,\n data: data as unknown as { result: t.ToolEndEvent },\n });\n callbacks?.onRunStepCompleted?.(event, data);\n },\n },\n\n [GraphEvents.ON_RUN_STEP_DELTA]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP_DELTA,\n data: t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.RunStepDeltaEvent });\n callbacks?.onRunStepDelta?.(event, data);\n },\n },\n\n [GraphEvents.ON_MESSAGE_DELTA]: {\n handle: (\n event: GraphEvents.ON_MESSAGE_DELTA,\n data: t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.MessageDeltaEvent });\n callbacks?.onMessageDelta?.(event, data);\n },\n },\n };\n\n return {\n contentParts,\n aggregateContent,\n handlers,\n };\n}\n"],"names":["createContentAggregator","GraphEvents","ToolEndHandler","ModelEndHandler","ChatModelStreamHandler"],"mappings":";;;;;;AAAA;;;;;;;;;;;;;;;AAeG;AAuBH;;AAEG;AACG,SAAU,cAAc,CAAC,SAA4B,EAAA;;IAQzD,MAAM,EAAE,YAAY,EAAE,gBAAgB,EAAE,GAAGA,8BAAuB,EAAE;;AAGpE,IAAA,MAAM,QAAQ,GAAG;AACf,QAAA,CAACC,iBAAW,CAAC,QAAQ,GAAG,IAAIC,qBAAc,EAAE;AAC5C,QAAA,CAACD,iBAAW,CAAC,cAAc,GAAG,IAAIE,sBAAe,EAAE;AACnD,QAAA,CAACF,iBAAW,CAAC,iBAAiB,GAAG,IAAIG,6BAAsB,EAAE;AAE7D,QAAA,CAACH,iBAAW,CAAC,WAAW,GAAG;AACzB,YAAA,MAAM,EAAE,CACN,KAA8B,EAC9B,IAAuB,KACf;gBACR,gBAAgB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAAiB,EAAE,CAAC;gBACpD,SAAS,EAAE,SAAS,GAAG,KAAK,EAAE,IAAI,CAAC;YACrC,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,qBAAqB,GAAG;AACnC,YAAA,MAAM,EAAE,CACN,KAAwC,EACxC,IAAuB,KACf;AACR,gBAAA,gBAAgB,CAAC;oBACf,KAAK;AACL,oBAAA,IAAI,EAAE,IAA6C;AACpD,iBAAA,CAAC;gBACF,SAAS,EAAE,kBAAkB,GAAG,KAAK,EAAE,IAAI,CAAC;YAC9C,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,iBAAiB,GAAG;AAC/B,YAAA,MAAM,EAAE,CACN,KAAoC,EACpC,IAAuB,KACf;gBACR,gBAA
gB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAA2B,EAAE,CAAC;gBAC9D,SAAS,EAAE,cAAc,GAAG,KAAK,EAAE,IAAI,CAAC;YAC1C,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,gBAAgB,GAAG;AAC9B,YAAA,MAAM,EAAE,CACN,KAAmC,EACnC,IAAuB,KACf;gBACR,gBAAgB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAA2B,EAAE,CAAC;gBAC9D,SAAS,EAAE,cAAc,GAAG,KAAK,EAAE,IAAI,CAAC;YAC1C,CAAC;AACF,SAAA;KACF;IAED,OAAO;QACL,YAAY;QACZ,gBAAgB;QAChB,QAAQ;KACT;AACH;;;;"}
|
|
1
|
+
{"version":3,"file":"handlers.cjs","sources":["../../../src/utils/handlers.ts"],"sourcesContent":["/**\n * Multi-Agent Handler Utilities\n *\n * Provides a simple helper to create handlers with content aggregation for multi-agent scripts.\n *\n * Usage:\n * ```typescript\n * const { contentParts, aggregateContent, handlers } = createHandlers();\n *\n * // With callbacks\n * const { contentParts, aggregateContent, handlers } = createHandlers({\n * onRunStep: (event, data) => console.log('Step:', data),\n * onRunStepCompleted: (event, data) => console.log('Completed:', data)\n * });\n * ```\n */\n\nimport { GraphEvents } from '@/common';\nimport { ChatModelStreamHandler, createContentAggregator } from '@/stream';\nimport { ToolEndHandler, ModelEndHandler } from '@/events';\nimport type * as t from '@/types';\n\ninterface HandlerCallbacks {\n onRunStep?: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData) => void;\n onRunStepCompleted?: (\n event: GraphEvents.ON_RUN_STEP_COMPLETED,\n data: t.StreamEventData\n ) => void;\n onRunStepDelta?: (\n event: GraphEvents.ON_RUN_STEP_DELTA,\n data: t.StreamEventData\n ) => void;\n onMessageDelta?: (\n event: GraphEvents.ON_MESSAGE_DELTA,\n data: t.StreamEventData\n ) => void;\n}\n\n/**\n * Creates handlers with content aggregation for multi-agent scripts\n */\nexport function createHandlers(callbacks?: HandlerCallbacks): {\n contentParts: Array<t.MessageContentComplex | undefined>;\n aggregateContent: ReturnType<\n typeof createContentAggregator\n >['aggregateContent'];\n handlers: Record<string, t.EventHandler>;\n} {\n // Set up content aggregator\n const { contentParts, aggregateContent } = createContentAggregator();\n\n // Create the handlers object\n const handlers = {\n [GraphEvents.TOOL_END]: new ToolEndHandler(),\n [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),\n [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),\n\n [GraphEvents.ON_RUN_STEP]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP,\n data: 
t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.RunStep });\n callbacks?.onRunStep?.(event, data);\n },\n },\n\n [GraphEvents.ON_RUN_STEP_COMPLETED]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP_COMPLETED,\n data: t.StreamEventData\n ): void => {\n aggregateContent({\n event,\n data: data as unknown as { result: t.ToolEndEvent },\n });\n callbacks?.onRunStepCompleted?.(event, data);\n },\n },\n\n [GraphEvents.ON_RUN_STEP_DELTA]: {\n handle: (\n event: GraphEvents.ON_RUN_STEP_DELTA,\n data: t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.RunStepDeltaEvent });\n callbacks?.onRunStepDelta?.(event, data);\n },\n },\n\n [GraphEvents.ON_MESSAGE_DELTA]: {\n handle: (\n event: GraphEvents.ON_MESSAGE_DELTA,\n data: t.StreamEventData\n ): void => {\n aggregateContent({ event, data: data as t.MessageDeltaEvent });\n callbacks?.onMessageDelta?.(event, data);\n },\n },\n\n [GraphEvents.ON_SUMMARIZE_DELTA]: {\n handle: (event: string, data: t.StreamEventData): void => {\n aggregateContent({\n event: event as GraphEvents,\n data: data as t.SummarizeDeltaData,\n });\n },\n },\n\n [GraphEvents.ON_SUMMARIZE_COMPLETE]: {\n handle: (event: string, data: t.StreamEventData): void => {\n aggregateContent({\n event: event as GraphEvents,\n data: data as t.SummarizeCompleteEvent,\n });\n },\n },\n };\n\n return {\n contentParts,\n aggregateContent,\n handlers,\n 
};\n}\n"],"names":["createContentAggregator","GraphEvents","ToolEndHandler","ModelEndHandler","ChatModelStreamHandler"],"mappings":";;;;;;AAAA;;;;;;;;;;;;;;;AAeG;AAuBH;;AAEG;AACG,SAAU,cAAc,CAAC,SAA4B,EAAA;;IAQzD,MAAM,EAAE,YAAY,EAAE,gBAAgB,EAAE,GAAGA,8BAAuB,EAAE;;AAGpE,IAAA,MAAM,QAAQ,GAAG;AACf,QAAA,CAACC,iBAAW,CAAC,QAAQ,GAAG,IAAIC,qBAAc,EAAE;AAC5C,QAAA,CAACD,iBAAW,CAAC,cAAc,GAAG,IAAIE,sBAAe,EAAE;AACnD,QAAA,CAACF,iBAAW,CAAC,iBAAiB,GAAG,IAAIG,6BAAsB,EAAE;AAE7D,QAAA,CAACH,iBAAW,CAAC,WAAW,GAAG;AACzB,YAAA,MAAM,EAAE,CACN,KAA8B,EAC9B,IAAuB,KACf;gBACR,gBAAgB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAAiB,EAAE,CAAC;gBACpD,SAAS,EAAE,SAAS,GAAG,KAAK,EAAE,IAAI,CAAC;YACrC,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,qBAAqB,GAAG;AACnC,YAAA,MAAM,EAAE,CACN,KAAwC,EACxC,IAAuB,KACf;AACR,gBAAA,gBAAgB,CAAC;oBACf,KAAK;AACL,oBAAA,IAAI,EAAE,IAA6C;AACpD,iBAAA,CAAC;gBACF,SAAS,EAAE,kBAAkB,GAAG,KAAK,EAAE,IAAI,CAAC;YAC9C,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,iBAAiB,GAAG;AAC/B,YAAA,MAAM,EAAE,CACN,KAAoC,EACpC,IAAuB,KACf;gBACR,gBAAgB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAA2B,EAAE,CAAC;gBAC9D,SAAS,EAAE,cAAc,GAAG,KAAK,EAAE,IAAI,CAAC;YAC1C,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,gBAAgB,GAAG;AAC9B,YAAA,MAAM,EAAE,CACN,KAAmC,EACnC,IAAuB,KACf;gBACR,gBAAgB,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,IAA2B,EAAE,CAAC;gBAC9D,SAAS,EAAE,cAAc,GAAG,KAAK,EAAE,IAAI,CAAC;YAC1C,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,kBAAkB,GAAG;AAChC,YAAA,MAAM,EAAE,CAAC,KAAa,EAAE,IAAuB,KAAU;AACvD,gBAAA,gBAAgB,CAAC;AACf,oBAAA,KAAK,EAAE,KAAoB;AAC3B,oBAAA,IAAI,EAAE,IAA4B;AACnC,iBAAA,CAAC;YACJ,CAAC;AACF,SAAA;AAED,QAAA,CAACA,iBAAW,CAAC,qBAAqB,GAAG;AACnC,YAAA,MAAM,EAAE,CAAC,KAAa,EAAE,IAAuB,KAAU;AACvD,gBAAA,gBAAgB,CAAC;AACf,oBAAA,KAAK,EAAE,KAAoB;AAC3B,oBAAA,IAAI,EAAE,IAAgC;AACvC,iBAAA,CAAC;YACJ,CAAC;AACF,SAAA;KACF;IAED,OAAO;QACL,YAAY;QACZ,gBAAgB;QAChB,QAAQ;KACT;AACH;;;;"}
|
package/dist/cjs/utils/llm.cjs
CHANGED
|
@@ -21,7 +21,17 @@ function isGoogleLike(provider) {
|
|
|
21
21
|
}
|
|
22
22
|
return [_enum.Providers.GOOGLE, _enum.Providers.VERTEXAI].includes(provider);
|
|
23
23
|
}
|
|
24
|
+
/** Returns true for native Anthropic or Bedrock running a Claude model. */
|
|
25
|
+
function isAnthropicLike(provider, clientOptions) {
|
|
26
|
+
if (provider === _enum.Providers.ANTHROPIC)
|
|
27
|
+
return true;
|
|
28
|
+
if (provider === _enum.Providers.BEDROCK) {
|
|
29
|
+
return /claude/i.test(String(clientOptions?.model ?? ''));
|
|
30
|
+
}
|
|
31
|
+
return false;
|
|
32
|
+
}
|
|
24
33
|
|
|
34
|
+
exports.isAnthropicLike = isAnthropicLike;
|
|
25
35
|
exports.isGoogleLike = isGoogleLike;
|
|
26
36
|
exports.isOpenAILike = isOpenAILike;
|
|
27
37
|
//# sourceMappingURL=llm.cjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"llm.cjs","sources":["../../../src/utils/llm.ts"],"sourcesContent":["// src/utils/llm.ts\nimport { Providers } from '@/common';\n\nexport function isOpenAILike(provider?: string | Providers): boolean {\n if (provider == null) {\n return false;\n }\n return (\n [\n Providers.OPENAI,\n Providers.AZURE,\n Providers.OPENROUTER,\n Providers.XAI,\n Providers.DEEPSEEK,\n ] as string[]\n ).includes(provider);\n}\n\nexport function isGoogleLike(provider?: string | Providers): boolean {\n if (provider == null) {\n return false;\n }\n return ([Providers.GOOGLE, Providers.VERTEXAI] as string[]).includes(\n provider\n );\n}\n"],"names":["Providers"],"mappings":";;;;AAAA;AAGM,SAAU,YAAY,CAAC,QAA6B,EAAA;AACxD,IAAA,IAAI,QAAQ,IAAI,IAAI,EAAE;AACpB,QAAA,OAAO,KAAK;IACd;IACA,OACE;AACE,QAAAA,eAAS,CAAC,MAAM;AAChB,QAAAA,eAAS,CAAC,KAAK;AACf,QAAAA,eAAS,CAAC,UAAU;AACpB,QAAAA,eAAS,CAAC,GAAG;AACb,QAAAA,eAAS,CAAC,QAAQ;AAErB,KAAA,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACtB;AAEM,SAAU,YAAY,CAAC,QAA6B,EAAA;AACxD,IAAA,IAAI,QAAQ,IAAI,IAAI,EAAE;AACpB,QAAA,OAAO,KAAK;IACd;AACA,IAAA,OAAQ,CAACA,eAAS,CAAC,MAAM,EAAEA,eAAS,CAAC,QAAQ,CAAc,CAAC,QAAQ,CAClE,QAAQ,CACT;AACH
|
|
1
|
+
{"version":3,"file":"llm.cjs","sources":["../../../src/utils/llm.ts"],"sourcesContent":["// src/utils/llm.ts\nimport { Providers } from '@/common';\n\nexport function isOpenAILike(provider?: string | Providers): boolean {\n if (provider == null) {\n return false;\n }\n return (\n [\n Providers.OPENAI,\n Providers.AZURE,\n Providers.OPENROUTER,\n Providers.XAI,\n Providers.DEEPSEEK,\n ] as string[]\n ).includes(provider);\n}\n\nexport function isGoogleLike(provider?: string | Providers): boolean {\n if (provider == null) {\n return false;\n }\n return ([Providers.GOOGLE, Providers.VERTEXAI] as string[]).includes(\n provider\n );\n}\n\n/** Returns true for native Anthropic or Bedrock running a Claude model. */\nexport function isAnthropicLike(\n provider?: string | Providers,\n clientOptions?: { model?: string }\n): boolean {\n if (provider === Providers.ANTHROPIC) return true;\n if (provider === Providers.BEDROCK) {\n return /claude/i.test(String(clientOptions?.model ?? ''));\n }\n return false;\n}\n"],"names":["Providers"],"mappings":";;;;AAAA;AAGM,SAAU,YAAY,CAAC,QAA6B,EAAA;AACxD,IAAA,IAAI,QAAQ,IAAI,IAAI,EAAE;AACpB,QAAA,OAAO,KAAK;IACd;IACA,OACE;AACE,QAAAA,eAAS,CAAC,MAAM;AAChB,QAAAA,eAAS,CAAC,KAAK;AACf,QAAAA,eAAS,CAAC,UAAU;AACpB,QAAAA,eAAS,CAAC,GAAG;AACb,QAAAA,eAAS,CAAC,QAAQ;AAErB,KAAA,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACtB;AAEM,SAAU,YAAY,CAAC,QAA6B,EAAA;AACxD,IAAA,IAAI,QAAQ,IAAI,IAAI,EAAE;AACpB,QAAA,OAAO,KAAK;IACd;AACA,IAAA,OAAQ,CAACA,eAAS,CAAC,MAAM,EAAEA,eAAS,CAAC,QAAQ,CAAc,CAAC,QAAQ,CAClE,QAAQ,CACT;AACH;AAEA;AACM,SAAU,eAAe,CAC7B,QAA6B,EAC7B,aAAkC,EAAA;AAElC,IAAA,IAAI,QAAQ,KAAKA,eAAS,CAAC,SAAS;AAAE,QAAA,OAAO,IAAI;AACjD,IAAA,IAAI,QAAQ,KAAKA,eAAS,CAAC,OAAO,EAAE;AAClC,QAAA,OAAO,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,KAAK,IAAI,EAAE,CAAC,CAAC;IAC3D;AACA,IAAA,OAAO,KAAK;AACd;;;;;;"}
|
|
@@ -3,6 +3,212 @@
|
|
|
3
3
|
var aiTokenizer = require('ai-tokenizer');
|
|
4
4
|
var _enum = require('../common/enum.cjs');
|
|
5
5
|
|
|
6
|
+
/** Anthropic minimum image token cost. */
|
|
7
|
+
const ANTHROPIC_IMAGE_MIN_TOKENS = 1024;
|
|
8
|
+
/** Anthropic divisor: tokens = width × height / 750. */
|
|
9
|
+
const ANTHROPIC_IMAGE_DIVISOR = 750;
|
|
10
|
+
/** OpenAI low-detail fixed cost. */
|
|
11
|
+
const OPENAI_IMAGE_LOW_TOKENS = 85;
|
|
12
|
+
/** OpenAI high-detail tile size. */
|
|
13
|
+
const OPENAI_IMAGE_TILE_SIZE = 512;
|
|
14
|
+
/** OpenAI high-detail tokens per tile. */
|
|
15
|
+
const OPENAI_IMAGE_TOKENS_PER_TILE = 170;
|
|
16
|
+
/** Safety margin for image and document token estimates (5% overestimate). */
|
|
17
|
+
const IMAGE_TOKEN_SAFETY_MARGIN = 1.05;
|
|
18
|
+
/**
|
|
19
|
+
* Anthropic PDF: each page costs image tokens + text tokens.
|
|
20
|
+
* Typical range is 1500-3000 tokens/page. Using 2000 as midpoint.
|
|
21
|
+
*/
|
|
22
|
+
const ANTHROPIC_PDF_TOKENS_PER_PAGE = 2000;
|
|
23
|
+
/** OpenAI PDF: each page rendered as high-detail image. ~1500 tokens typical. */
|
|
24
|
+
const OPENAI_PDF_TOKENS_PER_PAGE = 1500;
|
|
25
|
+
/** Approximate base64 bytes per PDF page for page count estimation. */
|
|
26
|
+
const BASE64_BYTES_PER_PDF_PAGE = 75_000;
|
|
27
|
+
/** Fallback token cost for URL-referenced documents without local data. */
|
|
28
|
+
const URL_DOCUMENT_FALLBACK_TOKENS = 2000;
|
|
29
|
+
/**
|
|
30
|
+
* Extracts image dimensions from the first bytes of a base64-encoded
|
|
31
|
+
* PNG, JPEG, GIF, or WebP without decoding the full image.
|
|
32
|
+
* Returns null if the format is unrecognized or data is too short.
|
|
33
|
+
*/
|
|
34
|
+
function extractImageDimensions(base64Data) {
|
|
35
|
+
const raw = base64Data.startsWith('data:')
|
|
36
|
+
? base64Data.slice(base64Data.indexOf(',') + 1)
|
|
37
|
+
: base64Data;
|
|
38
|
+
if (raw.length < 32) {
|
|
39
|
+
return null;
|
|
40
|
+
}
|
|
41
|
+
const bytes = new Uint8Array(Buffer.from(raw.slice(0, 80), 'base64'));
|
|
42
|
+
if (bytes[0] === 0x89 && bytes[1] === 0x50) {
|
|
43
|
+
// PNG: width at bytes 16-19, height at 20-23 (big-endian)
|
|
44
|
+
const width = (bytes[16] << 24) | (bytes[17] << 16) | (bytes[18] << 8) | bytes[19];
|
|
45
|
+
const height = (bytes[20] << 24) | (bytes[21] << 16) | (bytes[22] << 8) | bytes[23];
|
|
46
|
+
return { width, height };
|
|
47
|
+
}
|
|
48
|
+
if (bytes[0] === 0xff && bytes[1] === 0xd8) {
|
|
49
|
+
// JPEG: scan for SOF0 (0xFFC0) or SOF2 (0xFFC2) marker
|
|
50
|
+
for (let i = 2; i < bytes.length - 9; i++) {
|
|
51
|
+
if (bytes[i] === 0xff &&
|
|
52
|
+
(bytes[i + 1] === 0xc0 || bytes[i + 1] === 0xc2)) {
|
|
53
|
+
const height = (bytes[i + 5] << 8) | bytes[i + 6];
|
|
54
|
+
const width = (bytes[i + 7] << 8) | bytes[i + 8];
|
|
55
|
+
return { width, height };
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
return null;
|
|
59
|
+
}
|
|
60
|
+
if (bytes[0] === 0x47 && bytes[1] === 0x49 && bytes[2] === 0x46) {
|
|
61
|
+
// GIF: width at bytes 6-7, height at 8-9 (little-endian)
|
|
62
|
+
const width = bytes[6] | (bytes[7] << 8);
|
|
63
|
+
const height = bytes[8] | (bytes[9] << 8);
|
|
64
|
+
return { width, height };
|
|
65
|
+
}
|
|
66
|
+
if (bytes[0] === 0x52 &&
|
|
67
|
+
bytes[1] === 0x49 &&
|
|
68
|
+
bytes[2] === 0x46 &&
|
|
69
|
+
bytes[3] === 0x46 &&
|
|
70
|
+
bytes[8] === 0x57 &&
|
|
71
|
+
bytes[9] === 0x45 &&
|
|
72
|
+
bytes[10] === 0x42 &&
|
|
73
|
+
bytes[11] === 0x50) {
|
|
74
|
+
// WebP VP8: width at bytes 26-27, height at 28-29
|
|
75
|
+
if (bytes.length > 29) {
|
|
76
|
+
const width = (bytes[26] | (bytes[27] << 8)) & 0x3fff;
|
|
77
|
+
const height = (bytes[28] | (bytes[29] << 8)) & 0x3fff;
|
|
78
|
+
return { width, height };
|
|
79
|
+
}
|
|
80
|
+
return null;
|
|
81
|
+
}
|
|
82
|
+
return null;
|
|
83
|
+
}
|
|
84
|
+
/** Estimates image token cost for Anthropic/Bedrock (Claude). */
|
|
85
|
+
function estimateAnthropicImageTokens(width, height) {
|
|
86
|
+
return Math.max(ANTHROPIC_IMAGE_MIN_TOKENS, Math.ceil((width * height) / ANTHROPIC_IMAGE_DIVISOR));
|
|
87
|
+
}
|
|
88
|
+
/** Estimates image token cost for OpenAI (high detail). */
|
|
89
|
+
function estimateOpenAIImageTokens(width, height, detail = 'high') {
|
|
90
|
+
if (detail === 'low') {
|
|
91
|
+
return OPENAI_IMAGE_LOW_TOKENS;
|
|
92
|
+
}
|
|
93
|
+
const tiles = Math.ceil(width / OPENAI_IMAGE_TILE_SIZE) *
|
|
94
|
+
Math.ceil(height / OPENAI_IMAGE_TILE_SIZE);
|
|
95
|
+
return OPENAI_IMAGE_LOW_TOKENS + tiles * OPENAI_IMAGE_TOKENS_PER_TILE;
|
|
96
|
+
}
|
|
97
|
+
/**
|
|
98
|
+
* Estimates token cost for an image content block.
|
|
99
|
+
* Extracts dimensions from base64 header when available.
|
|
100
|
+
* Falls back to Anthropic minimum (1024) when dimensions can't be determined.
|
|
101
|
+
*/
|
|
102
|
+
function estimateImageBlockTokens(block, encoding) {
|
|
103
|
+
let base64Data;
|
|
104
|
+
if (block.type === _enum.ContentTypes.IMAGE_URL || block.type === 'image_url') {
|
|
105
|
+
const imageUrl = block.image_url;
|
|
106
|
+
const url = typeof imageUrl === 'string' ? imageUrl : imageUrl?.url;
|
|
107
|
+
if (typeof url === 'string' && url.startsWith('data:')) {
|
|
108
|
+
base64Data = url;
|
|
109
|
+
}
|
|
110
|
+
else {
|
|
111
|
+
return ANTHROPIC_IMAGE_MIN_TOKENS;
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
else if (block.type === 'image') {
|
|
115
|
+
const source = block.source;
|
|
116
|
+
if (source?.type === 'base64' && typeof source.data === 'string') {
|
|
117
|
+
base64Data = source.data;
|
|
118
|
+
}
|
|
119
|
+
else {
|
|
120
|
+
return ANTHROPIC_IMAGE_MIN_TOKENS;
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
else {
|
|
124
|
+
return ANTHROPIC_IMAGE_MIN_TOKENS;
|
|
125
|
+
}
|
|
126
|
+
const dims = extractImageDimensions(base64Data);
|
|
127
|
+
if (dims == null) {
|
|
128
|
+
return ANTHROPIC_IMAGE_MIN_TOKENS;
|
|
129
|
+
}
|
|
130
|
+
if (encoding === 'claude') {
|
|
131
|
+
return estimateAnthropicImageTokens(dims.width, dims.height);
|
|
132
|
+
}
|
|
133
|
+
return estimateOpenAIImageTokens(dims.width, dims.height);
|
|
134
|
+
}
|
|
135
|
+
/**
|
|
136
|
+
* Estimates token cost for a document/file content block.
|
|
137
|
+
* Handles both LangChain standard format (`type: 'file'` with `source_type`)
|
|
138
|
+
* and Anthropic format (`type: 'document'` with `source`).
|
|
139
|
+
*
|
|
140
|
+
* - Plain text: tokenized directly via `getTokenCount`.
|
|
141
|
+
* - Base64 PDF: page count estimated from base64 length × per-page cost.
|
|
142
|
+
* - URL reference: conservative flat estimate.
|
|
143
|
+
*/
|
|
144
|
+
function estimateDocumentBlockTokens(block, encoding, getTokenCount) {
|
|
145
|
+
const pdfTokensPerPage = encoding === 'claude'
|
|
146
|
+
? ANTHROPIC_PDF_TOKENS_PER_PAGE
|
|
147
|
+
: OPENAI_PDF_TOKENS_PER_PAGE;
|
|
148
|
+
// LangChain standard format: type='file', source_type, data/text/url, mime_type
|
|
149
|
+
const sourceType = block.source_type;
|
|
150
|
+
if (typeof sourceType === 'string') {
|
|
151
|
+
const mimeType = (block.mime_type ?? '').split(';')[0];
|
|
152
|
+
if (sourceType === 'text' && typeof block.text === 'string') {
|
|
153
|
+
return getTokenCount(block.text);
|
|
154
|
+
}
|
|
155
|
+
if (sourceType === 'base64' && typeof block.data === 'string') {
|
|
156
|
+
if (mimeType === 'application/pdf' || mimeType === '') {
|
|
157
|
+
const pageEstimate = Math.max(1, Math.ceil(block.data.length / BASE64_BYTES_PER_PDF_PAGE));
|
|
158
|
+
return pageEstimate * pdfTokensPerPage;
|
|
159
|
+
}
|
|
160
|
+
// Image inside a file block — delegate to image estimation
|
|
161
|
+
if (mimeType.startsWith('image/')) {
|
|
162
|
+
return estimateImageBlockTokens({
|
|
163
|
+
...block,
|
|
164
|
+
type: 'image',
|
|
165
|
+
source: { type: 'base64', data: block.data },
|
|
166
|
+
}, encoding);
|
|
167
|
+
}
|
|
168
|
+
return getTokenCount(block.data);
|
|
169
|
+
}
|
|
170
|
+
if (sourceType === 'url') {
|
|
171
|
+
return URL_DOCUMENT_FALLBACK_TOKENS;
|
|
172
|
+
}
|
|
173
|
+
return URL_DOCUMENT_FALLBACK_TOKENS;
|
|
174
|
+
}
|
|
175
|
+
// Anthropic format: type='document', source: { type, data, media_type }
|
|
176
|
+
const source = block.source;
|
|
177
|
+
if (source == null) {
|
|
178
|
+
return URL_DOCUMENT_FALLBACK_TOKENS;
|
|
179
|
+
}
|
|
180
|
+
if (source.type === 'text' && typeof source.data === 'string') {
|
|
181
|
+
return getTokenCount(source.data);
|
|
182
|
+
}
|
|
183
|
+
if (source.type === 'base64' && typeof source.data === 'string') {
|
|
184
|
+
const mediaType = (source.media_type ?? '').split(';')[0];
|
|
185
|
+
if (mediaType === 'application/pdf' || mediaType === '') {
|
|
186
|
+
const pageEstimate = Math.max(1, Math.ceil(source.data.length / BASE64_BYTES_PER_PDF_PAGE));
|
|
187
|
+
return pageEstimate * pdfTokensPerPage;
|
|
188
|
+
}
|
|
189
|
+
if (mediaType.startsWith('image/')) {
|
|
190
|
+
return estimateImageBlockTokens({ type: 'image', source: { type: 'base64', data: source.data } }, encoding);
|
|
191
|
+
}
|
|
192
|
+
return getTokenCount(source.data);
|
|
193
|
+
}
|
|
194
|
+
if (source.type === 'url') {
|
|
195
|
+
return URL_DOCUMENT_FALLBACK_TOKENS;
|
|
196
|
+
}
|
|
197
|
+
// content-type source (wraps other blocks like images)
|
|
198
|
+
if (source.type === 'content' && Array.isArray(source.content)) {
|
|
199
|
+
let total = 0;
|
|
200
|
+
for (const inner of source.content) {
|
|
201
|
+
if (inner != null && typeof inner === 'object' && 'type' in inner) {
|
|
202
|
+
const innerBlock = inner;
|
|
203
|
+
if (innerBlock.type === 'image') {
|
|
204
|
+
total += estimateImageBlockTokens(innerBlock, encoding);
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
return total > 0 ? total : URL_DOCUMENT_FALLBACK_TOKENS;
|
|
209
|
+
}
|
|
210
|
+
return URL_DOCUMENT_FALLBACK_TOKENS;
|
|
211
|
+
}
|
|
6
212
|
const tokenizers = {};
|
|
7
213
|
async function getTokenizer(encoding = 'o200k_base') {
|
|
8
214
|
const cached = tokenizers[encoding];
|
|
@@ -22,34 +228,48 @@ function encodingForModel(model) {
|
|
|
22
228
|
}
|
|
23
229
|
return 'o200k_base';
|
|
24
230
|
}
|
|
25
|
-
function getTokenCountForMessage(message, getTokenCount) {
|
|
231
|
+
function getTokenCountForMessage(message, getTokenCount, encoding = 'o200k_base') {
|
|
26
232
|
const tokensPerMessage = 3;
|
|
27
233
|
const processValue = (value) => {
|
|
28
234
|
if (Array.isArray(value)) {
|
|
29
|
-
for (const
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
235
|
+
for (const raw of value) {
|
|
236
|
+
const item = raw;
|
|
237
|
+
if (item == null || typeof item.type !== 'string') {
|
|
238
|
+
continue;
|
|
239
|
+
}
|
|
240
|
+
if (item.type === _enum.ContentTypes.ERROR) {
|
|
241
|
+
continue;
|
|
242
|
+
}
|
|
243
|
+
if (item.type === _enum.ContentTypes.IMAGE_URL ||
|
|
244
|
+
item.type === 'image_url' ||
|
|
245
|
+
item.type === 'image') {
|
|
246
|
+
numTokens += Math.ceil(estimateImageBlockTokens(item, encoding) * IMAGE_TOKEN_SAFETY_MARGIN);
|
|
247
|
+
continue;
|
|
248
|
+
}
|
|
249
|
+
if (item.type === 'document' ||
|
|
250
|
+
item.type === 'file' ||
|
|
251
|
+
item.type === _enum.ContentTypes.IMAGE_FILE) {
|
|
252
|
+
numTokens += Math.ceil(estimateDocumentBlockTokens(item, encoding, getTokenCount) *
|
|
253
|
+
IMAGE_TOKEN_SAFETY_MARGIN);
|
|
34
254
|
continue;
|
|
35
255
|
}
|
|
36
256
|
if (item.type === _enum.ContentTypes.TOOL_CALL && item.tool_call != null) {
|
|
37
|
-
const toolName = item.tool_call
|
|
38
|
-
if (toolName
|
|
257
|
+
const toolName = item.tool_call.name;
|
|
258
|
+
if (typeof toolName === 'string' && toolName.length > 0) {
|
|
39
259
|
numTokens += getTokenCount(toolName);
|
|
40
260
|
}
|
|
41
|
-
const args = item.tool_call
|
|
42
|
-
if (args
|
|
261
|
+
const args = item.tool_call.args;
|
|
262
|
+
if (typeof args === 'string' && args.length > 0) {
|
|
43
263
|
numTokens += getTokenCount(args);
|
|
44
264
|
}
|
|
45
|
-
const output = item.tool_call
|
|
46
|
-
if (output
|
|
265
|
+
const output = item.tool_call.output;
|
|
266
|
+
if (typeof output === 'string' && output.length > 0) {
|
|
47
267
|
numTokens += getTokenCount(output);
|
|
48
268
|
}
|
|
49
269
|
continue;
|
|
50
270
|
}
|
|
51
271
|
const nestedValue = item[item.type];
|
|
52
|
-
if (
|
|
272
|
+
if (nestedValue == null) {
|
|
53
273
|
continue;
|
|
54
274
|
}
|
|
55
275
|
processValue(nestedValue);
|
|
@@ -69,6 +289,12 @@ function getTokenCountForMessage(message, getTokenCount) {
|
|
|
69
289
|
processValue(message.content);
|
|
70
290
|
return numTokens;
|
|
71
291
|
}
|
|
292
|
+
/**
|
|
293
|
+
* Anthropic's API consistently reports ~10% more tokens than the local
|
|
294
|
+
* claude tokenizer due to internal message framing and content encoding.
|
|
295
|
+
* Verified empirically across content types via the count_tokens endpoint.
|
|
296
|
+
*/
|
|
297
|
+
const CLAUDE_TOKEN_CORRECTION = 1.1;
|
|
72
298
|
/**
|
|
73
299
|
* Creates a token counter function using the specified encoding.
|
|
74
300
|
* Lazily loads the encoding data on first use via dynamic import.
|
|
@@ -76,7 +302,11 @@ function getTokenCountForMessage(message, getTokenCount) {
|
|
|
76
302
|
const createTokenCounter = async (encoding = 'o200k_base') => {
|
|
77
303
|
const tok = await getTokenizer(encoding);
|
|
78
304
|
const countTokens = (text) => tok.count(text);
|
|
79
|
-
|
|
305
|
+
const isClaude = encoding === 'claude';
|
|
306
|
+
return (message) => {
|
|
307
|
+
const count = getTokenCountForMessage(message, countTokens, encoding);
|
|
308
|
+
return isClaude ? Math.ceil(count * CLAUDE_TOKEN_CORRECTION) : count;
|
|
309
|
+
};
|
|
80
310
|
};
|
|
81
311
|
/** Utility to manage the token encoder lifecycle explicitly. */
|
|
82
312
|
const TokenEncoderManager = {
|
|
@@ -96,5 +326,8 @@ const TokenEncoderManager = {
|
|
|
96
326
|
exports.TokenEncoderManager = TokenEncoderManager;
|
|
97
327
|
exports.createTokenCounter = createTokenCounter;
|
|
98
328
|
exports.encodingForModel = encodingForModel;
|
|
329
|
+
exports.estimateAnthropicImageTokens = estimateAnthropicImageTokens;
|
|
330
|
+
exports.estimateOpenAIImageTokens = estimateOpenAIImageTokens;
|
|
331
|
+
exports.extractImageDimensions = extractImageDimensions;
|
|
99
332
|
exports.getTokenCountForMessage = getTokenCountForMessage;
|
|
100
333
|
//# sourceMappingURL=tokens.cjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"tokens.cjs","sources":["../../../src/utils/tokens.ts"],"sourcesContent":["import { Tokenizer } from 'ai-tokenizer';\nimport type { BaseMessage } from '@langchain/core/messages';\nimport { ContentTypes } from '@/common/enum';\n\nexport type EncodingName = 'o200k_base' | 'claude';\n\nconst tokenizers: Partial<Record<EncodingName, Tokenizer>> = {};\n\nasync function getTokenizer(\n encoding: EncodingName = 'o200k_base'\n): Promise<Tokenizer> {\n const cached = tokenizers[encoding];\n if (cached) {\n return cached;\n }\n const data =\n encoding === 'claude'\n ? await import('ai-tokenizer/encoding/claude')\n : await import('ai-tokenizer/encoding/o200k_base');\n const instance = new Tokenizer(data);\n tokenizers[encoding] = instance;\n return instance;\n}\n\nexport function encodingForModel(model: string): EncodingName {\n if (model.toLowerCase().includes('claude')) {\n return 'claude';\n }\n return 'o200k_base';\n}\n\nexport function getTokenCountForMessage(\n message: BaseMessage,\n getTokenCount: (text: string) => number\n): number {\n const tokensPerMessage = 3;\n\n const processValue = (value: unknown): void => {\n if (Array.isArray(value)) {\n for (const item of value) {\n if (\n !item ||\n !item.type ||\n item.type === ContentTypes.ERROR ||\n item.type === ContentTypes.IMAGE_URL\n ) {\n continue;\n }\n\n if (item.type === ContentTypes.TOOL_CALL && item.tool_call != null) {\n const toolName = item.tool_call?.name || '';\n if (toolName != null && toolName && typeof toolName === 'string') {\n numTokens += getTokenCount(toolName);\n }\n\n const args = item.tool_call?.args || '';\n if (args != null && args && typeof args === 'string') {\n numTokens += getTokenCount(args);\n }\n\n const output = item.tool_call?.output || '';\n if (output != null && output && typeof output === 'string') {\n numTokens += getTokenCount(output);\n }\n continue;\n }\n\n const nestedValue = item[item.type];\n\n if (!nestedValue) {\n continue;\n }\n\n 
processValue(nestedValue);\n }\n } else if (typeof value === 'string') {\n numTokens += getTokenCount(value);\n } else if (typeof value === 'number') {\n numTokens += getTokenCount(value.toString());\n } else if (typeof value === 'boolean') {\n numTokens += getTokenCount(value.toString());\n }\n };\n\n let numTokens = tokensPerMessage;\n processValue(message.content);\n return numTokens;\n}\n\n/**\n * Creates a token counter function using the specified encoding.\n * Lazily loads the encoding data on first use via dynamic import.\n */\nexport const createTokenCounter = async (\n encoding: EncodingName = 'o200k_base'\n): Promise<(message: BaseMessage) => number> => {\n const tok = await getTokenizer(encoding);\n const countTokens = (text: string): number => tok.count(text);\n return (message: BaseMessage): number =>\n getTokenCountForMessage(message, countTokens);\n};\n\n/** Utility to manage the token encoder lifecycle explicitly. */\nexport const TokenEncoderManager = {\n async initialize(): Promise<void> {\n // No-op: ai-tokenizer is synchronously initialized from bundled data.\n },\n\n reset(): void {\n for (const key of Object.keys(tokenizers)) {\n delete tokenizers[key as EncodingName];\n }\n },\n\n isInitialized(): boolean {\n return Object.keys(tokenizers).length > 0;\n 
},\n};\n"],"names":["Tokenizer","ContentTypes"],"mappings":";;;;;AAMA,MAAM,UAAU,GAA6C,EAAE;AAE/D,eAAe,YAAY,CACzB,QAAA,GAAyB,YAAY,EAAA;AAErC,IAAA,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,CAAC;IACnC,IAAI,MAAM,EAAE;AACV,QAAA,OAAO,MAAM;IACf;AACA,IAAA,MAAM,IAAI,GACR,QAAQ,KAAK;AACX,UAAE,MAAM,OAAO,8BAA8B;AAC7C,UAAE,MAAM,OAAO,kCAAkC,CAAC;AACtD,IAAA,MAAM,QAAQ,GAAG,IAAIA,qBAAS,CAAC,IAAI,CAAC;AACpC,IAAA,UAAU,CAAC,QAAQ,CAAC,GAAG,QAAQ;AAC/B,IAAA,OAAO,QAAQ;AACjB;AAEM,SAAU,gBAAgB,CAAC,KAAa,EAAA;IAC5C,IAAI,KAAK,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;AAC1C,QAAA,OAAO,QAAQ;IACjB;AACA,IAAA,OAAO,YAAY;AACrB;AAEM,SAAU,uBAAuB,CACrC,OAAoB,EACpB,aAAuC,EAAA;IAEvC,MAAM,gBAAgB,GAAG,CAAC;AAE1B,IAAA,MAAM,YAAY,GAAG,CAAC,KAAc,KAAU;AAC5C,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AACxB,YAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;AACxB,gBAAA,IACE,CAAC,IAAI;oBACL,CAAC,IAAI,CAAC,IAAI;AACV,oBAAA,IAAI,CAAC,IAAI,KAAKC,kBAAY,CAAC,KAAK;AAChC,oBAAA,IAAI,CAAC,IAAI,KAAKA,kBAAY,CAAC,SAAS,EACpC;oBACA;gBACF;AAEA,gBAAA,IAAI,IAAI,CAAC,IAAI,KAAKA,kBAAY,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;oBAClE,MAAM,QAAQ,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE;oBAC3C,IAAI,QAAQ,IAAI,IAAI,IAAI,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAChE,wBAAA,SAAS,IAAI,aAAa,CAAC,QAAQ,CAAC;oBACtC;oBAEA,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE;oBACvC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AACpD,wBAAA,SAAS,IAAI,aAAa,CAAC,IAAI,CAAC;oBAClC;oBAEA,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,EAAE,MAAM,IAAI,EAAE;oBAC3C,IAAI,MAAM,IAAI,IAAI,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC1D,wBAAA,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;oBACpC;oBACA;gBACF;gBAEA,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC;gBAEnC,IAAI,CAAC,WAAW,EAAE;oBAChB;gBACF;gBAEA,YAAY,CAAC,WAAW,CAAC;YAC3B;QACF;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACpC,YAAA,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC;QACnC;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YACpC,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC;QAC9C;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;YACrC,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC;QAC9C;AACF,IAAA,C
AAC;IAED,IAAI,SAAS,GAAG,gBAAgB;AAChC,IAAA,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC;AAC7B,IAAA,OAAO,SAAS;AAClB;AAEA;;;AAGG;MACU,kBAAkB,GAAG,OAChC,QAAA,GAAyB,YAAY,KACQ;AAC7C,IAAA,MAAM,GAAG,GAAG,MAAM,YAAY,CAAC,QAAQ,CAAC;AACxC,IAAA,MAAM,WAAW,GAAG,CAAC,IAAY,KAAa,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC;IAC7D,OAAO,CAAC,OAAoB,KAC1B,uBAAuB,CAAC,OAAO,EAAE,WAAW,CAAC;AACjD;AAEA;AACO,MAAM,mBAAmB,GAAG;AACjC,IAAA,MAAM,UAAU,GAAA;;IAEhB,CAAC;IAED,KAAK,GAAA;QACH,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,YAAA,OAAO,UAAU,CAAC,GAAmB,CAAC;QACxC;IACF,CAAC;IAED,aAAa,GAAA;QACX,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC;IAC3C,CAAC;;;;;;;;"}
|
|
1
|
+
{"version":3,"file":"tokens.cjs","sources":["../../../src/utils/tokens.ts"],"sourcesContent":["import { Tokenizer } from 'ai-tokenizer';\nimport type { BaseMessage } from '@langchain/core/messages';\nimport { ContentTypes } from '@/common/enum';\n\nexport type EncodingName = 'o200k_base' | 'claude';\n\n/** Anthropic minimum image token cost. */\nconst ANTHROPIC_IMAGE_MIN_TOKENS = 1024;\n/** Anthropic divisor: tokens = width × height / 750. */\nconst ANTHROPIC_IMAGE_DIVISOR = 750;\n/** OpenAI low-detail fixed cost. */\nconst OPENAI_IMAGE_LOW_TOKENS = 85;\n/** OpenAI high-detail tile size. */\nconst OPENAI_IMAGE_TILE_SIZE = 512;\n/** OpenAI high-detail tokens per tile. */\nconst OPENAI_IMAGE_TOKENS_PER_TILE = 170;\n/** Google Gemini fixed per-image cost. */\nconst _GEMINI_IMAGE_TOKENS = 258;\n/** Safety margin for image and document token estimates (5% overestimate). */\nconst IMAGE_TOKEN_SAFETY_MARGIN = 1.05;\n\n/**\n * Anthropic PDF: each page costs image tokens + text tokens.\n * Typical range is 1500-3000 tokens/page. Using 2000 as midpoint.\n */\nconst ANTHROPIC_PDF_TOKENS_PER_PAGE = 2000;\n/** OpenAI PDF: each page rendered as high-detail image. ~1500 tokens typical. */\nconst OPENAI_PDF_TOKENS_PER_PAGE = 1500;\n/** Gemini PDF: fixed 258 tokens per page. */\nconst _GEMINI_PDF_TOKENS_PER_PAGE = 258;\n/** Approximate base64 bytes per PDF page for page count estimation. */\nconst BASE64_BYTES_PER_PDF_PAGE = 75_000;\n/** Fallback token cost for URL-referenced documents without local data. */\nconst URL_DOCUMENT_FALLBACK_TOKENS = 2000;\n\n/**\n * Extracts image dimensions from the first bytes of a base64-encoded\n * PNG, JPEG, GIF, or WebP without decoding the full image.\n * Returns null if the format is unrecognized or data is too short.\n */\nexport function extractImageDimensions(\n base64Data: string\n): { width: number; height: number } | null {\n const raw = base64Data.startsWith('data:')\n ? 
base64Data.slice(base64Data.indexOf(',') + 1)\n : base64Data;\n\n if (raw.length < 32) {\n return null;\n }\n\n const bytes = new Uint8Array(Buffer.from(raw.slice(0, 80), 'base64'));\n\n if (bytes[0] === 0x89 && bytes[1] === 0x50) {\n // PNG: width at bytes 16-19, height at 20-23 (big-endian)\n const width =\n (bytes[16] << 24) | (bytes[17] << 16) | (bytes[18] << 8) | bytes[19];\n const height =\n (bytes[20] << 24) | (bytes[21] << 16) | (bytes[22] << 8) | bytes[23];\n return { width, height };\n }\n\n if (bytes[0] === 0xff && bytes[1] === 0xd8) {\n // JPEG: scan for SOF0 (0xFFC0) or SOF2 (0xFFC2) marker\n for (let i = 2; i < bytes.length - 9; i++) {\n if (\n bytes[i] === 0xff &&\n (bytes[i + 1] === 0xc0 || bytes[i + 1] === 0xc2)\n ) {\n const height = (bytes[i + 5] << 8) | bytes[i + 6];\n const width = (bytes[i + 7] << 8) | bytes[i + 8];\n return { width, height };\n }\n }\n return null;\n }\n\n if (bytes[0] === 0x47 && bytes[1] === 0x49 && bytes[2] === 0x46) {\n // GIF: width at bytes 6-7, height at 8-9 (little-endian)\n const width = bytes[6] | (bytes[7] << 8);\n const height = bytes[8] | (bytes[9] << 8);\n return { width, height };\n }\n\n if (\n bytes[0] === 0x52 &&\n bytes[1] === 0x49 &&\n bytes[2] === 0x46 &&\n bytes[3] === 0x46 &&\n bytes[8] === 0x57 &&\n bytes[9] === 0x45 &&\n bytes[10] === 0x42 &&\n bytes[11] === 0x50\n ) {\n // WebP VP8: width at bytes 26-27, height at 28-29\n if (bytes.length > 29) {\n const width = (bytes[26] | (bytes[27] << 8)) & 0x3fff;\n const height = (bytes[28] | (bytes[29] << 8)) & 0x3fff;\n return { width, height };\n }\n return null;\n }\n\n return null;\n}\n\n/** Estimates image token cost for Anthropic/Bedrock (Claude). */\nexport function estimateAnthropicImageTokens(\n width: number,\n height: number\n): number {\n return Math.max(\n ANTHROPIC_IMAGE_MIN_TOKENS,\n Math.ceil((width * height) / ANTHROPIC_IMAGE_DIVISOR)\n );\n}\n\n/** Estimates image token cost for OpenAI (high detail). 
*/\nexport function estimateOpenAIImageTokens(\n width: number,\n height: number,\n detail: string = 'high'\n): number {\n if (detail === 'low') {\n return OPENAI_IMAGE_LOW_TOKENS;\n }\n const tiles =\n Math.ceil(width / OPENAI_IMAGE_TILE_SIZE) *\n Math.ceil(height / OPENAI_IMAGE_TILE_SIZE);\n return OPENAI_IMAGE_LOW_TOKENS + tiles * OPENAI_IMAGE_TOKENS_PER_TILE;\n}\n\n/**\n * Estimates token cost for an image content block.\n * Extracts dimensions from base64 header when available.\n * Falls back to Anthropic minimum (1024) when dimensions can't be determined.\n */\nfunction estimateImageBlockTokens(\n block: Record<string, unknown>,\n encoding: EncodingName\n): number {\n let base64Data: string | undefined;\n\n if (block.type === ContentTypes.IMAGE_URL || block.type === 'image_url') {\n const imageUrl = block.image_url as string | { url?: string } | undefined;\n const url = typeof imageUrl === 'string' ? imageUrl : imageUrl?.url;\n if (typeof url === 'string' && url.startsWith('data:')) {\n base64Data = url;\n } else {\n return ANTHROPIC_IMAGE_MIN_TOKENS;\n }\n } else if (block.type === 'image') {\n const source = block.source as { type?: string; data?: string } | undefined;\n if (source?.type === 'base64' && typeof source.data === 'string') {\n base64Data = source.data;\n } else {\n return ANTHROPIC_IMAGE_MIN_TOKENS;\n }\n } else {\n return ANTHROPIC_IMAGE_MIN_TOKENS;\n }\n\n const dims = extractImageDimensions(base64Data);\n if (dims == null) {\n return ANTHROPIC_IMAGE_MIN_TOKENS;\n }\n\n if (encoding === 'claude') {\n return estimateAnthropicImageTokens(dims.width, dims.height);\n }\n return estimateOpenAIImageTokens(dims.width, dims.height);\n}\n\n/**\n * Estimates token cost for a document/file content block.\n * Handles both LangChain standard format (`type: 'file'` with `source_type`)\n * and Anthropic format (`type: 'document'` with `source`).\n *\n * - Plain text: tokenized directly via `getTokenCount`.\n * - Base64 PDF: page count estimated from base64 
length × per-page cost.\n * - URL reference: conservative flat estimate.\n */\nfunction estimateDocumentBlockTokens(\n block: Record<string, unknown>,\n encoding: EncodingName,\n getTokenCount: (text: string) => number\n): number {\n const pdfTokensPerPage =\n encoding === 'claude'\n ? ANTHROPIC_PDF_TOKENS_PER_PAGE\n : OPENAI_PDF_TOKENS_PER_PAGE;\n\n // LangChain standard format: type='file', source_type, data/text/url, mime_type\n const sourceType = block.source_type as string | undefined;\n if (typeof sourceType === 'string') {\n const mimeType = ((block.mime_type as string | undefined) ?? '').split(\n ';'\n )[0];\n\n if (sourceType === 'text' && typeof block.text === 'string') {\n return getTokenCount(block.text as string);\n }\n\n if (sourceType === 'base64' && typeof block.data === 'string') {\n if (mimeType === 'application/pdf' || mimeType === '') {\n const pageEstimate = Math.max(\n 1,\n Math.ceil((block.data as string).length / BASE64_BYTES_PER_PDF_PAGE)\n );\n return pageEstimate * pdfTokensPerPage;\n }\n // Image inside a file block — delegate to image estimation\n if (mimeType.startsWith('image/')) {\n return estimateImageBlockTokens(\n {\n ...block,\n type: 'image',\n source: { type: 'base64', data: block.data },\n },\n encoding\n );\n }\n return getTokenCount(block.data as string);\n }\n\n if (sourceType === 'url') {\n return URL_DOCUMENT_FALLBACK_TOKENS;\n }\n\n return URL_DOCUMENT_FALLBACK_TOKENS;\n }\n\n // Anthropic format: type='document', source: { type, data, media_type }\n const source = block.source as\n | {\n type?: string;\n data?: string;\n media_type?: string;\n content?: unknown[];\n }\n | undefined;\n\n if (source == null) {\n return URL_DOCUMENT_FALLBACK_TOKENS;\n }\n\n if (source.type === 'text' && typeof source.data === 'string') {\n return getTokenCount(source.data);\n }\n\n if (source.type === 'base64' && typeof source.data === 'string') {\n const mediaType = (source.media_type ?? 
'').split(';')[0];\n if (mediaType === 'application/pdf' || mediaType === '') {\n const pageEstimate = Math.max(\n 1,\n Math.ceil(source.data.length / BASE64_BYTES_PER_PDF_PAGE)\n );\n return pageEstimate * pdfTokensPerPage;\n }\n if (mediaType.startsWith('image/')) {\n return estimateImageBlockTokens(\n { type: 'image', source: { type: 'base64', data: source.data } },\n encoding\n );\n }\n return getTokenCount(source.data);\n }\n\n if (source.type === 'url') {\n return URL_DOCUMENT_FALLBACK_TOKENS;\n }\n\n // content-type source (wraps other blocks like images)\n if (source.type === 'content' && Array.isArray(source.content)) {\n let total = 0;\n for (const inner of source.content) {\n if (inner != null && typeof inner === 'object' && 'type' in inner) {\n const innerBlock = inner as Record<string, unknown>;\n if (innerBlock.type === 'image') {\n total += estimateImageBlockTokens(innerBlock, encoding);\n }\n }\n }\n return total > 0 ? total : URL_DOCUMENT_FALLBACK_TOKENS;\n }\n\n return URL_DOCUMENT_FALLBACK_TOKENS;\n}\n\nconst tokenizers: Partial<Record<EncodingName, Tokenizer>> = {};\n\nasync function getTokenizer(\n encoding: EncodingName = 'o200k_base'\n): Promise<Tokenizer> {\n const cached = tokenizers[encoding];\n if (cached) {\n return cached;\n }\n const data =\n encoding === 'claude'\n ? 
await import('ai-tokenizer/encoding/claude')\n : await import('ai-tokenizer/encoding/o200k_base');\n const instance = new Tokenizer(data);\n tokenizers[encoding] = instance;\n return instance;\n}\n\nexport function encodingForModel(model: string): EncodingName {\n if (model.toLowerCase().includes('claude')) {\n return 'claude';\n }\n return 'o200k_base';\n}\n\nexport function getTokenCountForMessage(\n message: BaseMessage,\n getTokenCount: (text: string) => number,\n encoding: EncodingName = 'o200k_base'\n): number {\n const tokensPerMessage = 3;\n\n type ContentBlock = Record<string, unknown> & {\n type?: string;\n tool_call?: { name?: string; args?: string; output?: string };\n };\n\n const processValue = (value: unknown): void => {\n if (Array.isArray(value)) {\n for (const raw of value) {\n const item = raw as ContentBlock | null | undefined;\n if (item == null || typeof item.type !== 'string') {\n continue;\n }\n if (item.type === ContentTypes.ERROR) {\n continue;\n }\n\n if (\n item.type === ContentTypes.IMAGE_URL ||\n item.type === 'image_url' ||\n item.type === 'image'\n ) {\n numTokens += Math.ceil(\n estimateImageBlockTokens(item, encoding) * IMAGE_TOKEN_SAFETY_MARGIN\n );\n continue;\n }\n\n if (\n item.type === 'document' ||\n item.type === 'file' ||\n item.type === ContentTypes.IMAGE_FILE\n ) {\n numTokens += Math.ceil(\n estimateDocumentBlockTokens(item, encoding, getTokenCount) *\n IMAGE_TOKEN_SAFETY_MARGIN\n );\n continue;\n }\n\n if (item.type === ContentTypes.TOOL_CALL && item.tool_call != null) {\n const toolName = item.tool_call.name;\n if (typeof toolName === 'string' && toolName.length > 0) {\n numTokens += getTokenCount(toolName);\n }\n const args = item.tool_call.args;\n if (typeof args === 'string' && args.length > 0) {\n numTokens += getTokenCount(args);\n }\n const output = item.tool_call.output;\n if (typeof output === 'string' && output.length > 0) {\n numTokens += getTokenCount(output);\n }\n continue;\n }\n\n const nestedValue = 
item[item.type];\n if (nestedValue == null) {\n continue;\n }\n\n processValue(nestedValue);\n }\n } else if (typeof value === 'string') {\n numTokens += getTokenCount(value);\n } else if (typeof value === 'number') {\n numTokens += getTokenCount(value.toString());\n } else if (typeof value === 'boolean') {\n numTokens += getTokenCount(value.toString());\n }\n };\n\n let numTokens = tokensPerMessage;\n processValue(message.content);\n return numTokens;\n}\n\n/**\n * Anthropic's API consistently reports ~10% more tokens than the local\n * claude tokenizer due to internal message framing and content encoding.\n * Verified empirically across content types via the count_tokens endpoint.\n */\nconst CLAUDE_TOKEN_CORRECTION = 1.1;\n\n/**\n * Creates a token counter function using the specified encoding.\n * Lazily loads the encoding data on first use via dynamic import.\n */\nexport const createTokenCounter = async (\n encoding: EncodingName = 'o200k_base'\n): Promise<(message: BaseMessage) => number> => {\n const tok = await getTokenizer(encoding);\n const countTokens = (text: string): number => tok.count(text);\n const isClaude = encoding === 'claude';\n return (message: BaseMessage): number => {\n const count = getTokenCountForMessage(message, countTokens, encoding);\n return isClaude ? Math.ceil(count * CLAUDE_TOKEN_CORRECTION) : count;\n };\n};\n\n/** Utility to manage the token encoder lifecycle explicitly. 
*/\nexport const TokenEncoderManager = {\n async initialize(): Promise<void> {\n // No-op: ai-tokenizer is synchronously initialized from bundled data.\n },\n\n reset(): void {\n for (const key of Object.keys(tokenizers)) {\n delete tokenizers[key as EncodingName];\n }\n },\n\n isInitialized(): boolean {\n return Object.keys(tokenizers).length > 0;\n },\n};\n"],"names":["ContentTypes","Tokenizer"],"mappings":";;;;;AAMA;AACA,MAAM,0BAA0B,GAAG,IAAI;AACvC;AACA,MAAM,uBAAuB,GAAG,GAAG;AACnC;AACA,MAAM,uBAAuB,GAAG,EAAE;AAClC;AACA,MAAM,sBAAsB,GAAG,GAAG;AAClC;AACA,MAAM,4BAA4B,GAAG,GAAG;AAGxC;AACA,MAAM,yBAAyB,GAAG,IAAI;AAEtC;;;AAGG;AACH,MAAM,6BAA6B,GAAG,IAAI;AAC1C;AACA,MAAM,0BAA0B,GAAG,IAAI;AAGvC;AACA,MAAM,yBAAyB,GAAG,MAAM;AACxC;AACA,MAAM,4BAA4B,GAAG,IAAI;AAEzC;;;;AAIG;AACG,SAAU,sBAAsB,CACpC,UAAkB,EAAA;AAElB,IAAA,MAAM,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,OAAO;AACvC,UAAE,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC;UAC5C,UAAU;AAEd,IAAA,IAAI,GAAG,CAAC,MAAM,GAAG,EAAE,EAAE;AACnB,QAAA,OAAO,IAAI;IACb;IAEA,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC;AAErE,IAAA,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;;AAE1C,QAAA,MAAM,KAAK,GACT,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC;AACtE,QAAA,MAAM,MAAM,GACV,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,KAAK,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC;AACtE,QAAA,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE;IAC1B;AAEA,IAAA,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;;AAE1C,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACzC,YAAA,IACE,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,iBAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,IAAI,IAAI,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,EAChD;AACA,gBAAA,MAAM,MAAM,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,K
AAK,CAAC,CAAC,GAAG,CAAC,CAAC;AACjD,gBAAA,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;AAChD,gBAAA,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE;YAC1B;QACF;AACA,QAAA,OAAO,IAAI;IACb;IAEA,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;;AAE/D,QAAA,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACxC,QAAA,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACzC,QAAA,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE;IAC1B;AAEA,IAAA,IACE,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI;AACjB,QAAA,KAAK,CAAC,EAAE,CAAC,KAAK,IAAI;AAClB,QAAA,KAAK,CAAC,EAAE,CAAC,KAAK,IAAI,EAClB;;AAEA,QAAA,IAAI,KAAK,CAAC,MAAM,GAAG,EAAE,EAAE;AACrB,YAAA,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,MAAM;AACrD,YAAA,MAAM,MAAM,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,MAAM;AACtD,YAAA,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE;QAC1B;AACA,QAAA,OAAO,IAAI;IACb;AAEA,IAAA,OAAO,IAAI;AACb;AAEA;AACM,SAAU,4BAA4B,CAC1C,KAAa,EACb,MAAc,EAAA;AAEd,IAAA,OAAO,IAAI,CAAC,GAAG,CACb,0BAA0B,EAC1B,IAAI,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,IAAI,uBAAuB,CAAC,CACtD;AACH;AAEA;AACM,SAAU,yBAAyB,CACvC,KAAa,EACb,MAAc,EACd,SAAiB,MAAM,EAAA;AAEvB,IAAA,IAAI,MAAM,KAAK,KAAK,EAAE;AACpB,QAAA,OAAO,uBAAuB;IAChC;IACA,MAAM,KAAK,GACT,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,sBAAsB,CAAC;AACzC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,sBAAsB,CAAC;AAC5C,IAAA,OAAO,uBAAuB,GAAG,KAAK,GAAG,4BAA4B;AACvE;AAEA;;;;AAIG;AACH,SAAS,wBAAwB,CAC/B,KAA8B,EAC9B,QAAsB,EAAA;AAEtB,IAAA,IAAI,UAA8B;AAElC,IAAA,IAAI,KAAK,CAAC,IAAI,KAAKA,kBAAY,CAAC,SAAS,IAAI,KAAK,CAAC,IAAI,KAAK,WAAW,EAAE;AACvE,QAAA,MAAM,QAAQ,GAAG,KAAK,CAAC,SAAkD;AACzE,QAAA,MAAM,GAAG,GAAG,OAAO,QAAQ,KAAK,QAAQ,GAAG,QAAQ,GAAG,QAAQ,EAAE,GAAG;AACnE,QAAA,IAAI,OAAO,G
AAG,KAAK,QAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;YACtD,UAAU,GAAG,GAAG;QAClB;aAAO;AACL,YAAA,OAAO,0BAA0B;QACnC;IACF;AAAO,SAAA,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;AACjC,QAAA,MAAM,MAAM,GAAG,KAAK,CAAC,MAAsD;AAC3E,QAAA,IAAI,MAAM,EAAE,IAAI,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;AAChE,YAAA,UAAU,GAAG,MAAM,CAAC,IAAI;QAC1B;aAAO;AACL,YAAA,OAAO,0BAA0B;QACnC;IACF;SAAO;AACL,QAAA,OAAO,0BAA0B;IACnC;AAEA,IAAA,MAAM,IAAI,GAAG,sBAAsB,CAAC,UAAU,CAAC;AAC/C,IAAA,IAAI,IAAI,IAAI,IAAI,EAAE;AAChB,QAAA,OAAO,0BAA0B;IACnC;AAEA,IAAA,IAAI,QAAQ,KAAK,QAAQ,EAAE;QACzB,OAAO,4BAA4B,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,CAAC;IAC9D;IACA,OAAO,yBAAyB,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,CAAC;AAC3D;AAEA;;;;;;;;AAQG;AACH,SAAS,2BAA2B,CAClC,KAA8B,EAC9B,QAAsB,EACtB,aAAuC,EAAA;AAEvC,IAAA,MAAM,gBAAgB,GACpB,QAAQ,KAAK;AACX,UAAE;UACA,0BAA0B;;AAGhC,IAAA,MAAM,UAAU,GAAG,KAAK,CAAC,WAAiC;AAC1D,IAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AAClC,QAAA,MAAM,QAAQ,GAAG,CAAE,KAAK,CAAC,SAAgC,IAAI,EAAE,EAAE,KAAK,CACpE,GAAG,CACJ,CAAC,CAAC,CAAC;QAEJ,IAAI,UAAU,KAAK,MAAM,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;AAC3D,YAAA,OAAO,aAAa,CAAC,KAAK,CAAC,IAAc,CAAC;QAC5C;QAEA,IAAI,UAAU,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YAC7D,IAAI,QAAQ,KAAK,iBAAiB,IAAI,QAAQ,KAAK,EAAE,EAAE;gBACrD,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAC3B,CAAC,EACD,IAAI,CAAC,IAAI,CAAE,KAAK,CAAC,IAAe,CAAC,MAAM,GAAG,yBAAyB,CAAC,CACrE;gBACD,OAAO,YAAY,GAAG,gBAAgB;YACxC;;AAEA,YAAA,IAAI,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AACjC,gBAAA,OAAO,wBAAwB,CAC7B;AACE,oBAAA,GAAG,KAAK;AACR,oBAAA,IAAI,EAAE,OAAO;oBACb,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,EAAE;iBAC7C,EACD,QAAQ,CACT;YACH;AACA,YAAA,OAAO,aAAa,CAAC,KAAK,CAAC,IAAc,CAAC;QAC5C;AAEA,QAAA,IAAI,UAAU,KAAK,KAAK,EAAE;AACxB,YAAA,OAAO,4BAA4B;QACrC;AAEA,QAAA,OAAO,4BAA4B;IACrC;;AAGA,IAAA,MAAM,MAAM,GAAG,KAAK,CAAC,MAOR;AAEb,IAAA,IAAI,MAAM,IAAI,IAAI,EAAE;AAClB,QAAA,OAAO,4BAA4B;IACrC;AAEA,IAAA,IAAI,MAAM,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;AAC7D,QAAA,OAAO,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC;IACnC;AA
EA,IAAA,IAAI,MAAM,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,IAAI,KAAK,QAAQ,EAAE;AAC/D,QAAA,MAAM,SAAS,GAAG,CAAC,MAAM,CAAC,UAAU,IAAI,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACzD,IAAI,SAAS,KAAK,iBAAiB,IAAI,SAAS,KAAK,EAAE,EAAE;YACvD,MAAM,YAAY,GAAG,IAAI,CAAC,GAAG,CAC3B,CAAC,EACD,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,yBAAyB,CAAC,CAC1D;YACD,OAAO,YAAY,GAAG,gBAAgB;QACxC;AACA,QAAA,IAAI,SAAS,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YAClC,OAAO,wBAAwB,CAC7B,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,EAAE,EAChE,QAAQ,CACT;QACH;AACA,QAAA,OAAO,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC;IACnC;AAEA,IAAA,IAAI,MAAM,CAAC,IAAI,KAAK,KAAK,EAAE;AACzB,QAAA,OAAO,4BAA4B;IACrC;;AAGA,IAAA,IAAI,MAAM,CAAC,IAAI,KAAK,SAAS,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE;QAC9D,IAAI,KAAK,GAAG,CAAC;AACb,QAAA,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,EAAE;AAClC,YAAA,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,MAAM,IAAI,KAAK,EAAE;gBACjE,MAAM,UAAU,GAAG,KAAgC;AACnD,gBAAA,IAAI,UAAU,CAAC,IAAI,KAAK,OAAO,EAAE;AAC/B,oBAAA,KAAK,IAAI,wBAAwB,CAAC,UAAU,EAAE,QAAQ,CAAC;gBACzD;YACF;QACF;QACA,OAAO,KAAK,GAAG,CAAC,GAAG,KAAK,GAAG,4BAA4B;IACzD;AAEA,IAAA,OAAO,4BAA4B;AACrC;AAEA,MAAM,UAAU,GAA6C,EAAE;AAE/D,eAAe,YAAY,CACzB,QAAA,GAAyB,YAAY,EAAA;AAErC,IAAA,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,CAAC;IACnC,IAAI,MAAM,EAAE;AACV,QAAA,OAAO,MAAM;IACf;AACA,IAAA,MAAM,IAAI,GACR,QAAQ,KAAK;AACX,UAAE,MAAM,OAAO,8BAA8B;AAC7C,UAAE,MAAM,OAAO,kCAAkC,CAAC;AACtD,IAAA,MAAM,QAAQ,GAAG,IAAIC,qBAAS,CAAC,IAAI,CAAC;AACpC,IAAA,UAAU,CAAC,QAAQ,CAAC,GAAG,QAAQ;AAC/B,IAAA,OAAO,QAAQ;AACjB;AAEM,SAAU,gBAAgB,CAAC,KAAa,EAAA;IAC5C,IAAI,KAAK,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;AAC1C,QAAA,OAAO,QAAQ;IACjB;AACA,IAAA,OAAO,YAAY;AACrB;AAEM,SAAU,uBAAuB,CACrC,OAAoB,EACpB,aAAuC,EACvC,WAAyB,YAAY,EAAA;IAErC,MAAM,gBAAgB,GAAG,CAAC;AAO1B,IAAA,MAAM,YAAY,GAAG,CAAC,KAAc,KAAU;AAC5C,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AACxB,YAAA,KAAK,MAAM,GAAG,IAAI,KAAK,EAAE;gBACvB,MAAM,IAAI,GAAG,GAAsC;gBACnD,IAAI,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAA
K,QAAQ,EAAE;oBACjD;gBACF;gBACA,IAAI,IAAI,CAAC,IAAI,KAAKD,kBAAY,CAAC,KAAK,EAAE;oBACpC;gBACF;AAEA,gBAAA,IACE,IAAI,CAAC,IAAI,KAAKA,kBAAY,CAAC,SAAS;oBACpC,IAAI,CAAC,IAAI,KAAK,WAAW;AACzB,oBAAA,IAAI,CAAC,IAAI,KAAK,OAAO,EACrB;AACA,oBAAA,SAAS,IAAI,IAAI,CAAC,IAAI,CACpB,wBAAwB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,yBAAyB,CACrE;oBACD;gBACF;AAEA,gBAAA,IACE,IAAI,CAAC,IAAI,KAAK,UAAU;oBACxB,IAAI,CAAC,IAAI,KAAK,MAAM;AACpB,oBAAA,IAAI,CAAC,IAAI,KAAKA,kBAAY,CAAC,UAAU,EACrC;AACA,oBAAA,SAAS,IAAI,IAAI,CAAC,IAAI,CACpB,2BAA2B,CAAC,IAAI,EAAE,QAAQ,EAAE,aAAa,CAAC;AACxD,wBAAA,yBAAyB,CAC5B;oBACD;gBACF;AAEA,gBAAA,IAAI,IAAI,CAAC,IAAI,KAAKA,kBAAY,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;AAClE,oBAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;oBACpC,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;AACvD,wBAAA,SAAS,IAAI,aAAa,CAAC,QAAQ,CAAC;oBACtC;AACA,oBAAA,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;oBAChC,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;AAC/C,wBAAA,SAAS,IAAI,aAAa,CAAC,IAAI,CAAC;oBAClC;AACA,oBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM;oBACpC,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;AACnD,wBAAA,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;oBACpC;oBACA;gBACF;gBAEA,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC;AACnC,gBAAA,IAAI,WAAW,IAAI,IAAI,EAAE;oBACvB;gBACF;gBAEA,YAAY,CAAC,WAAW,CAAC;YAC3B;QACF;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACpC,YAAA,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC;QACnC;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YACpC,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC;QAC9C;AAAO,aAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;YACrC,SAAS,IAAI,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC;QAC9C;AACF,IAAA,CAAC;IAED,IAAI,SAAS,GAAG,gBAAgB;AAChC,IAAA,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC;AAC7B,IAAA,OAAO,SAAS;AAClB;AAEA;;;;AAIG;AACH,MAAM,uBAAuB,GAAG,GAAG;AAEnC;;;AAGG;MACU,kBAAkB,GAAG,OAChC,QAAA,GAAyB,YAAY,KACQ;AAC7C,IAAA,MAAM,GAAG,GAAG,MAAM,YAAY,CAAC,QAAQ,CAAC;AACxC,IAAA,MAAM,WAAW,GAAG,CAAC,IAAY,KAAa,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC;AAC7D,IAAA,MAAM,QAAQ,GAAG,QAAQ,KAAK,QAAQ;IACtC,OAAO,CAAC,OAAoB,KAAY;QACtC,MAAM
,KAAK,GAAG,uBAAuB,CAAC,OAAO,EAAE,WAAW,EAAE,QAAQ,CAAC;AACrE,QAAA,OAAO,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,uBAAuB,CAAC,GAAG,KAAK;AACtE,IAAA,CAAC;AACH;AAEA;AACO,MAAM,mBAAmB,GAAG;AACjC,IAAA,MAAM,UAAU,GAAA;;IAEhB,CAAC;IAED,KAAK,GAAA;QACH,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,YAAA,OAAO,UAAU,CAAC,GAAmB,CAAC;QACxC;IACF,CAAC;IAED,aAAa,GAAA;QACX,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC;IAC3C,CAAC;;;;;;;;;;;"}
|