@librechat/agents 3.1.55 → 3.1.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/cjs/graphs/Graph.cjs +1 -1
  2. package/dist/cjs/llm/openai/index.cjs +1 -1
  3. package/dist/cjs/main.cjs +1 -0
  4. package/dist/cjs/main.cjs.map +1 -1
  5. package/dist/cjs/messages/format.cjs +118 -32
  6. package/dist/cjs/messages/format.cjs.map +1 -1
  7. package/dist/cjs/run.cjs +5 -2
  8. package/dist/cjs/run.cjs.map +1 -1
  9. package/dist/cjs/stream.cjs +9 -0
  10. package/dist/cjs/stream.cjs.map +1 -1
  11. package/dist/cjs/tools/ToolNode.cjs +1 -1
  12. package/dist/cjs/utils/tokens.cjs +33 -45
  13. package/dist/cjs/utils/tokens.cjs.map +1 -1
  14. package/dist/esm/graphs/Graph.mjs +1 -1
  15. package/dist/esm/llm/openai/index.mjs +1 -1
  16. package/dist/esm/main.mjs +1 -1
  17. package/dist/esm/messages/format.mjs +119 -33
  18. package/dist/esm/messages/format.mjs.map +1 -1
  19. package/dist/esm/run.mjs +5 -2
  20. package/dist/esm/run.mjs.map +1 -1
  21. package/dist/esm/stream.mjs +9 -0
  22. package/dist/esm/stream.mjs.map +1 -1
  23. package/dist/esm/tools/ToolNode.mjs +1 -1
  24. package/dist/esm/utils/tokens.mjs +33 -46
  25. package/dist/esm/utils/tokens.mjs.map +1 -1
  26. package/dist/types/types/graph.d.ts +2 -0
  27. package/dist/types/types/stream.d.ts +2 -0
  28. package/dist/types/utils/tokens.d.ts +6 -18
  29. package/package.json +3 -2
  30. package/src/messages/ensureThinkingBlock.test.ts +502 -27
  31. package/src/messages/format.ts +155 -44
  32. package/src/run.ts +6 -2
  33. package/src/scripts/bedrock-cache-debug.ts +15 -15
  34. package/src/scripts/code_exec_multi_session.ts +8 -13
  35. package/src/scripts/image.ts +2 -1
  36. package/src/scripts/multi-agent-parallel-start.ts +3 -4
  37. package/src/scripts/multi-agent-sequence.ts +3 -4
  38. package/src/scripts/single-agent-metadata-test.ts +3 -6
  39. package/src/scripts/test-tool-before-handoff-role-order.ts +2 -3
  40. package/src/scripts/test-tools-before-handoff.ts +2 -3
  41. package/src/scripts/tools.ts +1 -7
  42. package/src/specs/token-memoization.test.ts +35 -34
  43. package/src/specs/tokens.test.ts +64 -0
  44. package/src/stream.ts +12 -0
  45. package/src/types/graph.ts +2 -0
  46. package/src/types/stream.ts +2 -0
  47. package/src/utils/tokens.ts +43 -54
package/src/stream.ts CHANGED
@@ -576,6 +576,16 @@ export function createContentAggregator(): t.ContentAggregatorResult {
576
576
  type: ToolCallTypes.TOOL_CALL,
577
577
  };
578
578
 
579
+ const auth =
580
+ contentPart.tool_call.auth ?? existingContent?.tool_call?.auth;
581
+ const expiresAt =
582
+ contentPart.tool_call.expires_at ??
583
+ existingContent?.tool_call?.expires_at;
584
+ if (auth != null) {
585
+ newToolCall.auth = auth;
586
+ newToolCall.expires_at = expiresAt;
587
+ }
588
+
579
589
  if (finalUpdate) {
580
590
  newToolCall.progress = 1;
581
591
  newToolCall.output = contentPart.tool_call.output;
@@ -713,6 +723,8 @@ export function createContentAggregator(): t.ContentAggregatorResult {
713
723
  args: toolCallDelta.args ?? '',
714
724
  name: toolCallDelta.name,
715
725
  id: toolCallId,
726
+ auth: runStepDelta.delta.auth,
727
+ expires_at: runStepDelta.delta.expires_at,
716
728
  },
717
729
  };
718
730
 
@@ -300,6 +300,8 @@ export type PartMetadata = {
300
300
  status?: string;
301
301
  action?: boolean;
302
302
  output?: string;
303
+ auth?: string;
304
+ expires_at?: number;
303
305
  };
304
306
 
305
307
  export type ModelEndData =
@@ -164,6 +164,8 @@ export type ToolCallsDetails = {
164
164
  export type ToolCallDelta = {
165
165
  type: StepTypes;
166
166
  tool_calls?: ToolCallChunk[]; // #new
167
+ auth?: string;
168
+ expires_at?: number;
167
169
  };
168
170
 
169
171
  export type AgentToolCall =
@@ -1,7 +1,34 @@
1
- import { Tiktoken } from 'js-tiktoken/lite';
1
+ import { Tokenizer } from 'ai-tokenizer';
2
2
  import type { BaseMessage } from '@langchain/core/messages';
3
3
  import { ContentTypes } from '@/common/enum';
4
4
 
5
+ export type EncodingName = 'o200k_base' | 'claude';
6
+
7
+ const tokenizers: Partial<Record<EncodingName, Tokenizer>> = {};
8
+
9
+ async function getTokenizer(
10
+ encoding: EncodingName = 'o200k_base'
11
+ ): Promise<Tokenizer> {
12
+ const cached = tokenizers[encoding];
13
+ if (cached) {
14
+ return cached;
15
+ }
16
+ const data =
17
+ encoding === 'claude'
18
+ ? await import('ai-tokenizer/encoding/claude')
19
+ : await import('ai-tokenizer/encoding/o200k_base');
20
+ const instance = new Tokenizer(data);
21
+ tokenizers[encoding] = instance;
22
+ return instance;
23
+ }
24
+
25
+ export function encodingForModel(model: string): EncodingName {
26
+ if (model.toLowerCase().includes('claude')) {
27
+ return 'claude';
28
+ }
29
+ return 'o200k_base';
30
+ }
31
+
5
32
  export function getTokenCountForMessage(
6
33
  message: BaseMessage,
7
34
  getTokenCount: (text: string) => number
@@ -60,70 +87,32 @@ export function getTokenCountForMessage(
60
87
  return numTokens;
61
88
  }
62
89
 
63
- let encoderPromise: Promise<Tiktoken> | undefined;
64
- let tokenCounterPromise: Promise<(message: BaseMessage) => number> | undefined;
65
-
66
- async function getSharedEncoder(): Promise<Tiktoken> {
67
- if (encoderPromise) {
68
- return encoderPromise;
69
- }
70
- encoderPromise = (async (): Promise<Tiktoken> => {
71
- const res = await fetch('https://tiktoken.pages.dev/js/o200k_base.json');
72
- const o200k_base = await res.json();
73
- return new Tiktoken(o200k_base);
74
- })();
75
- return encoderPromise;
76
- }
77
-
78
90
  /**
79
- * Creates a singleton token counter function that reuses the same encoder instance.
80
- * This avoids creating multiple function closures and prevents potential memory issues.
91
+ * Creates a token counter function using the specified encoding.
92
+ * Lazily loads the encoding data on first use via dynamic import.
81
93
  */
82
- export const createTokenCounter = async (): Promise<
83
- (message: BaseMessage) => number
84
- > => {
85
- if (tokenCounterPromise) {
86
- return tokenCounterPromise;
87
- }
88
-
89
- tokenCounterPromise = (async (): Promise<
90
- (message: BaseMessage) => number
91
- > => {
92
- const enc = await getSharedEncoder();
93
- const countTokens = (text: string): number => enc.encode(text).length;
94
- return (message: BaseMessage): number =>
95
- getTokenCountForMessage(message, countTokens);
96
- })();
97
-
98
- return tokenCounterPromise;
94
+ export const createTokenCounter = async (
95
+ encoding: EncodingName = 'o200k_base'
96
+ ): Promise<(message: BaseMessage) => number> => {
97
+ const tok = await getTokenizer(encoding);
98
+ const countTokens = (text: string): number => tok.count(text);
99
+ return (message: BaseMessage): number =>
100
+ getTokenCountForMessage(message, countTokens);
99
101
  };
100
102
 
101
- /**
102
- * Utility to manage the token encoder lifecycle explicitly.
103
- * Useful for applications that need fine-grained control over resource management.
104
- */
103
+ /** Utility to manage the token encoder lifecycle explicitly. */
105
104
  export const TokenEncoderManager = {
106
- /**
107
- * Pre-initializes the encoder. This can be called during app startup
108
- * to avoid lazy loading delays later.
109
- */
110
105
  async initialize(): Promise<void> {
111
- await getSharedEncoder();
106
+ // No-op: ai-tokenizer is synchronously initialized from bundled data.
112
107
  },
113
108
 
114
- /**
115
- * Clears the cached encoder and token counter.
116
- * Useful for testing or when you need to force a fresh reload.
117
- */
118
109
  reset(): void {
119
- encoderPromise = undefined;
120
- tokenCounterPromise = undefined;
110
+ for (const key of Object.keys(tokenizers)) {
111
+ delete tokenizers[key as EncodingName];
112
+ }
121
113
  },
122
114
 
123
- /**
124
- * Checks if the encoder has been initialized.
125
- */
126
115
  isInitialized(): boolean {
127
- return encoderPromise !== undefined;
116
+ return Object.keys(tokenizers).length > 0;
128
117
  },
129
118
  };