@threaded/ai 1.0.1 → 1.0.3
- package/dist/index.cjs +285 -83
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +92 -11
- package/dist/index.d.ts +92 -11
- package/dist/index.js +282 -90
- package/dist/index.js.map +1 -1
- package/package.json +6 -11
package/dist/index.js
CHANGED
@@ -1,29 +1,14 @@
-var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
-}) : x)(function(x) {
-  if (typeof require !== "undefined") return require.apply(this, arguments);
-  throw Error('Dynamic require of "' + x + '" is not supported');
-});
-
 // src/schema.ts
+import { z } from "zod";
 var isStandardSchema = (schema) => {
   return schema && typeof schema === "object" && "~standard" in schema;
 };
 var convertStandardSchemaToJsonSchema = (standardSchema, name = "Schema") => {
-
-
-
-
-
-        name,
-        schema: jsonSchema
-      };
-    }
-  } catch (error) {
-  }
-  throw new Error(
-    "Standard Schema conversion requires zod v4+ with toJSONSchema support. Please install zod@^4.0.0 or provide a JsonSchema object instead."
-  );
+  const jsonSchema = z.toJSONSchema(standardSchema);
+  return {
+    name,
+    schema: jsonSchema
+  };
 };
 var convertMCPSchemaToToolSchema = (mcpSchema) => {
   if (!mcpSchema?.properties) return {};
@@ -44,6 +29,10 @@ function normalizeSchema(schema, name) {
   }
   return schema;
 }
+var convertStandardSchemaToSchemaProperties = (standardSchema) => {
+  const jsonSchema = z.toJSONSchema(standardSchema);
+  return convertMCPSchemaToToolSchema(jsonSchema);
+};
 
 // src/mcp.ts
 var createMCPTools = async (client) => {
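Taken together with the first hunk, the schema helpers now convert a Standard Schema (for example a zod v4 schema) straight through z.toJSONSchema, with no dynamic-require fallback. A minimal usage sketch; the weatherQuery schema is illustrative, not from the package:

import { z } from "zod";
import { convertStandardSchemaToJsonSchema, convertStandardSchemaToSchemaProperties } from "@threaded/ai";

// Hypothetical schema, used only to show the call shape.
const weatherQuery = z.object({
  city: z.string(),
  units: z.enum(["c", "f"]).optional()
});

// { name: "WeatherQuery", schema: <plain JSON Schema produced by z.toJSONSchema> }
const named = convertStandardSchemaToJsonSchema(weatherQuery, "WeatherQuery");

// Same conversion, flattened into per-property tool schema entries.
const properties = convertStandardSchemaToSchemaProperties(weatherQuery);
console.log(named.name, Object.keys(properties));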
@@ -82,9 +71,10 @@ var Inherit = /* @__PURE__ */ ((Inherit2) => {
 
 // src/utils.ts
 var toolConfigToToolDefinition = (tool) => {
+  const schema = isStandardSchema(tool.schema) ? convertStandardSchemaToSchemaProperties(tool.schema) : tool.schema;
   const properties = {};
   const required = [];
-  for (const [key, prop] of Object.entries(
+  for (const [key, prop] of Object.entries(schema)) {
     properties[key] = convertSchemaProperty(prop);
     if (!prop.optional) {
       required.push(key);
@@ -150,7 +140,66 @@ var maxCalls = (toolConfig, maxCalls2) => ({
   _maxCalls: maxCalls2
 });
 
+// src/embed.ts
+import { pipeline } from "@huggingface/transformers";
+var modelCache = /* @__PURE__ */ new Map();
+var embed = async (model2, text, config) => {
+  if (model2.startsWith("openai/")) {
+    const modelName = model2.replace("openai/", "");
+    const apiKey = getKey("openai") || process.env.OPENAI_API_KEY;
+    if (!apiKey) {
+      throw new Error("OpenAI API key not found");
+    }
+    const body = {
+      model: modelName,
+      input: text
+    };
+    if (config?.dimensions) {
+      body.dimensions = config.dimensions;
+    }
+    const response = await fetch("https://api.openai.com/v1/embeddings", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${apiKey}`
+      },
+      body: JSON.stringify(body)
+    });
+    if (!response.ok) {
+      const error = await response.text();
+      throw new Error(`OpenAI API error: ${error}`);
+    }
+    const data = await response.json();
+    return data.data[0].embedding;
+  }
+  if (!modelCache.has(model2)) {
+    const extractor2 = await pipeline("feature-extraction", model2, {
+      dtype: "fp32"
+    });
+    modelCache.set(model2, extractor2);
+  }
+  const extractor = modelCache.get(model2);
+  const result = await extractor(text, { pooling: "mean", normalize: true });
+  return Array.from(result.data);
+};
+
 // src/providers/openai.ts
+var appendToolCalls = (toolCalls, tcchunklist) => {
+  for (const tcchunk of tcchunklist) {
+    while (toolCalls.length <= tcchunk.index) {
+      toolCalls.push({
+        id: "",
+        type: "function",
+        function: { name: "", arguments: "" }
+      });
+    }
+    const tc = toolCalls[tcchunk.index];
+    tc.id += tcchunk.id || "";
+    tc.function.name += tcchunk.function?.name || "";
+    tc.function.arguments += tcchunk.function?.arguments || "";
+  }
+  return toolCalls;
+};
 var callOpenAI = async (config, ctx) => {
   const { model: model2, instructions, schema } = config;
   const apiKey = getKey("openai") || process.env.OPENAI_API_KEY;
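The new embed helper above (also added to the export list at the bottom of this file) routes model ids with an "openai/" prefix to the OpenAI embeddings API and treats anything else as a @huggingface/transformers feature-extraction model, cached per model id. A usage sketch based on the compiled code above; the model ids are examples, not defaults shipped by the package:

import { embed } from "@threaded/ai";

// Remote path: needs an OpenAI key via getKey("openai") or OPENAI_API_KEY.
const remote = await embed("openai/text-embedding-3-small", "hello world", { dimensions: 256 });

// Local path: downloads the model on first use, then reuses the cached pipeline.
const local = await embed("Xenova/all-MiniLM-L6-v2", "hello world");

console.log(remote.length, local.length); // both resolve to plain arrays of numbers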
@@ -217,17 +266,19 @@ var handleOpenAIStream = async (response, ctx) => {
   const decoder = new TextDecoder();
   let fullContent = "";
   let toolCalls = [];
-
+  let buffer = "";
   try {
     while (true) {
       const { done, value } = await reader.read();
       if (done) break;
-
-      const lines =
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split("\n");
+      buffer = lines.pop() || "";
       for (const line of lines) {
         if (line.startsWith("data: ")) {
-          const data = line.slice(6);
+          const data = line.slice(6).trim();
           if (data === "[DONE]") continue;
+          if (!data) continue;
           try {
             const parsed = JSON.parse(data);
             const delta = parsed.choices?.[0]?.delta;
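The hunk above (and the matching changes in the Anthropic and Google handlers below) fixes SSE parsing when a network chunk ends mid-line: the undecoded remainder is kept in buffer and prepended to the next chunk. The same pattern in isolation, as a self-contained sketch:

// reader is a ReadableStreamDefaultReader from response.body.getReader()
async function readSSE(reader, onEvent) {
  const decoder = new TextDecoder();
  let buffer = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
    buffer = lines.pop() || ""; // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (!line.startsWith("data: ")) continue;
      const data = line.slice(6).trim();
      if (!data || data === "[DONE]") continue;
      onEvent(JSON.parse(data));
    }
  }
}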
@@ -238,25 +289,7 @@ var handleOpenAIStream = async (response, ctx) => {
               }
             }
             if (delta?.tool_calls) {
-
-                const { index } = toolCall;
-                if (!toolCallsBuffer[index]) {
-                  toolCallsBuffer[index] = {
-                    id: toolCall.id || "",
-                    type: "function",
-                    function: { name: "", arguments: "" }
-                  };
-                }
-                if (toolCall.id) {
-                  toolCallsBuffer[index].id = toolCall.id;
-                }
-                if (toolCall.function?.name) {
-                  toolCallsBuffer[index].function.name += toolCall.function.name;
-                }
-                if (toolCall.function?.arguments) {
-                  toolCallsBuffer[index].function.arguments += toolCall.function.arguments;
-                }
-              }
+              toolCalls = appendToolCalls(toolCalls, delta.tool_calls);
             }
           } catch (e) {
           }
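With this rewrite, streamed tool-call fragments are folded in through the appendToolCalls helper added earlier in the diff (an internal function, not exported). Roughly how it accumulates OpenAI-style deltas, shown with made-up chunk data:

let toolCalls = [];
// Two chunks for the same tool call arrive separately during streaming.
toolCalls = appendToolCalls(toolCalls, [
  { index: 0, id: "call_1", function: { name: "get_weather", arguments: '{"ci' } }
]);
toolCalls = appendToolCalls(toolCalls, [
  { index: 0, function: { arguments: 'ty":"Oslo"}' } }
]);
// toolCalls[0] is now:
// { id: "call_1", type: "function", function: { name: "get_weather", arguments: '{"city":"Oslo"}' } }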
@@ -266,7 +299,6 @@ var handleOpenAIStream = async (response, ctx) => {
   } finally {
     reader.releaseLock();
   }
-  toolCalls = Object.values(toolCallsBuffer);
   const msg = {
     role: "assistant",
     content: fullContent
@@ -282,18 +314,66 @@ var handleOpenAIStream = async (response, ctx) => {
 };
 
 // src/providers/anthropic.ts
+var convertToAnthropicFormat = (messages) => {
+  const result = [];
+  let i = 0;
+  while (i < messages.length) {
+    const msg = messages[i];
+    if (msg.role === "system") {
+      i++;
+      continue;
+    }
+    if (msg.role === "assistant") {
+      if (msg.tool_calls) {
+        result.push({
+          role: "assistant",
+          content: msg.tool_calls.map((tc) => ({
+            type: "tool_use",
+            id: tc.id,
+            name: tc.function.name,
+            input: JSON.parse(tc.function.arguments)
+          }))
+        });
+      } else {
+        result.push({
+          role: "assistant",
+          content: msg.content
+        });
+      }
+      i++;
+    } else if (msg.role === "tool") {
+      const toolResults = [];
+      while (i < messages.length && messages[i].role === "tool") {
+        const toolMsg = messages[i];
+        toolResults.push({
+          type: "tool_result",
+          tool_use_id: toolMsg.tool_call_id,
+          content: toolMsg.content
+        });
+        i++;
+      }
+      result.push({
+        role: "user",
+        content: toolResults
+      });
+    } else {
+      result.push(msg);
+      i++;
+    }
+  }
+  return result;
+};
 var callAnthropic = async (config, ctx) => {
   const { model: model2, instructions, schema } = config;
   const apiKey = getKey("anthropic") || process.env.ANTHROPIC_API_KEY;
   if (!apiKey) {
     throw new Error("Anthropic API key not found");
   }
-  const messages = [...ctx.history];
   let system = instructions;
-  if (
-    system =
-    messages.shift();
+  if (ctx.history[0]?.role === "system") {
+    system = ctx.history[0].content;
   }
+  const messages = convertToAnthropicFormat(ctx.history);
   if (schema) {
     const schemaPrompt = `
 
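convertToAnthropicFormat (internal to this module) maps an OpenAI-style history onto Anthropic's content-block format: assistant tool_calls become tool_use blocks, and consecutive tool messages collapse into a single user message of tool_result blocks. For example, with made-up messages:

const openAIHistory = [
  { role: "assistant", tool_calls: [{ id: "t1", function: { name: "get_weather", arguments: '{"city":"Oslo"}' } }] },
  { role: "tool", tool_call_id: "t1", content: '{"temp":4}' }
];
// convertToAnthropicFormat(openAIHistory) returns:
// [
//   { role: "assistant", content: [{ type: "tool_use", id: "t1", name: "get_weather", input: { city: "Oslo" } }] },
//   { role: "user", content: [{ type: "tool_result", tool_use_id: "t1", content: '{"temp":4}' }] }
// ]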
@@ -368,15 +448,18 @@ var handleAnthropicStream = async (response, ctx) => {
   const decoder = new TextDecoder();
   let fullContent = "";
   const toolCalls = [];
+  let buffer = "";
   try {
     while (true) {
       const { done, value } = await reader.read();
       if (done) break;
-
-      const lines =
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split("\n");
+      buffer = lines.pop() || "";
       for (const line of lines) {
         if (line.startsWith("data: ")) {
-          const data = line.slice(6);
+          const data = line.slice(6).trim();
+          if (!data) continue;
           try {
             const parsed = JSON.parse(data);
             if (parsed.type === "content_block_delta" && parsed.delta?.text) {
@@ -392,10 +475,17 @@ var handleAnthropicStream = async (response, ctx) => {
                 type: "function",
                 function: {
                   name: toolUse.name,
-                  arguments:
-                }
+                  arguments: ""
+                },
+                index: parsed.index
               });
             }
+            if (parsed.type === "content_block_delta" && parsed.delta?.type === "input_json_delta") {
+              const toolCall = toolCalls.find((tc) => tc.index === parsed.index);
+              if (toolCall) {
+                toolCall.function.arguments += parsed.delta.partial_json;
+              }
+            }
           } catch (e) {
           }
         }
@@ -409,7 +499,7 @@ var handleAnthropicStream = async (response, ctx) => {
     content: fullContent
   };
   if (toolCalls.length > 0) {
-    msg.tool_calls = toolCalls;
+    msg.tool_calls = toolCalls.map(({ index, ...tc }) => tc);
   }
   return {
     ...ctx,
@@ -505,15 +595,18 @@ var handleGoogleStream = async (response, ctx) => {
   const decoder = new TextDecoder();
   let fullContent = "";
   const toolCalls = [];
+  let buffer = "";
   try {
     while (true) {
       const { done, value } = await reader.read();
       if (done) break;
-
-      const lines =
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split("\n");
+      buffer = lines.pop() || "";
       for (const line of lines) {
         if (line.startsWith("data: ")) {
-          const data = line.slice(6);
+          const data = line.slice(6).trim();
+          if (!data) continue;
           try {
             const parsed = JSON.parse(data);
             const candidate = parsed.candidates?.[0];
@@ -621,10 +714,16 @@ var model = ({
   schema
 } = {}) => {
   return async (ctxOrMessage) => {
-    const ctx = typeof ctxOrMessage === "string" ?
-
-
-
+    const ctx = typeof ctxOrMessage === "string" ? (
+      // model()("hello!");
+      {
+        history: [{ role: "user", content: ctxOrMessage }],
+        tools: []
+      }
+    ) : (
+      // model()(/* few shot or history */);
+      ctxOrMessage
+    );
     const normalizedSchema = schema ? normalizeSchema(schema) : void 0;
     const systemMessage = ctx.history.find((m) => m.role === "system");
     const instructions = systemMessage?.content;
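The inline comments in the new code document the two accepted call shapes: a bare string is wrapped into a one-message user history, while a context object passes straight through. Roughly, assuming model is exported as in 1.0.1 and omitting its config options:

const step = model();

// Shorthand: the string becomes { history: [{ role: "user", content: "hello!" }], tools: [] }.
const ctx1 = await step("hello!");

// Full form: pass a context/history object through unchanged.
const ctx2 = await step({ history: [{ role: "user", content: "hello!" }], tools: [] });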
@@ -653,29 +752,18 @@ var executeTools = async (ctx) => {
     approvalCallback,
     parallel = false,
     retryCount = 0,
-    approvalId
+    approvalId,
+    executeOnApproval = false
   } = toolConfig;
-  const approvalPromises = calls.map(async (call) => {
-    if (requireApproval) {
-      let approved;
-      if (approvalCallback) {
-        approved = await approvalCallback(call);
-      } else {
-        const response = await requestApproval(call, approvalId);
-        approved = response.approved;
-      }
-      return { call, approved };
-    } else {
-      return { call, approved: true };
-    }
-  });
-  const approvals = await Promise.all(approvalPromises);
   const updatedCounts = { ...ctx.toolCallCounts || {} };
-  const runCall = async (call) => {
-
-    if (!approval?.approved) {
+  const runCall = async (call, approved) => {
+    if (!approved) {
       if (ctx.stream) {
-        ctx.stream({
+        ctx.stream({
+          type: "tool_error",
+          call,
+          error: "Tool execution denied by user"
+        });
       }
       return {
         call,
@@ -730,7 +818,51 @@ var executeTools = async (ctx) => {
     }
     return { call, result: { error } };
   };
-
+  if (executeOnApproval && requireApproval) {
+    const resultPromises = calls.map(async (call) => {
+      let approved;
+      if (approvalCallback) {
+        approved = await approvalCallback(call);
+      } else {
+        const response = await requestApproval(call, approvalId);
+        approved = response.approved;
+      }
+      return runCall(call, approved);
+    });
+    const results2 = await Promise.all(resultPromises);
+    return {
+      ...ctx,
+      history: [
+        ...ctx.history,
+        ...results2.map(({ call, result }) => ({
+          role: "tool",
+          tool_call_id: call.id,
+          content: JSON.stringify(result)
+        }))
+      ],
+      toolCallCounts: updatedCounts
+    };
+  }
+  const approvalPromises = calls.map(async (call) => {
+    if (requireApproval) {
+      let approved;
+      if (approvalCallback) {
+        approved = await approvalCallback(call);
+      } else {
+        const response = await requestApproval(call, approvalId);
+        approved = response.approved;
+      }
+      return { call, approved };
+    } else {
+      return { call, approved: true };
+    }
+  });
+  const approvals = await Promise.all(approvalPromises);
+  const runCallWithApproval = async (call) => {
+    const approval = approvals.find((a) => a.call.id === call.id);
+    return runCall(call, approval?.approved ?? true);
+  };
+  const results = parallel ? await Promise.all(calls.map(runCallWithApproval)) : await runCallsSequentially(calls, runCallWithApproval);
   return {
     ...ctx,
     history: [
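This branch is driven by a new executeOnApproval flag on the tool config (default false): combined with requireApproval, each call runs as soon as its own approval resolves instead of waiting for the whole batch. An illustrative fragment using only the fields visible in this diff; the rest of a real tool config is omitted:

const toolConfig = {
  requireApproval: true,
  executeOnApproval: true, // run each call as soon as it is individually approved
  approvalCallback: async (call) => askUser(call) // askUser is a hypothetical prompt helper
};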
@@ -796,14 +928,15 @@ var createThread = (id, store) => {
     }
   };
 };
-var defaultStore = createMemoryStore();
 var threads = /* @__PURE__ */ new Map();
-var getOrCreateThread = (id, store
-
-
+var getOrCreateThread = (id, store) => {
+  const cacheKey = store ? `${id}-${store}` : id;
+  if (threads.has(cacheKey)) {
+    return threads.get(cacheKey);
   }
-  const
-
+  const threadStore = store || createMemoryStore();
+  const thread = createThread(id, threadStore);
+  threads.set(cacheKey, thread);
   return thread;
 };
 
@@ -1034,6 +1167,9 @@ var scopeContext = (config, ctx) => {
       scopedCtx.history = [{ role: "system", content: config.system }, ...scopedCtx.history];
     }
   }
+  if (config.stream) {
+    scopedCtx.stream = config.stream;
+  }
   return scopedCtx;
 };
 var scope = (config, ...steps) => {
@@ -1055,13 +1191,68 @@ var scope = (config, ...steps) => {
     };
   };
 };
+
+// src/utils/rateLimited.ts
+var rateLimited = (config) => (fn) => {
+  const { rps, burst, concurrency } = config;
+  let tokens = burst;
+  let inFlight = 0;
+  const queue = [];
+  let intervalId = null;
+  const refillTokens = () => {
+    tokens = Math.min(tokens + 1, burst);
+    processQueue();
+  };
+  const startInterval = () => {
+    if (!intervalId) {
+      intervalId = setInterval(refillTokens, 1e3 / rps);
+    }
+  };
+  const stopInterval = () => {
+    if (intervalId && queue.length === 0 && inFlight === 0) {
+      clearInterval(intervalId);
+      intervalId = null;
+    }
+  };
+  const processQueue = () => {
+    while (queue.length > 0 && tokens > 0 && inFlight < concurrency) {
+      tokens--;
+      inFlight++;
+      const item = queue.shift();
+      item.fn().then((result) => {
+        inFlight--;
+        item.resolve(result);
+        processQueue();
+        stopInterval();
+      }).catch((error) => {
+        inFlight--;
+        item.reject(error);
+        processQueue();
+        stopInterval();
+      });
+    }
+  };
+  return (async (...args) => {
+    return new Promise((resolve, reject) => {
+      queue.push({
+        fn: () => fn(...args),
+        resolve,
+        reject
+      });
+      startInterval();
+      processQueue();
+    });
+  });
+};
 export {
   Inherit,
   appendToLastRequest,
   compose,
   convertMCPSchemaToToolSchema,
   convertStandardSchemaToJsonSchema,
+  convertStandardSchemaToSchemaProperties,
   createMCPTools,
+  embed,
   everyNMessages,
   everyNTokens,
   generateApprovalToken,
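rateLimited is a small token-bucket wrapper: rps sets the refill rate, burst the bucket size, and concurrency the cap on in-flight calls; the interval timer stops once the queue drains. A usage sketch based on the implementation above:

import { rateLimited } from "@threaded/ai";

// At most 2 calls/second on average, bursts of up to 5, 3 concurrent at a time.
const limitedFetch = rateLimited({ rps: 2, burst: 5, concurrency: 3 })(
  (url) => fetch(url).then((r) => r.json())
);

const results = await Promise.all(
  ["https://example.com/a.json", "https://example.com/b.json"].map(limitedFetch)
);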
@@ -1075,6 +1266,7 @@ export {
   onApprovalRequested,
   onApprovalResolved,
   parseModelName,
+  rateLimited,
   removeApprovalListener,
   requestApproval,
   resolveApproval,