@cuylabs/agent-core 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/dist/{builder-BKkipazh.d.ts → builder-UpOWQMW3.d.ts} +2 -2
- package/dist/{chunk-3C4VKG4P.js → chunk-4BDA7DQY.js} +273 -807
- package/dist/chunk-7VKQ4WPB.js +73 -0
- package/dist/chunk-BFM2YHNM.js +222 -0
- package/dist/chunk-CAA7FHIH.js +280 -0
- package/dist/chunk-KUVSERLJ.js +50 -0
- package/dist/chunk-N6HWIEEA.js +423 -0
- package/dist/chunk-N7P4PN3O.js +84 -0
- package/dist/{chunk-QWFMX226.js → chunk-RFEKJKTO.js} +252 -13
- package/dist/chunk-RZITT45F.js +202 -0
- package/dist/{chunk-X635CM2F.js → chunk-SQU2AJHO.js} +1 -1
- package/dist/chunk-VNQBHPCT.js +398 -0
- package/dist/{chunk-QAQADS4X.js → chunk-WWYYNWEW.js} +2 -1
- package/dist/{chunk-O2ZCFQL6.js → chunk-YSLSEQ6B.js} +105 -220
- package/dist/context/index.js +1 -1
- package/dist/errors/index.d.ts +11 -0
- package/dist/errors/index.js +16 -0
- package/dist/events-CE72w8W4.d.ts +149 -0
- package/dist/host/index.d.ts +45 -0
- package/dist/host/index.js +8 -0
- package/dist/{index-DZQJD_hp.d.ts → index-CWSchSql.d.ts} +42 -51
- package/dist/index.d.ts +98 -190
- package/dist/index.js +476 -939
- package/dist/inference/index.d.ts +62 -0
- package/dist/inference/index.js +27 -0
- package/dist/llm-error-D93FNNLY.d.ts +32 -0
- package/dist/middleware/index.d.ts +246 -5
- package/dist/middleware/index.js +7 -3
- package/dist/models/index.d.ts +226 -3
- package/dist/models/index.js +41 -3
- package/dist/presets/index.d.ts +53 -0
- package/dist/presets/index.js +28 -0
- package/dist/prompt/index.d.ts +12 -7
- package/dist/reasoning/index.d.ts +53 -8
- package/dist/reasoning/index.js +2 -7
- package/dist/{registry-CuRWWtcT.d.ts → registry-DwYqsQkX.d.ts} +1 -1
- package/dist/{runner-G1wxEgac.d.ts → runner-e2YRcUoX.d.ts} +82 -148
- package/dist/runtime/index.d.ts +44 -7
- package/dist/runtime/index.js +16 -5
- package/dist/safety/index.d.ts +38 -0
- package/dist/safety/index.js +12 -0
- package/dist/scope/index.d.ts +10 -0
- package/dist/scope/index.js +14 -0
- package/dist/{session-manager-Uawm2Le7.d.ts → session-manager-B_CWGTsl.d.ts} +1 -1
- package/dist/signal/index.d.ts +28 -0
- package/dist/signal/index.js +6 -0
- package/dist/skill/index.d.ts +8 -5
- package/dist/storage/index.d.ts +2 -2
- package/dist/sub-agent/index.d.ts +17 -8
- package/dist/tool/index.d.ts +9 -4
- package/dist/tool/index.js +4 -3
- package/dist/tool-BHbyUAy3.d.ts +150 -0
- package/dist/{tool-DYp6-cC3.d.ts → tool-DLXAR9Ce.d.ts} +5 -99
- package/dist/tracking/index.d.ts +3 -1
- package/dist/{tool-pFAnJc5Y.d.ts → types-BfNpU8NS.d.ts} +1 -150
- package/dist/types-BnpEOYV-.d.ts +50 -0
- package/dist/types-CHiPh8U2.d.ts +100 -0
- package/dist/types-CQL-SvTn.d.ts +29 -0
- package/dist/types-CWm-7rvB.d.ts +55 -0
- package/dist/types-KKDrdU9Y.d.ts +325 -0
- package/dist/{resolver-DOfZ-xuk.d.ts → types-QA4WhEfz.d.ts} +1 -117
- package/dist/types-QKHHQLLq.d.ts +336 -0
- package/dist/types-YuWV4ag7.d.ts +72 -0
- package/package.json +74 -8
- package/dist/capabilities/index.d.ts +0 -97
- package/dist/capabilities/index.js +0 -46
- package/dist/chunk-6TDTQJ4P.js +0 -116
- package/dist/chunk-FG4MD5MU.js +0 -54
- package/dist/config-D2xeGEHK.d.ts +0 -52
- package/dist/identifiers-BLUxFqV_.d.ts +0 -12
- package/dist/index-ipP3_ztp.d.ts +0 -198
- package/dist/network-D76DS5ot.d.ts +0 -5
- package/dist/types-BWo810L_.d.ts +0 -648
|
@@ -0,0 +1,423 @@
|
|
|
1
|
+
import {
|
|
2
|
+
executeAgentToolCall
|
|
3
|
+
} from "./chunk-7VKQ4WPB.js";
|
|
4
|
+
import {
|
|
5
|
+
buildReasoningOptionsSync
|
|
6
|
+
} from "./chunk-SQU2AJHO.js";
|
|
7
|
+
import {
|
|
8
|
+
snapshotScope
|
|
9
|
+
} from "./chunk-N7P4PN3O.js";
|
|
10
|
+
import {
|
|
11
|
+
LLMError,
|
|
12
|
+
isRetryable
|
|
13
|
+
} from "./chunk-RZITT45F.js";
|
|
14
|
+
|
|
15
|
+
// src/inference/toolset.ts
|
|
16
|
+
import { tool, zodSchema } from "ai";
|
|
17
|
+
/**
 * Build an AI-SDK tool set from the agent tool registry.
 * Each tool is initialized with the working directory; in "auto" execution
 * mode the tool gets an `execute` callback that routes through
 * executeAgentToolCall, otherwise the tool is declared schema-only so the
 * caller can execute tool calls manually.
 */
async function buildToolSet(options) {
  const mode = options.executionMode ?? "auto";
  const result = {};
  for (const [id, info] of Object.entries(options.tools)) {
    const initialized = await info.init({ cwd: options.cwd });
    const definition = {
      description: initialized.description,
      inputSchema: zodSchema(initialized.parameters)
    };
    if (mode === "auto") {
      definition.execute = async (params) => {
        const call = await executeAgentToolCall({
          toolName: id,
          tool: info,
          params,
          cwd: options.cwd,
          abort: options.abort,
          sessionID: options.sessionID,
          messageID: options.messageID,
          // Optional collaborators are only forwarded when present.
          ...(options.host ? { host: options.host } : {}),
          ...(options.turnTracker ? { turnTracker: options.turnTracker } : {}),
          ...(options.middleware ? { middleware: options.middleware } : {})
        });
        return call.output;
      };
    }
    result[id] = tool(definition);
  }
  return result;
}
|
|
44
|
+
|
|
45
|
+
// src/inference/stream.ts
|
|
46
|
+
import {
|
|
47
|
+
stepCountIs,
|
|
48
|
+
streamText
|
|
49
|
+
} from "ai";
|
|
50
|
+
|
|
51
|
+
// src/retry.ts
// Baseline retry policy. Delay grows exponentially per attempt (see
// calculateDelay) and is capped at maxDelayMs.
var DEFAULT_RETRY_CONFIG = {
  maxAttempts: 3,      // total attempts, including the first call
  initialDelayMs: 2e3, // 2s before the first retry
  backoffFactor: 2,    // exponential growth factor per attempt
  maxDelayMs: 3e4,     // hard cap of 30s per wait
  jitter: true         // randomize delay (+/-25%) to avoid thundering herd
};
|
|
59
|
+
/**
 * Fresh mutable bookkeeping record for one withRetry run:
 * attempt counter, collected errors, and the last computed delay.
 */
function createRetryState() {
  const initialState = {
    attempt: 0,
    errors: [],
    canRetry: true,
    nextDelayMs: undefined
  };
  return initialState;
}
|
|
67
|
+
/**
 * Compute the wait before retry number `attempt` (1-based).
 * A server-provided hint (error.retryDelayMs, e.g. from Retry-After) wins
 * outright; otherwise exponential backoff capped at maxDelayMs, with an
 * optional +/-25% jitter.
 */
function calculateDelay(attempt, error, config) {
  if (error?.retryDelayMs) {
    return error.retryDelayMs;
  }
  const exponential = config.initialDelayMs * Math.pow(config.backoffFactor, attempt - 1);
  const capped = Math.min(exponential, config.maxDelayMs);
  if (!config.jitter) {
    return Math.round(capped);
  }
  const spread = capped * 0.25;
  const offset = (Math.random() - 0.5) * 2 * spread;
  // Jitter can push below zero for tiny delays; clamp.
  return Math.max(0, Math.round(capped + offset));
}
|
|
80
|
+
/**
 * Abortable delay. Resolves after `ms` milliseconds; rejects with a
 * DOMException named "AbortError" if `signal` is already aborted or fires
 * while waiting. The timer is cleared on abort so the process can exit.
 */
async function sleep(ms, signal) {
  return new Promise((resolve, reject) => {
    if (signal?.aborted) {
      reject(new DOMException("Aborted", "AbortError"));
      return;
    }
    let timer;
    const onAbort = () => {
      clearTimeout(timer);
      reject(new DOMException("Aborted", "AbortError"));
    };
    timer = setTimeout(() => {
      // Detach the abort listener so the signal does not retain us.
      signal?.removeEventListener("abort", onAbort);
      resolve();
    }, ms);
    signal?.addEventListener("abort", onAbort, { once: true });
  });
}
|
|
101
|
+
/**
 * Execute `fn` until it succeeds or retries are exhausted.
 * `fn` receives the 1-based attempt number. Non-retryable errors, exhausted
 * attempts, or an aborted signal end the loop with a wrapping LLMError that
 * preserves the last error's metadata and carries it as `cause`.
 */
async function withRetry(fn, config, signal) {
  const mergedConfig = { ...DEFAULT_RETRY_CONFIG, ...config };
  const state = createRetryState();
  while (true) {
    state.attempt++;
    try {
      return await fn(state.attempt);
    } catch (error) {
      // Normalize whatever was thrown into an LLMError.
      const llmError = LLMError.from(error);
      state.errors.push(llmError);
      // Retry only while attempts remain, the error category is retryable,
      // and the caller has not aborted.
      const shouldRetry2 = state.attempt < mergedConfig.maxAttempts && isRetryable(llmError) && !signal?.aborted;
      if (!shouldRetry2) {
        throw new LLMError({
          message: `Failed after ${state.attempt} attempt(s): ${llmError.message}`,
          category: llmError.category,
          status: llmError.status,
          headers: llmError.headers,
          provider: llmError.provider,
          model: llmError.model,
          cause: llmError
        });
      }
      const delayMs = calculateDelay(state.attempt, llmError, mergedConfig);
      state.nextDelayMs = delayMs;
      // Note: onRetry is read from the raw `config`, not mergedConfig.
      config?.onRetry?.(state.attempt, delayMs, llmError);
      // sleep rejects with AbortError if `signal` fires during the wait.
      await sleep(delayMs, signal);
    }
  }
}
|
|
130
|
+
/**
 * Curry withRetry into a reusable handler: the returned function runs the
 * given stream factory under the retry policy captured here.
 */
function createRetryHandler(options) {
  const config = options ?? {};
  const signal = config.signal;
  return async (createStream) => withRetry(createStream, config, signal);
}
|
|
137
|
+
/**
 * Decide whether a failed attempt should be retried: attempts must remain
 * and the error category must be retryable.
 */
function shouldRetry(error, attempt, maxAttempts = DEFAULT_RETRY_CONFIG.maxAttempts) {
  return attempt < maxAttempts && isRetryable(error);
}
|
|
141
|
+
|
|
142
|
+
// src/inference/types.ts
// Default cap on generated tokens per model call (32,000).
var DEFAULT_MAX_OUTPUT_TOKENS = 32e3;
// Alias of DEFAULT_MAX_OUTPUT_TOKENS; both names are exported.
var OUTPUT_TOKEN_MAX = DEFAULT_MAX_OUTPUT_TOKENS;
|
|
145
|
+
|
|
146
|
+
// src/inference/stream.ts
/**
 * Read-only metadata snapshot handed to middleware hooks for one model call:
 * session/step identity, abort signal, model, tool names, and the current
 * async-local scope (if any).
 */
function buildModelCallContext(input) {
  return {
    sessionID: input.sessionID,
    step: input.step ?? 1, // steps are 1-based
    cwd: input.cwd,
    abort: input.abort,
    model: input.model,
    toolNames: Object.keys(input.tools),
    mcpToolNames: Object.keys(input.mcpTools ?? {}),
    scope: snapshotScope() // undefined when called outside any scope
  };
}
|
|
159
|
+
/**
 * Snapshot the middleware-visible portion of the call input. Arrays are
 * copied so middleware mutations cannot leak back; providerOptions and
 * systemMessages start undefined and are only set by middleware.
 */
function buildModelCallInput(input) {
  const {
    model,
    temperature,
    topP,
    maxOutputTokens,
    maxSteps,
    reasoningLevel,
    telemetry,
    customStreamProvider,
    toolExecutionMode
  } = input;
  return {
    model,
    system: input.system.slice(),
    messages: input.messages.slice(),
    temperature,
    topP,
    maxOutputTokens,
    maxSteps,
    reasoningLevel,
    telemetry,
    customStreamProvider,
    toolExecutionMode,
    providerOptions: undefined,
    systemMessages: undefined
  };
}
|
|
176
|
+
/**
 * Copy a (possibly middleware-rewritten) model-call snapshot back onto the
 * live input object. Arrays are re-copied so target and snapshot stay
 * independent; the snapshot itself is recorded as `activeModelCall`.
 */
function applyModelCallInput(target, modelCall) {
  const scalarKeys = [
    "model",
    "temperature",
    "topP",
    "maxOutputTokens",
    "maxSteps",
    "reasoningLevel",
    "telemetry",
    "customStreamProvider",
    "toolExecutionMode"
  ];
  for (const key of scalarKeys) {
    target[key] = modelCall[key];
  }
  target.system = modelCall.system.slice();
  target.messages = modelCall.messages.slice();
  target.activeModelCall = modelCall;
}
|
|
190
|
+
/**
 * Shallow-merge provider option objects; `override` keys win.
 * When either side is missing, the other is returned unchanged (same
 * reference, no copy).
 */
function mergeProviderOptions(base, override) {
  if (!base) {
    return override;
  }
  if (!override) {
    return base;
  }
  return Object.assign({}, base, override);
}
|
|
195
|
+
/**
 * Type guard: detect middleware's "block this model call" marker.
 * Fix: `"block" in value` throws a TypeError when `value` is null or a
 * primitive, and middleware output is not statically guaranteed to be an
 * object here — guard first. Backward compatible: object inputs behave
 * exactly as before.
 */
function isBlockedModelCall(value) {
  return typeof value === "object" && value !== null && "block" in value && value.block === true;
}
|
|
198
|
+
/**
 * Produce the effective model-call input, letting middleware rewrite it.
 * Mutates `input` in place (via applyModelCallInput) unless middleware
 * blocked the call, in which case the block marker is returned unchanged
 * for the caller to turn into an error.
 */
async function resolveModelCallInput(input) {
  if (!input.middleware?.hasMiddleware) {
    // No middleware: snapshot the input and record it as the active call.
    const current = buildModelCallInput(input);
    input.activeModelCall = current;
    return current;
  }
  const next = await input.middleware.runModelInput(
    buildModelCallInput(input),
    buildModelCallContext(input)
  );
  if (isBlockedModelCall(next)) {
    // Middleware vetoed the call; do not touch `input`.
    return next;
  }
  // Copy the (possibly rewritten) values back onto the live input.
  applyModelCallInput(input, next);
  return next;
}
|
|
214
|
+
/**
 * Normalize a raw model stream's text/usage/finishReason into plain promises
 * (missing token counts default to 0, finishReason is stringified). When
 * middleware is present, every chunk of fullStream is additionally piped
 * through runModelChunk; middleware may drop a chunk by returning a falsy
 * value.
 */
function wrapModelStream(stream2, input) {
  // Promise.resolve guards against providers exposing plain values here.
  const normalizedText = Promise.resolve(stream2.text);
  const normalizedUsage = Promise.resolve(stream2.usage).then((usage) => ({
    inputTokens: usage.inputTokens ?? 0,
    outputTokens: usage.outputTokens ?? 0,
    totalTokens: usage.totalTokens ?? 0
  }));
  const normalizedFinishReason = Promise.resolve(stream2.finishReason).then(
    (reason) => String(reason)
  );
  if (!input.middleware?.hasMiddleware) {
    return {
      fullStream: stream2.fullStream,
      text: normalizedText,
      usage: normalizedUsage,
      finishReason: normalizedFinishReason
    };
  }
  return {
    // Lazily filter/transform chunks through middleware as they arrive.
    fullStream: (async function* () {
      const ctx = buildModelCallContext(input);
      for await (const rawChunk of stream2.fullStream) {
        const chunk = await input.middleware.runModelChunk(
          rawChunk,
          ctx
        );
        if (chunk) {
          yield chunk;
        }
      }
    })(),
    text: normalizedText,
    usage: normalizedUsage,
    finishReason: normalizedFinishReason
  };
}
|
|
250
|
+
/**
 * Delegate the model call to a caller-supplied stream provider, passing the
 * joined system prompt (empty segments dropped), the message list, the abort
 * signal, and the step budget.
 */
async function createCustomStream(input) {
  const systemPrompt = input.system.filter(Boolean).join("\n");
  const providerArgs = {
    system: systemPrompt,
    messages: input.messages,
    abortSignal: input.abort,
    maxSteps: input.maxSteps
  };
  return input.customStreamProvider(providerArgs);
}
|
|
259
|
+
/**
 * Extract provider/model identifiers for diagnostics. Accepts either a model
 * object (AI SDK shape with provider/modelId fields) or a bare string id.
 */
function getModelInfo(input) {
  const { model } = input;
  const isObjectModel = typeof model === "object";
  const provider = isObjectModel && "provider" in model ? String(model.provider) : void 0;
  const modelName = isObjectModel && "modelId" in model ? String(model.modelId) : String(model);
  return { provider, model: modelName };
}
|
|
265
|
+
/**
 * Invoke the AI SDK's streamText, optionally inside a stored OpenTelemetry
 * context for the session. Middleware-rewritten values on
 * input.activeModelCall (systemMessages, providerOptions) take precedence
 * over the defaults computed by the caller.
 */
async function callStreamTextWithOtelContext(options) {
  const { input, allTools, system, providerOptions } = options;
  // Middleware may supply structured system messages; otherwise use the
  // pre-joined system string.
  const systemParam = input.activeModelCall?.systemMessages?.length ? input.activeModelCall.systemMessages : system;
  const mergedProviderOptions = mergeProviderOptions(
    providerOptions,
    input.activeModelCall?.providerOptions
  );
  const callStreamText = () => streamText({
    model: input.model,
    system: systemParam,
    messages: input.messages,
    tools: allTools,
    stopWhen: stepCountIs(input.maxSteps ?? 50), // default step budget: 50
    maxOutputTokens: input.maxOutputTokens ?? DEFAULT_MAX_OUTPUT_TOKENS,
    temperature: input.temperature,
    topP: input.topP,
    abortSignal: input.abort,
    providerOptions: mergedProviderOptions,
    experimental_telemetry: input.telemetry,
    // When an intervention queue exists, drain it before each step and
    // inject pending items as user messages.
    prepareStep: input.intervention ? async ({ messages }) => {
      const pending = input.intervention.drainImmediate();
      if (pending.length === 0) {
        return void 0;
      }
      const injected = pending.map((item) => ({
        role: "user",
        content: item.message
      }));
      for (const item of pending) {
        // Notify observers that the intervention was applied.
        input.intervention.onApplied?.(item);
      }
      return { messages: [...messages, ...injected] };
    } : void 0,
    onStepFinish: async (step) => {
      if (!input.onStepFinish) {
        return;
      }
      // Forward a trimmed view of the step's tool results and usage.
      await input.onStepFinish({
        toolResults: step.toolResults?.map((toolResult) => ({
          toolName: toolResult.toolName,
          toolCallId: toolResult.toolCallId,
          output: toolResult.output
        })),
        usage: step.usage,
        finishReason: step.finishReason
      });
    }
  });
  const otelCtx = input.middleware?.getOtelContext(input.sessionID);
  if (!otelCtx) {
    return callStreamText();
  }
  try {
    // Dynamic import keeps @opentelemetry/api an optional dependency.
    const otelApi = await import("@opentelemetry/api");
    return otelApi.context.with(
      otelCtx,
      callStreamText
    );
  } catch {
    // OTel not installed: run without context propagation.
    return callStreamText();
  }
}
|
|
327
|
+
/**
 * Main streaming entry point: resolve middleware rewrites, build the tool
 * set, and run the model call (with retries unless disabled). Throws
 * LLMError when middleware blocks the call or the provider fails.
 */
async function stream(input) {
  const messageID = crypto.randomUUID();
  // May mutate `input` in place with middleware-rewritten values.
  const resolvedInput = await resolveModelCallInput(input);
  const modelInfo = getModelInfo(input);
  if (isBlockedModelCall(resolvedInput)) {
    // Middleware vetoed the call; surface it as an invalid_request error.
    throw new LLMError({
      message: resolvedInput.reason,
      category: "invalid_request",
      provider: modelInfo.provider,
      model: modelInfo.model
    });
  }
  const system = input.system.filter(Boolean).join("\n");
  if (input.customStreamProvider) {
    // Custom providers bypass the AI SDK entirely but share retry handling.
    const runCustomStream = async () => await createCustomStream(input);
    if (!input.retry || input.retry.maxAttempts === 0) {
      return wrapModelStream(await runCustomStream(), input);
    }
    return wrapModelStream(await withRetry(
      async () => await runCustomStream(),
      input.retry,
      input.abort
    ), input);
  }
  const toolSet = await buildToolSet({
    tools: input.tools,
    cwd: input.cwd,
    sessionID: input.sessionID,
    messageID,
    abort: input.abort,
    turnTracker: input.turnTracker,
    host: input.host,
    middleware: input.middleware,
    executionMode: input.toolExecutionMode
  });
  // Agent tools plus any externally supplied MCP tools.
  const allTools = {
    ...toolSet,
    ...input.mcpTools ?? {}
  };
  const providerOptions = input.reasoningLevel ? buildReasoningOptionsSync(input.model, input.reasoningLevel) : void 0;
  const createStream = async () => {
    try {
      return await callStreamTextWithOtelContext({
        input,
        allTools,
        system,
        providerOptions
      });
    } catch (error) {
      // Normalize provider errors, tagging provider/model for diagnostics.
      throw LLMError.from(error, modelInfo);
    }
  };
  if (!input.retry || input.retry.maxAttempts === 0) {
    return wrapModelStream(await createStream(), input);
  }
  return wrapModelStream(await withRetry(
    async () => await createStream(),
    input.retry,
    input.abort
  ), input);
}
|
|
388
|
+
/** Single-shot variant of stream(): runs with retries disabled. */
async function streamOnce(input) {
  const options = { ...input, retry: void 0 };
  return await stream(options);
}
|
|
391
|
+
/** Variant of stream() limited to a single model step. */
async function streamStep(input) {
  const options = { ...input, maxSteps: 1 };
  return await stream(options);
}
|
|
397
|
+
|
|
398
|
+
// src/inference/index.ts
// Namespace object bundling the streaming entry points.
var Inference = {
  buildToolSet,
  stream,
  streamOnce,
  streamStep
};
// Alias of Inference; both names are exported.
var LLM = Inference;
|
|
406
|
+
|
|
407
|
+
// Public surface of this chunk: tool-set builder, retry helpers, token
// limits, and the streaming entry points.
export {
  buildToolSet,
  DEFAULT_RETRY_CONFIG,
  createRetryState,
  calculateDelay,
  sleep,
  withRetry,
  createRetryHandler,
  shouldRetry,
  DEFAULT_MAX_OUTPUT_TOKENS,
  OUTPUT_TOKEN_MAX,
  stream,
  streamOnce,
  streamStep,
  Inference,
  LLM
};
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
// src/scope/store.ts
import { AsyncLocalStorage } from "async_hooks";
import { randomUUID } from "crypto";
// Async-local storage holding the active scope for the current execution path.
var scopeStore = new AsyncLocalStorage();
|
|
5
|
+
/** Shallow copy of a scope's attribute bag. */
function cloneAttributes(attributes) {
  return Object.assign({}, attributes);
}
|
|
8
|
+
/** Copy a scope; `attributes` gets its own copy so it is never shared. */
function cloneScope(scope) {
  const copy = { ...scope };
  copy.attributes = cloneAttributes(scope.attributes);
  return copy;
}
|
|
14
|
+
/** Read the scope bound to the current async context, or undefined. */
function getStoredScope() {
  return scopeStore.getStore();
}
|
|
17
|
+
/**
 * Construct a new scope record, inheriting from `current` unless
 * options.parent overrides it. Passing `parent: null` explicitly detaches
 * the scope (the `?? void 0` normalizes null to undefined).
 */
function buildScope(options, current) {
  const parent = options.parent === void 0 ? current : options.parent ?? void 0;
  const id = options.id ?? randomUUID();
  return {
    id,
    rootId: parent?.rootId ?? id,     // root of the scope tree; self when detached
    kind: options.kind,
    name: options.name,
    parentId: parent?.id,
    depth: (parent?.depth ?? -1) + 1, // root scopes get depth 0
    startedAt: (/* @__PURE__ */ new Date()).toISOString(),
    sessionId: options.sessionId ?? parent?.sessionId, // inherited when unset
    taskId: options.taskId ?? parent?.taskId,
    step: options.step ?? parent?.step,
    attributes: cloneAttributes(options.attributes ?? {})
  };
}
|
|
34
|
+
/** Run fn with `scope` installed in async-local storage; always yields a Promise. */
function runWithScope(scope, fn) {
  const outcome = scopeStore.run(scope, fn);
  return Promise.resolve(outcome);
}
|
|
37
|
+
|
|
38
|
+
// src/scope/run.ts
/** Return a defensive copy of the active scope, or undefined when none. */
function currentScope() {
  const active = getStoredScope();
  if (!active) {
    return void 0;
  }
  return cloneScope(active);
}
|
|
43
|
+
/** Clone the given scope (defaults to the currently stored one). */
function snapshotScope(scope = getStoredScope()) {
  if (!scope) {
    return void 0;
  }
  return cloneScope(scope);
}
|
|
46
|
+
/** Build a scope derived from the active one without entering it. */
function createScope(options) {
  const active = getStoredScope();
  return buildScope(options, active);
}
|
|
49
|
+
/** Build a child scope of the active one and run fn inside it. */
function withinScope(options, fn) {
  const scope = buildScope(options, getStoredScope());
  return runWithScope(scope, fn);
}
|
|
52
|
+
/**
 * Re-enter a previously captured scope snapshot for fn.
 * With no snapshot, fn simply runs in the current context.
 */
function restoreScope(snapshot, fn) {
  if (snapshot) {
    return runWithScope(cloneScope(snapshot), fn);
  }
  return Promise.resolve(fn());
}
|
|
58
|
+
/**
 * Wrap an async iterable so every pull (next/return) runs inside a freshly
 * built scope. The scope is captured once up front; each iterator operation
 * re-enters it via runWithScope so AsyncLocalStorage context is preserved
 * across the consumer's await points.
 */
async function* streamWithinScope(options, iterable) {
  const scope = buildScope(options, getStoredScope());
  const iterator = await runWithScope(scope, () => iterable[Symbol.asyncIterator]());
  try {
    while (true) {
      const next = await runWithScope(scope, () => iterator.next());
      if (next.done) {
        return next.value;
      }
      yield next.value;
    }
  } finally {
    // Close the source iterator (e.g. on consumer break/throw), still
    // inside the scope.
    const returnFn = iterator.return?.bind(iterator);
    if (returnFn) {
      await runWithScope(scope, () => returnFn(void 0));
    }
  }
}
|
|
76
|
+
|
|
77
|
+
// Public scope API of this chunk.
export {
  currentScope,
  snapshotScope,
  createScope,
  withinScope,
  restoreScope,
  streamWithinScope
};
|