@usewhisper/mcp-server 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -24
- package/dist/autosubscribe-6EDKPBE2.js +4068 -0
- package/dist/autosubscribe-GHO6YR5A.js +4068 -0
- package/dist/autosubscribe-ISDETQIB.js +436 -0
- package/dist/autosubscribe-ISDETQIB.js.map +1 -0
- package/dist/chunk-3WGYBAYR.js +8387 -0
- package/dist/chunk-52VJYCZ7.js +455 -0
- package/dist/chunk-5KBZQHDL.js +189 -0
- package/dist/chunk-5KIJNY6Z.js +370 -0
- package/dist/chunk-7SN3CKDK.js +1076 -0
- package/dist/chunk-B3VWOHUA.js +271 -0
- package/dist/chunk-C57DHKTL.js +459 -0
- package/dist/chunk-EI5CE3EY.js +616 -0
- package/dist/chunk-FTWUJBAH.js +387 -0
- package/dist/chunk-FTWUJBAH.js.map +1 -0
- package/dist/chunk-H3HSKH2P.js +4841 -0
- package/dist/chunk-JO3ORBZD.js +616 -0
- package/dist/chunk-L6DXSM2U.js +457 -0
- package/dist/chunk-L6DXSM2U.js.map +1 -0
- package/dist/chunk-LMEYV4JD.js +368 -0
- package/dist/chunk-MEFLJ4PV.js +8385 -0
- package/dist/chunk-OBLI4FE4.js +276 -0
- package/dist/chunk-OBLI4FE4.js.map +1 -0
- package/dist/chunk-PPGYJJED.js +271 -0
- package/dist/chunk-QGM4M3NI.js +37 -0
- package/dist/chunk-T7KMSTWP.js +399 -0
- package/dist/chunk-TWEIYHI6.js +399 -0
- package/dist/chunk-UYWE7HSU.js +369 -0
- package/dist/chunk-UYWE7HSU.js.map +1 -0
- package/dist/chunk-X2DL2GWT.js +33 -0
- package/dist/chunk-X2DL2GWT.js.map +1 -0
- package/dist/chunk-X7HNNNJJ.js +1079 -0
- package/dist/consolidation-2GCKI4RE.js +220 -0
- package/dist/consolidation-4JOPW6BG.js +220 -0
- package/dist/consolidation-FOVQTWNQ.js +222 -0
- package/dist/consolidation-IFQ52E44.js +210 -0
- package/dist/consolidation-IFQ52E44.js.map +1 -0
- package/dist/context-sharing-4ITCNKG4.js +307 -0
- package/dist/context-sharing-6CCFIAKL.js +276 -0
- package/dist/context-sharing-6CCFIAKL.js.map +1 -0
- package/dist/context-sharing-GYKLXHZA.js +307 -0
- package/dist/context-sharing-PH64JTXS.js +308 -0
- package/dist/context-sharing-Y6LTZZOF.js +307 -0
- package/dist/cost-optimization-6OIKRSBV.js +196 -0
- package/dist/cost-optimization-6OIKRSBV.js.map +1 -0
- package/dist/cost-optimization-7DVSTL6R.js +307 -0
- package/dist/cost-optimization-BH5NAX33.js +287 -0
- package/dist/cost-optimization-BH5NAX33.js.map +1 -0
- package/dist/cost-optimization-F3L5BS5F.js +303 -0
- package/dist/ingest-2LPTWUUM.js +16 -0
- package/dist/ingest-7T5FAZNC.js +15 -0
- package/dist/ingest-EBNIE7XB.js +15 -0
- package/dist/ingest-FSHT5BCS.js +15 -0
- package/dist/ingest-QE2BTV72.js +15 -0
- package/dist/ingest-QE2BTV72.js.map +1 -0
- package/dist/oracle-3RLQF3DP.js +259 -0
- package/dist/oracle-FKRTQUUG.js +282 -0
- package/dist/oracle-J47QCSEW.js +263 -0
- package/dist/oracle-MDP5MZRC.js +257 -0
- package/dist/oracle-MDP5MZRC.js.map +1 -0
- package/dist/search-BLVHWLWC.js +14 -0
- package/dist/search-CZ5NYL5B.js +13 -0
- package/dist/search-CZ5NYL5B.js.map +1 -0
- package/dist/search-EG6TYWWW.js +13 -0
- package/dist/search-I22QQA7T.js +13 -0
- package/dist/search-T7H5G6DW.js +13 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +914 -1503
- package/dist/server.js.map +1 -1
- package/package.json +6 -7
|
@@ -0,0 +1,4841 @@
|
|
|
1
|
+
// ../node_modules/@anthropic-ai/sdk/internal/tslib.mjs
|
|
2
|
+
function __classPrivateFieldSet(receiver, state, value, kind, f) {
|
|
3
|
+
if (kind === "m")
|
|
4
|
+
throw new TypeError("Private method is not writable");
|
|
5
|
+
if (kind === "a" && !f)
|
|
6
|
+
throw new TypeError("Private accessor was defined without a setter");
|
|
7
|
+
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
8
|
+
throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
|
9
|
+
return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
|
|
10
|
+
}
|
|
11
|
+
function __classPrivateFieldGet(receiver, state, kind, f) {
|
|
12
|
+
if (kind === "a" && !f)
|
|
13
|
+
throw new TypeError("Private accessor was defined without a getter");
|
|
14
|
+
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
15
|
+
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
16
|
+
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/uuid.mjs
|
|
20
|
+
var uuid4 = function() {
|
|
21
|
+
const { crypto } = globalThis;
|
|
22
|
+
if (crypto?.randomUUID) {
|
|
23
|
+
uuid4 = crypto.randomUUID.bind(crypto);
|
|
24
|
+
return crypto.randomUUID();
|
|
25
|
+
}
|
|
26
|
+
const u8 = new Uint8Array(1);
|
|
27
|
+
const randomByte = crypto ? () => crypto.getRandomValues(u8)[0] : () => Math.random() * 255 & 255;
|
|
28
|
+
return "10000000-1000-4000-8000-100000000000".replace(/[018]/g, (c) => (+c ^ randomByte() & 15 >> +c / 4).toString(16));
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
// ../node_modules/@anthropic-ai/sdk/internal/errors.mjs
|
|
32
|
+
function isAbortError(err) {
|
|
33
|
+
return typeof err === "object" && err !== null && // Spec-compliant fetch implementations
|
|
34
|
+
("name" in err && err.name === "AbortError" || // Expo fetch
|
|
35
|
+
"message" in err && String(err.message).includes("FetchRequestCanceledException"));
|
|
36
|
+
}
|
|
37
|
+
var castToError = (err) => {
|
|
38
|
+
if (err instanceof Error)
|
|
39
|
+
return err;
|
|
40
|
+
if (typeof err === "object" && err !== null) {
|
|
41
|
+
try {
|
|
42
|
+
if (Object.prototype.toString.call(err) === "[object Error]") {
|
|
43
|
+
const error = new Error(err.message, err.cause ? { cause: err.cause } : {});
|
|
44
|
+
if (err.stack)
|
|
45
|
+
error.stack = err.stack;
|
|
46
|
+
if (err.cause && !error.cause)
|
|
47
|
+
error.cause = err.cause;
|
|
48
|
+
if (err.name)
|
|
49
|
+
error.name = err.name;
|
|
50
|
+
return error;
|
|
51
|
+
}
|
|
52
|
+
} catch {
|
|
53
|
+
}
|
|
54
|
+
try {
|
|
55
|
+
return new Error(JSON.stringify(err));
|
|
56
|
+
} catch {
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
return new Error(err);
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
// ../node_modules/@anthropic-ai/sdk/core/error.mjs
|
|
63
|
+
var AnthropicError = class extends Error {
|
|
64
|
+
};
|
|
65
|
+
var APIError = class _APIError extends AnthropicError {
|
|
66
|
+
constructor(status, error, message, headers) {
|
|
67
|
+
super(`${_APIError.makeMessage(status, error, message)}`);
|
|
68
|
+
this.status = status;
|
|
69
|
+
this.headers = headers;
|
|
70
|
+
this.requestID = headers?.get("request-id");
|
|
71
|
+
this.error = error;
|
|
72
|
+
}
|
|
73
|
+
static makeMessage(status, error, message) {
|
|
74
|
+
const msg = error?.message ? typeof error.message === "string" ? error.message : JSON.stringify(error.message) : error ? JSON.stringify(error) : message;
|
|
75
|
+
if (status && msg) {
|
|
76
|
+
return `${status} ${msg}`;
|
|
77
|
+
}
|
|
78
|
+
if (status) {
|
|
79
|
+
return `${status} status code (no body)`;
|
|
80
|
+
}
|
|
81
|
+
if (msg) {
|
|
82
|
+
return msg;
|
|
83
|
+
}
|
|
84
|
+
return "(no status code or body)";
|
|
85
|
+
}
|
|
86
|
+
static generate(status, errorResponse, message, headers) {
|
|
87
|
+
if (!status || !headers) {
|
|
88
|
+
return new APIConnectionError({ message, cause: castToError(errorResponse) });
|
|
89
|
+
}
|
|
90
|
+
const error = errorResponse;
|
|
91
|
+
if (status === 400) {
|
|
92
|
+
return new BadRequestError(status, error, message, headers);
|
|
93
|
+
}
|
|
94
|
+
if (status === 401) {
|
|
95
|
+
return new AuthenticationError(status, error, message, headers);
|
|
96
|
+
}
|
|
97
|
+
if (status === 403) {
|
|
98
|
+
return new PermissionDeniedError(status, error, message, headers);
|
|
99
|
+
}
|
|
100
|
+
if (status === 404) {
|
|
101
|
+
return new NotFoundError(status, error, message, headers);
|
|
102
|
+
}
|
|
103
|
+
if (status === 409) {
|
|
104
|
+
return new ConflictError(status, error, message, headers);
|
|
105
|
+
}
|
|
106
|
+
if (status === 422) {
|
|
107
|
+
return new UnprocessableEntityError(status, error, message, headers);
|
|
108
|
+
}
|
|
109
|
+
if (status === 429) {
|
|
110
|
+
return new RateLimitError(status, error, message, headers);
|
|
111
|
+
}
|
|
112
|
+
if (status >= 500) {
|
|
113
|
+
return new InternalServerError(status, error, message, headers);
|
|
114
|
+
}
|
|
115
|
+
return new _APIError(status, error, message, headers);
|
|
116
|
+
}
|
|
117
|
+
};
|
|
118
|
+
var APIUserAbortError = class extends APIError {
|
|
119
|
+
constructor({ message } = {}) {
|
|
120
|
+
super(void 0, void 0, message || "Request was aborted.", void 0);
|
|
121
|
+
}
|
|
122
|
+
};
|
|
123
|
+
var APIConnectionError = class extends APIError {
|
|
124
|
+
constructor({ message, cause }) {
|
|
125
|
+
super(void 0, void 0, message || "Connection error.", void 0);
|
|
126
|
+
if (cause)
|
|
127
|
+
this.cause = cause;
|
|
128
|
+
}
|
|
129
|
+
};
|
|
130
|
+
var APIConnectionTimeoutError = class extends APIConnectionError {
|
|
131
|
+
constructor({ message } = {}) {
|
|
132
|
+
super({ message: message ?? "Request timed out." });
|
|
133
|
+
}
|
|
134
|
+
};
|
|
135
|
+
var BadRequestError = class extends APIError {
|
|
136
|
+
};
|
|
137
|
+
var AuthenticationError = class extends APIError {
|
|
138
|
+
};
|
|
139
|
+
var PermissionDeniedError = class extends APIError {
|
|
140
|
+
};
|
|
141
|
+
var NotFoundError = class extends APIError {
|
|
142
|
+
};
|
|
143
|
+
var ConflictError = class extends APIError {
|
|
144
|
+
};
|
|
145
|
+
var UnprocessableEntityError = class extends APIError {
|
|
146
|
+
};
|
|
147
|
+
var RateLimitError = class extends APIError {
|
|
148
|
+
};
|
|
149
|
+
var InternalServerError = class extends APIError {
|
|
150
|
+
};
|
|
151
|
+
|
|
152
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/values.mjs
|
|
153
|
+
var startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
|
154
|
+
var isAbsoluteURL = (url) => {
|
|
155
|
+
return startsWithSchemeRegexp.test(url);
|
|
156
|
+
};
|
|
157
|
+
var isArray = (val) => (isArray = Array.isArray, isArray(val));
|
|
158
|
+
var isReadonlyArray = isArray;
|
|
159
|
+
function maybeObj(x) {
|
|
160
|
+
if (typeof x !== "object") {
|
|
161
|
+
return {};
|
|
162
|
+
}
|
|
163
|
+
return x ?? {};
|
|
164
|
+
}
|
|
165
|
+
function isEmptyObj(obj) {
|
|
166
|
+
if (!obj)
|
|
167
|
+
return true;
|
|
168
|
+
for (const _k in obj)
|
|
169
|
+
return false;
|
|
170
|
+
return true;
|
|
171
|
+
}
|
|
172
|
+
function hasOwn(obj, key) {
|
|
173
|
+
return Object.prototype.hasOwnProperty.call(obj, key);
|
|
174
|
+
}
|
|
175
|
+
var validatePositiveInteger = (name, n) => {
|
|
176
|
+
if (typeof n !== "number" || !Number.isInteger(n)) {
|
|
177
|
+
throw new AnthropicError(`${name} must be an integer`);
|
|
178
|
+
}
|
|
179
|
+
if (n < 0) {
|
|
180
|
+
throw new AnthropicError(`${name} must be a positive integer`);
|
|
181
|
+
}
|
|
182
|
+
return n;
|
|
183
|
+
};
|
|
184
|
+
var safeJSON = (text) => {
|
|
185
|
+
try {
|
|
186
|
+
return JSON.parse(text);
|
|
187
|
+
} catch (err) {
|
|
188
|
+
return void 0;
|
|
189
|
+
}
|
|
190
|
+
};
|
|
191
|
+
|
|
192
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
|
|
193
|
+
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
194
|
+
|
|
195
|
+
// ../node_modules/@anthropic-ai/sdk/version.mjs
|
|
196
|
+
var VERSION = "0.73.0";
|
|
197
|
+
|
|
198
|
+
// ../node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
|
|
199
|
+
var isRunningInBrowser = () => {
|
|
200
|
+
return (
|
|
201
|
+
// @ts-ignore
|
|
202
|
+
typeof window !== "undefined" && // @ts-ignore
|
|
203
|
+
typeof window.document !== "undefined" && // @ts-ignore
|
|
204
|
+
typeof navigator !== "undefined"
|
|
205
|
+
);
|
|
206
|
+
};
|
|
207
|
+
function getDetectedPlatform() {
|
|
208
|
+
if (typeof Deno !== "undefined" && Deno.build != null) {
|
|
209
|
+
return "deno";
|
|
210
|
+
}
|
|
211
|
+
if (typeof EdgeRuntime !== "undefined") {
|
|
212
|
+
return "edge";
|
|
213
|
+
}
|
|
214
|
+
if (Object.prototype.toString.call(typeof globalThis.process !== "undefined" ? globalThis.process : 0) === "[object process]") {
|
|
215
|
+
return "node";
|
|
216
|
+
}
|
|
217
|
+
return "unknown";
|
|
218
|
+
}
|
|
219
|
+
var getPlatformProperties = () => {
|
|
220
|
+
const detectedPlatform = getDetectedPlatform();
|
|
221
|
+
if (detectedPlatform === "deno") {
|
|
222
|
+
return {
|
|
223
|
+
"X-Stainless-Lang": "js",
|
|
224
|
+
"X-Stainless-Package-Version": VERSION,
|
|
225
|
+
"X-Stainless-OS": normalizePlatform(Deno.build.os),
|
|
226
|
+
"X-Stainless-Arch": normalizeArch(Deno.build.arch),
|
|
227
|
+
"X-Stainless-Runtime": "deno",
|
|
228
|
+
"X-Stainless-Runtime-Version": typeof Deno.version === "string" ? Deno.version : Deno.version?.deno ?? "unknown"
|
|
229
|
+
};
|
|
230
|
+
}
|
|
231
|
+
if (typeof EdgeRuntime !== "undefined") {
|
|
232
|
+
return {
|
|
233
|
+
"X-Stainless-Lang": "js",
|
|
234
|
+
"X-Stainless-Package-Version": VERSION,
|
|
235
|
+
"X-Stainless-OS": "Unknown",
|
|
236
|
+
"X-Stainless-Arch": `other:${EdgeRuntime}`,
|
|
237
|
+
"X-Stainless-Runtime": "edge",
|
|
238
|
+
"X-Stainless-Runtime-Version": globalThis.process.version
|
|
239
|
+
};
|
|
240
|
+
}
|
|
241
|
+
if (detectedPlatform === "node") {
|
|
242
|
+
return {
|
|
243
|
+
"X-Stainless-Lang": "js",
|
|
244
|
+
"X-Stainless-Package-Version": VERSION,
|
|
245
|
+
"X-Stainless-OS": normalizePlatform(globalThis.process.platform ?? "unknown"),
|
|
246
|
+
"X-Stainless-Arch": normalizeArch(globalThis.process.arch ?? "unknown"),
|
|
247
|
+
"X-Stainless-Runtime": "node",
|
|
248
|
+
"X-Stainless-Runtime-Version": globalThis.process.version ?? "unknown"
|
|
249
|
+
};
|
|
250
|
+
}
|
|
251
|
+
const browserInfo = getBrowserInfo();
|
|
252
|
+
if (browserInfo) {
|
|
253
|
+
return {
|
|
254
|
+
"X-Stainless-Lang": "js",
|
|
255
|
+
"X-Stainless-Package-Version": VERSION,
|
|
256
|
+
"X-Stainless-OS": "Unknown",
|
|
257
|
+
"X-Stainless-Arch": "unknown",
|
|
258
|
+
"X-Stainless-Runtime": `browser:${browserInfo.browser}`,
|
|
259
|
+
"X-Stainless-Runtime-Version": browserInfo.version
|
|
260
|
+
};
|
|
261
|
+
}
|
|
262
|
+
return {
|
|
263
|
+
"X-Stainless-Lang": "js",
|
|
264
|
+
"X-Stainless-Package-Version": VERSION,
|
|
265
|
+
"X-Stainless-OS": "Unknown",
|
|
266
|
+
"X-Stainless-Arch": "unknown",
|
|
267
|
+
"X-Stainless-Runtime": "unknown",
|
|
268
|
+
"X-Stainless-Runtime-Version": "unknown"
|
|
269
|
+
};
|
|
270
|
+
};
|
|
271
|
+
function getBrowserInfo() {
|
|
272
|
+
if (typeof navigator === "undefined" || !navigator) {
|
|
273
|
+
return null;
|
|
274
|
+
}
|
|
275
|
+
const browserPatterns = [
|
|
276
|
+
{ key: "edge", pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
|
277
|
+
{ key: "ie", pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
|
278
|
+
{ key: "ie", pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
|
279
|
+
{ key: "chrome", pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
|
280
|
+
{ key: "firefox", pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
|
281
|
+
{ key: "safari", pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ }
|
|
282
|
+
];
|
|
283
|
+
for (const { key, pattern } of browserPatterns) {
|
|
284
|
+
const match = pattern.exec(navigator.userAgent);
|
|
285
|
+
if (match) {
|
|
286
|
+
const major = match[1] || 0;
|
|
287
|
+
const minor = match[2] || 0;
|
|
288
|
+
const patch = match[3] || 0;
|
|
289
|
+
return { browser: key, version: `${major}.${minor}.${patch}` };
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
return null;
|
|
293
|
+
}
|
|
294
|
+
var normalizeArch = (arch) => {
|
|
295
|
+
if (arch === "x32")
|
|
296
|
+
return "x32";
|
|
297
|
+
if (arch === "x86_64" || arch === "x64")
|
|
298
|
+
return "x64";
|
|
299
|
+
if (arch === "arm")
|
|
300
|
+
return "arm";
|
|
301
|
+
if (arch === "aarch64" || arch === "arm64")
|
|
302
|
+
return "arm64";
|
|
303
|
+
if (arch)
|
|
304
|
+
return `other:${arch}`;
|
|
305
|
+
return "unknown";
|
|
306
|
+
};
|
|
307
|
+
var normalizePlatform = (platform) => {
|
|
308
|
+
platform = platform.toLowerCase();
|
|
309
|
+
if (platform.includes("ios"))
|
|
310
|
+
return "iOS";
|
|
311
|
+
if (platform === "android")
|
|
312
|
+
return "Android";
|
|
313
|
+
if (platform === "darwin")
|
|
314
|
+
return "MacOS";
|
|
315
|
+
if (platform === "win32")
|
|
316
|
+
return "Windows";
|
|
317
|
+
if (platform === "freebsd")
|
|
318
|
+
return "FreeBSD";
|
|
319
|
+
if (platform === "openbsd")
|
|
320
|
+
return "OpenBSD";
|
|
321
|
+
if (platform === "linux")
|
|
322
|
+
return "Linux";
|
|
323
|
+
if (platform)
|
|
324
|
+
return `Other:${platform}`;
|
|
325
|
+
return "Unknown";
|
|
326
|
+
};
|
|
327
|
+
var _platformHeaders;
|
|
328
|
+
var getPlatformHeaders = () => {
|
|
329
|
+
return _platformHeaders ?? (_platformHeaders = getPlatformProperties());
|
|
330
|
+
};
|
|
331
|
+
|
|
332
|
+
// ../node_modules/@anthropic-ai/sdk/internal/shims.mjs
|
|
333
|
+
function getDefaultFetch() {
|
|
334
|
+
if (typeof fetch !== "undefined") {
|
|
335
|
+
return fetch;
|
|
336
|
+
}
|
|
337
|
+
throw new Error("`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`");
|
|
338
|
+
}
|
|
339
|
+
function makeReadableStream(...args) {
|
|
340
|
+
const ReadableStream = globalThis.ReadableStream;
|
|
341
|
+
if (typeof ReadableStream === "undefined") {
|
|
342
|
+
throw new Error("`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`");
|
|
343
|
+
}
|
|
344
|
+
return new ReadableStream(...args);
|
|
345
|
+
}
|
|
346
|
+
function ReadableStreamFrom(iterable) {
|
|
347
|
+
let iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
|
|
348
|
+
return makeReadableStream({
|
|
349
|
+
start() {
|
|
350
|
+
},
|
|
351
|
+
async pull(controller) {
|
|
352
|
+
const { done, value } = await iter.next();
|
|
353
|
+
if (done) {
|
|
354
|
+
controller.close();
|
|
355
|
+
} else {
|
|
356
|
+
controller.enqueue(value);
|
|
357
|
+
}
|
|
358
|
+
},
|
|
359
|
+
async cancel() {
|
|
360
|
+
await iter.return?.();
|
|
361
|
+
}
|
|
362
|
+
});
|
|
363
|
+
}
|
|
364
|
+
function ReadableStreamToAsyncIterable(stream) {
|
|
365
|
+
if (stream[Symbol.asyncIterator])
|
|
366
|
+
return stream;
|
|
367
|
+
const reader = stream.getReader();
|
|
368
|
+
return {
|
|
369
|
+
async next() {
|
|
370
|
+
try {
|
|
371
|
+
const result = await reader.read();
|
|
372
|
+
if (result?.done)
|
|
373
|
+
reader.releaseLock();
|
|
374
|
+
return result;
|
|
375
|
+
} catch (e) {
|
|
376
|
+
reader.releaseLock();
|
|
377
|
+
throw e;
|
|
378
|
+
}
|
|
379
|
+
},
|
|
380
|
+
async return() {
|
|
381
|
+
const cancelPromise = reader.cancel();
|
|
382
|
+
reader.releaseLock();
|
|
383
|
+
await cancelPromise;
|
|
384
|
+
return { done: true, value: void 0 };
|
|
385
|
+
},
|
|
386
|
+
[Symbol.asyncIterator]() {
|
|
387
|
+
return this;
|
|
388
|
+
}
|
|
389
|
+
};
|
|
390
|
+
}
|
|
391
|
+
async function CancelReadableStream(stream) {
|
|
392
|
+
if (stream === null || typeof stream !== "object")
|
|
393
|
+
return;
|
|
394
|
+
if (stream[Symbol.asyncIterator]) {
|
|
395
|
+
await stream[Symbol.asyncIterator]().return?.();
|
|
396
|
+
return;
|
|
397
|
+
}
|
|
398
|
+
const reader = stream.getReader();
|
|
399
|
+
const cancelPromise = reader.cancel();
|
|
400
|
+
reader.releaseLock();
|
|
401
|
+
await cancelPromise;
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
// ../node_modules/@anthropic-ai/sdk/internal/request-options.mjs
|
|
405
|
+
var FallbackEncoder = ({ headers, body }) => {
|
|
406
|
+
return {
|
|
407
|
+
bodyHeaders: {
|
|
408
|
+
"content-type": "application/json"
|
|
409
|
+
},
|
|
410
|
+
body: JSON.stringify(body)
|
|
411
|
+
};
|
|
412
|
+
};
|
|
413
|
+
|
|
414
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
|
|
415
|
+
function concatBytes(buffers) {
|
|
416
|
+
let length = 0;
|
|
417
|
+
for (const buffer of buffers) {
|
|
418
|
+
length += buffer.length;
|
|
419
|
+
}
|
|
420
|
+
const output = new Uint8Array(length);
|
|
421
|
+
let index = 0;
|
|
422
|
+
for (const buffer of buffers) {
|
|
423
|
+
output.set(buffer, index);
|
|
424
|
+
index += buffer.length;
|
|
425
|
+
}
|
|
426
|
+
return output;
|
|
427
|
+
}
|
|
428
|
+
var encodeUTF8_;
|
|
429
|
+
function encodeUTF8(str) {
|
|
430
|
+
let encoder;
|
|
431
|
+
return (encodeUTF8_ ?? (encoder = new globalThis.TextEncoder(), encodeUTF8_ = encoder.encode.bind(encoder)))(str);
|
|
432
|
+
}
|
|
433
|
+
var decodeUTF8_;
|
|
434
|
+
function decodeUTF8(bytes) {
|
|
435
|
+
let decoder;
|
|
436
|
+
return (decodeUTF8_ ?? (decoder = new globalThis.TextDecoder(), decodeUTF8_ = decoder.decode.bind(decoder)))(bytes);
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
// ../node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
|
|
440
|
+
var _LineDecoder_buffer;
|
|
441
|
+
var _LineDecoder_carriageReturnIndex;
|
|
442
|
+
var LineDecoder = class {
|
|
443
|
+
constructor() {
|
|
444
|
+
_LineDecoder_buffer.set(this, void 0);
|
|
445
|
+
_LineDecoder_carriageReturnIndex.set(this, void 0);
|
|
446
|
+
__classPrivateFieldSet(this, _LineDecoder_buffer, new Uint8Array(), "f");
|
|
447
|
+
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
|
448
|
+
}
|
|
449
|
+
decode(chunk) {
|
|
450
|
+
if (chunk == null) {
|
|
451
|
+
return [];
|
|
452
|
+
}
|
|
453
|
+
const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) : typeof chunk === "string" ? encodeUTF8(chunk) : chunk;
|
|
454
|
+
__classPrivateFieldSet(this, _LineDecoder_buffer, concatBytes([__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), binaryChunk]), "f");
|
|
455
|
+
const lines = [];
|
|
456
|
+
let patternIndex;
|
|
457
|
+
while ((patternIndex = findNewlineIndex(__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) {
|
|
458
|
+
if (patternIndex.carriage && __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) {
|
|
459
|
+
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f");
|
|
460
|
+
continue;
|
|
461
|
+
}
|
|
462
|
+
if (__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null && (patternIndex.index !== __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) {
|
|
463
|
+
lines.push(decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1)));
|
|
464
|
+
__classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")), "f");
|
|
465
|
+
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
|
466
|
+
continue;
|
|
467
|
+
}
|
|
468
|
+
const endIndex = __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
|
|
469
|
+
const line = decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, endIndex));
|
|
470
|
+
lines.push(line);
|
|
471
|
+
__classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(patternIndex.index), "f");
|
|
472
|
+
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
|
473
|
+
}
|
|
474
|
+
return lines;
|
|
475
|
+
}
|
|
476
|
+
flush() {
|
|
477
|
+
if (!__classPrivateFieldGet(this, _LineDecoder_buffer, "f").length) {
|
|
478
|
+
return [];
|
|
479
|
+
}
|
|
480
|
+
return this.decode("\n");
|
|
481
|
+
}
|
|
482
|
+
};
|
|
483
|
+
_LineDecoder_buffer = /* @__PURE__ */ new WeakMap(), _LineDecoder_carriageReturnIndex = /* @__PURE__ */ new WeakMap();
|
|
484
|
+
LineDecoder.NEWLINE_CHARS = /* @__PURE__ */ new Set(["\n", "\r"]);
|
|
485
|
+
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
|
|
486
|
+
function findNewlineIndex(buffer, startIndex) {
|
|
487
|
+
const newline = 10;
|
|
488
|
+
const carriage = 13;
|
|
489
|
+
for (let i = startIndex ?? 0; i < buffer.length; i++) {
|
|
490
|
+
if (buffer[i] === newline) {
|
|
491
|
+
return { preceding: i, index: i + 1, carriage: false };
|
|
492
|
+
}
|
|
493
|
+
if (buffer[i] === carriage) {
|
|
494
|
+
return { preceding: i, index: i + 1, carriage: true };
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
return null;
|
|
498
|
+
}
|
|
499
|
+
function findDoubleNewlineIndex(buffer) {
|
|
500
|
+
const newline = 10;
|
|
501
|
+
const carriage = 13;
|
|
502
|
+
for (let i = 0; i < buffer.length - 1; i++) {
|
|
503
|
+
if (buffer[i] === newline && buffer[i + 1] === newline) {
|
|
504
|
+
return i + 2;
|
|
505
|
+
}
|
|
506
|
+
if (buffer[i] === carriage && buffer[i + 1] === carriage) {
|
|
507
|
+
return i + 2;
|
|
508
|
+
}
|
|
509
|
+
if (buffer[i] === carriage && buffer[i + 1] === newline && i + 3 < buffer.length && buffer[i + 2] === carriage && buffer[i + 3] === newline) {
|
|
510
|
+
return i + 4;
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
return -1;
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
|
|
517
|
+
var levelNumbers = {
|
|
518
|
+
off: 0,
|
|
519
|
+
error: 200,
|
|
520
|
+
warn: 300,
|
|
521
|
+
info: 400,
|
|
522
|
+
debug: 500
|
|
523
|
+
};
|
|
524
|
+
var parseLogLevel = (maybeLevel, sourceName, client) => {
|
|
525
|
+
if (!maybeLevel) {
|
|
526
|
+
return void 0;
|
|
527
|
+
}
|
|
528
|
+
if (hasOwn(levelNumbers, maybeLevel)) {
|
|
529
|
+
return maybeLevel;
|
|
530
|
+
}
|
|
531
|
+
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
|
532
|
+
return void 0;
|
|
533
|
+
};
|
|
534
|
+
function noop() {
|
|
535
|
+
}
|
|
536
|
+
function makeLogFn(fnLevel, logger, logLevel) {
|
|
537
|
+
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
|
538
|
+
return noop;
|
|
539
|
+
} else {
|
|
540
|
+
return logger[fnLevel].bind(logger);
|
|
541
|
+
}
|
|
542
|
+
}
|
|
543
|
+
var noopLogger = {
|
|
544
|
+
error: noop,
|
|
545
|
+
warn: noop,
|
|
546
|
+
info: noop,
|
|
547
|
+
debug: noop
|
|
548
|
+
};
|
|
549
|
+
var cachedLoggers = /* @__PURE__ */ new WeakMap();
|
|
550
|
+
function loggerFor(client) {
|
|
551
|
+
const logger = client.logger;
|
|
552
|
+
const logLevel = client.logLevel ?? "off";
|
|
553
|
+
if (!logger) {
|
|
554
|
+
return noopLogger;
|
|
555
|
+
}
|
|
556
|
+
const cachedLogger = cachedLoggers.get(logger);
|
|
557
|
+
if (cachedLogger && cachedLogger[0] === logLevel) {
|
|
558
|
+
return cachedLogger[1];
|
|
559
|
+
}
|
|
560
|
+
const levelLogger = {
|
|
561
|
+
error: makeLogFn("error", logger, logLevel),
|
|
562
|
+
warn: makeLogFn("warn", logger, logLevel),
|
|
563
|
+
info: makeLogFn("info", logger, logLevel),
|
|
564
|
+
debug: makeLogFn("debug", logger, logLevel)
|
|
565
|
+
};
|
|
566
|
+
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
|
567
|
+
return levelLogger;
|
|
568
|
+
}
|
|
569
|
+
var formatRequestDetails = (details) => {
|
|
570
|
+
if (details.options) {
|
|
571
|
+
details.options = { ...details.options };
|
|
572
|
+
delete details.options["headers"];
|
|
573
|
+
}
|
|
574
|
+
if (details.headers) {
|
|
575
|
+
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
|
576
|
+
name,
|
|
577
|
+
name.toLowerCase() === "x-api-key" || name.toLowerCase() === "authorization" || name.toLowerCase() === "cookie" || name.toLowerCase() === "set-cookie" ? "***" : value
|
|
578
|
+
]));
|
|
579
|
+
}
|
|
580
|
+
if ("retryOfRequestLogID" in details) {
|
|
581
|
+
if (details.retryOfRequestLogID) {
|
|
582
|
+
details.retryOf = details.retryOfRequestLogID;
|
|
583
|
+
}
|
|
584
|
+
delete details.retryOfRequestLogID;
|
|
585
|
+
}
|
|
586
|
+
return details;
|
|
587
|
+
};
|
|
588
|
+
|
|
589
|
+
// ../node_modules/@anthropic-ai/sdk/core/streaming.mjs
|
|
590
|
+
var _Stream_client;
|
|
591
|
+
var Stream = class _Stream {
|
|
592
|
+
constructor(iterator, controller, client) {
|
|
593
|
+
this.iterator = iterator;
|
|
594
|
+
_Stream_client.set(this, void 0);
|
|
595
|
+
this.controller = controller;
|
|
596
|
+
__classPrivateFieldSet(this, _Stream_client, client, "f");
|
|
597
|
+
}
|
|
598
|
+
static fromSSEResponse(response, controller, client) {
|
|
599
|
+
let consumed = false;
|
|
600
|
+
const logger = client ? loggerFor(client) : console;
|
|
601
|
+
async function* iterator() {
|
|
602
|
+
if (consumed) {
|
|
603
|
+
throw new AnthropicError("Cannot iterate over a consumed stream, use `.tee()` to split the stream.");
|
|
604
|
+
}
|
|
605
|
+
consumed = true;
|
|
606
|
+
let done = false;
|
|
607
|
+
try {
|
|
608
|
+
for await (const sse of _iterSSEMessages(response, controller)) {
|
|
609
|
+
if (sse.event === "completion") {
|
|
610
|
+
try {
|
|
611
|
+
yield JSON.parse(sse.data);
|
|
612
|
+
} catch (e) {
|
|
613
|
+
logger.error(`Could not parse message into JSON:`, sse.data);
|
|
614
|
+
logger.error(`From chunk:`, sse.raw);
|
|
615
|
+
throw e;
|
|
616
|
+
}
|
|
617
|
+
}
|
|
618
|
+
if (sse.event === "message_start" || sse.event === "message_delta" || sse.event === "message_stop" || sse.event === "content_block_start" || sse.event === "content_block_delta" || sse.event === "content_block_stop") {
|
|
619
|
+
try {
|
|
620
|
+
yield JSON.parse(sse.data);
|
|
621
|
+
} catch (e) {
|
|
622
|
+
logger.error(`Could not parse message into JSON:`, sse.data);
|
|
623
|
+
logger.error(`From chunk:`, sse.raw);
|
|
624
|
+
throw e;
|
|
625
|
+
}
|
|
626
|
+
}
|
|
627
|
+
if (sse.event === "ping") {
|
|
628
|
+
continue;
|
|
629
|
+
}
|
|
630
|
+
if (sse.event === "error") {
|
|
631
|
+
throw new APIError(void 0, safeJSON(sse.data) ?? sse.data, void 0, response.headers);
|
|
632
|
+
}
|
|
633
|
+
}
|
|
634
|
+
done = true;
|
|
635
|
+
} catch (e) {
|
|
636
|
+
if (isAbortError(e))
|
|
637
|
+
return;
|
|
638
|
+
throw e;
|
|
639
|
+
} finally {
|
|
640
|
+
if (!done)
|
|
641
|
+
controller.abort();
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
return new _Stream(iterator, controller, client);
|
|
645
|
+
}
|
|
646
|
+
/**
|
|
647
|
+
* Generates a Stream from a newline-separated ReadableStream
|
|
648
|
+
* where each item is a JSON value.
|
|
649
|
+
*/
|
|
650
|
+
static fromReadableStream(readableStream, controller, client) {
|
|
651
|
+
let consumed = false;
|
|
652
|
+
async function* iterLines() {
|
|
653
|
+
const lineDecoder = new LineDecoder();
|
|
654
|
+
const iter = ReadableStreamToAsyncIterable(readableStream);
|
|
655
|
+
for await (const chunk of iter) {
|
|
656
|
+
for (const line of lineDecoder.decode(chunk)) {
|
|
657
|
+
yield line;
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
for (const line of lineDecoder.flush()) {
|
|
661
|
+
yield line;
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
async function* iterator() {
|
|
665
|
+
if (consumed) {
|
|
666
|
+
throw new AnthropicError("Cannot iterate over a consumed stream, use `.tee()` to split the stream.");
|
|
667
|
+
}
|
|
668
|
+
consumed = true;
|
|
669
|
+
let done = false;
|
|
670
|
+
try {
|
|
671
|
+
for await (const line of iterLines()) {
|
|
672
|
+
if (done)
|
|
673
|
+
continue;
|
|
674
|
+
if (line)
|
|
675
|
+
yield JSON.parse(line);
|
|
676
|
+
}
|
|
677
|
+
done = true;
|
|
678
|
+
} catch (e) {
|
|
679
|
+
if (isAbortError(e))
|
|
680
|
+
return;
|
|
681
|
+
throw e;
|
|
682
|
+
} finally {
|
|
683
|
+
if (!done)
|
|
684
|
+
controller.abort();
|
|
685
|
+
}
|
|
686
|
+
}
|
|
687
|
+
return new _Stream(iterator, controller, client);
|
|
688
|
+
}
|
|
689
|
+
[(_Stream_client = /* @__PURE__ */ new WeakMap(), Symbol.asyncIterator)]() {
|
|
690
|
+
return this.iterator();
|
|
691
|
+
}
|
|
692
|
+
/**
|
|
693
|
+
* Splits the stream into two streams which can be
|
|
694
|
+
* independently read from at different speeds.
|
|
695
|
+
*/
|
|
696
|
+
tee() {
|
|
697
|
+
const left = [];
|
|
698
|
+
const right = [];
|
|
699
|
+
const iterator = this.iterator();
|
|
700
|
+
const teeIterator = (queue) => {
|
|
701
|
+
return {
|
|
702
|
+
next: () => {
|
|
703
|
+
if (queue.length === 0) {
|
|
704
|
+
const result = iterator.next();
|
|
705
|
+
left.push(result);
|
|
706
|
+
right.push(result);
|
|
707
|
+
}
|
|
708
|
+
return queue.shift();
|
|
709
|
+
}
|
|
710
|
+
};
|
|
711
|
+
};
|
|
712
|
+
return [
|
|
713
|
+
new _Stream(() => teeIterator(left), this.controller, __classPrivateFieldGet(this, _Stream_client, "f")),
|
|
714
|
+
new _Stream(() => teeIterator(right), this.controller, __classPrivateFieldGet(this, _Stream_client, "f"))
|
|
715
|
+
];
|
|
716
|
+
}
|
|
717
|
+
/**
|
|
718
|
+
* Converts this stream to a newline-separated ReadableStream of
|
|
719
|
+
* JSON stringified values in the stream
|
|
720
|
+
* which can be turned back into a Stream with `Stream.fromReadableStream()`.
|
|
721
|
+
*/
|
|
722
|
+
toReadableStream() {
|
|
723
|
+
const self = this;
|
|
724
|
+
let iter;
|
|
725
|
+
return makeReadableStream({
|
|
726
|
+
async start() {
|
|
727
|
+
iter = self[Symbol.asyncIterator]();
|
|
728
|
+
},
|
|
729
|
+
async pull(ctrl) {
|
|
730
|
+
try {
|
|
731
|
+
const { value, done } = await iter.next();
|
|
732
|
+
if (done)
|
|
733
|
+
return ctrl.close();
|
|
734
|
+
const bytes = encodeUTF8(JSON.stringify(value) + "\n");
|
|
735
|
+
ctrl.enqueue(bytes);
|
|
736
|
+
} catch (err) {
|
|
737
|
+
ctrl.error(err);
|
|
738
|
+
}
|
|
739
|
+
},
|
|
740
|
+
async cancel() {
|
|
741
|
+
await iter.return?.();
|
|
742
|
+
}
|
|
743
|
+
});
|
|
744
|
+
}
|
|
745
|
+
};
|
|
746
|
+
async function* _iterSSEMessages(response, controller) {
|
|
747
|
+
if (!response.body) {
|
|
748
|
+
controller.abort();
|
|
749
|
+
if (typeof globalThis.navigator !== "undefined" && globalThis.navigator.product === "ReactNative") {
|
|
750
|
+
throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
|
|
751
|
+
}
|
|
752
|
+
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
|
753
|
+
}
|
|
754
|
+
const sseDecoder = new SSEDecoder();
|
|
755
|
+
const lineDecoder = new LineDecoder();
|
|
756
|
+
const iter = ReadableStreamToAsyncIterable(response.body);
|
|
757
|
+
for await (const sseChunk of iterSSEChunks(iter)) {
|
|
758
|
+
for (const line of lineDecoder.decode(sseChunk)) {
|
|
759
|
+
const sse = sseDecoder.decode(line);
|
|
760
|
+
if (sse)
|
|
761
|
+
yield sse;
|
|
762
|
+
}
|
|
763
|
+
}
|
|
764
|
+
for (const line of lineDecoder.flush()) {
|
|
765
|
+
const sse = sseDecoder.decode(line);
|
|
766
|
+
if (sse)
|
|
767
|
+
yield sse;
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
async function* iterSSEChunks(iterator) {
|
|
771
|
+
let data = new Uint8Array();
|
|
772
|
+
for await (const chunk of iterator) {
|
|
773
|
+
if (chunk == null) {
|
|
774
|
+
continue;
|
|
775
|
+
}
|
|
776
|
+
const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk) : typeof chunk === "string" ? encodeUTF8(chunk) : chunk;
|
|
777
|
+
let newData = new Uint8Array(data.length + binaryChunk.length);
|
|
778
|
+
newData.set(data);
|
|
779
|
+
newData.set(binaryChunk, data.length);
|
|
780
|
+
data = newData;
|
|
781
|
+
let patternIndex;
|
|
782
|
+
while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) {
|
|
783
|
+
yield data.slice(0, patternIndex);
|
|
784
|
+
data = data.slice(patternIndex);
|
|
785
|
+
}
|
|
786
|
+
}
|
|
787
|
+
if (data.length > 0) {
|
|
788
|
+
yield data;
|
|
789
|
+
}
|
|
790
|
+
}
|
|
791
|
+
var SSEDecoder = class {
|
|
792
|
+
constructor() {
|
|
793
|
+
this.event = null;
|
|
794
|
+
this.data = [];
|
|
795
|
+
this.chunks = [];
|
|
796
|
+
}
|
|
797
|
+
decode(line) {
|
|
798
|
+
if (line.endsWith("\r")) {
|
|
799
|
+
line = line.substring(0, line.length - 1);
|
|
800
|
+
}
|
|
801
|
+
if (!line) {
|
|
802
|
+
if (!this.event && !this.data.length)
|
|
803
|
+
return null;
|
|
804
|
+
const sse = {
|
|
805
|
+
event: this.event,
|
|
806
|
+
data: this.data.join("\n"),
|
|
807
|
+
raw: this.chunks
|
|
808
|
+
};
|
|
809
|
+
this.event = null;
|
|
810
|
+
this.data = [];
|
|
811
|
+
this.chunks = [];
|
|
812
|
+
return sse;
|
|
813
|
+
}
|
|
814
|
+
this.chunks.push(line);
|
|
815
|
+
if (line.startsWith(":")) {
|
|
816
|
+
return null;
|
|
817
|
+
}
|
|
818
|
+
let [fieldname, _, value] = partition(line, ":");
|
|
819
|
+
if (value.startsWith(" ")) {
|
|
820
|
+
value = value.substring(1);
|
|
821
|
+
}
|
|
822
|
+
if (fieldname === "event") {
|
|
823
|
+
this.event = value;
|
|
824
|
+
} else if (fieldname === "data") {
|
|
825
|
+
this.data.push(value);
|
|
826
|
+
}
|
|
827
|
+
return null;
|
|
828
|
+
}
|
|
829
|
+
};
|
|
830
|
+
function partition(str, delimiter) {
|
|
831
|
+
const index = str.indexOf(delimiter);
|
|
832
|
+
if (index !== -1) {
|
|
833
|
+
return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];
|
|
834
|
+
}
|
|
835
|
+
return [str, "", ""];
|
|
836
|
+
}
|
|
837
|
+
|
|
838
|
+
// ../node_modules/@anthropic-ai/sdk/internal/parse.mjs
|
|
839
|
+
async function defaultParseResponse(client, props) {
|
|
840
|
+
const { response, requestLogID, retryOfRequestLogID, startTime } = props;
|
|
841
|
+
const body = await (async () => {
|
|
842
|
+
if (props.options.stream) {
|
|
843
|
+
loggerFor(client).debug("response", response.status, response.url, response.headers, response.body);
|
|
844
|
+
if (props.options.__streamClass) {
|
|
845
|
+
return props.options.__streamClass.fromSSEResponse(response, props.controller);
|
|
846
|
+
}
|
|
847
|
+
return Stream.fromSSEResponse(response, props.controller);
|
|
848
|
+
}
|
|
849
|
+
if (response.status === 204) {
|
|
850
|
+
return null;
|
|
851
|
+
}
|
|
852
|
+
if (props.options.__binaryResponse) {
|
|
853
|
+
return response;
|
|
854
|
+
}
|
|
855
|
+
const contentType = response.headers.get("content-type");
|
|
856
|
+
const mediaType = contentType?.split(";")[0]?.trim();
|
|
857
|
+
const isJSON = mediaType?.includes("application/json") || mediaType?.endsWith("+json");
|
|
858
|
+
if (isJSON) {
|
|
859
|
+
const contentLength = response.headers.get("content-length");
|
|
860
|
+
if (contentLength === "0") {
|
|
861
|
+
return void 0;
|
|
862
|
+
}
|
|
863
|
+
const json = await response.json();
|
|
864
|
+
return addRequestID(json, response);
|
|
865
|
+
}
|
|
866
|
+
const text = await response.text();
|
|
867
|
+
return text;
|
|
868
|
+
})();
|
|
869
|
+
loggerFor(client).debug(`[${requestLogID}] response parsed`, formatRequestDetails({
|
|
870
|
+
retryOfRequestLogID,
|
|
871
|
+
url: response.url,
|
|
872
|
+
status: response.status,
|
|
873
|
+
body,
|
|
874
|
+
durationMs: Date.now() - startTime
|
|
875
|
+
}));
|
|
876
|
+
return body;
|
|
877
|
+
}
|
|
878
|
+
function addRequestID(value, response) {
|
|
879
|
+
if (!value || typeof value !== "object" || Array.isArray(value)) {
|
|
880
|
+
return value;
|
|
881
|
+
}
|
|
882
|
+
return Object.defineProperty(value, "_request_id", {
|
|
883
|
+
value: response.headers.get("request-id"),
|
|
884
|
+
enumerable: false
|
|
885
|
+
});
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
// ../node_modules/@anthropic-ai/sdk/core/api-promise.mjs
|
|
889
|
+
var _APIPromise_client;
|
|
890
|
+
var APIPromise = class _APIPromise extends Promise {
|
|
891
|
+
constructor(client, responsePromise, parseResponse = defaultParseResponse) {
|
|
892
|
+
super((resolve) => {
|
|
893
|
+
resolve(null);
|
|
894
|
+
});
|
|
895
|
+
this.responsePromise = responsePromise;
|
|
896
|
+
this.parseResponse = parseResponse;
|
|
897
|
+
_APIPromise_client.set(this, void 0);
|
|
898
|
+
__classPrivateFieldSet(this, _APIPromise_client, client, "f");
|
|
899
|
+
}
|
|
900
|
+
_thenUnwrap(transform) {
|
|
901
|
+
return new _APIPromise(__classPrivateFieldGet(this, _APIPromise_client, "f"), this.responsePromise, async (client, props) => addRequestID(transform(await this.parseResponse(client, props), props), props.response));
|
|
902
|
+
}
|
|
903
|
+
/**
|
|
904
|
+
* Gets the raw `Response` instance instead of parsing the response
|
|
905
|
+
* data.
|
|
906
|
+
*
|
|
907
|
+
* If you want to parse the response body but still get the `Response`
|
|
908
|
+
* instance, you can use {@link withResponse()}.
|
|
909
|
+
*
|
|
910
|
+
* 👋 Getting the wrong TypeScript type for `Response`?
|
|
911
|
+
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
|
912
|
+
* to your `tsconfig.json`.
|
|
913
|
+
*/
|
|
914
|
+
asResponse() {
|
|
915
|
+
return this.responsePromise.then((p) => p.response);
|
|
916
|
+
}
|
|
917
|
+
/**
|
|
918
|
+
* Gets the parsed response data, the raw `Response` instance and the ID of the request,
|
|
919
|
+
* returned via the `request-id` header which is useful for debugging requests and resporting
|
|
920
|
+
* issues to Anthropic.
|
|
921
|
+
*
|
|
922
|
+
* If you just want to get the raw `Response` instance without parsing it,
|
|
923
|
+
* you can use {@link asResponse()}.
|
|
924
|
+
*
|
|
925
|
+
* 👋 Getting the wrong TypeScript type for `Response`?
|
|
926
|
+
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
|
927
|
+
* to your `tsconfig.json`.
|
|
928
|
+
*/
|
|
929
|
+
async withResponse() {
|
|
930
|
+
const [data, response] = await Promise.all([this.parse(), this.asResponse()]);
|
|
931
|
+
return { data, response, request_id: response.headers.get("request-id") };
|
|
932
|
+
}
|
|
933
|
+
parse() {
|
|
934
|
+
if (!this.parsedPromise) {
|
|
935
|
+
this.parsedPromise = this.responsePromise.then((data) => this.parseResponse(__classPrivateFieldGet(this, _APIPromise_client, "f"), data));
|
|
936
|
+
}
|
|
937
|
+
return this.parsedPromise;
|
|
938
|
+
}
|
|
939
|
+
then(onfulfilled, onrejected) {
|
|
940
|
+
return this.parse().then(onfulfilled, onrejected);
|
|
941
|
+
}
|
|
942
|
+
catch(onrejected) {
|
|
943
|
+
return this.parse().catch(onrejected);
|
|
944
|
+
}
|
|
945
|
+
finally(onfinally) {
|
|
946
|
+
return this.parse().finally(onfinally);
|
|
947
|
+
}
|
|
948
|
+
};
|
|
949
|
+
_APIPromise_client = /* @__PURE__ */ new WeakMap();
|
|
950
|
+
|
|
951
|
+
// ../node_modules/@anthropic-ai/sdk/core/pagination.mjs
|
|
952
|
+
var _AbstractPage_client;
|
|
953
|
+
var AbstractPage = class {
|
|
954
|
+
constructor(client, response, body, options) {
|
|
955
|
+
_AbstractPage_client.set(this, void 0);
|
|
956
|
+
__classPrivateFieldSet(this, _AbstractPage_client, client, "f");
|
|
957
|
+
this.options = options;
|
|
958
|
+
this.response = response;
|
|
959
|
+
this.body = body;
|
|
960
|
+
}
|
|
961
|
+
hasNextPage() {
|
|
962
|
+
const items = this.getPaginatedItems();
|
|
963
|
+
if (!items.length)
|
|
964
|
+
return false;
|
|
965
|
+
return this.nextPageRequestOptions() != null;
|
|
966
|
+
}
|
|
967
|
+
async getNextPage() {
|
|
968
|
+
const nextOptions = this.nextPageRequestOptions();
|
|
969
|
+
if (!nextOptions) {
|
|
970
|
+
throw new AnthropicError("No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.");
|
|
971
|
+
}
|
|
972
|
+
return await __classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions);
|
|
973
|
+
}
|
|
974
|
+
async *iterPages() {
|
|
975
|
+
let page = this;
|
|
976
|
+
yield page;
|
|
977
|
+
while (page.hasNextPage()) {
|
|
978
|
+
page = await page.getNextPage();
|
|
979
|
+
yield page;
|
|
980
|
+
}
|
|
981
|
+
}
|
|
982
|
+
async *[(_AbstractPage_client = /* @__PURE__ */ new WeakMap(), Symbol.asyncIterator)]() {
|
|
983
|
+
for await (const page of this.iterPages()) {
|
|
984
|
+
for (const item of page.getPaginatedItems()) {
|
|
985
|
+
yield item;
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
}
|
|
989
|
+
};
|
|
990
|
+
var PagePromise = class extends APIPromise {
|
|
991
|
+
constructor(client, request, Page2) {
|
|
992
|
+
super(client, request, async (client2, props) => new Page2(client2, props.response, await defaultParseResponse(client2, props), props.options));
|
|
993
|
+
}
|
|
994
|
+
/**
|
|
995
|
+
* Allow auto-paginating iteration on an unawaited list call, eg:
|
|
996
|
+
*
|
|
997
|
+
* for await (const item of client.items.list()) {
|
|
998
|
+
* console.log(item)
|
|
999
|
+
* }
|
|
1000
|
+
*/
|
|
1001
|
+
async *[Symbol.asyncIterator]() {
|
|
1002
|
+
const page = await this;
|
|
1003
|
+
for await (const item of page) {
|
|
1004
|
+
yield item;
|
|
1005
|
+
}
|
|
1006
|
+
}
|
|
1007
|
+
};
|
|
1008
|
+
var Page = class extends AbstractPage {
|
|
1009
|
+
constructor(client, response, body, options) {
|
|
1010
|
+
super(client, response, body, options);
|
|
1011
|
+
this.data = body.data || [];
|
|
1012
|
+
this.has_more = body.has_more || false;
|
|
1013
|
+
this.first_id = body.first_id || null;
|
|
1014
|
+
this.last_id = body.last_id || null;
|
|
1015
|
+
}
|
|
1016
|
+
getPaginatedItems() {
|
|
1017
|
+
return this.data ?? [];
|
|
1018
|
+
}
|
|
1019
|
+
hasNextPage() {
|
|
1020
|
+
if (this.has_more === false) {
|
|
1021
|
+
return false;
|
|
1022
|
+
}
|
|
1023
|
+
return super.hasNextPage();
|
|
1024
|
+
}
|
|
1025
|
+
nextPageRequestOptions() {
|
|
1026
|
+
if (this.options.query?.["before_id"]) {
|
|
1027
|
+
const first_id = this.first_id;
|
|
1028
|
+
if (!first_id) {
|
|
1029
|
+
return null;
|
|
1030
|
+
}
|
|
1031
|
+
return {
|
|
1032
|
+
...this.options,
|
|
1033
|
+
query: {
|
|
1034
|
+
...maybeObj(this.options.query),
|
|
1035
|
+
before_id: first_id
|
|
1036
|
+
}
|
|
1037
|
+
};
|
|
1038
|
+
}
|
|
1039
|
+
const cursor = this.last_id;
|
|
1040
|
+
if (!cursor) {
|
|
1041
|
+
return null;
|
|
1042
|
+
}
|
|
1043
|
+
return {
|
|
1044
|
+
...this.options,
|
|
1045
|
+
query: {
|
|
1046
|
+
...maybeObj(this.options.query),
|
|
1047
|
+
after_id: cursor
|
|
1048
|
+
}
|
|
1049
|
+
};
|
|
1050
|
+
}
|
|
1051
|
+
};
|
|
1052
|
+
var PageCursor = class extends AbstractPage {
|
|
1053
|
+
constructor(client, response, body, options) {
|
|
1054
|
+
super(client, response, body, options);
|
|
1055
|
+
this.data = body.data || [];
|
|
1056
|
+
this.has_more = body.has_more || false;
|
|
1057
|
+
this.next_page = body.next_page || null;
|
|
1058
|
+
}
|
|
1059
|
+
getPaginatedItems() {
|
|
1060
|
+
return this.data ?? [];
|
|
1061
|
+
}
|
|
1062
|
+
hasNextPage() {
|
|
1063
|
+
if (this.has_more === false) {
|
|
1064
|
+
return false;
|
|
1065
|
+
}
|
|
1066
|
+
return super.hasNextPage();
|
|
1067
|
+
}
|
|
1068
|
+
nextPageRequestOptions() {
|
|
1069
|
+
const cursor = this.next_page;
|
|
1070
|
+
if (!cursor) {
|
|
1071
|
+
return null;
|
|
1072
|
+
}
|
|
1073
|
+
return {
|
|
1074
|
+
...this.options,
|
|
1075
|
+
query: {
|
|
1076
|
+
...maybeObj(this.options.query),
|
|
1077
|
+
page: cursor
|
|
1078
|
+
}
|
|
1079
|
+
};
|
|
1080
|
+
}
|
|
1081
|
+
};
|
|
1082
|
+
|
|
1083
|
+
// ../node_modules/@anthropic-ai/sdk/internal/uploads.mjs
|
|
1084
|
+
var checkFileSupport = () => {
|
|
1085
|
+
if (typeof File === "undefined") {
|
|
1086
|
+
const { process } = globalThis;
|
|
1087
|
+
const isOldNode = typeof process?.versions?.node === "string" && parseInt(process.versions.node.split(".")) < 20;
|
|
1088
|
+
throw new Error("`File` is not defined as a global, which is required for file uploads." + (isOldNode ? " Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`." : ""));
|
|
1089
|
+
}
|
|
1090
|
+
};
|
|
1091
|
+
function makeFile(fileBits, fileName, options) {
|
|
1092
|
+
checkFileSupport();
|
|
1093
|
+
return new File(fileBits, fileName ?? "unknown_file", options);
|
|
1094
|
+
}
|
|
1095
|
+
function getName(value, stripPath) {
|
|
1096
|
+
const val = typeof value === "object" && value !== null && ("name" in value && value.name && String(value.name) || "url" in value && value.url && String(value.url) || "filename" in value && value.filename && String(value.filename) || "path" in value && value.path && String(value.path)) || "";
|
|
1097
|
+
return stripPath ? val.split(/[\\/]/).pop() || void 0 : val;
|
|
1098
|
+
}
|
|
1099
|
+
var isAsyncIterable = (value) => value != null && typeof value === "object" && typeof value[Symbol.asyncIterator] === "function";
|
|
1100
|
+
var multipartFormRequestOptions = async (opts, fetch2, stripFilenames = true) => {
|
|
1101
|
+
return { ...opts, body: await createForm(opts.body, fetch2, stripFilenames) };
|
|
1102
|
+
};
|
|
1103
|
+
var supportsFormDataMap = /* @__PURE__ */ new WeakMap();
|
|
1104
|
+
function supportsFormData(fetchObject) {
|
|
1105
|
+
const fetch2 = typeof fetchObject === "function" ? fetchObject : fetchObject.fetch;
|
|
1106
|
+
const cached = supportsFormDataMap.get(fetch2);
|
|
1107
|
+
if (cached)
|
|
1108
|
+
return cached;
|
|
1109
|
+
const promise = (async () => {
|
|
1110
|
+
try {
|
|
1111
|
+
const FetchResponse = "Response" in fetch2 ? fetch2.Response : (await fetch2("data:,")).constructor;
|
|
1112
|
+
const data = new FormData();
|
|
1113
|
+
if (data.toString() === await new FetchResponse(data).text()) {
|
|
1114
|
+
return false;
|
|
1115
|
+
}
|
|
1116
|
+
return true;
|
|
1117
|
+
} catch {
|
|
1118
|
+
return true;
|
|
1119
|
+
}
|
|
1120
|
+
})();
|
|
1121
|
+
supportsFormDataMap.set(fetch2, promise);
|
|
1122
|
+
return promise;
|
|
1123
|
+
}
|
|
1124
|
+
var createForm = async (body, fetch2, stripFilenames = true) => {
|
|
1125
|
+
if (!await supportsFormData(fetch2)) {
|
|
1126
|
+
throw new TypeError("The provided fetch function does not support file uploads with the current global FormData class.");
|
|
1127
|
+
}
|
|
1128
|
+
const form = new FormData();
|
|
1129
|
+
await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value, stripFilenames)));
|
|
1130
|
+
return form;
|
|
1131
|
+
};
|
|
1132
|
+
var isNamedBlob = (value) => value instanceof Blob && "name" in value;
|
|
1133
|
+
var addFormValue = async (form, key, value, stripFilenames) => {
|
|
1134
|
+
if (value === void 0)
|
|
1135
|
+
return;
|
|
1136
|
+
if (value == null) {
|
|
1137
|
+
throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`);
|
|
1138
|
+
}
|
|
1139
|
+
if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
|
|
1140
|
+
form.append(key, String(value));
|
|
1141
|
+
} else if (value instanceof Response) {
|
|
1142
|
+
let options = {};
|
|
1143
|
+
const contentType = value.headers.get("Content-Type");
|
|
1144
|
+
if (contentType) {
|
|
1145
|
+
options = { type: contentType };
|
|
1146
|
+
}
|
|
1147
|
+
form.append(key, makeFile([await value.blob()], getName(value, stripFilenames), options));
|
|
1148
|
+
} else if (isAsyncIterable(value)) {
|
|
1149
|
+
form.append(key, makeFile([await new Response(ReadableStreamFrom(value)).blob()], getName(value, stripFilenames)));
|
|
1150
|
+
} else if (isNamedBlob(value)) {
|
|
1151
|
+
form.append(key, makeFile([value], getName(value, stripFilenames), { type: value.type }));
|
|
1152
|
+
} else if (Array.isArray(value)) {
|
|
1153
|
+
await Promise.all(value.map((entry) => addFormValue(form, key + "[]", entry, stripFilenames)));
|
|
1154
|
+
} else if (typeof value === "object") {
|
|
1155
|
+
await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop, stripFilenames)));
|
|
1156
|
+
} else {
|
|
1157
|
+
throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);
|
|
1158
|
+
}
|
|
1159
|
+
};
|
|
1160
|
+
|
|
1161
|
+
// ../node_modules/@anthropic-ai/sdk/internal/to-file.mjs
|
|
1162
|
+
var isBlobLike = (value) => value != null && typeof value === "object" && typeof value.size === "number" && typeof value.type === "string" && typeof value.text === "function" && typeof value.slice === "function" && typeof value.arrayBuffer === "function";
|
|
1163
|
+
var isFileLike = (value) => value != null && typeof value === "object" && typeof value.name === "string" && typeof value.lastModified === "number" && isBlobLike(value);
|
|
1164
|
+
var isResponseLike = (value) => value != null && typeof value === "object" && typeof value.url === "string" && typeof value.blob === "function";
|
|
1165
|
+
async function toFile(value, name, options) {
|
|
1166
|
+
checkFileSupport();
|
|
1167
|
+
value = await value;
|
|
1168
|
+
name || (name = getName(value, true));
|
|
1169
|
+
if (isFileLike(value)) {
|
|
1170
|
+
if (value instanceof File && name == null && options == null) {
|
|
1171
|
+
return value;
|
|
1172
|
+
}
|
|
1173
|
+
return makeFile([await value.arrayBuffer()], name ?? value.name, {
|
|
1174
|
+
type: value.type,
|
|
1175
|
+
lastModified: value.lastModified,
|
|
1176
|
+
...options
|
|
1177
|
+
});
|
|
1178
|
+
}
|
|
1179
|
+
if (isResponseLike(value)) {
|
|
1180
|
+
const blob = await value.blob();
|
|
1181
|
+
name || (name = new URL(value.url).pathname.split(/[\\/]/).pop());
|
|
1182
|
+
return makeFile(await getBytes(blob), name, options);
|
|
1183
|
+
}
|
|
1184
|
+
const parts = await getBytes(value);
|
|
1185
|
+
if (!options?.type) {
|
|
1186
|
+
const type = parts.find((part) => typeof part === "object" && "type" in part && part.type);
|
|
1187
|
+
if (typeof type === "string") {
|
|
1188
|
+
options = { ...options, type };
|
|
1189
|
+
}
|
|
1190
|
+
}
|
|
1191
|
+
return makeFile(parts, name, options);
|
|
1192
|
+
}
|
|
1193
|
+
async function getBytes(value) {
|
|
1194
|
+
let parts = [];
|
|
1195
|
+
if (typeof value === "string" || ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.
|
|
1196
|
+
value instanceof ArrayBuffer) {
|
|
1197
|
+
parts.push(value);
|
|
1198
|
+
} else if (isBlobLike(value)) {
|
|
1199
|
+
parts.push(value instanceof Blob ? value : await value.arrayBuffer());
|
|
1200
|
+
} else if (isAsyncIterable(value)) {
|
|
1201
|
+
for await (const chunk of value) {
|
|
1202
|
+
parts.push(...await getBytes(chunk));
|
|
1203
|
+
}
|
|
1204
|
+
} else {
|
|
1205
|
+
const constructor = value?.constructor?.name;
|
|
1206
|
+
throw new Error(`Unexpected data type: ${typeof value}${constructor ? `; constructor: ${constructor}` : ""}${propsForError(value)}`);
|
|
1207
|
+
}
|
|
1208
|
+
return parts;
|
|
1209
|
+
}
|
|
1210
|
+
function propsForError(value) {
|
|
1211
|
+
if (typeof value !== "object" || value === null)
|
|
1212
|
+
return "";
|
|
1213
|
+
const props = Object.getOwnPropertyNames(value);
|
|
1214
|
+
return `; props: [${props.map((p) => `"${p}"`).join(", ")}]`;
|
|
1215
|
+
}
|
|
1216
|
+
|
|
1217
|
+
// ../node_modules/@anthropic-ai/sdk/core/resource.mjs
|
|
1218
|
+
var APIResource = class {
|
|
1219
|
+
constructor(client) {
|
|
1220
|
+
this._client = client;
|
|
1221
|
+
}
|
|
1222
|
+
};
|
|
1223
|
+
|
|
1224
|
+
// ../node_modules/@anthropic-ai/sdk/internal/headers.mjs
|
|
1225
|
+
var brand_privateNullableHeaders = /* @__PURE__ */ Symbol.for("brand.privateNullableHeaders");
|
|
1226
|
+
function* iterateHeaders(headers) {
|
|
1227
|
+
if (!headers)
|
|
1228
|
+
return;
|
|
1229
|
+
if (brand_privateNullableHeaders in headers) {
|
|
1230
|
+
const { values, nulls } = headers;
|
|
1231
|
+
yield* values.entries();
|
|
1232
|
+
for (const name of nulls) {
|
|
1233
|
+
yield [name, null];
|
|
1234
|
+
}
|
|
1235
|
+
return;
|
|
1236
|
+
}
|
|
1237
|
+
let shouldClear = false;
|
|
1238
|
+
let iter;
|
|
1239
|
+
if (headers instanceof Headers) {
|
|
1240
|
+
iter = headers.entries();
|
|
1241
|
+
} else if (isReadonlyArray(headers)) {
|
|
1242
|
+
iter = headers;
|
|
1243
|
+
} else {
|
|
1244
|
+
shouldClear = true;
|
|
1245
|
+
iter = Object.entries(headers ?? {});
|
|
1246
|
+
}
|
|
1247
|
+
for (let row of iter) {
|
|
1248
|
+
const name = row[0];
|
|
1249
|
+
if (typeof name !== "string")
|
|
1250
|
+
throw new TypeError("expected header name to be a string");
|
|
1251
|
+
const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
|
|
1252
|
+
let didClear = false;
|
|
1253
|
+
for (const value of values) {
|
|
1254
|
+
if (value === void 0)
|
|
1255
|
+
continue;
|
|
1256
|
+
if (shouldClear && !didClear) {
|
|
1257
|
+
didClear = true;
|
|
1258
|
+
yield [name, null];
|
|
1259
|
+
}
|
|
1260
|
+
yield [name, value];
|
|
1261
|
+
}
|
|
1262
|
+
}
|
|
1263
|
+
}
|
|
1264
|
+
var buildHeaders = (newHeaders) => {
|
|
1265
|
+
const targetHeaders = new Headers();
|
|
1266
|
+
const nullHeaders = /* @__PURE__ */ new Set();
|
|
1267
|
+
for (const headers of newHeaders) {
|
|
1268
|
+
const seenHeaders = /* @__PURE__ */ new Set();
|
|
1269
|
+
for (const [name, value] of iterateHeaders(headers)) {
|
|
1270
|
+
const lowerName = name.toLowerCase();
|
|
1271
|
+
if (!seenHeaders.has(lowerName)) {
|
|
1272
|
+
targetHeaders.delete(name);
|
|
1273
|
+
seenHeaders.add(lowerName);
|
|
1274
|
+
}
|
|
1275
|
+
if (value === null) {
|
|
1276
|
+
targetHeaders.delete(name);
|
|
1277
|
+
nullHeaders.add(lowerName);
|
|
1278
|
+
} else {
|
|
1279
|
+
targetHeaders.append(name, value);
|
|
1280
|
+
nullHeaders.delete(lowerName);
|
|
1281
|
+
}
|
|
1282
|
+
}
|
|
1283
|
+
}
|
|
1284
|
+
return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
|
|
1285
|
+
};
|
|
1286
|
+
|
|
1287
|
+
// ../node_modules/@anthropic-ai/sdk/lib/stainless-helper-header.mjs
|
|
1288
|
+
var SDK_HELPER_SYMBOL = /* @__PURE__ */ Symbol("anthropic.sdk.stainlessHelper");
|
|
1289
|
+
function wasCreatedByStainlessHelper(value) {
|
|
1290
|
+
return typeof value === "object" && value !== null && SDK_HELPER_SYMBOL in value;
|
|
1291
|
+
}
|
|
1292
|
+
function collectStainlessHelpers(tools, messages) {
|
|
1293
|
+
const helpers = /* @__PURE__ */ new Set();
|
|
1294
|
+
if (tools) {
|
|
1295
|
+
for (const tool of tools) {
|
|
1296
|
+
if (wasCreatedByStainlessHelper(tool)) {
|
|
1297
|
+
helpers.add(tool[SDK_HELPER_SYMBOL]);
|
|
1298
|
+
}
|
|
1299
|
+
}
|
|
1300
|
+
}
|
|
1301
|
+
if (messages) {
|
|
1302
|
+
for (const message of messages) {
|
|
1303
|
+
if (wasCreatedByStainlessHelper(message)) {
|
|
1304
|
+
helpers.add(message[SDK_HELPER_SYMBOL]);
|
|
1305
|
+
}
|
|
1306
|
+
if (Array.isArray(message.content)) {
|
|
1307
|
+
for (const block of message.content) {
|
|
1308
|
+
if (wasCreatedByStainlessHelper(block)) {
|
|
1309
|
+
helpers.add(block[SDK_HELPER_SYMBOL]);
|
|
1310
|
+
}
|
|
1311
|
+
}
|
|
1312
|
+
}
|
|
1313
|
+
}
|
|
1314
|
+
}
|
|
1315
|
+
return Array.from(helpers);
|
|
1316
|
+
}
|
|
1317
|
+
function stainlessHelperHeader(tools, messages) {
|
|
1318
|
+
const helpers = collectStainlessHelpers(tools, messages);
|
|
1319
|
+
if (helpers.length === 0)
|
|
1320
|
+
return {};
|
|
1321
|
+
return { "x-stainless-helper": helpers.join(", ") };
|
|
1322
|
+
}
|
|
1323
|
+
function stainlessHelperHeaderFromFile(file) {
|
|
1324
|
+
if (wasCreatedByStainlessHelper(file)) {
|
|
1325
|
+
return { "x-stainless-helper": file[SDK_HELPER_SYMBOL] };
|
|
1326
|
+
}
|
|
1327
|
+
return {};
|
|
1328
|
+
}
|
|
1329
|
+
|
|
1330
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
|
|
1331
|
+
function encodeURIPath(str) {
|
|
1332
|
+
return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent);
|
|
1333
|
+
}
|
|
1334
|
+
var EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
|
|
1335
|
+
var createPathTagFunction = (pathEncoder = encodeURIPath) => function path2(statics, ...params) {
|
|
1336
|
+
if (statics.length === 1)
|
|
1337
|
+
return statics[0];
|
|
1338
|
+
let postPath = false;
|
|
1339
|
+
const invalidSegments = [];
|
|
1340
|
+
const path3 = statics.reduce((previousValue, currentValue, index) => {
|
|
1341
|
+
if (/[?#]/.test(currentValue)) {
|
|
1342
|
+
postPath = true;
|
|
1343
|
+
}
|
|
1344
|
+
const value = params[index];
|
|
1345
|
+
let encoded = (postPath ? encodeURIComponent : pathEncoder)("" + value);
|
|
1346
|
+
if (index !== params.length && (value == null || typeof value === "object" && // handle values from other realms
|
|
1347
|
+
value.toString === Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)?.toString)) {
|
|
1348
|
+
encoded = value + "";
|
|
1349
|
+
invalidSegments.push({
|
|
1350
|
+
start: previousValue.length + currentValue.length,
|
|
1351
|
+
length: encoded.length,
|
|
1352
|
+
error: `Value of type ${Object.prototype.toString.call(value).slice(8, -1)} is not a valid path parameter`
|
|
1353
|
+
});
|
|
1354
|
+
}
|
|
1355
|
+
return previousValue + currentValue + (index === params.length ? "" : encoded);
|
|
1356
|
+
}, "");
|
|
1357
|
+
const pathOnly = path3.split(/[?#]/, 1)[0];
|
|
1358
|
+
const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
|
|
1359
|
+
let match;
|
|
1360
|
+
while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
|
|
1361
|
+
invalidSegments.push({
|
|
1362
|
+
start: match.index,
|
|
1363
|
+
length: match[0].length,
|
|
1364
|
+
error: `Value "${match[0]}" can't be safely passed as a path parameter`
|
|
1365
|
+
});
|
|
1366
|
+
}
|
|
1367
|
+
invalidSegments.sort((a, b) => a.start - b.start);
|
|
1368
|
+
if (invalidSegments.length > 0) {
|
|
1369
|
+
let lastEnd = 0;
|
|
1370
|
+
const underline = invalidSegments.reduce((acc, segment) => {
|
|
1371
|
+
const spaces = " ".repeat(segment.start - lastEnd);
|
|
1372
|
+
const arrows = "^".repeat(segment.length);
|
|
1373
|
+
lastEnd = segment.start + segment.length;
|
|
1374
|
+
return acc + spaces + arrows;
|
|
1375
|
+
}, "");
|
|
1376
|
+
throw new AnthropicError(`Path parameters result in path with invalid segments:
|
|
1377
|
+
${invalidSegments.map((e) => e.error).join("\n")}
|
|
1378
|
+
${path3}
|
|
1379
|
+
${underline}`);
|
|
1380
|
+
}
|
|
1381
|
+
return path3;
|
|
1382
|
+
};
|
|
1383
|
+
var path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
|
1384
|
+
|
|
1385
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/files.mjs
|
|
1386
|
+
var Files = class extends APIResource {
|
|
1387
|
+
/**
|
|
1388
|
+
* List Files
|
|
1389
|
+
*
|
|
1390
|
+
* @example
|
|
1391
|
+
* ```ts
|
|
1392
|
+
* // Automatically fetches more pages as needed.
|
|
1393
|
+
* for await (const fileMetadata of client.beta.files.list()) {
|
|
1394
|
+
* // ...
|
|
1395
|
+
* }
|
|
1396
|
+
* ```
|
|
1397
|
+
*/
|
|
1398
|
+
list(params = {}, options) {
|
|
1399
|
+
const { betas, ...query } = params ?? {};
|
|
1400
|
+
return this._client.getAPIList("/v1/files", Page, {
|
|
1401
|
+
query,
|
|
1402
|
+
...options,
|
|
1403
|
+
headers: buildHeaders([
|
|
1404
|
+
{ "anthropic-beta": [...betas ?? [], "files-api-2025-04-14"].toString() },
|
|
1405
|
+
options?.headers
|
|
1406
|
+
])
|
|
1407
|
+
});
|
|
1408
|
+
}
|
|
1409
|
+
/**
|
|
1410
|
+
* Delete File
|
|
1411
|
+
*
|
|
1412
|
+
* @example
|
|
1413
|
+
* ```ts
|
|
1414
|
+
* const deletedFile = await client.beta.files.delete(
|
|
1415
|
+
* 'file_id',
|
|
1416
|
+
* );
|
|
1417
|
+
* ```
|
|
1418
|
+
*/
|
|
1419
|
+
delete(fileID, params = {}, options) {
|
|
1420
|
+
const { betas } = params ?? {};
|
|
1421
|
+
return this._client.delete(path`/v1/files/${fileID}`, {
|
|
1422
|
+
...options,
|
|
1423
|
+
headers: buildHeaders([
|
|
1424
|
+
{ "anthropic-beta": [...betas ?? [], "files-api-2025-04-14"].toString() },
|
|
1425
|
+
options?.headers
|
|
1426
|
+
])
|
|
1427
|
+
});
|
|
1428
|
+
}
|
|
1429
|
+
/**
|
|
1430
|
+
* Download File
|
|
1431
|
+
*
|
|
1432
|
+
* @example
|
|
1433
|
+
* ```ts
|
|
1434
|
+
* const response = await client.beta.files.download(
|
|
1435
|
+
* 'file_id',
|
|
1436
|
+
* );
|
|
1437
|
+
*
|
|
1438
|
+
* const content = await response.blob();
|
|
1439
|
+
* console.log(content);
|
|
1440
|
+
* ```
|
|
1441
|
+
*/
|
|
1442
|
+
download(fileID, params = {}, options) {
|
|
1443
|
+
const { betas } = params ?? {};
|
|
1444
|
+
return this._client.get(path`/v1/files/${fileID}/content`, {
|
|
1445
|
+
...options,
|
|
1446
|
+
headers: buildHeaders([
|
|
1447
|
+
{
|
|
1448
|
+
"anthropic-beta": [...betas ?? [], "files-api-2025-04-14"].toString(),
|
|
1449
|
+
Accept: "application/binary"
|
|
1450
|
+
},
|
|
1451
|
+
options?.headers
|
|
1452
|
+
]),
|
|
1453
|
+
__binaryResponse: true
|
|
1454
|
+
});
|
|
1455
|
+
}
|
|
1456
|
+
/**
|
|
1457
|
+
* Get File Metadata
|
|
1458
|
+
*
|
|
1459
|
+
* @example
|
|
1460
|
+
* ```ts
|
|
1461
|
+
* const fileMetadata =
|
|
1462
|
+
* await client.beta.files.retrieveMetadata('file_id');
|
|
1463
|
+
* ```
|
|
1464
|
+
*/
|
|
1465
|
+
retrieveMetadata(fileID, params = {}, options) {
|
|
1466
|
+
const { betas } = params ?? {};
|
|
1467
|
+
return this._client.get(path`/v1/files/${fileID}`, {
|
|
1468
|
+
...options,
|
|
1469
|
+
headers: buildHeaders([
|
|
1470
|
+
{ "anthropic-beta": [...betas ?? [], "files-api-2025-04-14"].toString() },
|
|
1471
|
+
options?.headers
|
|
1472
|
+
])
|
|
1473
|
+
});
|
|
1474
|
+
}
|
|
1475
|
+
/**
|
|
1476
|
+
* Upload File
|
|
1477
|
+
*
|
|
1478
|
+
* @example
|
|
1479
|
+
* ```ts
|
|
1480
|
+
* const fileMetadata = await client.beta.files.upload({
|
|
1481
|
+
* file: fs.createReadStream('path/to/file'),
|
|
1482
|
+
* });
|
|
1483
|
+
* ```
|
|
1484
|
+
*/
|
|
1485
|
+
upload(params, options) {
|
|
1486
|
+
const { betas, ...body } = params;
|
|
1487
|
+
return this._client.post("/v1/files", multipartFormRequestOptions({
|
|
1488
|
+
body,
|
|
1489
|
+
...options,
|
|
1490
|
+
headers: buildHeaders([
|
|
1491
|
+
{ "anthropic-beta": [...betas ?? [], "files-api-2025-04-14"].toString() },
|
|
1492
|
+
stainlessHelperHeaderFromFile(body.file),
|
|
1493
|
+
options?.headers
|
|
1494
|
+
])
|
|
1495
|
+
}, this._client));
|
|
1496
|
+
}
|
|
1497
|
+
};
|
|
1498
|
+
|
|
1499
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/models.mjs
|
|
1500
|
+
var Models = class extends APIResource {
|
|
1501
|
+
/**
|
|
1502
|
+
* Get a specific model.
|
|
1503
|
+
*
|
|
1504
|
+
* The Models API response can be used to determine information about a specific
|
|
1505
|
+
* model or resolve a model alias to a model ID.
|
|
1506
|
+
*
|
|
1507
|
+
* @example
|
|
1508
|
+
* ```ts
|
|
1509
|
+
* const betaModelInfo = await client.beta.models.retrieve(
|
|
1510
|
+
* 'model_id',
|
|
1511
|
+
* );
|
|
1512
|
+
* ```
|
|
1513
|
+
*/
|
|
1514
|
+
retrieve(modelID, params = {}, options) {
|
|
1515
|
+
const { betas } = params ?? {};
|
|
1516
|
+
return this._client.get(path`/v1/models/${modelID}?beta=true`, {
|
|
1517
|
+
...options,
|
|
1518
|
+
headers: buildHeaders([
|
|
1519
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
1520
|
+
options?.headers
|
|
1521
|
+
])
|
|
1522
|
+
});
|
|
1523
|
+
}
|
|
1524
|
+
/**
|
|
1525
|
+
* List available models.
|
|
1526
|
+
*
|
|
1527
|
+
* The Models API response can be used to determine which models are available for
|
|
1528
|
+
* use in the API. More recently released models are listed first.
|
|
1529
|
+
*
|
|
1530
|
+
* @example
|
|
1531
|
+
* ```ts
|
|
1532
|
+
* // Automatically fetches more pages as needed.
|
|
1533
|
+
* for await (const betaModelInfo of client.beta.models.list()) {
|
|
1534
|
+
* // ...
|
|
1535
|
+
* }
|
|
1536
|
+
* ```
|
|
1537
|
+
*/
|
|
1538
|
+
list(params = {}, options) {
|
|
1539
|
+
const { betas, ...query } = params ?? {};
|
|
1540
|
+
return this._client.getAPIList("/v1/models?beta=true", Page, {
|
|
1541
|
+
query,
|
|
1542
|
+
...options,
|
|
1543
|
+
headers: buildHeaders([
|
|
1544
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
1545
|
+
options?.headers
|
|
1546
|
+
])
|
|
1547
|
+
});
|
|
1548
|
+
}
|
|
1549
|
+
};
|
|
1550
|
+
|
|
1551
|
+
// ../node_modules/@anthropic-ai/sdk/internal/constants.mjs
|
|
1552
|
+
var MODEL_NONSTREAMING_TOKENS = {
|
|
1553
|
+
"claude-opus-4-20250514": 8192,
|
|
1554
|
+
"claude-opus-4-0": 8192,
|
|
1555
|
+
"claude-4-opus-20250514": 8192,
|
|
1556
|
+
"anthropic.claude-opus-4-20250514-v1:0": 8192,
|
|
1557
|
+
"claude-opus-4@20250514": 8192,
|
|
1558
|
+
"claude-opus-4-1-20250805": 8192,
|
|
1559
|
+
"anthropic.claude-opus-4-1-20250805-v1:0": 8192,
|
|
1560
|
+
"claude-opus-4-1@20250805": 8192
|
|
1561
|
+
};
|
|
1562
|
+
|
|
1563
|
+
// ../node_modules/@anthropic-ai/sdk/lib/beta-parser.mjs
|
|
1564
|
+
function getOutputFormat(params) {
|
|
1565
|
+
return params?.output_format ?? params?.output_config?.format;
|
|
1566
|
+
}
|
|
1567
|
+
function maybeParseBetaMessage(message, params, opts) {
|
|
1568
|
+
const outputFormat = getOutputFormat(params);
|
|
1569
|
+
if (!params || !("parse" in (outputFormat ?? {}))) {
|
|
1570
|
+
return {
|
|
1571
|
+
...message,
|
|
1572
|
+
content: message.content.map((block) => {
|
|
1573
|
+
if (block.type === "text") {
|
|
1574
|
+
const parsedBlock = Object.defineProperty({ ...block }, "parsed_output", {
|
|
1575
|
+
value: null,
|
|
1576
|
+
enumerable: false
|
|
1577
|
+
});
|
|
1578
|
+
return Object.defineProperty(parsedBlock, "parsed", {
|
|
1579
|
+
get() {
|
|
1580
|
+
opts.logger.warn("The `parsed` property on `text` blocks is deprecated, please use `parsed_output` instead.");
|
|
1581
|
+
return null;
|
|
1582
|
+
},
|
|
1583
|
+
enumerable: false
|
|
1584
|
+
});
|
|
1585
|
+
}
|
|
1586
|
+
return block;
|
|
1587
|
+
}),
|
|
1588
|
+
parsed_output: null
|
|
1589
|
+
};
|
|
1590
|
+
}
|
|
1591
|
+
return parseBetaMessage(message, params, opts);
|
|
1592
|
+
}
|
|
1593
|
+
function parseBetaMessage(message, params, opts) {
|
|
1594
|
+
let firstParsedOutput = null;
|
|
1595
|
+
const content = message.content.map((block) => {
|
|
1596
|
+
if (block.type === "text") {
|
|
1597
|
+
const parsedOutput = parseBetaOutputFormat(params, block.text);
|
|
1598
|
+
if (firstParsedOutput === null) {
|
|
1599
|
+
firstParsedOutput = parsedOutput;
|
|
1600
|
+
}
|
|
1601
|
+
const parsedBlock = Object.defineProperty({ ...block }, "parsed_output", {
|
|
1602
|
+
value: parsedOutput,
|
|
1603
|
+
enumerable: false
|
|
1604
|
+
});
|
|
1605
|
+
return Object.defineProperty(parsedBlock, "parsed", {
|
|
1606
|
+
get() {
|
|
1607
|
+
opts.logger.warn("The `parsed` property on `text` blocks is deprecated, please use `parsed_output` instead.");
|
|
1608
|
+
return parsedOutput;
|
|
1609
|
+
},
|
|
1610
|
+
enumerable: false
|
|
1611
|
+
});
|
|
1612
|
+
}
|
|
1613
|
+
return block;
|
|
1614
|
+
});
|
|
1615
|
+
return {
|
|
1616
|
+
...message,
|
|
1617
|
+
content,
|
|
1618
|
+
parsed_output: firstParsedOutput
|
|
1619
|
+
};
|
|
1620
|
+
}
|
|
1621
|
+
function parseBetaOutputFormat(params, content) {
|
|
1622
|
+
const outputFormat = getOutputFormat(params);
|
|
1623
|
+
if (outputFormat?.type !== "json_schema") {
|
|
1624
|
+
return null;
|
|
1625
|
+
}
|
|
1626
|
+
try {
|
|
1627
|
+
if ("parse" in outputFormat) {
|
|
1628
|
+
return outputFormat.parse(content);
|
|
1629
|
+
}
|
|
1630
|
+
return JSON.parse(content);
|
|
1631
|
+
} catch (error) {
|
|
1632
|
+
throw new AnthropicError(`Failed to parse structured output: ${error}`);
|
|
1633
|
+
}
|
|
1634
|
+
}
|
|
1635
|
+
|
|
1636
|
+
// ../node_modules/@anthropic-ai/sdk/_vendor/partial-json-parser/parser.mjs
|
|
1637
|
+
var tokenize = (input) => {
|
|
1638
|
+
let current = 0;
|
|
1639
|
+
let tokens = [];
|
|
1640
|
+
while (current < input.length) {
|
|
1641
|
+
let char = input[current];
|
|
1642
|
+
if (char === "\\") {
|
|
1643
|
+
current++;
|
|
1644
|
+
continue;
|
|
1645
|
+
}
|
|
1646
|
+
if (char === "{") {
|
|
1647
|
+
tokens.push({
|
|
1648
|
+
type: "brace",
|
|
1649
|
+
value: "{"
|
|
1650
|
+
});
|
|
1651
|
+
current++;
|
|
1652
|
+
continue;
|
|
1653
|
+
}
|
|
1654
|
+
if (char === "}") {
|
|
1655
|
+
tokens.push({
|
|
1656
|
+
type: "brace",
|
|
1657
|
+
value: "}"
|
|
1658
|
+
});
|
|
1659
|
+
current++;
|
|
1660
|
+
continue;
|
|
1661
|
+
}
|
|
1662
|
+
if (char === "[") {
|
|
1663
|
+
tokens.push({
|
|
1664
|
+
type: "paren",
|
|
1665
|
+
value: "["
|
|
1666
|
+
});
|
|
1667
|
+
current++;
|
|
1668
|
+
continue;
|
|
1669
|
+
}
|
|
1670
|
+
if (char === "]") {
|
|
1671
|
+
tokens.push({
|
|
1672
|
+
type: "paren",
|
|
1673
|
+
value: "]"
|
|
1674
|
+
});
|
|
1675
|
+
current++;
|
|
1676
|
+
continue;
|
|
1677
|
+
}
|
|
1678
|
+
if (char === ":") {
|
|
1679
|
+
tokens.push({
|
|
1680
|
+
type: "separator",
|
|
1681
|
+
value: ":"
|
|
1682
|
+
});
|
|
1683
|
+
current++;
|
|
1684
|
+
continue;
|
|
1685
|
+
}
|
|
1686
|
+
if (char === ",") {
|
|
1687
|
+
tokens.push({
|
|
1688
|
+
type: "delimiter",
|
|
1689
|
+
value: ","
|
|
1690
|
+
});
|
|
1691
|
+
current++;
|
|
1692
|
+
continue;
|
|
1693
|
+
}
|
|
1694
|
+
if (char === '"') {
|
|
1695
|
+
let value = "";
|
|
1696
|
+
let danglingQuote = false;
|
|
1697
|
+
char = input[++current];
|
|
1698
|
+
while (char !== '"') {
|
|
1699
|
+
if (current === input.length) {
|
|
1700
|
+
danglingQuote = true;
|
|
1701
|
+
break;
|
|
1702
|
+
}
|
|
1703
|
+
if (char === "\\") {
|
|
1704
|
+
current++;
|
|
1705
|
+
if (current === input.length) {
|
|
1706
|
+
danglingQuote = true;
|
|
1707
|
+
break;
|
|
1708
|
+
}
|
|
1709
|
+
value += char + input[current];
|
|
1710
|
+
char = input[++current];
|
|
1711
|
+
} else {
|
|
1712
|
+
value += char;
|
|
1713
|
+
char = input[++current];
|
|
1714
|
+
}
|
|
1715
|
+
}
|
|
1716
|
+
char = input[++current];
|
|
1717
|
+
if (!danglingQuote) {
|
|
1718
|
+
tokens.push({
|
|
1719
|
+
type: "string",
|
|
1720
|
+
value
|
|
1721
|
+
});
|
|
1722
|
+
}
|
|
1723
|
+
continue;
|
|
1724
|
+
}
|
|
1725
|
+
let WHITESPACE = /\s/;
|
|
1726
|
+
if (char && WHITESPACE.test(char)) {
|
|
1727
|
+
current++;
|
|
1728
|
+
continue;
|
|
1729
|
+
}
|
|
1730
|
+
let NUMBERS = /[0-9]/;
|
|
1731
|
+
if (char && NUMBERS.test(char) || char === "-" || char === ".") {
|
|
1732
|
+
let value = "";
|
|
1733
|
+
if (char === "-") {
|
|
1734
|
+
value += char;
|
|
1735
|
+
char = input[++current];
|
|
1736
|
+
}
|
|
1737
|
+
while (char && NUMBERS.test(char) || char === ".") {
|
|
1738
|
+
value += char;
|
|
1739
|
+
char = input[++current];
|
|
1740
|
+
}
|
|
1741
|
+
tokens.push({
|
|
1742
|
+
type: "number",
|
|
1743
|
+
value
|
|
1744
|
+
});
|
|
1745
|
+
continue;
|
|
1746
|
+
}
|
|
1747
|
+
let LETTERS = /[a-z]/i;
|
|
1748
|
+
if (char && LETTERS.test(char)) {
|
|
1749
|
+
let value = "";
|
|
1750
|
+
while (char && LETTERS.test(char)) {
|
|
1751
|
+
if (current === input.length) {
|
|
1752
|
+
break;
|
|
1753
|
+
}
|
|
1754
|
+
value += char;
|
|
1755
|
+
char = input[++current];
|
|
1756
|
+
}
|
|
1757
|
+
if (value == "true" || value == "false" || value === "null") {
|
|
1758
|
+
tokens.push({
|
|
1759
|
+
type: "name",
|
|
1760
|
+
value
|
|
1761
|
+
});
|
|
1762
|
+
} else {
|
|
1763
|
+
current++;
|
|
1764
|
+
continue;
|
|
1765
|
+
}
|
|
1766
|
+
continue;
|
|
1767
|
+
}
|
|
1768
|
+
current++;
|
|
1769
|
+
}
|
|
1770
|
+
return tokens;
|
|
1771
|
+
};
|
|
1772
|
+
var strip = (tokens) => {
|
|
1773
|
+
if (tokens.length === 0) {
|
|
1774
|
+
return tokens;
|
|
1775
|
+
}
|
|
1776
|
+
let lastToken = tokens[tokens.length - 1];
|
|
1777
|
+
switch (lastToken.type) {
|
|
1778
|
+
case "separator":
|
|
1779
|
+
tokens = tokens.slice(0, tokens.length - 1);
|
|
1780
|
+
return strip(tokens);
|
|
1781
|
+
break;
|
|
1782
|
+
case "number":
|
|
1783
|
+
let lastCharacterOfLastToken = lastToken.value[lastToken.value.length - 1];
|
|
1784
|
+
if (lastCharacterOfLastToken === "." || lastCharacterOfLastToken === "-") {
|
|
1785
|
+
tokens = tokens.slice(0, tokens.length - 1);
|
|
1786
|
+
return strip(tokens);
|
|
1787
|
+
}
|
|
1788
|
+
case "string":
|
|
1789
|
+
let tokenBeforeTheLastToken = tokens[tokens.length - 2];
|
|
1790
|
+
if (tokenBeforeTheLastToken?.type === "delimiter") {
|
|
1791
|
+
tokens = tokens.slice(0, tokens.length - 1);
|
|
1792
|
+
return strip(tokens);
|
|
1793
|
+
} else if (tokenBeforeTheLastToken?.type === "brace" && tokenBeforeTheLastToken.value === "{") {
|
|
1794
|
+
tokens = tokens.slice(0, tokens.length - 1);
|
|
1795
|
+
return strip(tokens);
|
|
1796
|
+
}
|
|
1797
|
+
break;
|
|
1798
|
+
case "delimiter":
|
|
1799
|
+
tokens = tokens.slice(0, tokens.length - 1);
|
|
1800
|
+
return strip(tokens);
|
|
1801
|
+
break;
|
|
1802
|
+
}
|
|
1803
|
+
return tokens;
|
|
1804
|
+
};
|
|
1805
|
+
var unstrip = (tokens) => {
|
|
1806
|
+
let tail = [];
|
|
1807
|
+
tokens.map((token) => {
|
|
1808
|
+
if (token.type === "brace") {
|
|
1809
|
+
if (token.value === "{") {
|
|
1810
|
+
tail.push("}");
|
|
1811
|
+
} else {
|
|
1812
|
+
tail.splice(tail.lastIndexOf("}"), 1);
|
|
1813
|
+
}
|
|
1814
|
+
}
|
|
1815
|
+
if (token.type === "paren") {
|
|
1816
|
+
if (token.value === "[") {
|
|
1817
|
+
tail.push("]");
|
|
1818
|
+
} else {
|
|
1819
|
+
tail.splice(tail.lastIndexOf("]"), 1);
|
|
1820
|
+
}
|
|
1821
|
+
}
|
|
1822
|
+
});
|
|
1823
|
+
if (tail.length > 0) {
|
|
1824
|
+
tail.reverse().map((item) => {
|
|
1825
|
+
if (item === "}") {
|
|
1826
|
+
tokens.push({
|
|
1827
|
+
type: "brace",
|
|
1828
|
+
value: "}"
|
|
1829
|
+
});
|
|
1830
|
+
} else if (item === "]") {
|
|
1831
|
+
tokens.push({
|
|
1832
|
+
type: "paren",
|
|
1833
|
+
value: "]"
|
|
1834
|
+
});
|
|
1835
|
+
}
|
|
1836
|
+
});
|
|
1837
|
+
}
|
|
1838
|
+
return tokens;
|
|
1839
|
+
};
|
|
1840
|
+
var generate = (tokens) => {
|
|
1841
|
+
let output = "";
|
|
1842
|
+
tokens.map((token) => {
|
|
1843
|
+
switch (token.type) {
|
|
1844
|
+
case "string":
|
|
1845
|
+
output += '"' + token.value + '"';
|
|
1846
|
+
break;
|
|
1847
|
+
default:
|
|
1848
|
+
output += token.value;
|
|
1849
|
+
break;
|
|
1850
|
+
}
|
|
1851
|
+
});
|
|
1852
|
+
return output;
|
|
1853
|
+
};
|
|
1854
|
+
var partialParse = (input) => JSON.parse(generate(unstrip(strip(tokenize(input)))));
|
|
1855
|
+
|
|
1856
|
+
// ../node_modules/@anthropic-ai/sdk/lib/BetaMessageStream.mjs
|
|
1857
|
+
var _BetaMessageStream_instances;
|
|
1858
|
+
var _BetaMessageStream_currentMessageSnapshot;
|
|
1859
|
+
var _BetaMessageStream_params;
|
|
1860
|
+
var _BetaMessageStream_connectedPromise;
|
|
1861
|
+
var _BetaMessageStream_resolveConnectedPromise;
|
|
1862
|
+
var _BetaMessageStream_rejectConnectedPromise;
|
|
1863
|
+
var _BetaMessageStream_endPromise;
|
|
1864
|
+
var _BetaMessageStream_resolveEndPromise;
|
|
1865
|
+
var _BetaMessageStream_rejectEndPromise;
|
|
1866
|
+
var _BetaMessageStream_listeners;
|
|
1867
|
+
var _BetaMessageStream_ended;
|
|
1868
|
+
var _BetaMessageStream_errored;
|
|
1869
|
+
var _BetaMessageStream_aborted;
|
|
1870
|
+
var _BetaMessageStream_catchingPromiseCreated;
|
|
1871
|
+
var _BetaMessageStream_response;
|
|
1872
|
+
var _BetaMessageStream_request_id;
|
|
1873
|
+
var _BetaMessageStream_logger;
|
|
1874
|
+
var _BetaMessageStream_getFinalMessage;
|
|
1875
|
+
var _BetaMessageStream_getFinalText;
|
|
1876
|
+
var _BetaMessageStream_handleError;
|
|
1877
|
+
var _BetaMessageStream_beginRequest;
|
|
1878
|
+
var _BetaMessageStream_addStreamEvent;
|
|
1879
|
+
var _BetaMessageStream_endRequest;
|
|
1880
|
+
var _BetaMessageStream_accumulateMessage;
|
|
1881
|
+
var JSON_BUF_PROPERTY = "__json_buf";
|
|
1882
|
+
function tracksToolInput(content) {
|
|
1883
|
+
return content.type === "tool_use" || content.type === "server_tool_use" || content.type === "mcp_tool_use";
|
|
1884
|
+
}
|
|
1885
|
+
var BetaMessageStream = class _BetaMessageStream {
|
|
1886
|
+
constructor(params, opts) {
|
|
1887
|
+
_BetaMessageStream_instances.add(this);
|
|
1888
|
+
this.messages = [];
|
|
1889
|
+
this.receivedMessages = [];
|
|
1890
|
+
_BetaMessageStream_currentMessageSnapshot.set(this, void 0);
|
|
1891
|
+
_BetaMessageStream_params.set(this, null);
|
|
1892
|
+
this.controller = new AbortController();
|
|
1893
|
+
_BetaMessageStream_connectedPromise.set(this, void 0);
|
|
1894
|
+
_BetaMessageStream_resolveConnectedPromise.set(this, () => {
|
|
1895
|
+
});
|
|
1896
|
+
_BetaMessageStream_rejectConnectedPromise.set(this, () => {
|
|
1897
|
+
});
|
|
1898
|
+
_BetaMessageStream_endPromise.set(this, void 0);
|
|
1899
|
+
_BetaMessageStream_resolveEndPromise.set(this, () => {
|
|
1900
|
+
});
|
|
1901
|
+
_BetaMessageStream_rejectEndPromise.set(this, () => {
|
|
1902
|
+
});
|
|
1903
|
+
_BetaMessageStream_listeners.set(this, {});
|
|
1904
|
+
_BetaMessageStream_ended.set(this, false);
|
|
1905
|
+
_BetaMessageStream_errored.set(this, false);
|
|
1906
|
+
_BetaMessageStream_aborted.set(this, false);
|
|
1907
|
+
_BetaMessageStream_catchingPromiseCreated.set(this, false);
|
|
1908
|
+
_BetaMessageStream_response.set(this, void 0);
|
|
1909
|
+
_BetaMessageStream_request_id.set(this, void 0);
|
|
1910
|
+
_BetaMessageStream_logger.set(this, void 0);
|
|
1911
|
+
_BetaMessageStream_handleError.set(this, (error) => {
|
|
1912
|
+
__classPrivateFieldSet(this, _BetaMessageStream_errored, true, "f");
|
|
1913
|
+
if (isAbortError(error)) {
|
|
1914
|
+
error = new APIUserAbortError();
|
|
1915
|
+
}
|
|
1916
|
+
if (error instanceof APIUserAbortError) {
|
|
1917
|
+
__classPrivateFieldSet(this, _BetaMessageStream_aborted, true, "f");
|
|
1918
|
+
return this._emit("abort", error);
|
|
1919
|
+
}
|
|
1920
|
+
if (error instanceof AnthropicError) {
|
|
1921
|
+
return this._emit("error", error);
|
|
1922
|
+
}
|
|
1923
|
+
if (error instanceof Error) {
|
|
1924
|
+
const anthropicError = new AnthropicError(error.message);
|
|
1925
|
+
anthropicError.cause = error;
|
|
1926
|
+
return this._emit("error", anthropicError);
|
|
1927
|
+
}
|
|
1928
|
+
return this._emit("error", new AnthropicError(String(error)));
|
|
1929
|
+
});
|
|
1930
|
+
__classPrivateFieldSet(this, _BetaMessageStream_connectedPromise, new Promise((resolve, reject) => {
|
|
1931
|
+
__classPrivateFieldSet(this, _BetaMessageStream_resolveConnectedPromise, resolve, "f");
|
|
1932
|
+
__classPrivateFieldSet(this, _BetaMessageStream_rejectConnectedPromise, reject, "f");
|
|
1933
|
+
}), "f");
|
|
1934
|
+
__classPrivateFieldSet(this, _BetaMessageStream_endPromise, new Promise((resolve, reject) => {
|
|
1935
|
+
__classPrivateFieldSet(this, _BetaMessageStream_resolveEndPromise, resolve, "f");
|
|
1936
|
+
__classPrivateFieldSet(this, _BetaMessageStream_rejectEndPromise, reject, "f");
|
|
1937
|
+
}), "f");
|
|
1938
|
+
__classPrivateFieldGet(this, _BetaMessageStream_connectedPromise, "f").catch(() => {
|
|
1939
|
+
});
|
|
1940
|
+
__classPrivateFieldGet(this, _BetaMessageStream_endPromise, "f").catch(() => {
|
|
1941
|
+
});
|
|
1942
|
+
__classPrivateFieldSet(this, _BetaMessageStream_params, params, "f");
|
|
1943
|
+
__classPrivateFieldSet(this, _BetaMessageStream_logger, opts?.logger ?? console, "f");
|
|
1944
|
+
}
|
|
1945
|
+
get response() {
|
|
1946
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_response, "f");
|
|
1947
|
+
}
|
|
1948
|
+
get request_id() {
|
|
1949
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_request_id, "f");
|
|
1950
|
+
}
|
|
1951
|
+
/**
|
|
1952
|
+
* Returns the `MessageStream` data, the raw `Response` instance and the ID of the request,
|
|
1953
|
+
* returned vie the `request-id` header which is useful for debugging requests and resporting
|
|
1954
|
+
* issues to Anthropic.
|
|
1955
|
+
*
|
|
1956
|
+
* This is the same as the `APIPromise.withResponse()` method.
|
|
1957
|
+
*
|
|
1958
|
+
* This method will raise an error if you created the stream using `MessageStream.fromReadableStream`
|
|
1959
|
+
* as no `Response` is available.
|
|
1960
|
+
*/
|
|
1961
|
+
async withResponse() {
|
|
1962
|
+
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
|
1963
|
+
const response = await __classPrivateFieldGet(this, _BetaMessageStream_connectedPromise, "f");
|
|
1964
|
+
if (!response) {
|
|
1965
|
+
throw new Error("Could not resolve a `Response` object");
|
|
1966
|
+
}
|
|
1967
|
+
return {
|
|
1968
|
+
data: this,
|
|
1969
|
+
response,
|
|
1970
|
+
request_id: response.headers.get("request-id")
|
|
1971
|
+
};
|
|
1972
|
+
}
|
|
1973
|
+
/**
|
|
1974
|
+
* Intended for use on the frontend, consuming a stream produced with
|
|
1975
|
+
* `.toReadableStream()` on the backend.
|
|
1976
|
+
*
|
|
1977
|
+
* Note that messages sent to the model do not appear in `.on('message')`
|
|
1978
|
+
* in this context.
|
|
1979
|
+
*/
|
|
1980
|
+
static fromReadableStream(stream) {
|
|
1981
|
+
const runner = new _BetaMessageStream(null);
|
|
1982
|
+
runner._run(() => runner._fromReadableStream(stream));
|
|
1983
|
+
return runner;
|
|
1984
|
+
}
|
|
1985
|
+
static createMessage(messages, params, options, { logger } = {}) {
|
|
1986
|
+
const runner = new _BetaMessageStream(params, { logger });
|
|
1987
|
+
for (const message of params.messages) {
|
|
1988
|
+
runner._addMessageParam(message);
|
|
1989
|
+
}
|
|
1990
|
+
__classPrivateFieldSet(runner, _BetaMessageStream_params, { ...params, stream: true }, "f");
|
|
1991
|
+
runner._run(() => runner._createMessage(messages, { ...params, stream: true }, { ...options, headers: { ...options?.headers, "X-Stainless-Helper-Method": "stream" } }));
|
|
1992
|
+
return runner;
|
|
1993
|
+
}
|
|
1994
|
+
_run(executor) {
|
|
1995
|
+
executor().then(() => {
|
|
1996
|
+
this._emitFinal();
|
|
1997
|
+
this._emit("end");
|
|
1998
|
+
}, __classPrivateFieldGet(this, _BetaMessageStream_handleError, "f"));
|
|
1999
|
+
}
|
|
2000
|
+
_addMessageParam(message) {
|
|
2001
|
+
this.messages.push(message);
|
|
2002
|
+
}
|
|
2003
|
+
_addMessage(message, emit = true) {
|
|
2004
|
+
this.receivedMessages.push(message);
|
|
2005
|
+
if (emit) {
|
|
2006
|
+
this._emit("message", message);
|
|
2007
|
+
}
|
|
2008
|
+
}
|
|
2009
|
+
async _createMessage(messages, params, options) {
|
|
2010
|
+
const signal = options?.signal;
|
|
2011
|
+
let abortHandler;
|
|
2012
|
+
if (signal) {
|
|
2013
|
+
if (signal.aborted)
|
|
2014
|
+
this.controller.abort();
|
|
2015
|
+
abortHandler = this.controller.abort.bind(this.controller);
|
|
2016
|
+
signal.addEventListener("abort", abortHandler);
|
|
2017
|
+
}
|
|
2018
|
+
try {
|
|
2019
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_beginRequest).call(this);
|
|
2020
|
+
const { response, data: stream } = await messages.create({ ...params, stream: true }, { ...options, signal: this.controller.signal }).withResponse();
|
|
2021
|
+
this._connected(response);
|
|
2022
|
+
for await (const event of stream) {
|
|
2023
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_addStreamEvent).call(this, event);
|
|
2024
|
+
}
|
|
2025
|
+
if (stream.controller.signal?.aborted) {
|
|
2026
|
+
throw new APIUserAbortError();
|
|
2027
|
+
}
|
|
2028
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_endRequest).call(this);
|
|
2029
|
+
} finally {
|
|
2030
|
+
if (signal && abortHandler) {
|
|
2031
|
+
signal.removeEventListener("abort", abortHandler);
|
|
2032
|
+
}
|
|
2033
|
+
}
|
|
2034
|
+
}
|
|
2035
|
+
_connected(response) {
|
|
2036
|
+
if (this.ended)
|
|
2037
|
+
return;
|
|
2038
|
+
__classPrivateFieldSet(this, _BetaMessageStream_response, response, "f");
|
|
2039
|
+
__classPrivateFieldSet(this, _BetaMessageStream_request_id, response?.headers.get("request-id"), "f");
|
|
2040
|
+
__classPrivateFieldGet(this, _BetaMessageStream_resolveConnectedPromise, "f").call(this, response);
|
|
2041
|
+
this._emit("connect");
|
|
2042
|
+
}
|
|
2043
|
+
get ended() {
|
|
2044
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_ended, "f");
|
|
2045
|
+
}
|
|
2046
|
+
get errored() {
|
|
2047
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_errored, "f");
|
|
2048
|
+
}
|
|
2049
|
+
get aborted() {
|
|
2050
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_aborted, "f");
|
|
2051
|
+
}
|
|
2052
|
+
abort() {
|
|
2053
|
+
this.controller.abort();
|
|
2054
|
+
}
|
|
2055
|
+
/**
|
|
2056
|
+
* Adds the listener function to the end of the listeners array for the event.
|
|
2057
|
+
* No checks are made to see if the listener has already been added. Multiple calls passing
|
|
2058
|
+
* the same combination of event and listener will result in the listener being added, and
|
|
2059
|
+
* called, multiple times.
|
|
2060
|
+
* @returns this MessageStream, so that calls can be chained
|
|
2061
|
+
*/
|
|
2062
|
+
on(event, listener) {
|
|
2063
|
+
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = []);
|
|
2064
|
+
listeners.push({ listener });
|
|
2065
|
+
return this;
|
|
2066
|
+
}
|
|
2067
|
+
/**
|
|
2068
|
+
* Removes the specified listener from the listener array for the event.
|
|
2069
|
+
* off() will remove, at most, one instance of a listener from the listener array. If any single
|
|
2070
|
+
* listener has been added multiple times to the listener array for the specified event, then
|
|
2071
|
+
* off() must be called multiple times to remove each instance.
|
|
2072
|
+
* @returns this MessageStream, so that calls can be chained
|
|
2073
|
+
*/
|
|
2074
|
+
off(event, listener) {
|
|
2075
|
+
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event];
|
|
2076
|
+
if (!listeners)
|
|
2077
|
+
return this;
|
|
2078
|
+
const index = listeners.findIndex((l) => l.listener === listener);
|
|
2079
|
+
if (index >= 0)
|
|
2080
|
+
listeners.splice(index, 1);
|
|
2081
|
+
return this;
|
|
2082
|
+
}
|
|
2083
|
+
/**
|
|
2084
|
+
* Adds a one-time listener function for the event. The next time the event is triggered,
|
|
2085
|
+
* this listener is removed and then invoked.
|
|
2086
|
+
* @returns this MessageStream, so that calls can be chained
|
|
2087
|
+
*/
|
|
2088
|
+
once(event, listener) {
|
|
2089
|
+
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = []);
|
|
2090
|
+
listeners.push({ listener, once: true });
|
|
2091
|
+
return this;
|
|
2092
|
+
}
|
|
2093
|
+
/**
|
|
2094
|
+
* This is similar to `.once()`, but returns a Promise that resolves the next time
|
|
2095
|
+
* the event is triggered, instead of calling a listener callback.
|
|
2096
|
+
* @returns a Promise that resolves the next time given event is triggered,
|
|
2097
|
+
* or rejects if an error is emitted. (If you request the 'error' event,
|
|
2098
|
+
* returns a promise that resolves with the error).
|
|
2099
|
+
*
|
|
2100
|
+
* Example:
|
|
2101
|
+
*
|
|
2102
|
+
* const message = await stream.emitted('message') // rejects if the stream errors
|
|
2103
|
+
*/
|
|
2104
|
+
emitted(event) {
|
|
2105
|
+
return new Promise((resolve, reject) => {
|
|
2106
|
+
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
|
2107
|
+
if (event !== "error")
|
|
2108
|
+
this.once("error", reject);
|
|
2109
|
+
this.once(event, resolve);
|
|
2110
|
+
});
|
|
2111
|
+
}
|
|
2112
|
+
async done() {
|
|
2113
|
+
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
|
2114
|
+
await __classPrivateFieldGet(this, _BetaMessageStream_endPromise, "f");
|
|
2115
|
+
}
|
|
2116
|
+
get currentMessage() {
|
|
2117
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
|
2118
|
+
}
|
|
2119
|
+
/**
|
|
2120
|
+
* @returns a promise that resolves with the the final assistant Message response,
|
|
2121
|
+
* or rejects if an error occurred or the stream ended prematurely without producing a Message.
|
|
2122
|
+
* If structured outputs were used, this will be a ParsedMessage with a `parsed` field.
|
|
2123
|
+
*/
|
|
2124
|
+
async finalMessage() {
|
|
2125
|
+
await this.done();
|
|
2126
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalMessage).call(this);
|
|
2127
|
+
}
|
|
2128
|
+
/**
|
|
2129
|
+
* @returns a promise that resolves with the the final assistant Message's text response, concatenated
|
|
2130
|
+
* together if there are more than one text blocks.
|
|
2131
|
+
* Rejects if an error occurred or the stream ended prematurely without producing a Message.
|
|
2132
|
+
*/
|
|
2133
|
+
async finalText() {
|
|
2134
|
+
await this.done();
|
|
2135
|
+
return __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalText).call(this);
|
|
2136
|
+
}
|
|
2137
|
+
_emit(event, ...args) {
|
|
2138
|
+
if (__classPrivateFieldGet(this, _BetaMessageStream_ended, "f"))
|
|
2139
|
+
return;
|
|
2140
|
+
if (event === "end") {
|
|
2141
|
+
__classPrivateFieldSet(this, _BetaMessageStream_ended, true, "f");
|
|
2142
|
+
__classPrivateFieldGet(this, _BetaMessageStream_resolveEndPromise, "f").call(this);
|
|
2143
|
+
}
|
|
2144
|
+
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event];
|
|
2145
|
+
if (listeners) {
|
|
2146
|
+
__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = listeners.filter((l) => !l.once);
|
|
2147
|
+
listeners.forEach(({ listener }) => listener(...args));
|
|
2148
|
+
}
|
|
2149
|
+
if (event === "abort") {
|
|
2150
|
+
const error = args[0];
|
|
2151
|
+
if (!__classPrivateFieldGet(this, _BetaMessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
|
2152
|
+
Promise.reject(error);
|
|
2153
|
+
}
|
|
2154
|
+
__classPrivateFieldGet(this, _BetaMessageStream_rejectConnectedPromise, "f").call(this, error);
|
|
2155
|
+
__classPrivateFieldGet(this, _BetaMessageStream_rejectEndPromise, "f").call(this, error);
|
|
2156
|
+
this._emit("end");
|
|
2157
|
+
return;
|
|
2158
|
+
}
|
|
2159
|
+
if (event === "error") {
|
|
2160
|
+
const error = args[0];
|
|
2161
|
+
if (!__classPrivateFieldGet(this, _BetaMessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
|
2162
|
+
Promise.reject(error);
|
|
2163
|
+
}
|
|
2164
|
+
__classPrivateFieldGet(this, _BetaMessageStream_rejectConnectedPromise, "f").call(this, error);
|
|
2165
|
+
__classPrivateFieldGet(this, _BetaMessageStream_rejectEndPromise, "f").call(this, error);
|
|
2166
|
+
this._emit("end");
|
|
2167
|
+
}
|
|
2168
|
+
}
|
|
2169
|
+
_emitFinal() {
|
|
2170
|
+
const finalMessage = this.receivedMessages.at(-1);
|
|
2171
|
+
if (finalMessage) {
|
|
2172
|
+
this._emit("finalMessage", __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalMessage).call(this));
|
|
2173
|
+
}
|
|
2174
|
+
}
|
|
2175
|
+
async _fromReadableStream(readableStream, options) {
|
|
2176
|
+
const signal = options?.signal;
|
|
2177
|
+
let abortHandler;
|
|
2178
|
+
if (signal) {
|
|
2179
|
+
if (signal.aborted)
|
|
2180
|
+
this.controller.abort();
|
|
2181
|
+
abortHandler = this.controller.abort.bind(this.controller);
|
|
2182
|
+
signal.addEventListener("abort", abortHandler);
|
|
2183
|
+
}
|
|
2184
|
+
try {
|
|
2185
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_beginRequest).call(this);
|
|
2186
|
+
this._connected(null);
|
|
2187
|
+
const stream = Stream.fromReadableStream(readableStream, this.controller);
|
|
2188
|
+
for await (const event of stream) {
|
|
2189
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_addStreamEvent).call(this, event);
|
|
2190
|
+
}
|
|
2191
|
+
if (stream.controller.signal?.aborted) {
|
|
2192
|
+
throw new APIUserAbortError();
|
|
2193
|
+
}
|
|
2194
|
+
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_endRequest).call(this);
|
|
2195
|
+
} finally {
|
|
2196
|
+
if (signal && abortHandler) {
|
|
2197
|
+
signal.removeEventListener("abort", abortHandler);
|
|
2198
|
+
}
|
|
2199
|
+
}
|
|
2200
|
+
}
|
|
2201
|
+
[(_BetaMessageStream_currentMessageSnapshot = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_params = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_connectedPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_resolveConnectedPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_rejectConnectedPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_endPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_resolveEndPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_rejectEndPromise = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_listeners = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_ended = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_errored = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_aborted = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_catchingPromiseCreated = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_response = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_request_id = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_logger = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_handleError = /* @__PURE__ */ new WeakMap(), _BetaMessageStream_instances = /* @__PURE__ */ new WeakSet(), _BetaMessageStream_getFinalMessage = function _BetaMessageStream_getFinalMessage2() {
|
|
2202
|
+
if (this.receivedMessages.length === 0) {
|
|
2203
|
+
throw new AnthropicError("stream ended without producing a Message with role=assistant");
|
|
2204
|
+
}
|
|
2205
|
+
return this.receivedMessages.at(-1);
|
|
2206
|
+
}, _BetaMessageStream_getFinalText = function _BetaMessageStream_getFinalText2() {
|
|
2207
|
+
if (this.receivedMessages.length === 0) {
|
|
2208
|
+
throw new AnthropicError("stream ended without producing a Message with role=assistant");
|
|
2209
|
+
}
|
|
2210
|
+
const textBlocks = this.receivedMessages.at(-1).content.filter((block) => block.type === "text").map((block) => block.text);
|
|
2211
|
+
if (textBlocks.length === 0) {
|
|
2212
|
+
throw new AnthropicError("stream ended without producing a content block with type=text");
|
|
2213
|
+
}
|
|
2214
|
+
return textBlocks.join(" ");
|
|
2215
|
+
}, _BetaMessageStream_beginRequest = function _BetaMessageStream_beginRequest2() {
|
|
2216
|
+
if (this.ended)
|
|
2217
|
+
return;
|
|
2218
|
+
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, void 0, "f");
|
|
2219
|
+
}, _BetaMessageStream_addStreamEvent = function _BetaMessageStream_addStreamEvent2(event) {
|
|
2220
|
+
if (this.ended)
|
|
2221
|
+
return;
|
|
2222
|
+
const messageSnapshot = __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_accumulateMessage).call(this, event);
|
|
2223
|
+
this._emit("streamEvent", event, messageSnapshot);
|
|
2224
|
+
switch (event.type) {
|
|
2225
|
+
case "content_block_delta": {
|
|
2226
|
+
const content = messageSnapshot.content.at(-1);
|
|
2227
|
+
switch (event.delta.type) {
|
|
2228
|
+
case "text_delta": {
|
|
2229
|
+
if (content.type === "text") {
|
|
2230
|
+
this._emit("text", event.delta.text, content.text || "");
|
|
2231
|
+
}
|
|
2232
|
+
break;
|
|
2233
|
+
}
|
|
2234
|
+
case "citations_delta": {
|
|
2235
|
+
if (content.type === "text") {
|
|
2236
|
+
this._emit("citation", event.delta.citation, content.citations ?? []);
|
|
2237
|
+
}
|
|
2238
|
+
break;
|
|
2239
|
+
}
|
|
2240
|
+
case "input_json_delta": {
|
|
2241
|
+
if (tracksToolInput(content) && content.input) {
|
|
2242
|
+
this._emit("inputJson", event.delta.partial_json, content.input);
|
|
2243
|
+
}
|
|
2244
|
+
break;
|
|
2245
|
+
}
|
|
2246
|
+
case "thinking_delta": {
|
|
2247
|
+
if (content.type === "thinking") {
|
|
2248
|
+
this._emit("thinking", event.delta.thinking, content.thinking);
|
|
2249
|
+
}
|
|
2250
|
+
break;
|
|
2251
|
+
}
|
|
2252
|
+
case "signature_delta": {
|
|
2253
|
+
if (content.type === "thinking") {
|
|
2254
|
+
this._emit("signature", content.signature);
|
|
2255
|
+
}
|
|
2256
|
+
break;
|
|
2257
|
+
}
|
|
2258
|
+
case "compaction_delta": {
|
|
2259
|
+
if (content.type === "compaction" && content.content) {
|
|
2260
|
+
this._emit("compaction", content.content);
|
|
2261
|
+
}
|
|
2262
|
+
break;
|
|
2263
|
+
}
|
|
2264
|
+
default:
|
|
2265
|
+
checkNever(event.delta);
|
|
2266
|
+
}
|
|
2267
|
+
break;
|
|
2268
|
+
}
|
|
2269
|
+
case "message_stop": {
|
|
2270
|
+
this._addMessageParam(messageSnapshot);
|
|
2271
|
+
this._addMessage(maybeParseBetaMessage(messageSnapshot, __classPrivateFieldGet(this, _BetaMessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _BetaMessageStream_logger, "f") }), true);
|
|
2272
|
+
break;
|
|
2273
|
+
}
|
|
2274
|
+
case "content_block_stop": {
|
|
2275
|
+
this._emit("contentBlock", messageSnapshot.content.at(-1));
|
|
2276
|
+
break;
|
|
2277
|
+
}
|
|
2278
|
+
case "message_start": {
|
|
2279
|
+
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, messageSnapshot, "f");
|
|
2280
|
+
break;
|
|
2281
|
+
}
|
|
2282
|
+
case "content_block_start":
|
|
2283
|
+
case "message_delta":
|
|
2284
|
+
break;
|
|
2285
|
+
}
|
|
2286
|
+
}, _BetaMessageStream_endRequest = function _BetaMessageStream_endRequest2() {
|
|
2287
|
+
if (this.ended) {
|
|
2288
|
+
throw new AnthropicError(`stream has ended, this shouldn't happen`);
|
|
2289
|
+
}
|
|
2290
|
+
const snapshot = __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
|
2291
|
+
if (!snapshot) {
|
|
2292
|
+
throw new AnthropicError(`request ended without sending any chunks`);
|
|
2293
|
+
}
|
|
2294
|
+
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, void 0, "f");
|
|
2295
|
+
return maybeParseBetaMessage(snapshot, __classPrivateFieldGet(this, _BetaMessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _BetaMessageStream_logger, "f") });
|
|
2296
|
+
}, _BetaMessageStream_accumulateMessage = function _BetaMessageStream_accumulateMessage2(event) {
|
|
2297
|
+
let snapshot = __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
|
2298
|
+
if (event.type === "message_start") {
|
|
2299
|
+
if (snapshot) {
|
|
2300
|
+
throw new AnthropicError(`Unexpected event order, got ${event.type} before receiving "message_stop"`);
|
|
2301
|
+
}
|
|
2302
|
+
return event.message;
|
|
2303
|
+
}
|
|
2304
|
+
if (!snapshot) {
|
|
2305
|
+
throw new AnthropicError(`Unexpected event order, got ${event.type} before "message_start"`);
|
|
2306
|
+
}
|
|
2307
|
+
switch (event.type) {
|
|
2308
|
+
case "message_stop":
|
|
2309
|
+
return snapshot;
|
|
2310
|
+
case "message_delta":
|
|
2311
|
+
snapshot.container = event.delta.container;
|
|
2312
|
+
snapshot.stop_reason = event.delta.stop_reason;
|
|
2313
|
+
snapshot.stop_sequence = event.delta.stop_sequence;
|
|
2314
|
+
snapshot.usage.output_tokens = event.usage.output_tokens;
|
|
2315
|
+
snapshot.context_management = event.context_management;
|
|
2316
|
+
if (event.usage.input_tokens != null) {
|
|
2317
|
+
snapshot.usage.input_tokens = event.usage.input_tokens;
|
|
2318
|
+
}
|
|
2319
|
+
if (event.usage.cache_creation_input_tokens != null) {
|
|
2320
|
+
snapshot.usage.cache_creation_input_tokens = event.usage.cache_creation_input_tokens;
|
|
2321
|
+
}
|
|
2322
|
+
if (event.usage.cache_read_input_tokens != null) {
|
|
2323
|
+
snapshot.usage.cache_read_input_tokens = event.usage.cache_read_input_tokens;
|
|
2324
|
+
}
|
|
2325
|
+
if (event.usage.server_tool_use != null) {
|
|
2326
|
+
snapshot.usage.server_tool_use = event.usage.server_tool_use;
|
|
2327
|
+
}
|
|
2328
|
+
if (event.usage.iterations != null) {
|
|
2329
|
+
snapshot.usage.iterations = event.usage.iterations;
|
|
2330
|
+
}
|
|
2331
|
+
return snapshot;
|
|
2332
|
+
case "content_block_start":
|
|
2333
|
+
snapshot.content.push(event.content_block);
|
|
2334
|
+
return snapshot;
|
|
2335
|
+
case "content_block_delta": {
|
|
2336
|
+
const snapshotContent = snapshot.content.at(event.index);
|
|
2337
|
+
switch (event.delta.type) {
|
|
2338
|
+
case "text_delta": {
|
|
2339
|
+
if (snapshotContent?.type === "text") {
|
|
2340
|
+
snapshot.content[event.index] = {
|
|
2341
|
+
...snapshotContent,
|
|
2342
|
+
text: (snapshotContent.text || "") + event.delta.text
|
|
2343
|
+
};
|
|
2344
|
+
}
|
|
2345
|
+
break;
|
|
2346
|
+
}
|
|
2347
|
+
case "citations_delta": {
|
|
2348
|
+
if (snapshotContent?.type === "text") {
|
|
2349
|
+
snapshot.content[event.index] = {
|
|
2350
|
+
...snapshotContent,
|
|
2351
|
+
citations: [...snapshotContent.citations ?? [], event.delta.citation]
|
|
2352
|
+
};
|
|
2353
|
+
}
|
|
2354
|
+
break;
|
|
2355
|
+
}
|
|
2356
|
+
case "input_json_delta": {
|
|
2357
|
+
if (snapshotContent && tracksToolInput(snapshotContent)) {
|
|
2358
|
+
let jsonBuf = snapshotContent[JSON_BUF_PROPERTY] || "";
|
|
2359
|
+
jsonBuf += event.delta.partial_json;
|
|
2360
|
+
const newContent = { ...snapshotContent };
|
|
2361
|
+
Object.defineProperty(newContent, JSON_BUF_PROPERTY, {
|
|
2362
|
+
value: jsonBuf,
|
|
2363
|
+
enumerable: false,
|
|
2364
|
+
writable: true
|
|
2365
|
+
});
|
|
2366
|
+
if (jsonBuf) {
|
|
2367
|
+
try {
|
|
2368
|
+
newContent.input = partialParse(jsonBuf);
|
|
2369
|
+
} catch (err) {
|
|
2370
|
+
const error = new AnthropicError(`Unable to parse tool parameter JSON from model. Please retry your request or adjust your prompt. Error: ${err}. JSON: ${jsonBuf}`);
|
|
2371
|
+
__classPrivateFieldGet(this, _BetaMessageStream_handleError, "f").call(this, error);
|
|
2372
|
+
}
|
|
2373
|
+
}
|
|
2374
|
+
snapshot.content[event.index] = newContent;
|
|
2375
|
+
}
|
|
2376
|
+
break;
|
|
2377
|
+
}
|
|
2378
|
+
case "thinking_delta": {
|
|
2379
|
+
if (snapshotContent?.type === "thinking") {
|
|
2380
|
+
snapshot.content[event.index] = {
|
|
2381
|
+
...snapshotContent,
|
|
2382
|
+
thinking: snapshotContent.thinking + event.delta.thinking
|
|
2383
|
+
};
|
|
2384
|
+
}
|
|
2385
|
+
break;
|
|
2386
|
+
}
|
|
2387
|
+
case "signature_delta": {
|
|
2388
|
+
if (snapshotContent?.type === "thinking") {
|
|
2389
|
+
snapshot.content[event.index] = {
|
|
2390
|
+
...snapshotContent,
|
|
2391
|
+
signature: event.delta.signature
|
|
2392
|
+
};
|
|
2393
|
+
}
|
|
2394
|
+
break;
|
|
2395
|
+
}
|
|
2396
|
+
case "compaction_delta": {
|
|
2397
|
+
if (snapshotContent?.type === "compaction") {
|
|
2398
|
+
snapshot.content[event.index] = {
|
|
2399
|
+
...snapshotContent,
|
|
2400
|
+
content: (snapshotContent.content || "") + event.delta.content
|
|
2401
|
+
};
|
|
2402
|
+
}
|
|
2403
|
+
break;
|
|
2404
|
+
}
|
|
2405
|
+
default:
|
|
2406
|
+
checkNever(event.delta);
|
|
2407
|
+
}
|
|
2408
|
+
return snapshot;
|
|
2409
|
+
}
|
|
2410
|
+
case "content_block_stop":
|
|
2411
|
+
return snapshot;
|
|
2412
|
+
}
|
|
2413
|
+
}, Symbol.asyncIterator)]() {
|
|
2414
|
+
const pushQueue = [];
|
|
2415
|
+
const readQueue = [];
|
|
2416
|
+
let done = false;
|
|
2417
|
+
this.on("streamEvent", (event) => {
|
|
2418
|
+
const reader = readQueue.shift();
|
|
2419
|
+
if (reader) {
|
|
2420
|
+
reader.resolve(event);
|
|
2421
|
+
} else {
|
|
2422
|
+
pushQueue.push(event);
|
|
2423
|
+
}
|
|
2424
|
+
});
|
|
2425
|
+
this.on("end", () => {
|
|
2426
|
+
done = true;
|
|
2427
|
+
for (const reader of readQueue) {
|
|
2428
|
+
reader.resolve(void 0);
|
|
2429
|
+
}
|
|
2430
|
+
readQueue.length = 0;
|
|
2431
|
+
});
|
|
2432
|
+
this.on("abort", (err) => {
|
|
2433
|
+
done = true;
|
|
2434
|
+
for (const reader of readQueue) {
|
|
2435
|
+
reader.reject(err);
|
|
2436
|
+
}
|
|
2437
|
+
readQueue.length = 0;
|
|
2438
|
+
});
|
|
2439
|
+
this.on("error", (err) => {
|
|
2440
|
+
done = true;
|
|
2441
|
+
for (const reader of readQueue) {
|
|
2442
|
+
reader.reject(err);
|
|
2443
|
+
}
|
|
2444
|
+
readQueue.length = 0;
|
|
2445
|
+
});
|
|
2446
|
+
return {
|
|
2447
|
+
next: async () => {
|
|
2448
|
+
if (!pushQueue.length) {
|
|
2449
|
+
if (done) {
|
|
2450
|
+
return { value: void 0, done: true };
|
|
2451
|
+
}
|
|
2452
|
+
return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk2) => chunk2 ? { value: chunk2, done: false } : { value: void 0, done: true });
|
|
2453
|
+
}
|
|
2454
|
+
const chunk = pushQueue.shift();
|
|
2455
|
+
return { value: chunk, done: false };
|
|
2456
|
+
},
|
|
2457
|
+
return: async () => {
|
|
2458
|
+
this.abort();
|
|
2459
|
+
return { value: void 0, done: true };
|
|
2460
|
+
}
|
|
2461
|
+
};
|
|
2462
|
+
}
|
|
2463
|
+
toReadableStream() {
|
|
2464
|
+
const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
|
|
2465
|
+
return stream.toReadableStream();
|
|
2466
|
+
}
|
|
2467
|
+
};
|
|
2468
|
+
function checkNever(x) {
|
|
2469
|
+
}
|
|
2470
|
+
|
|
2471
|
+
// ../node_modules/@anthropic-ai/sdk/lib/tools/ToolError.mjs
|
|
2472
|
+
var ToolError = class extends Error {
|
|
2473
|
+
constructor(content) {
|
|
2474
|
+
const message = typeof content === "string" ? content : content.map((block) => {
|
|
2475
|
+
if (block.type === "text")
|
|
2476
|
+
return block.text;
|
|
2477
|
+
return `[${block.type}]`;
|
|
2478
|
+
}).join(" ");
|
|
2479
|
+
super(message);
|
|
2480
|
+
this.name = "ToolError";
|
|
2481
|
+
this.content = content;
|
|
2482
|
+
}
|
|
2483
|
+
};
|
|
2484
|
+
|
|
2485
|
+
// ../node_modules/@anthropic-ai/sdk/lib/tools/CompactionControl.mjs
|
|
2486
|
+
var DEFAULT_TOKEN_THRESHOLD = 1e5;
|
|
2487
|
+
var DEFAULT_SUMMARY_PROMPT = `You have been working on the task described above but have not yet completed it. Write a continuation summary that will allow you (or another instance of yourself) to resume work efficiently in a future context window where the conversation history will be replaced with this summary. Your summary should be structured, concise, and actionable. Include:
|
|
2488
|
+
1. Task Overview
|
|
2489
|
+
The user's core request and success criteria
|
|
2490
|
+
Any clarifications or constraints they specified
|
|
2491
|
+
2. Current State
|
|
2492
|
+
What has been completed so far
|
|
2493
|
+
Files created, modified, or analyzed (with paths if relevant)
|
|
2494
|
+
Key outputs or artifacts produced
|
|
2495
|
+
3. Important Discoveries
|
|
2496
|
+
Technical constraints or requirements uncovered
|
|
2497
|
+
Decisions made and their rationale
|
|
2498
|
+
Errors encountered and how they were resolved
|
|
2499
|
+
What approaches were tried that didn't work (and why)
|
|
2500
|
+
4. Next Steps
|
|
2501
|
+
Specific actions needed to complete the task
|
|
2502
|
+
Any blockers or open questions to resolve
|
|
2503
|
+
Priority order if multiple steps remain
|
|
2504
|
+
5. Context to Preserve
|
|
2505
|
+
User preferences or style requirements
|
|
2506
|
+
Domain-specific details that aren't obvious
|
|
2507
|
+
Any promises made to the user
|
|
2508
|
+
Be concise but complete\u2014err on the side of including information that would prevent duplicate work or repeated mistakes. Write in a way that enables immediate resumption of the task.
|
|
2509
|
+
Wrap your summary in <summary></summary> tags.`;
|
|
2510
|
+
|
|
2511
|
+
// ../node_modules/@anthropic-ai/sdk/lib/tools/BetaToolRunner.mjs
|
|
2512
|
+
var _BetaToolRunner_instances;
|
|
2513
|
+
var _BetaToolRunner_consumed;
|
|
2514
|
+
var _BetaToolRunner_mutated;
|
|
2515
|
+
var _BetaToolRunner_state;
|
|
2516
|
+
var _BetaToolRunner_options;
|
|
2517
|
+
var _BetaToolRunner_message;
|
|
2518
|
+
var _BetaToolRunner_toolResponse;
|
|
2519
|
+
var _BetaToolRunner_completion;
|
|
2520
|
+
var _BetaToolRunner_iterationCount;
|
|
2521
|
+
var _BetaToolRunner_checkAndCompact;
|
|
2522
|
+
var _BetaToolRunner_generateToolResponse;
|
|
2523
|
+
function promiseWithResolvers() {
|
|
2524
|
+
let resolve;
|
|
2525
|
+
let reject;
|
|
2526
|
+
const promise = new Promise((res, rej) => {
|
|
2527
|
+
resolve = res;
|
|
2528
|
+
reject = rej;
|
|
2529
|
+
});
|
|
2530
|
+
return { promise, resolve, reject };
|
|
2531
|
+
}
|
|
2532
|
+
var BetaToolRunner = class {
|
|
2533
|
+
constructor(client, params, options) {
|
|
2534
|
+
_BetaToolRunner_instances.add(this);
|
|
2535
|
+
this.client = client;
|
|
2536
|
+
_BetaToolRunner_consumed.set(this, false);
|
|
2537
|
+
_BetaToolRunner_mutated.set(this, false);
|
|
2538
|
+
_BetaToolRunner_state.set(this, void 0);
|
|
2539
|
+
_BetaToolRunner_options.set(this, void 0);
|
|
2540
|
+
_BetaToolRunner_message.set(this, void 0);
|
|
2541
|
+
_BetaToolRunner_toolResponse.set(this, void 0);
|
|
2542
|
+
_BetaToolRunner_completion.set(this, void 0);
|
|
2543
|
+
_BetaToolRunner_iterationCount.set(this, 0);
|
|
2544
|
+
__classPrivateFieldSet(this, _BetaToolRunner_state, {
|
|
2545
|
+
params: {
|
|
2546
|
+
// You can't clone the entire params since there are functions as handlers.
|
|
2547
|
+
// You also don't really need to clone params.messages, but it probably will prevent a foot gun
|
|
2548
|
+
// somewhere.
|
|
2549
|
+
...params,
|
|
2550
|
+
messages: structuredClone(params.messages)
|
|
2551
|
+
}
|
|
2552
|
+
}, "f");
|
|
2553
|
+
const helpers = collectStainlessHelpers(params.tools, params.messages);
|
|
2554
|
+
const helperValue = ["BetaToolRunner", ...helpers].join(", ");
|
|
2555
|
+
__classPrivateFieldSet(this, _BetaToolRunner_options, {
|
|
2556
|
+
...options,
|
|
2557
|
+
headers: buildHeaders([{ "x-stainless-helper": helperValue }, options?.headers])
|
|
2558
|
+
}, "f");
|
|
2559
|
+
__classPrivateFieldSet(this, _BetaToolRunner_completion, promiseWithResolvers(), "f");
|
|
2560
|
+
}
|
|
2561
|
+
async *[(_BetaToolRunner_consumed = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_mutated = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_state = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_options = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_message = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_toolResponse = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_completion = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_iterationCount = /* @__PURE__ */ new WeakMap(), _BetaToolRunner_instances = /* @__PURE__ */ new WeakSet(), _BetaToolRunner_checkAndCompact = async function _BetaToolRunner_checkAndCompact2() {
|
|
2562
|
+
const compactionControl = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.compactionControl;
|
|
2563
|
+
if (!compactionControl || !compactionControl.enabled) {
|
|
2564
|
+
return false;
|
|
2565
|
+
}
|
|
2566
|
+
let tokensUsed = 0;
|
|
2567
|
+
if (__classPrivateFieldGet(this, _BetaToolRunner_message, "f") !== void 0) {
|
|
2568
|
+
try {
|
|
2569
|
+
const message = await __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
|
|
2570
|
+
const totalInputTokens = message.usage.input_tokens + (message.usage.cache_creation_input_tokens ?? 0) + (message.usage.cache_read_input_tokens ?? 0);
|
|
2571
|
+
tokensUsed = totalInputTokens + message.usage.output_tokens;
|
|
2572
|
+
} catch {
|
|
2573
|
+
return false;
|
|
2574
|
+
}
|
|
2575
|
+
}
|
|
2576
|
+
const threshold = compactionControl.contextTokenThreshold ?? DEFAULT_TOKEN_THRESHOLD;
|
|
2577
|
+
if (tokensUsed < threshold) {
|
|
2578
|
+
return false;
|
|
2579
|
+
}
|
|
2580
|
+
const model = compactionControl.model ?? __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.model;
|
|
2581
|
+
const summaryPrompt = compactionControl.summaryPrompt ?? DEFAULT_SUMMARY_PROMPT;
|
|
2582
|
+
const messages = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages;
|
|
2583
|
+
if (messages[messages.length - 1].role === "assistant") {
|
|
2584
|
+
const lastMessage = messages[messages.length - 1];
|
|
2585
|
+
if (Array.isArray(lastMessage.content)) {
|
|
2586
|
+
const nonToolBlocks = lastMessage.content.filter((block) => block.type !== "tool_use");
|
|
2587
|
+
if (nonToolBlocks.length === 0) {
|
|
2588
|
+
messages.pop();
|
|
2589
|
+
} else {
|
|
2590
|
+
lastMessage.content = nonToolBlocks;
|
|
2591
|
+
}
|
|
2592
|
+
}
|
|
2593
|
+
}
|
|
2594
|
+
const response = await this.client.beta.messages.create({
|
|
2595
|
+
model,
|
|
2596
|
+
messages: [
|
|
2597
|
+
...messages,
|
|
2598
|
+
{
|
|
2599
|
+
role: "user",
|
|
2600
|
+
content: [
|
|
2601
|
+
{
|
|
2602
|
+
type: "text",
|
|
2603
|
+
text: summaryPrompt
|
|
2604
|
+
}
|
|
2605
|
+
]
|
|
2606
|
+
}
|
|
2607
|
+
],
|
|
2608
|
+
max_tokens: __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_tokens
|
|
2609
|
+
}, {
|
|
2610
|
+
headers: { "x-stainless-helper": "compaction" }
|
|
2611
|
+
});
|
|
2612
|
+
if (response.content[0]?.type !== "text") {
|
|
2613
|
+
throw new AnthropicError("Expected text response for compaction");
|
|
2614
|
+
}
|
|
2615
|
+
__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages = [
|
|
2616
|
+
{
|
|
2617
|
+
role: "user",
|
|
2618
|
+
content: response.content
|
|
2619
|
+
}
|
|
2620
|
+
];
|
|
2621
|
+
return true;
|
|
2622
|
+
}, Symbol.asyncIterator)]() {
|
|
2623
|
+
var _a2;
|
|
2624
|
+
if (__classPrivateFieldGet(this, _BetaToolRunner_consumed, "f")) {
|
|
2625
|
+
throw new AnthropicError("Cannot iterate over a consumed stream");
|
|
2626
|
+
}
|
|
2627
|
+
__classPrivateFieldSet(this, _BetaToolRunner_consumed, true, "f");
|
|
2628
|
+
__classPrivateFieldSet(this, _BetaToolRunner_mutated, true, "f");
|
|
2629
|
+
__classPrivateFieldSet(this, _BetaToolRunner_toolResponse, void 0, "f");
|
|
2630
|
+
try {
|
|
2631
|
+
while (true) {
|
|
2632
|
+
let stream;
|
|
2633
|
+
try {
|
|
2634
|
+
if (__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_iterations && __classPrivateFieldGet(this, _BetaToolRunner_iterationCount, "f") >= __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_iterations) {
|
|
2635
|
+
break;
|
|
2636
|
+
}
|
|
2637
|
+
__classPrivateFieldSet(this, _BetaToolRunner_mutated, false, "f");
|
|
2638
|
+
__classPrivateFieldSet(this, _BetaToolRunner_toolResponse, void 0, "f");
|
|
2639
|
+
__classPrivateFieldSet(this, _BetaToolRunner_iterationCount, (_a2 = __classPrivateFieldGet(this, _BetaToolRunner_iterationCount, "f"), _a2++, _a2), "f");
|
|
2640
|
+
__classPrivateFieldSet(this, _BetaToolRunner_message, void 0, "f");
|
|
2641
|
+
const { max_iterations, compactionControl, ...params } = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params;
|
|
2642
|
+
if (params.stream) {
|
|
2643
|
+
stream = this.client.beta.messages.stream({ ...params }, __classPrivateFieldGet(this, _BetaToolRunner_options, "f"));
|
|
2644
|
+
__classPrivateFieldSet(this, _BetaToolRunner_message, stream.finalMessage(), "f");
|
|
2645
|
+
__classPrivateFieldGet(this, _BetaToolRunner_message, "f").catch(() => {
|
|
2646
|
+
});
|
|
2647
|
+
yield stream;
|
|
2648
|
+
} else {
|
|
2649
|
+
__classPrivateFieldSet(this, _BetaToolRunner_message, this.client.beta.messages.create({ ...params, stream: false }, __classPrivateFieldGet(this, _BetaToolRunner_options, "f")), "f");
|
|
2650
|
+
yield __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
|
|
2651
|
+
}
|
|
2652
|
+
const isCompacted = await __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_checkAndCompact).call(this);
|
|
2653
|
+
if (!isCompacted) {
|
|
2654
|
+
if (!__classPrivateFieldGet(this, _BetaToolRunner_mutated, "f")) {
|
|
2655
|
+
const { role, content } = await __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
|
|
2656
|
+
__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.push({ role, content });
|
|
2657
|
+
}
|
|
2658
|
+
const toolMessage = await __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_generateToolResponse).call(this, __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.at(-1));
|
|
2659
|
+
if (toolMessage) {
|
|
2660
|
+
__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.push(toolMessage);
|
|
2661
|
+
} else if (!__classPrivateFieldGet(this, _BetaToolRunner_mutated, "f")) {
|
|
2662
|
+
break;
|
|
2663
|
+
}
|
|
2664
|
+
}
|
|
2665
|
+
} finally {
|
|
2666
|
+
if (stream) {
|
|
2667
|
+
stream.abort();
|
|
2668
|
+
}
|
|
2669
|
+
}
|
|
2670
|
+
}
|
|
2671
|
+
if (!__classPrivateFieldGet(this, _BetaToolRunner_message, "f")) {
|
|
2672
|
+
throw new AnthropicError("ToolRunner concluded without a message from the server");
|
|
2673
|
+
}
|
|
2674
|
+
__classPrivateFieldGet(this, _BetaToolRunner_completion, "f").resolve(await __classPrivateFieldGet(this, _BetaToolRunner_message, "f"));
|
|
2675
|
+
} catch (error) {
|
|
2676
|
+
__classPrivateFieldSet(this, _BetaToolRunner_consumed, false, "f");
|
|
2677
|
+
__classPrivateFieldGet(this, _BetaToolRunner_completion, "f").promise.catch(() => {
|
|
2678
|
+
});
|
|
2679
|
+
__classPrivateFieldGet(this, _BetaToolRunner_completion, "f").reject(error);
|
|
2680
|
+
__classPrivateFieldSet(this, _BetaToolRunner_completion, promiseWithResolvers(), "f");
|
|
2681
|
+
throw error;
|
|
2682
|
+
}
|
|
2683
|
+
}
|
|
2684
|
+
setMessagesParams(paramsOrMutator) {
|
|
2685
|
+
if (typeof paramsOrMutator === "function") {
|
|
2686
|
+
__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params = paramsOrMutator(__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params);
|
|
2687
|
+
} else {
|
|
2688
|
+
__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params = paramsOrMutator;
|
|
2689
|
+
}
|
|
2690
|
+
__classPrivateFieldSet(this, _BetaToolRunner_mutated, true, "f");
|
|
2691
|
+
__classPrivateFieldSet(this, _BetaToolRunner_toolResponse, void 0, "f");
|
|
2692
|
+
}
|
|
2693
|
+
/**
|
|
2694
|
+
* Get the tool response for the last message from the assistant.
|
|
2695
|
+
* Avoids redundant tool executions by caching results.
|
|
2696
|
+
*
|
|
2697
|
+
* @returns A promise that resolves to a BetaMessageParam containing tool results, or null if no tools need to be executed
|
|
2698
|
+
*
|
|
2699
|
+
* @example
|
|
2700
|
+
* const toolResponse = await runner.generateToolResponse();
|
|
2701
|
+
* if (toolResponse) {
|
|
2702
|
+
* console.log('Tool results:', toolResponse.content);
|
|
2703
|
+
* }
|
|
2704
|
+
*/
|
|
2705
|
+
async generateToolResponse() {
|
|
2706
|
+
const message = await __classPrivateFieldGet(this, _BetaToolRunner_message, "f") ?? this.params.messages.at(-1);
|
|
2707
|
+
if (!message) {
|
|
2708
|
+
return null;
|
|
2709
|
+
}
|
|
2710
|
+
return __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_generateToolResponse).call(this, message);
|
|
2711
|
+
}
|
|
2712
|
+
/**
|
|
2713
|
+
* Wait for the async iterator to complete. This works even if the async iterator hasn't yet started, and
|
|
2714
|
+
* will wait for an instance to start and go to completion.
|
|
2715
|
+
*
|
|
2716
|
+
* @returns A promise that resolves to the final BetaMessage when the iterator completes
|
|
2717
|
+
*
|
|
2718
|
+
* @example
|
|
2719
|
+
* // Start consuming the iterator
|
|
2720
|
+
* for await (const message of runner) {
|
|
2721
|
+
* console.log('Message:', message.content);
|
|
2722
|
+
* }
|
|
2723
|
+
*
|
|
2724
|
+
* // Meanwhile, wait for completion from another part of the code
|
|
2725
|
+
* const finalMessage = await runner.done();
|
|
2726
|
+
* console.log('Final response:', finalMessage.content);
|
|
2727
|
+
*/
|
|
2728
|
+
done() {
|
|
2729
|
+
return __classPrivateFieldGet(this, _BetaToolRunner_completion, "f").promise;
|
|
2730
|
+
}
|
|
2731
|
+
/**
|
|
2732
|
+
* Returns a promise indicating that the stream is done. Unlike .done(), this will eagerly read the stream:
|
|
2733
|
+
* * If the iterator has not been consumed, consume the entire iterator and return the final message from the
|
|
2734
|
+
* assistant.
|
|
2735
|
+
* * If the iterator has been consumed, waits for it to complete and returns the final message.
|
|
2736
|
+
*
|
|
2737
|
+
* @returns A promise that resolves to the final BetaMessage from the conversation
|
|
2738
|
+
* @throws {AnthropicError} If no messages were processed during the conversation
|
|
2739
|
+
*
|
|
2740
|
+
* @example
|
|
2741
|
+
* const finalMessage = await runner.runUntilDone();
|
|
2742
|
+
* console.log('Final response:', finalMessage.content);
|
|
2743
|
+
*/
|
|
2744
|
+
async runUntilDone() {
|
|
2745
|
+
if (!__classPrivateFieldGet(this, _BetaToolRunner_consumed, "f")) {
|
|
2746
|
+
for await (const _ of this) {
|
|
2747
|
+
}
|
|
2748
|
+
}
|
|
2749
|
+
return this.done();
|
|
2750
|
+
}
|
|
2751
|
+
/**
|
|
2752
|
+
* Get the current parameters being used by the ToolRunner.
|
|
2753
|
+
*
|
|
2754
|
+
* @returns A readonly view of the current ToolRunnerParams
|
|
2755
|
+
*
|
|
2756
|
+
* @example
|
|
2757
|
+
* const currentParams = runner.params;
|
|
2758
|
+
* console.log('Current model:', currentParams.model);
|
|
2759
|
+
* console.log('Message count:', currentParams.messages.length);
|
|
2760
|
+
*/
|
|
2761
|
+
get params() {
|
|
2762
|
+
return __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params;
|
|
2763
|
+
}
|
|
2764
|
+
/**
|
|
2765
|
+
* Add one or more messages to the conversation history.
|
|
2766
|
+
*
|
|
2767
|
+
* @param messages - One or more BetaMessageParam objects to add to the conversation
|
|
2768
|
+
*
|
|
2769
|
+
* @example
|
|
2770
|
+
* runner.pushMessages(
|
|
2771
|
+
* { role: 'user', content: 'Also, what about the weather in NYC?' }
|
|
2772
|
+
* );
|
|
2773
|
+
*
|
|
2774
|
+
* @example
|
|
2775
|
+
* // Adding multiple messages
|
|
2776
|
+
* runner.pushMessages(
|
|
2777
|
+
* { role: 'user', content: 'What about NYC?' },
|
|
2778
|
+
* { role: 'user', content: 'And Boston?' }
|
|
2779
|
+
* );
|
|
2780
|
+
*/
|
|
2781
|
+
pushMessages(...messages) {
|
|
2782
|
+
this.setMessagesParams((params) => ({
|
|
2783
|
+
...params,
|
|
2784
|
+
messages: [...params.messages, ...messages]
|
|
2785
|
+
}));
|
|
2786
|
+
}
|
|
2787
|
+
/**
|
|
2788
|
+
* Makes the ToolRunner directly awaitable, equivalent to calling .runUntilDone()
|
|
2789
|
+
* This allows using `await runner` instead of `await runner.runUntilDone()`
|
|
2790
|
+
*/
|
|
2791
|
+
then(onfulfilled, onrejected) {
|
|
2792
|
+
return this.runUntilDone().then(onfulfilled, onrejected);
|
|
2793
|
+
}
|
|
2794
|
+
};
|
|
2795
|
+
_BetaToolRunner_generateToolResponse = async function _BetaToolRunner_generateToolResponse2(lastMessage) {
|
|
2796
|
+
if (__classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f") !== void 0) {
|
|
2797
|
+
return __classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f");
|
|
2798
|
+
}
|
|
2799
|
+
__classPrivateFieldSet(this, _BetaToolRunner_toolResponse, generateToolResponse(__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params, lastMessage), "f");
|
|
2800
|
+
return __classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f");
|
|
2801
|
+
};
|
|
2802
|
+
async function generateToolResponse(params, lastMessage = params.messages.at(-1)) {
|
|
2803
|
+
if (!lastMessage || lastMessage.role !== "assistant" || !lastMessage.content || typeof lastMessage.content === "string") {
|
|
2804
|
+
return null;
|
|
2805
|
+
}
|
|
2806
|
+
const toolUseBlocks = lastMessage.content.filter((content) => content.type === "tool_use");
|
|
2807
|
+
if (toolUseBlocks.length === 0) {
|
|
2808
|
+
return null;
|
|
2809
|
+
}
|
|
2810
|
+
const toolResults = await Promise.all(toolUseBlocks.map(async (toolUse) => {
|
|
2811
|
+
const tool = params.tools.find((t) => ("name" in t ? t.name : t.mcp_server_name) === toolUse.name);
|
|
2812
|
+
if (!tool || !("run" in tool)) {
|
|
2813
|
+
return {
|
|
2814
|
+
type: "tool_result",
|
|
2815
|
+
tool_use_id: toolUse.id,
|
|
2816
|
+
content: `Error: Tool '${toolUse.name}' not found`,
|
|
2817
|
+
is_error: true
|
|
2818
|
+
};
|
|
2819
|
+
}
|
|
2820
|
+
try {
|
|
2821
|
+
let input = toolUse.input;
|
|
2822
|
+
if ("parse" in tool && tool.parse) {
|
|
2823
|
+
input = tool.parse(input);
|
|
2824
|
+
}
|
|
2825
|
+
const result = await tool.run(input);
|
|
2826
|
+
return {
|
|
2827
|
+
type: "tool_result",
|
|
2828
|
+
tool_use_id: toolUse.id,
|
|
2829
|
+
content: result
|
|
2830
|
+
};
|
|
2831
|
+
} catch (error) {
|
|
2832
|
+
return {
|
|
2833
|
+
type: "tool_result",
|
|
2834
|
+
tool_use_id: toolUse.id,
|
|
2835
|
+
content: error instanceof ToolError ? error.content : `Error: ${error instanceof Error ? error.message : String(error)}`,
|
|
2836
|
+
is_error: true
|
|
2837
|
+
};
|
|
2838
|
+
}
|
|
2839
|
+
}));
|
|
2840
|
+
return {
|
|
2841
|
+
role: "user",
|
|
2842
|
+
content: toolResults
|
|
2843
|
+
};
|
|
2844
|
+
}
|
|
2845
|
+
|
|
2846
|
+
// ../node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
|
|
2847
|
+
var JSONLDecoder = class _JSONLDecoder {
|
|
2848
|
+
constructor(iterator, controller) {
|
|
2849
|
+
this.iterator = iterator;
|
|
2850
|
+
this.controller = controller;
|
|
2851
|
+
}
|
|
2852
|
+
async *decoder() {
|
|
2853
|
+
const lineDecoder = new LineDecoder();
|
|
2854
|
+
for await (const chunk of this.iterator) {
|
|
2855
|
+
for (const line of lineDecoder.decode(chunk)) {
|
|
2856
|
+
yield JSON.parse(line);
|
|
2857
|
+
}
|
|
2858
|
+
}
|
|
2859
|
+
for (const line of lineDecoder.flush()) {
|
|
2860
|
+
yield JSON.parse(line);
|
|
2861
|
+
}
|
|
2862
|
+
}
|
|
2863
|
+
[Symbol.asyncIterator]() {
|
|
2864
|
+
return this.decoder();
|
|
2865
|
+
}
|
|
2866
|
+
static fromResponse(response, controller) {
|
|
2867
|
+
if (!response.body) {
|
|
2868
|
+
controller.abort();
|
|
2869
|
+
if (typeof globalThis.navigator !== "undefined" && globalThis.navigator.product === "ReactNative") {
|
|
2870
|
+
throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
|
|
2871
|
+
}
|
|
2872
|
+
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
|
2873
|
+
}
|
|
2874
|
+
return new _JSONLDecoder(ReadableStreamToAsyncIterable(response.body), controller);
|
|
2875
|
+
}
|
|
2876
|
+
};
|
|
2877
|
+
|
|
2878
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/messages/batches.mjs
|
|
2879
|
+
var Batches = class extends APIResource {
|
|
2880
|
+
/**
|
|
2881
|
+
* Send a batch of Message creation requests.
|
|
2882
|
+
*
|
|
2883
|
+
* The Message Batches API can be used to process multiple Messages API requests at
|
|
2884
|
+
* once. Once a Message Batch is created, it begins processing immediately. Batches
|
|
2885
|
+
* can take up to 24 hours to complete.
|
|
2886
|
+
*
|
|
2887
|
+
* Learn more about the Message Batches API in our
|
|
2888
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
2889
|
+
*
|
|
2890
|
+
* @example
|
|
2891
|
+
* ```ts
|
|
2892
|
+
* const betaMessageBatch =
|
|
2893
|
+
* await client.beta.messages.batches.create({
|
|
2894
|
+
* requests: [
|
|
2895
|
+
* {
|
|
2896
|
+
* custom_id: 'my-custom-id-1',
|
|
2897
|
+
* params: {
|
|
2898
|
+
* max_tokens: 1024,
|
|
2899
|
+
* messages: [
|
|
2900
|
+
* { content: 'Hello, world', role: 'user' },
|
|
2901
|
+
* ],
|
|
2902
|
+
* model: 'claude-opus-4-6',
|
|
2903
|
+
* },
|
|
2904
|
+
* },
|
|
2905
|
+
* ],
|
|
2906
|
+
* });
|
|
2907
|
+
* ```
|
|
2908
|
+
*/
|
|
2909
|
+
create(params, options) {
|
|
2910
|
+
const { betas, ...body } = params;
|
|
2911
|
+
return this._client.post("/v1/messages/batches?beta=true", {
|
|
2912
|
+
body,
|
|
2913
|
+
...options,
|
|
2914
|
+
headers: buildHeaders([
|
|
2915
|
+
{ "anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString() },
|
|
2916
|
+
options?.headers
|
|
2917
|
+
])
|
|
2918
|
+
});
|
|
2919
|
+
}
|
|
2920
|
+
/**
|
|
2921
|
+
* This endpoint is idempotent and can be used to poll for Message Batch
|
|
2922
|
+
* completion. To access the results of a Message Batch, make a request to the
|
|
2923
|
+
* `results_url` field in the response.
|
|
2924
|
+
*
|
|
2925
|
+
* Learn more about the Message Batches API in our
|
|
2926
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
2927
|
+
*
|
|
2928
|
+
* @example
|
|
2929
|
+
* ```ts
|
|
2930
|
+
* const betaMessageBatch =
|
|
2931
|
+
* await client.beta.messages.batches.retrieve(
|
|
2932
|
+
* 'message_batch_id',
|
|
2933
|
+
* );
|
|
2934
|
+
* ```
|
|
2935
|
+
*/
|
|
2936
|
+
retrieve(messageBatchID, params = {}, options) {
|
|
2937
|
+
const { betas } = params ?? {};
|
|
2938
|
+
return this._client.get(path`/v1/messages/batches/${messageBatchID}?beta=true`, {
|
|
2939
|
+
...options,
|
|
2940
|
+
headers: buildHeaders([
|
|
2941
|
+
{ "anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString() },
|
|
2942
|
+
options?.headers
|
|
2943
|
+
])
|
|
2944
|
+
});
|
|
2945
|
+
}
|
|
2946
|
+
/**
|
|
2947
|
+
* List all Message Batches within a Workspace. Most recently created batches are
|
|
2948
|
+
* returned first.
|
|
2949
|
+
*
|
|
2950
|
+
* Learn more about the Message Batches API in our
|
|
2951
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
2952
|
+
*
|
|
2953
|
+
* @example
|
|
2954
|
+
* ```ts
|
|
2955
|
+
* // Automatically fetches more pages as needed.
|
|
2956
|
+
* for await (const betaMessageBatch of client.beta.messages.batches.list()) {
|
|
2957
|
+
* // ...
|
|
2958
|
+
* }
|
|
2959
|
+
* ```
|
|
2960
|
+
*/
|
|
2961
|
+
list(params = {}, options) {
|
|
2962
|
+
const { betas, ...query } = params ?? {};
|
|
2963
|
+
return this._client.getAPIList("/v1/messages/batches?beta=true", Page, {
|
|
2964
|
+
query,
|
|
2965
|
+
...options,
|
|
2966
|
+
headers: buildHeaders([
|
|
2967
|
+
{ "anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString() },
|
|
2968
|
+
options?.headers
|
|
2969
|
+
])
|
|
2970
|
+
});
|
|
2971
|
+
}
|
|
2972
|
+
/**
|
|
2973
|
+
* Delete a Message Batch.
|
|
2974
|
+
*
|
|
2975
|
+
* Message Batches can only be deleted once they've finished processing. If you'd
|
|
2976
|
+
* like to delete an in-progress batch, you must first cancel it.
|
|
2977
|
+
*
|
|
2978
|
+
* Learn more about the Message Batches API in our
|
|
2979
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
2980
|
+
*
|
|
2981
|
+
* @example
|
|
2982
|
+
* ```ts
|
|
2983
|
+
* const betaDeletedMessageBatch =
|
|
2984
|
+
* await client.beta.messages.batches.delete(
|
|
2985
|
+
* 'message_batch_id',
|
|
2986
|
+
* );
|
|
2987
|
+
* ```
|
|
2988
|
+
*/
|
|
2989
|
+
delete(messageBatchID, params = {}, options) {
|
|
2990
|
+
const { betas } = params ?? {};
|
|
2991
|
+
return this._client.delete(path`/v1/messages/batches/${messageBatchID}?beta=true`, {
|
|
2992
|
+
...options,
|
|
2993
|
+
headers: buildHeaders([
|
|
2994
|
+
{ "anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString() },
|
|
2995
|
+
options?.headers
|
|
2996
|
+
])
|
|
2997
|
+
});
|
|
2998
|
+
}
|
|
2999
|
+
/**
|
|
3000
|
+
* Batches may be canceled any time before processing ends. Once cancellation is
|
|
3001
|
+
* initiated, the batch enters a `canceling` state, at which time the system may
|
|
3002
|
+
* complete any in-progress, non-interruptible requests before finalizing
|
|
3003
|
+
* cancellation.
|
|
3004
|
+
*
|
|
3005
|
+
* The number of canceled requests is specified in `request_counts`. To determine
|
|
3006
|
+
* which requests were canceled, check the individual results within the batch.
|
|
3007
|
+
* Note that cancellation may not result in any canceled requests if they were
|
|
3008
|
+
* non-interruptible.
|
|
3009
|
+
*
|
|
3010
|
+
* Learn more about the Message Batches API in our
|
|
3011
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
3012
|
+
*
|
|
3013
|
+
* @example
|
|
3014
|
+
* ```ts
|
|
3015
|
+
* const betaMessageBatch =
|
|
3016
|
+
* await client.beta.messages.batches.cancel(
|
|
3017
|
+
* 'message_batch_id',
|
|
3018
|
+
* );
|
|
3019
|
+
* ```
|
|
3020
|
+
*/
|
|
3021
|
+
cancel(messageBatchID, params = {}, options) {
|
|
3022
|
+
const { betas } = params ?? {};
|
|
3023
|
+
return this._client.post(path`/v1/messages/batches/${messageBatchID}/cancel?beta=true`, {
|
|
3024
|
+
...options,
|
|
3025
|
+
headers: buildHeaders([
|
|
3026
|
+
{ "anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString() },
|
|
3027
|
+
options?.headers
|
|
3028
|
+
])
|
|
3029
|
+
});
|
|
3030
|
+
}
|
|
3031
|
+
/**
|
|
3032
|
+
* Streams the results of a Message Batch as a `.jsonl` file.
|
|
3033
|
+
*
|
|
3034
|
+
* Each line in the file is a JSON object containing the result of a single request
|
|
3035
|
+
* in the Message Batch. Results are not guaranteed to be in the same order as
|
|
3036
|
+
* requests. Use the `custom_id` field to match results to requests.
|
|
3037
|
+
*
|
|
3038
|
+
* Learn more about the Message Batches API in our
|
|
3039
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
|
3040
|
+
*
|
|
3041
|
+
* @example
|
|
3042
|
+
* ```ts
|
|
3043
|
+
* const betaMessageBatchIndividualResponse =
|
|
3044
|
+
* await client.beta.messages.batches.results(
|
|
3045
|
+
* 'message_batch_id',
|
|
3046
|
+
* );
|
|
3047
|
+
* ```
|
|
3048
|
+
*/
|
|
3049
|
+
async results(messageBatchID, params = {}, options) {
|
|
3050
|
+
const batch = await this.retrieve(messageBatchID);
|
|
3051
|
+
if (!batch.results_url) {
|
|
3052
|
+
throw new AnthropicError(`No batch \`results_url\`; Has it finished processing? ${batch.processing_status} - ${batch.id}`);
|
|
3053
|
+
}
|
|
3054
|
+
const { betas } = params ?? {};
|
|
3055
|
+
return this._client.get(batch.results_url, {
|
|
3056
|
+
...options,
|
|
3057
|
+
headers: buildHeaders([
|
|
3058
|
+
{
|
|
3059
|
+
"anthropic-beta": [...betas ?? [], "message-batches-2024-09-24"].toString(),
|
|
3060
|
+
Accept: "application/binary"
|
|
3061
|
+
},
|
|
3062
|
+
options?.headers
|
|
3063
|
+
]),
|
|
3064
|
+
stream: true,
|
|
3065
|
+
__binaryResponse: true
|
|
3066
|
+
})._thenUnwrap((_, props) => JSONLDecoder.fromResponse(props.response, props.controller));
|
|
3067
|
+
}
|
|
3068
|
+
};
|
|
3069
|
+
|
|
3070
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/messages/messages.mjs
|
|
3071
|
+
var DEPRECATED_MODELS = {
|
|
3072
|
+
"claude-1.3": "November 6th, 2024",
|
|
3073
|
+
"claude-1.3-100k": "November 6th, 2024",
|
|
3074
|
+
"claude-instant-1.1": "November 6th, 2024",
|
|
3075
|
+
"claude-instant-1.1-100k": "November 6th, 2024",
|
|
3076
|
+
"claude-instant-1.2": "November 6th, 2024",
|
|
3077
|
+
"claude-3-sonnet-20240229": "July 21st, 2025",
|
|
3078
|
+
"claude-3-opus-20240229": "January 5th, 2026",
|
|
3079
|
+
"claude-2.1": "July 21st, 2025",
|
|
3080
|
+
"claude-2.0": "July 21st, 2025",
|
|
3081
|
+
"claude-3-7-sonnet-latest": "February 19th, 2026",
|
|
3082
|
+
"claude-3-7-sonnet-20250219": "February 19th, 2026"
|
|
3083
|
+
};
|
|
3084
|
+
var MODELS_TO_WARN_WITH_THINKING_ENABLED = ["claude-opus-4-6"];
|
|
3085
|
+
var Messages = class extends APIResource {
|
|
3086
|
+
constructor() {
|
|
3087
|
+
super(...arguments);
|
|
3088
|
+
this.batches = new Batches(this._client);
|
|
3089
|
+
}
|
|
3090
|
+
create(params, options) {
|
|
3091
|
+
const modifiedParams = transformOutputFormat(params);
|
|
3092
|
+
const { betas, ...body } = modifiedParams;
|
|
3093
|
+
if (body.model in DEPRECATED_MODELS) {
|
|
3094
|
+
console.warn(`The model '${body.model}' is deprecated and will reach end-of-life on ${DEPRECATED_MODELS[body.model]}
|
|
3095
|
+
Please migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.`);
|
|
3096
|
+
}
|
|
3097
|
+
if (body.model in MODELS_TO_WARN_WITH_THINKING_ENABLED && body.thinking && body.thinking.type === "enabled") {
|
|
3098
|
+
console.warn(`Using Claude with ${body.model} and 'thinking.type=enabled' is deprecated. Use 'thinking.type=adaptive' instead which results in better model performance in our testing: https://platform.claude.com/docs/en/build-with-claude/adaptive-thinking`);
|
|
3099
|
+
}
|
|
3100
|
+
let timeout = this._client._options.timeout;
|
|
3101
|
+
if (!body.stream && timeout == null) {
|
|
3102
|
+
const maxNonstreamingTokens = MODEL_NONSTREAMING_TOKENS[body.model] ?? void 0;
|
|
3103
|
+
timeout = this._client.calculateNonstreamingTimeout(body.max_tokens, maxNonstreamingTokens);
|
|
3104
|
+
}
|
|
3105
|
+
const helperHeader = stainlessHelperHeader(body.tools, body.messages);
|
|
3106
|
+
return this._client.post("/v1/messages?beta=true", {
|
|
3107
|
+
body,
|
|
3108
|
+
timeout: timeout ?? 6e5,
|
|
3109
|
+
...options,
|
|
3110
|
+
headers: buildHeaders([
|
|
3111
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
3112
|
+
helperHeader,
|
|
3113
|
+
options?.headers
|
|
3114
|
+
]),
|
|
3115
|
+
stream: modifiedParams.stream ?? false
|
|
3116
|
+
});
|
|
3117
|
+
}
|
|
3118
|
+
/**
|
|
3119
|
+
* Send a structured list of input messages with text and/or image content, along with an expected `output_format` and
|
|
3120
|
+
* the response will be automatically parsed and available in the `parsed_output` property of the message.
|
|
3121
|
+
*
|
|
3122
|
+
* @example
|
|
3123
|
+
* ```ts
|
|
3124
|
+
* const message = await client.beta.messages.parse({
|
|
3125
|
+
* model: 'claude-3-5-sonnet-20241022',
|
|
3126
|
+
* max_tokens: 1024,
|
|
3127
|
+
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
|
3128
|
+
* output_format: zodOutputFormat(z.object({ answer: z.number() }), 'math'),
|
|
3129
|
+
* });
|
|
3130
|
+
*
|
|
3131
|
+
* console.log(message.parsed_output?.answer); // 4
|
|
3132
|
+
* ```
|
|
3133
|
+
*/
|
|
3134
|
+
parse(params, options) {
|
|
3135
|
+
options = {
|
|
3136
|
+
...options,
|
|
3137
|
+
headers: buildHeaders([
|
|
3138
|
+
{ "anthropic-beta": [...params.betas ?? [], "structured-outputs-2025-12-15"].toString() },
|
|
3139
|
+
options?.headers
|
|
3140
|
+
])
|
|
3141
|
+
};
|
|
3142
|
+
return this.create(params, options).then((message) => parseBetaMessage(message, params, { logger: this._client.logger ?? console }));
|
|
3143
|
+
}
|
|
3144
|
+
/**
|
|
3145
|
+
* Create a Message stream
|
|
3146
|
+
*/
|
|
3147
|
+
stream(body, options) {
|
|
3148
|
+
return BetaMessageStream.createMessage(this, body, options);
|
|
3149
|
+
}
|
|
3150
|
+
/**
|
|
3151
|
+
* Count the number of tokens in a Message.
|
|
3152
|
+
*
|
|
3153
|
+
* The Token Count API can be used to count the number of tokens in a Message,
|
|
3154
|
+
* including tools, images, and documents, without creating it.
|
|
3155
|
+
*
|
|
3156
|
+
* Learn more about token counting in our
|
|
3157
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/token-counting)
|
|
3158
|
+
*
|
|
3159
|
+
* @example
|
|
3160
|
+
* ```ts
|
|
3161
|
+
* const betaMessageTokensCount =
|
|
3162
|
+
* await client.beta.messages.countTokens({
|
|
3163
|
+
* messages: [{ content: 'string', role: 'user' }],
|
|
3164
|
+
* model: 'claude-opus-4-6',
|
|
3165
|
+
* });
|
|
3166
|
+
* ```
|
|
3167
|
+
*/
|
|
3168
|
+
countTokens(params, options) {
|
|
3169
|
+
const modifiedParams = transformOutputFormat(params);
|
|
3170
|
+
const { betas, ...body } = modifiedParams;
|
|
3171
|
+
return this._client.post("/v1/messages/count_tokens?beta=true", {
|
|
3172
|
+
body,
|
|
3173
|
+
...options,
|
|
3174
|
+
headers: buildHeaders([
|
|
3175
|
+
{ "anthropic-beta": [...betas ?? [], "token-counting-2024-11-01"].toString() },
|
|
3176
|
+
options?.headers
|
|
3177
|
+
])
|
|
3178
|
+
});
|
|
3179
|
+
}
|
|
3180
|
+
toolRunner(body, options) {
|
|
3181
|
+
return new BetaToolRunner(this._client, body, options);
|
|
3182
|
+
}
|
|
3183
|
+
};
|
|
3184
|
+
function transformOutputFormat(params) {
|
|
3185
|
+
if (!params.output_format) {
|
|
3186
|
+
return params;
|
|
3187
|
+
}
|
|
3188
|
+
if (params.output_config?.format) {
|
|
3189
|
+
throw new AnthropicError("Both output_format and output_config.format were provided. Please use only output_config.format (output_format is deprecated).");
|
|
3190
|
+
}
|
|
3191
|
+
const { output_format, ...rest } = params;
|
|
3192
|
+
return {
|
|
3193
|
+
...rest,
|
|
3194
|
+
output_config: {
|
|
3195
|
+
...params.output_config,
|
|
3196
|
+
format: output_format
|
|
3197
|
+
}
|
|
3198
|
+
};
|
|
3199
|
+
}
|
|
3200
|
+
Messages.Batches = Batches;
|
|
3201
|
+
Messages.BetaToolRunner = BetaToolRunner;
|
|
3202
|
+
Messages.ToolError = ToolError;
|
|
3203
|
+
|
|
3204
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/skills/versions.mjs
|
|
3205
|
+
var Versions = class extends APIResource {
|
|
3206
|
+
/**
|
|
3207
|
+
* Create Skill Version
|
|
3208
|
+
*
|
|
3209
|
+
* @example
|
|
3210
|
+
* ```ts
|
|
3211
|
+
* const version = await client.beta.skills.versions.create(
|
|
3212
|
+
* 'skill_id',
|
|
3213
|
+
* );
|
|
3214
|
+
* ```
|
|
3215
|
+
*/
|
|
3216
|
+
create(skillID, params = {}, options) {
|
|
3217
|
+
const { betas, ...body } = params ?? {};
|
|
3218
|
+
return this._client.post(path`/v1/skills/${skillID}/versions?beta=true`, multipartFormRequestOptions({
|
|
3219
|
+
body,
|
|
3220
|
+
...options,
|
|
3221
|
+
headers: buildHeaders([
|
|
3222
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3223
|
+
options?.headers
|
|
3224
|
+
])
|
|
3225
|
+
}, this._client));
|
|
3226
|
+
}
|
|
3227
|
+
/**
|
|
3228
|
+
* Get Skill Version
|
|
3229
|
+
*
|
|
3230
|
+
* @example
|
|
3231
|
+
* ```ts
|
|
3232
|
+
* const version = await client.beta.skills.versions.retrieve(
|
|
3233
|
+
* 'version',
|
|
3234
|
+
* { skill_id: 'skill_id' },
|
|
3235
|
+
* );
|
|
3236
|
+
* ```
|
|
3237
|
+
*/
|
|
3238
|
+
retrieve(version, params, options) {
|
|
3239
|
+
const { skill_id, betas } = params;
|
|
3240
|
+
return this._client.get(path`/v1/skills/${skill_id}/versions/${version}?beta=true`, {
|
|
3241
|
+
...options,
|
|
3242
|
+
headers: buildHeaders([
|
|
3243
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3244
|
+
options?.headers
|
|
3245
|
+
])
|
|
3246
|
+
});
|
|
3247
|
+
}
|
|
3248
|
+
/**
|
|
3249
|
+
* List Skill Versions
|
|
3250
|
+
*
|
|
3251
|
+
* @example
|
|
3252
|
+
* ```ts
|
|
3253
|
+
* // Automatically fetches more pages as needed.
|
|
3254
|
+
* for await (const versionListResponse of client.beta.skills.versions.list(
|
|
3255
|
+
* 'skill_id',
|
|
3256
|
+
* )) {
|
|
3257
|
+
* // ...
|
|
3258
|
+
* }
|
|
3259
|
+
* ```
|
|
3260
|
+
*/
|
|
3261
|
+
list(skillID, params = {}, options) {
|
|
3262
|
+
const { betas, ...query } = params ?? {};
|
|
3263
|
+
return this._client.getAPIList(path`/v1/skills/${skillID}/versions?beta=true`, PageCursor, {
|
|
3264
|
+
query,
|
|
3265
|
+
...options,
|
|
3266
|
+
headers: buildHeaders([
|
|
3267
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3268
|
+
options?.headers
|
|
3269
|
+
])
|
|
3270
|
+
});
|
|
3271
|
+
}
|
|
3272
|
+
/**
|
|
3273
|
+
* Delete Skill Version
|
|
3274
|
+
*
|
|
3275
|
+
* @example
|
|
3276
|
+
* ```ts
|
|
3277
|
+
* const version = await client.beta.skills.versions.delete(
|
|
3278
|
+
* 'version',
|
|
3279
|
+
* { skill_id: 'skill_id' },
|
|
3280
|
+
* );
|
|
3281
|
+
* ```
|
|
3282
|
+
*/
|
|
3283
|
+
delete(version, params, options) {
|
|
3284
|
+
const { skill_id, betas } = params;
|
|
3285
|
+
return this._client.delete(path`/v1/skills/${skill_id}/versions/${version}?beta=true`, {
|
|
3286
|
+
...options,
|
|
3287
|
+
headers: buildHeaders([
|
|
3288
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3289
|
+
options?.headers
|
|
3290
|
+
])
|
|
3291
|
+
});
|
|
3292
|
+
}
|
|
3293
|
+
};
|
|
3294
|
+
|
|
3295
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/skills/skills.mjs
|
|
3296
|
+
var Skills = class extends APIResource {
|
|
3297
|
+
constructor() {
|
|
3298
|
+
super(...arguments);
|
|
3299
|
+
this.versions = new Versions(this._client);
|
|
3300
|
+
}
|
|
3301
|
+
/**
|
|
3302
|
+
* Create Skill
|
|
3303
|
+
*
|
|
3304
|
+
* @example
|
|
3305
|
+
* ```ts
|
|
3306
|
+
* const skill = await client.beta.skills.create();
|
|
3307
|
+
* ```
|
|
3308
|
+
*/
|
|
3309
|
+
create(params = {}, options) {
|
|
3310
|
+
const { betas, ...body } = params ?? {};
|
|
3311
|
+
return this._client.post("/v1/skills?beta=true", multipartFormRequestOptions({
|
|
3312
|
+
body,
|
|
3313
|
+
...options,
|
|
3314
|
+
headers: buildHeaders([
|
|
3315
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3316
|
+
options?.headers
|
|
3317
|
+
])
|
|
3318
|
+
}, this._client, false));
|
|
3319
|
+
}
|
|
3320
|
+
/**
|
|
3321
|
+
* Get Skill
|
|
3322
|
+
*
|
|
3323
|
+
* @example
|
|
3324
|
+
* ```ts
|
|
3325
|
+
* const skill = await client.beta.skills.retrieve('skill_id');
|
|
3326
|
+
* ```
|
|
3327
|
+
*/
|
|
3328
|
+
retrieve(skillID, params = {}, options) {
|
|
3329
|
+
const { betas } = params ?? {};
|
|
3330
|
+
return this._client.get(path`/v1/skills/${skillID}?beta=true`, {
|
|
3331
|
+
...options,
|
|
3332
|
+
headers: buildHeaders([
|
|
3333
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3334
|
+
options?.headers
|
|
3335
|
+
])
|
|
3336
|
+
});
|
|
3337
|
+
}
|
|
3338
|
+
/**
|
|
3339
|
+
* List Skills
|
|
3340
|
+
*
|
|
3341
|
+
* @example
|
|
3342
|
+
* ```ts
|
|
3343
|
+
* // Automatically fetches more pages as needed.
|
|
3344
|
+
* for await (const skillListResponse of client.beta.skills.list()) {
|
|
3345
|
+
* // ...
|
|
3346
|
+
* }
|
|
3347
|
+
* ```
|
|
3348
|
+
*/
|
|
3349
|
+
list(params = {}, options) {
|
|
3350
|
+
const { betas, ...query } = params ?? {};
|
|
3351
|
+
return this._client.getAPIList("/v1/skills?beta=true", PageCursor, {
|
|
3352
|
+
query,
|
|
3353
|
+
...options,
|
|
3354
|
+
headers: buildHeaders([
|
|
3355
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3356
|
+
options?.headers
|
|
3357
|
+
])
|
|
3358
|
+
});
|
|
3359
|
+
}
|
|
3360
|
+
/**
|
|
3361
|
+
* Delete Skill
|
|
3362
|
+
*
|
|
3363
|
+
* @example
|
|
3364
|
+
* ```ts
|
|
3365
|
+
* const skill = await client.beta.skills.delete('skill_id');
|
|
3366
|
+
* ```
|
|
3367
|
+
*/
|
|
3368
|
+
delete(skillID, params = {}, options) {
|
|
3369
|
+
const { betas } = params ?? {};
|
|
3370
|
+
return this._client.delete(path`/v1/skills/${skillID}?beta=true`, {
|
|
3371
|
+
...options,
|
|
3372
|
+
headers: buildHeaders([
|
|
3373
|
+
{ "anthropic-beta": [...betas ?? [], "skills-2025-10-02"].toString() },
|
|
3374
|
+
options?.headers
|
|
3375
|
+
])
|
|
3376
|
+
});
|
|
3377
|
+
}
|
|
3378
|
+
};
|
|
3379
|
+
Skills.Versions = Versions;
|
|
3380
|
+
|
|
3381
|
+
// ../node_modules/@anthropic-ai/sdk/resources/beta/beta.mjs
|
|
3382
|
+
var Beta = class extends APIResource {
|
|
3383
|
+
constructor() {
|
|
3384
|
+
super(...arguments);
|
|
3385
|
+
this.models = new Models(this._client);
|
|
3386
|
+
this.messages = new Messages(this._client);
|
|
3387
|
+
this.files = new Files(this._client);
|
|
3388
|
+
this.skills = new Skills(this._client);
|
|
3389
|
+
}
|
|
3390
|
+
};
|
|
3391
|
+
Beta.Models = Models;
|
|
3392
|
+
Beta.Messages = Messages;
|
|
3393
|
+
Beta.Files = Files;
|
|
3394
|
+
Beta.Skills = Skills;
|
|
3395
|
+
|
|
3396
|
+
// ../node_modules/@anthropic-ai/sdk/resources/completions.mjs
|
|
3397
|
+
var Completions = class extends APIResource {
|
|
3398
|
+
create(params, options) {
|
|
3399
|
+
const { betas, ...body } = params;
|
|
3400
|
+
return this._client.post("/v1/complete", {
|
|
3401
|
+
body,
|
|
3402
|
+
timeout: this._client._options.timeout ?? 6e5,
|
|
3403
|
+
...options,
|
|
3404
|
+
headers: buildHeaders([
|
|
3405
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
3406
|
+
options?.headers
|
|
3407
|
+
]),
|
|
3408
|
+
stream: params.stream ?? false
|
|
3409
|
+
});
|
|
3410
|
+
}
|
|
3411
|
+
};
|
|
3412
|
+
|
|
3413
|
+
// ../node_modules/@anthropic-ai/sdk/lib/parser.mjs
|
|
3414
|
+
function getOutputFormat2(params) {
|
|
3415
|
+
return params?.output_config?.format;
|
|
3416
|
+
}
|
|
3417
|
+
function maybeParseMessage(message, params, opts) {
|
|
3418
|
+
const outputFormat = getOutputFormat2(params);
|
|
3419
|
+
if (!params || !("parse" in (outputFormat ?? {}))) {
|
|
3420
|
+
return {
|
|
3421
|
+
...message,
|
|
3422
|
+
content: message.content.map((block) => {
|
|
3423
|
+
if (block.type === "text") {
|
|
3424
|
+
const parsedBlock = Object.defineProperty({ ...block }, "parsed_output", {
|
|
3425
|
+
value: null,
|
|
3426
|
+
enumerable: false
|
|
3427
|
+
});
|
|
3428
|
+
return parsedBlock;
|
|
3429
|
+
}
|
|
3430
|
+
return block;
|
|
3431
|
+
}),
|
|
3432
|
+
parsed_output: null
|
|
3433
|
+
};
|
|
3434
|
+
}
|
|
3435
|
+
return parseMessage(message, params, opts);
|
|
3436
|
+
}
|
|
3437
|
+
function parseMessage(message, params, opts) {
|
|
3438
|
+
let firstParsedOutput = null;
|
|
3439
|
+
const content = message.content.map((block) => {
|
|
3440
|
+
if (block.type === "text") {
|
|
3441
|
+
const parsedOutput = parseOutputFormat(params, block.text);
|
|
3442
|
+
if (firstParsedOutput === null) {
|
|
3443
|
+
firstParsedOutput = parsedOutput;
|
|
3444
|
+
}
|
|
3445
|
+
const parsedBlock = Object.defineProperty({ ...block }, "parsed_output", {
|
|
3446
|
+
value: parsedOutput,
|
|
3447
|
+
enumerable: false
|
|
3448
|
+
});
|
|
3449
|
+
return parsedBlock;
|
|
3450
|
+
}
|
|
3451
|
+
return block;
|
|
3452
|
+
});
|
|
3453
|
+
return {
|
|
3454
|
+
...message,
|
|
3455
|
+
content,
|
|
3456
|
+
parsed_output: firstParsedOutput
|
|
3457
|
+
};
|
|
3458
|
+
}
|
|
3459
|
+
function parseOutputFormat(params, content) {
|
|
3460
|
+
const outputFormat = getOutputFormat2(params);
|
|
3461
|
+
if (outputFormat?.type !== "json_schema") {
|
|
3462
|
+
return null;
|
|
3463
|
+
}
|
|
3464
|
+
try {
|
|
3465
|
+
if ("parse" in outputFormat) {
|
|
3466
|
+
return outputFormat.parse(content);
|
|
3467
|
+
}
|
|
3468
|
+
return JSON.parse(content);
|
|
3469
|
+
} catch (error) {
|
|
3470
|
+
throw new AnthropicError(`Failed to parse structured output: ${error}`);
|
|
3471
|
+
}
|
|
3472
|
+
}
|
|
3473
|
+
|
|
3474
|
+
// ../node_modules/@anthropic-ai/sdk/lib/MessageStream.mjs
|
|
3475
|
+
var _MessageStream_instances;
|
|
3476
|
+
var _MessageStream_currentMessageSnapshot;
|
|
3477
|
+
var _MessageStream_params;
|
|
3478
|
+
var _MessageStream_connectedPromise;
|
|
3479
|
+
var _MessageStream_resolveConnectedPromise;
|
|
3480
|
+
var _MessageStream_rejectConnectedPromise;
|
|
3481
|
+
var _MessageStream_endPromise;
|
|
3482
|
+
var _MessageStream_resolveEndPromise;
|
|
3483
|
+
var _MessageStream_rejectEndPromise;
|
|
3484
|
+
var _MessageStream_listeners;
|
|
3485
|
+
var _MessageStream_ended;
|
|
3486
|
+
var _MessageStream_errored;
|
|
3487
|
+
var _MessageStream_aborted;
|
|
3488
|
+
var _MessageStream_catchingPromiseCreated;
|
|
3489
|
+
var _MessageStream_response;
|
|
3490
|
+
var _MessageStream_request_id;
|
|
3491
|
+
var _MessageStream_logger;
|
|
3492
|
+
var _MessageStream_getFinalMessage;
|
|
3493
|
+
var _MessageStream_getFinalText;
|
|
3494
|
+
var _MessageStream_handleError;
|
|
3495
|
+
var _MessageStream_beginRequest;
|
|
3496
|
+
var _MessageStream_addStreamEvent;
|
|
3497
|
+
var _MessageStream_endRequest;
|
|
3498
|
+
var _MessageStream_accumulateMessage;
|
|
3499
|
+
var JSON_BUF_PROPERTY2 = "__json_buf";
|
|
3500
|
+
function tracksToolInput2(content) {
|
|
3501
|
+
return content.type === "tool_use" || content.type === "server_tool_use";
|
|
3502
|
+
}
|
|
3503
|
+
var MessageStream = class _MessageStream {
|
|
3504
|
+
constructor(params, opts) {
|
|
3505
|
+
_MessageStream_instances.add(this);
|
|
3506
|
+
this.messages = [];
|
|
3507
|
+
this.receivedMessages = [];
|
|
3508
|
+
_MessageStream_currentMessageSnapshot.set(this, void 0);
|
|
3509
|
+
_MessageStream_params.set(this, null);
|
|
3510
|
+
this.controller = new AbortController();
|
|
3511
|
+
_MessageStream_connectedPromise.set(this, void 0);
|
|
3512
|
+
_MessageStream_resolveConnectedPromise.set(this, () => {
|
|
3513
|
+
});
|
|
3514
|
+
_MessageStream_rejectConnectedPromise.set(this, () => {
|
|
3515
|
+
});
|
|
3516
|
+
_MessageStream_endPromise.set(this, void 0);
|
|
3517
|
+
_MessageStream_resolveEndPromise.set(this, () => {
|
|
3518
|
+
});
|
|
3519
|
+
_MessageStream_rejectEndPromise.set(this, () => {
|
|
3520
|
+
});
|
|
3521
|
+
_MessageStream_listeners.set(this, {});
|
|
3522
|
+
_MessageStream_ended.set(this, false);
|
|
3523
|
+
_MessageStream_errored.set(this, false);
|
|
3524
|
+
_MessageStream_aborted.set(this, false);
|
|
3525
|
+
_MessageStream_catchingPromiseCreated.set(this, false);
|
|
3526
|
+
_MessageStream_response.set(this, void 0);
|
|
3527
|
+
_MessageStream_request_id.set(this, void 0);
|
|
3528
|
+
_MessageStream_logger.set(this, void 0);
|
|
3529
|
+
_MessageStream_handleError.set(this, (error) => {
|
|
3530
|
+
__classPrivateFieldSet(this, _MessageStream_errored, true, "f");
|
|
3531
|
+
if (isAbortError(error)) {
|
|
3532
|
+
error = new APIUserAbortError();
|
|
3533
|
+
}
|
|
3534
|
+
if (error instanceof APIUserAbortError) {
|
|
3535
|
+
__classPrivateFieldSet(this, _MessageStream_aborted, true, "f");
|
|
3536
|
+
return this._emit("abort", error);
|
|
3537
|
+
}
|
|
3538
|
+
if (error instanceof AnthropicError) {
|
|
3539
|
+
return this._emit("error", error);
|
|
3540
|
+
}
|
|
3541
|
+
if (error instanceof Error) {
|
|
3542
|
+
const anthropicError = new AnthropicError(error.message);
|
|
3543
|
+
anthropicError.cause = error;
|
|
3544
|
+
return this._emit("error", anthropicError);
|
|
3545
|
+
}
|
|
3546
|
+
return this._emit("error", new AnthropicError(String(error)));
|
|
3547
|
+
});
|
|
3548
|
+
__classPrivateFieldSet(this, _MessageStream_connectedPromise, new Promise((resolve, reject) => {
|
|
3549
|
+
__classPrivateFieldSet(this, _MessageStream_resolveConnectedPromise, resolve, "f");
|
|
3550
|
+
__classPrivateFieldSet(this, _MessageStream_rejectConnectedPromise, reject, "f");
|
|
3551
|
+
}), "f");
|
|
3552
|
+
__classPrivateFieldSet(this, _MessageStream_endPromise, new Promise((resolve, reject) => {
|
|
3553
|
+
__classPrivateFieldSet(this, _MessageStream_resolveEndPromise, resolve, "f");
|
|
3554
|
+
__classPrivateFieldSet(this, _MessageStream_rejectEndPromise, reject, "f");
|
|
3555
|
+
}), "f");
|
|
3556
|
+
__classPrivateFieldGet(this, _MessageStream_connectedPromise, "f").catch(() => {
|
|
3557
|
+
});
|
|
3558
|
+
__classPrivateFieldGet(this, _MessageStream_endPromise, "f").catch(() => {
|
|
3559
|
+
});
|
|
3560
|
+
__classPrivateFieldSet(this, _MessageStream_params, params, "f");
|
|
3561
|
+
__classPrivateFieldSet(this, _MessageStream_logger, opts?.logger ?? console, "f");
|
|
3562
|
+
}
|
|
3563
|
+
get response() {
|
|
3564
|
+
return __classPrivateFieldGet(this, _MessageStream_response, "f");
|
|
3565
|
+
}
|
|
3566
|
+
get request_id() {
|
|
3567
|
+
return __classPrivateFieldGet(this, _MessageStream_request_id, "f");
|
|
3568
|
+
}
|
|
3569
|
+
/**
 * Returns the `MessageStream` data, the raw `Response` instance and the ID of the request,
 * returned via the `request-id` header which is useful for debugging requests and reporting
 * issues to Anthropic.
 *
 * This is the same as the `APIPromise.withResponse()` method.
 *
 * This method will raise an error if you created the stream using `MessageStream.fromReadableStream`
 * as no `Response` is available.
 */
async withResponse() {
  // Mark that a consumer is awaiting, so `_emit('error'|'abort')` does not
  // also fire a floating `Promise.reject` for the same failure.
  __classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
  const response = await __classPrivateFieldGet(this, _MessageStream_connectedPromise, "f");
  if (!response) {
    // `_fromReadableStream` resolves the connected promise with `null`.
    throw new Error("Could not resolve a `Response` object");
  }
  return {
    data: this,
    response,
    request_id: response.headers.get("request-id")
  };
}
|
|
3591
|
+
/**
|
|
3592
|
+
* Intended for use on the frontend, consuming a stream produced with
|
|
3593
|
+
* `.toReadableStream()` on the backend.
|
|
3594
|
+
*
|
|
3595
|
+
* Note that messages sent to the model do not appear in `.on('message')`
|
|
3596
|
+
* in this context.
|
|
3597
|
+
*/
|
|
3598
|
+
static fromReadableStream(stream) {
|
|
3599
|
+
const runner = new _MessageStream(null);
|
|
3600
|
+
runner._run(() => runner._fromReadableStream(stream));
|
|
3601
|
+
return runner;
|
|
3602
|
+
}
|
|
3603
|
+
static createMessage(messages, params, options, { logger } = {}) {
|
|
3604
|
+
const runner = new _MessageStream(params, { logger });
|
|
3605
|
+
for (const message of params.messages) {
|
|
3606
|
+
runner._addMessageParam(message);
|
|
3607
|
+
}
|
|
3608
|
+
__classPrivateFieldSet(runner, _MessageStream_params, { ...params, stream: true }, "f");
|
|
3609
|
+
runner._run(() => runner._createMessage(messages, { ...params, stream: true }, { ...options, headers: { ...options?.headers, "X-Stainless-Helper-Method": "stream" } }));
|
|
3610
|
+
return runner;
|
|
3611
|
+
}
|
|
3612
|
+
_run(executor) {
|
|
3613
|
+
executor().then(() => {
|
|
3614
|
+
this._emitFinal();
|
|
3615
|
+
this._emit("end");
|
|
3616
|
+
}, __classPrivateFieldGet(this, _MessageStream_handleError, "f"));
|
|
3617
|
+
}
|
|
3618
|
+
// Records an input-side message param (conversation history); emits nothing.
_addMessageParam(message) {
  this.messages.push(message);
}
|
|
3621
|
+
_addMessage(message, emit = true) {
|
|
3622
|
+
this.receivedMessages.push(message);
|
|
3623
|
+
if (emit) {
|
|
3624
|
+
this._emit("message", message);
|
|
3625
|
+
}
|
|
3626
|
+
}
|
|
3627
|
+
// Drives a streaming Messages API request to completion: forwards caller
// aborts to our controller, feeds each SSE event into the accumulator, and
// finalizes (or aborts) the snapshot when the stream ends.
async _createMessage(messages, params, options) {
  const signal = options?.signal;
  let abortHandler;
  if (signal) {
    // Propagate a caller-side abort (already fired or future) to our controller.
    if (signal.aborted)
      this.controller.abort();
    abortHandler = this.controller.abort.bind(this.controller);
    signal.addEventListener("abort", abortHandler);
  }
  try {
    __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_beginRequest).call(this);
    const { response, data: stream } = await messages.create({ ...params, stream: true }, { ...options, signal: this.controller.signal }).withResponse();
    this._connected(response);
    for await (const event of stream) {
      __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_addStreamEvent).call(this, event);
    }
    // The underlying stream may end because it was aborted rather than completed.
    if (stream.controller.signal?.aborted) {
      throw new APIUserAbortError();
    }
    __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_endRequest).call(this);
  } finally {
    // Avoid leaking the listener on long-lived caller signals.
    if (signal && abortHandler) {
      signal.removeEventListener("abort", abortHandler);
    }
  }
}
|
|
3653
|
+
// Records the HTTP response (null when consuming a raw ReadableStream) and
// resolves the `connected` promise that `withResponse()` awaits.
_connected(response) {
  if (this.ended)
    return;
  __classPrivateFieldSet(this, _MessageStream_response, response, "f");
  __classPrivateFieldSet(this, _MessageStream_request_id, response?.headers.get("request-id"), "f");
  __classPrivateFieldGet(this, _MessageStream_resolveConnectedPromise, "f").call(this, response);
  this._emit("connect");
}
|
|
3661
|
+
// True once the `end` event has been emitted (set in `_emit`).
get ended() {
  return __classPrivateFieldGet(this, _MessageStream_ended, "f");
}
|
|
3664
|
+
// True if an `error` event has been emitted.
get errored() {
  return __classPrivateFieldGet(this, _MessageStream_errored, "f");
}
|
|
3667
|
+
// True if the stream was aborted (user abort or connection abort).
get aborted() {
  return __classPrivateFieldGet(this, _MessageStream_aborted, "f");
}
|
|
3670
|
+
// Aborts the in-flight request via the stream's AbortController.
abort() {
  this.controller.abort();
}
|
|
3673
|
+
/**
|
|
3674
|
+
* Adds the listener function to the end of the listeners array for the event.
|
|
3675
|
+
* No checks are made to see if the listener has already been added. Multiple calls passing
|
|
3676
|
+
* the same combination of event and listener will result in the listener being added, and
|
|
3677
|
+
* called, multiple times.
|
|
3678
|
+
* @returns this MessageStream, so that calls can be chained
|
|
3679
|
+
*/
|
|
3680
|
+
on(event, listener) {
|
|
3681
|
+
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = []);
|
|
3682
|
+
listeners.push({ listener });
|
|
3683
|
+
return this;
|
|
3684
|
+
}
|
|
3685
|
+
/**
|
|
3686
|
+
* Removes the specified listener from the listener array for the event.
|
|
3687
|
+
* off() will remove, at most, one instance of a listener from the listener array. If any single
|
|
3688
|
+
* listener has been added multiple times to the listener array for the specified event, then
|
|
3689
|
+
* off() must be called multiple times to remove each instance.
|
|
3690
|
+
* @returns this MessageStream, so that calls can be chained
|
|
3691
|
+
*/
|
|
3692
|
+
off(event, listener) {
|
|
3693
|
+
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event];
|
|
3694
|
+
if (!listeners)
|
|
3695
|
+
return this;
|
|
3696
|
+
const index = listeners.findIndex((l) => l.listener === listener);
|
|
3697
|
+
if (index >= 0)
|
|
3698
|
+
listeners.splice(index, 1);
|
|
3699
|
+
return this;
|
|
3700
|
+
}
|
|
3701
|
+
/**
|
|
3702
|
+
* Adds a one-time listener function for the event. The next time the event is triggered,
|
|
3703
|
+
* this listener is removed and then invoked.
|
|
3704
|
+
* @returns this MessageStream, so that calls can be chained
|
|
3705
|
+
*/
|
|
3706
|
+
once(event, listener) {
|
|
3707
|
+
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = []);
|
|
3708
|
+
listeners.push({ listener, once: true });
|
|
3709
|
+
return this;
|
|
3710
|
+
}
|
|
3711
|
+
/**
|
|
3712
|
+
* This is similar to `.once()`, but returns a Promise that resolves the next time
|
|
3713
|
+
* the event is triggered, instead of calling a listener callback.
|
|
3714
|
+
* @returns a Promise that resolves the next time given event is triggered,
|
|
3715
|
+
* or rejects if an error is emitted. (If you request the 'error' event,
|
|
3716
|
+
* returns a promise that resolves with the error).
|
|
3717
|
+
*
|
|
3718
|
+
* Example:
|
|
3719
|
+
*
|
|
3720
|
+
* const message = await stream.emitted('message') // rejects if the stream errors
|
|
3721
|
+
*/
|
|
3722
|
+
emitted(event) {
|
|
3723
|
+
return new Promise((resolve, reject) => {
|
|
3724
|
+
__classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
|
|
3725
|
+
if (event !== "error")
|
|
3726
|
+
this.once("error", reject);
|
|
3727
|
+
this.once(event, resolve);
|
|
3728
|
+
});
|
|
3729
|
+
}
|
|
3730
|
+
async done() {
|
|
3731
|
+
__classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
|
|
3732
|
+
await __classPrivateFieldGet(this, _MessageStream_endPromise, "f");
|
|
3733
|
+
}
|
|
3734
|
+
// The in-progress accumulated Message snapshot, or undefined outside a request.
get currentMessage() {
  return __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
}
|
|
3737
|
+
/**
 * @returns a promise that resolves with the final assistant Message response,
 * or rejects if an error occurred or the stream ended prematurely without producing a Message.
 * If structured outputs were used, this will be a ParsedMessage with a `parsed_output` field.
 */
async finalMessage() {
  await this.done();
  return __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalMessage).call(this);
}
|
|
3746
|
+
/**
 * @returns a promise that resolves with the final assistant Message's text response, concatenated
 * together if there are more than one text blocks.
 * Rejects if an error occurred or the stream ended prematurely without producing a Message.
 */
async finalText() {
  await this.done();
  return __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalText).call(this);
}
|
|
3755
|
+
// Central event dispatcher. Handles `end` bookkeeping, fires listeners
// (pruning `once` entries before invocation so re-entrant emits see a clean
// list), and for `abort`/`error` rejects the connected/end promises and
// cascades into an `end` emit.
_emit(event, ...args) {
  // Once ended, the stream is inert; late events are dropped.
  if (__classPrivateFieldGet(this, _MessageStream_ended, "f"))
    return;
  if (event === "end") {
    __classPrivateFieldSet(this, _MessageStream_ended, true, "f");
    __classPrivateFieldGet(this, _MessageStream_resolveEndPromise, "f").call(this);
  }
  const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event];
  if (listeners) {
    // Remove one-shot listeners first so a listener re-emitting the same
    // event cannot re-trigger them.
    __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = listeners.filter((l) => !l.once);
    listeners.forEach(({ listener }) => listener(...args));
  }
  if (event === "abort") {
    const error = args[0];
    // With no awaiting promise and no listeners, deliberately surface an
    // unhandled rejection so the failure is not silently swallowed.
    if (!__classPrivateFieldGet(this, _MessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
      Promise.reject(error);
    }
    __classPrivateFieldGet(this, _MessageStream_rejectConnectedPromise, "f").call(this, error);
    __classPrivateFieldGet(this, _MessageStream_rejectEndPromise, "f").call(this, error);
    this._emit("end");
    return;
  }
  if (event === "error") {
    const error = args[0];
    // Same unhandled-rejection escape hatch as the abort path.
    if (!__classPrivateFieldGet(this, _MessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
      Promise.reject(error);
    }
    __classPrivateFieldGet(this, _MessageStream_rejectConnectedPromise, "f").call(this, error);
    __classPrivateFieldGet(this, _MessageStream_rejectEndPromise, "f").call(this, error);
    this._emit("end");
  }
}
|
|
3787
|
+
_emitFinal() {
|
|
3788
|
+
const finalMessage = this.receivedMessages.at(-1);
|
|
3789
|
+
if (finalMessage) {
|
|
3790
|
+
this._emit("finalMessage", __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalMessage).call(this));
|
|
3791
|
+
}
|
|
3792
|
+
}
|
|
3793
|
+
// Counterpart of `_createMessage` for a caller-supplied ReadableStream
// (typically produced by `.toReadableStream()`); no HTTP Response exists, so
// the connected promise is resolved with `null`.
async _fromReadableStream(readableStream, options) {
  const signal = options?.signal;
  let abortHandler;
  if (signal) {
    // Propagate a caller-side abort (already fired or future) to our controller.
    if (signal.aborted)
      this.controller.abort();
    abortHandler = this.controller.abort.bind(this.controller);
    signal.addEventListener("abort", abortHandler);
  }
  try {
    __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_beginRequest).call(this);
    this._connected(null);
    const stream = Stream.fromReadableStream(readableStream, this.controller);
    for await (const event of stream) {
      __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_addStreamEvent).call(this, event);
    }
    if (stream.controller.signal?.aborted) {
      throw new APIUserAbortError();
    }
    __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_endRequest).call(this);
  } finally {
    // Avoid leaking the listener on long-lived caller signals.
    if (signal && abortHandler) {
      signal.removeEventListener("abort", abortHandler);
    }
  }
}
|
|
3819
|
+
// Compiled (tsc `useDefineForClassFields` downlevel) private-field storage and
// private-method definitions, threaded through a comma expression whose final
// value names the computed method key `Symbol.asyncIterator`.
[(_MessageStream_currentMessageSnapshot = /* @__PURE__ */ new WeakMap(), _MessageStream_params = /* @__PURE__ */ new WeakMap(), _MessageStream_connectedPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_resolveConnectedPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_rejectConnectedPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_endPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_resolveEndPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_rejectEndPromise = /* @__PURE__ */ new WeakMap(), _MessageStream_listeners = /* @__PURE__ */ new WeakMap(), _MessageStream_ended = /* @__PURE__ */ new WeakMap(), _MessageStream_errored = /* @__PURE__ */ new WeakMap(), _MessageStream_aborted = /* @__PURE__ */ new WeakMap(), _MessageStream_catchingPromiseCreated = /* @__PURE__ */ new WeakMap(), _MessageStream_response = /* @__PURE__ */ new WeakMap(), _MessageStream_request_id = /* @__PURE__ */ new WeakMap(), _MessageStream_logger = /* @__PURE__ */ new WeakMap(), _MessageStream_handleError = /* @__PURE__ */ new WeakMap(), _MessageStream_instances = /* @__PURE__ */ new WeakSet(), _MessageStream_getFinalMessage = function _MessageStream_getFinalMessage2() {
  // #getFinalMessage: last received assistant message, or throws if none arrived.
  if (this.receivedMessages.length === 0) {
    throw new AnthropicError("stream ended without producing a Message with role=assistant");
  }
  return this.receivedMessages.at(-1);
}, _MessageStream_getFinalText = function _MessageStream_getFinalText2() {
  // #getFinalText: space-joined text of all `text` blocks in the final message.
  if (this.receivedMessages.length === 0) {
    throw new AnthropicError("stream ended without producing a Message with role=assistant");
  }
  const textBlocks = this.receivedMessages.at(-1).content.filter((block) => block.type === "text").map((block) => block.text);
  if (textBlocks.length === 0) {
    throw new AnthropicError("stream ended without producing a content block with type=text");
  }
  return textBlocks.join(" ");
}, _MessageStream_beginRequest = function _MessageStream_beginRequest2() {
  // #beginRequest: reset the accumulator before a new request begins.
  if (this.ended)
    return;
  __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, void 0, "f");
}, _MessageStream_addStreamEvent = function _MessageStream_addStreamEvent2(event) {
  // #addStreamEvent: fold one SSE event into the snapshot, then fan out the
  // corresponding granular events (`text`, `citation`, `inputJson`, ...).
  if (this.ended)
    return;
  const messageSnapshot = __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_accumulateMessage).call(this, event);
  this._emit("streamEvent", event, messageSnapshot);
  switch (event.type) {
    case "content_block_delta": {
      const content = messageSnapshot.content.at(-1);
      switch (event.delta.type) {
        case "text_delta": {
          if (content.type === "text") {
            this._emit("text", event.delta.text, content.text || "");
          }
          break;
        }
        case "citations_delta": {
          if (content.type === "text") {
            this._emit("citation", event.delta.citation, content.citations ?? []);
          }
          break;
        }
        case "input_json_delta": {
          if (tracksToolInput2(content) && content.input) {
            this._emit("inputJson", event.delta.partial_json, content.input);
          }
          break;
        }
        case "thinking_delta": {
          if (content.type === "thinking") {
            this._emit("thinking", event.delta.thinking, content.thinking);
          }
          break;
        }
        case "signature_delta": {
          if (content.type === "thinking") {
            this._emit("signature", content.signature);
          }
          break;
        }
        default:
          // Exhaustiveness check (no-op at runtime).
          checkNever2(event.delta);
      }
      break;
    }
    case "message_stop": {
      // Record the finished message both as an input param for any follow-up
      // turn and as a received (possibly parsed) message.
      this._addMessageParam(messageSnapshot);
      this._addMessage(maybeParseMessage(messageSnapshot, __classPrivateFieldGet(this, _MessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _MessageStream_logger, "f") }), true);
      break;
    }
    case "content_block_stop": {
      this._emit("contentBlock", messageSnapshot.content.at(-1));
      break;
    }
    case "message_start": {
      __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, messageSnapshot, "f");
      break;
    }
    case "content_block_start":
    case "message_delta":
      break;
  }
}, _MessageStream_endRequest = function _MessageStream_endRequest2() {
  // #endRequest: finalize and clear the snapshot; throws on protocol misuse.
  if (this.ended) {
    throw new AnthropicError(`stream has ended, this shouldn't happen`);
  }
  const snapshot = __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
  if (!snapshot) {
    throw new AnthropicError(`request ended without sending any chunks`);
  }
  __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, void 0, "f");
  return maybeParseMessage(snapshot, __classPrivateFieldGet(this, _MessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _MessageStream_logger, "f") });
}, _MessageStream_accumulateMessage = function _MessageStream_accumulateMessage2(event) {
  // #accumulateMessage: merge one event into the running Message snapshot.
  // Mutates the snapshot for message-level fields; replaces content entries
  // immutably so listeners holding old references are not surprised.
  let snapshot = __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
  if (event.type === "message_start") {
    if (snapshot) {
      throw new AnthropicError(`Unexpected event order, got ${event.type} before receiving "message_stop"`);
    }
    return event.message;
  }
  if (!snapshot) {
    throw new AnthropicError(`Unexpected event order, got ${event.type} before "message_start"`);
  }
  switch (event.type) {
    case "message_stop":
      return snapshot;
    case "message_delta":
      snapshot.stop_reason = event.delta.stop_reason;
      snapshot.stop_sequence = event.delta.stop_sequence;
      snapshot.usage.output_tokens = event.usage.output_tokens;
      // Optional usage counters are only overwritten when present.
      if (event.usage.input_tokens != null) {
        snapshot.usage.input_tokens = event.usage.input_tokens;
      }
      if (event.usage.cache_creation_input_tokens != null) {
        snapshot.usage.cache_creation_input_tokens = event.usage.cache_creation_input_tokens;
      }
      if (event.usage.cache_read_input_tokens != null) {
        snapshot.usage.cache_read_input_tokens = event.usage.cache_read_input_tokens;
      }
      if (event.usage.server_tool_use != null) {
        snapshot.usage.server_tool_use = event.usage.server_tool_use;
      }
      return snapshot;
    case "content_block_start":
      snapshot.content.push({ ...event.content_block });
      return snapshot;
    case "content_block_delta": {
      const snapshotContent = snapshot.content.at(event.index);
      switch (event.delta.type) {
        case "text_delta": {
          if (snapshotContent?.type === "text") {
            snapshot.content[event.index] = {
              ...snapshotContent,
              text: (snapshotContent.text || "") + event.delta.text
            };
          }
          break;
        }
        case "citations_delta": {
          if (snapshotContent?.type === "text") {
            snapshot.content[event.index] = {
              ...snapshotContent,
              citations: [...snapshotContent.citations ?? [], event.delta.citation]
            };
          }
          break;
        }
        case "input_json_delta": {
          if (snapshotContent && tracksToolInput2(snapshotContent)) {
            // Accumulate raw partial JSON in a hidden (non-enumerable) buffer
            // and keep `input` as its best-effort parse so far.
            let jsonBuf = snapshotContent[JSON_BUF_PROPERTY2] || "";
            jsonBuf += event.delta.partial_json;
            const newContent = { ...snapshotContent };
            Object.defineProperty(newContent, JSON_BUF_PROPERTY2, {
              value: jsonBuf,
              enumerable: false,
              writable: true
            });
            if (jsonBuf) {
              newContent.input = partialParse(jsonBuf);
            }
            snapshot.content[event.index] = newContent;
          }
          break;
        }
        case "thinking_delta": {
          if (snapshotContent?.type === "thinking") {
            snapshot.content[event.index] = {
              ...snapshotContent,
              thinking: snapshotContent.thinking + event.delta.thinking
            };
          }
          break;
        }
        case "signature_delta": {
          if (snapshotContent?.type === "thinking") {
            snapshot.content[event.index] = {
              ...snapshotContent,
              signature: event.delta.signature
            };
          }
          break;
        }
        default:
          checkNever2(event.delta);
      }
      return snapshot;
    }
    case "content_block_stop":
      return snapshot;
  }
}, Symbol.asyncIterator)]() {
  // Async iteration over raw stream events: a push queue buffers events with
  // no waiting reader; a read queue parks readers awaiting the next event.
  const pushQueue = [];
  const readQueue = [];
  let done = false;
  this.on("streamEvent", (event) => {
    const reader = readQueue.shift();
    if (reader) {
      reader.resolve(event);
    } else {
      pushQueue.push(event);
    }
  });
  this.on("end", () => {
    done = true;
    // Resolve parked readers with `undefined`, which `next()` maps to done.
    for (const reader of readQueue) {
      reader.resolve(void 0);
    }
    readQueue.length = 0;
  });
  this.on("abort", (err) => {
    done = true;
    for (const reader of readQueue) {
      reader.reject(err);
    }
    readQueue.length = 0;
  });
  this.on("error", (err) => {
    done = true;
    for (const reader of readQueue) {
      reader.reject(err);
    }
    readQueue.length = 0;
  });
  return {
    next: async () => {
      if (!pushQueue.length) {
        if (done) {
          return { value: void 0, done: true };
        }
        return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk2) => chunk2 ? { value: chunk2, done: false } : { value: void 0, done: true });
      }
      const chunk = pushQueue.shift();
      return { value: chunk, done: false };
    },
    return: async () => {
      // Early consumer exit (break / return) aborts the underlying request.
      this.abort();
      return { value: void 0, done: true };
    }
  };
}
|
|
4056
|
+
toReadableStream() {
|
|
4057
|
+
const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
|
|
4058
|
+
return stream.toReadableStream();
|
|
4059
|
+
}
|
|
4060
|
+
};
|
|
4061
|
+
// Runtime no-op used in `default` switch branches as a compile-time
// exhaustiveness check (its argument should have type `never` in the source TS).
function checkNever2(x) {
}
|
|
4063
|
+
|
|
4064
|
+
// ../node_modules/@anthropic-ai/sdk/resources/messages/batches.mjs
|
|
4065
|
+
// Thin resource wrapper for the Message Batches API: each method delegates to
// the shared HTTP client with the appropriate path/verb.
var Batches2 = class extends APIResource {
  /**
   * Send a batch of Message creation requests.
   *
   * The Message Batches API can be used to process multiple Messages API requests at
   * once. Once a Message Batch is created, it begins processing immediately. Batches
   * can take up to 24 hours to complete.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * const messageBatch = await client.messages.batches.create({
   *   requests: [
   *     {
   *       custom_id: 'my-custom-id-1',
   *       params: {
   *         max_tokens: 1024,
   *         messages: [
   *           { content: 'Hello, world', role: 'user' },
   *         ],
   *         model: 'claude-opus-4-6',
   *       },
   *     },
   *   ],
   * });
   * ```
   */
  create(body, options) {
    return this._client.post("/v1/messages/batches", { body, ...options });
  }
  /**
   * This endpoint is idempotent and can be used to poll for Message Batch
   * completion. To access the results of a Message Batch, make a request to the
   * `results_url` field in the response.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * const messageBatch = await client.messages.batches.retrieve(
   *   'message_batch_id',
   * );
   * ```
   */
  retrieve(messageBatchID, options) {
    return this._client.get(path`/v1/messages/batches/${messageBatchID}`, options);
  }
  /**
   * List all Message Batches within a Workspace. Most recently created batches are
   * returned first.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * // Automatically fetches more pages as needed.
   * for await (const messageBatch of client.messages.batches.list()) {
   *   // ...
   * }
   * ```
   */
  list(query = {}, options) {
    return this._client.getAPIList("/v1/messages/batches", Page, { query, ...options });
  }
  /**
   * Delete a Message Batch.
   *
   * Message Batches can only be deleted once they've finished processing. If you'd
   * like to delete an in-progress batch, you must first cancel it.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * const deletedMessageBatch =
   *   await client.messages.batches.delete('message_batch_id');
   * ```
   */
  delete(messageBatchID, options) {
    return this._client.delete(path`/v1/messages/batches/${messageBatchID}`, options);
  }
  /**
   * Batches may be canceled any time before processing ends. Once cancellation is
   * initiated, the batch enters a `canceling` state, at which time the system may
   * complete any in-progress, non-interruptible requests before finalizing
   * cancellation.
   *
   * The number of canceled requests is specified in `request_counts`. To determine
   * which requests were canceled, check the individual results within the batch.
   * Note that cancellation may not result in any canceled requests if they were
   * non-interruptible.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * const messageBatch = await client.messages.batches.cancel(
   *   'message_batch_id',
   * );
   * ```
   */
  cancel(messageBatchID, options) {
    return this._client.post(path`/v1/messages/batches/${messageBatchID}/cancel`, options);
  }
  /**
   * Streams the results of a Message Batch as a `.jsonl` file.
   *
   * Each line in the file is a JSON object containing the result of a single request
   * in the Message Batch. Results are not guaranteed to be in the same order as
   * requests. Use the `custom_id` field to match results to requests.
   *
   * Learn more about the Message Batches API in our
   * [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
   *
   * @example
   * ```ts
   * const messageBatchIndividualResponse =
   *   await client.messages.batches.results('message_batch_id');
   * ```
   */
  async results(messageBatchID, options) {
    // `results_url` only exists once the batch has finished processing.
    const batch = await this.retrieve(messageBatchID);
    if (!batch.results_url) {
      throw new AnthropicError(`No batch \`results_url\`; Has it finished processing? ${batch.processing_status} - ${batch.id}`);
    }
    // Fetch the raw JSONL body and lazily decode it line-by-line.
    return this._client.get(batch.results_url, {
      ...options,
      headers: buildHeaders([{ Accept: "application/binary" }, options?.headers]),
      stream: true,
      __binaryResponse: true
    })._thenUnwrap((_, props) => JSONLDecoder.fromResponse(props.response, props.controller));
  }
};
|
|
4204
|
+
|
|
4205
|
+
// ../node_modules/@anthropic-ai/sdk/resources/messages/messages.mjs
|
|
4206
|
+
var Messages2 = class extends APIResource {
|
|
4207
|
+
constructor() {
  super(...arguments);
  // Nested resource, exposed as `client.messages.batches.*`.
  this.batches = new Batches2(this._client);
}
|
|
4211
|
+
// Creates a Message. Emits deprecation warnings for retired models and for
// `thinking.type=enabled` on certain models, derives a timeout for large
// non-streaming requests, and posts to `/v1/messages`.
create(body, options) {
  // Warn (but do not block) on models scheduled for end-of-life.
  if (body.model in DEPRECATED_MODELS2) {
    console.warn(`The model '${body.model}' is deprecated and will reach end-of-life on ${DEPRECATED_MODELS2[body.model]}
Please migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.`);
  }
  if (body.model in MODELS_TO_WARN_WITH_THINKING_ENABLED2 && body.thinking && body.thinking.type === "enabled") {
    console.warn(`Using Claude with ${body.model} and 'thinking.type=enabled' is deprecated. Use 'thinking.type=adaptive' instead which results in better model performance in our testing: https://platform.claude.com/docs/en/build-with-claude/adaptive-thinking`);
  }
  let timeout = this._client._options.timeout;
  if (!body.stream && timeout == null) {
    // Non-streaming responses can take a long time for large token budgets;
    // size the timeout from `max_tokens` when the client did not set one.
    const maxNonstreamingTokens = MODEL_NONSTREAMING_TOKENS[body.model] ?? void 0;
    timeout = this._client.calculateNonstreamingTimeout(body.max_tokens, maxNonstreamingTokens);
  }
  const helperHeader = stainlessHelperHeader(body.tools, body.messages);
  return this._client.post("/v1/messages", {
    body,
    // 6e5 ms = 10 minutes, the fallback when no timeout was derived.
    timeout: timeout ?? 6e5,
    ...options,
    headers: buildHeaders([helperHeader, options?.headers]),
    stream: body.stream ?? false
  });
}
|
|
4233
|
+
/**
|
|
4234
|
+
* Send a structured list of input messages with text and/or image content, along with an expected `output_config.format` and
|
|
4235
|
+
* the response will be automatically parsed and available in the `parsed_output` property of the message.
|
|
4236
|
+
*
|
|
4237
|
+
* @example
|
|
4238
|
+
* ```ts
|
|
4239
|
+
* const message = await client.messages.parse({
|
|
4240
|
+
* model: 'claude-sonnet-4-5-20250929',
|
|
4241
|
+
* max_tokens: 1024,
|
|
4242
|
+
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
|
4243
|
+
* output_config: {
|
|
4244
|
+
* format: zodOutputFormat(z.object({ answer: z.number() })),
|
|
4245
|
+
* },
|
|
4246
|
+
* });
|
|
4247
|
+
*
|
|
4248
|
+
* console.log(message.parsed_output?.answer); // 4
|
|
4249
|
+
* ```
|
|
4250
|
+
*/
|
|
4251
|
+
parse(params, options) {
|
|
4252
|
+
return this.create(params, options).then((message) => parseMessage(message, params, { logger: this._client.logger ?? console }));
|
|
4253
|
+
}
|
|
4254
|
+
/**
|
|
4255
|
+
* Create a Message stream.
|
|
4256
|
+
*
|
|
4257
|
+
* If `output_config.format` is provided with a parseable format (like `zodOutputFormat()`),
|
|
4258
|
+
* the final message will include a `parsed_output` property with the parsed content.
|
|
4259
|
+
*
|
|
4260
|
+
* @example
|
|
4261
|
+
* ```ts
|
|
4262
|
+
* const stream = client.messages.stream({
|
|
4263
|
+
* model: 'claude-sonnet-4-5-20250929',
|
|
4264
|
+
* max_tokens: 1024,
|
|
4265
|
+
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
|
4266
|
+
* output_config: {
|
|
4267
|
+
* format: zodOutputFormat(z.object({ answer: z.number() })),
|
|
4268
|
+
* },
|
|
4269
|
+
* });
|
|
4270
|
+
*
|
|
4271
|
+
* const message = await stream.finalMessage();
|
|
4272
|
+
* console.log(message.parsed_output?.answer); // 4
|
|
4273
|
+
* ```
|
|
4274
|
+
*/
|
|
4275
|
+
stream(body, options) {
|
|
4276
|
+
return MessageStream.createMessage(this, body, options, { logger: this._client.logger ?? console });
|
|
4277
|
+
}
|
|
4278
|
+
/**
|
|
4279
|
+
* Count the number of tokens in a Message.
|
|
4280
|
+
*
|
|
4281
|
+
* The Token Count API can be used to count the number of tokens in a Message,
|
|
4282
|
+
* including tools, images, and documents, without creating it.
|
|
4283
|
+
*
|
|
4284
|
+
* Learn more about token counting in our
|
|
4285
|
+
* [user guide](https://docs.claude.com/en/docs/build-with-claude/token-counting)
|
|
4286
|
+
*
|
|
4287
|
+
* @example
|
|
4288
|
+
* ```ts
|
|
4289
|
+
* const messageTokensCount =
|
|
4290
|
+
* await client.messages.countTokens({
|
|
4291
|
+
* messages: [{ content: 'string', role: 'user' }],
|
|
4292
|
+
* model: 'claude-opus-4-6',
|
|
4293
|
+
* });
|
|
4294
|
+
* ```
|
|
4295
|
+
*/
|
|
4296
|
+
countTokens(body, options) {
|
|
4297
|
+
return this._client.post("/v1/messages/count_tokens", { body, ...options });
|
|
4298
|
+
}
|
|
4299
|
+
};
|
|
4300
|
+
var DEPRECATED_MODELS2 = {
|
|
4301
|
+
"claude-1.3": "November 6th, 2024",
|
|
4302
|
+
"claude-1.3-100k": "November 6th, 2024",
|
|
4303
|
+
"claude-instant-1.1": "November 6th, 2024",
|
|
4304
|
+
"claude-instant-1.1-100k": "November 6th, 2024",
|
|
4305
|
+
"claude-instant-1.2": "November 6th, 2024",
|
|
4306
|
+
"claude-3-sonnet-20240229": "July 21st, 2025",
|
|
4307
|
+
"claude-3-opus-20240229": "January 5th, 2026",
|
|
4308
|
+
"claude-2.1": "July 21st, 2025",
|
|
4309
|
+
"claude-2.0": "July 21st, 2025",
|
|
4310
|
+
"claude-3-7-sonnet-latest": "February 19th, 2026",
|
|
4311
|
+
"claude-3-7-sonnet-20250219": "February 19th, 2026",
|
|
4312
|
+
"claude-3-5-haiku-latest": "February 19th, 2026",
|
|
4313
|
+
"claude-3-5-haiku-20241022": "February 19th, 2026"
|
|
4314
|
+
};
|
|
4315
|
+
var MODELS_TO_WARN_WITH_THINKING_ENABLED2 = ["claude-opus-4-6"];
|
|
4316
|
+
Messages2.Batches = Batches2;
|
|
4317
|
+
|
|
4318
|
+
// ../node_modules/@anthropic-ai/sdk/resources/models.mjs
|
|
4319
|
+
var Models2 = class extends APIResource {
|
|
4320
|
+
/**
|
|
4321
|
+
* Get a specific model.
|
|
4322
|
+
*
|
|
4323
|
+
* The Models API response can be used to determine information about a specific
|
|
4324
|
+
* model or resolve a model alias to a model ID.
|
|
4325
|
+
*/
|
|
4326
|
+
retrieve(modelID, params = {}, options) {
|
|
4327
|
+
const { betas } = params ?? {};
|
|
4328
|
+
return this._client.get(path`/v1/models/${modelID}`, {
|
|
4329
|
+
...options,
|
|
4330
|
+
headers: buildHeaders([
|
|
4331
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
4332
|
+
options?.headers
|
|
4333
|
+
])
|
|
4334
|
+
});
|
|
4335
|
+
}
|
|
4336
|
+
/**
|
|
4337
|
+
* List available models.
|
|
4338
|
+
*
|
|
4339
|
+
* The Models API response can be used to determine which models are available for
|
|
4340
|
+
* use in the API. More recently released models are listed first.
|
|
4341
|
+
*/
|
|
4342
|
+
list(params = {}, options) {
|
|
4343
|
+
const { betas, ...query } = params ?? {};
|
|
4344
|
+
return this._client.getAPIList("/v1/models", Page, {
|
|
4345
|
+
query,
|
|
4346
|
+
...options,
|
|
4347
|
+
headers: buildHeaders([
|
|
4348
|
+
{ ...betas?.toString() != null ? { "anthropic-beta": betas?.toString() } : void 0 },
|
|
4349
|
+
options?.headers
|
|
4350
|
+
])
|
|
4351
|
+
});
|
|
4352
|
+
}
|
|
4353
|
+
};
|
|
4354
|
+
|
|
4355
|
+
// ../node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
|
|
4356
|
+
var readEnv = (env) => {
|
|
4357
|
+
if (typeof globalThis.process !== "undefined") {
|
|
4358
|
+
return globalThis.process.env?.[env]?.trim() ?? void 0;
|
|
4359
|
+
}
|
|
4360
|
+
if (typeof globalThis.Deno !== "undefined") {
|
|
4361
|
+
return globalThis.Deno.env?.get?.(env)?.trim();
|
|
4362
|
+
}
|
|
4363
|
+
return void 0;
|
|
4364
|
+
};
|
|
4365
|
+
|
|
4366
|
+
// ../node_modules/@anthropic-ai/sdk/client.mjs
|
|
4367
|
+
var _BaseAnthropic_instances;
|
|
4368
|
+
var _a;
|
|
4369
|
+
var _BaseAnthropic_encoder;
|
|
4370
|
+
var _BaseAnthropic_baseURLOverridden;
|
|
4371
|
+
var HUMAN_PROMPT = "\\n\\nHuman:";
|
|
4372
|
+
var AI_PROMPT = "\\n\\nAssistant:";
|
|
4373
|
+
var BaseAnthropic = class {
|
|
4374
|
+
/**
|
|
4375
|
+
* API Client for interfacing with the Anthropic API.
|
|
4376
|
+
*
|
|
4377
|
+
* @param {string | null | undefined} [opts.apiKey=process.env['ANTHROPIC_API_KEY'] ?? null]
|
|
4378
|
+
* @param {string | null | undefined} [opts.authToken=process.env['ANTHROPIC_AUTH_TOKEN'] ?? null]
|
|
4379
|
+
* @param {string} [opts.baseURL=process.env['ANTHROPIC_BASE_URL'] ?? https://api.anthropic.com] - Override the default base URL for the API.
|
|
4380
|
+
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
|
4381
|
+
* @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
|
|
4382
|
+
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
|
4383
|
+
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
|
4384
|
+
* @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
|
|
4385
|
+
* @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
|
|
4386
|
+
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
|
4387
|
+
*/
|
|
4388
|
+
constructor({ baseURL = readEnv("ANTHROPIC_BASE_URL"), apiKey = readEnv("ANTHROPIC_API_KEY") ?? null, authToken = readEnv("ANTHROPIC_AUTH_TOKEN") ?? null, ...opts } = {}) {
|
|
4389
|
+
_BaseAnthropic_instances.add(this);
|
|
4390
|
+
_BaseAnthropic_encoder.set(this, void 0);
|
|
4391
|
+
const options = {
|
|
4392
|
+
apiKey,
|
|
4393
|
+
authToken,
|
|
4394
|
+
...opts,
|
|
4395
|
+
baseURL: baseURL || `https://api.anthropic.com`
|
|
4396
|
+
};
|
|
4397
|
+
if (!options.dangerouslyAllowBrowser && isRunningInBrowser()) {
|
|
4398
|
+
throw new AnthropicError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew Anthropic({ apiKey, dangerouslyAllowBrowser: true });\n");
|
|
4399
|
+
}
|
|
4400
|
+
this.baseURL = options.baseURL;
|
|
4401
|
+
this.timeout = options.timeout ?? _a.DEFAULT_TIMEOUT;
|
|
4402
|
+
this.logger = options.logger ?? console;
|
|
4403
|
+
const defaultLogLevel = "warn";
|
|
4404
|
+
this.logLevel = defaultLogLevel;
|
|
4405
|
+
this.logLevel = parseLogLevel(options.logLevel, "ClientOptions.logLevel", this) ?? parseLogLevel(readEnv("ANTHROPIC_LOG"), "process.env['ANTHROPIC_LOG']", this) ?? defaultLogLevel;
|
|
4406
|
+
this.fetchOptions = options.fetchOptions;
|
|
4407
|
+
this.maxRetries = options.maxRetries ?? 2;
|
|
4408
|
+
this.fetch = options.fetch ?? getDefaultFetch();
|
|
4409
|
+
__classPrivateFieldSet(this, _BaseAnthropic_encoder, FallbackEncoder, "f");
|
|
4410
|
+
this._options = options;
|
|
4411
|
+
this.apiKey = typeof apiKey === "string" ? apiKey : null;
|
|
4412
|
+
this.authToken = authToken;
|
|
4413
|
+
}
|
|
4414
|
+
/**
|
|
4415
|
+
* Create a new client instance re-using the same options given to the current client with optional overriding.
|
|
4416
|
+
*/
|
|
4417
|
+
withOptions(options) {
|
|
4418
|
+
const client = new this.constructor({
|
|
4419
|
+
...this._options,
|
|
4420
|
+
baseURL: this.baseURL,
|
|
4421
|
+
maxRetries: this.maxRetries,
|
|
4422
|
+
timeout: this.timeout,
|
|
4423
|
+
logger: this.logger,
|
|
4424
|
+
logLevel: this.logLevel,
|
|
4425
|
+
fetch: this.fetch,
|
|
4426
|
+
fetchOptions: this.fetchOptions,
|
|
4427
|
+
apiKey: this.apiKey,
|
|
4428
|
+
authToken: this.authToken,
|
|
4429
|
+
...options
|
|
4430
|
+
});
|
|
4431
|
+
return client;
|
|
4432
|
+
}
|
|
4433
|
+
defaultQuery() {
|
|
4434
|
+
return this._options.defaultQuery;
|
|
4435
|
+
}
|
|
4436
|
+
validateHeaders({ values, nulls }) {
|
|
4437
|
+
if (values.get("x-api-key") || values.get("authorization")) {
|
|
4438
|
+
return;
|
|
4439
|
+
}
|
|
4440
|
+
if (this.apiKey && values.get("x-api-key")) {
|
|
4441
|
+
return;
|
|
4442
|
+
}
|
|
4443
|
+
if (nulls.has("x-api-key")) {
|
|
4444
|
+
return;
|
|
4445
|
+
}
|
|
4446
|
+
if (this.authToken && values.get("authorization")) {
|
|
4447
|
+
return;
|
|
4448
|
+
}
|
|
4449
|
+
if (nulls.has("authorization")) {
|
|
4450
|
+
return;
|
|
4451
|
+
}
|
|
4452
|
+
throw new Error('Could not resolve authentication method. Expected either apiKey or authToken to be set. Or for one of the "X-Api-Key" or "Authorization" headers to be explicitly omitted');
|
|
4453
|
+
}
|
|
4454
|
+
async authHeaders(opts) {
|
|
4455
|
+
return buildHeaders([await this.apiKeyAuth(opts), await this.bearerAuth(opts)]);
|
|
4456
|
+
}
|
|
4457
|
+
async apiKeyAuth(opts) {
|
|
4458
|
+
if (this.apiKey == null) {
|
|
4459
|
+
return void 0;
|
|
4460
|
+
}
|
|
4461
|
+
return buildHeaders([{ "X-Api-Key": this.apiKey }]);
|
|
4462
|
+
}
|
|
4463
|
+
async bearerAuth(opts) {
|
|
4464
|
+
if (this.authToken == null) {
|
|
4465
|
+
return void 0;
|
|
4466
|
+
}
|
|
4467
|
+
return buildHeaders([{ Authorization: `Bearer ${this.authToken}` }]);
|
|
4468
|
+
}
|
|
4469
|
+
/**
|
|
4470
|
+
* Basic re-implementation of `qs.stringify` for primitive types.
|
|
4471
|
+
*/
|
|
4472
|
+
stringifyQuery(query) {
|
|
4473
|
+
return Object.entries(query).filter(([_, value]) => typeof value !== "undefined").map(([key, value]) => {
|
|
4474
|
+
if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
|
|
4475
|
+
return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;
|
|
4476
|
+
}
|
|
4477
|
+
if (value === null) {
|
|
4478
|
+
return `${encodeURIComponent(key)}=`;
|
|
4479
|
+
}
|
|
4480
|
+
throw new AnthropicError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. { query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`);
|
|
4481
|
+
}).join("&");
|
|
4482
|
+
}
|
|
4483
|
+
getUserAgent() {
|
|
4484
|
+
return `${this.constructor.name}/JS ${VERSION}`;
|
|
4485
|
+
}
|
|
4486
|
+
defaultIdempotencyKey() {
|
|
4487
|
+
return `stainless-node-retry-${uuid4()}`;
|
|
4488
|
+
}
|
|
4489
|
+
makeStatusError(status, error, message, headers) {
|
|
4490
|
+
return APIError.generate(status, error, message, headers);
|
|
4491
|
+
}
|
|
4492
|
+
buildURL(path2, query, defaultBaseURL) {
|
|
4493
|
+
const baseURL = !__classPrivateFieldGet(this, _BaseAnthropic_instances, "m", _BaseAnthropic_baseURLOverridden).call(this) && defaultBaseURL || this.baseURL;
|
|
4494
|
+
const url = isAbsoluteURL(path2) ? new URL(path2) : new URL(baseURL + (baseURL.endsWith("/") && path2.startsWith("/") ? path2.slice(1) : path2));
|
|
4495
|
+
const defaultQuery = this.defaultQuery();
|
|
4496
|
+
if (!isEmptyObj(defaultQuery)) {
|
|
4497
|
+
query = { ...defaultQuery, ...query };
|
|
4498
|
+
}
|
|
4499
|
+
if (typeof query === "object" && query && !Array.isArray(query)) {
|
|
4500
|
+
url.search = this.stringifyQuery(query);
|
|
4501
|
+
}
|
|
4502
|
+
return url.toString();
|
|
4503
|
+
}
|
|
4504
|
+
_calculateNonstreamingTimeout(maxTokens) {
|
|
4505
|
+
const defaultTimeout = 10 * 60;
|
|
4506
|
+
const expectedTimeout = 60 * 60 * maxTokens / 128e3;
|
|
4507
|
+
if (expectedTimeout > defaultTimeout) {
|
|
4508
|
+
throw new AnthropicError("Streaming is required for operations that may take longer than 10 minutes. See https://github.com/anthropics/anthropic-sdk-typescript#streaming-responses for more details");
|
|
4509
|
+
}
|
|
4510
|
+
return defaultTimeout * 1e3;
|
|
4511
|
+
}
|
|
4512
|
+
/**
|
|
4513
|
+
* Used as a callback for mutating the given `FinalRequestOptions` object.
|
|
4514
|
+
*/
|
|
4515
|
+
async prepareOptions(options) {
|
|
4516
|
+
}
|
|
4517
|
+
/**
|
|
4518
|
+
* Used as a callback for mutating the given `RequestInit` object.
|
|
4519
|
+
*
|
|
4520
|
+
* This is useful for cases where you want to add certain headers based off of
|
|
4521
|
+
* the request properties, e.g. `method` or `url`.
|
|
4522
|
+
*/
|
|
4523
|
+
async prepareRequest(request, { url, options }) {
|
|
4524
|
+
}
|
|
4525
|
+
get(path2, opts) {
|
|
4526
|
+
return this.methodRequest("get", path2, opts);
|
|
4527
|
+
}
|
|
4528
|
+
post(path2, opts) {
|
|
4529
|
+
return this.methodRequest("post", path2, opts);
|
|
4530
|
+
}
|
|
4531
|
+
patch(path2, opts) {
|
|
4532
|
+
return this.methodRequest("patch", path2, opts);
|
|
4533
|
+
}
|
|
4534
|
+
put(path2, opts) {
|
|
4535
|
+
return this.methodRequest("put", path2, opts);
|
|
4536
|
+
}
|
|
4537
|
+
delete(path2, opts) {
|
|
4538
|
+
return this.methodRequest("delete", path2, opts);
|
|
4539
|
+
}
|
|
4540
|
+
methodRequest(method, path2, opts) {
|
|
4541
|
+
return this.request(Promise.resolve(opts).then((opts2) => {
|
|
4542
|
+
return { method, path: path2, ...opts2 };
|
|
4543
|
+
}));
|
|
4544
|
+
}
|
|
4545
|
+
request(options, remainingRetries = null) {
|
|
4546
|
+
return new APIPromise(this, this.makeRequest(options, remainingRetries, void 0));
|
|
4547
|
+
}
|
|
4548
|
+
async makeRequest(optionsInput, retriesRemaining, retryOfRequestLogID) {
|
|
4549
|
+
const options = await optionsInput;
|
|
4550
|
+
const maxRetries = options.maxRetries ?? this.maxRetries;
|
|
4551
|
+
if (retriesRemaining == null) {
|
|
4552
|
+
retriesRemaining = maxRetries;
|
|
4553
|
+
}
|
|
4554
|
+
await this.prepareOptions(options);
|
|
4555
|
+
const { req, url, timeout } = await this.buildRequest(options, {
|
|
4556
|
+
retryCount: maxRetries - retriesRemaining
|
|
4557
|
+
});
|
|
4558
|
+
await this.prepareRequest(req, { url, options });
|
|
4559
|
+
const requestLogID = "log_" + (Math.random() * (1 << 24) | 0).toString(16).padStart(6, "0");
|
|
4560
|
+
const retryLogStr = retryOfRequestLogID === void 0 ? "" : `, retryOf: ${retryOfRequestLogID}`;
|
|
4561
|
+
const startTime = Date.now();
|
|
4562
|
+
loggerFor(this).debug(`[${requestLogID}] sending request`, formatRequestDetails({
|
|
4563
|
+
retryOfRequestLogID,
|
|
4564
|
+
method: options.method,
|
|
4565
|
+
url,
|
|
4566
|
+
options,
|
|
4567
|
+
headers: req.headers
|
|
4568
|
+
}));
|
|
4569
|
+
if (options.signal?.aborted) {
|
|
4570
|
+
throw new APIUserAbortError();
|
|
4571
|
+
}
|
|
4572
|
+
const controller = new AbortController();
|
|
4573
|
+
const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError);
|
|
4574
|
+
const headersTime = Date.now();
|
|
4575
|
+
if (response instanceof globalThis.Error) {
|
|
4576
|
+
const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
|
|
4577
|
+
if (options.signal?.aborted) {
|
|
4578
|
+
throw new APIUserAbortError();
|
|
4579
|
+
}
|
|
4580
|
+
const isTimeout = isAbortError(response) || /timed? ?out/i.test(String(response) + ("cause" in response ? String(response.cause) : ""));
|
|
4581
|
+
if (retriesRemaining) {
|
|
4582
|
+
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? "timed out" : "failed"} - ${retryMessage}`);
|
|
4583
|
+
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? "timed out" : "failed"} (${retryMessage})`, formatRequestDetails({
|
|
4584
|
+
retryOfRequestLogID,
|
|
4585
|
+
url,
|
|
4586
|
+
durationMs: headersTime - startTime,
|
|
4587
|
+
message: response.message
|
|
4588
|
+
}));
|
|
4589
|
+
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID);
|
|
4590
|
+
}
|
|
4591
|
+
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? "timed out" : "failed"} - error; no more retries left`);
|
|
4592
|
+
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? "timed out" : "failed"} (error; no more retries left)`, formatRequestDetails({
|
|
4593
|
+
retryOfRequestLogID,
|
|
4594
|
+
url,
|
|
4595
|
+
durationMs: headersTime - startTime,
|
|
4596
|
+
message: response.message
|
|
4597
|
+
}));
|
|
4598
|
+
if (isTimeout) {
|
|
4599
|
+
throw new APIConnectionTimeoutError();
|
|
4600
|
+
}
|
|
4601
|
+
throw new APIConnectionError({ cause: response });
|
|
4602
|
+
}
|
|
4603
|
+
const specialHeaders = [...response.headers.entries()].filter(([name]) => name === "request-id").map(([name, value]) => ", " + name + ": " + JSON.stringify(value)).join("");
|
|
4604
|
+
const responseInfo = `[${requestLogID}${retryLogStr}${specialHeaders}] ${req.method} ${url} ${response.ok ? "succeeded" : "failed"} with status ${response.status} in ${headersTime - startTime}ms`;
|
|
4605
|
+
if (!response.ok) {
|
|
4606
|
+
const shouldRetry = await this.shouldRetry(response);
|
|
4607
|
+
if (retriesRemaining && shouldRetry) {
|
|
4608
|
+
const retryMessage2 = `retrying, ${retriesRemaining} attempts remaining`;
|
|
4609
|
+
await CancelReadableStream(response.body);
|
|
4610
|
+
loggerFor(this).info(`${responseInfo} - ${retryMessage2}`);
|
|
4611
|
+
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage2})`, formatRequestDetails({
|
|
4612
|
+
retryOfRequestLogID,
|
|
4613
|
+
url: response.url,
|
|
4614
|
+
status: response.status,
|
|
4615
|
+
headers: response.headers,
|
|
4616
|
+
durationMs: headersTime - startTime
|
|
4617
|
+
}));
|
|
4618
|
+
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID, response.headers);
|
|
4619
|
+
}
|
|
4620
|
+
const retryMessage = shouldRetry ? `error; no more retries left` : `error; not retryable`;
|
|
4621
|
+
loggerFor(this).info(`${responseInfo} - ${retryMessage}`);
|
|
4622
|
+
const errText = await response.text().catch((err2) => castToError(err2).message);
|
|
4623
|
+
const errJSON = safeJSON(errText);
|
|
4624
|
+
const errMessage = errJSON ? void 0 : errText;
|
|
4625
|
+
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage})`, formatRequestDetails({
|
|
4626
|
+
retryOfRequestLogID,
|
|
4627
|
+
url: response.url,
|
|
4628
|
+
status: response.status,
|
|
4629
|
+
headers: response.headers,
|
|
4630
|
+
message: errMessage,
|
|
4631
|
+
durationMs: Date.now() - startTime
|
|
4632
|
+
}));
|
|
4633
|
+
const err = this.makeStatusError(response.status, errJSON, errMessage, response.headers);
|
|
4634
|
+
throw err;
|
|
4635
|
+
}
|
|
4636
|
+
loggerFor(this).info(responseInfo);
|
|
4637
|
+
loggerFor(this).debug(`[${requestLogID}] response start`, formatRequestDetails({
|
|
4638
|
+
retryOfRequestLogID,
|
|
4639
|
+
url: response.url,
|
|
4640
|
+
status: response.status,
|
|
4641
|
+
headers: response.headers,
|
|
4642
|
+
durationMs: headersTime - startTime
|
|
4643
|
+
}));
|
|
4644
|
+
return { response, options, controller, requestLogID, retryOfRequestLogID, startTime };
|
|
4645
|
+
}
|
|
4646
|
+
getAPIList(path2, Page2, opts) {
|
|
4647
|
+
return this.requestAPIList(Page2, opts && "then" in opts ? opts.then((opts2) => ({ method: "get", path: path2, ...opts2 })) : { method: "get", path: path2, ...opts });
|
|
4648
|
+
}
|
|
4649
|
+
requestAPIList(Page2, options) {
|
|
4650
|
+
const request = this.makeRequest(options, null, void 0);
|
|
4651
|
+
return new PagePromise(this, request, Page2);
|
|
4652
|
+
}
|
|
4653
|
+
async fetchWithTimeout(url, init, ms, controller) {
|
|
4654
|
+
const { signal, method, ...options } = init || {};
|
|
4655
|
+
const abort = this._makeAbort(controller);
|
|
4656
|
+
if (signal)
|
|
4657
|
+
signal.addEventListener("abort", abort, { once: true });
|
|
4658
|
+
const timeout = setTimeout(abort, ms);
|
|
4659
|
+
const isReadableBody = globalThis.ReadableStream && options.body instanceof globalThis.ReadableStream || typeof options.body === "object" && options.body !== null && Symbol.asyncIterator in options.body;
|
|
4660
|
+
const fetchOptions = {
|
|
4661
|
+
signal: controller.signal,
|
|
4662
|
+
...isReadableBody ? { duplex: "half" } : {},
|
|
4663
|
+
method: "GET",
|
|
4664
|
+
...options
|
|
4665
|
+
};
|
|
4666
|
+
if (method) {
|
|
4667
|
+
fetchOptions.method = method.toUpperCase();
|
|
4668
|
+
}
|
|
4669
|
+
try {
|
|
4670
|
+
return await this.fetch.call(void 0, url, fetchOptions);
|
|
4671
|
+
} finally {
|
|
4672
|
+
clearTimeout(timeout);
|
|
4673
|
+
}
|
|
4674
|
+
}
|
|
4675
|
+
async shouldRetry(response) {
|
|
4676
|
+
const shouldRetryHeader = response.headers.get("x-should-retry");
|
|
4677
|
+
if (shouldRetryHeader === "true")
|
|
4678
|
+
return true;
|
|
4679
|
+
if (shouldRetryHeader === "false")
|
|
4680
|
+
return false;
|
|
4681
|
+
if (response.status === 408)
|
|
4682
|
+
return true;
|
|
4683
|
+
if (response.status === 409)
|
|
4684
|
+
return true;
|
|
4685
|
+
if (response.status === 429)
|
|
4686
|
+
return true;
|
|
4687
|
+
if (response.status >= 500)
|
|
4688
|
+
return true;
|
|
4689
|
+
return false;
|
|
4690
|
+
}
|
|
4691
|
+
async retryRequest(options, retriesRemaining, requestLogID, responseHeaders) {
|
|
4692
|
+
let timeoutMillis;
|
|
4693
|
+
const retryAfterMillisHeader = responseHeaders?.get("retry-after-ms");
|
|
4694
|
+
if (retryAfterMillisHeader) {
|
|
4695
|
+
const timeoutMs = parseFloat(retryAfterMillisHeader);
|
|
4696
|
+
if (!Number.isNaN(timeoutMs)) {
|
|
4697
|
+
timeoutMillis = timeoutMs;
|
|
4698
|
+
}
|
|
4699
|
+
}
|
|
4700
|
+
const retryAfterHeader = responseHeaders?.get("retry-after");
|
|
4701
|
+
if (retryAfterHeader && !timeoutMillis) {
|
|
4702
|
+
const timeoutSeconds = parseFloat(retryAfterHeader);
|
|
4703
|
+
if (!Number.isNaN(timeoutSeconds)) {
|
|
4704
|
+
timeoutMillis = timeoutSeconds * 1e3;
|
|
4705
|
+
} else {
|
|
4706
|
+
timeoutMillis = Date.parse(retryAfterHeader) - Date.now();
|
|
4707
|
+
}
|
|
4708
|
+
}
|
|
4709
|
+
if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1e3)) {
|
|
4710
|
+
const maxRetries = options.maxRetries ?? this.maxRetries;
|
|
4711
|
+
timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);
|
|
4712
|
+
}
|
|
4713
|
+
await sleep(timeoutMillis);
|
|
4714
|
+
return this.makeRequest(options, retriesRemaining - 1, requestLogID);
|
|
4715
|
+
}
|
|
4716
|
+
calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {
|
|
4717
|
+
const initialRetryDelay = 0.5;
|
|
4718
|
+
const maxRetryDelay = 8;
|
|
4719
|
+
const numRetries = maxRetries - retriesRemaining;
|
|
4720
|
+
const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);
|
|
4721
|
+
const jitter = 1 - Math.random() * 0.25;
|
|
4722
|
+
return sleepSeconds * jitter * 1e3;
|
|
4723
|
+
}
|
|
4724
|
+
calculateNonstreamingTimeout(maxTokens, maxNonstreamingTokens) {
|
|
4725
|
+
const maxTime = 60 * 60 * 1e3;
|
|
4726
|
+
const defaultTime = 60 * 10 * 1e3;
|
|
4727
|
+
const expectedTime = maxTime * maxTokens / 128e3;
|
|
4728
|
+
if (expectedTime > defaultTime || maxNonstreamingTokens != null && maxTokens > maxNonstreamingTokens) {
|
|
4729
|
+
throw new AnthropicError("Streaming is required for operations that may take longer than 10 minutes. See https://github.com/anthropics/anthropic-sdk-typescript#long-requests for more details");
|
|
4730
|
+
}
|
|
4731
|
+
return defaultTime;
|
|
4732
|
+
}
|
|
4733
|
+
async buildRequest(inputOptions, { retryCount = 0 } = {}) {
|
|
4734
|
+
const options = { ...inputOptions };
|
|
4735
|
+
const { method, path: path2, query, defaultBaseURL } = options;
|
|
4736
|
+
const url = this.buildURL(path2, query, defaultBaseURL);
|
|
4737
|
+
if ("timeout" in options)
|
|
4738
|
+
validatePositiveInteger("timeout", options.timeout);
|
|
4739
|
+
options.timeout = options.timeout ?? this.timeout;
|
|
4740
|
+
const { bodyHeaders, body } = this.buildBody({ options });
|
|
4741
|
+
const reqHeaders = await this.buildHeaders({ options: inputOptions, method, bodyHeaders, retryCount });
|
|
4742
|
+
const req = {
|
|
4743
|
+
method,
|
|
4744
|
+
headers: reqHeaders,
|
|
4745
|
+
...options.signal && { signal: options.signal },
|
|
4746
|
+
...globalThis.ReadableStream && body instanceof globalThis.ReadableStream && { duplex: "half" },
|
|
4747
|
+
...body && { body },
|
|
4748
|
+
...this.fetchOptions ?? {},
|
|
4749
|
+
...options.fetchOptions ?? {}
|
|
4750
|
+
};
|
|
4751
|
+
return { req, url, timeout: options.timeout };
|
|
4752
|
+
}
|
|
4753
|
+
async buildHeaders({ options, method, bodyHeaders, retryCount }) {
|
|
4754
|
+
let idempotencyHeaders = {};
|
|
4755
|
+
if (this.idempotencyHeader && method !== "get") {
|
|
4756
|
+
if (!options.idempotencyKey)
|
|
4757
|
+
options.idempotencyKey = this.defaultIdempotencyKey();
|
|
4758
|
+
idempotencyHeaders[this.idempotencyHeader] = options.idempotencyKey;
|
|
4759
|
+
}
|
|
4760
|
+
const headers = buildHeaders([
|
|
4761
|
+
idempotencyHeaders,
|
|
4762
|
+
{
|
|
4763
|
+
Accept: "application/json",
|
|
4764
|
+
"User-Agent": this.getUserAgent(),
|
|
4765
|
+
"X-Stainless-Retry-Count": String(retryCount),
|
|
4766
|
+
...options.timeout ? { "X-Stainless-Timeout": String(Math.trunc(options.timeout / 1e3)) } : {},
|
|
4767
|
+
...getPlatformHeaders(),
|
|
4768
|
+
...this._options.dangerouslyAllowBrowser ? { "anthropic-dangerous-direct-browser-access": "true" } : void 0,
|
|
4769
|
+
"anthropic-version": "2023-06-01"
|
|
4770
|
+
},
|
|
4771
|
+
await this.authHeaders(options),
|
|
4772
|
+
this._options.defaultHeaders,
|
|
4773
|
+
bodyHeaders,
|
|
4774
|
+
options.headers
|
|
4775
|
+
]);
|
|
4776
|
+
this.validateHeaders(headers);
|
|
4777
|
+
return headers.values;
|
|
4778
|
+
}
|
|
4779
|
+
_makeAbort(controller) {
|
|
4780
|
+
return () => controller.abort();
|
|
4781
|
+
}
|
|
4782
|
+
buildBody({ options: { body, headers: rawHeaders } }) {
|
|
4783
|
+
if (!body) {
|
|
4784
|
+
return { bodyHeaders: void 0, body: void 0 };
|
|
4785
|
+
}
|
|
4786
|
+
const headers = buildHeaders([rawHeaders]);
|
|
4787
|
+
if (
|
|
4788
|
+
// Pass raw type verbatim
|
|
4789
|
+
ArrayBuffer.isView(body) || body instanceof ArrayBuffer || body instanceof DataView || typeof body === "string" && // Preserve legacy string encoding behavior for now
|
|
4790
|
+
headers.values.has("content-type") || // `Blob` is superset of `File`
|
|
4791
|
+
globalThis.Blob && body instanceof globalThis.Blob || // `FormData` -> `multipart/form-data`
|
|
4792
|
+
body instanceof FormData || // `URLSearchParams` -> `application/x-www-form-urlencoded`
|
|
4793
|
+
body instanceof URLSearchParams || // Send chunked stream (each chunk has own `length`)
|
|
4794
|
+
globalThis.ReadableStream && body instanceof globalThis.ReadableStream
|
|
4795
|
+
) {
|
|
4796
|
+
return { bodyHeaders: void 0, body };
|
|
4797
|
+
} else if (typeof body === "object" && (Symbol.asyncIterator in body || Symbol.iterator in body && "next" in body && typeof body.next === "function")) {
|
|
4798
|
+
return { bodyHeaders: void 0, body: ReadableStreamFrom(body) };
|
|
4799
|
+
} else {
|
|
4800
|
+
return __classPrivateFieldGet(this, _BaseAnthropic_encoder, "f").call(this, { body, headers });
|
|
4801
|
+
}
|
|
4802
|
+
}
|
|
4803
|
+
};
|
|
4804
|
+
_a = BaseAnthropic, _BaseAnthropic_encoder = /* @__PURE__ */ new WeakMap(), _BaseAnthropic_instances = /* @__PURE__ */ new WeakSet(), _BaseAnthropic_baseURLOverridden = function _BaseAnthropic_baseURLOverridden2() {
|
|
4805
|
+
return this.baseURL !== "https://api.anthropic.com";
|
|
4806
|
+
};
|
|
4807
|
+
BaseAnthropic.Anthropic = _a;
|
|
4808
|
+
BaseAnthropic.HUMAN_PROMPT = HUMAN_PROMPT;
|
|
4809
|
+
BaseAnthropic.AI_PROMPT = AI_PROMPT;
|
|
4810
|
+
BaseAnthropic.DEFAULT_TIMEOUT = 6e5;
|
|
4811
|
+
BaseAnthropic.AnthropicError = AnthropicError;
|
|
4812
|
+
BaseAnthropic.APIError = APIError;
|
|
4813
|
+
BaseAnthropic.APIConnectionError = APIConnectionError;
|
|
4814
|
+
BaseAnthropic.APIConnectionTimeoutError = APIConnectionTimeoutError;
|
|
4815
|
+
BaseAnthropic.APIUserAbortError = APIUserAbortError;
|
|
4816
|
+
BaseAnthropic.NotFoundError = NotFoundError;
|
|
4817
|
+
BaseAnthropic.ConflictError = ConflictError;
|
|
4818
|
+
BaseAnthropic.RateLimitError = RateLimitError;
|
|
4819
|
+
BaseAnthropic.BadRequestError = BadRequestError;
|
|
4820
|
+
BaseAnthropic.AuthenticationError = AuthenticationError;
|
|
4821
|
+
BaseAnthropic.InternalServerError = InternalServerError;
|
|
4822
|
+
BaseAnthropic.PermissionDeniedError = PermissionDeniedError;
|
|
4823
|
+
BaseAnthropic.UnprocessableEntityError = UnprocessableEntityError;
|
|
4824
|
+
BaseAnthropic.toFile = toFile;
|
|
4825
|
+
var Anthropic = class extends BaseAnthropic {
|
|
4826
|
+
constructor() {
|
|
4827
|
+
super(...arguments);
|
|
4828
|
+
this.completions = new Completions(this);
|
|
4829
|
+
this.messages = new Messages2(this);
|
|
4830
|
+
this.models = new Models2(this);
|
|
4831
|
+
this.beta = new Beta(this);
|
|
4832
|
+
}
|
|
4833
|
+
};
|
|
4834
|
+
Anthropic.Completions = Completions;
|
|
4835
|
+
Anthropic.Messages = Messages2;
|
|
4836
|
+
Anthropic.Models = Models2;
|
|
4837
|
+
Anthropic.Beta = Beta;
|
|
4838
|
+
|
|
4839
|
+
export {
|
|
4840
|
+
Anthropic
|
|
4841
|
+
};
|