@kevisual/query 0.0.9-alpha.1 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/query-ai.d.ts +3 -1
- package/dist/query-ai.js +1129 -673
- package/dist/query-browser.d.ts +40 -6
- package/dist/query-browser.js +26 -2
- package/dist/query-ws.d.ts +2 -1
- package/dist/query.d.ts +39 -5
- package/dist/query.js +26 -2
- package/package.json +9 -5
package/dist/query-ai.js
CHANGED
@@ -366,7 +366,7 @@ function stringify(object, opts = {}) {
 return joined.length > 0 ? prefix + joined : '';
 }

-const VERSION = '4.
+const VERSION = '4.88.0'; // x-release-please-version

 let auto = false;
 let kind = undefined;
@@ -595,13 +595,13 @@ class ContentFilterFinishReasonError extends OpenAIError {
 }
 }

-var __classPrivateFieldSet$
+var __classPrivateFieldSet$5 = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
 if (kind === "m") throw new TypeError("Private method is not writable");
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
 return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
 };
-var __classPrivateFieldGet$
+var __classPrivateFieldGet$6 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
@@ -617,7 +617,7 @@ class LineDecoder {
 constructor() {
 _LineDecoder_carriageReturnIndex.set(this, void 0);
 this.buffer = new Uint8Array();
-__classPrivateFieldSet$
+__classPrivateFieldSet$5(this, _LineDecoder_carriageReturnIndex, null, "f");
 }
 decode(chunk) {
 if (chunk == null) {
@@ -632,25 +632,25 @@ class LineDecoder {
 this.buffer = newData;
 const lines = [];
 let patternIndex;
-while ((patternIndex = findNewlineIndex(this.buffer, __classPrivateFieldGet$
-if (patternIndex.carriage && __classPrivateFieldGet$
+while ((patternIndex = findNewlineIndex(this.buffer, __classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f"))) != null) {
+if (patternIndex.carriage && __classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f") == null) {
 // skip until we either get a corresponding `\n`, a new `\r` or nothing
-__classPrivateFieldSet$
+__classPrivateFieldSet$5(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f");
 continue;
 }
 // we got double \r or \rtext\n
-if (__classPrivateFieldGet$
-(patternIndex.index !== __classPrivateFieldGet$
-lines.push(this.decodeText(this.buffer.slice(0, __classPrivateFieldGet$
-this.buffer = this.buffer.slice(__classPrivateFieldGet$
-__classPrivateFieldSet$
+if (__classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f") != null &&
+(patternIndex.index !== __classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) {
+lines.push(this.decodeText(this.buffer.slice(0, __classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f") - 1)));
+this.buffer = this.buffer.slice(__classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f"));
+__classPrivateFieldSet$5(this, _LineDecoder_carriageReturnIndex, null, "f");
 continue;
 }
-const endIndex = __classPrivateFieldGet$
+const endIndex = __classPrivateFieldGet$6(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
 const line = this.decodeText(this.buffer.slice(0, endIndex));
 lines.push(line);
 this.buffer = this.buffer.slice(patternIndex.index);
-__classPrivateFieldSet$
+__classPrivateFieldSet$5(this, _LineDecoder_carriageReturnIndex, null, "f");
 }
 return lines;
 }
@@ -795,7 +795,7 @@ class Stream {
 done = true;
 continue;
 }
-if (sse.event === null) {
+if (sse.event === null || sse.event.startsWith('response.')) {
 let data;
 try {
 data = JSON.parse(sse.data);
@@ -1179,13 +1179,13 @@ const addFormValue = async (form, key, value) => {
 }
 };

-var __classPrivateFieldSet$
+var __classPrivateFieldSet$4 = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
 if (kind === "m") throw new TypeError("Private method is not writable");
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
 return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
 };
-var __classPrivateFieldGet$
+var __classPrivateFieldGet$5 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
@@ -1210,7 +1210,8 @@ async function defaultParseResponse(props) {
 return response;
 }
 const contentType = response.headers.get('content-type');
-const
+const mediaType = contentType?.split(';')[0]?.trim();
+const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json');
 if (isJSON) {
 const json = await response.json();
 debug('response', response.status, response.url, response.headers, json);
@@ -1633,7 +1634,7 @@ class APIClient {
 class AbstractPage {
 constructor(client, response, body, options) {
 _AbstractPage_client.set(this, void 0);
-__classPrivateFieldSet$
+__classPrivateFieldSet$4(this, _AbstractPage_client, client, "f");
 this.options = options;
 this.response = response;
 this.body = body;
@@ -1661,7 +1662,7 @@ class AbstractPage {
 nextOptions.query = undefined;
 nextOptions.path = nextInfo.url.toString();
 }
-return await __classPrivateFieldGet$
+return await __classPrivateFieldGet$5(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions);
 }
 async *iterPages() {
 // eslint-disable-next-line @typescript-eslint/no-this-alias
@@ -2132,14 +2133,14 @@ let Completions$2 = class Completions extends APIResource {
 return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false });
 }
 /**
-* Get a stored chat completion. Only
+* Get a stored chat completion. Only Chat Completions that have been created with
 * the `store` parameter set to `true` will be returned.
 */
 retrieve(completionId, options) {
 return this._client.get(`/chat/completions/${completionId}`, options);
 }
 /**
-* Modify a stored chat completion. Only
+* Modify a stored chat completion. Only Chat Completions that have been created
 * with the `store` parameter set to `true` can be modified. Currently, the only
 * supported modification is to update the `metadata` field.
 */
@@ -2153,7 +2154,7 @@ let Completions$2 = class Completions extends APIResource {
 return this._client.getAPIList('/chat/completions', ChatCompletionsPage, { query, ...options });
 }
 /**
-* Delete a stored chat completion. Only
+* Delete a stored chat completion. Only Chat Completions that have been created
 * with the `store` parameter set to `true` can be deleted.
 */
 del(completionId, options) {
@@ -2321,13 +2322,13 @@ const isToolMessage = (message) => {
 return message?.role === 'tool';
 };

-var __classPrivateFieldSet$
+var __classPrivateFieldSet$3 = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
 if (kind === "m") throw new TypeError("Private method is not writable");
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
 return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
 };
-var __classPrivateFieldGet$
+var __classPrivateFieldGet$4 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
@@ -2348,20 +2349,20 @@ class EventStream {
 _EventStream_errored.set(this, false);
 _EventStream_aborted.set(this, false);
 _EventStream_catchingPromiseCreated.set(this, false);
-__classPrivateFieldSet$
-__classPrivateFieldSet$
-__classPrivateFieldSet$
+__classPrivateFieldSet$3(this, _EventStream_connectedPromise, new Promise((resolve, reject) => {
+__classPrivateFieldSet$3(this, _EventStream_resolveConnectedPromise, resolve, "f");
+__classPrivateFieldSet$3(this, _EventStream_rejectConnectedPromise, reject, "f");
 }), "f");
-__classPrivateFieldSet$
-__classPrivateFieldSet$
-__classPrivateFieldSet$
+__classPrivateFieldSet$3(this, _EventStream_endPromise, new Promise((resolve, reject) => {
+__classPrivateFieldSet$3(this, _EventStream_resolveEndPromise, resolve, "f");
+__classPrivateFieldSet$3(this, _EventStream_rejectEndPromise, reject, "f");
 }), "f");
 // Don't let these promises cause unhandled rejection errors.
 // we will manually cause an unhandled rejection error later
 // if the user hasn't registered any error listener or called
 // any promise-returning method.
-__classPrivateFieldGet$
-__classPrivateFieldGet$
+__classPrivateFieldGet$4(this, _EventStream_connectedPromise, "f").catch(() => { });
+__classPrivateFieldGet$4(this, _EventStream_endPromise, "f").catch(() => { });
 }
 _run(executor) {
 // Unfortunately if we call `executor()` immediately we get runtime errors about
@@ -2370,23 +2371,23 @@ class EventStream {
 executor().then(() => {
 this._emitFinal();
 this._emit('end');
-}, __classPrivateFieldGet$
+}, __classPrivateFieldGet$4(this, _EventStream_instances, "m", _EventStream_handleError).bind(this));
 }, 0);
 }
 _connected() {
 if (this.ended)
 return;
-__classPrivateFieldGet$
+__classPrivateFieldGet$4(this, _EventStream_resolveConnectedPromise, "f").call(this);
 this._emit('connect');
 }
 get ended() {
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$4(this, _EventStream_ended, "f");
 }
 get errored() {
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$4(this, _EventStream_errored, "f");
 }
 get aborted() {
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$4(this, _EventStream_aborted, "f");
 }
 abort() {
 this.controller.abort();
@@ -2399,7 +2400,7 @@ class EventStream {
 * @returns this ChatCompletionStream, so that calls can be chained
 */
 on(event, listener) {
-const listeners = __classPrivateFieldGet$
+const listeners = __classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event] = []);
 listeners.push({ listener });
 return this;
 }
@@ -2411,7 +2412,7 @@ class EventStream {
 * @returns this ChatCompletionStream, so that calls can be chained
 */
 off(event, listener) {
-const listeners = __classPrivateFieldGet$
+const listeners = __classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event];
 if (!listeners)
 return this;
 const index = listeners.findIndex((l) => l.listener === listener);
@@ -2425,7 +2426,7 @@ class EventStream {
 * @returns this ChatCompletionStream, so that calls can be chained
 */
 once(event, listener) {
-const listeners = __classPrivateFieldGet$
+const listeners = __classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event] || (__classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event] = []);
 listeners.push({ listener, once: true });
 return this;
 }
@@ -2442,44 +2443,44 @@ class EventStream {
 */
 emitted(event) {
 return new Promise((resolve, reject) => {
-__classPrivateFieldSet$
+__classPrivateFieldSet$3(this, _EventStream_catchingPromiseCreated, true, "f");
 if (event !== 'error')
 this.once('error', reject);
 this.once(event, resolve);
 });
 }
 async done() {
-__classPrivateFieldSet$
-await __classPrivateFieldGet$
+__classPrivateFieldSet$3(this, _EventStream_catchingPromiseCreated, true, "f");
+await __classPrivateFieldGet$4(this, _EventStream_endPromise, "f");
 }
 _emit(event, ...args) {
 // make sure we don't emit any events after end
-if (__classPrivateFieldGet$
+if (__classPrivateFieldGet$4(this, _EventStream_ended, "f")) {
 return;
 }
 if (event === 'end') {
-__classPrivateFieldSet$
-__classPrivateFieldGet$
+__classPrivateFieldSet$3(this, _EventStream_ended, true, "f");
+__classPrivateFieldGet$4(this, _EventStream_resolveEndPromise, "f").call(this);
 }
-const listeners = __classPrivateFieldGet$
+const listeners = __classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event];
 if (listeners) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$4(this, _EventStream_listeners, "f")[event] = listeners.filter((l) => !l.once);
 listeners.forEach(({ listener }) => listener(...args));
 }
 if (event === 'abort') {
 const error = args[0];
-if (!__classPrivateFieldGet$
+if (!__classPrivateFieldGet$4(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) {
 Promise.reject(error);
 }
-__classPrivateFieldGet$
-__classPrivateFieldGet$
+__classPrivateFieldGet$4(this, _EventStream_rejectConnectedPromise, "f").call(this, error);
+__classPrivateFieldGet$4(this, _EventStream_rejectEndPromise, "f").call(this, error);
 this._emit('end');
 return;
 }
 if (event === 'error') {
 // NOTE: _emit('error', error) should only be called from #handleError().
 const error = args[0];
-if (!__classPrivateFieldGet$
+if (!__classPrivateFieldGet$4(this, _EventStream_catchingPromiseCreated, "f") && !listeners?.length) {
 // Trigger an unhandled rejection if the user hasn't registered any error handlers.
 // If you are seeing stack traces here, make sure to handle errors via either:
 // - runner.on('error', () => ...)
@@ -2488,20 +2489,20 @@ class EventStream {
 // - etc.
 Promise.reject(error);
 }
-__classPrivateFieldGet$
-__classPrivateFieldGet$
+__classPrivateFieldGet$4(this, _EventStream_rejectConnectedPromise, "f").call(this, error);
+__classPrivateFieldGet$4(this, _EventStream_rejectEndPromise, "f").call(this, error);
 this._emit('end');
 }
 }
 _emitFinal() { }
 }
 _EventStream_connectedPromise = new WeakMap(), _EventStream_resolveConnectedPromise = new WeakMap(), _EventStream_rejectConnectedPromise = new WeakMap(), _EventStream_endPromise = new WeakMap(), _EventStream_resolveEndPromise = new WeakMap(), _EventStream_rejectEndPromise = new WeakMap(), _EventStream_listeners = new WeakMap(), _EventStream_ended = new WeakMap(), _EventStream_errored = new WeakMap(), _EventStream_aborted = new WeakMap(), _EventStream_catchingPromiseCreated = new WeakMap(), _EventStream_instances = new WeakSet(), _EventStream_handleError = function _EventStream_handleError(error) {
-__classPrivateFieldSet$
+__classPrivateFieldSet$3(this, _EventStream_errored, true, "f");
 if (error instanceof Error && error.name === 'AbortError') {
 error = new APIUserAbortError();
 }
 if (error instanceof APIUserAbortError) {
-__classPrivateFieldSet$
+__classPrivateFieldSet$3(this, _EventStream_aborted, true, "f");
 return this._emit('abort', error);
 }
 if (error instanceof OpenAIError) {
@@ -2519,11 +2520,11 @@ _EventStream_connectedPromise = new WeakMap(), _EventStream_resolveConnectedProm
 function isAutoParsableResponseFormat(response_format) {
 return response_format?.['$brand'] === 'auto-parseable-response-format';
 }
-function isAutoParsableTool(tool) {
+function isAutoParsableTool$1(tool) {
 return tool?.['$brand'] === 'auto-parseable-tool';
 }
 function maybeParseChatCompletion(completion, params) {
-if (!params || !hasAutoParseableInput(params)) {
+if (!params || !hasAutoParseableInput$1(params)) {
 return {
 ...completion,
 choices: completion.choices.map((choice) => ({
@@ -2556,7 +2557,7 @@ function parseChatCompletion(completion, params) {
 ...choice.message,
 ...(choice.message.tool_calls ?
 {
-tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall(params, toolCall)) ?? undefined,
+tool_calls: choice.message.tool_calls?.map((toolCall) => parseToolCall$1(params, toolCall)) ?? undefined,
 }
 : undefined),
 parsed: choice.message.content && !choice.message.refusal ?
@@ -2580,13 +2581,13 @@ function parseResponseFormat(params, content) {
 }
 return null;
 }
-function parseToolCall(params, toolCall) {
+function parseToolCall$1(params, toolCall) {
 const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name);
 return {
 ...toolCall,
 function: {
 ...toolCall.function,
-parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments)
+parsed_arguments: isAutoParsableTool$1(inputTool) ? inputTool.$parseRaw(toolCall.function.arguments)
 : inputTool?.function.strict ? JSON.parse(toolCall.function.arguments)
 : null,
 },
@@ -2597,13 +2598,13 @@ function shouldParseToolCall(params, toolCall) {
 return false;
 }
 const inputTool = params.tools?.find((inputTool) => inputTool.function?.name === toolCall.function.name);
-return isAutoParsableTool(inputTool) || inputTool?.function.strict || false;
+return isAutoParsableTool$1(inputTool) || inputTool?.function.strict || false;
 }
-function hasAutoParseableInput(params) {
+function hasAutoParseableInput$1(params) {
 if (isAutoParsableResponseFormat(params.response_format)) {
 return true;
 }
-return (params.tools?.some((t) => isAutoParsableTool(t) || (t.type === 'function' && t.function.strict === true)) ?? false);
+return (params.tools?.some((t) => isAutoParsableTool$1(t) || (t.type === 'function' && t.function.strict === true)) ?? false);
 }
 function validateInputTools(tools) {
 for (const tool of tools ?? []) {
@@ -2616,7 +2617,7 @@ function validateInputTools(tools) {
 }
 }

-var __classPrivateFieldGet$
+var __classPrivateFieldGet$3 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
@@ -2677,7 +2678,7 @@ class AbstractChatCompletionRunner extends EventStream {
 */
 async finalContent() {
 await this.done();
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this);
 }
 /**
 * @returns a promise that resolves with the the final assistant ChatCompletionMessage response,
@@ -2685,7 +2686,7 @@ class AbstractChatCompletionRunner extends EventStream {
 */
 async finalMessage() {
 await this.done();
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this);
 }
 /**
 * @returns a promise that resolves with the content of the final FunctionCall, or rejects
@@ -2693,15 +2694,15 @@ class AbstractChatCompletionRunner extends EventStream {
 */
 async finalFunctionCall() {
 await this.done();
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);
 }
 async finalFunctionCallResult() {
 await this.done();
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);
 }
 async totalUsage() {
 await this.done();
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this);
 }
 allChatCompletions() {
 return [...this._chatCompletions];
@@ -2710,20 +2711,20 @@ class AbstractChatCompletionRunner extends EventStream {
 const completion = this._chatCompletions[this._chatCompletions.length - 1];
 if (completion)
 this._emit('finalChatCompletion', completion);
-const finalMessage = __classPrivateFieldGet$
+const finalMessage = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this);
 if (finalMessage)
 this._emit('finalMessage', finalMessage);
-const finalContent = __classPrivateFieldGet$
+const finalContent = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalContent).call(this);
 if (finalContent)
 this._emit('finalContent', finalContent);
-const finalFunctionCall = __classPrivateFieldGet$
+const finalFunctionCall = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);
 if (finalFunctionCall)
 this._emit('finalFunctionCall', finalFunctionCall);
-const finalFunctionCallResult = __classPrivateFieldGet$
+const finalFunctionCallResult = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);
 if (finalFunctionCallResult != null)
 this._emit('finalFunctionCallResult', finalFunctionCallResult);
 if (this._chatCompletions.some((c) => c.usage)) {
-this._emit('totalUsage', __classPrivateFieldGet$
+this._emit('totalUsage', __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_calculateTotalUsage).call(this));
 }
 }
 async _createChatCompletion(client, params, options) {
@@ -2733,7 +2734,7 @@ class AbstractChatCompletionRunner extends EventStream {
 this.controller.abort();
 signal.addEventListener('abort', () => this.controller.abort());
 }
-__classPrivateFieldGet$
+__classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_validateParams).call(this, params);
 const chatCompletion = await client.chat.completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal });
 this._connected();
 return this._addChatCompletion(parseChatCompletion(chatCompletion, params));
@@ -2802,7 +2803,7 @@ class AbstractChatCompletionRunner extends EventStream {
 }
 // @ts-expect-error it can't rule out `never` type.
 const rawContent = await fn.function(parsed, this);
-const content = __classPrivateFieldGet$
+const content = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);
 this._addMessage({ role, name, content });
 if (singleFunctionToCall)
 return;
@@ -2815,7 +2816,7 @@ class AbstractChatCompletionRunner extends EventStream {
 const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};
 // TODO(someday): clean this logic up
 const inputTools = params.tools.map((tool) => {
-if (isAutoParsableTool(tool)) {
+if (isAutoParsableTool$1(tool)) {
 if (!tool.$callback) {
 throw new OpenAIError('Tool given to `.runTools()` that does not have an associated function');
 }
@@ -2898,7 +2899,7 @@ class AbstractChatCompletionRunner extends EventStream {
 }
 // @ts-expect-error it can't rule out `never` type.
 const rawContent = await fn.function(parsed, this);
-const content = __classPrivateFieldGet$
+const content = __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);
 this._addMessage({ role, tool_call_id, content });
 if (singleFunctionToCall) {
 return;
@@ -2909,7 +2910,7 @@ class AbstractChatCompletionRunner extends EventStream {
 }
 }
 _AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() {
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$3(this, _AbstractChatCompletionRunner_instances, "m", _AbstractChatCompletionRunner_getFinalMessage).call(this).content ?? null;
 }, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() {
 let i = this.messages.length;
 while (i-- > 0) {
@@ -3247,13 +3248,13 @@ const _parseJSON = (jsonString, allow) => {
 // using this function with malformed JSON is undefined behavior
 const partialParse = (input) => parseJSON(input, Allow.ALL ^ Allow.NUM);

-var __classPrivateFieldSet$
+var __classPrivateFieldSet$2 = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
 if (kind === "m") throw new TypeError("Private method is not writable");
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
 return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
 };
-var __classPrivateFieldGet$
+var __classPrivateFieldGet$2 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
@@ -3266,11 +3267,11 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 _ChatCompletionStream_params.set(this, void 0);
 _ChatCompletionStream_choiceEventStates.set(this, void 0);
 _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0);
-__classPrivateFieldSet$
-__classPrivateFieldSet$
+__classPrivateFieldSet$2(this, _ChatCompletionStream_params, params, "f");
+__classPrivateFieldSet$2(this, _ChatCompletionStream_choiceEventStates, [], "f");
 }
 get currentChatCompletionSnapshot() {
-return __classPrivateFieldGet$
+return __classPrivateFieldGet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
 }
 /**
 * Intended for use on the frontend, consuming a stream produced with
@@ -3297,16 +3298,16 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 this.controller.abort();
 signal.addEventListener('abort', () => this.controller.abort());
 }
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
 const stream = await client.chat.completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
 this._connected();
 for await (const chunk of stream) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addChatCompletion(__classPrivateFieldGet$
+return this._addChatCompletion(__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
 }
 async _fromReadableStream(readableStream, options) {
 const signal = options?.signal;
@@ -3315,29 +3316,29 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 this.controller.abort();
 signal.addEventListener('abort', () => this.controller.abort());
 }
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
 this._connected();
 const stream = Stream.fromReadableStream(readableStream, this.controller);
 let chatId;
 for await (const chunk of stream) {
 if (chatId && chatId !== chunk.id) {
 // A new request has been made.
-this._addChatCompletion(__classPrivateFieldGet$
+this._addChatCompletion(__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
 }
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
 chatId = chunk.id;
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addChatCompletion(__classPrivateFieldGet$
+return this._addChatCompletion(__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
 }
 [(_ChatCompletionStream_params = new WeakMap(), _ChatCompletionStream_choiceEventStates = new WeakMap(), _ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() {
 if (this.ended)
 return;
-__classPrivateFieldSet$
+__classPrivateFieldSet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
 }, _ChatCompletionStream_getChoiceEventState = function _ChatCompletionStream_getChoiceEventState(choice) {
-let state = __classPrivateFieldGet$
+let state = __classPrivateFieldGet$2(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index];
 if (state) {
 return state;
 }
@@ -3349,12 +3350,12 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 done_tool_calls: new Set(),
 current_tool_call_index: null,
 };
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index] = state;
 return state;
 }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) {
 if (this.ended)
 return;
-const completion = __classPrivateFieldGet$
+const completion = __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk);
 this._emit('chunk', chunk, completion);
 for (const choice of chunk.choices) {
 const choiceSnapshot = completion.choices[choice.index];
@@ -3388,19 +3389,19 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 snapshot: choiceSnapshot.logprobs?.refusal ?? [],
 });
 }
-const state = __classPrivateFieldGet$
+const state = __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
 if (choiceSnapshot.finish_reason) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot);
 if (state.current_tool_call_index != null) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index);
 }
 }
 for (const toolCall of choice.delta.tool_calls ?? []) {
 if (state.current_tool_call_index !== toolCall.index) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot);
 // new tool call started, the previous one is done
 if (state.current_tool_call_index != null) {
-__classPrivateFieldGet$
+__classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index);
 }
 }
 state.current_tool_call_index = toolCall.index;
@@ -3425,7 +3426,7 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 }
 }
 }, _ChatCompletionStream_emitToolCallDoneEvent = function _ChatCompletionStream_emitToolCallDoneEvent(choiceSnapshot, toolCallIndex) {
-const state = __classPrivateFieldGet$
+const state = __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
 if (state.done_tool_calls.has(toolCallIndex)) {
 // we've already fired the done event
 return;
@@ -3438,12 +3439,12 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 throw new Error('tool call snapshot missing `type`');
 }
 if (toolCallSnapshot.type === 'function') {
-const inputTool = __classPrivateFieldGet$
+const inputTool = __classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f")?.tools?.find((tool) => tool.type === 'function' && tool.function.name === toolCallSnapshot.function.name);
 this._emit('tool_calls.function.arguments.done', {
 name: toolCallSnapshot.function.name,
 index: toolCallIndex,
 arguments: toolCallSnapshot.function.arguments,
-parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments)
+parsed_arguments: isAutoParsableTool$1(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments)
 : inputTool?.function.strict ? JSON.parse(toolCallSnapshot.function.arguments)
 : null,
 });
@@ -3452,10 +3453,10 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 assertNever(toolCallSnapshot.type);
 }
 }, _ChatCompletionStream_emitContentDoneEvents = function _ChatCompletionStream_emitContentDoneEvents(choiceSnapshot) {
-const state = __classPrivateFieldGet$
+const state = __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
 if (choiceSnapshot.message.content && !state.content_done) {
 state.content_done = true;
-const responseFormat = __classPrivateFieldGet$
+const responseFormat = __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this);
 this._emit('content.done', {
 content: choiceSnapshot.message.content,
 parsed: responseFormat ? responseFormat.$parseRaw(choiceSnapshot.message.content) : null,
@@ -3477,25 +3478,25 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 if (this.ended) {
 throw new OpenAIError(`stream has ended, this shouldn't happen`);
 }
-const snapshot = __classPrivateFieldGet$
+const snapshot = __classPrivateFieldGet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
 if (!snapshot) {
 throw new OpenAIError(`request ended without sending any chunks`);
 }
-__classPrivateFieldSet$
-__classPrivateFieldSet$
-return finalizeChatCompletion(snapshot, __classPrivateFieldGet$
+__classPrivateFieldSet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
+__classPrivateFieldSet$2(this, _ChatCompletionStream_choiceEventStates, [], "f");
+return finalizeChatCompletion(snapshot, __classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f"));
 }, _ChatCompletionStream_getAutoParseableResponseFormat = function _ChatCompletionStream_getAutoParseableResponseFormat() {
-const responseFormat = __classPrivateFieldGet$
+const responseFormat = __classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f")?.response_format;
 if (isAutoParsableResponseFormat(responseFormat)) {
 return responseFormat;
 }
 return null;
 }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) {
 var _a, _b, _c, _d;
-let snapshot = __classPrivateFieldGet$
+let snapshot = __classPrivateFieldGet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
 const { choices, ...rest } = chunk;
 if (!snapshot) {
-snapshot = __classPrivateFieldSet$
+snapshot = __classPrivateFieldSet$2(this, _ChatCompletionStream_currentChatCompletionSnapshot, {
 ...rest,
 choices: [],
 }, "f");
@@ -3527,7 +3528,7 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 }
 if (finish_reason) {
 choice.finish_reason = finish_reason;
-if (__classPrivateFieldGet$
+if (__classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f") && hasAutoParseableInput$1(__classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f"))) {
 if (finish_reason === 'length') {
 throw new LengthFinishReasonError();
 }
@@ -3561,7 +3562,7 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 }
 if (content) {
 choice.message.content = (choice.message.content || '') + content;
-if (!choice.message.refusal && __classPrivateFieldGet$
+if (!choice.message.refusal && __classPrivateFieldGet$2(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this)) {
 choice.message.parsed = partialParse(choice.message.content);
 }
 }
@@ -3581,7 +3582,7 @@ class ChatCompletionStream extends AbstractChatCompletionRunner {
 tool_call.function.name = fn.name;
 if (fn?.arguments) {
 tool_call.function.arguments += fn.arguments;
-if (shouldParseToolCall(__classPrivateFieldGet$
+if (shouldParseToolCall(__classPrivateFieldGet$2(this, _ChatCompletionStream_params, "f"), tool_call)) {
 tool_call.function.parsed_arguments = partialParse(tool_call.function.arguments);
 }
 }
@@ -3834,12 +3835,12 @@ class Realtime extends APIResource {
 }
 Realtime.Sessions = Sessions;

-var __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
+var __classPrivateFieldGet$1 = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
 return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
 };
-var __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
+var __classPrivateFieldSet$1 = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
 if (kind === "m") throw new TypeError("Private method is not writable");
 if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
 if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
@@ -3934,12 +3935,12 @@ class AssistantStream extends EventStream {
 this._connected();
 const stream = Stream.fromReadableStream(readableStream, this.controller);
 for await (const event of stream) {
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
+return this._addRun(__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
 }
 toReadableStream() {
 const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
@@ -3967,12 +3968,12 @@ class AssistantStream extends EventStream {
 });
 this._connected();
 for await (const event of stream) {
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
+return this._addRun(__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
 }
 static createThreadAssistantStream(params, thread, options) {
 const runner = new AssistantStream();
@@ -3991,30 +3992,30 @@ class AssistantStream extends EventStream {
 return runner;
 }
 currentEvent() {
-return __classPrivateFieldGet(this, _AssistantStream_currentEvent, "f");
+return __classPrivateFieldGet$1(this, _AssistantStream_currentEvent, "f");
 }
 currentRun() {
-return __classPrivateFieldGet(this, _AssistantStream_currentRunSnapshot, "f");
+return __classPrivateFieldGet$1(this, _AssistantStream_currentRunSnapshot, "f");
 }
 currentMessageSnapshot() {
-return __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f");
+return __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f");
 }
 currentRunStepSnapshot() {
-return __classPrivateFieldGet(this, _AssistantStream_currentRunStepSnapshot, "f");
+return __classPrivateFieldGet$1(this, _AssistantStream_currentRunStepSnapshot, "f");
 }
 async finalRunSteps() {
 await this.done();
-return Object.values(__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f"));
+return Object.values(__classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f"));
 }
 async finalMessages() {
 await this.done();
-return Object.values(__classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f"));
+return Object.values(__classPrivateFieldGet$1(this, _AssistantStream_messageSnapshots, "f"));
 }
 async finalRun() {
 await this.done();
-if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f"))
+if (!__classPrivateFieldGet$1(this, _AssistantStream_finalRun, "f"))
 throw Error('Final run was not received.');
-return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f");
+return __classPrivateFieldGet$1(this, _AssistantStream_finalRun, "f");
 }
 async _createThreadAssistantStream(thread, params, options) {
 const signal = options?.signal;
@@ -4027,12 +4028,12 @@ class AssistantStream extends EventStream {
 const stream = await thread.createAndRun(body, { ...options, signal: this.controller.signal });
 this._connected();
 for await (const event of stream) {
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
+return this._addRun(__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
 }
 async _createAssistantStream(run, threadId, params, options) {
 const signal = options?.signal;
@@ -4045,12 +4046,12 @@ class AssistantStream extends EventStream {
 const stream = await run.create(threadId, body, { ...options, signal: this.controller.signal });
 this._connected();
 for await (const event of stream) {
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_addEvent).call(this, event);
 }
 if (stream.controller.signal?.aborted) {
 throw new APIUserAbortError();
 }
-return this._addRun(__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
+return this._addRun(__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_endRequest).call(this));
 }
 static accumulateDelta(acc, delta) {
 for (const [key, deltaValue] of Object.entries(delta)) {
@@ -4128,8 +4129,8 @@ class AssistantStream extends EventStream {
 _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
 if (this.ended)
 return;
-__classPrivateFieldSet(this, _AssistantStream_currentEvent, event, "f");
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleEvent).call(this, event);
+__classPrivateFieldSet$1(this, _AssistantStream_currentEvent, event, "f");
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_handleEvent).call(this, event);
 switch (event.event) {
 case 'thread.created':
 //No action on this event.
@@ -4144,7 +4145,7 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
 case 'thread.run.cancelling':
 case 'thread.run.cancelled':
 case 'thread.run.expired':
-__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRun).call(this, event);
+__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_handleRun).call(this, event);
 break;
 case 'thread.run.step.created':
 case 'thread.run.step.in_progress':
@@ -4153,14 +4154,14 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4153
4154
|
case 'thread.run.step.failed':
|
|
4154
4155
|
case 'thread.run.step.cancelled':
|
|
4155
4156
|
case 'thread.run.step.expired':
|
|
4156
|
-
__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleRunStep).call(this, event);
|
|
4157
|
+
__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_handleRunStep).call(this, event);
|
|
4157
4158
|
break;
|
|
4158
4159
|
case 'thread.message.created':
|
|
4159
4160
|
case 'thread.message.in_progress':
|
|
4160
4161
|
case 'thread.message.delta':
|
|
4161
4162
|
case 'thread.message.completed':
|
|
4162
4163
|
case 'thread.message.incomplete':
|
|
4163
|
-
__classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_handleMessage).call(this, event);
|
|
4164
|
+
__classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_handleMessage).call(this, event);
|
|
4164
4165
|
break;
|
|
4165
4166
|
case 'error':
|
|
4166
4167
|
//This is included for completeness, but errors are processed in the SSE event processing so this should not occur
|
|
@@ -4170,13 +4171,13 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4170
4171
|
if (this.ended) {
|
|
4171
4172
|
throw new OpenAIError(`stream has ended, this shouldn't happen`);
|
|
4172
4173
|
}
|
|
4173
|
-
if (!__classPrivateFieldGet(this, _AssistantStream_finalRun, "f"))
|
|
4174
|
+
if (!__classPrivateFieldGet$1(this, _AssistantStream_finalRun, "f"))
|
|
4174
4175
|
throw Error('Final run has not been received');
|
|
4175
|
-
return __classPrivateFieldGet(this, _AssistantStream_finalRun, "f");
|
|
4176
|
+
return __classPrivateFieldGet$1(this, _AssistantStream_finalRun, "f");
|
|
4176
4177
|
}, _AssistantStream_handleMessage = function _AssistantStream_handleMessage(event) {
|
|
4177
|
-
const [accumulatedMessage, newContent] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateMessage).call(this, event, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"));
|
|
4178
|
-
__classPrivateFieldSet(this, _AssistantStream_messageSnapshot, accumulatedMessage, "f");
|
|
4179
|
-
__classPrivateFieldGet(this, _AssistantStream_messageSnapshots, "f")[accumulatedMessage.id] = accumulatedMessage;
|
|
4178
|
+
const [accumulatedMessage, newContent] = __classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_accumulateMessage).call(this, event, __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f"));
|
|
4179
|
+
__classPrivateFieldSet$1(this, _AssistantStream_messageSnapshot, accumulatedMessage, "f");
|
|
4180
|
+
__classPrivateFieldGet$1(this, _AssistantStream_messageSnapshots, "f")[accumulatedMessage.id] = accumulatedMessage;
|
|
4180
4181
|
for (const content of newContent) {
|
|
4181
4182
|
const snapshotContent = accumulatedMessage.content[content.index];
|
|
4182
4183
|
if (snapshotContent?.type == 'text') {
|
|
@@ -4204,48 +4205,48 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4204
4205
|
throw Error('The snapshot associated with this text delta is not text or missing');
|
|
4205
4206
|
}
|
|
4206
4207
|
}
|
|
4207
|
-
if (content.index != __classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")) {
|
|
4208
|
+
if (content.index != __classPrivateFieldGet$1(this, _AssistantStream_currentContentIndex, "f")) {
|
|
4208
4209
|
//See if we have in progress content
|
|
4209
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f")) {
|
|
4210
|
-
switch (__classPrivateFieldGet(this, _AssistantStream_currentContent, "f").type) {
|
|
4210
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentContent, "f")) {
|
|
4211
|
+
switch (__classPrivateFieldGet$1(this, _AssistantStream_currentContent, "f").type) {
|
|
4211
4212
|
case 'text':
|
|
4212
|
-
this._emit('textDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").text, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"));
|
|
4213
|
+
this._emit('textDone', __classPrivateFieldGet$1(this, _AssistantStream_currentContent, "f").text, __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f"));
|
|
4213
4214
|
break;
|
|
4214
4215
|
case 'image_file':
|
|
4215
|
-
this._emit('imageFileDone', __classPrivateFieldGet(this, _AssistantStream_currentContent, "f").image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"));
|
|
4216
|
+
this._emit('imageFileDone', __classPrivateFieldGet$1(this, _AssistantStream_currentContent, "f").image_file, __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f"));
|
|
4216
4217
|
break;
|
|
4217
4218
|
}
|
|
4218
4219
|
}
|
|
4219
|
-
__classPrivateFieldSet(this, _AssistantStream_currentContentIndex, content.index, "f");
|
|
4220
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentContentIndex, content.index, "f");
|
|
4220
4221
|
}
|
|
4221
|
-
__classPrivateFieldSet(this, _AssistantStream_currentContent, accumulatedMessage.content[content.index], "f");
|
|
4222
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentContent, accumulatedMessage.content[content.index], "f");
|
|
4222
4223
|
}
|
|
4223
4224
|
}
|
|
4224
4225
|
break;
|
|
4225
4226
|
case 'thread.message.completed':
|
|
4226
4227
|
case 'thread.message.incomplete':
|
|
4227
4228
|
//We emit the latest content we were working on on completion (including incomplete)
|
|
4228
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f") !== undefined) {
|
|
4229
|
-
const currentContent = event.data.content[__classPrivateFieldGet(this, _AssistantStream_currentContentIndex, "f")];
|
|
4229
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentContentIndex, "f") !== undefined) {
|
|
4230
|
+
const currentContent = event.data.content[__classPrivateFieldGet$1(this, _AssistantStream_currentContentIndex, "f")];
|
|
4230
4231
|
if (currentContent) {
|
|
4231
4232
|
switch (currentContent.type) {
|
|
4232
4233
|
case 'image_file':
|
|
4233
|
-
this._emit('imageFileDone', currentContent.image_file, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"));
|
|
4234
|
+
this._emit('imageFileDone', currentContent.image_file, __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f"));
|
|
4234
4235
|
break;
|
|
4235
4236
|
case 'text':
|
|
4236
|
-
this._emit('textDone', currentContent.text, __classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f"));
|
|
4237
|
+
this._emit('textDone', currentContent.text, __classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f"));
|
|
4237
4238
|
break;
|
|
4238
4239
|
}
|
|
4239
4240
|
}
|
|
4240
4241
|
}
|
|
4241
|
-
if (__classPrivateFieldGet(this, _AssistantStream_messageSnapshot, "f")) {
|
|
4242
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_messageSnapshot, "f")) {
|
|
4242
4243
|
this._emit('messageDone', event.data);
|
|
4243
4244
|
}
|
|
4244
|
-
__classPrivateFieldSet(this, _AssistantStream_messageSnapshot, undefined, "f");
|
|
4245
|
+
__classPrivateFieldSet$1(this, _AssistantStream_messageSnapshot, undefined, "f");
|
|
4245
4246
|
}
|
|
4246
4247
|
}, _AssistantStream_handleRunStep = function _AssistantStream_handleRunStep(event) {
|
|
4247
|
-
const accumulatedRunStep = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateRunStep).call(this, event);
|
|
4248
|
-
__classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, accumulatedRunStep, "f");
|
|
4248
|
+
const accumulatedRunStep = __classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_accumulateRunStep).call(this, event);
|
|
4249
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentRunStepSnapshot, accumulatedRunStep, "f");
|
|
4249
4250
|
switch (event.event) {
|
|
4250
4251
|
case 'thread.run.step.created':
|
|
4251
4252
|
this._emit('runStepCreated', event.data);
|
|
@@ -4257,17 +4258,17 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4257
4258
|
delta.step_details.tool_calls &&
|
|
4258
4259
|
accumulatedRunStep.step_details.type == 'tool_calls') {
|
|
4259
4260
|
for (const toolCall of delta.step_details.tool_calls) {
|
|
4260
|
-
if (toolCall.index == __classPrivateFieldGet(this, _AssistantStream_currentToolCallIndex, "f")) {
|
|
4261
|
+
if (toolCall.index == __classPrivateFieldGet$1(this, _AssistantStream_currentToolCallIndex, "f")) {
|
|
4261
4262
|
this._emit('toolCallDelta', toolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index]);
|
|
4262
4263
|
}
|
|
4263
4264
|
else {
|
|
4264
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) {
|
|
4265
|
-
this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f"));
|
|
4265
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f")) {
|
|
4266
|
+
this._emit('toolCallDone', __classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f"));
|
|
4266
4267
|
}
|
|
4267
|
-
__classPrivateFieldSet(this, _AssistantStream_currentToolCallIndex, toolCall.index, "f");
|
|
4268
|
-
__classPrivateFieldSet(this, _AssistantStream_currentToolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index], "f");
|
|
4269
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f"))
|
|
4270
|
-
this._emit('toolCallCreated', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f"));
|
|
4268
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentToolCallIndex, toolCall.index, "f");
|
|
4269
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentToolCall, accumulatedRunStep.step_details.tool_calls[toolCall.index], "f");
|
|
4270
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f"))
|
|
4271
|
+
this._emit('toolCallCreated', __classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f"));
|
|
4271
4272
|
}
|
|
4272
4273
|
}
|
|
4273
4274
|
}
|
|
@@ -4277,46 +4278,46 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4277
4278
|
case 'thread.run.step.failed':
|
|
4278
4279
|
case 'thread.run.step.cancelled':
|
|
4279
4280
|
case 'thread.run.step.expired':
|
|
4280
|
-
__classPrivateFieldSet(this, _AssistantStream_currentRunStepSnapshot, undefined, "f");
|
|
4281
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentRunStepSnapshot, undefined, "f");
|
|
4281
4282
|
const details = event.data.step_details;
|
|
4282
4283
|
if (details.type == 'tool_calls') {
|
|
4283
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) {
|
|
4284
|
-
this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f"));
|
|
4285
|
-
__classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f");
|
|
4284
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f")) {
|
|
4285
|
+
this._emit('toolCallDone', __classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f"));
|
|
4286
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentToolCall, undefined, "f");
|
|
4286
4287
|
}
|
|
4287
4288
|
}
|
|
4288
4289
|
this._emit('runStepDone', event.data, accumulatedRunStep);
|
|
4289
4290
|
break;
|
|
4290
4291
|
}
|
|
4291
4292
|
}, _AssistantStream_handleEvent = function _AssistantStream_handleEvent(event) {
|
|
4292
|
-
__classPrivateFieldGet(this, _AssistantStream_events, "f").push(event);
|
|
4293
|
+
__classPrivateFieldGet$1(this, _AssistantStream_events, "f").push(event);
|
|
4293
4294
|
this._emit('event', event);
|
|
4294
4295
|
}, _AssistantStream_accumulateRunStep = function _AssistantStream_accumulateRunStep(event) {
|
|
4295
4296
|
switch (event.event) {
|
|
4296
4297
|
case 'thread.run.step.created':
|
|
4297
|
-
__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data;
|
|
4298
|
+
__classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data;
|
|
4298
4299
|
return event.data;
|
|
4299
4300
|
case 'thread.run.step.delta':
|
|
4300
|
-
let snapshot = __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4301
|
+
let snapshot = __classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4301
4302
|
if (!snapshot) {
|
|
4302
4303
|
throw Error('Received a RunStepDelta before creation of a snapshot');
|
|
4303
4304
|
}
|
|
4304
4305
|
let data = event.data;
|
|
4305
4306
|
if (data.delta) {
|
|
4306
4307
|
const accumulated = AssistantStream.accumulateDelta(snapshot, data.delta);
|
|
4307
|
-
__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = accumulated;
|
|
4308
|
+
__classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = accumulated;
|
|
4308
4309
|
}
|
|
4309
|
-
return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4310
|
+
return __classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4310
4311
|
case 'thread.run.step.completed':
|
|
4311
4312
|
case 'thread.run.step.failed':
|
|
4312
4313
|
case 'thread.run.step.cancelled':
|
|
4313
4314
|
case 'thread.run.step.expired':
|
|
4314
4315
|
case 'thread.run.step.in_progress':
|
|
4315
|
-
__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data;
|
|
4316
|
+
__classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id] = event.data;
|
|
4316
4317
|
break;
|
|
4317
4318
|
}
|
|
4318
|
-
if (__classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id])
|
|
4319
|
-
return __classPrivateFieldGet(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4319
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id])
|
|
4320
|
+
return __classPrivateFieldGet$1(this, _AssistantStream_runStepSnapshots, "f")[event.data.id];
|
|
4320
4321
|
throw new Error('No snapshot available');
|
|
4321
4322
|
}, _AssistantStream_accumulateMessage = function _AssistantStream_accumulateMessage(event, snapshot) {
|
|
4322
4323
|
let newContent = [];
|
|
@@ -4334,7 +4335,7 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4334
4335
|
for (const contentElement of data.delta.content) {
|
|
4335
4336
|
if (contentElement.index in snapshot.content) {
|
|
4336
4337
|
let currentContent = snapshot.content[contentElement.index];
|
|
4337
|
-
snapshot.content[contentElement.index] = __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_accumulateContent).call(this, contentElement, currentContent);
|
|
4338
|
+
snapshot.content[contentElement.index] = __classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_accumulateContent).call(this, contentElement, currentContent);
|
|
4338
4339
|
}
|
|
4339
4340
|
else {
|
|
4340
4341
|
snapshot.content[contentElement.index] = contentElement;
|
|
@@ -4359,7 +4360,7 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4359
4360
|
}, _AssistantStream_accumulateContent = function _AssistantStream_accumulateContent(contentElement, currentContent) {
|
|
4360
4361
|
return AssistantStream.accumulateDelta(currentContent, contentElement);
|
|
4361
4362
|
}, _AssistantStream_handleRun = function _AssistantStream_handleRun(event) {
|
|
4362
|
-
__classPrivateFieldSet(this, _AssistantStream_currentRunSnapshot, event.data, "f");
|
|
4363
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentRunSnapshot, event.data, "f");
|
|
4363
4364
|
switch (event.event) {
|
|
4364
4365
|
case 'thread.run.created':
|
|
4365
4366
|
break;
|
|
@@ -4372,10 +4373,10 @@ _AssistantStream_addEvent = function _AssistantStream_addEvent(event) {
|
|
|
4372
4373
|
case 'thread.run.failed':
|
|
4373
4374
|
case 'thread.run.completed':
|
|
4374
4375
|
case 'thread.run.expired':
|
|
4375
|
-
__classPrivateFieldSet(this, _AssistantStream_finalRun, event.data, "f");
|
|
4376
|
-
if (__classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f")) {
|
|
4377
|
-
this._emit('toolCallDone', __classPrivateFieldGet(this, _AssistantStream_currentToolCall, "f"));
|
|
4378
|
-
__classPrivateFieldSet(this, _AssistantStream_currentToolCall, undefined, "f");
|
|
4376
|
+
__classPrivateFieldSet$1(this, _AssistantStream_finalRun, event.data, "f");
|
|
4377
|
+
if (__classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f")) {
|
|
4378
|
+
this._emit('toolCallDone', __classPrivateFieldGet$1(this, _AssistantStream_currentToolCall, "f"));
|
|
4379
|
+
__classPrivateFieldSet$1(this, _AssistantStream_currentToolCall, undefined, "f");
|
|
4379
4380
|
}
|
|
4380
4381
|
break;
|
|
4381
4382
|
}
|
|
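The hunks above all make the same mechanical change: AssistantStream's call sites are repointed from the unsuffixed __classPrivateFieldGet / __classPrivateFieldSet helpers to the $1-suffixed copies. A minimal sketch of the pattern, assuming a Rollup-style bundle (the suffixing scheme belongs to the bundler and is not an API of this package):

// TypeScript emits a copy of its private-field helpers in every module that
// reads or writes a #private field. When several such modules are inlined
// into one bundle, the first copy keeps its name and later copies are
// renamed ($1, $2, ...) so top-level identifiers stay unique.
var __classPrivateFieldGet = function (receiver, state, kind, f) {
    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var __classPrivateFieldGet$1 = function (receiver, state, kind, f) {
    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
// The hunks above only swap which copy a call site refers to, e.g.
//   __classPrivateFieldGet(this, _AssistantStream_instances, "m", _AssistantStream_addEvent)
// becomes
//   __classPrivateFieldGet$1(this, _AssistantStream_instances, "m", _AssistantStream_addEvent)
// with no behavioural change, since both copies share the same body.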
@@ -4692,666 +4693,1115 @@ Threads.RunsPage = RunsPage;
|
|
|
4692
4693
|
Threads.Messages = Messages;
|
|
4693
4694
|
Threads.MessagesPage = MessagesPage;
|
|
4694
4695
|
|
|
4695
|
-
|
|
4696
|
-
|
|
4697
|
-
|
|
4698
|
-
|
|
4699
|
-
|
|
4700
|
-
|
|
4701
|
-
|
|
4702
|
-
|
|
4703
|
-
console.error(result.reason);
|
|
4704
|
-
}
|
|
4705
|
-
throw new Error(`${rejected.length} promise(s) failed - see the above errors`);
|
|
4696
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4697
|
+
class Beta extends APIResource {
|
|
4698
|
+
constructor() {
|
|
4699
|
+
super(...arguments);
|
|
4700
|
+
this.realtime = new Realtime(this._client);
|
|
4701
|
+
this.chat = new Chat(this._client);
|
|
4702
|
+
this.assistants = new Assistants(this._client);
|
|
4703
|
+
this.threads = new Threads(this._client);
|
|
4706
4704
|
}
|
|
4707
|
-
|
|
4708
|
-
|
|
4709
|
-
|
|
4710
|
-
|
|
4711
|
-
|
|
4712
|
-
|
|
4705
|
+
}
|
|
4706
|
+
Beta.Realtime = Realtime;
|
|
4707
|
+
Beta.Assistants = Assistants;
|
|
4708
|
+
Beta.AssistantsPage = AssistantsPage;
|
|
4709
|
+
Beta.Threads = Threads;
|
|
4710
|
+
|
|
4711
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4712
|
+
class Completions extends APIResource {
|
|
4713
|
+
create(body, options) {
|
|
4714
|
+
return this._client.post('/completions', { body, ...options, stream: body.stream ?? false });
|
|
4713
4715
|
}
|
|
4714
|
-
|
|
4715
|
-
|
|
4716
|
+
}
|
|
4717
|
+
|
|
4718
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4719
|
+
class Embeddings extends APIResource {
|
|
4720
|
+
/**
|
|
4721
|
+
* Creates an embedding vector representing the input text.
|
|
4722
|
+
*/
|
|
4723
|
+
create(body, options) {
|
|
4724
|
+
return this._client.post('/embeddings', { body, ...options });
|
|
4725
|
+
}
|
|
4726
|
+
}
|
|
4716
4727
|
|
|
4717
4728
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4718
4729
|
let Files$1 = class Files extends APIResource {
|
|
4719
4730
|
/**
|
|
4720
|
-
*
|
|
4721
|
-
*
|
|
4722
|
-
*
|
|
4731
|
+
* Upload a file that can be used across various endpoints. Individual files can be
|
|
4732
|
+
* up to 512 MB, and the size of all files uploaded by one organization can be up
|
|
4733
|
+
* to 100 GB.
|
|
4734
|
+
*
|
|
4735
|
+
* The Assistants API supports files up to 2 million tokens and of specific file
|
|
4736
|
+
* types. See the
|
|
4737
|
+
* [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for
|
|
4738
|
+
* details.
|
|
4739
|
+
*
|
|
4740
|
+
* The Fine-tuning API only supports `.jsonl` files. The input also has certain
|
|
4741
|
+
* required formats for fine-tuning
|
|
4742
|
+
* [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or
|
|
4743
|
+
* [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
|
|
4744
|
+
* models.
|
|
4745
|
+
*
|
|
4746
|
+
* The Batch API only supports `.jsonl` files up to 200 MB in size. The input also
|
|
4747
|
+
* has a specific required
|
|
4748
|
+
* [format](https://platform.openai.com/docs/api-reference/batch/request-input).
|
|
4749
|
+
*
|
|
4750
|
+
* Please [contact us](https://help.openai.com/) if you need to increase these
|
|
4751
|
+
* storage limits.
|
|
4723
4752
|
*/
|
|
4724
|
-
create(
|
|
4725
|
-
return this._client.post(
|
|
4726
|
-
body,
|
|
4727
|
-
...options,
|
|
4728
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4729
|
-
});
|
|
4753
|
+
create(body, options) {
|
|
4754
|
+
return this._client.post('/files', multipartFormRequestOptions({ body, ...options }));
|
|
4730
4755
|
}
|
|
4731
4756
|
/**
|
|
4732
|
-
*
|
|
4757
|
+
* Returns information about a specific file.
|
|
4733
4758
|
*/
|
|
4734
|
-
retrieve(
|
|
4735
|
-
return this._client.get(`/
|
|
4736
|
-
...options,
|
|
4737
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4738
|
-
});
|
|
4759
|
+
retrieve(fileId, options) {
|
|
4760
|
+
return this._client.get(`/files/${fileId}`, options);
|
|
4739
4761
|
}
|
|
4740
|
-
list(
|
|
4762
|
+
list(query = {}, options) {
|
|
4741
4763
|
if (isRequestOptions(query)) {
|
|
4742
|
-
return this.list(
|
|
4764
|
+
return this.list({}, query);
|
|
4743
4765
|
}
|
|
4744
|
-
return this._client.getAPIList(
|
|
4745
|
-
query,
|
|
4746
|
-
...options,
|
|
4747
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4748
|
-
});
|
|
4749
|
-
}
|
|
4750
|
-
/**
|
|
4751
|
-
* Delete a vector store file. This will remove the file from the vector store but
|
|
4752
|
-
* the file itself will not be deleted. To delete the file, use the
|
|
4753
|
-
* [delete file](https://platform.openai.com/docs/api-reference/files/delete)
|
|
4754
|
-
* endpoint.
|
|
4755
|
-
*/
|
|
4756
|
-
del(vectorStoreId, fileId, options) {
|
|
4757
|
-
return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
|
|
4758
|
-
...options,
|
|
4759
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4760
|
-
});
|
|
4766
|
+
return this._client.getAPIList('/files', FileObjectsPage, { query, ...options });
|
|
4761
4767
|
}
|
|
4762
4768
|
/**
|
|
4763
|
-
*
|
|
4769
|
+
* Delete a file.
|
|
4764
4770
|
*/
|
|
4765
|
-
|
|
4766
|
-
|
|
4767
|
-
return await this.poll(vectorStoreId, file.id, options);
|
|
4771
|
+
del(fileId, options) {
|
|
4772
|
+
return this._client.delete(`/files/${fileId}`, options);
|
|
4768
4773
|
}
|
|
4769
4774
|
/**
|
|
4770
|
-
*
|
|
4771
|
-
*
|
|
4772
|
-
* Note: this will return even if the file failed to process, you need to check
|
|
4773
|
-
* file.last_error and file.status to handle these cases
|
|
4775
|
+
* Returns the contents of the specified file.
|
|
4774
4776
|
*/
|
|
4775
|
-
|
|
4776
|
-
|
|
4777
|
-
|
|
4778
|
-
headers
|
|
4779
|
-
|
|
4780
|
-
|
|
4781
|
-
const fileResponse = await this.retrieve(vectorStoreId, fileId, {
|
|
4782
|
-
...options,
|
|
4783
|
-
headers,
|
|
4784
|
-
}).withResponse();
|
|
4785
|
-
const file = fileResponse.data;
|
|
4786
|
-
switch (file.status) {
|
|
4787
|
-
case 'in_progress':
|
|
4788
|
-
let sleepInterval = 5000;
|
|
4789
|
-
if (options?.pollIntervalMs) {
|
|
4790
|
-
sleepInterval = options.pollIntervalMs;
|
|
4791
|
-
}
|
|
4792
|
-
else {
|
|
4793
|
-
const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms');
|
|
4794
|
-
if (headerInterval) {
|
|
4795
|
-
const headerIntervalMs = parseInt(headerInterval);
|
|
4796
|
-
if (!isNaN(headerIntervalMs)) {
|
|
4797
|
-
sleepInterval = headerIntervalMs;
|
|
4798
|
-
}
|
|
4799
|
-
}
|
|
4800
|
-
}
|
|
4801
|
-
await sleep(sleepInterval);
|
|
4802
|
-
break;
|
|
4803
|
-
case 'failed':
|
|
4804
|
-
case 'completed':
|
|
4805
|
-
return file;
|
|
4806
|
-
}
|
|
4807
|
-
}
|
|
4777
|
+
content(fileId, options) {
|
|
4778
|
+
return this._client.get(`/files/${fileId}/content`, {
|
|
4779
|
+
...options,
|
|
4780
|
+
headers: { Accept: 'application/binary', ...options?.headers },
|
|
4781
|
+
__binaryResponse: true,
|
|
4782
|
+
});
|
|
4808
4783
|
}
|
|
4809
4784
|
/**
|
|
4810
|
-
*
|
|
4785
|
+
* Returns the contents of the specified file.
|
|
4811
4786
|
*
|
|
4812
|
-
*
|
|
4813
|
-
* polling helper method to wait for processing to complete).
|
|
4787
|
+
* @deprecated The `.content()` method should be used instead
|
|
4814
4788
|
*/
|
|
4815
|
-
|
|
4816
|
-
|
|
4817
|
-
return this.create(vectorStoreId, { file_id: fileInfo.id }, options);
|
|
4789
|
+
retrieveContent(fileId, options) {
|
|
4790
|
+
return this._client.get(`/files/${fileId}/content`, options);
|
|
4818
4791
|
}
|
|
4819
4792
|
/**
|
|
4820
|
-
*
|
|
4793
|
+
* Waits for the given file to be processed, default timeout is 30 mins.
|
|
4821
4794
|
*/
|
|
4822
|
-
async
|
|
4823
|
-
const
|
|
4824
|
-
|
|
4795
|
+
async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) {
|
|
4796
|
+
const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']);
|
|
4797
|
+
const start = Date.now();
|
|
4798
|
+
let file = await this.retrieve(id);
|
|
4799
|
+
while (!file.status || !TERMINAL_STATES.has(file.status)) {
|
|
4800
|
+
await sleep(pollInterval);
|
|
4801
|
+
file = await this.retrieve(id);
|
|
4802
|
+
if (Date.now() - start > maxWait) {
|
|
4803
|
+
throw new APIConnectionTimeoutError({
|
|
4804
|
+
message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`,
|
|
4805
|
+
});
|
|
4806
|
+
}
|
|
4807
|
+
}
|
|
4808
|
+
return file;
|
|
4825
4809
|
}
|
|
4826
4810
|
};
|
|
4827
|
-
class
|
|
4811
|
+
class FileObjectsPage extends CursorPage {
|
|
4828
4812
|
}
|
|
4829
|
-
Files$1.
|
|
4813
|
+
Files$1.FileObjectsPage = FileObjectsPage;
|
|
4830
4814
|
|
|
4831
4815
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4832
|
-
class
|
|
4816
|
+
class Checkpoints extends APIResource {
|
|
4817
|
+
list(fineTuningJobId, query = {}, options) {
|
|
4818
|
+
if (isRequestOptions(query)) {
|
|
4819
|
+
return this.list(fineTuningJobId, {}, query);
|
|
4820
|
+
}
|
|
4821
|
+
return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/checkpoints`, FineTuningJobCheckpointsPage, { query, ...options });
|
|
4822
|
+
}
|
|
4823
|
+
}
|
|
4824
|
+
class FineTuningJobCheckpointsPage extends CursorPage {
|
|
4825
|
+
}
|
|
4826
|
+
Checkpoints.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage;
|
|
4827
|
+
|
|
4828
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4829
|
+
class Jobs extends APIResource {
|
|
4830
|
+
constructor() {
|
|
4831
|
+
super(...arguments);
|
|
4832
|
+
this.checkpoints = new Checkpoints(this._client);
|
|
4833
|
+
}
|
|
4833
4834
|
/**
|
|
4834
|
-
*
|
|
4835
|
+
* Creates a fine-tuning job which begins the process of creating a new model from
|
|
4836
|
+
* a given dataset.
|
|
4837
|
+
*
|
|
4838
|
+
* Response includes details of the enqueued job including job status and the name
|
|
4839
|
+
* of the fine-tuned models once complete.
|
|
4840
|
+
*
|
|
4841
|
+
* [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
|
|
4835
4842
|
*/
|
|
4836
|
-
create(
|
|
4837
|
-
return this._client.post(
|
|
4838
|
-
body,
|
|
4839
|
-
...options,
|
|
4840
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4841
|
-
});
|
|
4843
|
+
create(body, options) {
|
|
4844
|
+
return this._client.post('/fine_tuning/jobs', { body, ...options });
|
|
4842
4845
|
}
|
|
4843
4846
|
/**
|
|
4844
|
-
*
|
|
4847
|
+
* Get info about a fine-tuning job.
|
|
4848
|
+
*
|
|
4849
|
+
* [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
|
|
4845
4850
|
*/
|
|
4846
|
-
retrieve(
|
|
4847
|
-
return this._client.get(`/
|
|
4848
|
-
...options,
|
|
4849
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4850
|
-
});
|
|
4851
|
-
}
|
|
4852
|
-
/**
|
|
4853
|
-
* Cancel a vector store file batch. This attempts to cancel the processing of
|
|
4854
|
-
* files in this batch as soon as possible.
|
|
4855
|
-
*/
|
|
4856
|
-
cancel(vectorStoreId, batchId, options) {
|
|
4857
|
-
return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, {
|
|
4858
|
-
...options,
|
|
4859
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4860
|
-
});
|
|
4861
|
-
}
|
|
4862
|
-
/**
|
|
4863
|
-
* Create a vector store batch and poll until all files have been processed.
|
|
4864
|
-
*/
|
|
4865
|
-
async createAndPoll(vectorStoreId, body, options) {
|
|
4866
|
-
const batch = await this.create(vectorStoreId, body);
|
|
4867
|
-
return await this.poll(vectorStoreId, batch.id, options);
|
|
4851
|
+
retrieve(fineTuningJobId, options) {
|
|
4852
|
+
return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options);
|
|
4868
4853
|
}
|
|
4869
|
-
|
|
4854
|
+
list(query = {}, options) {
|
|
4870
4855
|
if (isRequestOptions(query)) {
|
|
4871
|
-
return this.
|
|
4856
|
+
return this.list({}, query);
|
|
4872
4857
|
}
|
|
4873
|
-
return this._client.getAPIList(
|
|
4858
|
+
return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options });
|
|
4874
4859
|
}
|
|
4875
4860
|
/**
|
|
4876
|
-
*
|
|
4877
|
-
*
|
|
4878
|
-
* Note: this will return even if one of the files failed to process, you need to
|
|
4879
|
-
* check batch.file_counts.failed_count to handle this case.
|
|
4861
|
+
* Immediately cancel a fine-tune job.
|
|
4880
4862
|
*/
|
|
4881
|
-
|
|
4882
|
-
|
|
4883
|
-
if (options?.pollIntervalMs) {
|
|
4884
|
-
headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
|
|
4885
|
-
}
|
|
4886
|
-
while (true) {
|
|
4887
|
-
const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, {
|
|
4888
|
-
...options,
|
|
4889
|
-
headers,
|
|
4890
|
-
}).withResponse();
|
|
4891
|
-
switch (batch.status) {
|
|
4892
|
-
case 'in_progress':
|
|
4893
|
-
let sleepInterval = 5000;
|
|
4894
|
-
if (options?.pollIntervalMs) {
|
|
4895
|
-
sleepInterval = options.pollIntervalMs;
|
|
4896
|
-
}
|
|
4897
|
-
else {
|
|
4898
|
-
const headerInterval = response.headers.get('openai-poll-after-ms');
|
|
4899
|
-
if (headerInterval) {
|
|
4900
|
-
const headerIntervalMs = parseInt(headerInterval);
|
|
4901
|
-
if (!isNaN(headerIntervalMs)) {
|
|
4902
|
-
sleepInterval = headerIntervalMs;
|
|
4903
|
-
}
|
|
4904
|
-
}
|
|
4905
|
-
}
|
|
4906
|
-
await sleep(sleepInterval);
|
|
4907
|
-
break;
|
|
4908
|
-
case 'failed':
|
|
4909
|
-
case 'cancelled':
|
|
4910
|
-
case 'completed':
|
|
4911
|
-
return batch;
|
|
4912
|
-
}
|
|
4913
|
-
}
|
|
4863
|
+
cancel(fineTuningJobId, options) {
|
|
4864
|
+
return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options);
|
|
4914
4865
|
}
|
|
4915
|
-
|
|
4916
|
-
|
|
4917
|
-
|
|
4918
|
-
* The concurrency limit is configurable using the `maxConcurrency` parameter.
|
|
4919
|
-
*/
|
|
4920
|
-
async uploadAndPoll(vectorStoreId, { files, fileIds = [] }, options) {
|
|
4921
|
-
if (files == null || files.length == 0) {
|
|
4922
|
-
throw new Error(`No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`);
|
|
4923
|
-
}
|
|
4924
|
-
const configuredConcurrency = options?.maxConcurrency ?? 5;
|
|
4925
|
-
// We cap the number of workers at the number of files (so we don't start any unnecessary workers)
|
|
4926
|
-
const concurrencyLimit = Math.min(configuredConcurrency, files.length);
|
|
4927
|
-
const client = this._client;
|
|
4928
|
-
const fileIterator = files.values();
|
|
4929
|
-
const allFileIds = [...fileIds];
|
|
4930
|
-
// This code is based on this design. The libraries don't accommodate our environment limits.
|
|
4931
|
-
// https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all
|
|
4932
|
-
async function processFiles(iterator) {
|
|
4933
|
-
for (let item of iterator) {
|
|
4934
|
-
const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options);
|
|
4935
|
-
allFileIds.push(fileObj.id);
|
|
4936
|
-
}
|
|
4866
|
+
listEvents(fineTuningJobId, query = {}, options) {
|
|
4867
|
+
if (isRequestOptions(query)) {
|
|
4868
|
+
return this.listEvents(fineTuningJobId, {}, query);
|
|
4937
4869
|
}
|
|
4938
|
-
|
|
4939
|
-
|
|
4940
|
-
|
|
4941
|
-
await allSettledWithThrow(workers);
|
|
4942
|
-
return await this.createAndPoll(vectorStoreId, {
|
|
4943
|
-
file_ids: allFileIds,
|
|
4870
|
+
return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, {
|
|
4871
|
+
query,
|
|
4872
|
+
...options,
|
|
4944
4873
|
});
|
|
4945
4874
|
}
|
|
4946
4875
|
}
|
|
4876
|
+
class FineTuningJobsPage extends CursorPage {
|
|
4877
|
+
}
|
|
4878
|
+
class FineTuningJobEventsPage extends CursorPage {
|
|
4879
|
+
}
|
|
4880
|
+
Jobs.FineTuningJobsPage = FineTuningJobsPage;
|
|
4881
|
+
Jobs.FineTuningJobEventsPage = FineTuningJobEventsPage;
|
|
4882
|
+
Jobs.Checkpoints = Checkpoints;
|
|
4883
|
+
Jobs.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage;
|
|
4947
4884
|
|
|
4948
4885
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4949
|
-
class
|
|
4886
|
+
class FineTuning extends APIResource {
|
|
4950
4887
|
constructor() {
|
|
4951
4888
|
super(...arguments);
|
|
4952
|
-
this.
|
|
4953
|
-
this.fileBatches = new FileBatches(this._client);
|
|
4889
|
+
this.jobs = new Jobs(this._client);
|
|
4954
4890
|
}
|
|
4891
|
+
}
|
|
4892
|
+
FineTuning.Jobs = Jobs;
|
|
4893
|
+
FineTuning.FineTuningJobsPage = FineTuningJobsPage;
|
|
4894
|
+
FineTuning.FineTuningJobEventsPage = FineTuningJobEventsPage;
|
|
4895
|
+
|
|
4896
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4897
|
+
class Images extends APIResource {
|
|
4955
4898
|
/**
|
|
4956
|
-
*
|
|
4899
|
+
* Creates a variation of a given image.
|
|
4957
4900
|
*/
|
|
4958
|
-
|
|
4959
|
-
return this._client.post('/
|
|
4960
|
-
body,
|
|
4961
|
-
...options,
|
|
4962
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4963
|
-
});
|
|
4901
|
+
createVariation(body, options) {
|
|
4902
|
+
return this._client.post('/images/variations', multipartFormRequestOptions({ body, ...options }));
|
|
4964
4903
|
}
|
|
4965
4904
|
/**
|
|
4966
|
-
*
|
|
4905
|
+
* Creates an edited or extended image given an original image and a prompt.
|
|
4967
4906
|
*/
|
|
4968
|
-
|
|
4969
|
-
return this._client.
|
|
4970
|
-
...options,
|
|
4971
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4972
|
-
});
|
|
4907
|
+
edit(body, options) {
|
|
4908
|
+
return this._client.post('/images/edits', multipartFormRequestOptions({ body, ...options }));
|
|
4973
4909
|
}
|
|
4974
4910
|
/**
|
|
4975
|
-
*
|
|
4911
|
+
* Creates an image given a prompt.
|
|
4976
4912
|
*/
|
|
4977
|
-
|
|
4978
|
-
return this._client.post(
|
|
4979
|
-
body,
|
|
4980
|
-
...options,
|
|
4981
|
-
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
4982
|
-
});
|
|
4913
|
+
generate(body, options) {
|
|
4914
|
+
return this._client.post('/images/generations', { body, ...options });
|
|
4983
4915
|
}
|
|
4984
|
-
|
|
4985
|
-
|
|
4986
|
-
|
|
4987
|
-
|
|
4988
|
-
|
|
4989
|
-
|
|
4990
|
-
|
|
4991
|
-
|
|
4992
|
-
|
|
4916
|
+
}
|
|
4917
|
+
|
|
4918
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
4919
|
+
class Models extends APIResource {
|
|
4920
|
+
/**
|
|
4921
|
+
* Retrieves a model instance, providing basic information about the model such as
|
|
4922
|
+
* the owner and permissioning.
|
|
4923
|
+
*/
|
|
4924
|
+
retrieve(model, options) {
|
|
4925
|
+
return this._client.get(`/models/${model}`, options);
|
|
4993
4926
|
}
|
|
4994
4927
|
/**
|
|
4995
|
-
*
|
|
4928
|
+
* Lists the currently available models, and provides basic information about each
|
|
4929
|
+
* one such as the owner and availability.
|
|
4996
4930
|
*/
|
|
4997
|
-
|
|
4998
|
-
return this._client.
|
|
4999
|
-
|
|
5000
|
-
|
|
5001
|
-
|
|
4931
|
+
list(options) {
|
|
4932
|
+
return this._client.getAPIList('/models', ModelsPage, options);
|
|
4933
|
+
}
|
|
4934
|
+
/**
|
|
4935
|
+
* Delete a fine-tuned model. You must have the Owner role in your organization to
|
|
4936
|
+
* delete a model.
|
|
4937
|
+
*/
|
|
4938
|
+
del(model, options) {
|
|
4939
|
+
return this._client.delete(`/models/${model}`, options);
|
|
5002
4940
|
}
|
|
5003
4941
|
}
|
|
5004
|
-
|
|
4942
|
+
/**
|
|
4943
|
+
* Note: no pagination actually occurs yet, this is for forwards-compatibility.
|
|
4944
|
+
*/
|
|
4945
|
+
class ModelsPage extends Page {
|
|
5005
4946
|
}
|
|
5006
|
-
|
|
5007
|
-
VectorStores.Files = Files$1;
|
|
5008
|
-
VectorStores.VectorStoreFilesPage = VectorStoreFilesPage;
|
|
5009
|
-
VectorStores.FileBatches = FileBatches;
|
|
4947
|
+
Models.ModelsPage = ModelsPage;
|
|
5010
4948
|
|
|
5011
4949
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5012
|
-
class
|
|
5013
|
-
|
|
5014
|
-
|
|
5015
|
-
|
|
5016
|
-
|
|
5017
|
-
|
|
5018
|
-
this.
|
|
5019
|
-
this.threads = new Threads(this._client);
|
|
4950
|
+
class Moderations extends APIResource {
|
|
4951
|
+
/**
|
|
4952
|
+
* Classifies if text and/or image inputs are potentially harmful. Learn more in
|
|
4953
|
+
* the [moderation guide](https://platform.openai.com/docs/guides/moderation).
|
|
4954
|
+
*/
|
|
4955
|
+
create(body, options) {
|
|
4956
|
+
return this._client.post('/moderations', { body, ...options });
|
|
5020
4957
|
}
|
|
5021
4958
|
}
|
|
5022
|
-
Beta.Realtime = Realtime;
|
|
5023
|
-
Beta.VectorStores = VectorStores;
|
|
5024
|
-
Beta.VectorStoresPage = VectorStoresPage;
|
|
5025
|
-
Beta.Assistants = Assistants;
|
|
5026
|
-
Beta.AssistantsPage = AssistantsPage;
|
|
5027
|
-
Beta.Threads = Threads;
|
|
5028
4959
|
|
|
5029
|
-
|
|
5030
|
-
|
|
5031
|
-
|
|
5032
|
-
|
|
4960
|
+
function maybeParseResponse(response, params) {
|
|
4961
|
+
if (!params || !hasAutoParseableInput(params)) {
|
|
4962
|
+
return {
|
|
4963
|
+
...response,
|
|
4964
|
+
output_parsed: null,
|
|
4965
|
+
output: response.output.map((item) => {
|
|
4966
|
+
if (item.type === 'function_call') {
|
|
4967
|
+
return {
|
|
4968
|
+
...item,
|
|
4969
|
+
parsed_arguments: null,
|
|
4970
|
+
};
|
|
4971
|
+
}
|
|
4972
|
+
if (item.type === 'message') {
|
|
4973
|
+
return {
|
|
4974
|
+
...item,
|
|
4975
|
+
content: item.content.map((content) => ({
|
|
4976
|
+
...content,
|
|
4977
|
+
parsed: null,
|
|
4978
|
+
})),
|
|
4979
|
+
};
|
|
4980
|
+
}
|
|
4981
|
+
else {
|
|
4982
|
+
return item;
|
|
4983
|
+
}
|
|
4984
|
+
}),
|
|
4985
|
+
};
|
|
4986
|
+
}
|
|
4987
|
+
return parseResponse(response, params);
|
|
4988
|
+
}
|
|
4989
|
+
function parseResponse(response, params) {
|
|
4990
|
+
const output = response.output.map((item) => {
|
|
4991
|
+
if (item.type === 'function_call') {
|
|
4992
|
+
return {
|
|
4993
|
+
...item,
|
|
4994
|
+
parsed_arguments: parseToolCall(params, item),
|
|
4995
|
+
};
|
|
4996
|
+
}
|
|
4997
|
+
if (item.type === 'message') {
|
|
4998
|
+
const content = item.content.map((content) => {
|
|
4999
|
+
if (content.type === 'output_text') {
|
|
5000
|
+
return {
|
|
5001
|
+
...content,
|
|
5002
|
+
parsed: parseTextFormat(params, content.text),
|
|
5003
|
+
};
|
|
5004
|
+
}
|
|
5005
|
+
return content;
|
|
5006
|
+
});
|
|
5007
|
+
return {
|
|
5008
|
+
...item,
|
|
5009
|
+
content,
|
|
5010
|
+
};
|
|
5011
|
+
}
|
|
5012
|
+
return item;
|
|
5013
|
+
});
|
|
5014
|
+
const parsed = Object.assign({}, response, { output });
|
|
5015
|
+
if (!Object.getOwnPropertyDescriptor(response, 'output_text')) {
|
|
5016
|
+
addOutputText(parsed);
|
|
5017
|
+
}
|
|
5018
|
+
Object.defineProperty(parsed, 'output_parsed', {
|
|
5019
|
+
enumerable: true,
|
|
5020
|
+
get() {
|
|
5021
|
+
for (const output of parsed.output) {
|
|
5022
|
+
if (output.type !== 'message') {
|
|
5023
|
+
continue;
|
|
5024
|
+
}
|
|
5025
|
+
for (const content of output.content) {
|
|
5026
|
+
if (content.type === 'output_text' && content.parsed !== null) {
|
|
5027
|
+
return content.parsed;
|
|
5028
|
+
}
|
|
5029
|
+
}
|
|
5030
|
+
}
|
|
5031
|
+
return null;
|
|
5032
|
+
},
|
|
5033
|
+
});
|
|
5034
|
+
return parsed;
|
|
5035
|
+
}
|
|
5036
|
+
function parseTextFormat(params, content) {
|
|
5037
|
+
if (params.text?.format?.type !== 'json_schema') {
|
|
5038
|
+
return null;
|
|
5039
|
+
}
|
|
5040
|
+
if ('$parseRaw' in params.text?.format) {
|
|
5041
|
+
const text_format = params.text?.format;
|
|
5042
|
+
return text_format.$parseRaw(content);
|
|
5043
|
+
}
|
|
5044
|
+
return JSON.parse(content);
|
|
5045
|
+
}
|
|
5046
|
+
function hasAutoParseableInput(params) {
|
|
5047
|
+
if (isAutoParsableResponseFormat(params.text?.format)) {
|
|
5048
|
+
return true;
|
|
5049
|
+
}
|
|
5050
|
+
return false;
|
|
5051
|
+
}
|
|
5052
|
+
function isAutoParsableTool(tool) {
|
|
5053
|
+
return tool?.['$brand'] === 'auto-parseable-tool';
|
|
5054
|
+
}
|
|
5055
|
+
function getInputToolByName(input_tools, name) {
|
|
5056
|
+
return input_tools.find((tool) => tool.type === 'function' && tool.name === name);
|
|
5057
|
+
}
|
|
5058
|
+
function parseToolCall(params, toolCall) {
|
|
5059
|
+
const inputTool = getInputToolByName(params.tools ?? [], toolCall.name);
|
|
5060
|
+
return {
|
|
5061
|
+
...toolCall,
|
|
5062
|
+
...toolCall,
|
|
5063
|
+
parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCall.arguments)
|
|
5064
|
+
: inputTool?.strict ? JSON.parse(toolCall.arguments)
|
|
5065
|
+
: null,
|
|
5066
|
+
};
|
|
5067
|
+
}
|
|
5068
|
+
function addOutputText(rsp) {
|
|
5069
|
+
const texts = [];
|
|
5070
|
+
for (const output of rsp.output) {
|
|
5071
|
+
if (output.type !== 'message') {
|
|
5072
|
+
continue;
|
|
5073
|
+
}
|
|
5074
|
+
for (const content of output.content) {
|
|
5075
|
+
if (content.type === 'output_text') {
|
|
5076
|
+
texts.push(content.text);
|
|
5077
|
+
}
|
|
5078
|
+
}
|
|
5033
5079
|
}
|
|
5080
|
+
rsp.output_text = texts.join('');
|
|
5034
5081
|
}
|
|
5035
5082
|
|
|
5036
5083
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5037
|
-
class
|
|
5038
|
-
|
|
5039
|
-
|
|
5040
|
-
|
|
5041
|
-
|
|
5042
|
-
return this._client.
|
|
5084
|
+
class InputItems extends APIResource {
|
|
5085
|
+
list(responseId, query = {}, options) {
|
|
5086
|
+
if (isRequestOptions(query)) {
|
|
5087
|
+
return this.list(responseId, {}, query);
|
|
5088
|
+
}
|
|
5089
|
+
return this._client.getAPIList(`/responses/${responseId}/input_items`, ResponseItemsPage, {
|
|
5090
|
+
query,
|
|
5091
|
+
...options,
|
|
5092
|
+
});
|
|
5043
5093
|
}
|
|
5044
5094
|
}
|
|
5045
5095
|
|
|
5046
|
-
|
|
5047
|
-
|
|
5048
|
-
|
|
5049
|
-
|
|
5050
|
-
|
|
5051
|
-
|
|
5052
|
-
|
|
5053
|
-
|
|
5054
|
-
|
|
5055
|
-
|
|
5056
|
-
|
|
5057
|
-
|
|
5058
|
-
|
|
5059
|
-
|
|
5060
|
-
|
|
5061
|
-
|
|
5062
|
-
|
|
5063
|
-
|
|
5064
|
-
|
|
5065
|
-
|
|
5066
|
-
|
|
5067
|
-
|
|
5068
|
-
|
|
5069
|
-
|
|
5070
|
-
|
|
5071
|
-
|
|
5072
|
-
|
|
5096
|
+
var __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
|
|
5097
|
+
if (kind === "m") throw new TypeError("Private method is not writable");
|
|
5098
|
+
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
|
5099
|
+
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
|
5100
|
+
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
|
5101
|
+
};
|
|
5102
|
+
var __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
|
|
5103
|
+
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
|
5104
|
+
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
5105
|
+
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
5106
|
+
};
|
|
5107
|
+
var _ResponseStream_instances, _ResponseStream_params, _ResponseStream_currentResponseSnapshot, _ResponseStream_finalResponse, _ResponseStream_beginRequest, _ResponseStream_addEvent, _ResponseStream_endRequest, _ResponseStream_accumulateResponse;
|
|
5108
|
+
class ResponseStream extends EventStream {
|
|
5109
|
+
constructor(params) {
|
|
5110
|
+
super();
|
|
5111
|
+
_ResponseStream_instances.add(this);
|
|
5112
|
+
_ResponseStream_params.set(this, void 0);
|
|
5113
|
+
_ResponseStream_currentResponseSnapshot.set(this, void 0);
|
|
5114
|
+
_ResponseStream_finalResponse.set(this, void 0);
|
|
5115
|
+
__classPrivateFieldSet(this, _ResponseStream_params, params, "f");
|
|
5116
|
+
}
|
|
5117
|
+
static createResponse(client, params, options) {
|
|
5118
|
+
const runner = new ResponseStream(params);
|
|
5119
|
+
runner._run(() => runner._createResponse(client, params, {
|
|
5120
|
+
...options,
|
|
5121
|
+
headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' },
|
|
5122
|
+
}));
|
|
5123
|
+
return runner;
|
|
5073
5124
|
}
|
|
5074
|
-
|
|
5075
|
-
|
|
5076
|
-
|
|
5077
|
-
|
|
5078
|
-
|
|
5125
|
+
async _createResponse(client, params, options) {
|
|
5126
|
+
const signal = options?.signal;
|
|
5127
|
+
if (signal) {
|
|
5128
|
+
if (signal.aborted)
|
|
5129
|
+
this.controller.abort();
|
|
5130
|
+
signal.addEventListener('abort', () => this.controller.abort());
|
|
5131
|
+
}
|
|
5132
|
+
__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_beginRequest).call(this);
|
|
5133
|
+
const stream = await client.responses.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
|
|
5134
|
+
this._connected();
|
|
5135
|
+
for await (const event of stream) {
|
|
5136
|
+
__classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_addEvent).call(this, event);
|
|
5137
|
+
}
|
|
5138
|
+
if (stream.controller.signal?.aborted) {
|
|
5139
|
+
throw new APIUserAbortError();
|
|
5140
|
+
}
|
|
5141
|
+
return __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_endRequest).call(this);
|
|
5079
5142
|
}
|
|
5080
|
-
|
|
5081
|
-
if (
|
|
5082
|
-
return
|
|
5143
|
+
[(_ResponseStream_params = new WeakMap(), _ResponseStream_currentResponseSnapshot = new WeakMap(), _ResponseStream_finalResponse = new WeakMap(), _ResponseStream_instances = new WeakSet(), _ResponseStream_beginRequest = function _ResponseStream_beginRequest() {
|
|
5144
|
+
if (this.ended)
|
|
5145
|
+
return;
|
|
5146
|
+
__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
|
|
5147
|
+
}, _ResponseStream_addEvent = function _ResponseStream_addEvent(event) {
|
|
5148
|
+
if (this.ended)
|
|
5149
|
+
return;
|
|
5150
|
+
const response = __classPrivateFieldGet(this, _ResponseStream_instances, "m", _ResponseStream_accumulateResponse).call(this, event);
|
|
5151
|
+
this._emit('event', event);
|
|
5152
|
+
switch (event.type) {
|
|
5153
|
+
case 'response.output_text.delta': {
|
|
5154
|
+
const output = response.output[event.output_index];
|
|
5155
|
+
if (!output) {
|
|
5156
|
+
throw new OpenAIError(`missing output at index ${event.output_index}`);
|
|
5157
|
+
}
|
|
5158
|
+
if (output.type === 'message') {
|
|
5159
|
+
const content = output.content[event.content_index];
|
|
5160
|
+
if (!content) {
|
|
5161
|
+
throw new OpenAIError(`missing content at index ${event.content_index}`);
|
|
5162
|
+
}
|
|
5163
|
+
if (content.type !== 'output_text') {
|
|
5164
|
+
throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
|
|
5165
|
+
}
|
|
5166
|
+
this._emit('response.output_text.delta', {
|
|
5167
|
+
...event,
|
|
5168
|
+
snapshot: content.text,
|
|
5169
|
+
});
|
|
5170
|
+
}
|
|
5171
|
+
break;
|
|
5172
|
+
}
|
|
5173
|
+
case 'response.function_call_arguments.delta': {
|
|
5174
|
+
const output = response.output[event.output_index];
|
|
5175
|
+
if (!output) {
|
|
5176
|
+
throw new OpenAIError(`missing output at index ${event.output_index}`);
|
|
5177
|
+
}
|
|
5178
|
+
if (output.type === 'function_call') {
|
|
5179
|
+
this._emit('response.function_call_arguments.delta', {
|
|
5180
|
+
...event,
|
|
5181
|
+
snapshot: output.arguments,
|
|
5182
|
+
});
|
|
5183
|
+
}
|
|
5184
|
+
break;
|
|
5185
|
+
}
|
|
5186
|
+
default:
|
|
5187
|
+
// @ts-ignore
|
|
5188
|
+
this._emit(event.type, event);
|
|
5189
|
+
break;
|
|
5083
5190
|
}
|
|
5084
|
-
|
|
5191
|
+
}, _ResponseStream_endRequest = function _ResponseStream_endRequest() {
|
|
5192
|
+
if (this.ended) {
|
|
5193
|
+
throw new OpenAIError(`stream has ended, this shouldn't happen`);
|
|
5194
|
+
}
|
|
5195
|
+
const snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
|
|
5196
|
+
if (!snapshot) {
|
|
5197
|
+
throw new OpenAIError(`request ended without sending any events`);
|
|
5198
|
+
}
|
|
5199
|
+
__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, undefined, "f");
|
|
5200
|
+
const parsedResponse = finalizeResponse(snapshot, __classPrivateFieldGet(this, _ResponseStream_params, "f"));
|
|
5201
|
+
__classPrivateFieldSet(this, _ResponseStream_finalResponse, parsedResponse, "f");
|
|
5202
|
+
return parsedResponse;
|
|
5203
|
+
}, _ResponseStream_accumulateResponse = function _ResponseStream_accumulateResponse(event) {
|
|
5204
|
+
let snapshot = __classPrivateFieldGet(this, _ResponseStream_currentResponseSnapshot, "f");
|
|
5205
|
+
if (!snapshot) {
|
|
5206
|
+
if (event.type !== 'response.created') {
|
|
5207
|
+
throw new OpenAIError(`When snapshot hasn't been set yet, expected 'response.created' event, got ${event.type}`);
|
|
5208
|
+
}
|
|
5209
|
+
snapshot = __classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
|
|
5210
|
+
return snapshot;
|
|
5211
|
+
}
|
|
5212
|
+
switch (event.type) {
|
|
5213
|
+
case 'response.output_item.added': {
|
|
5214
|
+
snapshot.output.push(event.item);
|
|
5215
|
+
break;
|
|
5216
|
+
}
|
|
5217
|
+
case 'response.content_part.added': {
|
|
5218
|
+
const output = snapshot.output[event.output_index];
|
|
5219
|
+
if (!output) {
|
|
5220
|
+
throw new OpenAIError(`missing output at index ${event.output_index}`);
|
|
5221
|
+
}
|
|
5222
|
+
if (output.type === 'message') {
|
|
5223
|
+
output.content.push(event.part);
|
|
5224
|
+
}
|
|
5225
|
+
break;
|
|
5226
|
+
}
|
|
5227
|
+
case 'response.output_text.delta': {
|
|
5228
|
+
const output = snapshot.output[event.output_index];
|
|
5229
|
+
if (!output) {
|
|
5230
|
+
throw new OpenAIError(`missing output at index ${event.output_index}`);
|
|
5231
|
+
}
|
|
5232
|
+
if (output.type === 'message') {
|
|
5233
|
+
const content = output.content[event.content_index];
|
|
5234
|
+
if (!content) {
|
|
5235
|
+
throw new OpenAIError(`missing content at index ${event.content_index}`);
|
|
5236
|
+
}
|
|
5237
|
+
if (content.type !== 'output_text') {
|
|
5238
|
+
throw new OpenAIError(`expected content to be 'output_text', got ${content.type}`);
|
|
5239
|
+
}
|
|
5240
|
+
content.text += event.delta;
|
|
5241
|
+
}
|
|
5242
|
+
break;
|
|
5243
|
+
}
|
|
5244
|
+
case 'response.function_call_arguments.delta': {
|
|
5245
|
+
const output = snapshot.output[event.output_index];
|
|
5246
|
+
if (!output) {
|
|
5247
|
+
throw new OpenAIError(`missing output at index ${event.output_index}`);
|
|
5248
|
+
}
|
|
5249
|
+
if (output.type === 'function_call') {
|
|
5250
|
+
output.arguments += event.delta;
|
|
5251
|
+
}
|
|
5252
|
+
break;
|
|
5253
|
+
}
|
|
5254
|
+
case 'response.completed': {
|
|
5255
|
+
__classPrivateFieldSet(this, _ResponseStream_currentResponseSnapshot, event.response, "f");
|
|
5256
|
+
break;
|
|
5257
|
+
}
|
|
5258
|
+
}
|
|
5259
|
+
return snapshot;
|
|
5260
|
+
}, Symbol.asyncIterator)]() {
|
|
5261
|
+
const pushQueue = [];
|
|
5262
|
+
const readQueue = [];
|
|
5263
|
+
let done = false;
|
|
5264
|
+
this.on('event', (event) => {
|
|
5265
|
+
const reader = readQueue.shift();
|
|
5266
|
+
if (reader) {
|
|
5267
|
+
reader.resolve(event);
|
|
5268
|
+
}
|
|
5269
|
+
else {
|
|
5270
|
+
pushQueue.push(event);
|
|
5271
|
+
}
|
|
5272
|
+
});
|
|
5273
|
+
this.on('end', () => {
|
|
5274
|
+
done = true;
|
|
5275
|
+
for (const reader of readQueue) {
|
|
5276
|
+
reader.resolve(undefined);
|
|
5277
|
+
}
|
|
5278
|
+
readQueue.length = 0;
|
|
5279
|
+
});
|
|
5280
|
+
this.on('abort', (err) => {
|
|
5281
|
+
done = true;
|
|
5282
|
+
for (const reader of readQueue) {
|
|
5283
|
+
reader.reject(err);
|
|
5284
|
+
}
|
|
5285
|
+
readQueue.length = 0;
|
|
5286
|
+
});
|
|
5287
|
+
this.on('error', (err) => {
|
|
5288
|
+
done = true;
|
|
5289
|
+
for (const reader of readQueue) {
|
|
5290
|
+
reader.reject(err);
|
|
5291
|
+
}
|
|
5292
|
+
readQueue.length = 0;
|
|
5293
|
+
});
|
|
5294
|
+
return {
|
|
5295
|
+
next: async () => {
|
|
5296
|
+
if (!pushQueue.length) {
|
|
5297
|
+
if (done) {
|
|
5298
|
+
return { value: undefined, done: true };
|
|
5299
|
+
}
|
|
5300
|
+
return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((event) => (event ? { value: event, done: false } : { value: undefined, done: true }));
|
|
5301
|
+
}
|
|
5302
|
+
const event = pushQueue.shift();
|
|
5303
|
+
return { value: event, done: false };
|
|
5304
|
+
},
|
|
5305
|
+
return: async () => {
|
|
5306
|
+
this.abort();
|
|
5307
|
+
return { value: undefined, done: true };
|
|
5308
|
+
},
|
|
5309
|
+
};
|
|
5085
5310
|
}
|
|
5086
5311
|
/**
|
|
5087
|
-
*
|
|
5312
|
+
* @returns a promise that resolves with the final Response, or rejects
|
|
5313
|
+
* if an error occurred or the stream ended prematurely without producing a REsponse.
|
|
5088
5314
|
*/
|
|
5089
|
-
|
|
5090
|
-
|
|
5315
|
+
async finalResponse() {
|
|
5316
|
+
await this.done();
|
|
5317
|
+
const response = __classPrivateFieldGet(this, _ResponseStream_finalResponse, "f");
|
|
5318
|
+
if (!response)
|
|
5319
|
+
throw new OpenAIError('stream ended without producing a ChatCompletion');
|
|
5320
|
+
return response;
|
|
5321
|
+
}
|
|
5322
|
+
}
|
|
5323
|
+
function finalizeResponse(snapshot, params) {
|
|
5324
|
+
return maybeParseResponse(snapshot, params);
|
|
5325
|
+
}
|
|
5326
|
+
|
|
5327
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5328
|
+
class Responses extends APIResource {
|
|
5329
|
+
constructor() {
|
|
5330
|
+
super(...arguments);
|
|
5331
|
+
this.inputItems = new InputItems(this._client);
|
|
5332
|
+
}
|
|
5333
|
+
create(body, options) {
|
|
5334
|
+
return this._client.post('/responses', { body, ...options, stream: body.stream ?? false })._thenUnwrap((rsp) => {
|
|
5335
|
+
if ('object' in rsp && rsp.object === 'response') {
|
|
5336
|
+
addOutputText(rsp);
|
|
5337
|
+
}
|
|
5338
|
+
return rsp;
|
|
5339
|
+
});
|
|
5340
|
+
}
|
|
5341
|
+
retrieve(responseId, query = {}, options) {
|
|
5342
|
+
if (isRequestOptions(query)) {
|
|
5343
|
+
return this.retrieve(responseId, {}, query);
|
|
5344
|
+
}
|
|
5345
|
+
return this._client.get(`/responses/${responseId}`, { query, ...options });
|
|
5091
5346
|
}
|
|
5092
5347
|
/**
|
|
5093
|
-
*
|
|
5348
|
+
* Deletes a model response with the given ID.
|
|
5094
5349
|
*/
|
|
5095
|
-
|
|
5096
|
-
return this._client.
|
|
5350
|
+
del(responseId, options) {
|
|
5351
|
+
return this._client.delete(`/responses/${responseId}`, {
|
|
5097
5352
|
...options,
|
|
5098
|
-
headers: { Accept: '
|
|
5099
|
-
__binaryResponse: true,
|
|
5353
|
+
headers: { Accept: '*/*', ...options?.headers },
|
|
5100
5354
|
});
|
|
5101
5355
|
}
|
|
5102
|
-
|
|
5103
|
-
|
|
5104
|
-
|
|
5105
|
-
|
|
5106
|
-
*/
|
|
5107
|
-
retrieveContent(fileId, options) {
|
|
5108
|
-
return this._client.get(`/files/${fileId}/content`, options);
|
|
5356
|
+
parse(body, options) {
|
|
5357
|
+
return this._client.responses
|
|
5358
|
+
.create(body, options)
|
|
5359
|
+
._thenUnwrap((response) => parseResponse(response, body));
|
|
5109
5360
|
}
|
|
5110
5361
|
/**
|
|
5111
|
-
*
|
|
5362
|
+
* Creates a chat completion stream
|
|
5112
5363
|
*/
|
|
5113
|
-
|
|
5114
|
-
|
|
5115
|
-
const start = Date.now();
|
|
5116
|
-
let file = await this.retrieve(id);
|
|
5117
|
-
while (!file.status || !TERMINAL_STATES.has(file.status)) {
|
|
5118
|
-
await sleep(pollInterval);
|
|
5119
|
-
file = await this.retrieve(id);
|
|
5120
|
-
if (Date.now() - start > maxWait) {
|
|
5121
|
-
throw new APIConnectionTimeoutError({
|
|
5122
|
-
message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`,
|
|
5123
|
-
});
|
|
5124
|
-
}
|
|
5125
|
-
}
|
|
5126
|
-
return file;
|
|
5364
|
+
stream(body, options) {
|
|
5365
|
+
return ResponseStream.createResponse(this._client, body, options);
|
|
5127
5366
|
}
|
|
5128
5367
|
}
|
|
5129
|
-
class
|
|
5368
|
+
class ResponseItemsPage extends CursorPage {
|
|
5130
5369
|
}
|
|
5131
|
-
|
|
5370
|
+
Responses.InputItems = InputItems;
|
|
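Taken together, the new Responses resource wires up create/retrieve/del/parse/stream plus an InputItems sub-resource. A usage sketch under the assumption of an already-constructed `client`; the model and input values are placeholders:

// Sketch: non-streaming create, then retrieve and delete by id.
const created = await client.responses.create({ model: 'gpt-4o', input: 'Ping' });
const fetched = await client.responses.retrieve(created.id);
console.log(fetched.object); // 'response' — the branch above adds output_text for this case
await client.responses.del(created.id); // DELETE /responses/{id} with Accept: */*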
5132
5371
|
|
|
5133
5372
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5134
|
-
class
|
|
5135
|
-
|
|
5136
|
-
|
|
5137
|
-
|
|
5138
|
-
|
|
5139
|
-
|
|
5373
|
+
class Parts extends APIResource {
|
|
5374
|
+
/**
|
|
5375
|
+
* Adds a
|
|
5376
|
+
* [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
|
|
5377
|
+
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
|
|
5378
|
+
* A Part represents a chunk of bytes from the file you are trying to upload.
|
|
5379
|
+
*
|
|
5380
|
+
* Each Part can be at most 64 MB, and you can add Parts until you hit the Upload
|
|
5381
|
+
* maximum of 8 GB.
|
|
5382
|
+
*
|
|
5383
|
+
* It is possible to add multiple Parts in parallel. You can decide the intended
|
|
5384
|
+
* order of the Parts when you
|
|
5385
|
+
* [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).
|
|
5386
|
+
*/
|
|
5387
|
+
create(uploadId, body, options) {
|
|
5388
|
+
return this._client.post(`/uploads/${uploadId}/parts`, multipartFormRequestOptions({ body, ...options }));
|
|
5140
5389
|
}
|
|
5141
5390
|
}
|
|
5142
|
-
class FineTuningJobCheckpointsPage extends CursorPage {
|
|
5143
|
-
}
|
|
5144
|
-
Checkpoints.FineTuningJobCheckpointsPage = FineTuningJobCheckpointsPage;
|
|
5145
5391
|
|
|
5146
5392
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5147
|
-
class
|
|
5393
|
+
class Uploads extends APIResource {
|
|
5148
5394
|
constructor() {
|
|
5149
5395
|
super(...arguments);
|
|
5150
|
-
this.
|
|
5396
|
+
this.parts = new Parts(this._client);
|
|
5151
5397
|
}
|
|
5152
5398
|
/**
|
|
5153
|
-
* Creates
|
|
5154
|
-
*
|
|
5399
|
+
* Creates an intermediate
|
|
5400
|
+
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object
|
|
5401
|
+
* that you can add
|
|
5402
|
+
* [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to.
|
|
5403
|
+
* Currently, an Upload can accept at most 8 GB in total and expires after an hour
|
|
5404
|
+
* after you create it.
|
|
5155
5405
|
*
|
|
5156
|
-
*
|
|
5157
|
-
*
|
|
5406
|
+
* Once you complete the Upload, we will create a
|
|
5407
|
+
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
|
|
5408
|
+
* contains all the parts you uploaded. This File is usable in the rest of our
|
|
5409
|
+
* platform as a regular File object.
|
|
5158
5410
|
*
|
|
5159
|
-
*
|
|
5411
|
+
* For certain `purpose` values, the correct `mime_type` must be specified. Please
|
|
5412
|
+
* refer to documentation for the
|
|
5413
|
+
* [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files).
|
|
5414
|
+
*
|
|
5415
|
+
* For guidance on the proper filename extensions for each purpose, please follow
|
|
5416
|
+
* the documentation on
|
|
5417
|
+
* [creating a File](https://platform.openai.com/docs/api-reference/files/create).
|
|
5160
5418
|
*/
|
|
5161
5419
|
create(body, options) {
|
|
5162
|
-
return this._client.post('/
|
|
5420
|
+
return this._client.post('/uploads', { body, ...options });
|
|
5163
5421
|
}
|
|
5164
5422
|
/**
|
|
5165
|
-
*
|
|
5423
|
+
* Cancels the Upload. No Parts may be added after an Upload is cancelled.
|
|
5424
|
+
*/
|
|
5425
|
+
cancel(uploadId, options) {
|
|
5426
|
+
return this._client.post(`/uploads/${uploadId}/cancel`, options);
|
|
5427
|
+
}
|
|
5428
|
+
/**
|
|
5429
|
+
* Completes the
|
|
5430
|
+
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object).
|
|
5166
5431
|
*
|
|
5167
|
-
*
|
|
5432
|
+
* Within the returned Upload object, there is a nested
|
|
5433
|
+
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
|
|
5434
|
+
* is ready to use in the rest of the platform.
|
|
5435
|
+
*
|
|
5436
|
+
* You can specify the order of the Parts by passing in an ordered list of the Part
|
|
5437
|
+
* IDs.
|
|
5438
|
+
*
|
|
5439
|
+
* The number of bytes uploaded upon completion must match the number of bytes
|
|
5440
|
+
* initially specified when creating the Upload object. No Parts may be added after
|
|
5441
|
+
* an Upload is completed.
|
|
5168
5442
|
*/
|
|
5169
|
-
|
|
5170
|
-
return this._client.
|
|
5443
|
+
complete(uploadId, body, options) {
|
|
5444
|
+
return this._client.post(`/uploads/${uploadId}/complete`, { body, ...options });
|
|
5171
5445
|
}
|
|
5172
|
-
|
|
5173
|
-
|
|
5174
|
-
|
|
5446
|
+
}
|
|
5447
|
+
Uploads.Parts = Parts;
|
|
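The Parts and Uploads resources above implement the flow the doc comments describe: create an Upload, add Parts of at most 64 MB each (up to 8 GB total), then complete it with the ordered Part IDs. A rough sketch; the body fields (bytes, filename, mime_type, purpose, data, part_ids) come from the linked API reference rather than this diff and should be treated as assumptions:

// Sketch of the Upload → Parts → complete flow.
import fs from 'node:fs';
const stat = fs.statSync('./train.jsonl'); // illustrative local file
const upload = await client.uploads.create({
  bytes: stat.size,
  filename: 'train.jsonl',
  mime_type: 'text/jsonl',
  purpose: 'fine-tune',
});
const part = await client.uploads.parts.create(upload.id, {
  data: fs.createReadStream('./train.jsonl'),
});
await client.uploads.complete(upload.id, { part_ids: [part.id] });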
5448
|
+
|
|
5449
|
+
/**
|
|
5450
|
+
* Like `Promise.allSettled()` but throws an error if any promises are rejected.
|
|
5451
|
+
*/
|
|
5452
|
+
const allSettledWithThrow = async (promises) => {
|
|
5453
|
+
const results = await Promise.allSettled(promises);
|
|
5454
|
+
const rejected = results.filter((result) => result.status === 'rejected');
|
|
5455
|
+
if (rejected.length) {
|
|
5456
|
+
for (const result of rejected) {
|
|
5457
|
+
console.error(result.reason);
|
|
5458
|
+
}
|
|
5459
|
+
throw new Error(`${rejected.length} promise(s) failed - see the above errors`);
|
|
5460
|
+
}
|
|
5461
|
+
// Note: TS was complaining about using `.filter().map()` here for some reason
|
|
5462
|
+
const values = [];
|
|
5463
|
+
for (const result of results) {
|
|
5464
|
+
if (result.status === 'fulfilled') {
|
|
5465
|
+
values.push(result.value);
|
|
5175
5466
|
}
|
|
5176
|
-
return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options });
|
|
5177
5467
|
}
|
|
5468
|
+
return values;
|
|
5469
|
+
};
|
|
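The helper above behaves like Promise.allSettled() except that any rejection is logged and re-thrown as a single Error, while fulfilled values are returned in order. It is internal to the bundle, so the snippet below only illustrates the behavior:

// Sketch: with all-fulfilled input this resolves to [1, 2]; a single rejection
// would be logged and re-thrown as one Error instead of being swallowed.
const values = await allSettledWithThrow([Promise.resolve(1), Promise.resolve(2)]);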
5470
|
+
|
|
5471
|
+
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5472
|
+
class Files extends APIResource {
|
|
5178
5473
|
/**
|
|
5179
|
-
*
|
|
5474
|
+
* Create a vector store file by attaching a
|
|
5475
|
+
* [File](https://platform.openai.com/docs/api-reference/files) to a
|
|
5476
|
+
* [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).
|
|
5180
5477
|
*/
|
|
5181
|
-
|
|
5182
|
-
return this._client.post(`/
|
|
5478
|
+
create(vectorStoreId, body, options) {
|
|
5479
|
+
return this._client.post(`/vector_stores/${vectorStoreId}/files`, {
|
|
5480
|
+
body,
|
|
5481
|
+
...options,
|
|
5482
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5483
|
+
});
|
|
5183
5484
|
}
|
|
5184
|
-
|
|
5485
|
+
/**
|
|
5486
|
+
* Retrieves a vector store file.
|
|
5487
|
+
*/
|
|
5488
|
+
retrieve(vectorStoreId, fileId, options) {
|
|
5489
|
+
return this._client.get(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
|
|
5490
|
+
...options,
|
|
5491
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5492
|
+
});
|
|
5493
|
+
}
|
|
5494
|
+
/**
|
|
5495
|
+
* Update attributes on a vector store file.
|
|
5496
|
+
*/
|
|
5497
|
+
update(vectorStoreId, fileId, body, options) {
|
|
5498
|
+
return this._client.post(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
|
|
5499
|
+
body,
|
|
5500
|
+
...options,
|
|
5501
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5502
|
+
});
|
|
5503
|
+
}
|
|
5504
|
+
list(vectorStoreId, query = {}, options) {
|
|
5185
5505
|
if (isRequestOptions(query)) {
|
|
5186
|
-
return this.
|
|
5506
|
+
return this.list(vectorStoreId, {}, query);
|
|
5187
5507
|
}
|
|
5188
|
-
return this._client.getAPIList(`/
|
|
5508
|
+
return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files`, VectorStoreFilesPage, {
|
|
5189
5509
|
query,
|
|
5190
5510
|
...options,
|
|
5511
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5191
5512
|
});
|
|
5192
5513
|
}
|
|
5193
|
-
|
|
5194
|
-
|
|
5195
|
-
|
|
5196
|
-
|
|
5197
|
-
|
|
5198
|
-
|
|
5199
|
-
|
|
5200
|
-
|
|
5201
|
-
|
|
5202
|
-
|
|
5203
|
-
|
|
5204
|
-
class FineTuning extends APIResource {
|
|
5205
|
-
constructor() {
|
|
5206
|
-
super(...arguments);
|
|
5207
|
-
this.jobs = new Jobs(this._client);
|
|
5514
|
+
/**
|
|
5515
|
+
* Delete a vector store file. This will remove the file from the vector store but
|
|
5516
|
+
* the file itself will not be deleted. To delete the file, use the
|
|
5517
|
+
* [delete file](https://platform.openai.com/docs/api-reference/files/delete)
|
|
5518
|
+
* endpoint.
|
|
5519
|
+
*/
|
|
5520
|
+
del(vectorStoreId, fileId, options) {
|
|
5521
|
+
return this._client.delete(`/vector_stores/${vectorStoreId}/files/${fileId}`, {
|
|
5522
|
+
...options,
|
|
5523
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5524
|
+
});
|
|
5208
5525
|
}
|
|
5209
|
-
}
|
|
5210
|
-
FineTuning.Jobs = Jobs;
|
|
5211
|
-
FineTuning.FineTuningJobsPage = FineTuningJobsPage;
|
|
5212
|
-
FineTuning.FineTuningJobEventsPage = FineTuningJobEventsPage;
|
|
5213
|
-
|
|
5214
|
-
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5215
|
-
class Images extends APIResource {
|
|
5216
5526
|
/**
|
|
5217
|
-
*
|
|
5527
|
+
* Attach a file to the given vector store and wait for it to be processed.
|
|
5528
|
+
*/
|
|
5529
|
+
async createAndPoll(vectorStoreId, body, options) {
|
|
5530
|
+
const file = await this.create(vectorStoreId, body, options);
|
|
5531
|
+
return await this.poll(vectorStoreId, file.id, options);
|
|
5532
|
+
}
|
|
5533
|
+
/**
|
|
5534
|
+
* Wait for the vector store file to finish processing.
|
|
5535
|
+
*
|
|
5536
|
+
* Note: this will return even if the file failed to process, you need to check
|
|
5537
|
+
* file.last_error and file.status to handle these cases
|
|
5538
|
+
*/
|
|
5539
|
+
async poll(vectorStoreId, fileId, options) {
|
|
5540
|
+
const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
|
|
5541
|
+
if (options?.pollIntervalMs) {
|
|
5542
|
+
headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
|
|
5543
|
+
}
|
|
5544
|
+
while (true) {
|
|
5545
|
+
const fileResponse = await this.retrieve(vectorStoreId, fileId, {
|
|
5546
|
+
...options,
|
|
5547
|
+
headers,
|
|
5548
|
+
}).withResponse();
|
|
5549
|
+
const file = fileResponse.data;
|
|
5550
|
+
switch (file.status) {
|
|
5551
|
+
case 'in_progress':
|
|
5552
|
+
let sleepInterval = 5000;
|
|
5553
|
+
if (options?.pollIntervalMs) {
|
|
5554
|
+
sleepInterval = options.pollIntervalMs;
|
|
5555
|
+
}
|
|
5556
|
+
else {
|
|
5557
|
+
const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms');
|
|
5558
|
+
if (headerInterval) {
|
|
5559
|
+
const headerIntervalMs = parseInt(headerInterval);
|
|
5560
|
+
if (!isNaN(headerIntervalMs)) {
|
|
5561
|
+
sleepInterval = headerIntervalMs;
|
|
5562
|
+
}
|
|
5563
|
+
}
|
|
5564
|
+
}
|
|
5565
|
+
await sleep(sleepInterval);
|
|
5566
|
+
break;
|
|
5567
|
+
case 'failed':
|
|
5568
|
+
case 'completed':
|
|
5569
|
+
return file;
|
|
5570
|
+
}
|
|
5571
|
+
}
|
|
5572
|
+
}
|
|
5573
|
+
/**
|
|
5574
|
+
* Upload a file to the `files` API and then attach it to the given vector store.
|
|
5575
|
+
*
|
|
5576
|
+
* Note the file will be asynchronously processed (you can use the alternative
|
|
5577
|
+
* polling helper method to wait for processing to complete).
|
|
5218
5578
|
*/
|
|
5219
|
-
|
|
5220
|
-
|
|
5579
|
+
async upload(vectorStoreId, file, options) {
|
|
5580
|
+
const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options);
|
|
5581
|
+
return this.create(vectorStoreId, { file_id: fileInfo.id }, options);
|
|
5221
5582
|
}
|
|
5222
5583
|
/**
|
|
5223
|
-
*
|
|
5584
|
+
* Add a file to a vector store and poll until processing is complete.
|
|
5224
5585
|
*/
|
|
5225
|
-
|
|
5226
|
-
|
|
5586
|
+
async uploadAndPoll(vectorStoreId, file, options) {
|
|
5587
|
+
const fileInfo = await this.upload(vectorStoreId, file, options);
|
|
5588
|
+
return await this.poll(vectorStoreId, fileInfo.id, options);
|
|
5227
5589
|
}
|
|
5228
5590
|
/**
|
|
5229
|
-
*
|
|
5591
|
+
* Retrieve the parsed contents of a vector store file.
|
|
5230
5592
|
*/
|
|
5231
|
-
|
|
5232
|
-
return this._client.
|
|
5593
|
+
content(vectorStoreId, fileId, options) {
|
|
5594
|
+
return this._client.getAPIList(`/vector_stores/${vectorStoreId}/files/${fileId}/content`, FileContentResponsesPage, { ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } });
|
|
5233
5595
|
}
|
|
5234
5596
|
}
|
|
5597
|
+
class VectorStoreFilesPage extends CursorPage {
|
|
5598
|
+
}
|
|
5599
|
+
/**
|
|
5600
|
+
* Note: no pagination actually occurs yet, this is for forwards-compatibility.
|
|
5601
|
+
*/
|
|
5602
|
+
class FileContentResponsesPage extends Page {
|
|
5603
|
+
}
|
|
5604
|
+
Files.VectorStoreFilesPage = VectorStoreFilesPage;
|
|
5605
|
+
Files.FileContentResponsesPage = FileContentResponsesPage;
|
|
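The Files resource above covers attach/retrieve/update/list/delete for vector store files plus polling helpers: poll() re-reads the file until a terminal status, honoring the openai-poll-after-ms response header or an explicit pollIntervalMs. A sketch, assuming an existing `vectorStoreId` and an illustrative local file path:

// Sketch: upload a local file to the files API, attach it, and wait for processing.
import fs from 'node:fs';
const file = await client.vectorStores.files.uploadAndPoll(
  vectorStoreId,
  fs.createReadStream('./handbook.pdf'),
  { pollIntervalMs: 2000 },
);
if (file.status !== 'completed') {
  console.error('processing failed:', file.last_error); // see the note on failed files above
}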
5235
5606
|
|
|
5236
5607
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5237
|
-
class
|
|
5608
|
+
class FileBatches extends APIResource {
|
|
5238
5609
|
/**
|
|
5239
|
-
*
|
|
5240
|
-
* the owner and permissioning.
|
|
5610
|
+
* Create a vector store file batch.
|
|
5241
5611
|
*/
|
|
5242
|
-
|
|
5243
|
-
return this._client.
|
|
5612
|
+
create(vectorStoreId, body, options) {
|
|
5613
|
+
return this._client.post(`/vector_stores/${vectorStoreId}/file_batches`, {
|
|
5614
|
+
body,
|
|
5615
|
+
...options,
|
|
5616
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5617
|
+
});
|
|
5244
5618
|
}
|
|
5245
5619
|
/**
|
|
5246
|
-
*
|
|
5247
|
-
* one such as the owner and availability.
|
|
5620
|
+
* Retrieves a vector store file batch.
|
|
5248
5621
|
*/
|
|
5249
|
-
|
|
5250
|
-
return this._client.
|
|
5622
|
+
retrieve(vectorStoreId, batchId, options) {
|
|
5623
|
+
return this._client.get(`/vector_stores/${vectorStoreId}/file_batches/${batchId}`, {
|
|
5624
|
+
...options,
|
|
5625
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5626
|
+
});
|
|
5251
5627
|
}
|
|
5252
5628
|
/**
|
|
5253
|
-
*
|
|
5254
|
-
*
|
|
5629
|
+
* Cancel a vector store file batch. This attempts to cancel the processing of
|
|
5630
|
+
* files in this batch as soon as possible.
|
|
5255
5631
|
*/
|
|
5256
|
-
|
|
5257
|
-
return this._client.
|
|
5632
|
+
cancel(vectorStoreId, batchId, options) {
|
|
5633
|
+
return this._client.post(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/cancel`, {
|
|
5634
|
+
...options,
|
|
5635
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5636
|
+
});
|
|
5258
5637
|
}
|
|
5259
|
-
}
|
|
5260
|
-
/**
|
|
5261
|
-
* Note: no pagination actually occurs yet, this is for forwards-compatibility.
|
|
5262
|
-
*/
|
|
5263
|
-
class ModelsPage extends Page {
|
|
5264
|
-
}
|
|
5265
|
-
Models.ModelsPage = ModelsPage;
|
|
5266
|
-
|
|
5267
|
-
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5268
|
-
class Moderations extends APIResource {
|
|
5269
5638
|
/**
|
|
5270
|
-
*
|
|
5271
|
-
* the [moderation guide](https://platform.openai.com/docs/guides/moderation).
|
|
5639
|
+
* Create a vector store batch and poll until all files have been processed.
|
|
5272
5640
|
*/
|
|
5273
|
-
|
|
5274
|
-
|
|
5641
|
+
async createAndPoll(vectorStoreId, body, options) {
|
|
5642
|
+
const batch = await this.create(vectorStoreId, body);
|
|
5643
|
+
return await this.poll(vectorStoreId, batch.id, options);
|
|
5644
|
+
}
|
|
5645
|
+
listFiles(vectorStoreId, batchId, query = {}, options) {
|
|
5646
|
+
if (isRequestOptions(query)) {
|
|
5647
|
+
return this.listFiles(vectorStoreId, batchId, {}, query);
|
|
5648
|
+
}
|
|
5649
|
+
return this._client.getAPIList(`/vector_stores/${vectorStoreId}/file_batches/${batchId}/files`, VectorStoreFilesPage, { query, ...options, headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers } });
|
|
5275
5650
|
}
|
|
5276
|
-
}
|
|
5277
|
-
|
|
5278
|
-
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5279
|
-
class Parts extends APIResource {
|
|
5280
5651
|
/**
|
|
5281
|
-
*
|
|
5282
|
-
* [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
|
|
5283
|
-
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
|
|
5284
|
-
* A Part represents a chunk of bytes from the file you are trying to upload.
|
|
5652
|
+
* Wait for the given file batch to be processed.
|
|
5285
5653
|
*
|
|
5286
|
-
*
|
|
5287
|
-
*
|
|
5654
|
+
* Note: this will return even if one of the files failed to process, you need to
|
|
5655
|
+
* check batch.file_counts.failed_count to handle this case.
|
|
5656
|
+
*/
|
|
5657
|
+
async poll(vectorStoreId, batchId, options) {
|
|
5658
|
+
const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
|
|
5659
|
+
if (options?.pollIntervalMs) {
|
|
5660
|
+
headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
|
|
5661
|
+
}
|
|
5662
|
+
while (true) {
|
|
5663
|
+
const { data: batch, response } = await this.retrieve(vectorStoreId, batchId, {
|
|
5664
|
+
...options,
|
|
5665
|
+
headers,
|
|
5666
|
+
}).withResponse();
|
|
5667
|
+
switch (batch.status) {
|
|
5668
|
+
case 'in_progress':
|
|
5669
|
+
let sleepInterval = 5000;
|
|
5670
|
+
if (options?.pollIntervalMs) {
|
|
5671
|
+
sleepInterval = options.pollIntervalMs;
|
|
5672
|
+
}
|
|
5673
|
+
else {
|
|
5674
|
+
const headerInterval = response.headers.get('openai-poll-after-ms');
|
|
5675
|
+
if (headerInterval) {
|
|
5676
|
+
const headerIntervalMs = parseInt(headerInterval);
|
|
5677
|
+
if (!isNaN(headerIntervalMs)) {
|
|
5678
|
+
sleepInterval = headerIntervalMs;
|
|
5679
|
+
}
|
|
5680
|
+
}
|
|
5681
|
+
}
|
|
5682
|
+
await sleep(sleepInterval);
|
|
5683
|
+
break;
|
|
5684
|
+
case 'failed':
|
|
5685
|
+
case 'cancelled':
|
|
5686
|
+
case 'completed':
|
|
5687
|
+
return batch;
|
|
5688
|
+
}
|
|
5689
|
+
}
|
|
5690
|
+
}
|
|
5691
|
+
/**
|
|
5692
|
+
* Uploads the given files concurrently and then creates a vector store file batch.
|
|
5288
5693
|
*
|
|
5289
|
-
*
|
|
5290
|
-
* order of the Parts when you
|
|
5291
|
-
* [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).
|
|
5694
|
+
* The concurrency limit is configurable using the `maxConcurrency` parameter.
|
|
5292
5695
|
*/
|
|
5293
|
-
|
|
5294
|
-
|
|
5696
|
+
async uploadAndPoll(vectorStoreId, { files, fileIds = [] }, options) {
|
|
5697
|
+
if (files == null || files.length == 0) {
|
|
5698
|
+
throw new Error(`No \`files\` provided to process. If you've already uploaded files you should use \`.createAndPoll()\` instead`);
|
|
5699
|
+
}
|
|
5700
|
+
const configuredConcurrency = options?.maxConcurrency ?? 5;
|
|
5701
|
+
// We cap the number of workers at the number of files (so we don't start any unnecessary workers)
|
|
5702
|
+
const concurrencyLimit = Math.min(configuredConcurrency, files.length);
|
|
5703
|
+
const client = this._client;
|
|
5704
|
+
const fileIterator = files.values();
|
|
5705
|
+
const allFileIds = [...fileIds];
|
|
5706
|
+
// This code is based on this design. The libraries don't accommodate our environment limits.
|
|
5707
|
+
// https://stackoverflow.com/questions/40639432/what-is-the-best-way-to-limit-concurrency-when-using-es6s-promise-all
|
|
5708
|
+
async function processFiles(iterator) {
|
|
5709
|
+
for (let item of iterator) {
|
|
5710
|
+
const fileObj = await client.files.create({ file: item, purpose: 'assistants' }, options);
|
|
5711
|
+
allFileIds.push(fileObj.id);
|
|
5712
|
+
}
|
|
5713
|
+
}
|
|
5714
|
+
// Start workers to process results
|
|
5715
|
+
const workers = Array(concurrencyLimit).fill(fileIterator).map(processFiles);
|
|
5716
|
+
// Wait for all processing to complete.
|
|
5717
|
+
await allSettledWithThrow(workers);
|
|
5718
|
+
return await this.createAndPoll(vectorStoreId, {
|
|
5719
|
+
file_ids: allFileIds,
|
|
5720
|
+
});
|
|
5295
5721
|
}
|
|
5296
5722
|
}
|
|
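FileBatches.uploadAndPoll() above fans the individual file uploads out across a bounded worker pool (maxConcurrency, default 5, capped at the number of files), then creates the batch from the collected ids and polls it to completion. A sketch with placeholder paths and an assumed `vectorStoreId`:

// Sketch: upload several files concurrently, then wait for the batch to finish.
import fs from 'node:fs';
const batch = await client.vectorStores.fileBatches.uploadAndPoll(
  vectorStoreId,
  { files: [fs.createReadStream('./a.md'), fs.createReadStream('./b.md')] },
  { maxConcurrency: 2 },
);
console.log(batch.status, batch.file_counts); // check the failed count per the note above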
5297
5723
|
|
|
5298
5724
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5299
|
-
class
|
|
5725
|
+
class VectorStores extends APIResource {
|
|
5300
5726
|
constructor() {
|
|
5301
5727
|
super(...arguments);
|
|
5302
|
-
this.
|
|
5728
|
+
this.files = new Files(this._client);
|
|
5729
|
+
this.fileBatches = new FileBatches(this._client);
|
|
5303
5730
|
}
|
|
5304
5731
|
/**
|
|
5305
|
-
*
|
|
5306
|
-
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object
|
|
5307
|
-
* that you can add
|
|
5308
|
-
* [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to.
|
|
5309
|
-
* Currently, an Upload can accept at most 8 GB in total and expires after an hour
|
|
5310
|
-
* after you create it.
|
|
5311
|
-
*
|
|
5312
|
-
* Once you complete the Upload, we will create a
|
|
5313
|
-
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
|
|
5314
|
-
* contains all the parts you uploaded. This File is usable in the rest of our
|
|
5315
|
-
* platform as a regular File object.
|
|
5316
|
-
*
|
|
5317
|
-
* For certain `purpose`s, the correct `mime_type` must be specified. Please refer
|
|
5318
|
-
* to documentation for the supported MIME types for your use case:
|
|
5319
|
-
*
|
|
5320
|
-
* - [Assistants](https://platform.openai.com/docs/assistants/tools/file-search#supported-files)
|
|
5321
|
-
*
|
|
5322
|
-
* For guidance on the proper filename extensions for each purpose, please follow
|
|
5323
|
-
* the documentation on
|
|
5324
|
-
* [creating a File](https://platform.openai.com/docs/api-reference/files/create).
|
|
5732
|
+
* Create a vector store.
|
|
5325
5733
|
*/
|
|
5326
5734
|
create(body, options) {
|
|
5327
|
-
return this._client.post('/
|
|
5735
|
+
return this._client.post('/vector_stores', {
|
|
5736
|
+
body,
|
|
5737
|
+
...options,
|
|
5738
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5739
|
+
});
|
|
5328
5740
|
}
|
|
5329
5741
|
/**
|
|
5330
|
-
*
|
|
5742
|
+
* Retrieves a vector store.
|
|
5331
5743
|
*/
|
|
5332
|
-
|
|
5333
|
-
return this._client.
|
|
5744
|
+
retrieve(vectorStoreId, options) {
|
|
5745
|
+
return this._client.get(`/vector_stores/${vectorStoreId}`, {
|
|
5746
|
+
...options,
|
|
5747
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5748
|
+
});
|
|
5334
5749
|
}
|
|
5335
5750
|
/**
|
|
5336
|
-
*
|
|
5337
|
-
* [Upload](https://platform.openai.com/docs/api-reference/uploads/object).
|
|
5338
|
-
*
|
|
5339
|
-
* Within the returned Upload object, there is a nested
|
|
5340
|
-
* [File](https://platform.openai.com/docs/api-reference/files/object) object that
|
|
5341
|
-
* is ready to use in the rest of the platform.
|
|
5342
|
-
*
|
|
5343
|
-
* You can specify the order of the Parts by passing in an ordered list of the Part
|
|
5344
|
-
* IDs.
|
|
5345
|
-
*
|
|
5346
|
-
* The number of bytes uploaded upon completion must match the number of bytes
|
|
5347
|
-
* initially specified when creating the Upload object. No Parts may be added after
|
|
5348
|
-
* an Upload is completed.
|
|
5751
|
+
* Modifies a vector store.
|
|
5349
5752
|
*/
|
|
5350
|
-
|
|
5351
|
-
return this._client.post(`/
|
|
5753
|
+
update(vectorStoreId, body, options) {
|
|
5754
|
+
return this._client.post(`/vector_stores/${vectorStoreId}`, {
|
|
5755
|
+
body,
|
|
5756
|
+
...options,
|
|
5757
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5758
|
+
});
|
|
5759
|
+
}
|
|
5760
|
+
list(query = {}, options) {
|
|
5761
|
+
if (isRequestOptions(query)) {
|
|
5762
|
+
return this.list({}, query);
|
|
5763
|
+
}
|
|
5764
|
+
return this._client.getAPIList('/vector_stores', VectorStoresPage, {
|
|
5765
|
+
query,
|
|
5766
|
+
...options,
|
|
5767
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5768
|
+
});
|
|
5769
|
+
}
|
|
5770
|
+
/**
|
|
5771
|
+
* Delete a vector store.
|
|
5772
|
+
*/
|
|
5773
|
+
del(vectorStoreId, options) {
|
|
5774
|
+
return this._client.delete(`/vector_stores/${vectorStoreId}`, {
|
|
5775
|
+
...options,
|
|
5776
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5777
|
+
});
|
|
5778
|
+
}
|
|
5779
|
+
/**
|
|
5780
|
+
* Search a vector store for relevant chunks based on a query and file attributes
|
|
5781
|
+
* filter.
|
|
5782
|
+
*/
|
|
5783
|
+
search(vectorStoreId, body, options) {
|
|
5784
|
+
return this._client.getAPIList(`/vector_stores/${vectorStoreId}/search`, VectorStoreSearchResponsesPage, {
|
|
5785
|
+
body,
|
|
5786
|
+
method: 'post',
|
|
5787
|
+
...options,
|
|
5788
|
+
headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
|
|
5789
|
+
});
|
|
5352
5790
|
}
|
|
5353
5791
|
}
|
|
5354
|
-
|
|
5792
|
+
class VectorStoresPage extends CursorPage {
|
|
5793
|
+
}
|
|
5794
|
+
/**
|
|
5795
|
+
* Note: no pagination actually occurs yet, this is for forwards-compatibility.
|
|
5796
|
+
*/
|
|
5797
|
+
class VectorStoreSearchResponsesPage extends Page {
|
|
5798
|
+
}
|
|
5799
|
+
VectorStores.VectorStoresPage = VectorStoresPage;
|
|
5800
|
+
VectorStores.VectorStoreSearchResponsesPage = VectorStoreSearchResponsesPage;
|
|
5801
|
+
VectorStores.Files = Files;
|
|
5802
|
+
VectorStores.VectorStoreFilesPage = VectorStoreFilesPage;
|
|
5803
|
+
VectorStores.FileContentResponsesPage = FileContentResponsesPage;
|
|
5804
|
+
VectorStores.FileBatches = FileBatches;
|
|
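With the wiring above, vector stores themselves get create/retrieve/update/list/delete plus a POST-backed search that pages through VectorStoreSearchResponsesPage. A sketch; the `name` and `query` fields follow the linked API reference and are assumptions as far as this diff goes:

// Sketch: create a store, then run a search against it.
const store = await client.vectorStores.create({ name: 'support-docs' });
for await (const hit of client.vectorStores.search(store.id, { query: 'refund policy' })) {
  console.log(hit); // one entry per matching chunk
}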
5355
5805
|
|
|
5356
5806
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
5357
5807
|
var _a;
|
|
@@ -5398,15 +5848,17 @@ class OpenAI extends APIClient {
|
|
|
5398
5848
|
this.completions = new Completions(this);
|
|
5399
5849
|
this.chat = new Chat$1(this);
|
|
5400
5850
|
this.embeddings = new Embeddings(this);
|
|
5401
|
-
this.files = new Files(this);
|
|
5851
|
+
this.files = new Files$1(this);
|
|
5402
5852
|
this.images = new Images(this);
|
|
5403
5853
|
this.audio = new Audio(this);
|
|
5404
5854
|
this.moderations = new Moderations(this);
|
|
5405
5855
|
this.models = new Models(this);
|
|
5406
5856
|
this.fineTuning = new FineTuning(this);
|
|
5857
|
+
this.vectorStores = new VectorStores(this);
|
|
5407
5858
|
this.beta = new Beta(this);
|
|
5408
5859
|
this.batches = new Batches(this);
|
|
5409
5860
|
this.uploads = new Uploads(this);
|
|
5861
|
+
this.responses = new Responses(this);
|
|
5410
5862
|
this._options = options;
|
|
5411
5863
|
this.apiKey = apiKey;
|
|
5412
5864
|
this.organization = organization;
|
|
@@ -5452,7 +5904,7 @@ OpenAI.Completions = Completions;
|
|
|
5452
5904
|
OpenAI.Chat = Chat$1;
|
|
5453
5905
|
OpenAI.ChatCompletionsPage = ChatCompletionsPage;
|
|
5454
5906
|
OpenAI.Embeddings = Embeddings;
|
|
5455
|
-
OpenAI.Files = Files;
|
|
5907
|
+
OpenAI.Files = Files$1;
|
|
5456
5908
|
OpenAI.FileObjectsPage = FileObjectsPage;
|
|
5457
5909
|
OpenAI.Images = Images;
|
|
5458
5910
|
OpenAI.Audio = Audio;
|
|
@@ -5460,10 +5912,14 @@ OpenAI.Moderations = Moderations;
|
|
|
5460
5912
|
OpenAI.Models = Models;
|
|
5461
5913
|
OpenAI.ModelsPage = ModelsPage;
|
|
5462
5914
|
OpenAI.FineTuning = FineTuning;
|
|
5915
|
+
OpenAI.VectorStores = VectorStores;
|
|
5916
|
+
OpenAI.VectorStoresPage = VectorStoresPage;
|
|
5917
|
+
OpenAI.VectorStoreSearchResponsesPage = VectorStoreSearchResponsesPage;
|
|
5463
5918
|
OpenAI.Beta = Beta;
|
|
5464
5919
|
OpenAI.Batches = Batches;
|
|
5465
5920
|
OpenAI.BatchesPage = BatchesPage;
|
|
5466
5921
|
OpenAI.Uploads = Uploads;
|
|
5922
|
+
OpenAI.Responses = Responses;
|
|
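The constructor and static-property changes above are what surface the new resources on the bundled client: `files` now points at the renamed Files$1, and `vectorStores` / `responses` sit alongside the existing namespaces. A small sketch of the resulting surface; API-key handling is an assumption:

// Sketch: the upgraded bundle exposes the new namespaces directly on the client.
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
console.log(typeof client.vectorStores.create); // 'function'
console.log(typeof client.responses.stream);    // 'function'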
5467
5923
|
|
|
5468
5924
|
class QueryAI {
|
|
5469
5925
|
openai;
|