ai 3.1.0-canary.3 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +1 -1
  2. package/dist/index.d.mts +982 -24
  3. package/dist/index.d.ts +982 -24
  4. package/dist/index.js +1748 -175
  5. package/dist/index.js.map +1 -1
  6. package/dist/index.mjs +1723 -174
  7. package/dist/index.mjs.map +1 -1
  8. package/package.json +14 -31
  9. package/prompts/dist/index.d.mts +13 -1
  10. package/prompts/dist/index.d.ts +13 -1
  11. package/prompts/dist/index.js +13 -0
  12. package/prompts/dist/index.js.map +1 -1
  13. package/prompts/dist/index.mjs +12 -0
  14. package/prompts/dist/index.mjs.map +1 -1
  15. package/react/dist/index.d.mts +27 -6
  16. package/react/dist/index.d.ts +31 -8
  17. package/react/dist/index.js +155 -141
  18. package/react/dist/index.js.map +1 -1
  19. package/react/dist/index.mjs +154 -141
  20. package/react/dist/index.mjs.map +1 -1
  21. package/react/dist/index.server.d.mts +4 -2
  22. package/react/dist/index.server.d.ts +4 -2
  23. package/react/dist/index.server.js.map +1 -1
  24. package/react/dist/index.server.mjs.map +1 -1
  25. package/rsc/dist/index.d.ts +385 -20
  26. package/rsc/dist/rsc-client.d.mts +1 -1
  27. package/rsc/dist/rsc-client.mjs +2 -0
  28. package/rsc/dist/rsc-client.mjs.map +1 -1
  29. package/rsc/dist/rsc-server.d.mts +367 -20
  30. package/rsc/dist/rsc-server.mjs +676 -35
  31. package/rsc/dist/rsc-server.mjs.map +1 -1
  32. package/rsc/dist/rsc-shared.d.mts +24 -9
  33. package/rsc/dist/rsc-shared.mjs +98 -4
  34. package/rsc/dist/rsc-shared.mjs.map +1 -1
  35. package/solid/dist/index.d.mts +7 -3
  36. package/solid/dist/index.d.ts +7 -3
  37. package/solid/dist/index.js +106 -107
  38. package/solid/dist/index.js.map +1 -1
  39. package/solid/dist/index.mjs +106 -107
  40. package/solid/dist/index.mjs.map +1 -1
  41. package/svelte/dist/index.d.mts +7 -3
  42. package/svelte/dist/index.d.ts +7 -3
  43. package/svelte/dist/index.js +109 -109
  44. package/svelte/dist/index.js.map +1 -1
  45. package/svelte/dist/index.mjs +109 -109
  46. package/svelte/dist/index.mjs.map +1 -1
  47. package/vue/dist/index.d.mts +7 -3
  48. package/vue/dist/index.d.ts +7 -3
  49. package/vue/dist/index.js +106 -107
  50. package/vue/dist/index.js.map +1 -1
  51. package/vue/dist/index.mjs +106 -107
  52. package/vue/dist/index.mjs.map +1 -1
  53. package/ai-model-specification/dist/index.d.mts +0 -606
  54. package/ai-model-specification/dist/index.d.ts +0 -606
  55. package/ai-model-specification/dist/index.js +0 -617
  56. package/ai-model-specification/dist/index.js.map +0 -1
  57. package/ai-model-specification/dist/index.mjs +0 -560
  58. package/ai-model-specification/dist/index.mjs.map +0 -1
  59. package/core/dist/index.d.mts +0 -590
  60. package/core/dist/index.d.ts +0 -590
  61. package/core/dist/index.js +0 -1528
  62. package/core/dist/index.js.map +0 -1
  63. package/core/dist/index.mjs +0 -1481
  64. package/core/dist/index.mjs.map +0 -1
  65. package/provider/dist/index.d.mts +0 -429
  66. package/provider/dist/index.d.ts +0 -429
  67. package/provider/dist/index.js +0 -1194
  68. package/provider/dist/index.js.map +0 -1
  69. package/provider/dist/index.mjs +0 -1158
  70. package/provider/dist/index.mjs.map +0 -1
@@ -1,1158 +0,0 @@
- // ai-model-specification/errors/api-call-error.ts
- var APICallError = class extends Error {
-   constructor({
-     message,
-     url,
-     requestBodyValues,
-     statusCode,
-     responseBody,
-     cause,
-     isRetryable = statusCode != null && (statusCode === 408 || // request timeout
-     statusCode === 409 || // conflict
-     statusCode === 429 || // too many requests
-     statusCode >= 500),
-     // server error
-     data
-   }) {
-     super(message);
-     this.name = "ApiCallError";
-     this.url = url;
-     this.requestBodyValues = requestBodyValues;
-     this.statusCode = statusCode;
-     this.responseBody = responseBody;
-     this.cause = cause;
-     this.isRetryable = isRetryable;
-     this.data = data;
-   }
-   toJSON() {
-     return {
-       name: this.name,
-       message: this.message,
-       url: this.url,
-       requestBodyValues: this.requestBodyValues,
-       statusCode: this.statusCode,
-       responseBody: this.responseBody,
-       cause: this.cause,
-       isRetryable: this.isRetryable,
-       data: this.data
-     };
-   }
- };
-
- // ai-model-specification/util/get-error-message.ts
- function getErrorMessage(error) {
-   if (error == null) {
-     return "unknown error";
-   }
-   if (typeof error === "string") {
-     return error;
-   }
-   if (error instanceof Error) {
-     return error.message;
-   }
-   return JSON.stringify(error);
- }
-
- // ai-model-specification/errors/json-parse-error.ts
- var JSONParseError = class extends Error {
-   constructor({ text, cause }) {
-     super(
-       `JSON parsing failed: Text: ${text}.
- Error message: ${getErrorMessage(cause)}`
-     );
-     this.name = "JSONParseError";
-     this.cause = cause;
-     this.text = text;
-   }
-   toJSON() {
-     return {
-       name: this.name,
-       message: this.message,
-       cause: this.cause,
-       stack: this.stack,
-       valueText: this.text
-     };
-   }
- };
-
- // ai-model-specification/errors/load-api-key-error.ts
- var LoadAPIKeyError = class extends Error {
-   constructor({ message }) {
-     super(message);
-     this.name = "LoadAPIKeyError";
-   }
-   toJSON() {
-     return {
-       name: this.name,
-       message: this.message
-     };
-   }
- };
-
- // ai-model-specification/errors/type-validation-error.ts
- var TypeValidationError = class extends Error {
-   constructor({ value, cause }) {
-     super(
-       `Type validation failed: Value: ${JSON.stringify(value)}.
- Error message: ${getErrorMessage(cause)}`
-     );
-     this.name = "TypeValidationError";
-     this.cause = cause;
-     this.value = value;
-   }
-   toJSON() {
-     return {
-       name: this.name,
-       message: this.message,
-       cause: this.cause,
-       stack: this.stack,
-       value: this.value
-     };
-   }
- };
-
- // ai-model-specification/errors/unsupported-functionality-error.ts
- var UnsupportedFunctionalityError = class extends Error {
-   constructor({
-     provider,
-     functionality
-   }) {
-     super(
-       `Functionality not supported by the provider. Provider: ${provider}.
- Functionality: ${functionality}`
-     );
-     this.name = "UnsupportedFunctionalityError";
-     this.provider = provider;
-     this.functionality = functionality;
-   }
-   toJSON() {
-     return {
-       name: this.name,
-       message: this.message,
-       stack: this.stack,
-       provider: this.provider,
-       functionality: this.functionality
-     };
-   }
- };
-
- // ai-model-specification/util/load-api-key.ts
- function loadApiKey({
-   apiKey,
-   environmentVariableName,
-   apiKeyParameterName = "apiKey",
-   description
- }) {
-   if (apiKey != null) {
-     return apiKey;
-   }
-   if (typeof process === "undefined") {
-     throw new LoadAPIKeyError({
-       message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
-     });
-   }
-   apiKey = process.env[environmentVariableName];
-   if (apiKey == null) {
-     throw new LoadAPIKeyError({
-       message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
-     });
-   }
-   return apiKey;
- }
-
- // ai-model-specification/util/parse-json.ts
- import SecureJSON from "secure-json-parse";
-
- // ai-model-specification/util/validate-types.ts
- function validateTypes({
-   value,
-   schema
- }) {
-   try {
-     return schema.parse(value);
-   } catch (error) {
-     throw new TypeValidationError({ value, cause: error });
-   }
- }
- function safeValidateTypes({
-   value,
-   schema
- }) {
-   try {
-     const validationResult = schema.safeParse(value);
-     if (validationResult.success) {
-       return {
-         success: true,
-         value: validationResult.data
-       };
-     }
-     return {
-       success: false,
-       error: new TypeValidationError({
-         value,
-         cause: validationResult.error
-       })
-     };
-   } catch (error) {
-     return {
-       success: false,
-       error: error instanceof TypeValidationError ? error : new TypeValidationError({ value, cause: error })
-     };
-   }
- }
-
- // ai-model-specification/util/parse-json.ts
- function parseJSON({
-   text,
-   schema
- }) {
-   try {
-     const value = SecureJSON.parse(text);
-     if (schema == null) {
-       return value;
-     }
-     return validateTypes({ value, schema });
-   } catch (error) {
-     if (error instanceof JSONParseError || error instanceof TypeValidationError) {
-       throw error;
-     }
-     throw new JSONParseError({ text, cause: error });
-   }
- }
- function safeParseJSON({
-   text,
-   schema
- }) {
-   try {
-     const value = SecureJSON.parse(text);
-     if (schema == null) {
-       return {
-         success: true,
-         value
-       };
-     }
-     return safeValidateTypes({ value, schema });
-   } catch (error) {
-     return {
-       success: false,
-       error: error instanceof JSONParseError ? error : new JSONParseError({ text, cause: error })
-     };
-   }
- }
- function isParseableJson(input) {
-   try {
-     SecureJSON.parse(input);
-     return true;
-   } catch (e) {
-     return false;
-   }
- }
-
- // ai-model-specification/util/post-to-api.ts
- var postJsonToApi = async ({
-   url,
-   headers,
-   body,
-   failedResponseHandler,
-   successfulResponseHandler,
-   abortSignal
- }) => postToApi({
-   url,
-   headers: {
-     ...headers,
-     "Content-Type": "application/json"
-   },
-   body: {
-     content: JSON.stringify(body),
-     values: body
-   },
-   failedResponseHandler,
-   successfulResponseHandler,
-   abortSignal
- });
- var postToApi = async ({
-   url,
-   headers = {},
-   body,
-   successfulResponseHandler,
-   failedResponseHandler,
-   abortSignal
- }) => {
-   try {
-     const definedHeaders = Object.fromEntries(
-       Object.entries(headers).filter(([_key, value]) => value != null)
-     );
-     const response = await fetch(url, {
-       method: "POST",
-       headers: definedHeaders,
-       body: body.content,
-       signal: abortSignal
-     });
-     if (!response.ok) {
-       try {
-         throw await failedResponseHandler({
-           response,
-           url,
-           requestBodyValues: body.values
-         });
-       } catch (error) {
-         if (error instanceof Error) {
-           if (error.name === "AbortError" || error instanceof APICallError) {
-             throw error;
-           }
-         }
-         throw new APICallError({
-           message: "Failed to process error response",
-           cause: error,
-           statusCode: response.status,
-           url,
-           requestBodyValues: body.values
-         });
-       }
-     }
-     try {
-       return await successfulResponseHandler({
-         response,
-         url,
-         requestBodyValues: body.values
-       });
-     } catch (error) {
-       if (error instanceof Error) {
-         if (error.name === "AbortError" || error instanceof APICallError) {
-           throw error;
-         }
-       }
-       throw new APICallError({
-         message: "Failed to process successful response",
-         cause: error,
-         statusCode: response.status,
-         url,
-         requestBodyValues: body.values
-       });
-     }
-   } catch (error) {
-     if (error instanceof Error) {
-       if (error.name === "AbortError") {
-         throw error;
-       }
-     }
-     if (error instanceof TypeError && error.message === "fetch failed") {
-       const cause = error.cause;
-       if (cause != null) {
-         throw new APICallError({
-           message: `Cannot connect to API: ${cause.message}`,
-           cause,
-           url,
-           requestBodyValues: body.values,
-           isRetryable: true
-           // retry when network error
-         });
-       }
-     }
-     throw error;
-   }
- };
-
- // ai-model-specification/util/response-handler.ts
- import {
-   EventSourceParserStream
- } from "eventsource-parser/stream";
- var createJsonErrorResponseHandler = ({
-   errorSchema,
-   errorToMessage,
-   isRetryable
- }) => async ({ response, url, requestBodyValues }) => {
-   const responseBody = await response.text();
-   if (responseBody.trim() === "") {
-     return new APICallError({
-       message: response.statusText,
-       url,
-       requestBodyValues,
-       statusCode: response.status,
-       responseBody,
-       isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-     });
-   }
-   try {
-     const parsedError = parseJSON({
-       text: responseBody,
-       schema: errorSchema
-     });
-     return new APICallError({
-       message: errorToMessage(parsedError),
-       url,
-       requestBodyValues,
-       statusCode: response.status,
-       responseBody,
-       data: parsedError,
-       isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
-     });
-   } catch (parseError) {
-     return new APICallError({
-       message: response.statusText,
-       url,
-       requestBodyValues,
-       statusCode: response.status,
-       responseBody,
-       isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-     });
-   }
- };
- var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
-   if (response.body == null) {
-     throw new Error("No response body");
-   }
-   return response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
-     new TransformStream({
-       transform({ data }, controller) {
-         if (data === "[DONE]") {
-           return;
-         }
-         const parseResult = safeParseJSON({
-           text: data,
-           schema: chunkSchema
-         });
-         controller.enqueue(
-           parseResult.success ? { type: "value", value: parseResult.value } : { type: "error", error: parseResult.error }
-         );
-       }
-     })
-   );
- };
- var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
-   const responseBody = await response.text();
-   const parsedResult = safeParseJSON({
-     text: responseBody,
-     schema: responseSchema
-   });
-   if (!parsedResult.success) {
-     throw new APICallError({
-       message: "Invalid JSON response",
-       cause: parsedResult.error,
-       statusCode: response.status,
-       responseBody,
-       url,
-       requestBodyValues
-     });
-   }
-   return parsedResult.value;
- };
-
- // ai-model-specification/util/scale.ts
- function scale({
-   inputMin = 0,
-   inputMax = 1,
-   outputMin,
-   outputMax,
-   value
- }) {
-   if (value === void 0) {
-     return void 0;
-   }
-   const inputRange = inputMax - inputMin;
-   const outputRange = outputMax - outputMin;
-   return (value - inputMin) * outputRange / inputRange + outputMin;
- }
-
- // ai-model-specification/util/uint8-utils.ts
- function convertUint8ArrayToBase64(array) {
-   let latin1string = "";
-   for (const value of array) {
-     latin1string += String.fromCodePoint(value);
-   }
-   return globalThis.btoa(latin1string);
- }
-
- // provider/openai/openai-chat-language-model.ts
- import { nanoid } from "nanoid";
- import { z as z2 } from "zod";
-
- // provider/openai/convert-to-openai-chat-messages.ts
- function convertToOpenAIChatMessages(prompt) {
-   const messages = [];
-   for (const { role, content } of prompt) {
-     switch (role) {
-       case "system": {
-         messages.push({ role: "system", content });
-         break;
-       }
-       case "user": {
-         messages.push({
-           role: "user",
-           content: content.map((part) => {
-             var _a;
-             switch (part.type) {
-               case "text": {
-                 return { type: "text", text: part.text };
-               }
-               case "image": {
-                 return {
-                   type: "image_url",
-                   image_url: {
-                     url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`
-                   }
-                 };
-               }
-             }
-           })
-         });
-         break;
-       }
-       case "assistant": {
-         let text = "";
-         const toolCalls = [];
-         for (const part of content) {
-           switch (part.type) {
-             case "text": {
-               text += part.text;
-               break;
-             }
-             case "tool-call": {
-               toolCalls.push({
-                 id: part.toolCallId,
-                 type: "function",
-                 function: {
-                   name: part.toolName,
-                   arguments: JSON.stringify(part.args)
-                 }
-               });
-               break;
-             }
-             default: {
-               const _exhaustiveCheck = part;
-               throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
-             }
-           }
-         }
-         messages.push({
-           role: "assistant",
-           content: text,
-           tool_calls: toolCalls.length > 0 ? toolCalls : void 0
-         });
-         break;
-       }
-       case "tool": {
-         for (const toolResponse of content) {
-           messages.push({
-             role: "tool",
-             tool_call_id: toolResponse.toolCallId,
-             content: JSON.stringify(toolResponse.result)
-           });
-         }
-         break;
-       }
-       default: {
-         const _exhaustiveCheck = role;
-         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   return messages;
- }
-
- // provider/openai/openai-error.ts
- import { z } from "zod";
- var openAIErrorDataSchema = z.object({
-   error: z.object({
-     message: z.string(),
-     type: z.string(),
-     param: z.any().nullable(),
-     code: z.string().nullable()
-   })
- });
- var openaiFailedResponseHandler = createJsonErrorResponseHandler({
-   errorSchema: openAIErrorDataSchema,
-   errorToMessage: (data) => data.error.message
- });
-
- // provider/openai/map-openai-finish-reason.ts
- function mapOpenAIFinishReason(finishReason) {
-   switch (finishReason) {
-     case "stop":
-       return "stop";
-     case "length":
-       return "length";
-     case "content-filter":
-       return "content-filter";
-     case "function_call":
-     case "tool-calls":
-       return "tool-calls";
-     default:
-       return "other";
-   }
- }
-
- // provider/openai/openai-chat-language-model.ts
- var OpenAIChatLanguageModel = class {
-   constructor(modelId, settings, config) {
-     this.specificationVersion = "v1";
-     this.defaultObjectGenerationMode = "tool";
-     this.modelId = modelId;
-     this.settings = settings;
-     this.config = config;
-   }
-   get provider() {
-     return this.config.provider;
-   }
-   getArgs({
-     mode,
-     prompt,
-     maxTokens,
-     temperature,
-     topP,
-     frequencyPenalty,
-     presencePenalty,
-     seed
-   }) {
-     var _a;
-     const type = mode.type;
-     const baseArgs = {
-       // model id:
-       model: this.modelId,
-       // model specific settings:
-       logit_bias: this.settings.logitBias,
-       user: this.settings.user,
-       // standardized settings:
-       max_tokens: maxTokens,
-       temperature: scale({
-         value: temperature,
-         outputMin: 0,
-         outputMax: 2
-       }),
-       top_p: topP,
-       frequency_penalty: scale({
-         value: frequencyPenalty,
-         inputMin: -1,
-         inputMax: 1,
-         outputMin: -2,
-         outputMax: 2
-       }),
-       presence_penalty: scale({
-         value: presencePenalty,
-         inputMin: -1,
-         inputMax: 1,
-         outputMin: -2,
-         outputMax: 2
-       }),
-       seed,
-       // messages:
-       messages: convertToOpenAIChatMessages(prompt)
-     };
-     switch (type) {
-       case "regular": {
-         const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-         return {
-           ...baseArgs,
-           tools: tools == null ? void 0 : tools.map((tool) => ({
-             type: "function",
-             function: {
-               name: tool.name,
-               description: tool.description,
-               parameters: tool.parameters
-             }
-           }))
-         };
-       }
-       case "object-json": {
-         return {
-           ...baseArgs,
-           response_format: { type: "json_object" }
-         };
-       }
-       case "object-tool": {
-         return {
-           ...baseArgs,
-           tool_choice: { type: "function", function: { name: mode.tool.name } },
-           tools: [{ type: "function", function: mode.tool }]
-         };
-       }
-       case "object-grammar": {
-         throw new UnsupportedFunctionalityError({
-           functionality: "object-grammar mode",
-           provider: this.provider
-         });
-       }
-       default: {
-         const _exhaustiveCheck = type;
-         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   async doGenerate(options) {
-     var _a, _b;
-     const args = this.getArgs(options);
-     const response = await postJsonToApi({
-       url: `${this.config.baseUrl}/chat/completions`,
-       headers: this.config.headers(),
-       body: args,
-       failedResponseHandler: openaiFailedResponseHandler,
-       successfulResponseHandler: createJsonResponseHandler(
-         openAIChatResponseSchema
-       ),
-       abortSignal: options.abortSignal
-     });
-     const { messages: rawPrompt, ...rawSettings } = args;
-     const choice = response.choices[0];
-     return {
-       text: (_a = choice.message.content) != null ? _a : void 0,
-       toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
-         toolCallType: "function",
-         toolCallId: toolCall.id,
-         toolName: toolCall.function.name,
-         args: toolCall.function.arguments
-       })),
-       finishReason: mapOpenAIFinishReason(choice.finish_reason),
-       usage: {
-         promptTokens: response.usage.prompt_tokens,
-         completionTokens: response.usage.completion_tokens
-       },
-       rawCall: { rawPrompt, rawSettings },
-       warnings: []
-     };
-   }
-   async doStream(options) {
-     const args = this.getArgs(options);
-     const response = await postJsonToApi({
-       url: `${this.config.baseUrl}/chat/completions`,
-       headers: this.config.headers(),
-       body: {
-         ...args,
-         stream: true
-       },
-       failedResponseHandler: openaiFailedResponseHandler,
-       successfulResponseHandler: createEventSourceResponseHandler(
-         openaiChatChunkSchema
-       ),
-       abortSignal: options.abortSignal
-     });
-     const { messages: rawPrompt, ...rawSettings } = args;
-     const toolCalls = [];
-     return {
-       stream: response.pipeThrough(
-         new TransformStream({
-           transform(chunk, controller) {
-             var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
-             if (chunk.type === "error") {
-               controller.enqueue(chunk);
-               return;
-             }
-             const value = chunk.value;
-             if (((_b = (_a = value.choices) == null ? void 0 : _a[0]) == null ? void 0 : _b.delta) == null) {
-               return;
-             }
-             const delta = value.choices[0].delta;
-             if (delta.content != null) {
-               controller.enqueue({
-                 type: "text-delta",
-                 textDelta: delta.content
-               });
-             }
-             if (delta.tool_calls != null) {
-               for (const toolCallDelta of delta.tool_calls) {
-                 const index = toolCallDelta.index;
-                 if (toolCalls[index] == null) {
-                   toolCalls[index] = toolCallDelta;
-                   continue;
-                 }
-                 const toolCall = toolCalls[index];
-                 if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
-                   toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
-                 }
-                 controller.enqueue({
-                   type: "tool-call-delta",
-                   toolCallId: (_f = toolCall.id) != null ? _f : "",
-                   // TODO empty?
-                   toolName: (_h = (_g = toolCall.function) == null ? void 0 : _g.name) != null ? _h : "",
-                   // TODO empty?
-                   argsTextDelta: (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : ""
-                   // TODO empty?
-                 });
-                 if (((_k = toolCall.function) == null ? void 0 : _k.name) == null || ((_l = toolCall.function) == null ? void 0 : _l.arguments) == null || !isParseableJson(toolCall.function.arguments)) {
-                   continue;
-                 }
-                 controller.enqueue({
-                   type: "tool-call",
-                   toolCallType: "function",
-                   toolCallId: (_m = toolCall.id) != null ? _m : nanoid(),
-                   toolName: toolCall.function.name,
-                   args: toolCall.function.arguments
-                 });
-               }
-             }
-           }
-         })
-       ),
-       rawCall: { rawPrompt, rawSettings },
-       warnings: []
-     };
-   }
- };
- var openAIChatResponseSchema = z2.object({
-   choices: z2.array(
-     z2.object({
-       message: z2.object({
-         role: z2.literal("assistant"),
-         content: z2.string().nullable(),
-         tool_calls: z2.array(
-           z2.object({
-             id: z2.string(),
-             type: z2.literal("function"),
-             function: z2.object({
-               name: z2.string(),
-               arguments: z2.string()
-             })
-           })
-         ).optional()
-       }),
-       index: z2.number(),
-       finish_reason: z2.string().optional().nullable()
-     })
-   ),
-   object: z2.literal("chat.completion"),
-   usage: z2.object({
-     prompt_tokens: z2.number(),
-     completion_tokens: z2.number()
-   })
- });
- var openaiChatChunkSchema = z2.object({
-   object: z2.literal("chat.completion.chunk"),
-   choices: z2.array(
-     z2.object({
-       delta: z2.object({
-         role: z2.enum(["assistant"]).optional(),
-         content: z2.string().nullable().optional(),
-         tool_calls: z2.array(
-           z2.object({
-             index: z2.number(),
-             id: z2.string().optional(),
-             type: z2.literal("function").optional(),
-             function: z2.object({
-               name: z2.string().optional(),
-               arguments: z2.string().optional()
-             })
-           })
-         ).optional()
-       }),
-       finish_reason: z2.string().nullable().optional(),
-       index: z2.number()
-     })
-   )
- });
-
- // provider/openai/openai-completion-language-model.ts
- import { z as z3 } from "zod";
-
- // provider/openai/convert-to-openai-completion-prompt.ts
- function convertToOpenAICompletionPrompt({
-   prompt,
-   inputFormat,
-   provider,
-   user = "user",
-   assistant = "assistant"
- }) {
-   if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
-     return { prompt: prompt[0].content[0].text };
-   }
-   let text = "";
-   if (prompt[0].role === "system") {
-     text += `${prompt[0].content}
-
- `;
-     prompt = prompt.slice(1);
-   }
-   for (const { role, content } of prompt) {
-     switch (role) {
-       case "system": {
-         throw new Error(`Unexpected system message in prompt: ${content}`);
-         break;
-       }
-       case "user": {
-         const userMessage = content.map((part) => {
-           switch (part.type) {
-             case "text": {
-               return part.text;
-             }
-             case "image": {
-               throw new UnsupportedFunctionalityError({
-                 provider,
-                 functionality: "images"
-               });
-             }
-           }
-         }).join("");
-         text += `${user}:
- ${userMessage}
-
- `;
-         break;
-       }
-       case "assistant": {
-         const assistantMessage = content.map((part) => {
-           switch (part.type) {
-             case "text": {
-               return part.text;
-             }
-             case "tool-call": {
-               throw new UnsupportedFunctionalityError({
-                 provider,
-                 functionality: "tool-call messages"
-               });
-             }
-           }
-         }).join("");
-         text += `${assistant}:
- ${assistantMessage}
-
- `;
-         break;
-       }
-       case "tool": {
-         throw new UnsupportedFunctionalityError({
-           provider,
-           functionality: "tool messages"
-         });
-       }
-       default: {
-         const _exhaustiveCheck = role;
-         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   text += `${assistant}:
- `;
-   return {
-     prompt: text,
-     stopSequences: [`
- ${user}:`]
-   };
- }
-
- // provider/openai/openai-completion-language-model.ts
- var OpenAICompletionLanguageModel = class {
-   constructor(modelId, settings, config) {
-     this.specificationVersion = "v1";
-     this.defaultObjectGenerationMode = void 0;
-     this.modelId = modelId;
-     this.settings = settings;
-     this.config = config;
-   }
-   get provider() {
-     return this.config.provider;
-   }
-   getArgs({
-     mode,
-     inputFormat,
-     prompt,
-     maxTokens,
-     temperature,
-     topP,
-     frequencyPenalty,
-     presencePenalty,
-     seed
-   }) {
-     var _a;
-     const type = mode.type;
-     const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({
-       prompt,
-       inputFormat,
-       provider: this.provider
-     });
-     const baseArgs = {
-       // model id:
-       model: this.modelId,
-       // model specific settings:
-       echo: this.settings.echo,
-       logit_bias: this.settings.logitBias,
-       suffix: this.settings.suffix,
-       user: this.settings.user,
-       // standardized settings:
-       max_tokens: maxTokens,
-       temperature: scale({
-         value: temperature,
-         outputMin: 0,
-         outputMax: 2
-       }),
-       top_p: topP,
-       frequency_penalty: scale({
-         value: frequencyPenalty,
-         inputMin: -1,
-         inputMax: 1,
-         outputMin: -2,
-         outputMax: 2
-       }),
-       presence_penalty: scale({
-         value: presencePenalty,
-         inputMin: -1,
-         inputMax: 1,
-         outputMin: -2,
-         outputMax: 2
-       }),
-       seed,
-       // prompt:
-       prompt: completionPrompt,
-       // stop sequences:
-       stop: stopSequences
-     };
-     switch (type) {
-       case "regular": {
-         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-           throw new UnsupportedFunctionalityError({
-             functionality: "tools",
-             provider: this.provider
-           });
-         }
-         return baseArgs;
-       }
-       case "object-json": {
-         throw new UnsupportedFunctionalityError({
-           functionality: "object-json mode",
-           provider: this.provider
-         });
-       }
-       case "object-tool": {
-         throw new UnsupportedFunctionalityError({
-           functionality: "object-tool mode",
-           provider: this.provider
-         });
-       }
-       case "object-grammar": {
-         throw new UnsupportedFunctionalityError({
-           functionality: "object-grammar mode",
-           provider: this.provider
-         });
-       }
-       default: {
-         const _exhaustiveCheck = type;
-         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   async doGenerate(options) {
-     const args = this.getArgs(options);
-     const response = await postJsonToApi({
-       url: `${this.config.baseUrl}/completions`,
-       headers: this.config.headers(),
-       body: args,
-       failedResponseHandler: openaiFailedResponseHandler,
-       successfulResponseHandler: createJsonResponseHandler(
-         openAICompletionResponseSchema
-       ),
-       abortSignal: options.abortSignal
-     });
-     const { prompt: rawPrompt, ...rawSettings } = args;
-     const choice = response.choices[0];
-     return {
-       text: choice.text,
-       usage: {
-         promptTokens: response.usage.prompt_tokens,
-         completionTokens: response.usage.completion_tokens
-       },
-       finishReason: mapOpenAIFinishReason(choice.finish_reason),
-       rawCall: { rawPrompt, rawSettings },
-       warnings: []
-     };
-   }
-   async doStream(options) {
-     const args = this.getArgs(options);
-     const response = await postJsonToApi({
-       url: `${this.config.baseUrl}/completions`,
-       headers: this.config.headers(),
-       body: {
-         ...this.getArgs(options),
-         stream: true
-       },
-       failedResponseHandler: openaiFailedResponseHandler,
-       successfulResponseHandler: createEventSourceResponseHandler(
-         openaiCompletionChunkSchema
-       ),
-       abortSignal: options.abortSignal
-     });
-     const { prompt: rawPrompt, ...rawSettings } = args;
-     return {
-       stream: response.pipeThrough(
-         new TransformStream({
-           transform(chunk, controller) {
-             var _a, _b;
-             if (chunk.type === "error") {
-               controller.enqueue(chunk);
-               return;
-             }
-             const value = chunk.value;
-             if (((_b = (_a = value.choices) == null ? void 0 : _a[0]) == null ? void 0 : _b.text) != null) {
-               controller.enqueue({
-                 type: "text-delta",
-                 textDelta: value.choices[0].text
-               });
-             }
-           }
-         })
-       ),
-       rawCall: { rawPrompt, rawSettings },
-       warnings: []
-     };
-   }
- };
- var openAICompletionResponseSchema = z3.object({
-   choices: z3.array(
-     z3.object({
-       text: z3.string(),
-       finish_reason: z3.string()
-     })
-   ),
-   usage: z3.object({
-     prompt_tokens: z3.number(),
-     completion_tokens: z3.number()
-   })
- });
- var openaiCompletionChunkSchema = z3.object({
-   object: z3.literal("text_completion"),
-   choices: z3.array(
-     z3.object({
-       text: z3.string(),
-       finish_reason: z3.enum(["stop", "length", "content_filter"]).optional().nullable(),
-       index: z3.number()
-     })
-   )
- });
-
- // provider/openai/openai-facade.ts
- var OpenAI = class {
-   constructor(options = {}) {
-     this.baseUrl = options.baseUrl;
-     this.apiKey = options.apiKey;
-     this.organization = options.organization;
-   }
-   get baseConfig() {
-     var _a;
-     return {
-       organization: this.organization,
-       baseUrl: (_a = this.baseUrl) != null ? _a : "https://api.openai.com/v1",
-       headers: () => ({
-         Authorization: `Bearer ${loadApiKey({
-           apiKey: this.apiKey,
-           environmentVariableName: "OPENAI_API_KEY",
-           description: "OpenAI"
-         })}`,
-         "OpenAI-Organization": this.organization
-       })
-     };
-   }
-   chat(modelId, settings = {}) {
-     return new OpenAIChatLanguageModel(modelId, settings, {
-       provider: "openai.chat",
-       ...this.baseConfig
-     });
-   }
-   completion(modelId, settings = {}) {
-     return new OpenAICompletionLanguageModel(modelId, settings, {
-       provider: "openai.completion",
-       ...this.baseConfig
-     });
-   }
- };
- var openai = new OpenAI();
- export {
-   OpenAI,
-   openai
- };
- //# sourceMappingURL=index.mjs.map
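
For orientation, the removed package/provider/dist/index.mjs above exported an OpenAI provider facade plus a default openai instance; in 3.1.0 the ai-model-specification/, core/, and provider/ dist folders no longer ship. A minimal sketch of how that removed facade was constructed and used, based only on the code shown above (the import specifier is an assumption for illustration, not a documented entry point of this package):

// TypeScript sketch only: the import path is hypothetical; OpenAI and openai
// are the two exports of the removed provider/dist/index.mjs shown above.
import { OpenAI, openai } from "ai/provider"; // assumption: illustrative path

// If apiKey is omitted, loadApiKey() falls back to the OPENAI_API_KEY
// environment variable (and throws LoadAPIKeyError if neither is set).
const provider = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// chat() returns an OpenAIChatLanguageModel (specificationVersion "v1")
// exposing doGenerate() and doStream(); completion() is the analogous text model.
const chatModel = provider.chat("gpt-3.5-turbo", { user: "example-user" });

// The default instance is constructed with no overrides and behaves the same.
const defaultChatModel = openai.chat("gpt-3.5-turbo");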