ai 2.2.20 → 2.2.22

package/dist/index.d.ts CHANGED
@@ -242,7 +242,7 @@ interface ChatCompletionChunk {
 }
 interface ChatCompletionChunkChoice {
     delta: ChoiceDelta;
-    finish_reason: 'stop' | 'length' | 'function_call' | 'content_filter' | null;
+    finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null;
     index: number;
 }
 interface ChoiceDelta {
@@ -258,7 +258,33 @@ interface ChoiceDelta {
     /**
      * The role of the author of this message.
      */
-    role?: 'system' | 'user' | 'assistant' | 'function';
+    role?: 'system' | 'user' | 'assistant' | 'tool';
+    tool_calls?: Array<DeltaToolCall>;
+}
+interface DeltaToolCall {
+    index: number;
+    /**
+     * The ID of the tool call.
+     */
+    id?: string;
+    function?: ToolCallFunction;
+    /**
+     * The type of the tool. Currently, only `function` is supported.
+     */
+    type?: 'function';
+}
+interface ToolCallFunction {
+    /**
+     * The arguments to call the function with, as generated by the model in JSON
+     * format. Note that the model does not always generate valid JSON, and may
+     * hallucinate parameters not defined by your function schema. Validate the
+     * arguments in your code before calling your function.
+     */
+    arguments?: string;
+    /**
+     * The name of the function to call.
+     */
+    name?: string;
 }
 /**
  * https://github.com/openai/openai-node/blob/3ec43ee790a2eb6a0ccdd5f25faa23251b0f9b8e/src/resources/completions.ts#L28C1-L64C1
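The `ChoiceDelta` additions above follow OpenAI's streaming tool-call shape: `finish_reason` can now be `'tool_calls'`, and each chunk's delta may carry fragments of one or more tool calls, keyed by `index`. A minimal consumer sketch (not part of this package; `collectToolCalls` and its chunk iterable are hypothetical) that stitches the streamed fragments back together:

```ts
interface AssembledToolCall {
  id?: string;
  name?: string;
  arguments: string;
}

async function collectToolCalls(
  chunks: AsyncIterable<{
    choices: Array<{
      delta: {
        tool_calls?: Array<{
          index: number;
          id?: string;
          function?: { name?: string; arguments?: string };
        }>;
      };
      finish_reason: string | null;
    }>;
  }>,
): Promise<AssembledToolCall[]> {
  const calls: AssembledToolCall[] = [];
  for await (const chunk of chunks) {
    for (const choice of chunk.choices) {
      for (const delta of choice.delta.tool_calls ?? []) {
        // `index` identifies which tool call a fragment belongs to.
        const call = (calls[delta.index] ??= { arguments: "" });
        if (delta.id) call.id = delta.id;
        if (delta.function?.name) call.name = delta.function.name;
        if (delta.function?.arguments) call.arguments += delta.function.arguments;
      }
    }
  }
  // As the doc comment warns, `arguments` is model-generated JSON and may be
  // invalid; validate before JSON.parse / dispatch.
  return calls;
}
```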
@@ -548,7 +574,9 @@ interface Prediction {
  * return new StreamingTextResponse(stream)
  *
  */
-declare function ReplicateStream(res: Prediction, cb?: AIStreamCallbacksAndOptions): Promise<ReadableStream>;
+declare function ReplicateStream(res: Prediction, cb?: AIStreamCallbacksAndOptions, options?: {
+    headers?: Record<string, string>;
+}): Promise<ReadableStream>;

 declare const nanoid: (size?: number | undefined) => string;
 declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;
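The widened `ReplicateStream` signature lets callers forward extra request headers to the prediction's SSE endpoint; they are spread after `Accept` in the implementation (see the `index.js` diff below), so they win on conflict. A usage sketch under assumptions: `prediction` is a Replicate prediction created with `stream: true`, and the header shown is illustrative rather than required:

```ts
import { ReplicateStream, StreamingTextResponse } from "ai";

// Sketch: `prediction.urls.stream` is assumed to point at an SSE endpoint.
export async function toResponse(prediction: any): Promise<Response> {
  const stream = await ReplicateStream(prediction, undefined, {
    // Forwarded verbatim into the fetch() of the stream URL.
    headers: { Authorization: `Bearer ${process.env.REPLICATE_API_TOKEN}` }, // hypothetical auth header
  });
  return new StreamingTextResponse(stream);
}
```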
package/dist/index.js CHANGED
@@ -530,14 +530,12 @@ function createParser2(res) {
       if (!text)
         return;
       if (value.generated_text != null && value.generated_text.length > 0) {
-        controller.close();
         return;
       }
       if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
-        controller.close();
-      } else {
-        controller.enqueue(text);
+        return;
       }
+      controller.enqueue(text);
     }
   });
 }
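Both terminal-token branches in the Hugging Face parser now `return` instead of calling `controller.close()`: the parser skips the sentinel token and leaves closing the stream to whatever detects that the source is exhausted. A generic sketch of that control flow (not the package's exact code):

```ts
// Sentinel tokens are skipped with `return`; the stream is closed only when
// the underlying source reports done. Closing mid-stream, as the old code
// did, ends output even if the source still has text to deliver.
function filteredStream(source: AsyncIterator<string>): ReadableStream<string> {
  const STOP = new Set(["</s>", "<|endoftext|>", "<|end|>"]);
  return new ReadableStream<string>({
    async pull(controller) {
      const { value, done } = await source.next();
      if (done) {
        controller.close(); // close here, never on a stop token
        return;
      }
      if (STOP.has(value)) return; // skip and keep pulling
      controller.enqueue(value);
    },
  });
}
```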
@@ -693,7 +691,7 @@ function LangChainStream(callbacks) {
 }

 // streams/replicate-stream.ts
-async function ReplicateStream(res, cb) {
+async function ReplicateStream(res, cb, options) {
   var _a;
   const url = (_a = res.urls) == null ? void 0 : _a.stream;
   if (!url) {
@@ -705,7 +703,8 @@ async function ReplicateStream(res, cb) {
   const eventStream = await fetch(url, {
     method: "GET",
     headers: {
-      Accept: "text/event-stream"
+      Accept: "text/event-stream",
+      ...options == null ? void 0 : options.headers
     }
   });
   return AIStream(eventStream, void 0, cb).pipeThrough(
package/dist/index.mjs CHANGED
@@ -484,14 +484,12 @@ function createParser2(res) {
       if (!text)
         return;
       if (value.generated_text != null && value.generated_text.length > 0) {
-        controller.close();
         return;
       }
       if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
-        controller.close();
-      } else {
-        controller.enqueue(text);
+        return;
       }
+      controller.enqueue(text);
     }
   });
 }
@@ -647,7 +645,7 @@ function LangChainStream(callbacks) {
 }

 // streams/replicate-stream.ts
-async function ReplicateStream(res, cb) {
+async function ReplicateStream(res, cb, options) {
   var _a;
   const url = (_a = res.urls) == null ? void 0 : _a.stream;
   if (!url) {
@@ -659,7 +657,8 @@ async function ReplicateStream(res, cb) {
   const eventStream = await fetch(url, {
     method: "GET",
     headers: {
-      Accept: "text/event-stream"
+      Accept: "text/event-stream",
+      ...options == null ? void 0 : options.headers
     }
   });
   return AIStream(eventStream, void 0, cb).pipeThrough(
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.2.20",
+  "version": "2.2.22",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -69,6 +69,7 @@
   },
   "devDependencies": {
     "@edge-runtime/jest-environment": "1.1.0-beta.31",
+    "@huggingface/inference": "2.6.4",
     "@types/jest": "29.2.0",
     "@types/node": "^17.0.12",
     "@types/react": "^18.2.8",
@@ -76,6 +77,7 @@
     "eslint": "^7.32.0",
     "jest": "29.2.1",
     "langchain": "0.0.172",
+    "openai": "4.16.1",
     "ts-jest": "29.0.3",
     "tsup": "^6.7.0",
     "typescript": "5.1.3",
package/react/dist/index.js CHANGED
@@ -86,7 +86,7 @@ var getStreamStringTypeAndValue = (line) => {
 };
 var COMPLEX_HEADER = "X-Experimental-Stream-Data";

-// react/parseComplexResponse.ts
+// react/parse-complex-response.ts
 async function parseComplexResponse({
   reader,
   abortControllerRef,
@@ -332,7 +332,7 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
 function useChat({
   api = "/api/chat",
   id,
-  initialMessages = [],
+  initialMessages,
   initialInput = "",
   sendExtraMessageFields,
   experimental_onFunctionCall,
@@ -345,8 +345,9 @@ function useChat({
 } = {}) {
   const hookId = (0, import_react.useId)();
   const chatId = id || hookId;
+  const [initialMessagesFallback] = (0, import_react.useState)([]);
   const { data: messages, mutate } = (0, import_swr.default)([api, chatId], null, {
-    fallbackData: initialMessages
+    fallbackData: initialMessages != null ? initialMessages : initialMessagesFallback
   });
   const { data: isLoading = false, mutate: mutateLoading } = (0, import_swr.default)(
     [chatId, "loading"],
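The `useChat` change replaces a fresh-array default (`initialMessages = []`) with a `useState`-held fallback. A new `[]` literal is a new reference on every render, which as SWR `fallbackData` can cause needless recomputation; state keeps the reference stable for the component's lifetime. A standalone sketch of the pattern (hypothetical `useStableDefault`, not from the package):

```ts
import { useState } from "react";

// Returns `value` when provided, otherwise a default whose reference is
// stable across renders (unlike `value = []` in a parameter default, which
// allocates a fresh array every render).
function useStableDefault<T>(value: T | undefined, makeDefault: () => T): T {
  const [fallback] = useState(makeDefault); // lazy init: runs once on mount
  return value !== undefined ? value : fallback;
}

// Usage, mirroring the useChat fix:
// const messages = useStableDefault(initialMessages, () => []);
```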
package/react/dist/index.mjs CHANGED
@@ -50,7 +50,7 @@ var getStreamStringTypeAndValue = (line) => {
 };
 var COMPLEX_HEADER = "X-Experimental-Stream-Data";

-// react/parseComplexResponse.ts
+// react/parse-complex-response.ts
 async function parseComplexResponse({
   reader,
   abortControllerRef,
@@ -296,7 +296,7 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
 function useChat({
   api = "/api/chat",
   id,
-  initialMessages = [],
+  initialMessages,
   initialInput = "",
   sendExtraMessageFields,
   experimental_onFunctionCall,
@@ -309,8 +309,9 @@ function useChat({
 } = {}) {
   const hookId = useId();
   const chatId = id || hookId;
+  const [initialMessagesFallback] = useState([]);
   const { data: messages, mutate } = useSWR([api, chatId], null, {
-    fallbackData: initialMessages
+    fallbackData: initialMessages != null ? initialMessages : initialMessagesFallback
   });
   const { data: isLoading = false, mutate: mutateLoading } = useSWR(
     [chatId, "loading"],