ai 2.2.25 → 2.2.27

@@ -186,24 +186,22 @@ function createChunkDecoder(complex) {
  }
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
- // shared/call-api.ts
- var import_nanoid = require("nanoid");
-
- // shared/parse-complex-response.ts
- async function parseComplexResponse({
- reader,
- abortControllerRef,
- update,
- onFinish,
- generateId = nanoid,
- getCurrentDate = () => /* @__PURE__ */ new Date()
- }) {
- const createdAt = getCurrentDate();
- const decode = createChunkDecoder(true);
- const prefixMap = {
- data: []
- };
- const NEWLINE = "\n".charCodeAt(0);
+ // shared/read-data-stream.ts
+ var NEWLINE = "\n".charCodeAt(0);
+ function concatChunks(chunks, totalLength) {
+ const concatenatedChunks = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const chunk of chunks) {
+ concatenatedChunks.set(chunk, offset);
+ offset += chunk.length;
+ }
+ chunks.length = 0;
+ return concatenatedChunks;
+ }
+ async function* readDataStream(reader, {
+ isAborted
+ } = {}) {
+ const decoder = new TextDecoder();
  const chunks = [];
  let totalLength = 0;
  while (true) {
@@ -218,61 +216,70 @@ async function parseComplexResponse({
  if (chunks.length === 0) {
  break;
  }
- let concatenatedChunks = new Uint8Array(totalLength);
- let offset = 0;
- for (const chunk of chunks) {
- concatenatedChunks.set(chunk, offset);
- offset += chunk.length;
- }
- chunks.length = 0;
+ const concatenatedChunks = concatChunks(chunks, totalLength);
  totalLength = 0;
- const lines = decode(concatenatedChunks);
- if (typeof lines === "string") {
- throw new Error(
- "Invalid response format. Complex mode was set but the response is a string. This should never happen."
- );
+ const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
+ for (const streamPart of streamParts2) {
+ yield streamPart;
  }
- for (const { type, value: value2 } of lines) {
- if (type === "text") {
- if (prefixMap["text"]) {
- prefixMap["text"] = {
- ...prefixMap["text"],
- content: (prefixMap["text"].content || "") + value2
- };
- } else {
- prefixMap["text"] = {
- id: generateId(),
- role: "assistant",
- content: value2,
- createdAt
- };
- }
- }
- let functionCallMessage = null;
- if (type === "function_call") {
- prefixMap["function_call"] = {
+ if (isAborted == null ? void 0 : isAborted()) {
+ reader.cancel();
+ break;
+ }
+ }
+ }
+
+ // shared/parse-complex-response.ts
+ async function parseComplexResponse({
+ reader,
+ abortControllerRef,
+ update,
+ onFinish,
+ generateId = nanoid,
+ getCurrentDate = () => /* @__PURE__ */ new Date()
+ }) {
+ const createdAt = getCurrentDate();
+ const prefixMap = {
+ data: []
+ };
+ for await (const { type, value } of readDataStream(reader, {
+ isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
+ })) {
+ if (type === "text") {
+ if (prefixMap["text"]) {
+ prefixMap["text"] = {
+ ...prefixMap["text"],
+ content: (prefixMap["text"].content || "") + value
+ };
+ } else {
+ prefixMap["text"] = {
  id: generateId(),
  role: "assistant",
- content: "",
- function_call: value2.function_call,
- name: value2.function_call.name,
+ content: value,
  createdAt
  };
- functionCallMessage = prefixMap["function_call"];
- }
- if (type === "data") {
- prefixMap["data"].push(...value2);
- }
- const responseMessage = prefixMap["text"];
- const merged = [functionCallMessage, responseMessage].filter(
- Boolean
- );
- update(merged, [...prefixMap["data"]]);
- if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
- reader.cancel();
- break;
  }
  }
+ let functionCallMessage = null;
+ if (type === "function_call") {
+ prefixMap["function_call"] = {
+ id: generateId(),
+ role: "assistant",
+ content: "",
+ function_call: value.function_call,
+ name: value.function_call.name,
+ createdAt
+ };
+ functionCallMessage = prefixMap["function_call"];
+ }
+ if (type === "data") {
+ prefixMap["data"].push(...value);
+ }
+ const responseMessage = prefixMap["text"];
+ const merged = [functionCallMessage, responseMessage].filter(
+ Boolean
+ );
+ update(merged, [...prefixMap["data"]]);
  }
  onFinish == null ? void 0 : onFinish(prefixMap);
  return {
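
For orientation: the two hunks above replace the inline chunk-decoding loop in parseComplexResponse with a shared readDataStream async generator that buffers raw Uint8Array chunks, decodes them on newline boundaries, and yields parsed { type, value } stream parts. A minimal consumption sketch, assuming only names visible in this diff (readDataStream and the part shape produced by parseStreamPart) plus a standard fetch Response; the logging and the abort flag are illustrative, not part of the package:

    // Hypothetical consumer of the new readDataStream generator.
    async function consume(response) {
      let aborted = false; // illustrative abort flag, not part of the package
      const reader = response.body.getReader();
      for await (const { type, value } of readDataStream(reader, {
        isAborted: () => aborted
      })) {
        if (type === "text") {
          console.log("text delta:", value); // streamed text fragment
        } else if (type === "data") {
          console.log("data parts:", value); // arbitrary JSON payload array
        }
      }
    }
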
@@ -295,7 +302,8 @@ async function callApi({
  restoreMessagesOnFailure,
  onResponse,
  onUpdate,
- onFinish
+ onFinish,
+ generateId
  }) {
  var _a;
  const response = await fetch(api, {
@@ -304,7 +312,10 @@ async function callApi({
  messages,
  ...body
  }),
- headers,
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
  signal: (_a = abortController == null ? void 0 : abortController()) == null ? void 0 : _a.signal,
  credentials
  }).catch((err) => {
@@ -338,13 +349,14 @@ async function callApi({
  if (onFinish && prefixMap.text != null) {
  onFinish(prefixMap.text);
  }
- }
+ },
+ generateId
  });
  } else {
  const createdAt = /* @__PURE__ */ new Date();
  const decode = createChunkDecoder(false);
  let streamedResponse = "";
- const replyId = (0, import_nanoid.nanoid)();
+ const replyId = generateId();
  let responseMessage = {
  id: replyId,
  createdAt,
@@ -429,7 +441,7 @@ async function processChatStream({
  }
 
  // react/use-chat.ts
- var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
+ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, generateId, onFinish, onResponse, sendExtraMessageFields) => {
  var _a, _b;
  const previousMessages = messagesRef.current;
  mutate(chatRequest.messages, false);
@@ -442,7 +454,7 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
  }
  }));
  if (typeof api !== "string") {
- const replyId = nanoid();
+ const replyId = generateId();
  const createdAt = /* @__PURE__ */ new Date();
  let responseMessage = {
  id: replyId,
@@ -505,7 +517,8 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
  mutate([...chatRequest.messages, ...merged], false);
  mutateStreamData([...existingData || [], ...data || []], false);
  },
- onFinish
+ onFinish,
+ generateId
  });
  };
  function useChat({
@@ -520,7 +533,8 @@ function useChat({
  onError,
  credentials,
  headers,
- body
+ body,
+ generateId = nanoid
  } = {}) {
  const hookId = (0, import_react.useId)();
  const chatId = id || hookId;
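
The useChat hunks thread a new generateId option (defaulting to nanoid) through getStreamedResponse, callApi and append, so every client-generated message ID comes from one function. A hedged usage sketch, assuming the public ai/react entry point; makeId is a hypothetical stand-in for any custom ID generator:

    import { useChat } from "ai/react";

    // Hypothetical ID generator replacing the default nanoid.
    const makeId = () => `msg_${crypto.randomUUID()}`;

    export function Chat() {
      const { messages, input, handleInputChange, handleSubmit } = useChat({
        generateId: makeId
      });
      return (
        <form onSubmit={handleSubmit}>
          {messages.map((m) => (
            <p key={m.id}>{m.role}: {m.content}</p>
          ))}
          <input value={input} onChange={handleInputChange} />
        </form>
      );
    }
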
@@ -568,6 +582,7 @@ function useChat({
  extraMetadataRef,
  messagesRef,
  abortControllerRef,
+ generateId,
  onFinish,
  onResponse,
  sendExtraMessageFields
@@ -606,13 +621,14 @@ function useChat({
  sendExtraMessageFields,
  experimental_onFunctionCall,
  messagesRef.current,
- abortControllerRef.current
+ abortControllerRef.current,
+ generateId
  ]
  );
  const append = (0, import_react.useCallback)(
  async (message, { options, functions, function_call, data } = {}) => {
  if (!message.id) {
- message.id = nanoid();
+ message.id = generateId();
  }
  const chatRequest = {
  messages: messagesRef.current.concat(message),
@@ -623,7 +639,7 @@ function useChat({
  };
  return triggerRequest(chatRequest);
  },
- [triggerRequest]
+ [triggerRequest, generateId]
  );
  const reload = (0, import_react.useCallback)(
  async ({ options, functions, function_call } = {}) => {
@@ -729,6 +745,7 @@ function useCompletion({
  [completionId, "loading"],
  null
  );
+ const { data: streamData, mutate: mutateStreamData } = (0, import_swr2.default)([completionId, "streamData"], null);
  const [error, setError] = (0, import_react2.useState)(void 0);
  const completion = data;
  const [abortController, setAbortController] = (0, import_react2.useState)(null);
@@ -785,17 +802,39 @@ function useCompletion({
  }
  let result = "";
  const reader = res.body.getReader();
- const decoder = createChunkDecoder();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
+ const isComplexMode = res.headers.get(COMPLEX_HEADER) === "true";
+ if (isComplexMode) {
+ for await (const { type, value } of readDataStream(reader, {
+ isAborted: () => abortController2 === null
+ })) {
+ switch (type) {
+ case "text": {
+ result += value;
+ mutate(result, false);
+ break;
+ }
+ case "data": {
+ mutateStreamData(
+ [...streamData || [], ...value || []],
+ false
+ );
+ break;
+ }
+ }
  }
- result += decoder(value);
- mutate(result, false);
- if (abortController2 === null) {
- reader.cancel();
- break;
+ } else {
+ const decoder = createChunkDecoder();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ result += decoder(value);
+ mutate(result, false);
+ if (abortController2 === null) {
+ reader.cancel();
+ break;
+ }
  }
  }
  if (onFinish) {
@@ -871,50 +910,31 @@ function useCompletion({
  setInput,
  handleInputChange,
  handleSubmit,
- isLoading
+ isLoading,
+ data: streamData
  };
  }
 
  // react/use-assistant.ts
  var import_react3 = require("react");
-
- // shared/process-message-stream.ts
- async function processMessageStream(reader, processMessage) {
- const decoder = new TextDecoder();
- let buffer = "";
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- if (buffer.length > 0) {
- processMessage(buffer);
- }
- break;
- }
- buffer += decoder.decode(value, { stream: true });
- let endIndex;
- while ((endIndex = buffer.indexOf("\n")) !== -1) {
- processMessage(buffer.substring(0, endIndex).trim());
- buffer = buffer.substring(endIndex + 1);
- }
- }
- }
-
- // react/use-assistant.ts
  function experimental_useAssistant({
  api,
- threadId: threadIdParam
+ threadId: threadIdParam,
+ credentials,
+ headers,
+ body
  }) {
  const [messages, setMessages] = (0, import_react3.useState)([]);
  const [input, setInput] = (0, import_react3.useState)("");
  const [threadId, setThreadId] = (0, import_react3.useState)(void 0);
  const [status, setStatus] = (0, import_react3.useState)("awaiting_message");
  const [error, setError] = (0, import_react3.useState)(void 0);
- const handleInputChange = (e) => {
- setInput(e.target.value);
+ const handleInputChange = (event) => {
+ setInput(event.target.value);
  };
- const submitMessage = async (e) => {
- var _a;
- e.preventDefault();
+ const submitMessage = async (event, requestOptions) => {
+ var _a, _b;
+ (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
  if (input === "") {
  return;
  }
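
useCompletion now keeps a parallel streamData SWR entry, fills it when the response carries the X-Experimental-Stream-Data header (the complex-mode branch above), and exposes it as data on the returned object. A hedged client-side sketch, assuming the public ai/react entry point:

    import { useCompletion } from "ai/react";

    export function Completion() {
      // `data` is the new stream-data array; it stays null/empty until the server appends any data.
      const { completion, data, input, handleInputChange, handleSubmit } = useCompletion();
      return (
        <form onSubmit={handleSubmit}>
          <input value={input} onChange={handleInputChange} />
          <p>{completion}</p>
          <pre>{JSON.stringify(data ?? [], null, 2)}</pre>
        </form>
      );
    }
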
@@ -926,19 +946,24 @@ function experimental_useAssistant({
  setInput("");
  const result = await fetch(api, {
  method: "POST",
- headers: { "Content-Type": "application/json" },
+ credentials,
+ headers: { "Content-Type": "application/json", ...headers },
  body: JSON.stringify({
+ ...body,
  // always use user-provided threadId when available:
- threadId: (_a = threadIdParam != null ? threadIdParam : threadId) != null ? _a : null,
- message: input
+ threadId: (_b = threadIdParam != null ? threadIdParam : threadId) != null ? _b : null,
+ message: input,
+ // optional request data:
+ data: requestOptions == null ? void 0 : requestOptions.data
  })
  });
  if (result.body == null) {
  throw new Error("The response body is empty.");
  }
- await processMessageStream(result.body.getReader(), (message) => {
- try {
- const { type, value } = parseStreamPart(message);
+ try {
+ for await (const { type, value } of readDataStream(
+ result.body.getReader()
+ )) {
  switch (type) {
  case "assistant_message": {
  setMessages((messages2) => [
@@ -965,10 +990,10 @@ function experimental_useAssistant({
  break;
  }
  }
- } catch (error2) {
- setError(error2);
  }
- });
+ } catch (error2) {
+ setError(error2);
+ }
  setStatus("awaiting_message");
  };
  return {
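
Finally, experimental_useAssistant gains credentials, headers and body options, and submitMessage now takes an optional second argument whose data field is forwarded as data in the POST body (the form event itself is optional as well). A hedged sketch, assuming the ai/react entry point and its usual returned handlers; the extra header, body fields and data payload are illustrative:

    import { experimental_useAssistant as useAssistant } from "ai/react";

    export function AssistantChat() {
      const { status, messages, input, handleInputChange, submitMessage } = useAssistant({
        api: "/api/assistant",
        headers: { "X-Example-Header": "demo" }, // illustrative extra header
        body: { tenant: "acme" }                 // illustrative extra body fields
      });
      return (
        <form
          onSubmit={(event) =>
            // optional per-request data, sent as `data` in the request body
            submitMessage(event, { data: { locale: "en-US" } })
          }
        >
          {messages.map((m) => (
            <p key={m.id}>{m.role}: {m.content}</p>
          ))}
          <input
            value={input}
            onChange={handleInputChange}
            disabled={status !== "awaiting_message"}
          />
        </form>
      );
    }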