ai 2.2.25 → 2.2.27

This diff shows the content of publicly released package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
@@ -149,24 +149,22 @@ function createChunkDecoder(complex) {
  }
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";

- // shared/call-api.ts
- import { nanoid as nanoid2 } from "nanoid";
-
- // shared/parse-complex-response.ts
- async function parseComplexResponse({
-   reader,
-   abortControllerRef,
-   update,
-   onFinish,
-   generateId = nanoid,
-   getCurrentDate = () => /* @__PURE__ */ new Date()
- }) {
-   const createdAt = getCurrentDate();
-   const decode = createChunkDecoder(true);
-   const prefixMap = {
-     data: []
-   };
-   const NEWLINE = "\n".charCodeAt(0);
+ // shared/read-data-stream.ts
+ var NEWLINE = "\n".charCodeAt(0);
+ function concatChunks(chunks, totalLength) {
+   const concatenatedChunks = new Uint8Array(totalLength);
+   let offset = 0;
+   for (const chunk of chunks) {
+     concatenatedChunks.set(chunk, offset);
+     offset += chunk.length;
+   }
+   chunks.length = 0;
+   return concatenatedChunks;
+ }
+ async function* readDataStream(reader, {
+   isAborted
+ } = {}) {
+   const decoder = new TextDecoder();
    const chunks = [];
    let totalLength = 0;
    while (true) {
@@ -181,61 +179,70 @@ async function parseComplexResponse({
      if (chunks.length === 0) {
        break;
      }
-     let concatenatedChunks = new Uint8Array(totalLength);
-     let offset = 0;
-     for (const chunk of chunks) {
-       concatenatedChunks.set(chunk, offset);
-       offset += chunk.length;
-     }
-     chunks.length = 0;
+     const concatenatedChunks = concatChunks(chunks, totalLength);
      totalLength = 0;
-     const lines = decode(concatenatedChunks);
-     if (typeof lines === "string") {
-       throw new Error(
-         "Invalid response format. Complex mode was set but the response is a string. This should never happen."
-       );
+     const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
+     for (const streamPart of streamParts2) {
+       yield streamPart;
      }
-     for (const { type, value: value2 } of lines) {
-       if (type === "text") {
-         if (prefixMap["text"]) {
-           prefixMap["text"] = {
-             ...prefixMap["text"],
-             content: (prefixMap["text"].content || "") + value2
-           };
-         } else {
-           prefixMap["text"] = {
-             id: generateId(),
-             role: "assistant",
-             content: value2,
-             createdAt
-           };
-         }
-       }
-       let functionCallMessage = null;
-       if (type === "function_call") {
-         prefixMap["function_call"] = {
+     if (isAborted == null ? void 0 : isAborted()) {
+       reader.cancel();
+       break;
+     }
+   }
+ }
+
+ // shared/parse-complex-response.ts
+ async function parseComplexResponse({
+   reader,
+   abortControllerRef,
+   update,
+   onFinish,
+   generateId = nanoid,
+   getCurrentDate = () => /* @__PURE__ */ new Date()
+ }) {
+   const createdAt = getCurrentDate();
+   const prefixMap = {
+     data: []
+   };
+   for await (const { type, value } of readDataStream(reader, {
+     isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
+   })) {
+     if (type === "text") {
+       if (prefixMap["text"]) {
+         prefixMap["text"] = {
+           ...prefixMap["text"],
+           content: (prefixMap["text"].content || "") + value
+         };
+       } else {
+         prefixMap["text"] = {
            id: generateId(),
            role: "assistant",
-           content: "",
-           function_call: value2.function_call,
-           name: value2.function_call.name,
+           content: value,
            createdAt
          };
-         functionCallMessage = prefixMap["function_call"];
-       }
-       if (type === "data") {
-         prefixMap["data"].push(...value2);
-       }
-       const responseMessage = prefixMap["text"];
-       const merged = [functionCallMessage, responseMessage].filter(
-         Boolean
-       );
-       update(merged, [...prefixMap["data"]]);
-       if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
-         reader.cancel();
-         break;
        }
      }
+     let functionCallMessage = null;
+     if (type === "function_call") {
+       prefixMap["function_call"] = {
+         id: generateId(),
+         role: "assistant",
+         content: "",
+         function_call: value.function_call,
+         name: value.function_call.name,
+         createdAt
+       };
+       functionCallMessage = prefixMap["function_call"];
+     }
+     if (type === "data") {
+       prefixMap["data"].push(...value);
+     }
+     const responseMessage = prefixMap["text"];
+     const merged = [functionCallMessage, responseMessage].filter(
+       Boolean
+     );
+     update(merged, [...prefixMap["data"]]);
    }
    onFinish == null ? void 0 : onFinish(prefixMap);
    return {
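
The new readDataStream generator above factors the chunk buffering and the parsing of `{ type, value }` stream parts out of parseComplexResponse so the hooks below can reuse it. As a minimal illustration of the consumption pattern only (the import path is an assumption; in 2.2.27 the function lives in the bundled shared/read-data-stream.ts module and this diff does not show whether it is re-exported from the package root), a caller could collect the text parts of a response like this:

// Sketch only: consuming a readDataStream-style async generator.
// The "ai" import path is an assumption; adjust it to wherever the
// generator is actually exported from in your setup.
import { readDataStream } from "ai";

async function collectText(response: Response): Promise<string> {
  if (response.body == null) {
    throw new Error("The response body is empty.");
  }
  let text = "";
  // Each yielded part is a parsed stream part, e.g. { type: "text", value: "..." }.
  for await (const { type, value } of readDataStream(response.body.getReader())) {
    if (type === "text") {
      text += value;
    }
  }
  return text;
}
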
@@ -258,7 +265,8 @@ async function callApi({
    restoreMessagesOnFailure,
    onResponse,
    onUpdate,
-   onFinish
+   onFinish,
+   generateId
  }) {
    var _a;
    const response = await fetch(api, {
@@ -267,7 +275,10 @@
        messages,
        ...body
      }),
-     headers,
+     headers: {
+       "Content-Type": "application/json",
+       ...headers
+     },
      signal: (_a = abortController == null ? void 0 : abortController()) == null ? void 0 : _a.signal,
      credentials
    }).catch((err) => {
@@ -301,13 +312,14 @@ async function callApi({
          if (onFinish && prefixMap.text != null) {
            onFinish(prefixMap.text);
          }
-       }
+       },
+       generateId
      });
    } else {
      const createdAt = /* @__PURE__ */ new Date();
      const decode = createChunkDecoder(false);
      let streamedResponse = "";
-     const replyId = nanoid2();
+     const replyId = generateId();
      let responseMessage = {
        id: replyId,
        createdAt,
@@ -392,7 +404,7 @@ async function processChatStream({
  }

  // react/use-chat.ts
- var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
+ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, generateId, onFinish, onResponse, sendExtraMessageFields) => {
    var _a, _b;
    const previousMessages = messagesRef.current;
    mutate(chatRequest.messages, false);
@@ -405,7 +417,7 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
      }
    }));
    if (typeof api !== "string") {
-     const replyId = nanoid();
+     const replyId = generateId();
      const createdAt = /* @__PURE__ */ new Date();
      let responseMessage = {
        id: replyId,
@@ -468,7 +480,8 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
        mutate([...chatRequest.messages, ...merged], false);
        mutateStreamData([...existingData || [], ...data || []], false);
      },
-     onFinish
+     onFinish,
+     generateId
    });
  };
  function useChat({
@@ -483,7 +496,8 @@ function useChat({
    onError,
    credentials,
    headers,
-   body
+   body,
+   generateId = nanoid
  } = {}) {
    const hookId = useId();
    const chatId = id || hookId;
@@ -531,6 +545,7 @@ function useChat({
              extraMetadataRef,
              messagesRef,
              abortControllerRef,
+             generateId,
              onFinish,
              onResponse,
              sendExtraMessageFields
@@ -569,13 +584,14 @@ function useChat({
        sendExtraMessageFields,
        experimental_onFunctionCall,
        messagesRef.current,
-       abortControllerRef.current
+       abortControllerRef.current,
+       generateId
      ]
    );
    const append = useCallback(
      async (message, { options, functions, function_call, data } = {}) => {
        if (!message.id) {
-         message.id = nanoid();
+         message.id = generateId();
        }
        const chatRequest = {
          messages: messagesRef.current.concat(message),
@@ -586,7 +602,7 @@ function useChat({
        };
        return triggerRequest(chatRequest);
      },
-     [triggerRequest]
+     [triggerRequest, generateId]
    );
    const reload = useCallback(
      async ({ options, functions, function_call } = {}) => {
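
Taken together, the use-chat.ts hunks above thread the new generateId option from useChat through getStreamedResponse and callApi, replacing the previously hard-coded nanoid calls. A minimal usage sketch (the generator shown here is illustrative; any `() => string` satisfies the IdGenerator type introduced further down in the type definitions):

// Sketch only: supplying a custom message id generator to useChat.
import { useChat } from "ai/react";

export function useChatWithCustomIds() {
  return useChat({
    // Any function returning a unique string works; crypto.randomUUID is
    // just an illustrative choice here.
    generateId: () => `msg_${crypto.randomUUID()}`,
  });
}
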
@@ -692,6 +708,7 @@ function useCompletion({
      [completionId, "loading"],
      null
    );
+   const { data: streamData, mutate: mutateStreamData } = useSWR2([completionId, "streamData"], null);
    const [error, setError] = useState2(void 0);
    const completion = data;
    const [abortController, setAbortController] = useState2(null);
@@ -748,17 +765,39 @@ function useCompletion({
        }
        let result = "";
        const reader = res.body.getReader();
-       const decoder = createChunkDecoder();
-       while (true) {
-         const { done, value } = await reader.read();
-         if (done) {
-           break;
+       const isComplexMode = res.headers.get(COMPLEX_HEADER) === "true";
+       if (isComplexMode) {
+         for await (const { type, value } of readDataStream(reader, {
+           isAborted: () => abortController2 === null
+         })) {
+           switch (type) {
+             case "text": {
+               result += value;
+               mutate(result, false);
+               break;
+             }
+             case "data": {
+               mutateStreamData(
+                 [...streamData || [], ...value || []],
+                 false
+               );
+               break;
+             }
+           }
          }
-         result += decoder(value);
-         mutate(result, false);
-         if (abortController2 === null) {
-           reader.cancel();
-           break;
+       } else {
+         const decoder = createChunkDecoder();
+         while (true) {
+           const { done, value } = await reader.read();
+           if (done) {
+             break;
+           }
+           result += decoder(value);
+           mutate(result, false);
+           if (abortController2 === null) {
+             reader.cancel();
+             break;
+           }
          }
        }
        if (onFinish) {
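
With the hunk above, useCompletion detects complex mode via the X-Experimental-Stream-Data header, routes the stream through readDataStream, and accumulates "data" parts into streamData; the next hunk exposes that as a new data field on the hook's return value. A hedged sketch of reading it (the API route name is illustrative):

// Sketch only: reading the new `data` field returned by useCompletion.
// It is populated from StreamData parts when the server responds in
// complex mode; "/api/completion" is an illustrative endpoint.
import { useCompletion } from "ai/react";

export function useCompletionWithData() {
  const { completion, data, input, handleInputChange, handleSubmit } =
    useCompletion({ api: "/api/completion" });
  // `data` holds the JSON values streamed alongside the completion text.
  return { completion, data, input, handleInputChange, handleSubmit };
}
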
@@ -834,50 +873,31 @@ function useCompletion({
      setInput,
      handleInputChange,
      handleSubmit,
-     isLoading
+     isLoading,
+     data: streamData
    };
  }

  // react/use-assistant.ts
  import { useState as useState3 } from "react";
-
- // shared/process-message-stream.ts
- async function processMessageStream(reader, processMessage) {
-   const decoder = new TextDecoder();
-   let buffer = "";
-   while (true) {
-     const { done, value } = await reader.read();
-     if (done) {
-       if (buffer.length > 0) {
-         processMessage(buffer);
-       }
-       break;
-     }
-     buffer += decoder.decode(value, { stream: true });
-     let endIndex;
-     while ((endIndex = buffer.indexOf("\n")) !== -1) {
-       processMessage(buffer.substring(0, endIndex).trim());
-       buffer = buffer.substring(endIndex + 1);
-     }
-   }
- }
-
- // react/use-assistant.ts
  function experimental_useAssistant({
    api,
-   threadId: threadIdParam
+   threadId: threadIdParam,
+   credentials,
+   headers,
+   body
  }) {
    const [messages, setMessages] = useState3([]);
    const [input, setInput] = useState3("");
    const [threadId, setThreadId] = useState3(void 0);
    const [status, setStatus] = useState3("awaiting_message");
    const [error, setError] = useState3(void 0);
-   const handleInputChange = (e) => {
-     setInput(e.target.value);
+   const handleInputChange = (event) => {
+     setInput(event.target.value);
    };
-   const submitMessage = async (e) => {
-     var _a;
-     e.preventDefault();
+   const submitMessage = async (event, requestOptions) => {
+     var _a, _b;
+     (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
      if (input === "") {
        return;
      }
@@ -889,19 +909,24 @@ function experimental_useAssistant({
      setInput("");
      const result = await fetch(api, {
        method: "POST",
-       headers: { "Content-Type": "application/json" },
+       credentials,
+       headers: { "Content-Type": "application/json", ...headers },
        body: JSON.stringify({
+         ...body,
          // always use user-provided threadId when available:
-         threadId: (_a = threadIdParam != null ? threadIdParam : threadId) != null ? _a : null,
-         message: input
+         threadId: (_b = threadIdParam != null ? threadIdParam : threadId) != null ? _b : null,
+         message: input,
+         // optional request data:
+         data: requestOptions == null ? void 0 : requestOptions.data
        })
      });
      if (result.body == null) {
        throw new Error("The response body is empty.");
      }
-     await processMessageStream(result.body.getReader(), (message) => {
-       try {
-         const { type, value } = parseStreamPart(message);
+     try {
+       for await (const { type, value } of readDataStream(
+         result.body.getReader()
+       )) {
          switch (type) {
            case "assistant_message": {
              setMessages((messages2) => [
@@ -928,10 +953,10 @@ function experimental_useAssistant({
              break;
            }
          }
-       } catch (error2) {
-         setError(error2);
        }
-     });
+     } catch (error2) {
+       setError(error2);
+     }
      setStatus("awaiting_message");
    };
    return {
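
The use-assistant.ts hunks above let experimental_useAssistant forward credentials, headers, and extra body fields to the API route, and let submitMessage accept optional per-request data that is sent as the `data` field of the POST body. A sketch with purely illustrative values:

// Sketch only: the new experimental_useAssistant request options.
// The endpoint, header, and body values below are illustrative.
import { experimental_useAssistant } from "ai/react";

export function useAssistantWithAuth() {
  const assistant = experimental_useAssistant({
    api: "/api/assistant",
    credentials: "same-origin",
    headers: { Authorization: "Bearer <token>" },
    body: { userId: "user_123" },
  });

  // submitMessage now takes optional request options; `data` is forwarded
  // in the request body (see the hunk above).
  const sendWithContext = () =>
    assistant.submitMessage(undefined, { data: { source: "web" } });

  return { ...assistant, sendWithContext };
}
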
@@ -35,6 +35,7 @@ interface Function {
       */
      description?: string;
  }
+ type IdGenerator = () => string;
  /**
   * Shared types between the API and UI packages.
   */
@@ -109,6 +110,11 @@ type UseChatOptions = {
       * Callback function to be called when an error is encountered.
       */
      onError?: (error: Error) => void;
+     /**
+      * A way to provide a function that is going to be used for ids for messages.
+      * If not provided nanoid is used by default.
+      */
+     generateId?: IdGenerator;
      /**
       * The credentials mode to be used for the fetch request.
       * Possible values are: 'omit', 'same-origin', 'include'.
@@ -238,7 +244,7 @@ type UseChatHelpers = {
      /** Additional data added on the server via StreamData */
      data: Accessor<JSONValue[] | undefined>;
  };
- declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;
+ declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, generateId, }?: UseChatOptions): UseChatHelpers;

  type UseCompletionHelpers = {
      /** The current completion result */
@@ -273,6 +279,8 @@ type UseCompletionHelpers = {
      handleSubmit: (e: any) => void;
      /** Whether the API request is in progress */
      isLoading: Accessor<boolean>;
+     /** Additional data added on the server via StreamData */
+     data: Accessor<JSONValue[] | undefined>;
  };
  declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;