ai 2.2.25 → 2.2.26

This diff compares the contents of two publicly released versions of the package as published to their public registry, and is provided for informational purposes only.
package/dist/index.d.ts CHANGED
@@ -35,6 +35,7 @@ interface Function {
  */
  description?: string;
  }
+ type IdGenerator = () => string;
  /**
  * Shared types between the API and UI packages.
  */
@@ -115,6 +116,11 @@ type UseChatOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * A way to provide a function that is going to be used for ids for messages.
+ * If not provided nanoid is used by default.
+ */
+ generateId?: IdGenerator;
  /**
  * The credentials mode to be used for the fetch request.
  * Possible values are: 'omit', 'same-origin', 'include'.
@@ -692,6 +698,7 @@ declare class experimental_StreamingReactResponse {
  data?: JSONValue[] | undefined;
  }) => UINode | Promise<UINode>;
  data?: experimental_StreamData;
+ generateId?: IdGenerator;
  });
  }

@@ -704,4 +711,4 @@ declare function experimental_AssistantResponse({ threadId, messageId }: {
  sendMessage: (message: AssistantMessage) => void;
  }) => Promise<void>): Response;

- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, IdGenerator, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
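
The main API change in this release is the new `IdGenerator` type and the matching `generateId` option on `UseChatOptions` and `experimental_StreamingReactResponse`. A minimal client-side sketch, assuming the React hook is imported from the `ai/react` entry point; any `() => string` satisfies `IdGenerator`, and omitting the option keeps the previous `nanoid` default:

import { useChat } from 'ai/react';
import type { IdGenerator } from 'ai';

// Hypothetical custom id scheme; only the () => string shape matters.
const generateId: IdGenerator = () =>
  `msg_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 8)}`;

export function Chat() {
  // Messages created by the hook now use the custom ids instead of nanoid.
  const { messages, input, handleInputChange, handleSubmit } = useChat({ generateId });
  return null; // render messages/input as usual
}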
package/dist/index.js CHANGED
@@ -960,6 +960,7 @@ async function parseComplexResponse({
  // streams/streaming-react-response.ts
  var experimental_StreamingReactResponse = class {
  constructor(res, options) {
+ var _a;
  let resolveFunc = () => {
  };
  let next = new Promise((resolve) => {
@@ -973,8 +974,8 @@ var experimental_StreamingReactResponse = class {
  parseComplexResponse({
  reader: processedStream.getReader(),
  update: (merged, data) => {
- var _a, _b, _c;
- const content2 = (_b = (_a = merged[0]) == null ? void 0 : _a.content) != null ? _b : "";
+ var _a2, _b, _c;
+ const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
  const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
  const payload = { ui, content: content2 };
  const resolvePrevious = resolveFunc;
@@ -987,6 +988,7 @@ var experimental_StreamingReactResponse = class {
  });
  lastPayload = payload;
  },
+ generateId: (_a = options.generateId) != null ? _a : nanoid,
  onFinish: () => {
  if (lastPayload !== void 0) {
  resolveFunc({
@@ -1002,12 +1004,12 @@ var experimental_StreamingReactResponse = class {
  const decode = createChunkDecoder();
  const reader = res.getReader();
  async function readChunk() {
- var _a;
+ var _a2;
  const { done, value } = await reader.read();
  if (!done) {
  content += decode(value);
  }
- const ui = ((_a = options == null ? void 0 : options.ui) == null ? void 0 : _a.call(options, { content })) || content;
+ const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
  const payload = {
  ui,
  content
package/dist/index.mjs CHANGED
@@ -912,6 +912,7 @@ async function parseComplexResponse({
  // streams/streaming-react-response.ts
  var experimental_StreamingReactResponse = class {
  constructor(res, options) {
+ var _a;
  let resolveFunc = () => {
  };
  let next = new Promise((resolve) => {
@@ -925,8 +926,8 @@ var experimental_StreamingReactResponse = class {
  parseComplexResponse({
  reader: processedStream.getReader(),
  update: (merged, data) => {
- var _a, _b, _c;
- const content2 = (_b = (_a = merged[0]) == null ? void 0 : _a.content) != null ? _b : "";
+ var _a2, _b, _c;
+ const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
  const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
  const payload = { ui, content: content2 };
  const resolvePrevious = resolveFunc;
@@ -939,6 +940,7 @@ var experimental_StreamingReactResponse = class {
  });
  lastPayload = payload;
  },
+ generateId: (_a = options.generateId) != null ? _a : nanoid,
  onFinish: () => {
  if (lastPayload !== void 0) {
  resolveFunc({
@@ -954,12 +956,12 @@ var experimental_StreamingReactResponse = class {
  const decode = createChunkDecoder();
  const reader = res.getReader();
  async function readChunk() {
- var _a;
+ var _a2;
  const { done, value } = await reader.read();
  if (!done) {
  content += decode(value);
  }
- const ui = ((_a = options == null ? void 0 : options.ui) == null ? void 0 : _a.call(options, { content })) || content;
+ const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
  const payload = {
  ui,
  content
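
In both compiled outputs the constructor simply forwards `options.generateId` to `parseComplexResponse`, falling back to `nanoid` when the option is omitted. A server-side sketch of the new constructor option; the inline `ReadableStream` is only a stand-in for a real provider stream (e.g. `OpenAIStream` output), and `crypto.randomUUID` is just one possible generator:

import { experimental_StreamingReactResponse } from 'ai';

export async function handler() {
  // Toy text stream standing in for an AI provider stream.
  const stream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(new TextEncoder().encode('Hello from the stream!'));
      controller.close();
    },
  });

  // New in 2.2.26: message ids can come from a caller-supplied generator.
  return new experimental_StreamingReactResponse(stream, {
    generateId: () => crypto.randomUUID(),
  });
}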
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.2.25",
+ "version": "2.2.26",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -64,16 +64,19 @@
  "swrv": "1.0.4"
  },
  "devDependencies": {
+ "@anthropic-ai/sdk": "0.10.0",
  "@aws-sdk/client-bedrock-runtime": "3.451.0",
  "@edge-runtime/jest-environment": "1.1.0-beta.31",
  "@huggingface/inference": "2.6.4",
  "@testing-library/jest-dom": "^6.1.4",
  "@testing-library/react": "^14.0.0",
  "@testing-library/user-event": "^14.5.1",
+ "@testing-library/vue": "^8.0.1",
  "@types/jest": "29.2.0",
  "@types/node": "^17.0.12",
  "@types/react": "^18.2.8",
  "@types/react-dom": "^18.2.0",
+ "@vue/vue3-jest": "28",
  "eslint": "^7.32.0",
  "jest": "29.2.1",
  "jest-environment-jsdom": "^29.7.0",
@@ -83,8 +86,8 @@
  "ts-jest": "29.0.3",
  "tsup": "^6.7.0",
  "typescript": "5.1.3",
- "eslint-config-vercel-ai": "0.0.0",
- "@vercel/ai-tsconfig": "0.0.0"
+ "@vercel/ai-tsconfig": "0.0.0",
+ "eslint-config-vercel-ai": "0.0.0"
  },
  "peerDependencies": {
  "react": "^18.2.0",
@@ -33,6 +33,7 @@ interface Function {
  */
  description?: string;
  }
+ type IdGenerator = () => string;
  /**
  * Shared types between the API and UI packages.
  */
@@ -113,6 +114,11 @@ type UseChatOptions = {
  * Callback function to be called when an error is encountered.
  */
  onError?: (error: Error) => void;
+ /**
+ * A way to provide a function that is going to be used for ids for messages.
+ * If not provided nanoid is used by default.
+ */
+ generateId?: IdGenerator;
  /**
  * The credentials mode to be used for the fetch request.
  * Possible values are: 'omit', 'same-origin', 'include'.
@@ -240,6 +246,7 @@ declare class experimental_StreamingReactResponse {
  data?: JSONValue[] | undefined;
  }) => UINode | Promise<UINode>;
  data?: experimental_StreamData;
+ generateId?: IdGenerator;
  });
  }

@@ -289,7 +296,7 @@ type StreamingReactResponseAction = (payload: {
  messages: Message[];
  data?: Record<string, string>;
  }) => Promise<experimental_StreamingReactResponse>;
- declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: Omit<UseChatOptions, 'api'> & {
+ declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, generateId, }?: Omit<UseChatOptions, 'api'> & {
  api?: string | StreamingReactResponseAction;
  }): UseChatHelpers;

@@ -338,20 +345,30 @@ type UseCompletionHelpers = {
  declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;

  type AssistantStatus = 'in_progress' | 'awaiting_message';
- declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
- api: string;
- threadId?: string | undefined;
- }): {
+ type UseAssistantHelpers = {
+ /** Current messages in the chat */
  messages: Message[];
+ /** Current thread ID */
  threadId: string | undefined;
+ /** The current value of the input */
  input: string;
- handleInputChange: (e: any) => void;
- submitMessage: (e: any) => Promise<void>;
+ /** An input/textarea-ready onChange handler to control the value of the input */
+ handleInputChange: (event: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+ /** Form submission handler to automatically reset input and append a user message */
+ submitMessage: (event?: React.FormEvent<HTMLFormElement>, requestOptions?: {
+ data?: Record<string, string>;
+ }) => Promise<void>;
+ /** Current status of the assistant */
  status: AssistantStatus;
- error: unknown;
+ /** Current error, if any */
+ error: undefined | unknown;
  };
+ declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
+ api: string;
+ threadId?: string | undefined;
+ }): UseAssistantHelpers;

- export { AssistantStatus, CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
+ export { AssistantStatus, CreateMessage, Message, UseAssistantHelpers, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
  import * as react_jsx_runtime from 'react/jsx-runtime';

  type Props = {
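
The React declarations above also add a named `UseAssistantHelpers` type, and `submitMessage` now takes an optional event plus an optional `data` record. A client-side sketch, assuming the hook is imported from the `ai/react` entry point and posts to a hypothetical `/api/assistant` route:

import { experimental_useAssistant as useAssistant } from 'ai/react';

export function AssistantPanel() {
  const { status, messages, input, handleInputChange, submitMessage } = useAssistant({
    api: '/api/assistant',
  });

  // The event argument is now optional, and extra request data can be attached;
  // `customerId` here is purely illustrative.
  const sendWithContext = () =>
    submitMessage(undefined, { data: { customerId: '42' } });

  return null; // wire handleInputChange/sendWithContext to a form as usual
}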
@@ -186,9 +186,6 @@ function createChunkDecoder(complex) {
  }
  var COMPLEX_HEADER = "X-Experimental-Stream-Data";

- // shared/call-api.ts
- var import_nanoid = require("nanoid");
-
  // shared/parse-complex-response.ts
  async function parseComplexResponse({
  reader,
@@ -295,7 +292,8 @@ async function callApi({
  restoreMessagesOnFailure,
  onResponse,
  onUpdate,
- onFinish
+ onFinish,
+ generateId
  }) {
  var _a;
  const response = await fetch(api, {
@@ -304,7 +302,10 @@ async function callApi({
  messages,
  ...body
  }),
- headers,
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
  signal: (_a = abortController == null ? void 0 : abortController()) == null ? void 0 : _a.signal,
  credentials
  }).catch((err) => {
@@ -338,13 +339,14 @@ async function callApi({
  if (onFinish && prefixMap.text != null) {
  onFinish(prefixMap.text);
  }
- }
+ },
+ generateId
  });
  } else {
  const createdAt = /* @__PURE__ */ new Date();
  const decode = createChunkDecoder(false);
  let streamedResponse = "";
- const replyId = (0, import_nanoid.nanoid)();
+ const replyId = generateId();
  let responseMessage = {
  id: replyId,
  createdAt,
@@ -429,7 +431,7 @@ async function processChatStream({
  }

  // react/use-chat.ts
- var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
+ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadataRef, messagesRef, abortControllerRef, generateId, onFinish, onResponse, sendExtraMessageFields) => {
  var _a, _b;
  const previousMessages = messagesRef.current;
  mutate(chatRequest.messages, false);
@@ -442,7 +444,7 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
  }
  }));
  if (typeof api !== "string") {
- const replyId = nanoid();
+ const replyId = generateId();
  const createdAt = /* @__PURE__ */ new Date();
  let responseMessage = {
  id: replyId,
@@ -505,7 +507,8 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
  mutate([...chatRequest.messages, ...merged], false);
  mutateStreamData([...existingData || [], ...data || []], false);
  },
- onFinish
+ onFinish,
+ generateId
  });
  };
  function useChat({
@@ -520,7 +523,8 @@ function useChat({
  onError,
  credentials,
  headers,
- body
+ body,
+ generateId = nanoid
  } = {}) {
  const hookId = (0, import_react.useId)();
  const chatId = id || hookId;
@@ -568,6 +572,7 @@ function useChat({
  extraMetadataRef,
  messagesRef,
  abortControllerRef,
+ generateId,
  onFinish,
  onResponse,
  sendExtraMessageFields
@@ -606,13 +611,14 @@ function useChat({
  sendExtraMessageFields,
  experimental_onFunctionCall,
  messagesRef.current,
- abortControllerRef.current
+ abortControllerRef.current,
+ generateId
  ]
  );
  const append = (0, import_react.useCallback)(
  async (message, { options, functions, function_call, data } = {}) => {
  if (!message.id) {
- message.id = nanoid();
+ message.id = generateId();
  }
  const chatRequest = {
  messages: messagesRef.current.concat(message),
@@ -623,7 +629,7 @@ function useChat({
  };
  return triggerRequest(chatRequest);
  },
- [triggerRequest]
+ [triggerRequest, generateId]
  );
  const reload = (0, import_react.useCallback)(
  async ({ options, functions, function_call } = {}) => {
@@ -708,6 +714,29 @@ function useChat({
  // react/use-completion.ts
  var import_react2 = require("react");
  var import_swr2 = __toESM(require("swr"));
+
+ // shared/process-message-stream.ts
+ async function processMessageStream(reader, processMessage) {
+ const decoder = new TextDecoder();
+ let buffer = "";
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ if (buffer.length > 0) {
+ processMessage(buffer);
+ }
+ break;
+ }
+ buffer += decoder.decode(value, { stream: true });
+ let endIndex;
+ while ((endIndex = buffer.indexOf("\n")) !== -1) {
+ processMessage(buffer.substring(0, endIndex).trim());
+ buffer = buffer.substring(endIndex + 1);
+ }
+ }
+ }
+
+ // react/use-completion.ts
  function useCompletion({
  api = "/api/completion",
  id,
@@ -785,17 +814,28 @@ function useCompletion({
  }
  let result = "";
  const reader = res.body.getReader();
- const decoder = createChunkDecoder();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
- result += decoder(value);
- mutate(result, false);
- if (abortController2 === null) {
- reader.cancel();
- break;
+ const isComplexMode = res.headers.get(COMPLEX_HEADER) === "true";
+ if (isComplexMode) {
+ await processMessageStream(reader, (message) => {
+ const { type, value } = parseStreamPart(message);
+ if (type === "text") {
+ result += value;
+ mutate(result, false);
+ }
+ });
+ } else {
+ const decoder = createChunkDecoder();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ break;
+ }
+ result += decoder(value);
+ mutate(result, false);
+ if (abortController2 === null) {
+ reader.cancel();
+ break;
+ }
  }
  }
  if (onFinish) {
@@ -877,29 +917,6 @@ function useCompletion({

  // react/use-assistant.ts
  var import_react3 = require("react");
-
- // shared/process-message-stream.ts
- async function processMessageStream(reader, processMessage) {
- const decoder = new TextDecoder();
- let buffer = "";
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- if (buffer.length > 0) {
- processMessage(buffer);
- }
- break;
- }
- buffer += decoder.decode(value, { stream: true });
- let endIndex;
- while ((endIndex = buffer.indexOf("\n")) !== -1) {
- processMessage(buffer.substring(0, endIndex).trim());
- buffer = buffer.substring(endIndex + 1);
- }
- }
- }
-
- // react/use-assistant.ts
  function experimental_useAssistant({
  api,
  threadId: threadIdParam
@@ -909,12 +926,12 @@ function experimental_useAssistant({
  const [threadId, setThreadId] = (0, import_react3.useState)(void 0);
  const [status, setStatus] = (0, import_react3.useState)("awaiting_message");
  const [error, setError] = (0, import_react3.useState)(void 0);
- const handleInputChange = (e) => {
- setInput(e.target.value);
+ const handleInputChange = (event) => {
+ setInput(event.target.value);
  };
- const submitMessage = async (e) => {
- var _a;
- e.preventDefault();
+ const submitMessage = async (event, requestOptions) => {
+ var _a, _b;
+ (_a = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a.call(event);
  if (input === "") {
  return;
  }
@@ -929,8 +946,10 @@ function experimental_useAssistant({
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
  // always use user-provided threadId when available:
- threadId: (_a = threadIdParam != null ? threadIdParam : threadId) != null ? _a : null,
- message: input
+ threadId: (_b = threadIdParam != null ? threadIdParam : threadId) != null ? _b : null,
+ message: input,
+ // optional request data:
+ data: requestOptions == null ? void 0 : requestOptions.data
  })
  });
  if (result.body == null) {
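
The last hunk shows `submitMessage` serializing the optional `data` record into the request body alongside `threadId` and `message`. A server-side sketch of how that payload arrives, assuming a fetch-style route handler at the hypothetical `/api/assistant` path used above; a real route would stream its reply back (for example via the package's `experimental_AssistantResponse` helper), which is omitted here:

export async function POST(req: Request) {
  // Body shape mirrors what submitMessage sends: { threadId, message, data? }.
  const { threadId, message, data } = (await req.json()) as {
    threadId: string | null;
    message: string;
    data?: Record<string, string>;
  };

  // A real handler would forward `message` (and optionally `data`) to the
  // assistant backend and stream the result; this placeholder only logs it.
  console.log('assistant request', { threadId, message, data });
  return new Response(null, { status: 501 });
}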