@ai-sdk/react 2.0.0-canary.20 → 2.0.0-canary.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,27 @@
  # @ai-sdk/react
 
+ ## 2.0.0-canary.22
+
+ ### Major Changes
+
+ - 40acf9b: feat (ui): introduce ChatStore and ChatTransport
+
+ ### Patch Changes
+
+ - Updated dependencies [40acf9b]
+   - @ai-sdk/provider-utils@3.0.0-canary.18
+   - ai@5.0.0-canary.23
+
+ ## 2.0.0-canary.21
+
+ ### Patch Changes
+
+ - Updated dependencies [e7dc6c7]
+ - Updated dependencies [a34eb39]
+ - Updated dependencies [b33ed7a]
+ - Updated dependencies [765f1cd]
+   - ai@5.0.0-canary.22
+
  ## 2.0.0-canary.20
 
  ### Patch Changes
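
The headline entry above, 40acf9b "introduce ChatStore and ChatTransport", is what drives the `useChat` signature change further down in this diff: the hook now accepts a `chatStore` option instead of per-hook `api`/`headers`/`body`/transport settings. A minimal sketch of the new wiring, assuming a `defaultChatStore` factory in the matching `ai@5.0.0-canary.23` release (only the `chatStore` option on `useChat` is confirmed by this diff):

```ts
// Sketch only: wiring a ChatStore into useChat in 2.0.0-canary.22.
// Assumption: a `defaultChatStore({ api })` factory exported from 'ai';
// this diff confirms only that useChat accepts a `chatStore` option.
import { defaultChatStore } from 'ai';
import { useChat } from '@ai-sdk/react';

// a single store instance can back several useChat consumers
const chatStore = defaultChatStore({ api: '/api/chat' });

export function useSharedChat() {
  // transport/request configuration lives on the store, not on the hook
  return useChat({ chatStore });
}
```
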
package/dist/index.d.mts CHANGED
@@ -1,20 +1,34 @@
- import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, JSONValue, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
+ import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
  export { CreateUIMessage, UIMessage, UseChatOptions, UseCompletionOptions } from 'ai';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import z from 'zod';
 
  type UseChatHelpers<MESSAGE_METADATA = unknown> = {
+ /**
+ * The id of the chat.
+ */
+ readonly id: string;
+ /**
+ * Hook status:
+ *
+ * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
+ * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
+ * - `ready`: The full response has been received and processed; a new user message can be submitted.
+ * - `error`: An error occurred during the API request, preventing successful completion.
+ */
+ readonly status: 'submitted' | 'streaming' | 'ready' | 'error';
  /** Current messages in the chat */
- messages: UIMessage<MESSAGE_METADATA>[];
+ readonly messages: UIMessage<MESSAGE_METADATA>[];
  /** The error object of the API request */
- error: undefined | Error;
+ readonly error: undefined | Error;
  /**
  * Append a user message to the chat list. This triggers the API call to fetch
  * the assistant's response.
+ *
  * @param message The message to append
  * @param options Additional options to pass to the API call
  */
- append: (message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>, chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
+ append: (message: CreateUIMessage<MESSAGE_METADATA>, options?: ChatRequestOptions) => Promise<void>;
  /**
  * Reload the last AI chat response for the given chat history. If the last
  * message isn't from the assistant, it will request the API to generate a
@@ -47,45 +61,12 @@ type UseChatHelpers<MESSAGE_METADATA = unknown> = {
  }, chatRequestOptions?: ChatRequestOptions & {
  files?: FileList | FileUIPart[];
  }) => void;
- metadata?: Object;
- /**
- * Whether the API request is in progress
- *
- * @deprecated use `status` instead
- */
- isLoading: boolean;
- /**
- * Hook status:
- *
- * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
- * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
- * - `ready`: The full response has been received and processed; a new user message can be submitted.
- * - `error`: An error occurred during the API request, preventing successful completion.
- */
- status: 'submitted' | 'streaming' | 'ready' | 'error';
  addToolResult: ({ toolCallId, result, }: {
  toolCallId: string;
  result: any;
  }) => void;
- /** The id of the chat */
- id: string;
  };
- declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol, onResponse, onFinish, onError, credentials, headers, body, generateId, fetch, experimental_throttle: throttleWaitMs, messageMetadataSchema, }?: UseChatOptions<MESSAGE_METADATA> & {
- /**
- * Experimental (React only). When a function is provided, it will be used
- * to prepare the request body for the chat API. This can be useful for
- * customizing the request body based on the messages and data in the chat.
- *
- * @param messages The current messages in the chat.
- * @param requestData The data object passed in the chat request.
- * @param requestBody The request body object passed in the chat request.
- */
- experimental_prepareRequestBody?: (options: {
- id: string;
- messages: UIMessage[];
- requestData?: JSONValue;
- requestBody?: object;
- }) => unknown;
+ declare function useChat<MESSAGE_METADATA>({ id, initialInput, onToolCall, onFinish, onError, generateId, experimental_throttle: throttleWaitMs, chatStore: chatStoreArg, }?: UseChatOptions<MESSAGE_METADATA> & {
  /**
  Custom throttle wait in ms for the chat messages and data updates.
  Default is undefined, which disables throttling.
@@ -137,7 +118,7 @@ type UseCompletionHelpers = {
  /** Whether the API request is in progress */
  isLoading: boolean;
  };
- declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onResponse, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
+ declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
  /**
  * Custom throttle wait in ms for the completion and data updates.
  * Default is undefined, which disables throttling.
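
Read together, the three hunks above change how `UseChatHelpers` is consumed: `id`, `status`, `messages`, and `error` are now readonly, `metadata` and the deprecated `isLoading` flag are removed in favor of `status`, and `append` now takes a `CreateUIMessage` plus optional `ChatRequestOptions` and resolves to `void` instead of a string. A consumption sketch against the new surface; the parts-based message shape passed to `append` is an assumption about the `ai` v5 canary types, not something shown in this diff:

```ts
// Sketch of consuming the updated UseChatHelpers surface from this diff.
// Confirmed above: readonly id/status/messages/error and
// append(message: CreateUIMessage, options?: ChatRequestOptions) => Promise<void>.
// Assumed: the role + parts shape of the message object passed to append.
import { useChat } from '@ai-sdk/react';

export function useGreetingChat() {
  const { status, messages, error, append } = useChat();

  const sendGreeting = async () => {
    // gate on `status`, since `isLoading` no longer exists on the helpers
    if (status !== 'ready') return;
    // append now resolves to void (previously string | null | undefined)
    await append({ role: 'user', parts: [{ type: 'text', text: 'Hello!' }] });
  };

  return { status, messages, error, sendGreeting };
}
```
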
package/dist/index.d.ts CHANGED
@@ -1,20 +1,34 @@
- import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, JSONValue, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
+ import { UIMessage, CreateUIMessage, ChatRequestOptions, FileUIPart, UseChatOptions, CompletionRequestOptions, UseCompletionOptions, Schema, DeepPartial } from 'ai';
  export { CreateUIMessage, UIMessage, UseChatOptions, UseCompletionOptions } from 'ai';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import z from 'zod';
 
  type UseChatHelpers<MESSAGE_METADATA = unknown> = {
+ /**
+ * The id of the chat.
+ */
+ readonly id: string;
+ /**
+ * Hook status:
+ *
+ * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
+ * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
+ * - `ready`: The full response has been received and processed; a new user message can be submitted.
+ * - `error`: An error occurred during the API request, preventing successful completion.
+ */
+ readonly status: 'submitted' | 'streaming' | 'ready' | 'error';
  /** Current messages in the chat */
- messages: UIMessage<MESSAGE_METADATA>[];
+ readonly messages: UIMessage<MESSAGE_METADATA>[];
  /** The error object of the API request */
- error: undefined | Error;
+ readonly error: undefined | Error;
  /**
  * Append a user message to the chat list. This triggers the API call to fetch
  * the assistant's response.
+ *
  * @param message The message to append
  * @param options Additional options to pass to the API call
  */
- append: (message: UIMessage<MESSAGE_METADATA> | CreateUIMessage<MESSAGE_METADATA>, chatRequestOptions?: ChatRequestOptions) => Promise<string | null | undefined>;
+ append: (message: CreateUIMessage<MESSAGE_METADATA>, options?: ChatRequestOptions) => Promise<void>;
  /**
  * Reload the last AI chat response for the given chat history. If the last
  * message isn't from the assistant, it will request the API to generate a
@@ -47,45 +61,12 @@ type UseChatHelpers<MESSAGE_METADATA = unknown> = {
  }, chatRequestOptions?: ChatRequestOptions & {
  files?: FileList | FileUIPart[];
  }) => void;
- metadata?: Object;
- /**
- * Whether the API request is in progress
- *
- * @deprecated use `status` instead
- */
- isLoading: boolean;
- /**
- * Hook status:
- *
- * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
- * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
- * - `ready`: The full response has been received and processed; a new user message can be submitted.
- * - `error`: An error occurred during the API request, preventing successful completion.
- */
- status: 'submitted' | 'streaming' | 'ready' | 'error';
  addToolResult: ({ toolCallId, result, }: {
  toolCallId: string;
  result: any;
  }) => void;
- /** The id of the chat */
- id: string;
  };
- declare function useChat<MESSAGE_METADATA>({ api, id, initialMessages, initialInput, onToolCall, experimental_prepareRequestBody, maxSteps, streamProtocol, onResponse, onFinish, onError, credentials, headers, body, generateId, fetch, experimental_throttle: throttleWaitMs, messageMetadataSchema, }?: UseChatOptions<MESSAGE_METADATA> & {
- /**
- * Experimental (React only). When a function is provided, it will be used
- * to prepare the request body for the chat API. This can be useful for
- * customizing the request body based on the messages and data in the chat.
- *
- * @param messages The current messages in the chat.
- * @param requestData The data object passed in the chat request.
- * @param requestBody The request body object passed in the chat request.
- */
- experimental_prepareRequestBody?: (options: {
- id: string;
- messages: UIMessage[];
- requestData?: JSONValue;
- requestBody?: object;
- }) => unknown;
+ declare function useChat<MESSAGE_METADATA>({ id, initialInput, onToolCall, onFinish, onError, generateId, experimental_throttle: throttleWaitMs, chatStore: chatStoreArg, }?: UseChatOptions<MESSAGE_METADATA> & {
  /**
  Custom throttle wait in ms for the chat messages and data updates.
  Default is undefined, which disables throttling.
@@ -137,7 +118,7 @@ type UseCompletionHelpers = {
  /** Whether the API request is in progress */
  isLoading: boolean;
  };
- declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onResponse, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
+ declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, streamProtocol, fetch, onFinish, onError, experimental_throttle: throttleWaitMs, }?: UseCompletionOptions & {
  /**
  * Custom throttle wait in ms for the completion and data updates.
  * Default is undefined, which disables throttling.
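
For `useCompletion`, the only change in both entry points is that `onResponse` is no longer among the accepted options; everything else in the destructuring is unchanged. A minimal call that matches the new declaration, with the endpoint path and the `completion` helper used purely for illustration:

```ts
// Sketch of a useCompletion call against the 2.0.0-canary.22 declaration.
// `onResponse` is no longer an accepted option (compare the removed and added
// declarations above); the api path and the `completion` helper are illustrative.
import { useCompletion } from '@ai-sdk/react';

export function useHaikuCompletion() {
  const { completion, isLoading } = useCompletion({
    api: '/api/completion',
    // onResponse: response => { ... }, // no longer accepted in this version
    onError: error => console.error(error),
  });

  return { completion, isLoading };
}
```
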