@reverbia/sdk 1.0.0-next.20251124100226 → 1.0.0-next.20251125084024

This diff shows the changes between publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
@@ -81,6 +81,13 @@ type LlmapiRole = string;
 type SendMessageArgs = {
     messages: LlmapiMessage[];
     model: string;
+    /**
+     * Per-request callback for data chunks. Called in addition to the global
+     * `onData` callback if provided in `useChat` options.
+     *
+     * @param chunk - The content delta from the current chunk
+     */
+    onData?: (chunk: string) => void;
 };
 type SendMessageResult = {
     data: LlmapiChatCompletionResponse;
@@ -91,32 +98,72 @@ type SendMessageResult = {
 };
 type UseChatOptions = {
     getToken?: () => Promise<string | null>;
+    /**
+     * Callback function to be called when a new data chunk is received.
+     */
+    onData?: (chunk: string) => void;
+    /**
+     * Callback function to be called when the chat completion finishes successfully.
+     */
+    onFinish?: (response: LlmapiChatCompletionResponse) => void;
+    /**
+     * Callback function to be called when an unexpected error is encountered.
+     *
+     * **Note:** This callback is NOT called for aborted requests (via `stop()` or
+     * component unmount). Aborts are intentional actions and are not considered
+     * errors. To detect aborts, check the `error` field in the `sendMessage` result:
+     * `result.error === "Request aborted"`.
+     *
+     * @param error - The error that occurred (never an AbortError)
+     */
+    onError?: (error: Error) => void;
 };
 type UseChatResult = {
     isLoading: boolean;
     sendMessage: (args: SendMessageArgs) => Promise<SendMessageResult>;
+    /**
+     * Aborts the current streaming request if one is in progress.
+     *
+     * When a request is aborted, `sendMessage` will return with
+     * `{ data: null, error: "Request aborted" }`. The `onError` callback
+     * will NOT be called, as aborts are intentional actions, not errors.
+     */
+    stop: () => void;
 };
 /**
  * A React hook for managing chat completions with authentication.
  *
  * This hook provides a convenient way to send chat messages to the LLM API
  * with automatic token management and loading state handling.
+ * Streaming is enabled by default for better user experience.
  *
  * @param options - Optional configuration object
  * @param options.getToken - An async function that returns an authentication token.
  * This token will be used as a Bearer token in the Authorization header.
  * If not provided, `sendMessage` will return an error.
+ * @param options.onData - Callback function to be called when a new data chunk is received.
+ * @param options.onFinish - Callback function to be called when the chat completion finishes successfully.
+ * @param options.onError - Callback function to be called when an unexpected error
+ * is encountered. Note: This is NOT called for aborted requests (see `stop()`).
  *
  * @returns An object containing:
  * - `isLoading`: A boolean indicating whether a request is currently in progress
  * - `sendMessage`: An async function to send chat messages
+ * - `stop`: A function to abort the current request
  *
  * @example
  * ```tsx
- * const { isLoading, sendMessage } = useChat({
+ * const { isLoading, sendMessage, stop } = useChat({
  *   getToken: async () => {
  *     // Get your auth token from your auth provider
  *     return await getAuthToken();
+ *   },
+ *   onFinish: (response) => {
+ *     console.log("Chat finished:", response);
+ *   },
+ *   onError: (error) => {
+ *     // This is only called for unexpected errors, not aborts
+ *     console.error("Chat error:", error);
  *   }
  * });
  *
@@ -127,11 +174,18 @@ type UseChatResult = {
  *   });
  *
  *   if (result.error) {
- *     console.error(result.error);
+ *     if (result.error === "Request aborted") {
+ *       console.log("Request was aborted");
+ *     } else {
+ *       console.error("Error:", result.error);
+ *     }
  *   } else {
- *     console.log(result.data);
+ *     console.log("Success:", result.data);
  *   }
  * };
+ *
+ * // To stop generation:
+ * // stop();
  * ```
  */
 declare function useChat(options?: UseChatOptions): UseChatResult;
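
Taken together, the new surface adds streaming callbacks (`onData` at both the hook level and per request, `onFinish`, `onError`) and an abort path (`stop()`, with aborts reported via the result's `error` field rather than `onError`). The sketch below exercises those pieces together. It is a minimal usage sketch based only on the declarations above; the import path, the `getAuthToken` helper, the `{ role, content }` message shape, and the model id are assumptions for illustration, not confirmed package details.

```tsx
// Minimal sketch of the new streaming/abort surface. Assumptions: `useChat` is
// exported from the package root, `getAuthToken` is your own auth helper, and
// LlmapiMessage is a { role, content } record; adjust to the real types.
import { useState } from "react";
import { useChat } from "@reverbia/sdk";

declare function getAuthToken(): Promise<string | null>; // stand-in for your auth provider

export function ChatBox() {
  const [reply, setReply] = useState("");
  const { isLoading, sendMessage, stop } = useChat({
    getToken: () => getAuthToken(),
    // Hook-level callback: fires for every chunk of every request.
    onData: (chunk) => console.debug("global chunk:", chunk),
    // Not invoked for aborts, only for unexpected failures.
    onError: (error) => console.error("chat error:", error),
  });

  const handleSend = async () => {
    setReply("");
    const result = await sendMessage({
      model: "example-model", // placeholder model id
      messages: [{ role: "user", content: "Hello" }], // assumed message shape
      // Per-request callback: called in addition to the global onData above.
      onData: (chunk) => setReply((prev) => prev + chunk),
    });

    // Aborts surface as a plain error string in the result, not via onError.
    if (result.error === "Request aborted") {
      console.log("generation stopped by the user");
    } else if (result.error) {
      console.error(result.error);
    }
  };

  return (
    <div>
      <p>{reply}</p>
      <button onClick={handleSend} disabled={isLoading}>Send</button>
      {/* stop() aborts the in-flight stream; sendMessage then resolves with the abort error. */}
      <button onClick={stop} disabled={!isLoading}>Stop</button>
    </div>
  );
}
```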