ai 2.2.23 → 2.2.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -116,57 +116,134 @@ function readableFromAsyncIterable(iterable) {
  });
  }
 
- // shared/utils.ts
- import { customAlphabet } from "nanoid/non-secure";
- var nanoid = customAlphabet(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
- function createChunkDecoder(complex) {
- const decoder = new TextDecoder();
- if (!complex) {
- return function(chunk) {
- if (!chunk)
- return "";
- return decoder.decode(chunk, { stream: true });
+ // shared/stream-parts.ts
+ var textStreamPart = {
+ code: "0",
+ name: "text",
+ parse: (value) => {
+ if (typeof value !== "string") {
+ throw new Error('"text" parts expect a string value.');
+ }
+ return { type: "text", value };
+ }
+ };
+ var functionCallStreamPart = {
+ code: "1",
+ name: "function_call",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("function_call" in value) || typeof value.function_call !== "object" || value.function_call == null || !("name" in value.function_call) || !("arguments" in value.function_call) || typeof value.function_call.name !== "string" || typeof value.function_call.arguments !== "string") {
+ throw new Error(
+ '"function_call" parts expect an object with a "function_call" property.'
+ );
+ }
+ return {
+ type: "function_call",
+ value
  };
  }
- return function(chunk) {
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
- return decoded.map(getStreamStringTypeAndValue).filter(Boolean);
- };
- }
- var StreamStringPrefixes = {
- text: 0,
- function_call: 1,
- data: 2
- // user_err: 3?
  };
- var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
- var getStreamString = (type, value) => `${StreamStringPrefixes[type]}:${JSON.stringify(value)}
- `;
- var getStreamStringTypeAndValue = (line) => {
- const firstSeperatorIndex = line.indexOf(":");
- if (firstSeperatorIndex === -1) {
- throw new Error("Failed to parse stream string");
+ var dataStreamPart = {
+ code: "2",
+ name: "data",
+ parse: (value) => {
+ if (!Array.isArray(value)) {
+ throw new Error('"data" parts expect an array value.');
+ }
+ return { type: "data", value };
  }
- const prefix = line.slice(0, firstSeperatorIndex);
- const type = Object.keys(StreamStringPrefixes).find(
- (key) => StreamStringPrefixes[key] === Number(prefix)
- );
- const val = line.slice(firstSeperatorIndex + 1);
- let parsedVal = val;
- if (!val) {
- return { type, value: "" };
+ };
+ var errorStreamPart = {
+ code: "3",
+ name: "error",
+ parse: (value) => {
+ if (typeof value !== "string") {
+ throw new Error('"error" parts expect a string value.');
+ }
+ return { type: "error", value };
  }
- try {
- parsedVal = JSON.parse(val);
- } catch (e) {
- console.error("Failed to parse JSON value:", val);
+ };
+ var assistantMessage = {
+ code: "4",
+ name: "assistant_message",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("id" in value) || !("role" in value) || !("content" in value) || typeof value.id !== "string" || typeof value.role !== "string" || value.role !== "assistant" || !Array.isArray(value.content) || !value.content.every(
+ (item) => item != null && typeof item === "object" && "type" in item && item.type === "text" && "text" in item && item.text != null && typeof item.text === "object" && "value" in item.text && typeof item.text.value === "string"
+ )) {
+ throw new Error(
+ '"assistant_message" parts expect an object with an "id", "role", and "content" property.'
+ );
+ }
+ return {
+ type: "assistant_message",
+ value
+ };
  }
- return { type, value: parsedVal };
  };
- var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+ var assistantControlData = {
+ code: "5",
+ name: "assistant_control_data",
+ parse: (value) => {
+ if (value == null || typeof value !== "object" || !("threadId" in value) || !("messageId" in value) || typeof value.threadId !== "string" || typeof value.messageId !== "string") {
+ throw new Error(
+ '"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
+ );
+ }
+ return {
+ type: "assistant_control_data",
+ value: {
+ threadId: value.threadId,
+ messageId: value.messageId
+ }
+ };
+ }
+ };
+ var streamParts = [
+ textStreamPart,
+ functionCallStreamPart,
+ dataStreamPart,
+ errorStreamPart,
+ assistantMessage,
+ assistantControlData
+ ];
+ var streamPartsByCode = {
+ [textStreamPart.code]: textStreamPart,
+ [functionCallStreamPart.code]: functionCallStreamPart,
+ [dataStreamPart.code]: dataStreamPart,
+ [errorStreamPart.code]: errorStreamPart,
+ [assistantMessage.code]: assistantMessage,
+ [assistantControlData.code]: assistantControlData
+ };
+ var StreamStringPrefixes = {
+ [textStreamPart.name]: textStreamPart.code,
+ [functionCallStreamPart.name]: functionCallStreamPart.code,
+ [dataStreamPart.name]: dataStreamPart.code,
+ [errorStreamPart.name]: errorStreamPart.code,
+ [assistantMessage.name]: assistantMessage.code,
+ [assistantControlData.name]: assistantControlData.code
+ };
+ var validCodes = streamParts.map((part) => part.code);
+ var parseStreamPart = (line) => {
+ const firstSeparatorIndex = line.indexOf(":");
+ if (firstSeparatorIndex === -1) {
+ throw new Error("Failed to parse stream string. No separator found.");
+ }
+ const prefix = line.slice(0, firstSeparatorIndex);
+ if (!validCodes.includes(prefix)) {
+ throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
+ }
+ const code = prefix;
+ const textValue = line.slice(firstSeparatorIndex + 1);
+ const jsonValue = JSON.parse(textValue);
+ return streamPartsByCode[code].parse(jsonValue);
+ };
+ function formatStreamPart(type, value) {
+ const streamPart = streamParts.find((part) => part.name === type);
+ if (!streamPart) {
+ throw new Error(`Invalid stream part type: ${type}`);
+ }
+ return `${streamPart.code}:${JSON.stringify(value)}
+ `;
+ }
 
  // streams/stream-data.ts
  var experimental_StreamData = class {
@@ -191,7 +268,7 @@ var experimental_StreamData = class {
  transform: async (chunk, controller) => {
  if (self.data.length > 0) {
  const encodedData = self.encoder.encode(
- getStreamString("data", JSON.stringify(self.data))
+ formatStreamPart("data", self.data)
  );
  self.data = [];
  controller.enqueue(encodedData);
@@ -210,7 +287,7 @@ var experimental_StreamData = class {
  }
  if (self.data.length) {
  const encodedData = self.encoder.encode(
- getStreamString("data", JSON.stringify(self.data))
+ formatStreamPart("data", self.data)
  );
  controller.enqueue(encodedData);
  }
@@ -248,11 +325,76 @@ function createStreamDataTransformer(experimental_streamData) {
  return new TransformStream({
  transform: async (chunk, controller) => {
  const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode(getStreamString("text", message)));
+ controller.enqueue(encoder.encode(formatStreamPart("text", message)));
  }
  });
  }
 
+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
+ }
+ }
+ }
+ }
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
+ var _a, _b;
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+ }
+ );
+ }
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
+ }
+
+ // shared/utils.ts
+ import { customAlphabet } from "nanoid/non-secure";
+ var nanoid = customAlphabet(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder(complex) {
+ const decoder = new TextDecoder();
+ if (!complex) {
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
+ }
+ return function(chunk) {
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+ return decoded.map(parseStreamPart).filter(Boolean);
+ };
+ }
+ var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
  // streams/openai-stream.ts
  function parseOpenAIStream() {
  const extract = chunkToText();
@@ -356,7 +498,7 @@ function createFunctionCallTransformer(callbacks) {
  }
  if (!isFunctionStreamingIn) {
  controller.enqueue(
- isComplexMode ? textEncoder.encode(getStreamString("text", message)) : chunk
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
  );
  return;
  } else {
@@ -398,13 +540,17 @@ function createFunctionCallTransformer(callbacks) {
  if (!functionResponse) {
  controller.enqueue(
  textEncoder.encode(
- isComplexMode ? getStreamString("function_call", aggregatedResponse) : aggregatedResponse
+ isComplexMode ? formatStreamPart(
+ "function_call",
+ // parse to prevent double-encoding:
+ JSON.parse(aggregatedResponse)
+ ) : aggregatedResponse
  )
  );
  return;
  } else if (typeof functionResponse === "string") {
  controller.enqueue(
- isComplexMode ? textEncoder.encode(getStreamString("text", functionResponse)) : textEncoder.encode(functionResponse)
+ isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : textEncoder.encode(functionResponse)
  );
  return;
  }
@@ -669,18 +815,22 @@ async function ReplicateStream(res, cb, options) {
  );
  }
 
- // react/parse-complex-response.ts
+ // shared/parse-complex-response.ts
  async function parseComplexResponse({
  reader,
  abortControllerRef,
  update,
- onFinish
+ onFinish,
+ generateId = nanoid,
+ getCurrentDate = () => /* @__PURE__ */ new Date()
  }) {
+ const createdAt = getCurrentDate();
  const decode = createChunkDecoder(true);
- const createdAt = /* @__PURE__ */ new Date();
- const prefixMap = {};
+ const prefixMap = {
+ data: []
+ };
  const NEWLINE = "\n".charCodeAt(0);
- let chunks = [];
+ const chunks = [];
  let totalLength = 0;
  while (true) {
  const { value } = await reader.read();
@@ -717,7 +867,7 @@ async function parseComplexResponse({
  };
  } else {
  prefixMap["text"] = {
- id: nanoid(),
+ id: generateId(),
  role: "assistant",
  content: value2,
  createdAt
@@ -726,37 +876,24 @@ async function parseComplexResponse({
  }
  let functionCallMessage = null;
  if (type === "function_call") {
- prefixMap["function_call"] = value2;
- let functionCall = prefixMap["function_call"];
- if (functionCall && typeof functionCall === "string") {
- const parsedFunctionCall = JSON.parse(
- functionCall
- ).function_call;
- functionCallMessage = {
- id: nanoid(),
- role: "assistant",
- content: "",
- function_call: parsedFunctionCall,
- name: parsedFunctionCall.name,
- createdAt
- };
- prefixMap["function_call"] = functionCallMessage;
- }
+ prefixMap["function_call"] = {
+ id: generateId(),
+ role: "assistant",
+ content: "",
+ function_call: value2.function_call,
+ name: value2.function_call.name,
+ createdAt
+ };
+ functionCallMessage = prefixMap["function_call"];
  }
  if (type === "data") {
- const parsedValue = JSON.parse(value2);
- if (prefixMap["data"]) {
- prefixMap["data"] = [...prefixMap["data"], ...parsedValue];
- } else {
- prefixMap["data"] = parsedValue;
- }
+ prefixMap["data"].push(...value2);
  }
- const data = prefixMap["data"];
  const responseMessage = prefixMap["text"];
  const merged = [functionCallMessage, responseMessage].filter(
  Boolean
  );
- update(merged, data);
+ update(merged, [...prefixMap["data"]]);
  if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
  reader.cancel();
  break;
@@ -764,7 +901,12 @@ async function parseComplexResponse({
  }
  }
  onFinish == null ? void 0 : onFinish(prefixMap);
- return prefixMap;
+ return {
+ messages: [prefixMap.text, prefixMap.function_call].filter(
+ Boolean
+ ),
+ data: prefixMap.data
+ };
  }
 
  // streams/streaming-react-response.ts
@@ -839,8 +981,61 @@ var experimental_StreamingReactResponse = class {
  return next;
  }
  };
+
+ // streams/assistant-response.ts
+ function experimental_AssistantResponse({ threadId, messageId }, process2) {
+ const stream = new ReadableStream({
+ async start(controller) {
+ var _a;
+ const textEncoder = new TextEncoder();
+ const sendMessage = (message) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("assistant_message", message))
+ );
+ };
+ const sendError = (errorMessage) => {
+ controller.enqueue(
+ textEncoder.encode(formatStreamPart("error", errorMessage))
+ );
+ };
+ controller.enqueue(
+ textEncoder.encode(
+ formatStreamPart("assistant_control_data", {
+ threadId,
+ messageId
+ })
+ )
+ );
+ try {
+ await process2({
+ threadId,
+ messageId,
+ sendMessage
+ });
+ } catch (error) {
+ sendError((_a = error.message) != null ? _a : `${error}`);
+ } finally {
+ controller.close();
+ }
+ },
+ pull(controller) {
+ },
+ cancel() {
+ }
+ });
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8"
+ }
+ });
+ }
  export {
  AIStream,
+ AWSBedrockAnthropicStream,
+ AWSBedrockCohereStream,
+ AWSBedrockLlama2Stream,
+ AWSBedrockStream,
  AnthropicStream,
  COMPLEX_HEADER,
  CohereStream,
@@ -848,16 +1043,14 @@ export {
  LangChainStream,
  OpenAIStream,
  ReplicateStream,
- StreamStringPrefixes,
  StreamingTextResponse,
  createCallbacksTransformer,
  createChunkDecoder,
  createEventStreamTransformer,
  createStreamDataTransformer,
+ experimental_AssistantResponse,
  experimental_StreamData,
  experimental_StreamingReactResponse,
- getStreamString,
- getStreamStringTypeAndValue,
  isStreamStringEqualToType,
  nanoid,
  readableFromAsyncIterable,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.2.23",
+ "version": "2.2.25",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -64,6 +64,7 @@
  "swrv": "1.0.4"
  },
  "devDependencies": {
+ "@aws-sdk/client-bedrock-runtime": "3.451.0",
  "@edge-runtime/jest-environment": "1.1.0-beta.31",
  "@huggingface/inference": "2.6.4",
  "@testing-library/jest-dom": "^6.1.4",
@@ -82,8 +83,8 @@
  "ts-jest": "29.0.3",
  "tsup": "^6.7.0",
  "typescript": "5.1.3",
- "@vercel/ai-tsconfig": "0.0.0",
- "eslint-config-vercel-ai": "0.0.0"
+ "eslint-config-vercel-ai": "0.0.0",
+ "@vercel/ai-tsconfig": "0.0.0"
  },
  "peerDependencies": {
  "react": "^18.2.0",
@@ -33,6 +33,13 @@ interface Message {
  function_call?: string | FunctionCall;
  }
 
+ /**
+ * A prompt constructor for Anthropic models.
+ * Does not support `function` messages.
+ * @see https://docs.anthropic.com/claude/reference/getting-started-with-the-api
+ */
+ declare function experimental_buildAnthropicPrompt(messages: Pick<Message, 'content' | 'role'>[]): string;
+
  /**
  * A prompt constructor for the HuggingFace StarChat Beta model.
  * Does not support `function` messages.
@@ -52,4 +59,4 @@ declare function experimental_buildOpenAssistantPrompt(messages: Pick<Message, '
  */
  declare function experimental_buildLlama2Prompt(messages: Pick<Message, 'content' | 'role'>[]): string;
 
- export { experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
+ export { experimental_buildAnthropicPrompt, experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
@@ -20,12 +20,28 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // prompts/index.ts
  var prompts_exports = {};
  __export(prompts_exports, {
+ experimental_buildAnthropicPrompt: () => experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt: () => experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt: () => experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt: () => experimental_buildStarChatBetaPrompt
  });
  module.exports = __toCommonJS(prompts_exports);
 
+ // prompts/anthropic.ts
+ function experimental_buildAnthropicPrompt(messages) {
+ return messages.map(({ content, role }) => {
+ if (role === "user") {
+ return `
+
+ Human: ${content}`;
+ } else {
+ return `
+
+ Assistant: ${content}`;
+ }
+ }) + "\n\nAssistant:";
+ }
+
  // prompts/huggingface.ts
  function experimental_buildStarChatBetaPrompt(messages) {
  return messages.map(({ content, role }) => {
@@ -83,6 +99,7 @@ ${content}
  }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
@@ -1,3 +1,18 @@
+ // prompts/anthropic.ts
+ function experimental_buildAnthropicPrompt(messages) {
+ return messages.map(({ content, role }) => {
+ if (role === "user") {
+ return `
+
+ Human: ${content}`;
+ } else {
+ return `
+
+ Assistant: ${content}`;
+ }
+ }) + "\n\nAssistant:";
+ }
+
  // prompts/huggingface.ts
  function experimental_buildStarChatBetaPrompt(messages) {
  return messages.map(({ content, role }) => {
@@ -54,6 +69,7 @@ ${content}
  return startPrompt + conversation.join("") + endPrompt;
  }
  export {
+ experimental_buildAnthropicPrompt,
  experimental_buildLlama2Prompt,
  experimental_buildOpenAssistantPrompt,
  experimental_buildStarChatBetaPrompt
@@ -283,7 +283,7 @@ type UseChatHelpers = {
  /** Whether the API request is in progress */
  isLoading: boolean;
  /** Additional data added on the server via StreamData */
- data?: any;
+ data?: JSONValue[] | undefined;
  };
  type StreamingReactResponseAction = (payload: {
  messages: Message[];
@@ -337,7 +337,21 @@ type UseCompletionHelpers = {
  };
  declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
- export { CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, useChat, useCompletion };
+ type AssistantStatus = 'in_progress' | 'awaiting_message';
+ declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
+ api: string;
+ threadId?: string | undefined;
+ }): {
+ messages: Message[];
+ threadId: string | undefined;
+ input: string;
+ handleInputChange: (e: any) => void;
+ submitMessage: (e: any) => Promise<void>;
+ status: AssistantStatus;
+ error: unknown;
+ };
+
+ export { AssistantStatus, CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
  import * as react_jsx_runtime from 'react/jsx-runtime';
 
  type Props = {