ai 2.2.24 → 2.2.25

This diff shows the changes between publicly released versions of the package as published to the supported registries. It is provided for informational purposes only.
package/dist/index.d.ts CHANGED
@@ -461,6 +461,18 @@ declare function AIStream(response: Response, customParser?: AIStreamParser, cal
  */
 declare function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>): ReadableStream<T>;

+ interface AWSBedrockResponse {
+ body?: AsyncIterable<{
+ chunk?: {
+ bytes?: Uint8Array;
+ };
+ }>;
+ }
+ declare function AWSBedrockAnthropicStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockCohereStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockLlama2Stream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+ declare function AWSBedrockStream(response: AWSBedrockResponse, callbacks: AIStreamCallbacksAndOptions | undefined, extractTextDeltaFromChunk: (chunk: any) => string): ReadableStream<any>;
+
 /**
  * A stream wrapper to send custom JSON-encoded data back to the client.
  */
@@ -692,4 +704,4 @@ declare function experimental_AssistantResponse({ threadId, messageId }: {
 sendMessage: (message: AssistantMessage) => void;
 }) => Promise<void>): Response;

- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
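The new AWSBedrock*Stream helpers accept the response object returned by a streaming Bedrock invocation (its body is an AsyncIterable of { chunk: { bytes } }, matching the AWSBedrockResponse interface above) and adapt it into a ReadableStream for StreamingTextResponse. A minimal sketch of how this is typically wired up, assuming a route handler and @aws-sdk/client-bedrock-runtime (added to devDependencies later in this diff); the model id, request body shape, and route path are illustrative, not part of this diff:

// app/api/chat/route.ts (illustrative)
import {
  BedrockRuntimeClient,
  InvokeModelWithResponseStreamCommand,
} from '@aws-sdk/client-bedrock-runtime';
import { AWSBedrockAnthropicStream, StreamingTextResponse } from 'ai';
import { experimental_buildAnthropicPrompt } from 'ai/prompts';

const bedrock = new BedrockRuntimeClient({ region: process.env.AWS_REGION });

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Claude on Bedrock streams JSON chunks whose `completion` field carries the
  // text delta, which is exactly what AWSBedrockAnthropicStream extracts.
  const bedrockResponse = await bedrock.send(
    new InvokeModelWithResponseStreamCommand({
      modelId: 'anthropic.claude-v2',
      contentType: 'application/json',
      accept: 'application/json',
      body: JSON.stringify({
        prompt: experimental_buildAnthropicPrompt(messages),
        max_tokens_to_sample: 300,
      }),
    }),
  );

  return new StreamingTextResponse(AWSBedrockAnthropicStream(bedrockResponse));
}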
package/dist/index.js CHANGED
@@ -21,6 +21,10 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var streams_exports = {};
 __export(streams_exports, {
 AIStream: () => AIStream,
+ AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
+ AWSBedrockCohereStream: () => AWSBedrockCohereStream,
+ AWSBedrockLlama2Stream: () => AWSBedrockLlama2Stream,
+ AWSBedrockStream: () => AWSBedrockStream,
 AnthropicStream: () => AnthropicStream,
 COMPLEX_HEADER: () => COMPLEX_HEADER,
 CohereStream: () => CohereStream,
@@ -289,29 +293,6 @@ function formatStreamPart(type, value) {
 `;
 }

- // shared/utils.ts
- var import_non_secure = require("nanoid/non-secure");
- var nanoid = (0, import_non_secure.customAlphabet)(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
- function createChunkDecoder(complex) {
- const decoder = new TextDecoder();
- if (!complex) {
- return function(chunk) {
- if (!chunk)
- return "";
- return decoder.decode(chunk, { stream: true });
- };
- }
- return function(chunk) {
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
- return decoded.map(parseStreamPart).filter(Boolean);
- };
- }
- var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
- var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
 constructor() {
@@ -397,6 +378,71 @@ function createStreamDataTransformer(experimental_streamData) {
 });
 }

+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
+ }
+ }
+ }
+ }
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
+ var _a, _b;
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+ }
+ );
+ }
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
+ }
+
+ // shared/utils.ts
+ var import_non_secure = require("nanoid/non-secure");
+ var nanoid = (0, import_non_secure.customAlphabet)(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder(complex) {
+ const decoder = new TextDecoder();
+ if (!complex) {
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
+ }
+ return function(chunk) {
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+ return decoded.map(parseStreamPart).filter(Boolean);
+ };
+ }
+ var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
 const extract = chunkToText();
@@ -1035,6 +1081,10 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
 AIStream,
+ AWSBedrockAnthropicStream,
+ AWSBedrockCohereStream,
+ AWSBedrockLlama2Stream,
+ AWSBedrockStream,
 AnthropicStream,
 COMPLEX_HEADER,
 CohereStream,
package/dist/index.mjs CHANGED
@@ -245,29 +245,6 @@ function formatStreamPart(type, value) {
 `;
 }

- // shared/utils.ts
- import { customAlphabet } from "nanoid/non-secure";
- var nanoid = customAlphabet(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
- function createChunkDecoder(complex) {
- const decoder = new TextDecoder();
- if (!complex) {
- return function(chunk) {
- if (!chunk)
- return "";
- return decoder.decode(chunk, { stream: true });
- };
- }
- return function(chunk) {
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
- return decoded.map(parseStreamPart).filter(Boolean);
- };
- }
- var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
- var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
 constructor() {
@@ -353,6 +330,71 @@ function createStreamDataTransformer(experimental_streamData) {
 });
 }

+ // streams/aws-bedrock-stream.ts
+ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+ var _a, _b;
+ const decoder = new TextDecoder();
+ for await (const chunk of (_a = response.body) != null ? _a : []) {
+ const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+ if (bytes != null) {
+ const chunkText = decoder.decode(bytes);
+ const chunkJSON = JSON.parse(chunkText);
+ const delta = extractTextDeltaFromChunk(chunkJSON);
+ if (delta != null) {
+ yield delta;
+ }
+ }
+ }
+ }
+ function AWSBedrockAnthropicStream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+ }
+ function AWSBedrockCohereStream(response, callbacks) {
+ return AWSBedrockStream(
+ response,
+ callbacks,
+ // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+ // so we take the full generation:
+ (chunk) => {
+ var _a, _b;
+ return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+ }
+ );
+ }
+ function AWSBedrockLlama2Stream(response, callbacks) {
+ return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+ }
+ function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+ return readableFromAsyncIterable(
+ asDeltaIterable(response, extractTextDeltaFromChunk)
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+ );
+ }
+
+ // shared/utils.ts
+ import { customAlphabet } from "nanoid/non-secure";
+ var nanoid = customAlphabet(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 7
+ );
+ function createChunkDecoder(complex) {
+ const decoder = new TextDecoder();
+ if (!complex) {
+ return function(chunk) {
+ if (!chunk)
+ return "";
+ return decoder.decode(chunk, { stream: true });
+ };
+ }
+ return function(chunk) {
+ const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+ return decoded.map(parseStreamPart).filter(Boolean);
+ };
+ }
+ var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
 const extract = chunkToText();
@@ -990,6 +1032,10 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
 }
 export {
 AIStream,
+ AWSBedrockAnthropicStream,
+ AWSBedrockCohereStream,
+ AWSBedrockLlama2Stream,
+ AWSBedrockStream,
 AnthropicStream,
 COMPLEX_HEADER,
 CohereStream,
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "ai",
- "version": "2.2.24",
+ "version": "2.2.25",
 "license": "Apache-2.0",
 "sideEffects": false,
 "main": "./dist/index.js",
@@ -64,6 +64,7 @@
 "swrv": "1.0.4"
 },
 "devDependencies": {
+ "@aws-sdk/client-bedrock-runtime": "3.451.0",
 "@edge-runtime/jest-environment": "1.1.0-beta.31",
 "@huggingface/inference": "2.6.4",
 "@testing-library/jest-dom": "^6.1.4",
@@ -82,8 +83,8 @@
 "ts-jest": "29.0.3",
 "tsup": "^6.7.0",
 "typescript": "5.1.3",
- "@vercel/ai-tsconfig": "0.0.0",
- "eslint-config-vercel-ai": "0.0.0"
+ "eslint-config-vercel-ai": "0.0.0",
+ "@vercel/ai-tsconfig": "0.0.0"
 },
 "peerDependencies": {
 "react": "^18.2.0",
@@ -33,6 +33,13 @@ interface Message {
 function_call?: string | FunctionCall;
 }

+ /**
+ * A prompt constructor for Anthropic models.
+ * Does not support `function` messages.
+ * @see https://docs.anthropic.com/claude/reference/getting-started-with-the-api
+ */
+ declare function experimental_buildAnthropicPrompt(messages: Pick<Message, 'content' | 'role'>[]): string;
+
 /**
  * A prompt constructor for the HuggingFace StarChat Beta model.
  * Does not support `function` messages.
@@ -52,4 +59,4 @@ declare function experimental_buildOpenAssistantPrompt(messages: Pick<Message, '
  */
 declare function experimental_buildLlama2Prompt(messages: Pick<Message, 'content' | 'role'>[]): string;

- export { experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
+ export { experimental_buildAnthropicPrompt, experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
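experimental_buildAnthropicPrompt turns an array of { role, content } messages into Anthropic's Human/Assistant turn format and appends a trailing "\n\nAssistant:" so the model produces the next reply. A small usage sketch, assuming the ai/prompts entry point; the message contents are illustrative:

import { experimental_buildAnthropicPrompt } from 'ai/prompts';

const prompt = experimental_buildAnthropicPrompt([
  { role: 'user', content: 'Hello, Claude.' },
  { role: 'assistant', content: 'Hello! How can I help?' },
  { role: 'user', content: 'Summarize the 2.2.25 changes.' },
]);

// `prompt` can now be sent as the `prompt` field of an Anthropic-compatible
// request body, e.g. the Bedrock invocation sketched earlier in this diff.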
@@ -20,12 +20,28 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // prompts/index.ts
 var prompts_exports = {};
 __export(prompts_exports, {
+ experimental_buildAnthropicPrompt: () => experimental_buildAnthropicPrompt,
 experimental_buildLlama2Prompt: () => experimental_buildLlama2Prompt,
 experimental_buildOpenAssistantPrompt: () => experimental_buildOpenAssistantPrompt,
 experimental_buildStarChatBetaPrompt: () => experimental_buildStarChatBetaPrompt
 });
 module.exports = __toCommonJS(prompts_exports);

+ // prompts/anthropic.ts
+ function experimental_buildAnthropicPrompt(messages) {
+ return messages.map(({ content, role }) => {
+ if (role === "user") {
+ return `
+
+ Human: ${content}`;
+ } else {
+ return `
+
+ Assistant: ${content}`;
+ }
+ }) + "\n\nAssistant:";
+ }
+
 // prompts/huggingface.ts
 function experimental_buildStarChatBetaPrompt(messages) {
 return messages.map(({ content, role }) => {
@@ -83,6 +99,7 @@ ${content}
 }
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+ experimental_buildAnthropicPrompt,
 experimental_buildLlama2Prompt,
 experimental_buildOpenAssistantPrompt,
 experimental_buildStarChatBetaPrompt
@@ -1,3 +1,18 @@
+ // prompts/anthropic.ts
+ function experimental_buildAnthropicPrompt(messages) {
+ return messages.map(({ content, role }) => {
+ if (role === "user") {
+ return `
+
+ Human: ${content}`;
+ } else {
+ return `
+
+ Assistant: ${content}`;
+ }
+ }) + "\n\nAssistant:";
+ }
+
 // prompts/huggingface.ts
 function experimental_buildStarChatBetaPrompt(messages) {
 return messages.map(({ content, role }) => {
@@ -54,6 +69,7 @@ ${content}
 return startPrompt + conversation.join("") + endPrompt;
 }
 export {
+ experimental_buildAnthropicPrompt,
 experimental_buildLlama2Prompt,
 experimental_buildOpenAssistantPrompt,
 experimental_buildStarChatBetaPrompt
@@ -343,6 +343,7 @@ declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
 threadId?: string | undefined;
 }): {
 messages: Message[];
+ threadId: string | undefined;
 input: string;
 handleInputChange: (e: any) => void;
 submitMessage: (e: any) => Promise<void>;
@@ -973,6 +973,7 @@ function experimental_useAssistant({
 };
 return {
 messages,
+ threadId,
 input,
 handleInputChange,
 submitMessage,
@@ -936,6 +936,7 @@ function experimental_useAssistant({
 };
 return {
 messages,
+ threadId,
 input,
 handleInputChange,
 submitMessage,
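experimental_useAssistant now also returns the threadId of the underlying assistant thread, so the client can persist it and resume the same conversation later. A hedged sketch of reading it from the React hook; the ai/react entry point, component shape, and /api/assistant route path are assumptions for illustration:

// components/assistant.tsx (illustrative)
'use client';

import { experimental_useAssistant as useAssistant } from 'ai/react';

export function Assistant() {
  const { messages, input, handleInputChange, submitMessage, threadId } =
    useAssistant({ api: '/api/assistant' });

  return (
    <form onSubmit={submitMessage}>
      {/* threadId stays undefined until the server creates or reports a thread. */}
      <p>Thread: {threadId ?? 'new thread'}</p>
      {messages.map((m) => (
        <p key={m.id}>
          {m.role}: {m.content}
        </p>
      ))}
      <input value={input} onChange={handleInputChange} />
    </form>
  );
}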
@@ -201,6 +201,9 @@ type UseCompletionOptions = {
  */
 body?: object;
 };
+ type JSONValue = null | string | number | boolean | {
+ [x: string]: JSONValue;
+ } | Array<JSONValue>;

 type UseChatHelpers = {
 /** Current messages in the chat */
@@ -237,6 +240,8 @@ type UseChatHelpers = {
 metadata?: Object;
 /** Whether the API request is in progress */
 isLoading: Readable<boolean | undefined>;
+ /** Additional data added on the server via StreamData */
+ data: Readable<JSONValue[] | undefined>;
 };
 declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: UseChatOptions): UseChatHelpers;

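On the Svelte side, useChat now exposes an additional data store: a Readable<JSONValue[] | undefined> filled with whatever the server appends via experimental_StreamData. A minimal sketch of reading it, assuming the ai/svelte entry point; inside a component the store would normally be read reactively as $data:

import { get } from 'svelte/store';
import { useChat } from 'ai/svelte';

const { messages, input, handleSubmit, isLoading, data } = useChat();

// Snapshot of the server-sent stream data; undefined until the server
// actually appends something alongside the text stream.
const streamData = get(data);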
@@ -25,9 +25,6 @@ __export(svelte_exports, {
25
25
  });
26
26
  module.exports = __toCommonJS(svelte_exports);
27
27
 
28
- // svelte/use-chat.ts
29
- var import_store = require("svelte/store");
30
-
31
28
  // ../../node_modules/.pnpm/swrev@4.0.0/node_modules/swrev/dist/swrev.mjs
32
29
  var P = Object.defineProperty;
33
30
  var F = (r, e, t) => e in r ? P(r, e, { enumerable: true, configurable: true, writable: true, value: t }) : r[e] = t;
@@ -525,6 +522,12 @@ var W = (t) => new O2(t);
525
522
  var c = W();
526
523
  var F2 = (t, e) => c.useSWR(t, e);
527
524
 
525
+ // svelte/use-chat.ts
526
+ var import_store = require("svelte/store");
527
+
528
+ // shared/call-api.ts
529
+ var import_nanoid = require("nanoid");
530
+
528
531
  // shared/utils.ts
529
532
  var import_non_secure = require("nanoid/non-secure");
530
533
 
@@ -668,103 +671,291 @@ function createChunkDecoder(complex) {
668
671
  return decoded.map(parseStreamPart).filter(Boolean);
669
672
  };
670
673
  }
674
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
671
675
 
672
- // svelte/use-chat.ts
673
- var getStreamedResponse = async (api, chatRequest, mutate, extraMetadata, previousMessages, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
674
- var _a, _b, _c, _d;
675
- mutate(chatRequest.messages);
676
- const res = await fetch(api, {
676
+ // shared/parse-complex-response.ts
677
+ async function parseComplexResponse({
678
+ reader,
679
+ abortControllerRef,
680
+ update,
681
+ onFinish,
682
+ generateId = nanoid,
683
+ getCurrentDate = () => /* @__PURE__ */ new Date()
684
+ }) {
685
+ const createdAt = getCurrentDate();
686
+ const decode = createChunkDecoder(true);
687
+ const prefixMap = {
688
+ data: []
689
+ };
690
+ const NEWLINE = "\n".charCodeAt(0);
691
+ const chunks = [];
692
+ let totalLength = 0;
693
+ while (true) {
694
+ const { value } = await reader.read();
695
+ if (value) {
696
+ chunks.push(value);
697
+ totalLength += value.length;
698
+ if (value[value.length - 1] !== NEWLINE) {
699
+ continue;
700
+ }
701
+ }
702
+ if (chunks.length === 0) {
703
+ break;
704
+ }
705
+ let concatenatedChunks = new Uint8Array(totalLength);
706
+ let offset = 0;
707
+ for (const chunk of chunks) {
708
+ concatenatedChunks.set(chunk, offset);
709
+ offset += chunk.length;
710
+ }
711
+ chunks.length = 0;
712
+ totalLength = 0;
713
+ const lines = decode(concatenatedChunks);
714
+ if (typeof lines === "string") {
715
+ throw new Error(
716
+ "Invalid response format. Complex mode was set but the response is a string. This should never happen."
717
+ );
718
+ }
719
+ for (const { type, value: value2 } of lines) {
720
+ if (type === "text") {
721
+ if (prefixMap["text"]) {
722
+ prefixMap["text"] = {
723
+ ...prefixMap["text"],
724
+ content: (prefixMap["text"].content || "") + value2
725
+ };
726
+ } else {
727
+ prefixMap["text"] = {
728
+ id: generateId(),
729
+ role: "assistant",
730
+ content: value2,
731
+ createdAt
732
+ };
733
+ }
734
+ }
735
+ let functionCallMessage = null;
736
+ if (type === "function_call") {
737
+ prefixMap["function_call"] = {
738
+ id: generateId(),
739
+ role: "assistant",
740
+ content: "",
741
+ function_call: value2.function_call,
742
+ name: value2.function_call.name,
743
+ createdAt
744
+ };
745
+ functionCallMessage = prefixMap["function_call"];
746
+ }
747
+ if (type === "data") {
748
+ prefixMap["data"].push(...value2);
749
+ }
750
+ const responseMessage = prefixMap["text"];
751
+ const merged = [functionCallMessage, responseMessage].filter(
752
+ Boolean
753
+ );
754
+ update(merged, [...prefixMap["data"]]);
755
+ if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
756
+ reader.cancel();
757
+ break;
758
+ }
759
+ }
760
+ }
761
+ onFinish == null ? void 0 : onFinish(prefixMap);
762
+ return {
763
+ messages: [prefixMap.text, prefixMap.function_call].filter(
764
+ Boolean
765
+ ),
766
+ data: prefixMap.data
767
+ };
768
+ }
769
+
770
+ // shared/call-api.ts
771
+ async function callApi({
772
+ api,
773
+ messages,
774
+ body,
775
+ credentials,
776
+ headers,
777
+ abortController,
778
+ appendMessage,
779
+ restoreMessagesOnFailure,
780
+ onResponse,
781
+ onUpdate,
782
+ onFinish
783
+ }) {
784
+ var _a;
785
+ const response = await fetch(api, {
677
786
  method: "POST",
678
787
  body: JSON.stringify({
679
- messages: sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(
680
- ({ role, content, name, function_call }) => ({
681
- role,
682
- content,
683
- ...name !== void 0 && { name },
684
- ...function_call !== void 0 && {
685
- function_call
686
- }
687
- })
688
- ),
689
- ...extraMetadata.body,
690
- ...(_a = chatRequest.options) == null ? void 0 : _a.body,
691
- ...chatRequest.functions !== void 0 && {
692
- functions: chatRequest.functions
693
- },
694
- ...chatRequest.function_call !== void 0 && {
695
- function_call: chatRequest.function_call
696
- }
788
+ messages,
789
+ ...body
697
790
  }),
698
- credentials: extraMetadata.credentials,
699
- headers: {
700
- ...extraMetadata.headers,
701
- ...(_b = chatRequest.options) == null ? void 0 : _b.headers
702
- },
703
- ...abortControllerRef !== null && {
704
- signal: abortControllerRef.signal
705
- }
791
+ headers,
792
+ signal: (_a = abortController == null ? void 0 : abortController()) == null ? void 0 : _a.signal,
793
+ credentials
706
794
  }).catch((err) => {
707
- mutate(previousMessages);
795
+ restoreMessagesOnFailure();
708
796
  throw err;
709
797
  });
710
798
  if (onResponse) {
711
799
  try {
712
- await onResponse(res);
800
+ await onResponse(response);
713
801
  } catch (err) {
714
802
  throw err;
715
803
  }
716
804
  }
717
- if (!res.ok) {
718
- mutate(previousMessages);
719
- throw new Error(await res.text() || "Failed to fetch the chat response.");
805
+ if (!response.ok) {
806
+ restoreMessagesOnFailure();
807
+ throw new Error(
808
+ await response.text() || "Failed to fetch the chat response."
809
+ );
720
810
  }
721
- if (!res.body) {
811
+ if (!response.body) {
722
812
  throw new Error("The response body is empty.");
723
813
  }
724
- let streamedResponse = "";
725
- const createdAt = /* @__PURE__ */ new Date();
726
- const replyId = nanoid();
727
- const reader = res.body.getReader();
728
- const decode = createChunkDecoder();
729
- let responseMessage = {
730
- id: replyId,
731
- createdAt,
732
- content: "",
733
- role: "assistant"
734
- };
735
- while (true) {
736
- const { done, value } = await reader.read();
737
- if (done) {
738
- break;
814
+ const reader = response.body.getReader();
815
+ const isComplexMode = response.headers.get(COMPLEX_HEADER) === "true";
816
+ if (isComplexMode) {
817
+ return await parseComplexResponse({
818
+ reader,
819
+ abortControllerRef: abortController != null ? { current: abortController() } : void 0,
820
+ update: onUpdate,
821
+ onFinish(prefixMap) {
822
+ if (onFinish && prefixMap.text != null) {
823
+ onFinish(prefixMap.text);
824
+ }
825
+ }
826
+ });
827
+ } else {
828
+ const createdAt = /* @__PURE__ */ new Date();
829
+ const decode = createChunkDecoder(false);
830
+ let streamedResponse = "";
831
+ const replyId = (0, import_nanoid.nanoid)();
832
+ let responseMessage = {
833
+ id: replyId,
834
+ createdAt,
835
+ content: "",
836
+ role: "assistant"
837
+ };
838
+ while (true) {
839
+ const { done, value } = await reader.read();
840
+ if (done) {
841
+ break;
842
+ }
843
+ streamedResponse += decode(value);
844
+ if (streamedResponse.startsWith('{"function_call":')) {
845
+ responseMessage["function_call"] = streamedResponse;
846
+ } else {
847
+ responseMessage["content"] = streamedResponse;
848
+ }
849
+ appendMessage({ ...responseMessage });
850
+ if ((abortController == null ? void 0 : abortController()) === null) {
851
+ reader.cancel();
852
+ break;
853
+ }
739
854
  }
740
- streamedResponse += decode(value);
741
- const functionStart = streamedResponse.indexOf("{");
742
- if (functionStart !== -1) {
743
- const matches = /(.*?)(?:({"function_call".*?}})(.*))?$/gs.exec(
744
- streamedResponse
745
- );
746
- responseMessage.content = `${(_c = matches == null ? void 0 : matches[1]) != null ? _c : ""}${(_d = matches == null ? void 0 : matches[3]) != null ? _d : ""}`;
747
- responseMessage.function_call = matches == null ? void 0 : matches[2];
748
- } else {
749
- responseMessage.content = streamedResponse;
855
+ if (streamedResponse.startsWith('{"function_call":')) {
856
+ const parsedFunctionCall = JSON.parse(streamedResponse).function_call;
857
+ responseMessage["function_call"] = parsedFunctionCall;
858
+ appendMessage({ ...responseMessage });
750
859
  }
751
- mutate([...chatRequest.messages, { ...responseMessage }]);
752
- if (abortControllerRef === null) {
753
- reader.cancel();
754
- break;
860
+ if (onFinish) {
861
+ onFinish(responseMessage);
755
862
  }
863
+ return responseMessage;
756
864
  }
757
- if (typeof responseMessage.function_call === "string") {
758
- const parsedFunctionCall = JSON.parse(
759
- responseMessage.function_call
760
- ).function_call;
761
- responseMessage.function_call = parsedFunctionCall;
762
- mutate([...chatRequest.messages, { ...responseMessage }]);
763
- }
764
- if (onFinish) {
765
- onFinish(responseMessage);
865
+ }
866
+
867
+ // shared/process-chat-stream.ts
868
+ async function processChatStream({
869
+ getStreamedResponse: getStreamedResponse2,
870
+ experimental_onFunctionCall,
871
+ updateChatRequest,
872
+ getCurrentMessages
873
+ }) {
874
+ while (true) {
875
+ const messagesAndDataOrJustMessage = await getStreamedResponse2();
876
+ if ("messages" in messagesAndDataOrJustMessage) {
877
+ let hasFollowingResponse = false;
878
+ for (const message of messagesAndDataOrJustMessage.messages) {
879
+ if (message.function_call === void 0 || typeof message.function_call === "string") {
880
+ continue;
881
+ }
882
+ hasFollowingResponse = true;
883
+ if (experimental_onFunctionCall) {
884
+ const functionCall = message.function_call;
885
+ const functionCallResponse = await experimental_onFunctionCall(
886
+ getCurrentMessages(),
887
+ functionCall
888
+ );
889
+ if (functionCallResponse === void 0) {
890
+ hasFollowingResponse = false;
891
+ break;
892
+ }
893
+ updateChatRequest(functionCallResponse);
894
+ }
895
+ }
896
+ if (!hasFollowingResponse) {
897
+ break;
898
+ }
899
+ } else {
900
+ const streamedResponseMessage = messagesAndDataOrJustMessage;
901
+ if (streamedResponseMessage.function_call === void 0 || typeof streamedResponseMessage.function_call === "string") {
902
+ break;
903
+ }
904
+ if (experimental_onFunctionCall) {
905
+ const functionCall = streamedResponseMessage.function_call;
906
+ const functionCallResponse = await experimental_onFunctionCall(getCurrentMessages(), functionCall);
907
+ if (functionCallResponse === void 0)
908
+ break;
909
+ updateChatRequest(functionCallResponse);
910
+ }
911
+ }
766
912
  }
767
- return responseMessage;
913
+ }
914
+
915
+ // svelte/use-chat.ts
916
+ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadata, previousMessages, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
917
+ var _a, _b;
918
+ mutate(chatRequest.messages);
919
+ const constructedMessagesPayload = sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(({ role, content, name, function_call }) => ({
920
+ role,
921
+ content,
922
+ ...name !== void 0 && { name },
923
+ ...function_call !== void 0 && {
924
+ function_call
925
+ }
926
+ }));
927
+ return await callApi({
928
+ api,
929
+ messages: constructedMessagesPayload,
930
+ body: {
931
+ ...extraMetadata.body,
932
+ ...(_a = chatRequest.options) == null ? void 0 : _a.body,
933
+ ...chatRequest.functions !== void 0 && {
934
+ functions: chatRequest.functions
935
+ },
936
+ ...chatRequest.function_call !== void 0 && {
937
+ function_call: chatRequest.function_call
938
+ }
939
+ },
940
+ credentials: extraMetadata.credentials,
941
+ headers: {
942
+ ...extraMetadata.headers,
943
+ ...(_b = chatRequest.options) == null ? void 0 : _b.headers
944
+ },
945
+ abortController: () => abortControllerRef,
946
+ appendMessage(message) {
947
+ mutate([...chatRequest.messages, message]);
948
+ },
949
+ restoreMessagesOnFailure() {
950
+ mutate(previousMessages);
951
+ },
952
+ onResponse,
953
+ onUpdate(merged, data) {
954
+ mutate([...chatRequest.messages, ...merged]);
955
+ mutateStreamData([...existingData || [], ...data || []]);
956
+ },
957
+ onFinish
958
+ });
768
959
  };
769
960
  var uniqueId = 0;
770
961
  var store = {};
@@ -792,6 +983,7 @@ function useChat({
792
983
  fetcher: () => store[key] || initialMessages,
793
984
  fallbackData: initialMessages
794
985
  });
986
+ const streamData = (0, import_store.writable)(void 0);
795
987
  const loading = (0, import_store.writable)(false);
796
988
  data.set(initialMessages);
797
989
  const mutate = (data2) => {
@@ -811,29 +1003,28 @@ function useChat({
811
1003
  error.set(void 0);
812
1004
  loading.set(true);
813
1005
  abortController = new AbortController();
814
- while (true) {
815
- const streamedResponseMessage = await getStreamedResponse(
1006
+ await processChatStream({
1007
+ getStreamedResponse: () => getStreamedResponse(
816
1008
  api,
817
1009
  chatRequest,
818
1010
  mutate,
1011
+ (data2) => {
1012
+ streamData.set(data2);
1013
+ },
1014
+ (0, import_store.get)(streamData),
819
1015
  extraMetadata,
820
1016
  (0, import_store.get)(messages),
821
1017
  abortController,
822
1018
  onFinish,
823
1019
  onResponse,
824
1020
  sendExtraMessageFields
825
- );
826
- if (streamedResponseMessage.function_call === void 0 || typeof streamedResponseMessage.function_call === "string") {
827
- break;
828
- }
829
- if (experimental_onFunctionCall) {
830
- const functionCall = streamedResponseMessage.function_call;
831
- const functionCallResponse = await experimental_onFunctionCall((0, import_store.get)(messages), functionCall);
832
- if (functionCallResponse === void 0)
833
- break;
834
- chatRequest = functionCallResponse;
835
- }
836
- }
1021
+ ),
1022
+ experimental_onFunctionCall,
1023
+ updateChatRequest: (chatRequestParam) => {
1024
+ chatRequest = chatRequestParam;
1025
+ },
1026
+ getCurrentMessages: () => (0, import_store.get)(messages)
1027
+ });
837
1028
  abortController = null;
838
1029
  return null;
839
1030
  } catch (err) {
@@ -927,7 +1118,8 @@ function useChat({
927
1118
  setMessages,
928
1119
  input,
929
1120
  handleSubmit,
930
- isLoading
1121
+ isLoading,
1122
+ data: streamData
931
1123
  };
932
1124
  }
933
1125
 
@@ -1,6 +1,3 @@
1
- // svelte/use-chat.ts
2
- import { get, writable, derived } from "svelte/store";
3
-
4
1
  // ../../node_modules/.pnpm/swrev@4.0.0/node_modules/swrev/dist/swrev.mjs
5
2
  var P = Object.defineProperty;
6
3
  var F = (r, e, t) => e in r ? P(r, e, { enumerable: true, configurable: true, writable: true, value: t }) : r[e] = t;
@@ -498,6 +495,12 @@ var W = (t) => new O2(t);
498
495
  var c = W();
499
496
  var F2 = (t, e) => c.useSWR(t, e);
500
497
 
498
+ // svelte/use-chat.ts
499
+ import { derived, get, writable } from "svelte/store";
500
+
501
+ // shared/call-api.ts
502
+ import { nanoid as nanoid2 } from "nanoid";
503
+
501
504
  // shared/utils.ts
502
505
  import { customAlphabet } from "nanoid/non-secure";
503
506
 
@@ -641,103 +644,291 @@ function createChunkDecoder(complex) {
641
644
  return decoded.map(parseStreamPart).filter(Boolean);
642
645
  };
643
646
  }
647
+ var COMPLEX_HEADER = "X-Experimental-Stream-Data";
644
648
 
645
- // svelte/use-chat.ts
646
- var getStreamedResponse = async (api, chatRequest, mutate, extraMetadata, previousMessages, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
647
- var _a, _b, _c, _d;
648
- mutate(chatRequest.messages);
649
- const res = await fetch(api, {
649
+ // shared/parse-complex-response.ts
650
+ async function parseComplexResponse({
651
+ reader,
652
+ abortControllerRef,
653
+ update,
654
+ onFinish,
655
+ generateId = nanoid,
656
+ getCurrentDate = () => /* @__PURE__ */ new Date()
657
+ }) {
658
+ const createdAt = getCurrentDate();
659
+ const decode = createChunkDecoder(true);
660
+ const prefixMap = {
661
+ data: []
662
+ };
663
+ const NEWLINE = "\n".charCodeAt(0);
664
+ const chunks = [];
665
+ let totalLength = 0;
666
+ while (true) {
667
+ const { value } = await reader.read();
668
+ if (value) {
669
+ chunks.push(value);
670
+ totalLength += value.length;
671
+ if (value[value.length - 1] !== NEWLINE) {
672
+ continue;
673
+ }
674
+ }
675
+ if (chunks.length === 0) {
676
+ break;
677
+ }
678
+ let concatenatedChunks = new Uint8Array(totalLength);
679
+ let offset = 0;
680
+ for (const chunk of chunks) {
681
+ concatenatedChunks.set(chunk, offset);
682
+ offset += chunk.length;
683
+ }
684
+ chunks.length = 0;
685
+ totalLength = 0;
686
+ const lines = decode(concatenatedChunks);
687
+ if (typeof lines === "string") {
688
+ throw new Error(
689
+ "Invalid response format. Complex mode was set but the response is a string. This should never happen."
690
+ );
691
+ }
692
+ for (const { type, value: value2 } of lines) {
693
+ if (type === "text") {
694
+ if (prefixMap["text"]) {
695
+ prefixMap["text"] = {
696
+ ...prefixMap["text"],
697
+ content: (prefixMap["text"].content || "") + value2
698
+ };
699
+ } else {
700
+ prefixMap["text"] = {
701
+ id: generateId(),
702
+ role: "assistant",
703
+ content: value2,
704
+ createdAt
705
+ };
706
+ }
707
+ }
708
+ let functionCallMessage = null;
709
+ if (type === "function_call") {
710
+ prefixMap["function_call"] = {
711
+ id: generateId(),
712
+ role: "assistant",
713
+ content: "",
714
+ function_call: value2.function_call,
715
+ name: value2.function_call.name,
716
+ createdAt
717
+ };
718
+ functionCallMessage = prefixMap["function_call"];
719
+ }
720
+ if (type === "data") {
721
+ prefixMap["data"].push(...value2);
722
+ }
723
+ const responseMessage = prefixMap["text"];
724
+ const merged = [functionCallMessage, responseMessage].filter(
725
+ Boolean
726
+ );
727
+ update(merged, [...prefixMap["data"]]);
728
+ if ((abortControllerRef == null ? void 0 : abortControllerRef.current) === null) {
729
+ reader.cancel();
730
+ break;
731
+ }
732
+ }
733
+ }
734
+ onFinish == null ? void 0 : onFinish(prefixMap);
735
+ return {
736
+ messages: [prefixMap.text, prefixMap.function_call].filter(
737
+ Boolean
738
+ ),
739
+ data: prefixMap.data
740
+ };
741
+ }
742
+
743
+ // shared/call-api.ts
744
+ async function callApi({
745
+ api,
746
+ messages,
747
+ body,
748
+ credentials,
749
+ headers,
750
+ abortController,
751
+ appendMessage,
752
+ restoreMessagesOnFailure,
753
+ onResponse,
754
+ onUpdate,
755
+ onFinish
756
+ }) {
757
+ var _a;
758
+ const response = await fetch(api, {
650
759
  method: "POST",
651
760
  body: JSON.stringify({
652
- messages: sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(
653
- ({ role, content, name, function_call }) => ({
654
- role,
655
- content,
656
- ...name !== void 0 && { name },
657
- ...function_call !== void 0 && {
658
- function_call
659
- }
660
- })
661
- ),
662
- ...extraMetadata.body,
663
- ...(_a = chatRequest.options) == null ? void 0 : _a.body,
664
- ...chatRequest.functions !== void 0 && {
665
- functions: chatRequest.functions
666
- },
667
- ...chatRequest.function_call !== void 0 && {
668
- function_call: chatRequest.function_call
669
- }
761
+ messages,
762
+ ...body
670
763
  }),
671
- credentials: extraMetadata.credentials,
672
- headers: {
673
- ...extraMetadata.headers,
674
- ...(_b = chatRequest.options) == null ? void 0 : _b.headers
675
- },
676
- ...abortControllerRef !== null && {
677
- signal: abortControllerRef.signal
678
- }
764
+ headers,
765
+ signal: (_a = abortController == null ? void 0 : abortController()) == null ? void 0 : _a.signal,
766
+ credentials
679
767
  }).catch((err) => {
680
- mutate(previousMessages);
768
+ restoreMessagesOnFailure();
681
769
  throw err;
682
770
  });
683
771
  if (onResponse) {
684
772
  try {
685
- await onResponse(res);
773
+ await onResponse(response);
686
774
  } catch (err) {
687
775
  throw err;
688
776
  }
689
777
  }
690
- if (!res.ok) {
691
- mutate(previousMessages);
692
- throw new Error(await res.text() || "Failed to fetch the chat response.");
778
+ if (!response.ok) {
779
+ restoreMessagesOnFailure();
780
+ throw new Error(
781
+ await response.text() || "Failed to fetch the chat response."
782
+ );
693
783
  }
694
- if (!res.body) {
784
+ if (!response.body) {
695
785
  throw new Error("The response body is empty.");
696
786
  }
697
- let streamedResponse = "";
698
- const createdAt = /* @__PURE__ */ new Date();
699
- const replyId = nanoid();
700
- const reader = res.body.getReader();
701
- const decode = createChunkDecoder();
702
- let responseMessage = {
703
- id: replyId,
704
- createdAt,
705
- content: "",
706
- role: "assistant"
707
- };
708
- while (true) {
709
- const { done, value } = await reader.read();
710
- if (done) {
711
- break;
787
+ const reader = response.body.getReader();
788
+ const isComplexMode = response.headers.get(COMPLEX_HEADER) === "true";
789
+ if (isComplexMode) {
790
+ return await parseComplexResponse({
791
+ reader,
792
+ abortControllerRef: abortController != null ? { current: abortController() } : void 0,
793
+ update: onUpdate,
794
+ onFinish(prefixMap) {
795
+ if (onFinish && prefixMap.text != null) {
796
+ onFinish(prefixMap.text);
797
+ }
798
+ }
799
+ });
800
+ } else {
801
+ const createdAt = /* @__PURE__ */ new Date();
802
+ const decode = createChunkDecoder(false);
803
+ let streamedResponse = "";
804
+ const replyId = nanoid2();
805
+ let responseMessage = {
806
+ id: replyId,
807
+ createdAt,
808
+ content: "",
809
+ role: "assistant"
810
+ };
811
+ while (true) {
812
+ const { done, value } = await reader.read();
813
+ if (done) {
814
+ break;
815
+ }
816
+ streamedResponse += decode(value);
817
+ if (streamedResponse.startsWith('{"function_call":')) {
818
+ responseMessage["function_call"] = streamedResponse;
819
+ } else {
820
+ responseMessage["content"] = streamedResponse;
821
+ }
822
+ appendMessage({ ...responseMessage });
823
+ if ((abortController == null ? void 0 : abortController()) === null) {
824
+ reader.cancel();
825
+ break;
826
+ }
712
827
  }
713
- streamedResponse += decode(value);
714
- const functionStart = streamedResponse.indexOf("{");
715
- if (functionStart !== -1) {
716
- const matches = /(.*?)(?:({"function_call".*?}})(.*))?$/gs.exec(
717
- streamedResponse
718
- );
719
- responseMessage.content = `${(_c = matches == null ? void 0 : matches[1]) != null ? _c : ""}${(_d = matches == null ? void 0 : matches[3]) != null ? _d : ""}`;
720
- responseMessage.function_call = matches == null ? void 0 : matches[2];
721
- } else {
722
- responseMessage.content = streamedResponse;
828
+ if (streamedResponse.startsWith('{"function_call":')) {
829
+ const parsedFunctionCall = JSON.parse(streamedResponse).function_call;
830
+ responseMessage["function_call"] = parsedFunctionCall;
831
+ appendMessage({ ...responseMessage });
723
832
  }
724
- mutate([...chatRequest.messages, { ...responseMessage }]);
725
- if (abortControllerRef === null) {
726
- reader.cancel();
727
- break;
833
+ if (onFinish) {
834
+ onFinish(responseMessage);
728
835
  }
836
+ return responseMessage;
729
837
  }
730
- if (typeof responseMessage.function_call === "string") {
731
- const parsedFunctionCall = JSON.parse(
732
- responseMessage.function_call
733
- ).function_call;
734
- responseMessage.function_call = parsedFunctionCall;
735
- mutate([...chatRequest.messages, { ...responseMessage }]);
736
- }
737
- if (onFinish) {
738
- onFinish(responseMessage);
838
+ }
839
+
840
+ // shared/process-chat-stream.ts
841
+ async function processChatStream({
842
+ getStreamedResponse: getStreamedResponse2,
843
+ experimental_onFunctionCall,
844
+ updateChatRequest,
845
+ getCurrentMessages
846
+ }) {
847
+ while (true) {
848
+ const messagesAndDataOrJustMessage = await getStreamedResponse2();
849
+ if ("messages" in messagesAndDataOrJustMessage) {
850
+ let hasFollowingResponse = false;
851
+ for (const message of messagesAndDataOrJustMessage.messages) {
852
+ if (message.function_call === void 0 || typeof message.function_call === "string") {
853
+ continue;
854
+ }
855
+ hasFollowingResponse = true;
856
+ if (experimental_onFunctionCall) {
857
+ const functionCall = message.function_call;
858
+ const functionCallResponse = await experimental_onFunctionCall(
859
+ getCurrentMessages(),
860
+ functionCall
861
+ );
862
+ if (functionCallResponse === void 0) {
863
+ hasFollowingResponse = false;
864
+ break;
865
+ }
866
+ updateChatRequest(functionCallResponse);
867
+ }
868
+ }
869
+ if (!hasFollowingResponse) {
870
+ break;
871
+ }
872
+ } else {
873
+ const streamedResponseMessage = messagesAndDataOrJustMessage;
874
+ if (streamedResponseMessage.function_call === void 0 || typeof streamedResponseMessage.function_call === "string") {
875
+ break;
876
+ }
877
+ if (experimental_onFunctionCall) {
878
+ const functionCall = streamedResponseMessage.function_call;
879
+ const functionCallResponse = await experimental_onFunctionCall(getCurrentMessages(), functionCall);
880
+ if (functionCallResponse === void 0)
881
+ break;
882
+ updateChatRequest(functionCallResponse);
883
+ }
884
+ }
739
885
  }
740
- return responseMessage;
886
+ }
887
+
888
+ // svelte/use-chat.ts
889
+ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, existingData, extraMetadata, previousMessages, abortControllerRef, onFinish, onResponse, sendExtraMessageFields) => {
890
+ var _a, _b;
891
+ mutate(chatRequest.messages);
892
+ const constructedMessagesPayload = sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(({ role, content, name, function_call }) => ({
893
+ role,
894
+ content,
895
+ ...name !== void 0 && { name },
896
+ ...function_call !== void 0 && {
897
+ function_call
898
+ }
899
+ }));
900
+ return await callApi({
901
+ api,
902
+ messages: constructedMessagesPayload,
903
+ body: {
904
+ ...extraMetadata.body,
905
+ ...(_a = chatRequest.options) == null ? void 0 : _a.body,
906
+ ...chatRequest.functions !== void 0 && {
907
+ functions: chatRequest.functions
908
+ },
909
+ ...chatRequest.function_call !== void 0 && {
910
+ function_call: chatRequest.function_call
911
+ }
912
+ },
913
+ credentials: extraMetadata.credentials,
914
+ headers: {
915
+ ...extraMetadata.headers,
916
+ ...(_b = chatRequest.options) == null ? void 0 : _b.headers
917
+ },
918
+ abortController: () => abortControllerRef,
919
+ appendMessage(message) {
920
+ mutate([...chatRequest.messages, message]);
921
+ },
922
+ restoreMessagesOnFailure() {
923
+ mutate(previousMessages);
924
+ },
925
+ onResponse,
926
+ onUpdate(merged, data) {
927
+ mutate([...chatRequest.messages, ...merged]);
928
+ mutateStreamData([...existingData || [], ...data || []]);
929
+ },
930
+ onFinish
931
+ });
741
932
  };
742
933
  var uniqueId = 0;
743
934
  var store = {};
@@ -765,6 +956,7 @@ function useChat({
765
956
  fetcher: () => store[key] || initialMessages,
766
957
  fallbackData: initialMessages
767
958
  });
959
+ const streamData = writable(void 0);
768
960
  const loading = writable(false);
769
961
  data.set(initialMessages);
770
962
  const mutate = (data2) => {
@@ -784,29 +976,28 @@ function useChat({
784
976
  error.set(void 0);
785
977
  loading.set(true);
786
978
  abortController = new AbortController();
787
- while (true) {
788
- const streamedResponseMessage = await getStreamedResponse(
979
+ await processChatStream({
980
+ getStreamedResponse: () => getStreamedResponse(
789
981
  api,
790
982
  chatRequest,
791
983
  mutate,
984
+ (data2) => {
985
+ streamData.set(data2);
986
+ },
987
+ get(streamData),
792
988
  extraMetadata,
793
989
  get(messages),
794
990
  abortController,
795
991
  onFinish,
796
992
  onResponse,
797
993
  sendExtraMessageFields
798
- );
799
- if (streamedResponseMessage.function_call === void 0 || typeof streamedResponseMessage.function_call === "string") {
800
- break;
801
- }
802
- if (experimental_onFunctionCall) {
803
- const functionCall = streamedResponseMessage.function_call;
804
- const functionCallResponse = await experimental_onFunctionCall(get(messages), functionCall);
805
- if (functionCallResponse === void 0)
806
- break;
807
- chatRequest = functionCallResponse;
808
- }
809
- }
994
+ ),
995
+ experimental_onFunctionCall,
996
+ updateChatRequest: (chatRequestParam) => {
997
+ chatRequest = chatRequestParam;
998
+ },
999
+ getCurrentMessages: () => get(messages)
1000
+ });
810
1001
  abortController = null;
811
1002
  return null;
812
1003
  } catch (err) {
@@ -900,7 +1091,8 @@ function useChat({
900
1091
  setMessages,
901
1092
  input,
902
1093
  handleSubmit,
903
- isLoading
1094
+ isLoading,
1095
+ data: streamData
904
1096
  };
905
1097
  }
906
1098
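The client-side changes above (the X-Experimental-Stream-Data header check and parseComplexResponse) are what populate the new data store; the data itself is produced on the server with experimental_StreamData. A hedged sketch of the producing side, assuming an OpenAI-backed route with the openai v4 SDK; the model, route path, and appended payload are illustrative:

// app/api/chat/route.ts (illustrative)
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse, experimental_StreamData } from 'ai';

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

export async function POST(req: Request) {
  const { messages } = await req.json();

  const completion = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages,
  });

  const data = new experimental_StreamData();
  data.append({ servedAt: Date.now() }); // any JSONValue payload

  const stream = OpenAIStream(completion, {
    experimental_streamData: true, // emit stream parts instead of raw text
    onFinal() {
      data.close(); // the data stream must be closed or the response never finishes
    },
  });

  // Passing the StreamData instance as the third argument attaches it to the
  // response and signals complex mode to the client.
  return new StreamingTextResponse(stream, {}, data);
}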