@livekit/agents 1.2.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/dist/_exceptions.cjs.map +1 -1
  2. package/dist/_exceptions.d.ts.map +1 -1
  3. package/dist/_exceptions.js.map +1 -1
  4. package/dist/beta/workflows/task_group.cjs +7 -4
  5. package/dist/beta/workflows/task_group.cjs.map +1 -1
  6. package/dist/beta/workflows/task_group.d.ts.map +1 -1
  7. package/dist/beta/workflows/task_group.js +7 -4
  8. package/dist/beta/workflows/task_group.js.map +1 -1
  9. package/dist/inference/interruption/http_transport.cjs.map +1 -1
  10. package/dist/inference/interruption/http_transport.d.cts +3 -1
  11. package/dist/inference/interruption/http_transport.d.ts +3 -1
  12. package/dist/inference/interruption/http_transport.d.ts.map +1 -1
  13. package/dist/inference/interruption/http_transport.js.map +1 -1
  14. package/dist/inference/interruption/ws_transport.cjs +37 -32
  15. package/dist/inference/interruption/ws_transport.cjs.map +1 -1
  16. package/dist/inference/interruption/ws_transport.d.ts.map +1 -1
  17. package/dist/inference/interruption/ws_transport.js +37 -32
  18. package/dist/inference/interruption/ws_transport.js.map +1 -1
  19. package/dist/inference/tts.cjs.map +1 -1
  20. package/dist/inference/tts.d.cts +42 -4
  21. package/dist/inference/tts.d.ts +42 -4
  22. package/dist/inference/tts.d.ts.map +1 -1
  23. package/dist/inference/tts.js.map +1 -1
  24. package/dist/inference/tts.test.cjs +72 -0
  25. package/dist/inference/tts.test.cjs.map +1 -1
  26. package/dist/inference/tts.test.js +72 -0
  27. package/dist/inference/tts.test.js.map +1 -1
  28. package/dist/llm/chat_context.cjs +102 -31
  29. package/dist/llm/chat_context.cjs.map +1 -1
  30. package/dist/llm/chat_context.d.ts.map +1 -1
  31. package/dist/llm/chat_context.js +102 -31
  32. package/dist/llm/chat_context.js.map +1 -1
  33. package/dist/llm/chat_context.test.cjs +123 -5
  34. package/dist/llm/chat_context.test.cjs.map +1 -1
  35. package/dist/llm/chat_context.test.js +123 -5
  36. package/dist/llm/chat_context.test.js.map +1 -1
  37. package/dist/llm/fallback_adapter.cjs +2 -0
  38. package/dist/llm/fallback_adapter.cjs.map +1 -1
  39. package/dist/llm/fallback_adapter.d.ts.map +1 -1
  40. package/dist/llm/fallback_adapter.js +2 -0
  41. package/dist/llm/fallback_adapter.js.map +1 -1
  42. package/dist/llm/index.cjs +2 -0
  43. package/dist/llm/index.cjs.map +1 -1
  44. package/dist/llm/index.d.cts +1 -1
  45. package/dist/llm/index.d.ts +1 -1
  46. package/dist/llm/index.d.ts.map +1 -1
  47. package/dist/llm/index.js +2 -0
  48. package/dist/llm/index.js.map +1 -1
  49. package/dist/llm/utils.cjs +89 -0
  50. package/dist/llm/utils.cjs.map +1 -1
  51. package/dist/llm/utils.d.cts +8 -0
  52. package/dist/llm/utils.d.ts +8 -0
  53. package/dist/llm/utils.d.ts.map +1 -1
  54. package/dist/llm/utils.js +88 -0
  55. package/dist/llm/utils.js.map +1 -1
  56. package/dist/llm/utils.test.cjs +90 -0
  57. package/dist/llm/utils.test.cjs.map +1 -1
  58. package/dist/llm/utils.test.js +98 -2
  59. package/dist/llm/utils.test.js.map +1 -1
  60. package/dist/stt/stt.cjs +8 -0
  61. package/dist/stt/stt.cjs.map +1 -1
  62. package/dist/stt/stt.d.cts +8 -0
  63. package/dist/stt/stt.d.ts +8 -0
  64. package/dist/stt/stt.d.ts.map +1 -1
  65. package/dist/stt/stt.js +8 -0
  66. package/dist/stt/stt.js.map +1 -1
  67. package/dist/tts/fallback_adapter.cjs +6 -0
  68. package/dist/tts/fallback_adapter.cjs.map +1 -1
  69. package/dist/tts/fallback_adapter.d.ts.map +1 -1
  70. package/dist/tts/fallback_adapter.js +6 -0
  71. package/dist/tts/fallback_adapter.js.map +1 -1
  72. package/dist/typed_promise.cjs +48 -0
  73. package/dist/typed_promise.cjs.map +1 -0
  74. package/dist/typed_promise.d.cts +24 -0
  75. package/dist/typed_promise.d.ts +24 -0
  76. package/dist/typed_promise.d.ts.map +1 -0
  77. package/dist/typed_promise.js +28 -0
  78. package/dist/typed_promise.js.map +1 -0
  79. package/dist/utils.cjs +2 -2
  80. package/dist/utils.cjs.map +1 -1
  81. package/dist/utils.js +2 -2
  82. package/dist/utils.js.map +1 -1
  83. package/dist/version.cjs +1 -1
  84. package/dist/version.js +1 -1
  85. package/package.json +4 -2
  86. package/src/_exceptions.ts +5 -0
  87. package/src/beta/workflows/task_group.ts +14 -5
  88. package/src/inference/interruption/http_transport.ts +2 -1
  89. package/src/inference/interruption/ws_transport.ts +44 -34
  90. package/src/inference/tts.test.ts +87 -0
  91. package/src/inference/tts.ts +46 -6
  92. package/src/llm/chat_context.test.ts +137 -5
  93. package/src/llm/chat_context.ts +119 -38
  94. package/src/llm/fallback_adapter.ts +5 -2
  95. package/src/llm/index.ts +2 -0
  96. package/src/llm/utils.test.ts +103 -2
  97. package/src/llm/utils.ts +128 -0
  98. package/src/stt/stt.ts +9 -1
  99. package/src/tts/fallback_adapter.ts +9 -2
  100. package/src/typed_promise.ts +67 -0
  101. package/src/utils.ts +2 -2
@@ -17,6 +17,18 @@ import {
17
17
 
18
18
  initializeLogger({ pretty: false, level: 'error' });
19
19
 
20
+ const summaryXml = (summary: string) =>
21
+ ['<chat_history_summary>', summary, '</chat_history_summary>'].join('\n');
22
+
23
+ class TrackingFakeLLM extends FakeLLM {
24
+ chatCalls = 0;
25
+
26
+ chat(...args: Parameters<FakeLLM['chat']>) {
27
+ this.chatCalls += 1;
28
+ return super.chat(...args);
29
+ }
30
+ }
31
+
20
32
  describe('ChatContext.toJSON', () => {
21
33
  it('should match snapshot for empty context', () => {
22
34
  const context = new ChatContext();
@@ -288,18 +300,26 @@ describe('ChatContext.toJSON', () => {
288
300
  });
289
301
 
290
302
  describe('ChatContext._summarize', () => {
291
- it('keeps chronological timestamps with summary + tail', async () => {
303
+ it('includes function calls in the summarization source and keeps chronological order', async () => {
292
304
  const ctx = new ChatContext();
293
305
  ctx.addMessage({ role: 'system', content: 'System prompt', createdAt: 0 });
294
306
  ctx.addMessage({ role: 'user', content: 'hello', createdAt: 1000 });
295
307
  ctx.addMessage({ role: 'assistant', content: 'hi there', createdAt: 2000 });
308
+ ctx.insert(
309
+ FunctionCall.create({
310
+ callId: 'call_1',
311
+ name: 'lookup',
312
+ args: '{"order":"123"}',
313
+ createdAt: 2500,
314
+ }),
315
+ );
296
316
  ctx.insert(
297
317
  new FunctionCallOutput({
298
318
  callId: 'call_1',
299
319
  name: 'lookup',
300
- output: '{"ok":true}',
320
+ output: '{"status":"delivered"}',
301
321
  isError: false,
302
- createdAt: 3500,
322
+ createdAt: 2600,
303
323
  }),
304
324
  );
305
325
  ctx.addMessage({ role: 'user', content: 'my color is blue', createdAt: 3000 });
@@ -307,7 +327,22 @@ describe('ChatContext._summarize', () => {
307
327
 
308
328
  const fake = new FakeLLM([
309
329
  {
310
- input: 'Conversation to summarize:\n\nuser: hello\nassistant: hi there',
330
+ input: [
331
+ 'Conversation to summarize:',
332
+ '',
333
+ '<user>',
334
+ 'hello',
335
+ '</user>',
336
+ '<assistant>',
337
+ 'hi there',
338
+ '</assistant>',
339
+ '<function_call name="lookup" call_id="call_1">',
340
+ '{"order":"123"}',
341
+ '</function_call>',
342
+ '<function_call_output name="lookup" call_id="call_1">',
343
+ '{"status":"delivered"}',
344
+ '</function_call_output>',
345
+ ].join('\n'),
311
346
  content: 'condensed head',
312
347
  },
313
348
  ]);
@@ -323,12 +358,109 @@ describe('ChatContext._summarize', () => {
323
358
  throw new Error('summary message is missing');
324
359
  }
325
360
 
326
- expect(summary.createdAt).toBeCloseTo(2999.999, 6);
361
+ expect(summary.textContent).toBe(summaryXml('condensed head'));
362
+ expect(summary.createdAt).toBeCloseTo(2999.999999, 6);
363
+ expect(ctx.items.filter((item) => item.type === 'function_call')).toHaveLength(0);
364
+ expect(ctx.items.filter((item) => item.type === 'function_call_output')).toHaveLength(0);
327
365
 
328
366
  const createdAts = ctx.items.map((item) => item.createdAt);
329
367
  const sorted = [...createdAts].sort((a, b) => a - b);
330
368
  expect(createdAts).toEqual(sorted);
331
369
  });
370
+
371
+ it('preserves interleaved tool items that belong to the recent tail', async () => {
372
+ const ctx = new ChatContext();
373
+ ctx.addMessage({ role: 'system', content: 'System prompt', createdAt: 0 });
374
+ ctx.addMessage({ role: 'user', content: 'my earbuds are broken', createdAt: 1000 });
375
+ ctx.addMessage({
376
+ role: 'assistant',
377
+ content: 'Can you share your order number?',
378
+ createdAt: 2000,
379
+ });
380
+ ctx.addMessage({ role: 'user', content: 'Order #123', createdAt: 3000 });
381
+ ctx.insert(
382
+ FunctionCall.create({
383
+ callId: 'call_2',
384
+ name: 'lookup_order',
385
+ args: '{"order":"123"}',
386
+ createdAt: 3500,
387
+ }),
388
+ );
389
+ ctx.insert(
390
+ new FunctionCallOutput({
391
+ callId: 'call_2',
392
+ name: 'lookup_order',
393
+ output: '{"status":"delivered"}',
394
+ isError: false,
395
+ createdAt: 3600,
396
+ }),
397
+ );
398
+ ctx.addMessage({
399
+ role: 'assistant',
400
+ content: 'Found your order. Let me check the warranty.',
401
+ createdAt: 4000,
402
+ });
403
+ ctx.addMessage({ role: 'user', content: 'Thanks.', createdAt: 5000 });
404
+ ctx.addMessage({ role: 'assistant', content: 'You are under warranty.', createdAt: 6000 });
405
+
406
+ const fake = new FakeLLM([
407
+ {
408
+ input: [
409
+ 'Conversation to summarize:',
410
+ '',
411
+ '<user>',
412
+ 'my earbuds are broken',
413
+ '</user>',
414
+ '<assistant>',
415
+ 'Can you share your order number?',
416
+ '</assistant>',
417
+ ].join('\n'),
418
+ content: 'older summary',
419
+ },
420
+ ]);
421
+
422
+ await ctx._summarize(fake, { keepLastTurns: 2 });
423
+
424
+ const functionItems = ctx.items.filter(
425
+ (item) => item.type === 'function_call' || item.type === 'function_call_output',
426
+ );
427
+ expect(functionItems).toHaveLength(2);
428
+ expect(functionItems.map((item) => item.createdAt)).toEqual([3500, 3600]);
429
+
430
+ const rawTailMessages = ctx.items.filter(
431
+ (item) =>
432
+ item.type === 'message' &&
433
+ (item.role === 'user' || item.role === 'assistant') &&
434
+ item.extra?.is_summary !== true,
435
+ );
436
+ expect(rawTailMessages).toHaveLength(4);
437
+ expect(rawTailMessages.map((item) => item.textContent)).toEqual([
438
+ 'Order #123',
439
+ 'Found your order. Let me check the warranty.',
440
+ 'Thanks.',
441
+ 'You are under warranty.',
442
+ ]);
443
+
444
+ const createdAts = ctx.items.map((item) => item.createdAt);
445
+ const sorted = [...createdAts].sort((a, b) => a - b);
446
+ expect(createdAts).toEqual(sorted);
447
+ });
448
+
449
+ it('skips summarization when the recent-turn budget already covers the history', async () => {
450
+ const ctx = new ChatContext();
451
+ ctx.addMessage({ role: 'system', content: 'System prompt', createdAt: 0 });
452
+ ctx.addMessage({ role: 'user', content: 'hello', createdAt: 1000 });
453
+ ctx.addMessage({ role: 'assistant', content: 'hi there', createdAt: 2000 });
454
+
455
+ const llm = new TrackingFakeLLM();
456
+ const originalIds = ctx.items.map((item) => item.id);
457
+
458
+ const result = await ctx._summarize(llm, { keepLastTurns: 1 });
459
+
460
+ expect(result).toBe(ctx);
461
+ expect(llm.chatCalls).toBe(0);
462
+ expect(ctx.items.map((item) => item.id)).toEqual(originalIds);
463
+ });
332
464
  });
333
465
 
334
466
  describe('ReadonlyChatContext with immutable array', () => {
@@ -820,39 +820,65 @@ export class ChatContext {
820
820
  async _summarize(llm: LLM, options: { keepLastTurns?: number } = {}): Promise<ChatContext> {
821
821
  const { keepLastTurns = 2 } = options;
822
822
 
823
- const toSummarize: ChatMessage[] = [];
824
- for (const item of this._items) {
825
- if (item.type !== 'message') continue;
826
- if (item.role !== 'user' && item.role !== 'assistant') continue;
827
- if (item.extra?.is_summary === true) continue;
823
+ // Split the history into a head/tail over the full item stream so recent
824
+ // tool calls/outputs stay attached to the turns they belong to.
825
+ const msgBudget = keepLastTurns * 2;
826
+ let splitIdx = this._items.length;
827
+
828
+ if (msgBudget > 0) {
829
+ let msgCount = 0;
830
+ let foundSplit = false;
831
+ for (let i = this._items.length - 1; i >= 0; i -= 1) {
832
+ const item = this._items[i]!;
833
+ if (item.type === 'message' && (item.role === 'user' || item.role === 'assistant')) {
834
+ msgCount += 1;
835
+ if (msgCount >= msgBudget) {
836
+ splitIdx = i;
837
+ foundSplit = true;
838
+ break;
839
+ }
840
+ }
841
+ }
828
842
 
829
- const text = (item.textContent ?? '').trim();
830
- if (text) {
831
- toSummarize.push(item);
843
+ if (!foundSplit) {
844
+ return this;
832
845
  }
833
846
  }
834
847
 
835
- if (toSummarize.length === 0) {
848
+ if (splitIdx === 0) {
836
849
  return this;
837
850
  }
838
851
 
839
- const tailN = Math.max(0, Math.min(toSummarize.length, keepLastTurns * 2));
840
- let head: ChatMessage[];
841
- let tail: ChatMessage[];
842
- if (tailN === 0) {
843
- head = toSummarize;
844
- tail = [];
845
- } else {
846
- head = toSummarize.slice(0, -tailN);
847
- tail = toSummarize.slice(-tailN);
852
+ const headItems = this._items.slice(0, splitIdx);
853
+ const tailItems = this._items.slice(splitIdx);
854
+
855
+ const toSummarize: Array<ChatMessage | FunctionCall | FunctionCallOutput> = [];
856
+ for (const item of headItems) {
857
+ if (item.type === 'message') {
858
+ if (item.role !== 'user' && item.role !== 'assistant') continue;
859
+ if (item.extra?.is_summary === true) continue;
860
+
861
+ const text = (item.textContent ?? '').trim();
862
+ if (text) {
863
+ toSummarize.push(item);
864
+ }
865
+ } else if (item.type === 'function_call' || item.type === 'function_call_output') {
866
+ toSummarize.push(item);
867
+ }
848
868
  }
849
869
 
850
- if (head.length === 0) {
870
+ if (toSummarize.length === 0) {
851
871
  return this;
852
872
  }
853
873
 
854
- const sourceText = head
855
- .map((m) => `${m.role}: ${(m.textContent ?? '').trim()}`)
874
+ const sourceText = toSummarize
875
+ .map((item) => {
876
+ if (item.type === 'message') {
877
+ return toXml(item.role, (item.textContent ?? '').trim());
878
+ }
879
+
880
+ return functionCallItemToMessage(item).textContent ?? '';
881
+ })
856
882
  .join('\n')
857
883
  .trim();
858
884
 
@@ -864,10 +890,21 @@ export class ChatContext {
864
890
  const promptCtx = new ChatContext();
865
891
  promptCtx.addMessage({
866
892
  role: 'system',
867
- content:
868
- 'Compress older chat history into a short, faithful summary.\n' +
869
- 'Focus on user goals, constraints, decisions, key facts/preferences/entities, and pending tasks.\n' +
870
- 'Exclude chit-chat and greetings. Be concise.',
893
+ content: [
894
+ 'Compress older conversation history into a short, faithful summary.',
895
+ '',
896
+ 'The conversation is formatted as XML. Here is how to read it:',
897
+ '- <user>...</user> - something the user said.',
898
+ '- <assistant>...</assistant> - something the assistant said.',
899
+ '- <function_call name="..." call_id="...">...</function_call> - the assistant invoked an action.',
900
+ '- <function_call_output name="..." call_id="...">...</function_call_output> - the result of that action. May contain <error>...</error> if it failed.',
901
+ '',
902
+ 'Guidelines:',
903
+ '- Distill the information learned from function call outputs into the summary. Do not mention that a tool or function was called; just preserve the knowledge gained.',
904
+ '- Focus on user goals, constraints, decisions, key facts, preferences, entities, and any pending or unresolved tasks.',
905
+ '- Omit greetings, filler, and chit-chat.',
906
+ '- Be concise.',
907
+ ].join('\n'),
871
908
  });
872
909
  promptCtx.addMessage({
873
910
  role: 'user',
@@ -886,18 +923,13 @@ export class ChatContext {
886
923
  return this;
887
924
  }
888
925
 
889
- const tailStartTs = tail.length > 0 ? tail[0]!.createdAt : Infinity;
890
-
891
926
  const preserved: ChatItem[] = [];
892
- for (const it of this._items) {
893
- if (
894
- (it.type === 'function_call' || it.type === 'function_call_output') &&
895
- it.createdAt < tailStartTs
896
- ) {
927
+ for (const it of headItems) {
928
+ if (it.type === 'message' && (it.role === 'user' || it.role === 'assistant')) {
897
929
  continue;
898
930
  }
899
931
 
900
- if (it.type === 'message' && (it.role === 'user' || it.role === 'assistant')) {
932
+ if (it.type === 'function_call' || it.type === 'function_call_output') {
901
933
  continue;
902
934
  }
903
935
 
@@ -907,18 +939,18 @@ export class ChatContext {
907
939
  this._items = preserved;
908
940
 
909
941
  const createdAtHint =
910
- tail.length > 0 ? tail[0]!.createdAt - 1e-3 : head[head.length - 1]!.createdAt + 1e-3;
942
+ tailItems.length > 0
943
+ ? tailItems[0]!.createdAt - 1e-6
944
+ : headItems[headItems.length - 1]!.createdAt + 1e-6;
911
945
 
912
946
  this.addMessage({
913
947
  role: 'assistant',
914
- content: `[history summary]\n${summary}`,
948
+ content: toXml('chat_history_summary', summary),
915
949
  createdAt: createdAtHint,
916
950
  extra: { is_summary: true },
917
951
  });
918
952
 
919
- for (const msg of tail) {
920
- this.insert(msg);
921
- }
953
+ this._items.push(...tailItems);
922
954
 
923
955
  return this;
924
956
  }
@@ -931,6 +963,55 @@ export class ChatContext {
931
963
  }
932
964
  }
933
965
 
966
+ function toAttrsStr(attrs?: Record<string, unknown>): string | undefined {
967
+ if (!attrs) {
968
+ return undefined;
969
+ }
970
+
971
+ return Object.entries(attrs)
972
+ .map(([key, value]) => `${key}="${String(value)}"`)
973
+ .join(' ');
974
+ }
975
+
976
+ function toXml(tagName: string, content?: string, attrs?: Record<string, unknown>): string {
977
+ const attrsStr = toAttrsStr(attrs);
978
+ if (content) {
979
+ return [attrsStr ? `<${tagName} ${attrsStr}>` : `<${tagName}>`, content, `</${tagName}>`].join(
980
+ '\n',
981
+ );
982
+ }
983
+
984
+ return attrsStr ? `<${tagName} ${attrsStr} />` : `<${tagName} />`;
985
+ }
986
+
987
+ function functionCallItemToMessage(item: FunctionCall | FunctionCallOutput): ChatMessage {
988
+ if (item.type === 'function_call') {
989
+ return new ChatMessage({
990
+ role: 'user',
991
+ content: [
992
+ toXml('function_call', item.args, {
993
+ name: item.name,
994
+ call_id: item.callId,
995
+ }),
996
+ ],
997
+ createdAt: item.createdAt,
998
+ extra: { is_function_call: true },
999
+ });
1000
+ }
1001
+
1002
+ return new ChatMessage({
1003
+ role: 'assistant',
1004
+ content: [
1005
+ toXml('function_call_output', item.isError ? toXml('error', item.output) : item.output, {
1006
+ name: item.name,
1007
+ call_id: item.callId,
1008
+ }),
1009
+ ],
1010
+ createdAt: item.createdAt,
1011
+ extra: { is_function_call_output: true },
1012
+ });
1013
+ }
1014
+
934
1015
  export class ReadonlyChatContext extends ChatContext {
935
1016
  static readonly errorMsg =
936
1017
  'Please use .copy() and agent.update_chat_ctx() to modify the chat context.';
@@ -1,6 +1,7 @@
1
1
  // SPDX-FileCopyrightText: 2024 LiveKit, Inc.
2
2
  //
3
3
  // SPDX-License-Identifier: Apache-2.0
4
+ import type { Throws } from '@livekit/throws-transformer/throws';
4
5
  import { APIConnectionError, APIError } from '../_exceptions.js';
5
6
  import { log } from '../log.js';
6
7
  import { type APIConnectOptions, DEFAULT_API_CONNECT_OPTIONS } from '../types.js';
@@ -186,11 +187,12 @@ class FallbackLLMStream extends LLMStream {
186
187
  /**
187
188
  * Try to generate with a single LLM.
188
189
  * Returns an async generator that yields chunks.
190
+ * @throws {APIError} When the LLM returns an API error (retryable or not)
189
191
  */
190
192
  private async *tryGenerate(
191
193
  llm: LLM,
192
194
  checkRecovery: boolean = false,
193
- ): AsyncGenerator<ChatChunk, void, unknown> {
195
+ ): AsyncGenerator<Throws<ChatChunk, APIError>, void, unknown> {
194
196
  const connOptions: APIConnectOptions = {
195
197
  ...this.connOptions,
196
198
  maxRetry: this.adapter.maxRetryPerLLM,
@@ -296,8 +298,9 @@ class FallbackLLMStream extends LLMStream {
296
298
 
297
299
  /**
298
300
  * Main run method - iterates through LLMs with fallback logic.
301
+ * @throws {APIConnectionError} When all LLM providers have been exhausted
299
302
  */
300
- protected async run(): Promise<void> {
303
+ protected async run(): Promise<Throws<void, APIConnectionError>> {
301
304
  const startTime = Date.now();
302
305
 
303
306
  // Check if all LLMs are unavailable
package/src/llm/index.ts CHANGED
@@ -63,10 +63,12 @@ export {
63
63
  computeChatCtxDiff,
64
64
  createToolOptions,
65
65
  executeToolCall,
66
+ formatChatHistory,
66
67
  oaiBuildFunctionInfo,
67
68
  oaiParams,
68
69
  serializeImage,
69
70
  toJsonSchema,
71
+ type FormatChatHistoryOptions,
70
72
  type OpenAIFunctionParameters,
71
73
  type SerializedImage,
72
74
  } from './utils.js';
@@ -4,8 +4,15 @@
4
4
  import { VideoBufferType, VideoFrame } from '@livekit/rtc-node';
5
5
  import sharp from 'sharp';
6
6
  import { beforeEach, describe, expect, it, vi } from 'vitest';
7
- import { ChatContext, ChatMessage, type ImageContent } from './chat_context.js';
8
- import { computeChatCtxDiff, serializeImage } from './utils.js';
7
+ import {
8
+ AgentHandoffItem,
9
+ ChatContext,
10
+ ChatMessage,
11
+ FunctionCall,
12
+ FunctionCallOutput,
13
+ type ImageContent,
14
+ } from './chat_context.js';
15
+ import { computeChatCtxDiff, formatChatHistory, serializeImage } from './utils.js';
9
16
 
10
17
  function createChatMessage(
11
18
  id: string,
@@ -356,6 +363,100 @@ describe('computeChatCtxDiff', () => {
356
363
  });
357
364
  });
358
365
 
366
+ describe('formatChatHistory', () => {
367
+ it('should format mixed chat history items for logging', () => {
368
+ const ctx = new ChatContext([
369
+ ChatMessage.create({
370
+ id: 'msg_system',
371
+ role: 'system',
372
+ content: 'You are helpful.',
373
+ createdAt: 1,
374
+ }),
375
+ ChatMessage.create({
376
+ id: 'msg_user',
377
+ role: 'user',
378
+ content: [
379
+ 'Show me my order',
380
+ createImageContent('https://example.com/receipt.png'),
381
+ { type: 'audio_content', frame: [], transcript: 'order 123' },
382
+ ],
383
+ createdAt: 2,
384
+ }),
385
+ FunctionCall.create({
386
+ id: 'fn_call',
387
+ callId: 'call_1',
388
+ name: 'lookup_order',
389
+ args: '{"orderId":"123"}',
390
+ createdAt: 3,
391
+ }),
392
+ FunctionCallOutput.create({
393
+ id: 'fn_output',
394
+ callId: 'call_1',
395
+ name: 'lookup_order',
396
+ output: '{"status":"shipped","ok":true}',
397
+ isError: false,
398
+ createdAt: 4,
399
+ }),
400
+ AgentHandoffItem.create({
401
+ id: 'handoff',
402
+ oldAgentId: 'support',
403
+ newAgentId: 'returns',
404
+ createdAt: 5,
405
+ }),
406
+ ]);
407
+
408
+ expect(formatChatHistory(ctx)).toBe(
409
+ [
410
+ 'Chat history (5 items)',
411
+ '',
412
+ '[0] message system',
413
+ ' You are helpful.',
414
+ '',
415
+ '[1] message user',
416
+ ' Show me my order',
417
+ ' [image url=https://example.com/receipt.png]',
418
+ ' [audio transcript="order 123"]',
419
+ '',
420
+ '[2] function_call lookup_order call_id=call_1',
421
+ ' {',
422
+ ' "orderId": "123"',
423
+ ' }',
424
+ '',
425
+ '[3] function_call_output lookup_order call_id=call_1',
426
+ ' {',
427
+ ' "status": "shipped",',
428
+ ' "ok": true',
429
+ ' }',
430
+ '',
431
+ '[4] agent_handoff',
432
+ ' support -> returns',
433
+ ].join('\n'),
434
+ );
435
+ });
436
+
437
+ it('should optionally include ids and timestamps', () => {
438
+ const ctx = new ChatContext([
439
+ FunctionCallOutput.create({
440
+ id: 'fn_output',
441
+ callId: 'call_1',
442
+ name: 'lookup_order',
443
+ output: 'plain text error',
444
+ isError: true,
445
+ createdAt: 123.456,
446
+ }),
447
+ ]);
448
+
449
+ expect(formatChatHistory(ctx, { includeIds: true, includeTimestamps: true })).toBe(
450
+ [
451
+ 'Chat history (1 items)',
452
+ '',
453
+ '[0] function_call_output lookup_order call_id=call_1 error=true id=fn_output created_at=123.456',
454
+ ' plain text error',
455
+ ].join('\n'),
456
+ );
457
+ });
458
+ });
459
+
359
460
  describe('serializeImage', () => {
360
461
  let consoleWarnSpy: ReturnType<typeof vi.spyOn>;
361
462