@copilotkit/runtime 1.4.8-no-pino-redact.1 → 1.5.0-coagents-v0-3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. package/CHANGELOG.md +20 -5
  2. package/__snapshots__/schema/schema.graphql +7 -9
  3. package/dist/{chunk-OKQVDDJ2.mjs → chunk-4AYRDPWK.mjs} +285 -63
  4. package/dist/chunk-4AYRDPWK.mjs.map +1 -0
  5. package/dist/{chunk-APVJZO3R.mjs → chunk-7BOO3OFP.mjs} +2 -2
  6. package/dist/{chunk-OSWF5QDO.mjs → chunk-GSTWFOZF.mjs} +2 -2
  7. package/dist/{chunk-M74ZTUCC.mjs → chunk-OMAAKIJQ.mjs} +2 -2
  8. package/dist/{chunk-36AYTJIE.mjs → chunk-PXS4SBRF.mjs} +447 -321
  9. package/dist/chunk-PXS4SBRF.mjs.map +1 -0
  10. package/dist/{chunk-B74M7FXG.mjs → chunk-RFF5IIZJ.mjs} +3 -2
  11. package/dist/chunk-RFF5IIZJ.mjs.map +1 -0
  12. package/dist/{copilot-runtime-12e7ac40.d.ts → copilot-runtime-2e46a7b6.d.ts} +2 -2
  13. package/dist/graphql/types/converted/index.d.ts +1 -1
  14. package/dist/graphql/types/converted/index.js +2 -1
  15. package/dist/graphql/types/converted/index.js.map +1 -1
  16. package/dist/graphql/types/converted/index.mjs +1 -1
  17. package/dist/{groq-adapter-24abe931.d.ts → groq-adapter-7bf6824b.d.ts} +1 -1
  18. package/dist/{index-10b1c870.d.ts → index-ff3fbc33.d.ts} +7 -8
  19. package/dist/index.d.ts +5 -5
  20. package/dist/index.js +824 -473
  21. package/dist/index.js.map +1 -1
  22. package/dist/index.mjs +10 -6
  23. package/dist/index.mjs.map +1 -1
  24. package/dist/{langserve-f021ab9c.d.ts → langserve-f318db89.d.ts} +53 -14
  25. package/dist/lib/index.d.ts +4 -4
  26. package/dist/lib/index.js +729 -452
  27. package/dist/lib/index.js.map +1 -1
  28. package/dist/lib/index.mjs +6 -6
  29. package/dist/lib/integrations/index.d.ts +4 -4
  30. package/dist/lib/integrations/index.js +79 -31
  31. package/dist/lib/integrations/index.js.map +1 -1
  32. package/dist/lib/integrations/index.mjs +6 -6
  33. package/dist/lib/integrations/nest/index.d.ts +3 -3
  34. package/dist/lib/integrations/nest/index.js +79 -31
  35. package/dist/lib/integrations/nest/index.js.map +1 -1
  36. package/dist/lib/integrations/nest/index.mjs +4 -4
  37. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  38. package/dist/lib/integrations/node-express/index.js +79 -31
  39. package/dist/lib/integrations/node-express/index.js.map +1 -1
  40. package/dist/lib/integrations/node-express/index.mjs +4 -4
  41. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  42. package/dist/lib/integrations/node-http/index.js +79 -31
  43. package/dist/lib/integrations/node-http/index.js.map +1 -1
  44. package/dist/lib/integrations/node-http/index.mjs +3 -3
  45. package/dist/service-adapters/index.d.ts +36 -5
  46. package/dist/service-adapters/index.js +285 -61
  47. package/dist/service-adapters/index.js.map +1 -1
  48. package/dist/service-adapters/index.mjs +5 -1
  49. package/package.json +2 -2
  50. package/src/agents/langgraph/event-source.ts +140 -148
  51. package/src/agents/langgraph/events.ts +1 -1
  52. package/src/graphql/inputs/message.input.ts +15 -3
  53. package/src/graphql/resolvers/copilot.resolver.ts +32 -6
  54. package/src/graphql/types/converted/index.ts +4 -3
  55. package/src/graphql/types/copilot-response.type.ts +12 -3
  56. package/src/graphql/types/enums.ts +0 -11
  57. package/src/lib/logger.ts +4 -0
  58. package/src/lib/runtime/copilot-runtime.ts +1 -7
  59. package/src/lib/runtime/remote-action-constructors.ts +64 -58
  60. package/src/lib/runtime/remote-actions.ts +1 -0
  61. package/src/lib/runtime/remote-lg-action.ts +159 -140
  62. package/src/service-adapters/anthropic/anthropic-adapter.ts +16 -6
  63. package/src/service-adapters/conversion.ts +2 -1
  64. package/src/service-adapters/events.ts +118 -54
  65. package/src/service-adapters/experimental/empty/empty-adapter.ts +33 -0
  66. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +7 -3
  67. package/src/service-adapters/groq/groq-adapter.ts +23 -8
  68. package/src/service-adapters/index.ts +7 -1
  69. package/src/service-adapters/langchain/utils.ts +55 -32
  70. package/src/service-adapters/openai/openai-adapter.ts +22 -9
  71. package/src/service-adapters/openai/openai-assistant-adapter.ts +22 -8
  72. package/src/service-adapters/unify/unify-adapter.ts +28 -11
  73. package/dist/chunk-36AYTJIE.mjs.map +0 -1
  74. package/dist/chunk-B74M7FXG.mjs.map +0 -1
  75. package/dist/chunk-OKQVDDJ2.mjs.map +0 -1
  76. /package/dist/{chunk-APVJZO3R.mjs.map → chunk-7BOO3OFP.mjs.map} +0 -0
  77. /package/dist/{chunk-OSWF5QDO.mjs.map → chunk-GSTWFOZF.mjs.map} +0 -0
  78. /package/dist/{chunk-M74ZTUCC.mjs.map → chunk-OMAAKIJQ.mjs.map} +0 -0
@@ -1,8 +1,8 @@
- import { C as CopilotServiceAdapter, c as CopilotRuntimeChatCompletionRequest, d as CopilotRuntimeChatCompletionResponse } from '../langserve-f021ab9c.js';
- export { a as RemoteChain, R as RemoteChainParameters } from '../langserve-f021ab9c.js';
- export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-24abe931.js';
+ import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-f318db89.js';
+ export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-f318db89.js';
+ export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-7bf6824b.js';
  import Anthropic from '@anthropic-ai/sdk';
- import '../index-10b1c870.js';
+ import '../index-ff3fbc33.js';
  import '../graphql/types/base/index.js';
  import 'rxjs';
  import '@copilotkit/shared';
@@ -50,4 +50,35 @@ declare class AnthropicAdapter implements CopilotServiceAdapter {
  process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
  }
 
- export { AnthropicAdapter, AnthropicAdapterParams, CopilotServiceAdapter };
+ interface OllamaAdapterOptions {
+ model?: string;
+ }
+ declare class ExperimentalOllamaAdapter implements CopilotServiceAdapter {
+ private model;
+ constructor(options?: OllamaAdapterOptions);
+ process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+ }
+
+ /**
+ * CopilotKit Empty Adapter
+ *
+ * This adapter is meant to preserve adherence to runtime requirements, while doing nothing
+ * Ideal if you don't want to connect an LLM the to the runtime, and only use your LangGraph agent.
+ * Be aware that Copilot Suggestions will not work if you use this adapter
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, ExperimentalEmptyAdapter } from "@copilotkit/runtime";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * return new ExperimentalEmptyAdapter();
+ * ```
+ */
+
+ declare class ExperimentalEmptyAdapter implements CopilotServiceAdapter {
+ process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+ }
+
+ export { AnthropicAdapter, AnthropicAdapterParams, CopilotRuntimeChatCompletionRequest, CopilotRuntimeChatCompletionResponse, CopilotServiceAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter };
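For orientation, here is a minimal usage sketch of the new `ExperimentalEmptyAdapter`, based directly on the doc comment added in the hunk above; how the adapter is ultimately returned to an endpoint handler is not shown in this diff and is left out here.

```ts
import { CopilotRuntime, ExperimentalEmptyAdapter } from "@copilotkit/runtime";

// Runtime with no LLM attached: the empty adapter satisfies the service-adapter
// contract while doing nothing, so requests are handled by your agent only.
// Per the doc comment above, Copilot Suggestions will not work in this setup.
const copilotKit = new CopilotRuntime();
const serviceAdapter = new ExperimentalEmptyAdapter();
```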
@@ -31,6 +31,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var service_adapters_exports = {};
  __export(service_adapters_exports, {
  AnthropicAdapter: () => AnthropicAdapter,
+ ExperimentalEmptyAdapter: () => ExperimentalEmptyAdapter,
+ ExperimentalOllamaAdapter: () => ExperimentalOllamaAdapter,
  GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
  GroqAdapter: () => GroqAdapter,
  LangChainAdapter: () => LangChainAdapter,
@@ -315,6 +317,8 @@ var OpenAIAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
+ let currentMessageId;
+ let currentToolCallId;
  for await (const chunk of stream) {
  if (chunk.choices.length === 0) {
  continue;
@@ -323,30 +327,52 @@
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
+ currentToolCallId = toolCall.id;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ parentMessageId: chunk.id,
+ actionName: toolCall.function.name
+ });
  } else if (content) {
  mode = "message";
- eventStream$.sendTextMessageStart(chunk.id);
+ currentMessageId = chunk.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent(content);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCall.function.arguments
+ });
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  eventStream$.complete();
  });
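The same migration from positional arguments to a single options object recurs in every adapter hunk below (LangChain utils, OpenAI Assistant, Unify, Groq, Anthropic). A compact sketch of the new call shapes, inferred from the hunk above; the `EventStream` alias and the wrapper functions are placeholders for illustration, not declarations from the package.

```ts
// Inferred 1.5.0 shapes of the event-stream helpers used in this diff.
type EventStream = any; // placeholder; the package's own stream type is not shown here

function emitTextMessage(eventStream$: EventStream, messageId: string, content: string) {
  // 1.4.x: eventStream$.sendTextMessageStart(messageId)
  eventStream$.sendTextMessageStart({ messageId });
  // 1.4.x: eventStream$.sendTextMessageContent(content)
  eventStream$.sendTextMessageContent({ messageId, content });
  // 1.4.x: eventStream$.sendTextMessageEnd()
  eventStream$.sendTextMessageEnd({ messageId });
}

function emitActionExecution(
  eventStream$: EventStream,
  actionExecutionId: string,
  actionName: string,
  args: string,
  parentMessageId?: string,
) {
  // 1.4.x: eventStream$.sendActionExecutionStart(toolCallId, toolCallName)
  eventStream$.sendActionExecutionStart({ actionExecutionId, actionName, parentMessageId });
  // 1.4.x: eventStream$.sendActionExecutionArgs(args)
  eventStream$.sendActionExecutionArgs({ actionExecutionId, args });
  // 1.4.x: eventStream$.sendActionExecutionEnd()
  eventStream$.sendActionExecutionEnd({ actionExecutionId });
}
```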
@@ -414,17 +440,25 @@ function isBaseMessageChunk(message) {
  __name(isBaseMessageChunk, "isBaseMessageChunk");
  function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (actionExecution) {
- eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, "Sending a message");
+ eventStream$.sendActionExecutionResult({
+ actionExecutionId: actionExecution.id,
+ actionName: actionExecution.name,
+ result: "Sending a message"
+ });
  }
  }
  __name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
  async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  if (typeof result === "string") {
  if (!actionExecution) {
  eventStream$.sendTextMessage((0, import_shared2.randomId)(), result);
  } else {
- eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
+ eventStream$.sendActionExecutionResult({
+ actionExecutionId: actionExecution.id,
+ actionName: actionExecution.name,
+ result
+ });
  }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -432,7 +466,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  eventStream$.sendTextMessage((0, import_shared2.randomId)(), result.content);
  }
  for (const toolCall of result.tool_calls) {
- eventStream$.sendActionExecution(toolCall.id || (0, import_shared2.randomId)(), toolCall.name, JSON.stringify(toolCall.args));
+ eventStream$.sendActionExecution({
+ actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
+ actionName: toolCall.name,
+ args: JSON.stringify(toolCall.args)
+ });
  }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -441,13 +479,18 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
- eventStream$.sendActionExecution(toolCall.id || (0, import_shared2.randomId)(), toolCall.name, JSON.stringify(toolCall.args));
+ eventStream$.sendActionExecution({
+ actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
+ actionName: toolCall.name,
+ args: JSON.stringify(toolCall.args)
+ });
  }
  }
  } else if (result && "getReader" in result) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  let reader = result.getReader();
  let mode = null;
+ let currentMessageId;
  const toolCallDetails = {
  name: null,
  id: null,
@@ -461,9 +504,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  let toolCallId = void 0;
  let toolCallArgs = void 0;
  let hasToolCall = false;
- let content = value == null ? void 0 : value.content;
+ let content = "";
+ if (value && value.content) {
+ content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
+ }
  if (isAIMessageChunk(value)) {
- let chunk = (_d = value.tool_call_chunks) == null ? void 0 : _d[0];
+ let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
  toolCallArgs = chunk == null ? void 0 : chunk.args;
  hasToolCall = chunk != void 0;
  if (chunk == null ? void 0 : chunk.name)
@@ -478,18 +524,22 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  toolCallName = toolCallDetails.name;
  toolCallId = toolCallDetails.id;
  } else if (isBaseMessageChunk(value)) {
- let chunk = (_f = (_e = value.additional_kwargs) == null ? void 0 : _e.tool_calls) == null ? void 0 : _f[0];
- toolCallName = (_g = chunk == null ? void 0 : chunk.function) == null ? void 0 : _g.name;
+ let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
+ toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
  toolCallId = chunk == null ? void 0 : chunk.id;
- toolCallArgs = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.arguments;
+ toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
  hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
  }
  if (mode === "message" && (toolCallId || done)) {
  mode = null;
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function" && (!hasToolCall || done)) {
  mode = null;
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: toolCallId
+ });
  }
  if (done) {
  break;
@@ -497,21 +547,40 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  if (mode === null) {
  if (hasToolCall && toolCallId && toolCallName) {
  mode = "function";
- eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: toolCallId,
+ actionName: toolCallName,
+ parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
+ });
  } else if (content) {
  mode = "message";
- eventStream$.sendTextMessageStart((0, import_shared2.randomId)());
+ currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared2.randomId)();
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent(Array.isArray(content) ? ((_i = content[0]) == null ? void 0 : _i.text) ?? "" : content);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
  } else if (mode === "function" && toolCallArgs) {
  if (toolCallDetails.index !== toolCallDetails.prevIndex) {
- eventStream$.sendActionExecutionEnd();
- eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: toolCallId
+ });
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: toolCallId,
+ actionName: toolCallName,
+ parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
+ });
  toolCallDetails.prevIndex = toolCallDetails.index;
  }
- eventStream$.sendActionExecutionArgs(toolCallArgs);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: toolCallId,
+ args: toolCallArgs
+ });
  }
  } catch (error) {
  console.error("Error reading from stream", error);
@@ -519,7 +588,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  }
  }
  } else if (actionExecution) {
- eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, encodeResult(result));
+ eventStream$.sendActionExecutionResult({
+ actionExecutionId: actionExecution.id,
+ actionName: actionExecution.name,
+ result: encodeResult(result)
+ });
  } else {
  throw new Error("Invalid return type from LangChain function.");
  }
@@ -699,21 +772,33 @@ var OpenAIAssistantAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b, _c, _d, _e, _f;
  let inFunctionCall = false;
+ let currentMessageId;
+ let currentToolCallId;
  for await (const chunk of stream) {
  switch (chunk.event) {
  case "thread.message.created":
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
- eventStream$.sendTextMessageStart(chunk.data.id);
+ currentMessageId = chunk.data.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  break;
  case "thread.message.delta":
  if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
- eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
+ });
  }
  break;
  case "thread.message.completed":
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  break;
  case "thread.run.step.delta":
  let toolCallId;
@@ -726,18 +811,30 @@ var OpenAIAssistantAdapter = class {
  }
  if (toolCallName && toolCallId) {
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  inFunctionCall = true;
- eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
+ currentToolCallId = toolCallId;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ parentMessageId: chunk.data.id,
+ actionName: toolCallName
+ });
  } else if (toolCallArgs) {
- eventStream$.sendActionExecutionArgs(toolCallArgs);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCallArgs
+ });
  }
  break;
  }
  }
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  eventStream$.complete();
  });
@@ -790,46 +887,77 @@ var UnifyAdapter = class {
  }
  });
  let model = null;
+ let currentMessageId;
+ let currentToolCallId;
  request.eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
  for await (const chunk of stream) {
  if (this.start) {
  model = chunk.model;
- eventStream$.sendTextMessageStart((0, import_shared4.randomId)());
- eventStream$.sendTextMessageContent(`Model used: ${model}
- `);
- eventStream$.sendTextMessageEnd();
+ currentMessageId = (0, import_shared4.randomId)();
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content: `Model used: ${model}
+ `
+ });
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  this.start = false;
  }
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
+ currentToolCallId = toolCall.id;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ actionName: toolCall.function.name
+ });
  } else if (content) {
  mode = "message";
- eventStream$.sendTextMessageStart(chunk.id);
+ currentMessageId = chunk.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent(content);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCall.function.arguments
+ });
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  eventStream$.complete();
  });
@@ -895,35 +1023,59 @@ var GroqAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
+ let currentMessageId;
+ let currentToolCallId;
  for await (const chunk of stream) {
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
+ currentToolCallId = toolCall.id;
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ actionName: toolCall.function.name,
+ parentMessageId: chunk.id
+ });
  } else if (content) {
  mode = "message";
- eventStream$.sendTextMessageStart(chunk.id);
+ currentMessageId = chunk.id;
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent(content);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content
+ });
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: toolCall.function.arguments
+ });
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  eventStream$.complete();
  });
@@ -1131,7 +1283,11 @@ var AnthropicAdapter = class {
  mode = "message";
  } else if (chunk.content_block.type === "tool_use") {
  currentToolCallId = chunk.content_block.id;
- eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);
+ eventStream$.sendActionExecutionStart({
+ actionExecutionId: currentToolCallId,
+ actionName: chunk.content_block.name,
+ parentMessageId: currentMessageId
+ });
  mode = "function";
  }
  } else if (chunk.type === "content_block_delta") {
@@ -1139,21 +1295,33 @@ var AnthropicAdapter = class {
  const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
  if (text.length > 0) {
  if (!didOutputText) {
- eventStream$.sendTextMessageStart(currentMessageId);
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
  didOutputText = true;
  }
- eventStream$.sendTextMessageContent(text);
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content: text
+ });
  }
  } else if (chunk.delta.type === "input_json_delta") {
- eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);
+ eventStream$.sendActionExecutionArgs({
+ actionExecutionId: currentToolCallId,
+ args: chunk.delta.partial_json
+ });
  }
  } else if (chunk.type === "content_block_stop") {
  if (mode === "message") {
  if (didOutputText) {
- eventStream$.sendTextMessageEnd();
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
  }
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionEnd({
+ actionExecutionId: currentToolCallId
+ });
  }
  }
  }
@@ -1197,9 +1365,65 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
  this.didFilterThinkingTag = false;
  }
  }, "FilterThinkingTextBuffer");
+
+ // src/service-adapters/experimental/ollama/ollama-adapter.ts
+ var import_ollama = require("@langchain/community/llms/ollama");
+ var import_shared7 = require("@copilotkit/shared");
+ var DEFAULT_MODEL4 = "llama3:latest";
+ var ExperimentalOllamaAdapter = class {
+ model;
+ constructor(options) {
+ if (options == null ? void 0 : options.model) {
+ this.model = options.model;
+ } else {
+ this.model = DEFAULT_MODEL4;
+ }
+ }
+ async process(request) {
+ const { messages, actions, eventSource } = request;
+ const ollama = new import_ollama.Ollama({
+ model: this.model
+ });
+ const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
+ const _stream = await ollama.stream(contents);
+ eventSource.stream(async (eventStream$) => {
+ const currentMessageId = (0, import_shared7.randomId)();
+ eventStream$.sendTextMessageStart({
+ messageId: currentMessageId
+ });
+ for await (const chunkText of _stream) {
+ eventStream$.sendTextMessageContent({
+ messageId: currentMessageId,
+ content: chunkText
+ });
+ }
+ eventStream$.sendTextMessageEnd({
+ messageId: currentMessageId
+ });
+ eventStream$.complete();
+ });
+ return {
+ threadId: request.threadId || (0, import_shared7.randomId)()
+ };
+ }
+ };
+ __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
+
+ // src/service-adapters/experimental/empty/empty-adapter.ts
+ var import_shared8 = require("@copilotkit/shared");
+ var ExperimentalEmptyAdapter = class {
+ async process(request) {
+ return {
+ threadId: request.threadId || (0, import_shared8.randomId)()
+ };
+ }
+ };
+ __name(ExperimentalEmptyAdapter, "ExperimentalEmptyAdapter");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AnthropicAdapter,
+ ExperimentalEmptyAdapter,
+ ExperimentalOllamaAdapter,
  GoogleGenerativeAIAdapter,
  GroqAdapter,
  LangChainAdapter,
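To close, a hedged usage sketch for the `ExperimentalOllamaAdapter` implementation added above. The `@langchain/community` dependency it requires is not bundled by this package, and importing the adapter from the package root (rather than a service-adapters subpath) is an assumption based on the export lists in this diff.

```ts
import { CopilotRuntime, ExperimentalOllamaAdapter } from "@copilotkit/runtime";

// "llama3:latest" is the adapter's default (DEFAULT_MODEL4 above), so passing
// it explicitly is only for illustration; any locally pulled Ollama model works.
const serviceAdapter = new ExperimentalOllamaAdapter({ model: "llama3:latest" });
const runtime = new CopilotRuntime();

// process() concatenates the request's text messages, streams the Ollama
// completion, and emits it as a single text message on the event stream.
```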