@copilotkit/runtime 1.4.8-coagents-v0-3.1 → 1.4.8-next.1

This diff shows the published contents of two publicly available package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (77)
  1. package/CHANGELOG.md +5 -4
  2. package/__snapshots__/schema/schema.graphql +10 -7
  3. package/dist/{chunk-FVYNRYIB.mjs → chunk-5KJYPVQJ.mjs} +307 -423
  4. package/dist/chunk-5KJYPVQJ.mjs.map +1 -0
  5. package/dist/{chunk-RFF5IIZJ.mjs → chunk-B74M7FXG.mjs} +2 -3
  6. package/dist/chunk-B74M7FXG.mjs.map +1 -0
  7. package/dist/{chunk-BACNNHHI.mjs → chunk-CGGI46KC.mjs} +2 -2
  8. package/dist/{chunk-YFG3Q3YH.mjs → chunk-EU52BTKR.mjs} +2 -2
  9. package/dist/{chunk-MQJNZYAH.mjs → chunk-KO4QCMY4.mjs} +2 -2
  10. package/dist/{chunk-2PK2SFRB.mjs → chunk-W7GP2EOT.mjs} +106 -221
  11. package/dist/chunk-W7GP2EOT.mjs.map +1 -0
  12. package/dist/{copilot-runtime-b15b683d.d.ts → copilot-runtime-1c5bf72b.d.ts} +2 -2
  13. package/dist/graphql/types/converted/index.d.ts +1 -1
  14. package/dist/graphql/types/converted/index.js +1 -2
  15. package/dist/graphql/types/converted/index.js.map +1 -1
  16. package/dist/graphql/types/converted/index.mjs +1 -1
  17. package/dist/{groq-adapter-50aa9621.d.ts → groq-adapter-b6a40422.d.ts} +1 -1
  18. package/dist/{index-ff3fbc33.d.ts → index-10b1c870.d.ts} +8 -7
  19. package/dist/index.d.ts +5 -5
  20. package/dist/index.js +515 -745
  21. package/dist/index.js.map +1 -1
  22. package/dist/index.mjs +10 -6
  23. package/dist/index.mjs.map +1 -1
  24. package/dist/{langserve-8ec29cba.d.ts → langserve-6245df39.d.ts} +14 -52
  25. package/dist/lib/index.d.ts +4 -4
  26. package/dist/lib/index.js +442 -706
  27. package/dist/lib/index.js.map +1 -1
  28. package/dist/lib/index.mjs +6 -6
  29. package/dist/lib/integrations/index.d.ts +4 -4
  30. package/dist/lib/integrations/index.js +37 -71
  31. package/dist/lib/integrations/index.js.map +1 -1
  32. package/dist/lib/integrations/index.mjs +6 -6
  33. package/dist/lib/integrations/nest/index.d.ts +3 -3
  34. package/dist/lib/integrations/nest/index.js +37 -71
  35. package/dist/lib/integrations/nest/index.js.map +1 -1
  36. package/dist/lib/integrations/nest/index.mjs +4 -4
  37. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  38. package/dist/lib/integrations/node-express/index.js +37 -71
  39. package/dist/lib/integrations/node-express/index.js.map +1 -1
  40. package/dist/lib/integrations/node-express/index.mjs +4 -4
  41. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  42. package/dist/lib/integrations/node-http/index.js +37 -71
  43. package/dist/lib/integrations/node-http/index.js.map +1 -1
  44. package/dist/lib/integrations/node-http/index.mjs +3 -3
  45. package/dist/service-adapters/index.d.ts +36 -5
  46. package/dist/service-adapters/index.js +106 -219
  47. package/dist/service-adapters/index.js.map +1 -1
  48. package/dist/service-adapters/index.mjs +5 -1
  49. package/package.json +2 -2
  50. package/src/agents/langgraph/event-source.ts +148 -140
  51. package/src/agents/langgraph/events.ts +1 -1
  52. package/src/graphql/inputs/forwarded-parameters.input.ts +3 -0
  53. package/src/graphql/inputs/message.input.ts +3 -15
  54. package/src/graphql/resolvers/copilot.resolver.ts +6 -32
  55. package/src/graphql/types/converted/index.ts +3 -4
  56. package/src/graphql/types/copilot-response.type.ts +3 -12
  57. package/src/graphql/types/enums.ts +11 -0
  58. package/src/lib/runtime/remote-action-constructors.ts +62 -60
  59. package/src/lib/runtime/remote-actions.ts +0 -1
  60. package/src/lib/runtime/remote-lg-action.ts +140 -161
  61. package/src/service-adapters/anthropic/anthropic-adapter.ts +6 -16
  62. package/src/service-adapters/conversion.ts +1 -2
  63. package/src/service-adapters/events.ts +52 -111
  64. package/src/service-adapters/experimental/empty/empty-adapter.ts +33 -0
  65. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +3 -7
  66. package/src/service-adapters/groq/groq-adapter.ts +8 -23
  67. package/src/service-adapters/index.ts +7 -1
  68. package/src/service-adapters/langchain/utils.ts +31 -49
  69. package/src/service-adapters/openai/openai-adapter.ts +9 -22
  70. package/src/service-adapters/openai/openai-assistant-adapter.ts +8 -22
  71. package/src/service-adapters/unify/unify-adapter.ts +11 -28
  72. package/dist/chunk-2PK2SFRB.mjs.map +0 -1
  73. package/dist/chunk-FVYNRYIB.mjs.map +0 -1
  74. package/dist/chunk-RFF5IIZJ.mjs.map +0 -1
  75. /package/dist/{chunk-BACNNHHI.mjs.map → chunk-CGGI46KC.mjs.map} +0 -0
  76. /package/dist/{chunk-YFG3Q3YH.mjs.map → chunk-EU52BTKR.mjs.map} +0 -0
  77. /package/dist/{chunk-MQJNZYAH.mjs.map → chunk-KO4QCMY4.mjs.map} +0 -0
@@ -1,8 +1,8 @@
- import { C as CopilotServiceAdapter, c as CopilotRuntimeChatCompletionRequest, d as CopilotRuntimeChatCompletionResponse } from '../langserve-8ec29cba.js';
- export { a as RemoteChain, R as RemoteChainParameters } from '../langserve-8ec29cba.js';
- export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-50aa9621.js';
+ import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-6245df39.js';
+ export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-6245df39.js';
+ export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-b6a40422.js';
  import Anthropic from '@anthropic-ai/sdk';
- import '../index-ff3fbc33.js';
+ import '../index-10b1c870.js';
  import '../graphql/types/base/index.js';
  import 'rxjs';
  import '@copilotkit/shared';
@@ -50,4 +50,35 @@ declare class AnthropicAdapter implements CopilotServiceAdapter {
  process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
  }
 
- export { AnthropicAdapter, AnthropicAdapterParams, CopilotServiceAdapter };
+ interface OllamaAdapterOptions {
+ model?: string;
+ }
+ declare class ExperimentalOllamaAdapter implements CopilotServiceAdapter {
+ private model;
+ constructor(options?: OllamaAdapterOptions);
+ process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+ }
+
+ /**
+ * CopilotKit Empty Adapter
+ *
+ * This adapter is meant to preserve adherence to runtime requirements, while doing nothing
+ * Ideal if you don't want to connect an LLM the to the runtime, and only use your LangGraph agent.
+ * Be aware that Copilot Suggestions will not work if you use this adapter
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, ExperimentalEmptyAdapter } from "@copilotkit/runtime";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * return new ExperimentalEmptyAdapter();
+ * ```
+ */
+
+ declare class ExperimentalEmptyAdapter implements CopilotServiceAdapter {
+ process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+ }
+
+ export { AnthropicAdapter, AnthropicAdapterParams, CopilotRuntimeChatCompletionRequest, CopilotRuntimeChatCompletionResponse, CopilotServiceAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter };
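For context, the new `ExperimentalEmptyAdapter` is intended for setups where a remote LangGraph agent does all the LLM work and the runtime itself should not call a model. The `## Example` in the docstring above only constructs the adapter; below is a slightly fuller, hedged sketch of how it might be served. It assumes the `copilotRuntimeNodeHttpEndpoint` helper from this package's node-http integration (one of the files listed above); the exact export and option names may differ from what is shown here.

```ts
// Sketch only, not the package's documented setup: the empty adapter satisfies the
// CopilotServiceAdapter contract without calling any LLM, so the runtime can lean
// entirely on a remote LangGraph agent.
import { createServer } from "node:http";
import {
  CopilotRuntime,
  ExperimentalEmptyAdapter,
  copilotRuntimeNodeHttpEndpoint, // assumed export of the node-http integration
} from "@copilotkit/runtime";

const runtime = new CopilotRuntime();
const serviceAdapter = new ExperimentalEmptyAdapter();

const server = createServer((req, res) => {
  // Build the request handler for the /copilotkit endpoint and delegate to it.
  const handler = copilotRuntimeNodeHttpEndpoint({
    endpoint: "/copilotkit",
    runtime,
    serviceAdapter,
  });
  return handler(req, res);
});

server.listen(4000);
```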
@@ -31,6 +31,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var service_adapters_exports = {};
  __export(service_adapters_exports, {
  AnthropicAdapter: () => AnthropicAdapter,
+ ExperimentalEmptyAdapter: () => ExperimentalEmptyAdapter,
+ ExperimentalOllamaAdapter: () => ExperimentalOllamaAdapter,
  GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
  GroqAdapter: () => GroqAdapter,
  LangChainAdapter: () => LangChainAdapter,
@@ -315,8 +317,6 @@ var OpenAIAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  if (chunk.choices.length === 0) {
  continue;
@@ -325,52 +325,30 @@
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- parentMessageId: chunk.id,
- actionName: toolCall.function.name
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
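This hunk, and the analogous adapter hunks below, all follow the same pattern: the event-stream helpers drop their single options-object argument in favor of positional arguments, and the `...End()` calls no longer carry an id. A rough sketch of the resulting call sequence inside an adapter's `process`, as it appears in this version's compiled output; the `eventSource`/`eventStream$` objects come from the runtime request, and `messageId`, `toolCallId`, and the action name used here are placeholder values for illustration:

```ts
// Sketch of the positional event-stream API in this version.
// Events are emitted in start -> content/args -> end order, then the stream completes.
eventSource.stream(async (eventStream$) => {
  // Plain assistant text: end no longer takes a messageId.
  eventStream$.sendTextMessageStart(messageId);
  eventStream$.sendTextMessageContent("Hello ");
  eventStream$.sendTextMessageContent("world");
  eventStream$.sendTextMessageEnd();

  // Tool / action call: start(id, name) -> args chunks -> end().
  eventStream$.sendActionExecutionStart(toolCallId, "searchDocs");
  eventStream$.sendActionExecutionArgs('{"query":"copilotkit"}');
  eventStream$.sendActionExecutionEnd();

  // Reporting a result for an action executed elsewhere: (id, name, result).
  eventStream$.sendActionExecutionResult(toolCallId, "searchDocs", "3 documents found");

  eventStream$.complete();
});
```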
@@ -438,25 +416,17 @@ function isBaseMessageChunk(message) {
  __name(isBaseMessageChunk, "isBaseMessageChunk");
  function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (actionExecution) {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result: "Sending a message"
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, "Sending a message");
  }
  }
  __name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
  async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
  if (typeof result === "string") {
  if (!actionExecution) {
  eventStream$.sendTextMessage((0, import_shared2.randomId)(), result);
  } else {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
  }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -464,11 +434,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  eventStream$.sendTextMessage((0, import_shared2.randomId)(), result.content);
  }
  for (const toolCall of result.tool_calls) {
- eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
- actionName: toolCall.name,
- args: JSON.stringify(toolCall.args)
- });
+ eventStream$.sendActionExecution(toolCall.id || (0, import_shared2.randomId)(), toolCall.name, JSON.stringify(toolCall.args));
  }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -477,18 +443,13 @@
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
- eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
- actionName: toolCall.name,
- args: JSON.stringify(toolCall.args)
- });
+ eventStream$.sendActionExecution(toolCall.id || (0, import_shared2.randomId)(), toolCall.name, JSON.stringify(toolCall.args));
  }
  }
  } else if (result && "getReader" in result) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  let reader = result.getReader();
  let mode = null;
- let currentMessageId;
  const toolCallDetails = {
  name: null,
  id: null,
@@ -527,14 +488,10 @@
  }
  if (mode === "message" && (toolCallId || done)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (!hasToolCall || done)) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: toolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (done) {
  break;
@@ -542,40 +499,21 @@
  if (mode === null) {
  if (hasToolCall && toolCallId && toolCallName) {
  mode = "function";
- eventStream$.sendActionExecutionStart({
- actionExecutionId: toolCallId,
- actionName: toolCallName,
- parentMessageId: (_i = value.lc_kwargs) == null ? void 0 : _i.id
- });
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  } else if (content) {
  mode = "message";
- currentMessageId = ((_j = value.lc_kwargs) == null ? void 0 : _j.id) || (0, import_shared2.randomId)();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart((0, import_shared2.randomId)());
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: Array.isArray(content) ? ((_k = content[0]) == null ? void 0 : _k.text) ?? "" : content
- });
+ eventStream$.sendTextMessageContent(Array.isArray(content) ? ((_i = content[0]) == null ? void 0 : _i.text) ?? "" : content);
  } else if (mode === "function" && toolCallArgs) {
  if (toolCallDetails.index !== toolCallDetails.prevIndex) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: toolCallId
- });
- eventStream$.sendActionExecutionStart({
- actionExecutionId: toolCallId,
- actionName: toolCallName,
- parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
- });
+ eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  toolCallDetails.prevIndex = toolCallDetails.index;
  }
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: toolCallId,
- args: toolCallArgs
- });
+ eventStream$.sendActionExecutionArgs(toolCallArgs);
  }
  } catch (error) {
  console.error("Error reading from stream", error);
@@ -583,11 +521,7 @@
  }
  }
  } else if (actionExecution) {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result: encodeResult(result)
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, encodeResult(result));
  } else {
  throw new Error("Invalid return type from LangChain function.");
  }
@@ -767,33 +701,21 @@ var OpenAIAssistantAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b, _c, _d, _e, _f;
  let inFunctionCall = false;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  switch (chunk.event) {
  case "thread.message.created":
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
- currentMessageId = chunk.data.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.data.id);
  break;
  case "thread.message.delta":
  if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
- });
+ eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
  }
  break;
  case "thread.message.completed":
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  break;
  case "thread.run.step.delta":
  let toolCallId;
@@ -806,30 +728,18 @@
  }
  if (toolCallName && toolCallId) {
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  inFunctionCall = true;
- currentToolCallId = toolCallId;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- parentMessageId: chunk.data.id,
- actionName: toolCallName
- });
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  } else if (toolCallArgs) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCallArgs
- });
+ eventStream$.sendActionExecutionArgs(toolCallArgs);
  }
  break;
  }
  }
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -882,77 +792,46 @@ var UnifyAdapter = class {
  }
  });
  let model = null;
- let currentMessageId;
- let currentToolCallId;
  request.eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
  for await (const chunk of stream) {
  if (this.start) {
  model = chunk.model;
- currentMessageId = (0, import_shared4.randomId)();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: `Model used: ${model}
- `
- });
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart((0, import_shared4.randomId)());
+ eventStream$.sendTextMessageContent(`Model used: ${model}
+ `);
+ eventStream$.sendTextMessageEnd();
  this.start = false;
  }
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -1018,59 +897,35 @@ var GroqAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name,
- parentMessageId: chunk.id
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -1278,11 +1133,7 @@ var AnthropicAdapter = class {
  mode = "message";
  } else if (chunk.content_block.type === "tool_use") {
  currentToolCallId = chunk.content_block.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: chunk.content_block.name,
- parentMessageId: currentMessageId
- });
+ eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);
  mode = "function";
  }
  } else if (chunk.type === "content_block_delta") {
@@ -1290,33 +1141,21 @@
  const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
  if (text.length > 0) {
  if (!didOutputText) {
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(currentMessageId);
  didOutputText = true;
  }
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: text
- });
+ eventStream$.sendTextMessageContent(text);
  }
  } else if (chunk.delta.type === "input_json_delta") {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: chunk.delta.partial_json
- });
+ eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);
  }
  } else if (chunk.type === "content_block_stop") {
  if (mode === "message") {
  if (didOutputText) {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  }
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  }
  }
@@ -1360,9 +1199,57 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
  this.didFilterThinkingTag = false;
  }
  }, "FilterThinkingTextBuffer");
+
+ // src/service-adapters/experimental/ollama/ollama-adapter.ts
+ var import_ollama = require("@langchain/community/llms/ollama");
+ var import_shared7 = require("@copilotkit/shared");
+ var DEFAULT_MODEL4 = "llama3:latest";
+ var ExperimentalOllamaAdapter = class {
+ model;
+ constructor(options) {
+ if (options == null ? void 0 : options.model) {
+ this.model = options.model;
+ } else {
+ this.model = DEFAULT_MODEL4;
+ }
+ }
+ async process(request) {
+ const { messages, actions, eventSource } = request;
+ const ollama = new import_ollama.Ollama({
+ model: this.model
+ });
+ const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
+ const _stream = await ollama.stream(contents);
+ eventSource.stream(async (eventStream$) => {
+ eventStream$.sendTextMessageStart((0, import_shared7.randomId)());
+ for await (const chunkText of _stream) {
+ eventStream$.sendTextMessageContent(chunkText);
+ }
+ eventStream$.sendTextMessageEnd();
+ eventStream$.complete();
+ });
+ return {
+ threadId: request.threadId || (0, import_shared7.randomId)()
+ };
+ }
+ };
+ __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
+
+ // src/service-adapters/experimental/empty/empty-adapter.ts
+ var import_shared8 = require("@copilotkit/shared");
+ var ExperimentalEmptyAdapter = class {
+ async process(request) {
+ return {
+ threadId: request.threadId || (0, import_shared8.randomId)()
+ };
+ }
+ };
+ __name(ExperimentalEmptyAdapter, "ExperimentalEmptyAdapter");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AnthropicAdapter,
+ ExperimentalEmptyAdapter,
+ ExperimentalOllamaAdapter,
  GoogleGenerativeAIAdapter,
  GroqAdapter,
  LangChainAdapter,