@copilotkit/runtime 1.5.1-next.1 → 1.5.1-next.3

Files changed (58)
  1. package/CHANGELOG.md +17 -0
  2. package/__snapshots__/schema/schema.graphql +9 -8
  3. package/dist/{chunk-RFF5IIZJ.mjs → chunk-B74M7FXG.mjs} +2 -3
  4. package/dist/chunk-B74M7FXG.mjs.map +1 -0
  5. package/dist/{chunk-L4VT7Z25.mjs → chunk-M4QD67S3.mjs} +2 -2
  6. package/dist/{chunk-CLGKEUOA.mjs → chunk-OKQVDDJ2.mjs} +63 -298
  7. package/dist/chunk-OKQVDDJ2.mjs.map +1 -0
  8. package/dist/{chunk-G2PTXSIW.mjs → chunk-U3AYI5XZ.mjs} +2 -2
  9. package/dist/{chunk-Z3MD5FQ4.mjs → chunk-U75F2RAQ.mjs} +2 -2
  10. package/dist/{chunk-6N4ZHRRC.mjs → chunk-ZSWCEADS.mjs} +331 -465
  11. package/dist/chunk-ZSWCEADS.mjs.map +1 -0
  12. package/dist/{copilot-runtime-6285d897.d.ts → copilot-runtime-12e7ac40.d.ts} +2 -2
  13. package/dist/graphql/types/converted/index.d.ts +1 -1
  14. package/dist/graphql/types/converted/index.js +1 -2
  15. package/dist/graphql/types/converted/index.js.map +1 -1
  16. package/dist/graphql/types/converted/index.mjs +1 -1
  17. package/dist/{groq-adapter-15d41154.d.ts → groq-adapter-24abe931.d.ts} +1 -1
  18. package/dist/{index-ff3fbc33.d.ts → index-10b1c870.d.ts} +8 -7
  19. package/dist/index.d.ts +5 -5
  20. package/dist/index.js +480 -852
  21. package/dist/index.js.map +1 -1
  22. package/dist/index.mjs +11 -15
  23. package/dist/index.mjs.map +1 -1
  24. package/dist/{langserve-48e976ac.d.ts → langserve-f021ab9c.d.ts} +14 -54
  25. package/dist/lib/index.d.ts +4 -4
  26. package/dist/lib/index.js +459 -754
  27. package/dist/lib/index.js.map +1 -1
  28. package/dist/lib/index.mjs +7 -7
  29. package/dist/lib/integrations/index.d.ts +4 -4
  30. package/dist/lib/integrations/index.js +30 -78
  31. package/dist/lib/integrations/index.js.map +1 -1
  32. package/dist/lib/integrations/index.mjs +6 -6
  33. package/dist/lib/integrations/nest/index.d.ts +3 -3
  34. package/dist/lib/integrations/nest/index.js +30 -78
  35. package/dist/lib/integrations/nest/index.js.map +1 -1
  36. package/dist/lib/integrations/nest/index.mjs +4 -4
  37. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  38. package/dist/lib/integrations/node-express/index.js +30 -78
  39. package/dist/lib/integrations/node-express/index.js.map +1 -1
  40. package/dist/lib/integrations/node-express/index.mjs +4 -4
  41. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  42. package/dist/lib/integrations/node-http/index.js +30 -78
  43. package/dist/lib/integrations/node-http/index.js.map +1 -1
  44. package/dist/lib/integrations/node-http/index.mjs +3 -3
  45. package/dist/service-adapters/index.d.ts +5 -36
  46. package/dist/service-adapters/index.js +61 -298
  47. package/dist/service-adapters/index.js.map +1 -1
  48. package/dist/service-adapters/index.mjs +1 -5
  49. package/package.json +4 -4
  50. package/src/graphql/resolvers/copilot.resolver.ts +16 -0
  51. package/src/graphql/types/agents-response.type.ts +22 -0
  52. package/src/lib/runtime/copilot-runtime.ts +50 -0
  53. package/dist/chunk-6N4ZHRRC.mjs.map +0 -1
  54. package/dist/chunk-CLGKEUOA.mjs.map +0 -1
  55. package/dist/chunk-RFF5IIZJ.mjs.map +0 -1
  56. /package/dist/{chunk-L4VT7Z25.mjs.map → chunk-M4QD67S3.mjs.map} +0 -0
  57. /package/dist/{chunk-G2PTXSIW.mjs.map → chunk-U3AYI5XZ.mjs.map} +0 -0
  58. /package/dist/{chunk-Z3MD5FQ4.mjs.map → chunk-U75F2RAQ.mjs.map} +0 -0
@@ -271,16 +271,11 @@ var OpenAIAdapter = class {
  },
  ...this.disableParallelToolCalls && {
  parallel_tool_calls: false
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
  }
  });
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  if (chunk.choices.length === 0) {
  continue;
@@ -289,52 +284,30 @@ var OpenAIAdapter = class {
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- parentMessageId: chunk.id,
- actionName: toolCall.function.name
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -402,25 +375,17 @@ function isBaseMessageChunk(message) {
  __name(isBaseMessageChunk, "isBaseMessageChunk");
  function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
  if (actionExecution) {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result: "Sending a message"
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, "Sending a message");
  }
  }
  __name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
  async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i;
  if (typeof result === "string") {
  if (!actionExecution) {
  eventStream$.sendTextMessage(randomId2(), result);
  } else {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
  }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -428,11 +393,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  eventStream$.sendTextMessage(randomId2(), result.content);
  }
  for (const toolCall of result.tool_calls) {
- eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || randomId2(),
- actionName: toolCall.name,
- args: JSON.stringify(toolCall.args)
- });
+ eventStream$.sendActionExecution(toolCall.id || randomId2(), toolCall.name, JSON.stringify(toolCall.args));
  }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -441,18 +402,13 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
- eventStream$.sendActionExecution({
- actionExecutionId: toolCall.id || randomId2(),
- actionName: toolCall.name,
- args: JSON.stringify(toolCall.args)
- });
+ eventStream$.sendActionExecution(toolCall.id || randomId2(), toolCall.name, JSON.stringify(toolCall.args));
  }
  }
  } else if (result && "getReader" in result) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  let reader = result.getReader();
  let mode = null;
- let currentMessageId;
  const toolCallDetails = {
  name: null,
  id: null,
@@ -466,12 +422,9 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  let toolCallId = void 0;
  let toolCallArgs = void 0;
  let hasToolCall = false;
- let content = "";
- if (value && value.content) {
- content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
- }
+ let content = value == null ? void 0 : value.content;
  if (isAIMessageChunk(value)) {
- let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
+ let chunk = (_d = value.tool_call_chunks) == null ? void 0 : _d[0];
  toolCallArgs = chunk == null ? void 0 : chunk.args;
  hasToolCall = chunk != void 0;
  if (chunk == null ? void 0 : chunk.name)
@@ -486,22 +439,18 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  toolCallName = toolCallDetails.name;
  toolCallId = toolCallDetails.id;
  } else if (isBaseMessageChunk(value)) {
- let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
- toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
+ let chunk = (_f = (_e = value.additional_kwargs) == null ? void 0 : _e.tool_calls) == null ? void 0 : _f[0];
+ toolCallName = (_g = chunk == null ? void 0 : chunk.function) == null ? void 0 : _g.name;
  toolCallId = chunk == null ? void 0 : chunk.id;
- toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
+ toolCallArgs = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.arguments;
  hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
  }
  if (mode === "message" && (toolCallId || done)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (!hasToolCall || done)) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: toolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (done) {
  break;
@@ -509,40 +458,21 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  if (mode === null) {
  if (hasToolCall && toolCallId && toolCallName) {
  mode = "function";
- eventStream$.sendActionExecutionStart({
- actionExecutionId: toolCallId,
- actionName: toolCallName,
- parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
- });
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  } else if (content) {
  mode = "message";
- currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId2();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(randomId2());
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(Array.isArray(content) ? ((_i = content[0]) == null ? void 0 : _i.text) ?? "" : content);
  } else if (mode === "function" && toolCallArgs) {
  if (toolCallDetails.index !== toolCallDetails.prevIndex) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: toolCallId
- });
- eventStream$.sendActionExecutionStart({
- actionExecutionId: toolCallId,
- actionName: toolCallName,
- parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
- });
+ eventStream$.sendActionExecutionEnd();
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  toolCallDetails.prevIndex = toolCallDetails.index;
  }
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: toolCallId,
- args: toolCallArgs
- });
+ eventStream$.sendActionExecutionArgs(toolCallArgs);
  }
  } catch (error) {
  console.error("Error reading from stream", error);
@@ -550,11 +480,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  }
  }
  } else if (actionExecution) {
- eventStream$.sendActionExecutionResult({
- actionExecutionId: actionExecution.id,
- actionName: actionExecution.name,
- result: encodeResult(result)
- });
+ eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, encodeResult(result));
  } else {
  throw new Error("Invalid return type from LangChain function.");
  }
@@ -734,33 +660,21 @@ var OpenAIAssistantAdapter = class {
  eventSource.stream(async (eventStream$) => {
  var _a, _b, _c, _d, _e, _f;
  let inFunctionCall = false;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  switch (chunk.event) {
  case "thread.message.created":
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
- currentMessageId = chunk.data.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.data.id);
  break;
  case "thread.message.delta":
  if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
- });
+ eventStream$.sendTextMessageContent((_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value);
  }
  break;
  case "thread.message.completed":
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  break;
  case "thread.run.step.delta":
  let toolCallId;
@@ -773,30 +687,18 @@ var OpenAIAssistantAdapter = class {
  }
  if (toolCallName && toolCallId) {
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  inFunctionCall = true;
- currentToolCallId = toolCallId;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- parentMessageId: chunk.data.id,
- actionName: toolCallName
- });
+ eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  } else if (toolCallArgs) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCallArgs
- });
+ eventStream$.sendActionExecutionArgs(toolCallArgs);
  }
  break;
  }
  }
  if (inFunctionCall) {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -839,7 +741,6 @@ var UnifyAdapter = class {
  apiKey: this.apiKey,
  baseURL: "https://api.unify.ai/v0/"
  });
- const forwardedParameters = request.forwardedParameters;
  const messages = request.messages.map(convertMessageToOpenAIMessage);
  const stream = await openai.chat.completions.create({
  model: this.model,
@@ -847,83 +748,49 @@ var UnifyAdapter = class {
  stream: true,
  ...tools.length > 0 && {
  tools
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
  }
  });
  let model = null;
- let currentMessageId;
- let currentToolCallId;
  request.eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
  for await (const chunk of stream) {
  if (this.start) {
  model = chunk.model;
- currentMessageId = randomId4();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: `Model used: ${model}
- `
- });
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(randomId4());
+ eventStream$.sendTextMessageContent(`Model used: ${model}
+ `);
+ eventStream$.sendTextMessageEnd();
  this.start = false;
  }
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -984,67 +851,40 @@ var GroqAdapter = class {
  },
  ...this.disableParallelToolCalls && {
  parallel_tool_calls: false
- },
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
- temperature: forwardedParameters.temperature
  }
  });
  eventSource.stream(async (eventStream$) => {
  var _a, _b;
  let mode = null;
- let currentMessageId;
- let currentToolCallId;
  for await (const chunk of stream) {
  const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
  const content = chunk.choices[0].delta.content;
  if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
  mode = null;
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
  mode = null;
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  if (mode === null) {
  if (toolCall == null ? void 0 : toolCall.id) {
  mode = "function";
- currentToolCallId = toolCall.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: toolCall.function.name,
- parentMessageId: chunk.id
- });
+ eventStream$.sendActionExecutionStart(toolCall.id, toolCall.function.name);
  } else if (content) {
  mode = "message";
- currentMessageId = chunk.id;
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(chunk.id);
  }
  }
  if (mode === "message" && content) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content
- });
+ eventStream$.sendTextMessageContent(content);
  } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: toolCall.function.arguments
- });
+ eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
  }
  }
  if (mode === "message") {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  eventStream$.complete();
  });
@@ -1228,9 +1068,6 @@ var AnthropicAdapter = class {
  model: this.model,
  messages: anthropicMessages,
  max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
- ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
- temperature: forwardedParameters.temperature
- } : {},
  ...tools.length > 0 && {
  tools
  },
@@ -1255,11 +1092,7 @@ var AnthropicAdapter = class {
  mode = "message";
  } else if (chunk.content_block.type === "tool_use") {
  currentToolCallId = chunk.content_block.id;
- eventStream$.sendActionExecutionStart({
- actionExecutionId: currentToolCallId,
- actionName: chunk.content_block.name,
- parentMessageId: currentMessageId
- });
+ eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);
  mode = "function";
  }
  } else if (chunk.type === "content_block_delta") {
@@ -1267,33 +1100,21 @@ var AnthropicAdapter = class {
  const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
  if (text.length > 0) {
  if (!didOutputText) {
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageStart(currentMessageId);
  didOutputText = true;
  }
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: text
- });
+ eventStream$.sendTextMessageContent(text);
  }
  } else if (chunk.delta.type === "input_json_delta") {
- eventStream$.sendActionExecutionArgs({
- actionExecutionId: currentToolCallId,
- args: chunk.delta.partial_json
- });
+ eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);
  }
  } else if (chunk.type === "content_block_stop") {
  if (mode === "message") {
  if (didOutputText) {
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
+ eventStream$.sendTextMessageEnd();
  }
  } else if (mode === "function") {
- eventStream$.sendActionExecutionEnd({
- actionExecutionId: currentToolCallId
- });
+ eventStream$.sendActionExecutionEnd();
  }
  }
  }
@@ -1338,60 +1159,6 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
  }
  }, "FilterThinkingTextBuffer");

- // src/service-adapters/experimental/ollama/ollama-adapter.ts
- import { Ollama } from "@langchain/community/llms/ollama";
- import { randomId as randomId7 } from "@copilotkit/shared";
- var DEFAULT_MODEL4 = "llama3:latest";
- var ExperimentalOllamaAdapter = class {
- model;
- constructor(options) {
- if (options == null ? void 0 : options.model) {
- this.model = options.model;
- } else {
- this.model = DEFAULT_MODEL4;
- }
- }
- async process(request) {
- const { messages, actions, eventSource } = request;
- const ollama = new Ollama({
- model: this.model
- });
- const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
- const _stream = await ollama.stream(contents);
- eventSource.stream(async (eventStream$) => {
- const currentMessageId = randomId7();
- eventStream$.sendTextMessageStart({
- messageId: currentMessageId
- });
- for await (const chunkText of _stream) {
- eventStream$.sendTextMessageContent({
- messageId: currentMessageId,
- content: chunkText
- });
- }
- eventStream$.sendTextMessageEnd({
- messageId: currentMessageId
- });
- eventStream$.complete();
- });
- return {
- threadId: request.threadId || randomId7()
- };
- }
- };
- __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
-
- // src/service-adapters/experimental/empty/empty-adapter.ts
- import { randomId as randomId8 } from "@copilotkit/shared";
- var ExperimentalEmptyAdapter = class {
- async process(request) {
- return {
- threadId: request.threadId || randomId8()
- };
- }
- };
- __name(ExperimentalEmptyAdapter, "ExperimentalEmptyAdapter");
-
  export {
  RemoteChain,
  OpenAIAdapter,
@@ -1401,8 +1168,6 @@ export {
  OpenAIAssistantAdapter,
  UnifyAdapter,
  GroqAdapter,
- AnthropicAdapter,
- ExperimentalOllamaAdapter,
- ExperimentalEmptyAdapter
+ AnthropicAdapter
  };
- //# sourceMappingURL=chunk-CLGKEUOA.mjs.map
+ //# sourceMappingURL=chunk-OKQVDDJ2.mjs.map