@copilotkit/runtime 1.5.0-tyler-reset-chat.0 → 1.5.1-custom-tag-pre.0
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- package/CHANGELOG.md +150 -3
- package/__snapshots__/schema/schema.graphql +8 -9
- package/dist/{chunk-K67A6XOJ.mjs → chunk-3ECBC2K2.mjs} +462 -328
- package/dist/chunk-3ECBC2K2.mjs.map +1 -0
- package/dist/{chunk-QNQ6UT3D.mjs → chunk-5E6LOP76.mjs} +2 -2
- package/dist/{chunk-OKQVDDJ2.mjs → chunk-CLGKEUOA.mjs} +298 -63
- package/dist/chunk-CLGKEUOA.mjs.map +1 -0
- package/dist/{chunk-ZBG4KJW5.mjs → chunk-MKDG5ZHT.mjs} +2 -2
- package/dist/{chunk-AGSBOD2T.mjs → chunk-MYZB2EKG.mjs} +2 -2
- package/dist/{chunk-B74M7FXG.mjs → chunk-RFF5IIZJ.mjs} +3 -2
- package/dist/chunk-RFF5IIZJ.mjs.map +1 -0
- package/dist/{copilot-runtime-12e7ac40.d.ts → copilot-runtime-6285d897.d.ts} +2 -2
- package/dist/graphql/types/converted/index.d.ts +1 -1
- package/dist/graphql/types/converted/index.js +2 -1
- package/dist/graphql/types/converted/index.js.map +1 -1
- package/dist/graphql/types/converted/index.mjs +1 -1
- package/dist/{groq-adapter-24abe931.d.ts → groq-adapter-15d41154.d.ts} +1 -1
- package/dist/{index-10b1c870.d.ts → index-ff3fbc33.d.ts} +7 -8
- package/dist/index.d.ts +5 -5
- package/dist/index.js +852 -480
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -6
- package/dist/index.mjs.map +1 -1
- package/dist/{langserve-f021ab9c.d.ts → langserve-48e976ac.d.ts} +54 -14
- package/dist/lib/index.d.ts +4 -4
- package/dist/lib/index.js +754 -459
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +6 -6
- package/dist/lib/integrations/index.d.ts +4 -4
- package/dist/lib/integrations/index.js +78 -30
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +6 -6
- package/dist/lib/integrations/nest/index.d.ts +3 -3
- package/dist/lib/integrations/nest/index.js +78 -30
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +4 -4
- package/dist/lib/integrations/node-express/index.d.ts +3 -3
- package/dist/lib/integrations/node-express/index.js +78 -30
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +4 -4
- package/dist/lib/integrations/node-http/index.d.ts +3 -3
- package/dist/lib/integrations/node-http/index.js +78 -30
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +3 -3
- package/dist/service-adapters/index.d.ts +36 -5
- package/dist/service-adapters/index.js +298 -61
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +5 -1
- package/package.json +4 -4
- package/src/agents/langgraph/event-source.ts +140 -148
- package/src/agents/langgraph/events.ts +1 -1
- package/src/graphql/inputs/forwarded-parameters.input.ts +3 -0
- package/src/graphql/inputs/message.input.ts +15 -3
- package/src/graphql/resolvers/copilot.resolver.ts +32 -6
- package/src/graphql/types/converted/index.ts +4 -3
- package/src/graphql/types/copilot-response.type.ts +12 -3
- package/src/graphql/types/enums.ts +0 -11
- package/src/lib/runtime/copilot-runtime.ts +1 -7
- package/src/lib/runtime/remote-action-constructors.ts +64 -58
- package/src/lib/runtime/remote-actions.ts +1 -0
- package/src/lib/runtime/remote-lg-action.ts +184 -154
- package/src/service-adapters/anthropic/anthropic-adapter.ts +17 -6
- package/src/service-adapters/conversion.ts +2 -1
- package/src/service-adapters/events.ts +118 -54
- package/src/service-adapters/experimental/empty/empty-adapter.ts +33 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +7 -3
- package/src/service-adapters/groq/groq-adapter.ts +24 -8
- package/src/service-adapters/index.ts +7 -1
- package/src/service-adapters/langchain/utils.ts +55 -32
- package/src/service-adapters/openai/openai-adapter.ts +23 -9
- package/src/service-adapters/openai/openai-assistant-adapter.ts +22 -8
- package/src/service-adapters/unify/unify-adapter.ts +30 -11
- package/dist/chunk-B74M7FXG.mjs.map +0 -1
- package/dist/chunk-K67A6XOJ.mjs.map +0 -1
- package/dist/chunk-OKQVDDJ2.mjs.map +0 -1
- /package/dist/{chunk-QNQ6UT3D.mjs.map → chunk-5E6LOP76.mjs.map} +0 -0
- /package/dist/{chunk-ZBG4KJW5.mjs.map → chunk-MKDG5ZHT.mjs.map} +0 -0
- /package/dist/{chunk-AGSBOD2T.mjs.map → chunk-MYZB2EKG.mjs.map} +0 -0
@@ -1,8 +1,8 @@
-import {
-export {
-export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-
+import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-48e976ac.js';
+export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-48e976ac.js';
+export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-15d41154.js';
 import Anthropic from '@anthropic-ai/sdk';
-import '../index-
+import '../index-ff3fbc33.js';
 import '../graphql/types/base/index.js';
 import 'rxjs';
 import '@copilotkit/shared';
@@ -50,4 +50,35 @@ declare class AnthropicAdapter implements CopilotServiceAdapter {
     process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
 }
 
-
+interface OllamaAdapterOptions {
+    model?: string;
+}
+declare class ExperimentalOllamaAdapter implements CopilotServiceAdapter {
+    private model;
+    constructor(options?: OllamaAdapterOptions);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+/**
+ * CopilotKit Empty Adapter
+ *
+ * This adapter is meant to preserve adherence to runtime requirements, while doing nothing
+ * Ideal if you don't want to connect an LLM the to the runtime, and only use your LangGraph agent.
+ * Be aware that Copilot Suggestions will not work if you use this adapter
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, ExperimentalEmptyAdapter } from "@copilotkit/runtime";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * return new ExperimentalEmptyAdapter();
+ * ```
+ */
+
+declare class ExperimentalEmptyAdapter implements CopilotServiceAdapter {
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+export { AnthropicAdapter, AnthropicAdapterParams, CopilotRuntimeChatCompletionRequest, CopilotRuntimeChatCompletionResponse, CopilotServiceAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter };
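The declarations above add two experimental service adapters to the public typings. A minimal usage sketch based only on the signatures shown in this hunk and the compiled source further down; the endpoint/integration wiring is whichever one you already use (Node HTTP, Express, Nest, ...) and is not part of this diff, so it is left out here:

```ts
import {
  CopilotRuntime,
  ExperimentalEmptyAdapter,
  ExperimentalOllamaAdapter,
} from "@copilotkit/runtime";

// No-op adapter: satisfies the runtime's service-adapter requirement while doing
// nothing itself, e.g. when all LLM work happens in a LangGraph agent (per the JSDoc above).
const emptyAdapter = new ExperimentalEmptyAdapter();

// Ollama adapter: OllamaAdapterOptions only exposes an optional model name;
// the compiled source below falls back to "llama3:latest" when it is omitted.
const ollamaAdapter = new ExperimentalOllamaAdapter({ model: "llama3:latest" });

const runtime = new CopilotRuntime();
// Pass `runtime` plus one of the adapters to your existing integration helper
// as its service adapter; the exact helper name depends on your setup.
```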
@@ -31,6 +31,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var service_adapters_exports = {};
 __export(service_adapters_exports, {
   AnthropicAdapter: () => AnthropicAdapter,
+  ExperimentalEmptyAdapter: () => ExperimentalEmptyAdapter,
+  ExperimentalOllamaAdapter: () => ExperimentalOllamaAdapter,
   GoogleGenerativeAIAdapter: () => GoogleGenerativeAIAdapter,
   GroqAdapter: () => GroqAdapter,
   LangChainAdapter: () => LangChainAdapter,
@@ -310,11 +312,16 @@ var OpenAIAdapter = class {
       },
       ...this.disableParallelToolCalls && {
         parallel_tool_calls: false
+      },
+      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+        temperature: forwardedParameters.temperature
       }
     });
     eventSource.stream(async (eventStream$) => {
       var _a, _b;
       let mode = null;
+      let currentMessageId;
+      let currentToolCallId;
       for await (const chunk of stream) {
         if (chunk.choices.length === 0) {
           continue;
@@ -323,30 +330,52 @@ var OpenAIAdapter = class {
         const content = chunk.choices[0].delta.content;
         if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
           mode = null;
-          eventStream$.sendTextMessageEnd(
+          eventStream$.sendTextMessageEnd({
+            messageId: currentMessageId
+          });
         } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
           mode = null;
-          eventStream$.sendActionExecutionEnd(
+          eventStream$.sendActionExecutionEnd({
+            actionExecutionId: currentToolCallId
+          });
         }
         if (mode === null) {
           if (toolCall == null ? void 0 : toolCall.id) {
             mode = "function";
-
+            currentToolCallId = toolCall.id;
+            eventStream$.sendActionExecutionStart({
+              actionExecutionId: currentToolCallId,
+              parentMessageId: chunk.id,
+              actionName: toolCall.function.name
+            });
           } else if (content) {
             mode = "message";
-
+            currentMessageId = chunk.id;
+            eventStream$.sendTextMessageStart({
+              messageId: currentMessageId
+            });
           }
         }
         if (mode === "message" && content) {
-          eventStream$.sendTextMessageContent(
+          eventStream$.sendTextMessageContent({
+            messageId: currentMessageId,
+            content
+          });
         } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
-          eventStream$.sendActionExecutionArgs(
+          eventStream$.sendActionExecutionArgs({
+            actionExecutionId: currentToolCallId,
+            args: toolCall.function.arguments
+          });
         }
       }
       if (mode === "message") {
-        eventStream$.sendTextMessageEnd(
+        eventStream$.sendTextMessageEnd({
+          messageId: currentMessageId
+        });
       } else if (mode === "function") {
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: currentToolCallId
+        });
       }
       eventStream$.complete();
     });
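The hunks above switch every event-stream call from positional arguments to a single options object keyed by messageId / actionExecutionId. A sketch of the new shape as it appears in these hunks; `eventSource` is the one handed to an adapter's process request, and the literal ids and the action name "myAction" are placeholders (the adapters themselves use chunk ids, provider tool-call ids, or randomId()):

```ts
// Sketch only: mirrors the calls emitted by the adapters in this diff.
eventSource.stream(async (eventStream$) => {
  const messageId = "msg_1";
  eventStream$.sendTextMessageStart({ messageId });
  eventStream$.sendTextMessageContent({ messageId, content: "Hello" });
  eventStream$.sendTextMessageEnd({ messageId });

  const actionExecutionId = "tool_call_1";
  eventStream$.sendActionExecutionStart({
    actionExecutionId,
    actionName: "myAction",
    parentMessageId: messageId,
  });
  eventStream$.sendActionExecutionArgs({ actionExecutionId, args: '{"x":1}' });
  eventStream$.sendActionExecutionEnd({ actionExecutionId });

  eventStream$.complete();
});
```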
@@ -414,17 +443,25 @@ function isBaseMessageChunk(message) {
 __name(isBaseMessageChunk, "isBaseMessageChunk");
 function maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution) {
   if (actionExecution) {
-    eventStream$.sendActionExecutionResult(
+    eventStream$.sendActionExecutionResult({
+      actionExecutionId: actionExecution.id,
+      actionName: actionExecution.name,
+      result: "Sending a message"
+    });
   }
 }
 __name(maybeSendActionExecutionResultIsMessage, "maybeSendActionExecutionResultIsMessage");
 async function streamLangChainResponse({ result, eventStream$, actionExecution }) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
   if (typeof result === "string") {
     if (!actionExecution) {
       eventStream$.sendTextMessage((0, import_shared2.randomId)(), result);
     } else {
-      eventStream$.sendActionExecutionResult(
+      eventStream$.sendActionExecutionResult({
+        actionExecutionId: actionExecution.id,
+        actionName: actionExecution.name,
+        result
+      });
     }
   } else if (isAIMessage(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -432,7 +469,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
       eventStream$.sendTextMessage((0, import_shared2.randomId)(), result.content);
     }
     for (const toolCall of result.tool_calls) {
-      eventStream$.sendActionExecution(
+      eventStream$.sendActionExecution({
+        actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
+        actionName: toolCall.name,
+        args: JSON.stringify(toolCall.args)
+      });
     }
   } else if (isBaseMessageChunk(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
@@ -441,13 +482,18 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
     }
     if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
       for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
-        eventStream$.sendActionExecution(
+        eventStream$.sendActionExecution({
+          actionExecutionId: toolCall.id || (0, import_shared2.randomId)(),
+          actionName: toolCall.name,
+          args: JSON.stringify(toolCall.args)
+        });
       }
     }
   } else if (result && "getReader" in result) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
     let reader = result.getReader();
     let mode = null;
+    let currentMessageId;
     const toolCallDetails = {
       name: null,
       id: null,
@@ -461,9 +507,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
       let toolCallId = void 0;
       let toolCallArgs = void 0;
       let hasToolCall = false;
-      let content =
+      let content = "";
+      if (value && value.content) {
+        content = Array.isArray(value.content) ? ((_d = value.content[0]) == null ? void 0 : _d.text) ?? "" : value.content;
+      }
       if (isAIMessageChunk(value)) {
-        let chunk = (
+        let chunk = (_e = value.tool_call_chunks) == null ? void 0 : _e[0];
         toolCallArgs = chunk == null ? void 0 : chunk.args;
         hasToolCall = chunk != void 0;
         if (chunk == null ? void 0 : chunk.name)
@@ -478,18 +527,22 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
         toolCallName = toolCallDetails.name;
         toolCallId = toolCallDetails.id;
       } else if (isBaseMessageChunk(value)) {
-        let chunk = (
-        toolCallName = (
+        let chunk = (_g = (_f = value.additional_kwargs) == null ? void 0 : _f.tool_calls) == null ? void 0 : _g[0];
+        toolCallName = (_h = chunk == null ? void 0 : chunk.function) == null ? void 0 : _h.name;
         toolCallId = chunk == null ? void 0 : chunk.id;
-        toolCallArgs = (
+        toolCallArgs = (_i = chunk == null ? void 0 : chunk.function) == null ? void 0 : _i.arguments;
         hasToolCall = (chunk == null ? void 0 : chunk.function) != void 0;
       }
       if (mode === "message" && (toolCallId || done)) {
         mode = null;
-        eventStream$.sendTextMessageEnd(
+        eventStream$.sendTextMessageEnd({
+          messageId: currentMessageId
+        });
       } else if (mode === "function" && (!hasToolCall || done)) {
         mode = null;
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: toolCallId
+        });
       }
       if (done) {
         break;
@@ -497,21 +550,40 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
       if (mode === null) {
         if (hasToolCall && toolCallId && toolCallName) {
           mode = "function";
-          eventStream$.sendActionExecutionStart(
+          eventStream$.sendActionExecutionStart({
+            actionExecutionId: toolCallId,
+            actionName: toolCallName,
+            parentMessageId: (_j = value.lc_kwargs) == null ? void 0 : _j.id
+          });
         } else if (content) {
           mode = "message";
-
+          currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared2.randomId)();
+          eventStream$.sendTextMessageStart({
+            messageId: currentMessageId
+          });
         }
       }
       if (mode === "message" && content) {
-        eventStream$.sendTextMessageContent(
+        eventStream$.sendTextMessageContent({
+          messageId: currentMessageId,
+          content
+        });
       } else if (mode === "function" && toolCallArgs) {
         if (toolCallDetails.index !== toolCallDetails.prevIndex) {
-          eventStream$.sendActionExecutionEnd(
-
+          eventStream$.sendActionExecutionEnd({
+            actionExecutionId: toolCallId
+          });
+          eventStream$.sendActionExecutionStart({
+            actionExecutionId: toolCallId,
+            actionName: toolCallName,
+            parentMessageId: (_l = value.lc_kwargs) == null ? void 0 : _l.id
+          });
           toolCallDetails.prevIndex = toolCallDetails.index;
         }
-        eventStream$.sendActionExecutionArgs(
+        eventStream$.sendActionExecutionArgs({
+          actionExecutionId: toolCallId,
+          args: toolCallArgs
+        });
       }
     } catch (error) {
       console.error("Error reading from stream", error);
@@ -519,7 +591,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
       }
     }
   } else if (actionExecution) {
-    eventStream$.sendActionExecutionResult(
+    eventStream$.sendActionExecutionResult({
+      actionExecutionId: actionExecution.id,
+      actionName: actionExecution.name,
+      result: encodeResult(result)
+    });
   } else {
     throw new Error("Invalid return type from LangChain function.");
   }
@@ -699,21 +775,33 @@ var OpenAIAssistantAdapter = class {
     eventSource.stream(async (eventStream$) => {
       var _a, _b, _c, _d, _e, _f;
       let inFunctionCall = false;
+      let currentMessageId;
+      let currentToolCallId;
       for await (const chunk of stream) {
         switch (chunk.event) {
           case "thread.message.created":
             if (inFunctionCall) {
-              eventStream$.sendActionExecutionEnd(
+              eventStream$.sendActionExecutionEnd({
+                actionExecutionId: currentToolCallId
+              });
             }
-
+            currentMessageId = chunk.data.id;
+            eventStream$.sendTextMessageStart({
+              messageId: currentMessageId
+            });
             break;
           case "thread.message.delta":
             if (((_a = chunk.data.delta.content) == null ? void 0 : _a[0].type) === "text") {
-              eventStream$.sendTextMessageContent(
+              eventStream$.sendTextMessageContent({
+                messageId: currentMessageId,
+                content: (_b = chunk.data.delta.content) == null ? void 0 : _b[0].text.value
+              });
             }
             break;
           case "thread.message.completed":
-            eventStream$.sendTextMessageEnd(
+            eventStream$.sendTextMessageEnd({
+              messageId: currentMessageId
+            });
             break;
           case "thread.run.step.delta":
             let toolCallId;
@@ -726,18 +814,30 @@ var OpenAIAssistantAdapter = class {
             }
             if (toolCallName && toolCallId) {
               if (inFunctionCall) {
-                eventStream$.sendActionExecutionEnd(
+                eventStream$.sendActionExecutionEnd({
+                  actionExecutionId: currentToolCallId
+                });
               }
               inFunctionCall = true;
-
+              currentToolCallId = toolCallId;
+              eventStream$.sendActionExecutionStart({
+                actionExecutionId: currentToolCallId,
+                parentMessageId: chunk.data.id,
+                actionName: toolCallName
+              });
             } else if (toolCallArgs) {
-              eventStream$.sendActionExecutionArgs(
+              eventStream$.sendActionExecutionArgs({
+                actionExecutionId: currentToolCallId,
+                args: toolCallArgs
+              });
             }
             break;
         }
       }
       if (inFunctionCall) {
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: currentToolCallId
+        });
       }
       eventStream$.complete();
     });
@@ -780,6 +880,7 @@ var UnifyAdapter = class {
       apiKey: this.apiKey,
       baseURL: "https://api.unify.ai/v0/"
     });
+    const forwardedParameters = request.forwardedParameters;
     const messages = request.messages.map(convertMessageToOpenAIMessage);
     const stream = await openai.chat.completions.create({
       model: this.model,
@@ -787,49 +888,83 @@ var UnifyAdapter = class {
       stream: true,
       ...tools.length > 0 && {
         tools
+      },
+      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+        temperature: forwardedParameters.temperature
       }
     });
     let model = null;
+    let currentMessageId;
+    let currentToolCallId;
     request.eventSource.stream(async (eventStream$) => {
       var _a, _b;
       let mode = null;
       for await (const chunk of stream) {
         if (this.start) {
           model = chunk.model;
-
-          eventStream$.
-
-
+          currentMessageId = (0, import_shared4.randomId)();
+          eventStream$.sendTextMessageStart({
+            messageId: currentMessageId
+          });
+          eventStream$.sendTextMessageContent({
+            messageId: currentMessageId,
+            content: `Model used: ${model}
+`
+          });
+          eventStream$.sendTextMessageEnd({
+            messageId: currentMessageId
+          });
           this.start = false;
         }
         const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
         const content = chunk.choices[0].delta.content;
         if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
           mode = null;
-          eventStream$.sendTextMessageEnd(
+          eventStream$.sendTextMessageEnd({
+            messageId: currentMessageId
+          });
         } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
           mode = null;
-          eventStream$.sendActionExecutionEnd(
+          eventStream$.sendActionExecutionEnd({
+            actionExecutionId: currentToolCallId
+          });
         }
         if (mode === null) {
           if (toolCall == null ? void 0 : toolCall.id) {
             mode = "function";
-
+            currentToolCallId = toolCall.id;
+            eventStream$.sendActionExecutionStart({
+              actionExecutionId: currentToolCallId,
+              actionName: toolCall.function.name
+            });
           } else if (content) {
             mode = "message";
-
+            currentMessageId = chunk.id;
+            eventStream$.sendTextMessageStart({
+              messageId: currentMessageId
+            });
          }
        }
        if (mode === "message" && content) {
-          eventStream$.sendTextMessageContent(
+          eventStream$.sendTextMessageContent({
+            messageId: currentMessageId,
+            content
+          });
        } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
-          eventStream$.sendActionExecutionArgs(
+          eventStream$.sendActionExecutionArgs({
+            actionExecutionId: currentToolCallId,
+            args: toolCall.function.arguments
+          });
        }
      }
      if (mode === "message") {
-        eventStream$.sendTextMessageEnd(
+        eventStream$.sendTextMessageEnd({
+          messageId: currentMessageId
+        });
      } else if (mode === "function") {
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: currentToolCallId
+        });
      }
      eventStream$.complete();
    });
@@ -890,40 +1025,67 @@ var GroqAdapter = class {
       },
       ...this.disableParallelToolCalls && {
         parallel_tool_calls: false
+      },
+      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) && {
+        temperature: forwardedParameters.temperature
       }
     });
     eventSource.stream(async (eventStream$) => {
       var _a, _b;
       let mode = null;
+      let currentMessageId;
+      let currentToolCallId;
       for await (const chunk of stream) {
         const toolCall = (_a = chunk.choices[0].delta.tool_calls) == null ? void 0 : _a[0];
         const content = chunk.choices[0].delta.content;
         if (mode === "message" && (toolCall == null ? void 0 : toolCall.id)) {
           mode = null;
-          eventStream$.sendTextMessageEnd(
+          eventStream$.sendTextMessageEnd({
+            messageId: currentMessageId
+          });
         } else if (mode === "function" && (toolCall === void 0 || (toolCall == null ? void 0 : toolCall.id))) {
           mode = null;
-          eventStream$.sendActionExecutionEnd(
+          eventStream$.sendActionExecutionEnd({
+            actionExecutionId: currentToolCallId
+          });
         }
         if (mode === null) {
           if (toolCall == null ? void 0 : toolCall.id) {
             mode = "function";
-
+            currentToolCallId = toolCall.id;
+            eventStream$.sendActionExecutionStart({
+              actionExecutionId: currentToolCallId,
+              actionName: toolCall.function.name,
+              parentMessageId: chunk.id
+            });
           } else if (content) {
             mode = "message";
-
+            currentMessageId = chunk.id;
+            eventStream$.sendTextMessageStart({
+              messageId: currentMessageId
+            });
           }
         }
         if (mode === "message" && content) {
-          eventStream$.sendTextMessageContent(
+          eventStream$.sendTextMessageContent({
+            messageId: currentMessageId,
+            content
+          });
         } else if (mode === "function" && ((_b = toolCall == null ? void 0 : toolCall.function) == null ? void 0 : _b.arguments)) {
-          eventStream$.sendActionExecutionArgs(
+          eventStream$.sendActionExecutionArgs({
+            actionExecutionId: currentToolCallId,
+            args: toolCall.function.arguments
+          });
         }
       }
       if (mode === "message") {
-        eventStream$.sendTextMessageEnd(
+        eventStream$.sendTextMessageEnd({
+          messageId: currentMessageId
+        });
       } else if (mode === "function") {
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: currentToolCallId
+        });
       }
       eventStream$.complete();
     });
@@ -1107,6 +1269,9 @@ var AnthropicAdapter = class {
       model: this.model,
       messages: anthropicMessages,
       max_tokens: (forwardedParameters == null ? void 0 : forwardedParameters.maxTokens) || 1024,
+      ...(forwardedParameters == null ? void 0 : forwardedParameters.temperature) ? {
+        temperature: forwardedParameters.temperature
+      } : {},
       ...tools.length > 0 && {
         tools
       },
@@ -1131,7 +1296,11 @@ var AnthropicAdapter = class {
           mode = "message";
         } else if (chunk.content_block.type === "tool_use") {
           currentToolCallId = chunk.content_block.id;
-          eventStream$.sendActionExecutionStart(
+          eventStream$.sendActionExecutionStart({
+            actionExecutionId: currentToolCallId,
+            actionName: chunk.content_block.name,
+            parentMessageId: currentMessageId
+          });
           mode = "function";
         }
       } else if (chunk.type === "content_block_delta") {
@@ -1139,21 +1308,33 @@ var AnthropicAdapter = class {
         const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);
         if (text.length > 0) {
           if (!didOutputText) {
-            eventStream$.sendTextMessageStart(
+            eventStream$.sendTextMessageStart({
+              messageId: currentMessageId
+            });
             didOutputText = true;
           }
-          eventStream$.sendTextMessageContent(
+          eventStream$.sendTextMessageContent({
+            messageId: currentMessageId,
+            content: text
+          });
         }
       } else if (chunk.delta.type === "input_json_delta") {
-        eventStream$.sendActionExecutionArgs(
+        eventStream$.sendActionExecutionArgs({
+          actionExecutionId: currentToolCallId,
+          args: chunk.delta.partial_json
+        });
       }
     } else if (chunk.type === "content_block_stop") {
       if (mode === "message") {
         if (didOutputText) {
-          eventStream$.sendTextMessageEnd(
+          eventStream$.sendTextMessageEnd({
+            messageId: currentMessageId
+          });
         }
       } else if (mode === "function") {
-        eventStream$.sendActionExecutionEnd(
+        eventStream$.sendActionExecutionEnd({
+          actionExecutionId: currentToolCallId
+        });
       }
     }
   }
@@ -1197,9 +1378,65 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
     this.didFilterThinkingTag = false;
   }
 }, "FilterThinkingTextBuffer");
+
+// src/service-adapters/experimental/ollama/ollama-adapter.ts
+var import_ollama = require("@langchain/community/llms/ollama");
+var import_shared7 = require("@copilotkit/shared");
+var DEFAULT_MODEL4 = "llama3:latest";
+var ExperimentalOllamaAdapter = class {
+  model;
+  constructor(options) {
+    if (options == null ? void 0 : options.model) {
+      this.model = options.model;
+    } else {
+      this.model = DEFAULT_MODEL4;
+    }
+  }
+  async process(request) {
+    const { messages, actions, eventSource } = request;
+    const ollama = new import_ollama.Ollama({
+      model: this.model
+    });
+    const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
+    const _stream = await ollama.stream(contents);
+    eventSource.stream(async (eventStream$) => {
+      const currentMessageId = (0, import_shared7.randomId)();
+      eventStream$.sendTextMessageStart({
+        messageId: currentMessageId
+      });
+      for await (const chunkText of _stream) {
+        eventStream$.sendTextMessageContent({
+          messageId: currentMessageId,
+          content: chunkText
+        });
+      }
+      eventStream$.sendTextMessageEnd({
+        messageId: currentMessageId
+      });
+      eventStream$.complete();
+    });
+    return {
+      threadId: request.threadId || (0, import_shared7.randomId)()
+    };
+  }
+};
+__name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
+
+// src/service-adapters/experimental/empty/empty-adapter.ts
+var import_shared8 = require("@copilotkit/shared");
+var ExperimentalEmptyAdapter = class {
+  async process(request) {
+    return {
+      threadId: request.threadId || (0, import_shared8.randomId)()
+    };
+  }
+};
+__name(ExperimentalEmptyAdapter, "ExperimentalEmptyAdapter");
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AnthropicAdapter,
+  ExperimentalEmptyAdapter,
+  ExperimentalOllamaAdapter,
   GoogleGenerativeAIAdapter,
   GroqAdapter,
   LangChainAdapter,