@librechat/agents 3.0.32 → 3.0.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/common/enum.cjs +0 -1
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/llm/providers.cjs +0 -3
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/run.cjs +10 -1
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/utils/llm.cjs +0 -1
- package/dist/cjs/utils/llm.cjs.map +1 -1
- package/dist/esm/common/enum.mjs +0 -1
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/llm/providers.mjs +0 -3
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/run.mjs +10 -1
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/utils/llm.mjs +0 -1
- package/dist/esm/utils/llm.mjs.map +1 -1
- package/dist/types/common/enum.d.ts +0 -1
- package/dist/types/types/llm.d.ts +1 -6
- package/package.json +1 -2
- package/src/common/enum.ts +0 -1
- package/src/llm/providers.ts +0 -3
- package/src/run.ts +13 -4
- package/src/types/llm.ts +0 -6
- package/src/utils/llm.ts +0 -1
- package/src/utils/llmConfig.ts +5 -3
- package/dist/cjs/llm/ollama/index.cjs +0 -70
- package/dist/cjs/llm/ollama/index.cjs.map +0 -1
- package/dist/cjs/llm/ollama/utils.cjs +0 -158
- package/dist/cjs/llm/ollama/utils.cjs.map +0 -1
- package/dist/esm/llm/ollama/index.mjs +0 -68
- package/dist/esm/llm/ollama/index.mjs.map +0 -1
- package/dist/esm/llm/ollama/utils.mjs +0 -155
- package/dist/esm/llm/ollama/utils.mjs.map +0 -1
- package/dist/types/llm/ollama/index.d.ts +0 -8
- package/dist/types/llm/ollama/utils.d.ts +0 -7
- package/src/llm/ollama/index.ts +0 -92
- package/src/llm/ollama/utils.ts +0 -193
package/src/llm/providers.ts
CHANGED
```diff
@@ -16,13 +16,11 @@ import { CustomChatBedrockConverse } from '@/llm/bedrock';
 import { CustomAnthropic } from '@/llm/anthropic';
 import { ChatOpenRouter } from '@/llm/openrouter';
 import { ChatVertexAI } from '@/llm/vertexai';
-import { ChatOllama } from '@/llm/ollama';
 import { Providers } from '@/common';
 
 export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.XAI]: ChatXAI,
   [Providers.OPENAI]: ChatOpenAI,
-  [Providers.OLLAMA]: ChatOllama,
   [Providers.AZURE]: AzureChatOpenAI,
   [Providers.VERTEXAI]: ChatVertexAI,
   [Providers.DEEPSEEK]: ChatDeepSeek,
@@ -38,7 +36,6 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
 export const manualToolStreamProviders = new Set<Providers | string>([
   Providers.ANTHROPIC,
   Providers.BEDROCK,
-  Providers.OLLAMA,
 ]);
 
 export const getChatModelClass = <P extends Providers>(
```
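Ollama no longer gets its own entry in the constructor map, so provider lookups now only cover the remaining classes (the `ollama` preset in `llmConfig.ts` below switches to the OpenAI-compatible client instead). A minimal sketch of the registry pattern that `llmProviders` and `getChatModelClass` implement, with a cut-down `Providers` enum and only two entries; everything here beyond the two class imports is illustrative rather than the package's actual code:

```ts
import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';

// Cut-down stand-in for the package's Providers enum.
enum Providers {
  OPENAI = 'openai',
  AZURE = 'azure',
}

// Each provider key maps to the chat-model class used to construct it.
const llmProviders = {
  [Providers.OPENAI]: ChatOpenAI,
  [Providers.AZURE]: AzureChatOpenAI,
} as const;

type ChatModelConstructorMap = typeof llmProviders;

// Resolve the constructor for a provider; providers intentionally left out
// of the map (as Ollama now is) surface as an explicit error.
function getChatModelClass<P extends keyof ChatModelConstructorMap>(
  provider: P,
): ChatModelConstructorMap[P] {
  const ChatModelClass = llmProviders[provider];
  if (ChatModelClass == null) {
    throw new Error(`Unsupported provider: ${provider}`);
  }
  return ChatModelClass;
}

const OpenAIClass = getChatModelClass(Providers.OPENAI);
const model = new OpenAIClass({ model: 'gpt-4o-mini' });
```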
package/src/run.ts
CHANGED
```diff
@@ -197,7 +197,7 @@ export class Run<_T extends t.BaseGraphState> {
     }
     const handler = this.handlerRegistry?.getHandler(eventName);
     if (handler && this.Graph) {
-      await handler.handle(
+      return await handler.handle(
        eventName,
        data as
          | t.StreamEventData
@@ -276,6 +276,15 @@
 
     const stream = this.graphRunnable.streamEvents(inputs, config, {
       raiseError: true,
+      /**
+       * Prevent EventStreamCallbackHandler from processing custom events.
+       * Custom events are already handled via our createCustomEventCallback()
+       * which routes them through the handlerRegistry.
+       * Without this flag, EventStreamCallbackHandler throws errors when
+       * custom events are dispatched for run IDs not in its internal map
+       * (due to timing issues in parallel execution or after run cleanup).
+       */
+      ignoreCustomEvent: true,
     });
 
     for await (const event of stream) {
@@ -431,9 +440,9 @@ export class Run<_T extends t.BaseGraphState> {
     } catch (_e) {
       // Fallback: strip callbacks to avoid EventStream tracer errors in certain environments
       // But preserve langfuse handler if it exists
-      const langfuseHandler = (
-
-      );
+      const langfuseHandler = (
+        invokeConfig.callbacks as t.ProvidedCallbacks
+      )?.find((cb) => cb instanceof CallbackHandler);
       const { callbacks: _cb, ...rest } = invokeConfig;
       const safeConfig = Object.assign({}, rest, {
         callbacks: langfuseHandler ? [langfuseHandler] : [],
```
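The new `ignoreCustomEvent` flag assumes custom events are delivered through a separate callback rather than through the event stream itself. A hedged sketch of that division of labor using LangChain's `dispatchCustomEvent` helper; the plain-object handler below stands in for the package's `createCustomEventCallback()` and is not its actual implementation:

```ts
import { RunnableLambda } from '@langchain/core/runnables';
import { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';

// Illustrative stand-in for createCustomEventCallback(): a callbacks-API
// handler object that receives custom events directly.
const customEventCallback = {
  handleCustomEvent(eventName: string, data: unknown, runId: string) {
    console.log(`custom event "${eventName}" from run ${runId}`, data);
  },
};

const step = RunnableLambda.from(async (input: string) => {
  // Dispatched from inside the runnable's execution context.
  await dispatchCustomEvent('on_progress', { message: `processing ${input}` });
  return input.toUpperCase();
});

// The event is delivered to customEventCallback via the callbacks config,
// so the event-stream machinery does not need to see it at all.
await step.invoke('hello', { callbacks: [customEventCallback] });
```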
package/src/types/llm.ts
CHANGED
```diff
@@ -1,5 +1,4 @@
 // src/types/llm.ts
-import { ChatOllama } from '@langchain/ollama';
 import { ChatMistralAI } from '@langchain/mistralai';
 import type {
   BindToolsInput,
@@ -22,7 +21,6 @@ import type { RequestOptions } from '@google/generative-ai';
 import type { StructuredTool } from '@langchain/core/tools';
 import type { AnthropicInput } from '@langchain/anthropic';
 import type { Runnable } from '@langchain/core/runnables';
-import type { ChatOllamaInput } from '@langchain/ollama';
 import type { OpenAI as OpenAIClient } from 'openai';
 import type { ChatXAIInput } from '@langchain/xai';
 import {
@@ -57,7 +55,6 @@ export type AnthropicReasoning = {
   thinkingBudget?: number;
 };
 export type OpenAIClientOptions = ChatOpenAIFields;
-export type OllamaClientOptions = ChatOllamaInput;
 export type AnthropicClientOptions = AnthropicInput;
 export type MistralAIClientOptions = ChatMistralAIInput;
 export type VertexAIClientOptions = ChatVertexAIInput & {
@@ -80,7 +77,6 @@ export type XAIClientOptions = ChatXAIInput;
 export type ClientOptions =
   | OpenAIClientOptions
   | AzureClientOptions
-  | OllamaClientOptions
   | AnthropicClientOptions
   | MistralAIClientOptions
   | VertexAIClientOptions
@@ -103,7 +99,6 @@ export type LLMConfig = SharedLLMConfig &
 export type ProviderOptionsMap = {
   [Providers.AZURE]: AzureClientOptions;
   [Providers.OPENAI]: OpenAIClientOptions;
-  [Providers.OLLAMA]: OllamaClientOptions;
   [Providers.GOOGLE]: GoogleClientOptions;
   [Providers.VERTEXAI]: VertexAIClientOptions;
   [Providers.DEEPSEEK]: DeepSeekClientOptions;
@@ -118,7 +113,6 @@ export type ProviderOptionsMap = {
 export type ChatModelMap = {
   [Providers.XAI]: ChatXAI;
   [Providers.OPENAI]: ChatOpenAI;
-  [Providers.OLLAMA]: ChatOllama;
   [Providers.AZURE]: AzureChatOpenAI;
   [Providers.DEEPSEEK]: ChatDeepSeek;
   [Providers.VERTEXAI]: ChatVertexAI;
```
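Dropping `OllamaClientOptions` keeps `ClientOptions`, `ProviderOptionsMap`, and `ChatModelMap` in step with the provider registry. The underlying pattern is an enum-keyed mapped type, sketched below with assumed names and only two providers so each key resolves to its own options type:

```ts
import type { ChatOpenAIFields } from '@langchain/openai';
import type { AnthropicInput } from '@langchain/anthropic';

// Assumed, reduced mirror of the enum-keyed maps in types/llm.ts.
enum Providers {
  OPENAI = 'openai',
  ANTHROPIC = 'anthropic',
}

type ProviderOptionsMap = {
  [Providers.OPENAI]: ChatOpenAIFields;
  [Providers.ANTHROPIC]: AnthropicInput;
};

// A generic constrained by the map picks up the right options type
// per provider at compile time.
function buildOptions<P extends Providers>(
  provider: P,
  options: ProviderOptionsMap[P],
): { provider: P; options: ProviderOptionsMap[P] } {
  return { provider, options };
}

// `temperature` is checked against ChatOpenAIFields here; an Anthropic-only
// field in this position would be a compile-time error.
const openAIConfig = buildOptions(Providers.OPENAI, { temperature: 0 });
```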
package/src/utils/llm.ts
CHANGED
package/src/utils/llmConfig.ts
CHANGED
```diff
@@ -78,12 +78,14 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
     azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
     model: process.env.AZURE_MODEL_NAME ?? 'gpt-4o',
   },
-
-    provider: Providers.
+  ollama: {
+    provider: Providers.OPENAI,
     model: 'gpt-oss:20b',
     streaming: true,
     streamUsage: true,
-
+    configuration: {
+      baseURL: 'http://localhost:11434/v1',
+    },
   },
   lmstudio: {
     provider: Providers.OPENAI,
```
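The rewritten `ollama` preset drops the dedicated Ollama provider and instead points the stock OpenAI client at Ollama's OpenAI-compatible `/v1` endpoint. A minimal sketch of what that configuration amounts to at a call site, reusing the model name and base URL from the config above (the API key value is arbitrary; a local Ollama server does not validate it):

```ts
import { ChatOpenAI } from '@langchain/openai';

// Ollama serves an OpenAI-compatible API under /v1, so the stock ChatOpenAI
// client can target a local Ollama server directly.
const ollamaChat = new ChatOpenAI({
  model: 'gpt-oss:20b',
  streaming: true,
  streamUsage: true,
  apiKey: 'ollama', // placeholder; the local server does not check it
  configuration: {
    baseURL: 'http://localhost:11434/v1',
  },
});

const reply = await ollamaChat.invoke('Say hello in one short sentence.');
console.log(reply.content);
```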
package/dist/cjs/llm/ollama/index.cjs
DELETED
```diff
@@ -1,70 +0,0 @@
-'use strict';
-
-var messages = require('@langchain/core/messages');
-var outputs = require('@langchain/core/outputs');
-var ollama = require('@langchain/ollama');
-var utils = require('./utils.cjs');
-
-class ChatOllama extends ollama.ChatOllama {
-  static lc_name() {
-    return 'LibreChatOllama';
-  }
-  async *_streamResponseChunks(messages$1, options, runManager) {
-    if (this.checkOrPullModel) {
-      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-      // @ts-ignore
-      if (!(await this.checkModelExistsOnMachine(this.model))) {
-        await this.pull(this.model, {
-          logProgress: true,
-        });
-      }
-    }
-    const params = this.invocationParams(options);
-    // TODO: remove cast after SDK adds support for tool calls
-    const ollamaMessages = utils.convertToOllamaMessages(messages$1);
-    const usageMetadata = {
-      input_tokens: 0,
-      output_tokens: 0,
-      total_tokens: 0,
-    };
-    const stream = await this.client.chat({
-      ...params,
-      messages: ollamaMessages,
-      stream: true,
-    });
-    let lastMetadata;
-    for await (const chunk of stream) {
-      if (options.signal?.aborted === true) {
-        this.client.abort();
-      }
-      const { message: responseMessage, ...rest } = chunk;
-      usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;
-      usageMetadata.output_tokens += rest.eval_count ?? 0;
-      usageMetadata.total_tokens =
-        usageMetadata.input_tokens + usageMetadata.output_tokens;
-      lastMetadata = rest;
-      if (!responseMessage) {
-        continue;
-      }
-      const message = utils.convertOllamaMessagesToLangChain(responseMessage);
-      const generationChunk = new outputs.ChatGenerationChunk({
-        text: responseMessage.content || '',
-        message,
-      });
-      yield generationChunk;
-      await runManager?.handleLLMNewToken(responseMessage.content || '', undefined, undefined, undefined, undefined, { chunk: generationChunk });
-    }
-    // Yield the `response_metadata` as the final chunk.
-    yield new outputs.ChatGenerationChunk({
-      text: '',
-      message: new messages.AIMessageChunk({
-        content: '',
-        response_metadata: lastMetadata,
-        usage_metadata: usageMetadata,
-      }),
-    });
-  }
-}
-
-exports.ChatOllama = ChatOllama;
-//# sourceMappingURL=index.cjs.map
```
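With this subclass deleted, its manual token accounting (summing `prompt_eval_count` and `eval_count` into a final usage chunk) is gone as well. When streaming through the OpenAI-compatible path instead, roughly the same totals can be recovered by concatenating streamed chunks; a sketch under the assumption that the local Ollama version honors `stream_options.include_usage`:

```ts
import { ChatOpenAI } from '@langchain/openai';
import type { AIMessageChunk } from '@langchain/core/messages';

const chat = new ChatOpenAI({
  model: 'gpt-oss:20b',
  streamUsage: true, // request usage in the stream (stream_options.include_usage)
  apiKey: 'ollama',
  configuration: { baseURL: 'http://localhost:11434/v1' },
});

// Concatenating chunks merges their fields; the aggregated chunk then carries
// usage_metadata, much like the final chunk the deleted subclass emitted.
let aggregate: AIMessageChunk | undefined;
for await (const chunk of await chat.stream('Count to three.')) {
  aggregate = aggregate ? aggregate.concat(chunk) : chunk;
}
console.log(aggregate?.usage_metadata);
```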
package/dist/cjs/llm/ollama/index.cjs.map
DELETED
Generated single-line source map for index.cjs, removed together with the module above.
package/dist/cjs/llm/ollama/utils.cjs
DELETED
```diff
@@ -1,158 +0,0 @@
-'use strict';
-
-var messages = require('@langchain/core/messages');
-var uuid = require('uuid');
-
-function convertOllamaMessagesToLangChain(messages$1, extra) {
-  const additional_kwargs = {};
-  if ('thinking' in messages$1) {
-    additional_kwargs.reasoning_content = messages$1.thinking;
-  }
-  return new messages.AIMessageChunk({
-    content: messages$1.content || '',
-    tool_call_chunks: messages$1.tool_calls?.map((tc) => ({
-      name: tc.function.name,
-      args: JSON.stringify(tc.function.arguments),
-      type: 'tool_call_chunk',
-      index: 0,
-      id: uuid.v4(),
-    })),
-    response_metadata: extra?.responseMetadata,
-    usage_metadata: extra?.usageMetadata,
-    additional_kwargs,
-  });
-}
-function extractBase64FromDataUrl(dataUrl) {
-  const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
-  return match ? match[1] : '';
-}
-function convertAMessagesToOllama(messages) {
-  if (typeof messages.content === 'string') {
-    return [
-      {
-        role: 'assistant',
-        content: messages.content,
-      },
-    ];
-  }
-  const textFields = messages.content.filter((c) => c.type === 'text' && typeof c.text === 'string');
-  const textMessages = textFields.map((c) => ({
-    role: 'assistant',
-    content: c.text,
-  }));
-  let toolCallMsgs;
-  if (messages.content.find((c) => c.type === 'tool_use') &&
-    messages.tool_calls?.length) {
-    // `tool_use` content types are accepted if the message has tool calls
-    const toolCalls = messages.tool_calls.map((tc) => ({
-      id: tc.id,
-      type: 'function',
-      function: {
-        name: tc.name,
-        arguments: tc.args,
-      },
-    }));
-    if (toolCalls) {
-      toolCallMsgs = {
-        role: 'assistant',
-        tool_calls: toolCalls,
-        content: '',
-      };
-    }
-  }
-  else if (messages.content.find((c) => c.type === 'tool_use') &&
-    !messages.tool_calls?.length) {
-    throw new Error('\'tool_use\' content type is not supported without tool calls.');
-  }
-  return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
-}
-function convertHumanGenericMessagesToOllama(message) {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'user',
-        content: message.content,
-      },
-    ];
-  }
-  return message.content.map((c) => {
-    if (c.type === 'text') {
-      return {
-        role: 'user',
-        content: c.text,
-      };
-    }
-    else if (c.type === 'image_url') {
-      if (typeof c.image_url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url)],
-        };
-      }
-      else if (c.image_url.url && typeof c.image_url.url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url.url)],
-        };
-      }
-    }
-    throw new Error(`Unsupported content type: ${c.type}`);
-  });
-}
-function convertSystemMessageToOllama(message) {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'system',
-        content: message.content,
-      },
-    ];
-  }
-  else if (message.content.every((c) => c.type === 'text' && typeof c.text === 'string')) {
-    return message.content.map((c) => ({
-      role: 'system',
-      content: c.text,
-    }));
-  }
-  else {
-    throw new Error(`Unsupported content type(s): ${message.content
-      .map((c) => c.type)
-      .join(', ')}`);
-  }
-}
-function convertToolMessageToOllama(message) {
-  if (typeof message.content !== 'string') {
-    throw new Error('Non string tool message content is not supported');
-  }
-  return [
-    {
-      role: 'tool',
-      content: message.content,
-    },
-  ];
-}
-function convertToOllamaMessages(messages) {
-  return messages.flatMap((msg) => {
-    if (['human', 'generic'].includes(msg._getType())) {
-      return convertHumanGenericMessagesToOllama(msg);
-    }
-    else if (msg._getType() === 'ai') {
-      return convertAMessagesToOllama(msg);
-    }
-    else if (msg._getType() === 'system') {
-      return convertSystemMessageToOllama(msg);
-    }
-    else if (msg._getType() === 'tool') {
-      return convertToolMessageToOllama(msg);
-    }
-    else {
-      throw new Error(`Unsupported message type: ${msg._getType()}`);
-    }
-  });
-}
-
-exports.convertOllamaMessagesToLangChain = convertOllamaMessagesToLangChain;
-exports.convertToOllamaMessages = convertToOllamaMessages;
-//# sourceMappingURL=utils.cjs.map
```
package/dist/cjs/llm/ollama/utils.cjs.map
DELETED
Generated single-line source map for utils.cjs, removed together with the module above.
package/dist/esm/llm/ollama/index.mjs
DELETED
```diff
@@ -1,68 +0,0 @@
-import { AIMessageChunk } from '@langchain/core/messages';
-import { ChatGenerationChunk } from '@langchain/core/outputs';
-import { ChatOllama as ChatOllama$1 } from '@langchain/ollama';
-import { convertToOllamaMessages, convertOllamaMessagesToLangChain } from './utils.mjs';
-
-class ChatOllama extends ChatOllama$1 {
-  static lc_name() {
-    return 'LibreChatOllama';
-  }
-  async *_streamResponseChunks(messages, options, runManager) {
-    if (this.checkOrPullModel) {
-      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-      // @ts-ignore
-      if (!(await this.checkModelExistsOnMachine(this.model))) {
-        await this.pull(this.model, {
-          logProgress: true,
-        });
-      }
-    }
-    const params = this.invocationParams(options);
-    // TODO: remove cast after SDK adds support for tool calls
-    const ollamaMessages = convertToOllamaMessages(messages);
-    const usageMetadata = {
-      input_tokens: 0,
-      output_tokens: 0,
-      total_tokens: 0,
-    };
-    const stream = await this.client.chat({
-      ...params,
-      messages: ollamaMessages,
-      stream: true,
-    });
-    let lastMetadata;
-    for await (const chunk of stream) {
-      if (options.signal?.aborted === true) {
-        this.client.abort();
-      }
-      const { message: responseMessage, ...rest } = chunk;
-      usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;
-      usageMetadata.output_tokens += rest.eval_count ?? 0;
-      usageMetadata.total_tokens =
-        usageMetadata.input_tokens + usageMetadata.output_tokens;
-      lastMetadata = rest;
-      if (!responseMessage) {
-        continue;
-      }
-      const message = convertOllamaMessagesToLangChain(responseMessage);
-      const generationChunk = new ChatGenerationChunk({
-        text: responseMessage.content || '',
-        message,
-      });
-      yield generationChunk;
-      await runManager?.handleLLMNewToken(responseMessage.content || '', undefined, undefined, undefined, undefined, { chunk: generationChunk });
-    }
-    // Yield the `response_metadata` as the final chunk.
-    yield new ChatGenerationChunk({
-      text: '',
-      message: new AIMessageChunk({
-        content: '',
-        response_metadata: lastMetadata,
-        usage_metadata: usageMetadata,
-      }),
-    });
-  }
-}
-
-export { ChatOllama };
-//# sourceMappingURL=index.mjs.map
```
package/dist/esm/llm/ollama/index.mjs.map
DELETED
Generated single-line source map for index.mjs, removed together with the module above.
package/dist/esm/llm/ollama/utils.mjs
DELETED
```diff
@@ -1,155 +0,0 @@
-import { AIMessageChunk } from '@langchain/core/messages';
-import { v4 } from 'uuid';
-
-function convertOllamaMessagesToLangChain(messages, extra) {
-  const additional_kwargs = {};
-  if ('thinking' in messages) {
-    additional_kwargs.reasoning_content = messages.thinking;
-  }
-  return new AIMessageChunk({
-    content: messages.content || '',
-    tool_call_chunks: messages.tool_calls?.map((tc) => ({
-      name: tc.function.name,
-      args: JSON.stringify(tc.function.arguments),
-      type: 'tool_call_chunk',
-      index: 0,
-      id: v4(),
-    })),
-    response_metadata: extra?.responseMetadata,
-    usage_metadata: extra?.usageMetadata,
-    additional_kwargs,
-  });
-}
-function extractBase64FromDataUrl(dataUrl) {
-  const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
-  return match ? match[1] : '';
-}
-function convertAMessagesToOllama(messages) {
-  if (typeof messages.content === 'string') {
-    return [
-      {
-        role: 'assistant',
-        content: messages.content,
-      },
-    ];
-  }
-  const textFields = messages.content.filter((c) => c.type === 'text' && typeof c.text === 'string');
-  const textMessages = textFields.map((c) => ({
-    role: 'assistant',
-    content: c.text,
-  }));
-  let toolCallMsgs;
-  if (messages.content.find((c) => c.type === 'tool_use') &&
-    messages.tool_calls?.length) {
-    // `tool_use` content types are accepted if the message has tool calls
-    const toolCalls = messages.tool_calls.map((tc) => ({
-      id: tc.id,
-      type: 'function',
-      function: {
-        name: tc.name,
-        arguments: tc.args,
-      },
-    }));
-    if (toolCalls) {
-      toolCallMsgs = {
-        role: 'assistant',
-        tool_calls: toolCalls,
-        content: '',
-      };
-    }
-  }
-  else if (messages.content.find((c) => c.type === 'tool_use') &&
-    !messages.tool_calls?.length) {
-    throw new Error('\'tool_use\' content type is not supported without tool calls.');
-  }
-  return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
-}
-function convertHumanGenericMessagesToOllama(message) {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'user',
-        content: message.content,
-      },
-    ];
-  }
-  return message.content.map((c) => {
-    if (c.type === 'text') {
-      return {
-        role: 'user',
-        content: c.text,
-      };
-    }
-    else if (c.type === 'image_url') {
-      if (typeof c.image_url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url)],
-        };
-      }
-      else if (c.image_url.url && typeof c.image_url.url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url.url)],
-        };
-      }
-    }
-    throw new Error(`Unsupported content type: ${c.type}`);
-  });
-}
-function convertSystemMessageToOllama(message) {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'system',
-        content: message.content,
-      },
-    ];
-  }
-  else if (message.content.every((c) => c.type === 'text' && typeof c.text === 'string')) {
-    return message.content.map((c) => ({
-      role: 'system',
-      content: c.text,
-    }));
-  }
-  else {
-    throw new Error(`Unsupported content type(s): ${message.content
-      .map((c) => c.type)
-      .join(', ')}`);
-  }
-}
-function convertToolMessageToOllama(message) {
-  if (typeof message.content !== 'string') {
-    throw new Error('Non string tool message content is not supported');
-  }
-  return [
-    {
-      role: 'tool',
-      content: message.content,
-    },
-  ];
-}
-function convertToOllamaMessages(messages) {
-  return messages.flatMap((msg) => {
-    if (['human', 'generic'].includes(msg._getType())) {
-      return convertHumanGenericMessagesToOllama(msg);
-    }
-    else if (msg._getType() === 'ai') {
-      return convertAMessagesToOllama(msg);
-    }
-    else if (msg._getType() === 'system') {
-      return convertSystemMessageToOllama(msg);
-    }
-    else if (msg._getType() === 'tool') {
-      return convertToolMessageToOllama(msg);
-    }
-    else {
-      throw new Error(`Unsupported message type: ${msg._getType()}`);
-    }
-  });
-}
-
-export { convertOllamaMessagesToLangChain, convertToOllamaMessages };
-//# sourceMappingURL=utils.mjs.map
```
package/dist/esm/llm/ollama/utils.mjs.map
DELETED
Generated single-line source map for utils.mjs, removed together with the module above.