@theia/ai-chat 1.54.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/lib/browser/ai-chat-frontend-module.d.ts +4 -0
- package/lib/browser/ai-chat-frontend-module.d.ts.map +1 -0
- package/lib/browser/ai-chat-frontend-module.js +51 -0
- package/lib/browser/ai-chat-frontend-module.js.map +1 -0
- package/lib/browser/ai-chat-preferences.d.ts +4 -0
- package/lib/browser/ai-chat-preferences.d.ts.map +1 -0
- package/lib/browser/ai-chat-preferences.js +32 -0
- package/lib/browser/ai-chat-preferences.js.map +1 -0
- package/lib/browser/frontend-chat-service.d.ts +8 -0
- package/lib/browser/frontend-chat-service.d.ts.map +1 -0
- package/lib/browser/frontend-chat-service.js +67 -0
- package/lib/browser/frontend-chat-service.js.map +1 -0
- package/lib/common/chat-agent-service.d.ts +34 -0
- package/lib/common/chat-agent-service.d.ts.map +1 -0
- package/lib/common/chat-agent-service.js +75 -0
- package/lib/common/chat-agent-service.js.map +1 -0
- package/lib/common/chat-agents-variable-contribution.d.ts +17 -0
- package/lib/common/chat-agents-variable-contribution.d.ts.map +1 -0
- package/lib/common/chat-agents-variable-contribution.js +51 -0
- package/lib/common/chat-agents-variable-contribution.js.map +1 -0
- package/lib/common/chat-agents.d.ts +86 -0
- package/lib/common/chat-agents.d.ts.map +1 -0
- package/lib/common/chat-agents.js +307 -0
- package/lib/common/chat-agents.js.map +1 -0
- package/lib/common/chat-model.d.ts +319 -0
- package/lib/common/chat-model.d.ts.map +1 -0
- package/lib/common/chat-model.js +527 -0
- package/lib/common/chat-model.js.map +1 -0
- package/lib/common/chat-request-parser.d.ts +20 -0
- package/lib/common/chat-request-parser.d.ts.map +1 -0
- package/lib/common/chat-request-parser.js +158 -0
- package/lib/common/chat-request-parser.js.map +1 -0
- package/lib/common/chat-request-parser.spec.d.ts +2 -0
- package/lib/common/chat-request-parser.spec.d.ts.map +1 -0
- package/lib/common/chat-request-parser.spec.js +108 -0
- package/lib/common/chat-request-parser.spec.js.map +1 -0
- package/lib/common/chat-service.d.ts +72 -0
- package/lib/common/chat-service.d.ts.map +1 -0
- package/lib/common/chat-service.js +170 -0
- package/lib/common/chat-service.js.map +1 -0
- package/lib/common/command-chat-agents.d.ts +33 -0
- package/lib/common/command-chat-agents.d.ts.map +1 -0
- package/lib/common/command-chat-agents.js +327 -0
- package/lib/common/command-chat-agents.js.map +1 -0
- package/lib/common/index.d.ts +10 -0
- package/lib/common/index.d.ts.map +1 -0
- package/lib/common/index.js +28 -0
- package/lib/common/index.js.map +1 -0
- package/lib/common/orchestrator-chat-agent.d.ts +22 -0
- package/lib/common/orchestrator-chat-agent.d.ts.map +1 -0
- package/lib/common/orchestrator-chat-agent.js +140 -0
- package/lib/common/orchestrator-chat-agent.js.map +1 -0
- package/lib/common/parsed-chat-request.d.ts +66 -0
- package/lib/common/parsed-chat-request.d.ts.map +1 -0
- package/lib/common/parsed-chat-request.js +83 -0
- package/lib/common/parsed-chat-request.js.map +1 -0
- package/lib/common/universal-chat-agent.d.ts +15 -0
- package/lib/common/universal-chat-agent.d.ts.map +1 -0
- package/lib/common/universal-chat-agent.js +102 -0
- package/lib/common/universal-chat-agent.js.map +1 -0
- package/package.json +54 -0
- package/src/browser/ai-chat-frontend-module.ts +66 -0
- package/src/browser/ai-chat-preferences.ts +32 -0
- package/src/browser/frontend-chat-service.ts +66 -0
- package/src/common/chat-agent-service.ts +85 -0
- package/src/common/chat-agents-variable-contribution.ts +81 -0
- package/src/common/chat-agents.ts +384 -0
- package/src/common/chat-model.ts +776 -0
- package/src/common/chat-request-parser.spec.ts +120 -0
- package/src/common/chat-request-parser.ts +220 -0
- package/src/common/chat-service.ts +236 -0
- package/src/common/command-chat-agents.ts +352 -0
- package/src/common/index.ts +24 -0
- package/src/common/orchestrator-chat-agent.ts +151 -0
- package/src/common/parsed-chat-request.ts +112 -0
- package/src/common/universal-chat-agent.ts +109 -0
|
@@ -0,0 +1,384 @@
|
|
|
1
|
+
// *****************************************************************************
|
|
2
|
+
// Copyright (C) 2024 EclipseSource GmbH.
|
|
3
|
+
//
|
|
4
|
+
// This program and the accompanying materials are made available under the
|
|
5
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
6
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
7
|
+
//
|
|
8
|
+
// This Source Code may also be made available under the following Secondary
|
|
9
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
10
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
11
|
+
// with the GNU Classpath Exception which is available at
|
|
12
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
13
|
+
//
|
|
14
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
15
|
+
// *****************************************************************************
|
|
16
|
+
/*---------------------------------------------------------------------------------------------
|
|
17
|
+
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
18
|
+
* Licensed under the MIT License. See License.txt in the project root for license information.
|
|
19
|
+
*--------------------------------------------------------------------------------------------*/
|
|
20
|
+
// Partially copied from https://github.com/microsoft/vscode/blob/a2cab7255c0df424027be05d58e1b7b941f4ea60/src/vs/workbench/contrib/chat/common/chatAgents.ts
|
|
21
|
+
|
|
22
|
+
import {
|
|
23
|
+
CommunicationRecordingService,
|
|
24
|
+
getTextOfResponse,
|
|
25
|
+
LanguageModel,
|
|
26
|
+
LanguageModelRequirement,
|
|
27
|
+
LanguageModelResponse,
|
|
28
|
+
PromptService,
|
|
29
|
+
ResolvedPromptTemplate,
|
|
30
|
+
ToolRequest,
|
|
31
|
+
} from '@theia/ai-core';
|
|
32
|
+
import {
|
|
33
|
+
Agent,
|
|
34
|
+
isLanguageModelStreamResponse,
|
|
35
|
+
isLanguageModelTextResponse,
|
|
36
|
+
LanguageModelRegistry,
|
|
37
|
+
LanguageModelStreamResponsePart,
|
|
38
|
+
MessageActor,
|
|
39
|
+
} from '@theia/ai-core/lib/common';
|
|
40
|
+
import { CancellationToken, CancellationTokenSource, ILogger, isArray } from '@theia/core';
|
|
41
|
+
import { inject, injectable } from '@theia/core/shared/inversify';
|
|
42
|
+
import { ChatAgentService } from './chat-agent-service';
|
|
43
|
+
import {
|
|
44
|
+
ChatModel,
|
|
45
|
+
ChatRequestModel,
|
|
46
|
+
ChatRequestModelImpl,
|
|
47
|
+
ChatResponseContent,
|
|
48
|
+
CodeChatResponseContentImpl,
|
|
49
|
+
ErrorChatResponseContentImpl,
|
|
50
|
+
MarkdownChatResponseContentImpl,
|
|
51
|
+
ToolCallChatResponseContentImpl
|
|
52
|
+
} from './chat-model';
|
|
53
|
+
|
|
54
|
+
/**
 * A conversation consists of a sequence of ChatMessages.
 * Each ChatMessage is either a user message, AI message or a system message.
 *
 * For now we only support text based messages.
 */
export interface ChatMessage {
    /** Originator of the message; this file uses the actors 'user', 'ai' and 'system'. */
    actor: MessageActor;
    /** Discriminator — only plain-text messages are supported so far. */
    type: 'text';
    /** The textual content of the message. */
    query: string;
}
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* System message content, enriched with function descriptions.
|
|
68
|
+
*/
|
|
69
|
+
export interface SystemMessageDescription {
|
|
70
|
+
text: string;
|
|
71
|
+
/** All functions references in the system message. */
|
|
72
|
+
functionDescriptions?: Map<string, ToolRequest>;
|
|
73
|
+
}
|
|
74
|
+
export namespace SystemMessageDescription {
|
|
75
|
+
export function fromResolvedPromptTemplate(resolvedPrompt: ResolvedPromptTemplate): SystemMessageDescription {
|
|
76
|
+
return {
|
|
77
|
+
text: resolvedPrompt.text,
|
|
78
|
+
functionDescriptions: resolvedPrompt.functionDescriptions
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
/**
|
|
84
|
+
* The location from where an chat agent may be invoked.
|
|
85
|
+
* Based on the location, a different context may be available.
|
|
86
|
+
*/
|
|
87
|
+
export enum ChatAgentLocation {
|
|
88
|
+
Panel = 'panel',
|
|
89
|
+
Terminal = 'terminal',
|
|
90
|
+
Notebook = 'notebook',
|
|
91
|
+
Editor = 'editor'
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
export namespace ChatAgentLocation {
|
|
95
|
+
export const ALL: ChatAgentLocation[] = [ChatAgentLocation.Panel, ChatAgentLocation.Terminal, ChatAgentLocation.Notebook, ChatAgentLocation.Editor];
|
|
96
|
+
|
|
97
|
+
export function fromRaw(value: string): ChatAgentLocation {
|
|
98
|
+
switch (value) {
|
|
99
|
+
case 'panel': return ChatAgentLocation.Panel;
|
|
100
|
+
case 'terminal': return ChatAgentLocation.Terminal;
|
|
101
|
+
case 'notebook': return ChatAgentLocation.Notebook;
|
|
102
|
+
case 'editor': return ChatAgentLocation.Editor;
|
|
103
|
+
}
|
|
104
|
+
return ChatAgentLocation.Panel;
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/** DI service identifier used to bind and look up `ChatAgent` contributions. */
export const ChatAgent = Symbol('ChatAgent');
/**
 * A chat agent is a specialized agent with a common interface for its invocation.
 */
export interface ChatAgent extends Agent {
    /** The locations from which this agent may be invoked (panel, terminal, notebook, editor). */
    locations: ChatAgentLocation[];
    /** Optional CSS class used to render the agent's icon. */
    iconClass?: string;
    /**
     * Processes the given chat request, writing its result into `request.response`.
     * @param chatAgentService optional service giving the agent access to other registered agents.
     */
    invoke(request: ChatRequestModelImpl, chatAgentService?: ChatAgentService): Promise<void>;
}
|
|
117
|
+
|
|
118
|
+
@injectable()
|
|
119
|
+
export abstract class AbstractChatAgent {
|
|
120
|
+
@inject(LanguageModelRegistry) protected languageModelRegistry: LanguageModelRegistry;
|
|
121
|
+
@inject(ILogger) protected logger: ILogger;
|
|
122
|
+
@inject(CommunicationRecordingService) protected recordingService: CommunicationRecordingService;
|
|
123
|
+
@inject(PromptService) protected promptService: PromptService;
|
|
124
|
+
constructor(
|
|
125
|
+
public id: string,
|
|
126
|
+
public languageModelRequirements: LanguageModelRequirement[],
|
|
127
|
+
protected defaultLanguageModelPurpose: string,
|
|
128
|
+
public iconClass: string = 'codicon codicon-copilot',
|
|
129
|
+
public locations: ChatAgentLocation[] = ChatAgentLocation.ALL,
|
|
130
|
+
public tags: String[] = ['Chat']) {
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
async invoke(request: ChatRequestModelImpl): Promise<void> {
|
|
134
|
+
try {
|
|
135
|
+
const languageModel = await this.getLanguageModel(this.defaultLanguageModelPurpose);
|
|
136
|
+
if (!languageModel) {
|
|
137
|
+
throw new Error('Couldn\'t find a matching language model. Please check your setup!');
|
|
138
|
+
}
|
|
139
|
+
const messages = await this.getMessages(request.session);
|
|
140
|
+
this.recordingService.recordRequest({
|
|
141
|
+
agentId: this.id,
|
|
142
|
+
sessionId: request.session.id,
|
|
143
|
+
timestamp: Date.now(),
|
|
144
|
+
requestId: request.id,
|
|
145
|
+
request: request.request.text,
|
|
146
|
+
messages
|
|
147
|
+
});
|
|
148
|
+
|
|
149
|
+
const systemMessageDescription = await this.getSystemMessageDescription();
|
|
150
|
+
const tools: Map<string, ToolRequest> = new Map();
|
|
151
|
+
if (systemMessageDescription) {
|
|
152
|
+
const systemMsg: ChatMessage = {
|
|
153
|
+
actor: 'system',
|
|
154
|
+
type: 'text',
|
|
155
|
+
query: systemMessageDescription.text
|
|
156
|
+
};
|
|
157
|
+
// insert system message at the beginning of the request messages
|
|
158
|
+
messages.unshift(systemMsg);
|
|
159
|
+
systemMessageDescription.functionDescriptions?.forEach((tool, id) => {
|
|
160
|
+
tools.set(id, tool);
|
|
161
|
+
});
|
|
162
|
+
}
|
|
163
|
+
this.getTools(request)?.forEach(tool => tools.set(tool.id, tool));
|
|
164
|
+
|
|
165
|
+
const cancellationToken = new CancellationTokenSource();
|
|
166
|
+
request.response.onDidChange(() => {
|
|
167
|
+
if (request.response.isCanceled) {
|
|
168
|
+
cancellationToken.cancel();
|
|
169
|
+
}
|
|
170
|
+
});
|
|
171
|
+
|
|
172
|
+
const languageModelResponse = await this.callLlm(
|
|
173
|
+
languageModel,
|
|
174
|
+
messages,
|
|
175
|
+
tools.size > 0 ? Array.from(tools.values()) : undefined,
|
|
176
|
+
cancellationToken.token
|
|
177
|
+
);
|
|
178
|
+
await this.addContentsToResponse(languageModelResponse, request);
|
|
179
|
+
request.response.complete();
|
|
180
|
+
this.recordingService.recordResponse({
|
|
181
|
+
agentId: this.id,
|
|
182
|
+
sessionId: request.session.id,
|
|
183
|
+
timestamp: Date.now(),
|
|
184
|
+
requestId: request.response.requestId,
|
|
185
|
+
response: request.response.response.asString()
|
|
186
|
+
});
|
|
187
|
+
} catch (e) {
|
|
188
|
+
this.handleError(request, e);
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
protected handleError(request: ChatRequestModelImpl, error: Error): void {
|
|
193
|
+
request.response.response.addContent(new ErrorChatResponseContentImpl(error));
|
|
194
|
+
request.response.error(error);
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
protected getLanguageModelSelector(languageModelPurpose: string): LanguageModelRequirement {
|
|
198
|
+
return this.languageModelRequirements.find(req => req.purpose === languageModelPurpose)!;
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
protected async getLanguageModel(languageModelPurpose: string): Promise<LanguageModel> {
|
|
202
|
+
return this.selectLanguageModel(this.getLanguageModelSelector(languageModelPurpose));
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
protected async selectLanguageModel(selector: LanguageModelRequirement): Promise<LanguageModel> {
|
|
206
|
+
const languageModel = await this.languageModelRegistry.selectLanguageModel({ agent: this.id, ...selector });
|
|
207
|
+
if (!languageModel) {
|
|
208
|
+
throw new Error('Couldn\'t find a language model. Please check your setup!');
|
|
209
|
+
}
|
|
210
|
+
return languageModel;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
protected abstract getSystemMessageDescription(): Promise<SystemMessageDescription | undefined>;
|
|
214
|
+
|
|
215
|
+
protected async getMessages(
|
|
216
|
+
model: ChatModel, includeResponseInProgress = false
|
|
217
|
+
): Promise<ChatMessage[]> {
|
|
218
|
+
const requestMessages = model.getRequests().flatMap(request => {
|
|
219
|
+
const messages: ChatMessage[] = [];
|
|
220
|
+
const text = request.message.parts.map(part => part.promptText).join('');
|
|
221
|
+
messages.push({
|
|
222
|
+
actor: 'user',
|
|
223
|
+
type: 'text',
|
|
224
|
+
query: text,
|
|
225
|
+
});
|
|
226
|
+
if (request.response.isComplete || includeResponseInProgress) {
|
|
227
|
+
messages.push({
|
|
228
|
+
actor: 'ai',
|
|
229
|
+
type: 'text',
|
|
230
|
+
query: request.response.response.asString(),
|
|
231
|
+
});
|
|
232
|
+
}
|
|
233
|
+
return messages;
|
|
234
|
+
});
|
|
235
|
+
|
|
236
|
+
return requestMessages;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
/**
|
|
240
|
+
* @returns the list of tools used by this agent, or undefined if none is needed.
|
|
241
|
+
*/
|
|
242
|
+
protected getTools(request: ChatRequestModel): ToolRequest[] | undefined {
|
|
243
|
+
return request.message.toolRequests.size > 0
|
|
244
|
+
? [...request.message.toolRequests.values()]
|
|
245
|
+
: undefined;
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
protected async callLlm(
|
|
249
|
+
languageModel: LanguageModel,
|
|
250
|
+
messages: ChatMessage[],
|
|
251
|
+
tools: ToolRequest[] | undefined,
|
|
252
|
+
token: CancellationToken
|
|
253
|
+
): Promise<LanguageModelResponse> {
|
|
254
|
+
const languageModelResponse = languageModel.request({
|
|
255
|
+
messages,
|
|
256
|
+
tools,
|
|
257
|
+
}, token);
|
|
258
|
+
return languageModelResponse;
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
protected abstract addContentsToResponse(languageModelResponse: LanguageModelResponse, request: ChatRequestModelImpl): Promise<void>;
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
@injectable()
|
|
265
|
+
export abstract class AbstractTextToModelParsingChatAgent<T> extends AbstractChatAgent {
|
|
266
|
+
|
|
267
|
+
protected async addContentsToResponse(languageModelResponse: LanguageModelResponse, request: ChatRequestModelImpl): Promise<void> {
|
|
268
|
+
const responseAsText = await getTextOfResponse(languageModelResponse);
|
|
269
|
+
const parsedCommand = await this.parseTextResponse(responseAsText);
|
|
270
|
+
const content = this.createResponseContent(parsedCommand, request);
|
|
271
|
+
request.response.response.addContent(content);
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
protected abstract parseTextResponse(text: string): Promise<T>;
|
|
275
|
+
|
|
276
|
+
protected abstract createResponseContent(parsedModel: T, request: ChatRequestModelImpl): ChatResponseContent;
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
/**
 * Chat agent base class that renders language model output incrementally:
 * plain text responses become a single markdown content, stream responses are
 * consumed token by token with fenced code blocks split out into code contents,
 * and tool calls become dedicated tool-call contents.
 */
@injectable()
export abstract class AbstractStreamParsingChatAgent extends AbstractChatAgent {

    protected override async addContentsToResponse(languageModelResponse: LanguageModelResponse, request: ChatRequestModelImpl): Promise<void> {
        // Case 1: a complete text response — add it as one markdown content.
        if (isLanguageModelTextResponse(languageModelResponse)) {
            request.response.response.addContent(
                new MarkdownChatResponseContentImpl(languageModelResponse.text)
            );
            // NOTE(review): the base class invoke() also calls complete() and
            // recordResponse() after this method returns — confirm repeating them is safe.
            request.response.complete();
            this.recordingService.recordResponse({
                agentId: this.id,
                sessionId: request.session.id,
                timestamp: Date.now(),
                requestId: request.response.requestId,
                response: request.response.response.asString()
            });
            return;
        }
        // Case 2: a streamed response — parse each token and re-split the newest content.
        if (isLanguageModelStreamResponse(languageModelResponse)) {
            for await (const token of languageModelResponse.stream) {
                const newContents = this.parse(token, request.response.response.content);
                if (isArray(newContents)) {
                    newContents.forEach(newContent => request.response.response.addContent(newContent));
                } else {
                    request.response.response.addContent(newContents);
                }

                // Re-examine the most recent content: it may contain (partial) code fences.
                // NOTE(review): the early returns below abort stream consumption without
                // completing the response — confirm this is intended.
                const lastContent = request.response.response.content.pop();
                if (lastContent === undefined) {
                    return;
                }
                const text = lastContent.asString?.();
                if (text === undefined) {
                    return;
                }
                let curSearchIndex = 0;
                const result: ChatResponseContent[] = [];
                while (curSearchIndex < text.length) {
                    // find start of code block: ```[language]\n<code>[\n]```
                    const codeStartIndex = text.indexOf('```', curSearchIndex);
                    if (codeStartIndex === -1) {
                        break;
                    }

                    // find language specifier if present
                    // NOTE(review): if no newline follows the opening fence, newLineIndex is -1
                    // and the codeText below starts at index 0 — verify this edge case.
                    const newLineIndex = text.indexOf('\n', codeStartIndex + 3);
                    const language = codeStartIndex + 3 < newLineIndex ? text.substring(codeStartIndex + 3, newLineIndex) : undefined;

                    // find end of code block
                    const codeEndIndex = text.indexOf('```', codeStartIndex + 3);
                    if (codeEndIndex === -1) {
                        break;
                    }

                    // add text before code block as markdown content
                    result.push(new MarkdownChatResponseContentImpl(text.substring(curSearchIndex, codeStartIndex)));
                    // add code block as code content
                    const codeText = text.substring(newLineIndex + 1, codeEndIndex).trimEnd();
                    result.push(new CodeChatResponseContentImpl(codeText, language));
                    curSearchIndex = codeEndIndex + 3;
                }

                // Either replace the popped content with the split parts, or put it back unchanged.
                if (result.length > 0) {
                    result.forEach(r => {
                        request.response.response.addContent(r);
                    });
                } else {
                    request.response.response.addContent(lastContent);
                }
            }
            request.response.complete();
            this.recordingService.recordResponse({
                agentId: this.id,
                sessionId: request.session.id,
                timestamp: Date.now(),
                requestId: request.response.requestId,
                response: request.response.response.asString()
            });
            return;
        }
        // Case 3: unknown response shape — log and fall back to stringified JSON.
        this.logger.error(
            'Received unknown response in agent. Return response as text'
        );
        request.response.response.addContent(
            new MarkdownChatResponseContentImpl(
                JSON.stringify(languageModelResponse)
            )
        );
    }

    /**
     * Maps a single stream token to response content: text tokens become markdown,
     * tool-call tokens become tool-call contents, anything else an empty markdown content.
     * Note: 'previousContent' is currently unused.
     */
    private parse(token: LanguageModelStreamResponsePart, previousContent: ChatResponseContent[]): ChatResponseContent | ChatResponseContent[] {
        const content = token.content;
        // eslint-disable-next-line no-null/no-null
        if (content !== undefined && content !== null) {
            return new MarkdownChatResponseContentImpl(content);
        }
        const toolCalls = token.tool_calls;
        if (toolCalls !== undefined) {
            const toolCallContents = toolCalls.map(toolCall =>
                new ToolCallChatResponseContentImpl(toolCall.id, toolCall.function?.name, toolCall.function?.arguments, toolCall.finished, toolCall.result));
            return toolCallContents;
        }
        return new MarkdownChatResponseContentImpl('');
    }

}
|