@rws-framework/ai-tools 1.1.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
package/src/index.ts
CHANGED
@@ -1,6 +1,7 @@
 
-import RWSPrompt, {
-import
+import RWSPrompt, { IChainCallOutput } from '@rws-framework/ai-tools/src/models/prompts/_prompt';
+import { ILLMChunk, IRWSPromptRequestExecutor, IRWSSinglePromptRequestExecutor, IRWSPromptStreamExecutor, IRWSPromptJSON, ChainStreamType, IAIRequestOptions, IAITool, IAIToolSchema, IAIToolParameter, IToolCall, ToolHandler } from './types/IPrompt';
+import { EmbedLoader as RWSEmbed, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams } from './models/convo/EmbedLoader';
 import RWSVectorStore from './models/convo/VectorStore';
 import { VectorStoreService } from './services/VectorStoreService';
 import { IContextToken } from './types/IContextToken';
@@ -10,7 +11,7 @@ export {
     IAiCfg,
     VectorStoreService,
     RWSVectorStore,
-
+    RWSEmbed,
     RWSPrompt,
     ILLMChunk,
     IRWSPromptRequestExecutor,
@@ -22,5 +23,11 @@ export {
     IConvoDebugXMLData,
     IEmbeddingsHandler,
     ISplitterParams,
-    IContextToken
-
+    IContextToken,
+    IAIRequestOptions,
+    IAITool,
+    IAIToolSchema,
+    IAIToolParameter,
+    IToolCall,
+    ToolHandler
+};
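For orientation, a minimal consumer-side sketch of the 2.0.0 surface re-exported above. It assumes the package entry point resolves to this index file; the names are taken from the new export list, and the comments are editorial:

// Hypothetical consumer import of the re-exported 2.0.0 API.
import {
    RWSPrompt,
    RWSEmbed,           // EmbedLoader, re-exported under its public alias
    IAITool,            // tool definition passed to the executors
    ToolHandler,        // handler signature used by RWSPrompt.registerToolHandlers()
    IAIRequestOptions   // options bag accepted by the request executors
} from '@rws-framework/ai-tools';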
package/src/models/convo/EmbedLoader.ts
CHANGED
@@ -2,9 +2,10 @@ import 'reflect-metadata';
 
 import { ConsoleService, RWSConfigService, RWSErrorCodes} from '@rws-framework/server';
 import { InjectServices } from '@rws-framework/server/src/services/_inject';
-import RWSPrompt
+import RWSPrompt from '../prompts/_prompt';
+import { IRWSPromptJSON, ILLMChunk } from '../../types/IPrompt';
 import {VectorStoreService} from '../../services/VectorStoreService';
-import RWSVectorStore, { VectorDocType } from '
+import RWSVectorStore, { VectorDocType } from './VectorStore';
 
 import { Document } from '@langchain/core/documents';
 import { UnstructuredLoader } from '@langchain/community/document_loaders/fs/unstructured';
@@ -66,15 +67,12 @@ interface IEmbeddingsHandler<T extends object> {
 type LLMType = BaseLanguageModelInterface | Runnable<BaseLanguageModelInput, string> | Runnable<BaseLanguageModelInput, BaseMessage>;
 
 @InjectServices([VectorStoreService])
-class
+class EmbedLoader<LLMChat extends BaseChatModel> {
     private loader: UnstructuredLoader;
-    // private docSplitter: RecursiveCharacterTextSplitter;
-
     private embeddings: IEmbeddingsHandler<any>;
 
     private docs: Document[] = [];
     private _initiated = false;
-    private store: RWSVectorStore;
     private convo_id: string;
     private llmChat: LLMChat;
     private chatConstructor: new (config: any) => LLMChat;
@@ -97,7 +95,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     ) {
         this.embeddings = embeddings;
         if(convoId === null) {
-            this.convo_id =
+            this.convo_id = EmbedLoader.uuid();
         } else {
             this.convo_id = convoId;
         }
@@ -108,17 +106,40 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     static uuid(): string
     {
         return uuid();
+    }
+
+    getId(): string {
+        return this.convo_id;
+    }
+
+    getDocs(): VectorDocType
+    {
+        return this.docs;
+    }
+
+    isInitiated(): boolean
+    {
+        return this._initiated;
     }
 
+    getChat(): LLMChat
+    {
+        return this.llmChat;
+    }
+
+    private avgDocLength = (documents: Document[]): number => {
+        return documents.reduce((sum, doc: Document) => sum + doc.pageContent.length, 0) / documents.length;
+    };
 
-    async splitDocs(filePath: string, params: ISplitterParams)
+    async splitDocs(filePath: string, params: ISplitterParams): Promise<RWSVectorStore>
     {
 
         if(!this.embeddings){
             throw new Error('No embeddings provided for ConvoLoader\'s constructor. ConvoLoader.splitDocs aborting...');
         }
 
-        const splitDir =
+        const splitDir = EmbedLoader.debugSplitDir(this.getId());
+        const finalDocs = [];
 
         if(!fs.existsSync(splitDir)){
             console.log(`Split dir ${ConsoleService.color().magentaBright(splitDir)} doesn't exist. Splitting docs...`);
@@ -142,10 +163,9 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
             logConvo(`After the split we have ${splitDocs.length} documents more than the original ${orgDocs.length}.`);
             logConvo(`Average length among ${orgDocs.length} documents (after split) is ${avgCharCountPost} characters.`);
 
-            this.docs = splitDocs;
-
             let i = 0;
-
+            splitDocs.forEach((doc: Document) => {
+                finalDocs.push(doc);
                 fs.writeFileSync(this.debugSplitFile(i), doc.pageContent);
                 i++;
             });
@@ -154,67 +174,17 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
 
             for(const filePath of splitFiles) {
                 const txt = fs.readFileSync(splitDir + '/' + filePath, 'utf-8');
-
+                finalDocs.push(new Document({ pageContent: txt }));
             }
         }
 
-
-    }
-
-    getId(): string {
-        return this.convo_id;
+        return await this.vectorStoreService.createStore(finalDocs, await this.embeddings.generateEmbeddings());
     }
 
-
-    {
-        return this.docs;
-    }
-    getStore(): RWSVectorStore
-    {
-        return this.store;
-    }
-
-    isInitiated(): boolean
-    {
-        return this._initiated;
-    }
-
-    setPrompt(prompt: RWSPrompt): ConvoLoader<LLMChat>
-    {
-        this.thePrompt = prompt;
-
-        this.llmChat = new this.chatConstructor({
-            streaming: true,
-            region: this.configService.get('aws_bedrock_region'),
-            credentials: {
-                accessKeyId: this.configService.get('aws_access_key'),
-                secretAccessKey: this.configService.get('aws_secret_key'),
-            },
-            model: 'anthropic.claude-v2',
-            maxTokens: prompt.getHyperParameter<number>('max_tokens_to_sample'),
-            temperature: prompt.getHyperParameter<number>('temperature'),
-            modelKwargs: {
-                top_p: prompt.getHyperParameter<number>('top_p'),
-                top_k: prompt.getHyperParameter<number>('top_k'),
-            }
-        });
-
-        return this;
-    }
-
-    getChat(): LLMChat
-    {
-        return this.llmChat;
-    }
-
-    private avgDocLength = (documents: Document[]): number => {
-        return documents.reduce((sum, doc: Document) => sum + doc.pageContent.length, 0) / documents.length;
-    };
-
-    async similaritySearch(query: string, splitCount: number): Promise<string>
+    async similaritySearch(query: string, splitCount: number, store: RWSVectorStore): Promise<string>
    {
         console.log('Store is ready. Searching for embedds...');
-        const texts = await
+        const texts = await store.getFaiss().similaritySearchWithScore(`${query}`, splitCount);
         console.log('Found best parts: ' + texts.length);
         return texts.map(([doc, score]: [any, number]) => `${doc['pageContent']}`).join('\n\n');
     }
@@ -240,7 +210,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     }
 
 
-    async waitForInit(): Promise<
+    async waitForInit(): Promise<EmbedLoader<LLMChat> | null>
     {
         const _self = this;
         return new Promise((resolve, reject)=>{
@@ -262,6 +232,10 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
         });
     }
 
+    async setPrompt(prompt: RWSPrompt){
+        this.thePrompt = prompt;
+    }
+
     private parseXML(xml: string, callback: (err: Error, result: any) => void): xml2js.Parser
     {
         const parser = new xml2js.Parser();
@@ -279,11 +253,11 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     }
 
     public debugConvoFile(){
-        return `${
+        return `${EmbedLoader.debugConvoDir(this.getId())}/conversation.xml`;
     }
 
     public debugSplitFile(i: number){
-        return `${
+        return `${EmbedLoader.debugSplitDir(this.getId())}/${i}.splitfile`;
     }
 
     private initDebugFile(): IConvoDebugXMLOutput
@@ -291,7 +265,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
         let xmlContent: string;
         let debugXML: IConvoDebugXMLData = null;
 
-        const convoDir =
+        const convoDir = EmbedLoader.debugConvoDir(this.getId());
 
         if(!fs.existsSync(convoDir)){
             fs.mkdirSync(convoDir, { recursive: true });
@@ -326,5 +300,4 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
 
 }
 
-export
-export { IChainCallOutput, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams, IBaseLangchainHyperParams };
+export { EmbedLoader, IChainCallOutput, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams, IBaseLangchainHyperParams };
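Taken together, the EmbedLoader changes drop the loader's private store: splitDocs() now returns the RWSVectorStore it builds, and similaritySearch() takes that store as an explicit third argument. A minimal usage sketch under those assumptions; the loader instance, file path and splitter params are placeholders (the full constructor signature is not shown in this diff), and the deep import paths mirror the ones used in index.ts above:

import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { EmbedLoader, ISplitterParams } from '@rws-framework/ai-tools/src/models/convo/EmbedLoader';
import RWSVectorStore from '@rws-framework/ai-tools/src/models/convo/VectorStore';

// The EmbedLoader constructor is not shown in full in this diff, so the instance is left abstract here.
declare const loader: EmbedLoader<BaseChatModel>;
declare const splitterParams: ISplitterParams;

async function buildContext(question: string): Promise<string> {
    // 2.0.0: splitDocs() returns the vector store instead of keeping a private `store` field.
    const store: RWSVectorStore = await loader.splitDocs('./docs/manual.txt', splitterParams);

    // similaritySearch() now receives that store explicitly as its third argument.
    return loader.similaritySearch(question, 5, store);
}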
package/src/models/prompts/_prompt.ts
CHANGED
@@ -1,87 +1,32 @@
 import { Readable } from 'stream';
 import { PromptTemplate } from '@langchain/core/prompts';
-import
-import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
-import { IterableReadableStream } from '@langchain/core/utils/stream';
-import { ChainValues } from '@langchain/core/utils/types';
-import { IContextToken } from '../../types/IContextToken';
+import { EmbedLoader, IChainCallOutput } from '../convo/EmbedLoader';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { IToolCall } from '../../types/IPrompt'
+import {
+    IPromptSender,
+    IPromptEnchantment,
+    IPromptParams,
+    IPromptHyperParameters,
+    IRWSPromptRequestExecutor,
+    IRWSPromptStreamExecutor,
+    IRWSSinglePromptRequestExecutor,
+    IRWSPromptJSON,
+    ChainStreamType,
+    ILLMChunk,
+    IAITool,
+    IRWSHistoryMessage,
+    InputType,
+    CompoundInput,
+    ToolHandler
+} from '../../types/IPrompt';
+import { IContextToken } from '../../types/IContextToken';
 
-
-
-
-    top_p?: number,
-    [key: string]: number
-}
-
-interface IRWSHistoryMessage {
-    content: string, creator: string
-}
-
-interface ILLMChunk {
-    content: string
-    status: string
-}
-
-interface IPromptParams {
-    hyperParameters?: IPromptHyperParameters;
-    input: CompoundInput[];
-    modelId: string;
-    modelType: string;
-}
-
-type InputType = 'text' | 'image';
-
-interface IPromptEnchantment {
-    enhancementId: string,
-    enhancementName: string,
-    enhancementParams: any,
-    input: CompoundInput
-    output: string
-}
-
-type IPromptSender = (prompt: RWSPrompt) => Promise<void>;
-
-interface IRWSPromptRequestExecutor {
-    promptRequest: (prompt: RWSPrompt, contextToken?: IContextToken | null, intruderPrompt?: string | null, debugVars?: any) => Promise<RWSPrompt>
-}
-
-
-interface IRWSSinglePromptRequestExecutor {
-    singlePromptRequest: (prompt: RWSPrompt, contextToken?: IContextToken | null, intruderPrompt?: string | null, ensureJson?: boolean, debugVars?: any) => Promise<RWSPrompt>
-}
-
-
-interface IRWSPromptStreamExecutor {
-    promptStream: (prompt: RWSPrompt, read: (chunk: ILLMChunk) => void, end: () => void, debugVars?: any) => Promise<RWSPrompt>
-}
-
-interface IRWSPromptJSON {
-    input: CompoundInput[];
-    enhancedInput: IPromptEnchantment[];
-    sentInput: CompoundInput[];
-    originalInput: CompoundInput[];
-    output: string;
-    modelId: string;
-    modelType: string;
-    multiTemplate: PromptTemplate;
-    convo: { id: string };
-    hyperParameters: IPromptHyperParameters;
-    created_at: string;
-    var_storage: any;
+type EntryParams = {
+    modelId: string,
+    body: string,
 }
 
-type ChainStreamType = AsyncGenerator<IterableReadableStream<ChainValues>>;
-
-interface CompoundInput {
-    type: InputType,
-    text?: string,
-    source?: {
-        type: string,
-        media_type: string,
-        data: string
-    }
-}
 
 class RWSPrompt {
     public _stream: ChainStreamType;
@@ -93,10 +38,10 @@ class RWSPrompt {
     private modelId: string;
     private modelType: string;
     private multiTemplate: PromptTemplate;
-    private
+    private embedLoader: EmbedLoader<any>;
     private hyperParameters: IPromptHyperParameters;
     private created_at: Date;
-
+    private toolHandlers: Map<string, ToolHandler> = new Map();
     private varStorage: any = {};
 
     private onStream = (chunk: string) => {
@@ -226,16 +171,16 @@ class RWSPrompt {
         return this.multiTemplate;
     }
 
-
+    setEmbedLoader(embedLoader: EmbedLoader<BaseChatModel>): RWSPrompt
     {
-        this.
+        this.embedLoader = embedLoader;
 
         return this;
     }
 
-
+    getEmbedLoader<T extends BaseChatModel>(): EmbedLoader<T>
     {
-        return this.
+        return this.embedLoader;
     }
 
     replacePromptVar(key: string, val: string)
@@ -248,23 +193,23 @@ class RWSPrompt {
         return [this.modelType, this.modelId];
     }
 
-    async requestWith(executor: IRWSPromptRequestExecutor, intruderPrompt: string = null, debugVars: any = {}): Promise<void>
+    async requestWith(executor: IRWSPromptRequestExecutor, intruderPrompt: string = null, debugVars: any = {}, tools?: IAITool[]): Promise<void>
     {
         this.sentInput = this.input;
-        const returnedRWS = await executor.promptRequest(this,
+        const returnedRWS = await executor.promptRequest(this, { intruderPrompt, debugVars, tools });
         this.output = returnedRWS.readOutput();
     }
 
-    async singleRequestWith(executor: IRWSSinglePromptRequestExecutor, intruderPrompt: string = null, ensureJson: boolean = false): Promise<void>
+    async singleRequestWith(executor: IRWSSinglePromptRequestExecutor, intruderPrompt: string = null, ensureJson: boolean = false, tools?: IAITool[]): Promise<void>
     {
-        await executor.singlePromptRequest(this,
+        await executor.singlePromptRequest(this, { intruderPrompt, ensureJson, tools });
         this.sentInput = this.input;
     }
 
-    async streamWith(executor: IRWSPromptStreamExecutor, read: (chunk: ILLMChunk) => void, end: () => void = () => {}, debugVars: any = {}): Promise<RWSPrompt>
+    async streamWith(executor: IRWSPromptStreamExecutor, read: (chunk: ILLMChunk) => void, end: () => void = () => {}, debugVars: any = {}, tools?: IAITool[]): Promise<RWSPrompt>
     {
         this.sentInput = this.input;
-        return executor.promptStream(this, read, end, debugVars);
+        return executor.promptStream(this, read, end, { debugVars, tools });
     }
 
     addInput(content: CompoundInput): RWSPrompt
@@ -284,35 +229,6 @@ class RWSPrompt {
         return this;
     }
 
-    async _oldreadStream(stream: Readable, react: (chunk: string) => void): Promise<void>
-    {
-        let first = true;
-        const chunks: string[] = []; // Replace 'any' with the actual type of your chunks
-
-        for await (const event of stream) {
-            // Assuming 'event' has a specific structure. Adjust according to actual event structure.
-            if ('chunk' in event && event.chunk.bytes) {
-                const chunk = JSON.parse(Buffer.from(event.chunk.bytes).toString('utf-8'));
-                if(first){
-                    console.log('chunk', chunk);
-                    first = false;
-                }
-
-                react(chunk.completion);
-
-                chunks.push(chunk.completion || chunk.generation ); // Use the actual property of 'chunk' you're interested in
-            } else if (
-                'internalServerException' in event ||
-                'modelStreamErrorException' in event ||
-                'throttlingException' in event ||
-                'validationException' in event
-            ) {
-                console.error(event);
-                break;
-            }
-        }
-    }
-
     private async isChainStreamType(source: any): Promise<boolean> {
         if (source && typeof source[Symbol.asyncIterator] === 'function') {
             const asyncIterator = source[Symbol.asyncIterator]();
@@ -331,7 +247,7 @@ class RWSPrompt {
         return false;
     }
 
-    async
+    async readStreamAsText(readableStream: ReadableStream, callback: (txt: string) => void) {
         const reader = readableStream.getReader();
 
         let readResult: any;
@@ -362,6 +278,33 @@ class RWSPrompt {
         }
     }
 
+    registerToolHandlers(toolHandlers: { [key: string]: ToolHandler }){
+        for(const key of Object.keys(toolHandlers)){
+            this.toolHandlers.set(key, toolHandlers[key]);
+        }
+    }
+
+    async callTools<T = unknown>(tools: IToolCall[]): Promise<T[]>
+    {
+        const results: T[] = [];
+        for(const tool of tools){
+            if(this.toolHandlers.has(tool.name)){
+                const result = await this.callAiTool<T>(tool);
+                if(result){
+                    results.push(result);
+                }
+            }
+        }
+
+        return results;
+    }
+
+    private async callAiTool<T>(tool: IToolCall): Promise<T>
+    {
+        const handler = this.toolHandlers.get(tool.name);
+        return await handler(tool.arguments);
+    }
+
     toJSON(): IRWSPromptJSON
     {
         return {
@@ -373,8 +316,8 @@ class RWSPrompt {
             modelId: this.modelId,
             modelType: this.modelType,
             multiTemplate: this.multiTemplate,
-
-            id: this.
+            embed: {
+                id: this.embedLoader.getId()
             },
             hyperParameters: this.hyperParameters,
             var_storage: this.varStorage,
@@ -385,16 +328,4 @@ class RWSPrompt {
 
 export default RWSPrompt;
 
-export {
-    IPromptSender,
-    IPromptEnchantment,
-    IPromptParams,
-    IPromptHyperParameters,
-    IRWSPromptRequestExecutor,
-    IRWSPromptStreamExecutor,
-    IRWSSinglePromptRequestExecutor,
-    IRWSPromptJSON,
-    IChainCallOutput,
-    ChainStreamType,
-    ILLMChunk
-};
+export { IChainCallOutput };
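The RWSPrompt changes add tool handling (registerToolHandlers() and callTools()) and route the extra executor arguments through a single options object, with an optional tools list appended to requestWith()/singleRequestWith()/streamWith(). A rough sketch of how that could look; the weather tool, its handler, the executor instance and the tool calls are illustrative placeholders, not part of the package:

import RWSPrompt from '@rws-framework/ai-tools/src/models/prompts/_prompt';
import { IAITool, IToolCall, IRWSPromptRequestExecutor } from '@rws-framework/ai-tools/src/types/IPrompt';

declare const prompt: RWSPrompt;                    // constructed elsewhere with IPromptParams
declare const executor: IRWSPromptRequestExecutor;  // e.g. a Bedrock-backed executor
declare const toolCalls: IToolCall[];               // tool calls parsed from the model response

// Illustrative tool definition following IAITool / IAIToolSchema.
const weatherTool: IAITool = {
    name: 'get_weather',
    description: 'Returns the current weather for a city',
    input_schema: {
        type: 'object',
        properties: { city: { type: 'string', description: 'City name' } },
        required: ['city']
    }
};

async function run(): Promise<void> {
    // Handlers are keyed by tool name; callTools() dispatches to them via the internal Map.
    prompt.registerToolHandlers({
        get_weather: async (args: { city: string }) => ({ city: args.city, tempC: 21 })
    });

    // 2.0.0: intruderPrompt/debugVars keep their positions, and tools ride along as a fourth argument.
    await prompt.requestWith(executor, null, {}, [weatherTool]);

    // Any tool calls the model returned are resolved against the registered handlers.
    const results = await prompt.callTools(toolCalls);
    console.log(results);
}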
package/src/types/IPrompt.ts
ADDED
@@ -0,0 +1,161 @@
+import { PromptTemplate } from '@langchain/core/prompts';
+import { IterableReadableStream } from '@langchain/core/utils/stream';
+import { ChainValues } from '@langchain/core/utils/types';
+import { IContextToken } from './IContextToken';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
+// General tool interfaces for AI models
+interface IAIToolParameterBase {
+    type: string;
+    description?: string;
+    enum?: string[];
+    required?: boolean;
+}
+
+interface IAIToolParameterObject extends IAIToolParameterBase {
+    type: 'object';
+    properties: Record<string, IAIToolParameter>;
+}
+
+interface IAIToolParameterArray extends IAIToolParameterBase {
+    type: 'array';
+    items: IAIToolParameter;
+}
+
+interface IAIToolParameterPrimitive extends IAIToolParameterBase {
+    type: 'string' | 'number' | 'boolean' | 'integer';
+}
+
+type IAIToolParameter = IAIToolParameterObject | IAIToolParameterArray | IAIToolParameterPrimitive;
+
+interface IAIToolSchema {
+    type: 'object';
+    properties: Record<string, IAIToolParameter>;
+    required?: string[];
+}
+
+interface IToolCall {
+    id: string;
+    name: string;
+    arguments: Record<string, any>;
+}
+
+interface IAITool {
+    name: string;
+    description: string;
+    input_schema: IAIToolSchema;
+}
+
+interface IPromptHyperParameters {
+    temperature: number,
+    top_k?: number,
+    top_p?: number,
+    [key: string]: number
+}
+
+interface IRWSHistoryMessage {
+    content: string, creator: string
+}
+
+interface ILLMChunk {
+    content: string
+    status: string
+}
+
+interface IPromptParams {
+    hyperParameters?: IPromptHyperParameters;
+    input: CompoundInput[];
+    modelId: string;
+    modelType: string;
+}
+
+type InputType = 'text' | 'image';
+
+interface IPromptEnchantment {
+    enhancementId: string,
+    enhancementName: string,
+    enhancementParams: any,
+    input: CompoundInput
+    output: string
+}
+
+// Forward reference to RWSPrompt to avoid circular dependencies
+type RWSPrompt = import('../models/prompts/_prompt').default;
+
+type IPromptSender = (prompt: RWSPrompt) => Promise<void>;
+
+interface IAIRequestOptions {
+    contextToken?: IContextToken | null,
+    intruderPrompt?: string | null,
+    ensureJson?: boolean,
+    debugVars?: any,
+    tools?: IAITool[]
+}
+
+interface IRWSPromptRequestExecutor {
+    promptRequest: (prompt: RWSPrompt, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+interface IRWSSinglePromptRequestExecutor {
+    singlePromptRequest: (prompt: RWSPrompt, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+interface IRWSPromptStreamExecutor {
+    promptStream: (prompt: RWSPrompt, read: (chunk: ILLMChunk) => void, end: () => void, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+type ToolHandler<T = any> = (...args: any) => Promise<T>;
+
+interface IRWSPromptJSON {
+    input: CompoundInput[];
+    enhancedInput: IPromptEnchantment[];
+    sentInput: CompoundInput[];
+    originalInput: CompoundInput[];
+    output: string;
+    modelId: string;
+    modelType: string;
+    multiTemplate: PromptTemplate;
+    embed?: { id: string };
+    hyperParameters: IPromptHyperParameters;
+    created_at: string;
+    var_storage: any;
+}
+
+type ChainStreamType = AsyncGenerator<IterableReadableStream<ChainValues>>;
+
+interface CompoundInput {
+    type: InputType,
+    text?: string,
+    source?: {
+        type: string,
+        media_type: string,
+        data: string
+    }
+}
+
+export {
+    IPromptSender,
+    IPromptEnchantment,
+    IPromptParams,
+    IPromptHyperParameters,
+    IRWSPromptRequestExecutor,
+    IRWSPromptStreamExecutor,
+    IRWSSinglePromptRequestExecutor,
+    IRWSPromptJSON,
+    ChainStreamType,
+    ILLMChunk,
+    IAIRequestOptions,
+    IAITool,
+    IAIToolSchema,
+    IAIToolParameter,
+    IAIToolParameterBase,
+    IAIToolParameterObject,
+    IAIToolParameterArray,
+    IAIToolParameterPrimitive,
+    IRWSHistoryMessage,
+    InputType,
+    CompoundInput,
+    IToolCall,
+    ToolHandler
+};
+
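The new IPrompt.ts centralizes the prompt and executor contracts, most notably IAIRequestOptions, which replaces the former positional contextToken/intruderPrompt/ensureJson/debugVars arguments on the executor interfaces. A skeleton of a custom executor written against these types; the class and its logging are illustrative only, not part of the package:

import RWSPrompt from '@rws-framework/ai-tools/src/models/prompts/_prompt';
import { IAIRequestOptions, IRWSSinglePromptRequestExecutor } from '@rws-framework/ai-tools/src/types/IPrompt';

// Illustrative executor skeleton: everything the caller used to pass positionally now arrives in aiOptions.
class LoggingExecutor implements IRWSSinglePromptRequestExecutor {
    async singlePromptRequest(prompt: RWSPrompt, aiOptions: IAIRequestOptions = {}): Promise<RWSPrompt> {
        const { intruderPrompt = null, ensureJson = false, tools = [] } = aiOptions;
        console.log(`ensureJson=${ensureJson}, tools=[${tools.map(t => t.name).join(', ')}]`, intruderPrompt);

        // ...call the underlying model here and write its answer back into `prompt`...
        return prompt;
    }
}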