@rws-framework/ai-tools 1.0.5 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
     "name": "@rws-framework/ai-tools",
     "private": false,
-    "version": "1.0.5",
+    "version": "2.0.0",
     "description": "",
     "main": "src/index.ts",
     "scripts": {},
package/src/index.ts CHANGED
@@ -1,6 +1,7 @@
 
-import RWSPrompt, { ILLMChunk, IRWSPromptRequestExecutor, IRWSSinglePromptRequestExecutor, IRWSPromptStreamExecutor, IChainCallOutput, IRWSPromptJSON, ChainStreamType } from '@rws-framework/ai-tools/src/models/prompts/_prompt';
-import RWSConvo, { IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams } from './models/convo/ConvoLoader';
+import RWSPrompt, { IChainCallOutput } from '@rws-framework/ai-tools/src/models/prompts/_prompt';
+import { ILLMChunk, IRWSPromptRequestExecutor, IRWSSinglePromptRequestExecutor, IRWSPromptStreamExecutor, IRWSPromptJSON, ChainStreamType, IAIRequestOptions, IAITool, IAIToolSchema, IAIToolParameter, IToolCall, ToolHandler } from './types/IPrompt';
+import { EmbedLoader as RWSEmbed, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams } from './models/convo/EmbedLoader';
 import RWSVectorStore from './models/convo/VectorStore';
 import { VectorStoreService } from './services/VectorStoreService';
 import { IContextToken } from './types/IContextToken';
@@ -10,7 +11,7 @@ export {
     IAiCfg,
     VectorStoreService,
     RWSVectorStore,
-    RWSConvo,
+    RWSEmbed,
     RWSPrompt,
     ILLMChunk,
     IRWSPromptRequestExecutor,
@@ -22,5 +23,11 @@ export {
     IConvoDebugXMLData,
     IEmbeddingsHandler,
     ISplitterParams,
-    IContextToken
-};
+    IContextToken,
+    IAIRequestOptions,
+    IAITool,
+    IAIToolSchema,
+    IAIToolParameter,
+    IToolCall,
+    ToolHandler
+};
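
The index changes rename the public `RWSConvo` export to `RWSEmbed` (the `ConvoLoader` class, renamed `EmbedLoader`) and re-export the new tool typings from `./types/IPrompt`. A minimal consumer import under 2.0.0 might look like this; a sketch based only on the export list above, assuming the package resolves from its root (`main` points at `src/index.ts`):

```ts
// Sketch of a 2.0.0 consumer import, based on the export list in the diff above.
import {
    RWSEmbed,          // exported as RWSConvo in 1.x
    RWSPrompt,
    RWSVectorStore,
    VectorStoreService,
    IAITool,
    IToolCall,
    ToolHandler,
    IAIRequestOptions
} from '@rws-framework/ai-tools';
```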
package/src/models/convo/EmbedLoader.ts CHANGED
@@ -2,9 +2,10 @@ import 'reflect-metadata';
 
 import { ConsoleService, RWSConfigService, RWSErrorCodes} from '@rws-framework/server';
 import { InjectServices } from '@rws-framework/server/src/services/_inject';
-import RWSPrompt, { IRWSPromptJSON, ILLMChunk } from '../../models/prompts/_prompt';
+import RWSPrompt from '../prompts/_prompt';
+import { IRWSPromptJSON, ILLMChunk } from '../../types/IPrompt';
 import {VectorStoreService} from '../../services/VectorStoreService';
-import RWSVectorStore, { VectorDocType } from '../../models/convo/VectorStore';
+import RWSVectorStore, { VectorDocType } from './VectorStore';
 
 import { Document } from '@langchain/core/documents';
 import { UnstructuredLoader } from '@langchain/community/document_loaders/fs/unstructured';
@@ -66,15 +67,12 @@ interface IEmbeddingsHandler<T extends object> {
 type LLMType = BaseLanguageModelInterface | Runnable<BaseLanguageModelInput, string> | Runnable<BaseLanguageModelInput, BaseMessage>;
 
 @InjectServices([VectorStoreService])
-class ConvoLoader<LLMChat extends BaseChatModel> {
+class EmbedLoader<LLMChat extends BaseChatModel> {
     private loader: UnstructuredLoader;
-    // private docSplitter: RecursiveCharacterTextSplitter;
-
     private embeddings: IEmbeddingsHandler<any>;
 
     private docs: Document[] = [];
     private _initiated = false;
-    private store: RWSVectorStore;
     private convo_id: string;
     private llmChat: LLMChat;
     private chatConstructor: new (config: any) => LLMChat;
@@ -97,7 +95,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     ) {
         this.embeddings = embeddings;
         if(convoId === null) {
-            this.convo_id = ConvoLoader.uuid();
+            this.convo_id = EmbedLoader.uuid();
         } else {
             this.convo_id = convoId;
         }
@@ -108,17 +106,40 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     static uuid(): string
     {
         return uuid();
+    }
+
+    getId(): string {
+        return this.convo_id;
+    }
+
+    getDocs(): VectorDocType
+    {
+        return this.docs;
+    }
+
+    isInitiated(): boolean
+    {
+        return this._initiated;
     }
 
+    getChat(): LLMChat
+    {
+        return this.llmChat;
+    }
+
+    private avgDocLength = (documents: Document[]): number => {
+        return documents.reduce((sum, doc: Document) => sum + doc.pageContent.length, 0) / documents.length;
+    };
 
-    async splitDocs(filePath: string, params: ISplitterParams)
+    async splitDocs(filePath: string, params: ISplitterParams): Promise<RWSVectorStore>
     {
 
         if(!this.embeddings){
             throw new Error('No embeddings provided for ConvoLoader\'s constructor. ConvoLoader.splitDocs aborting...');
         }
 
-        const splitDir = ConvoLoader.debugSplitDir(this.getId());
+        const splitDir = EmbedLoader.debugSplitDir(this.getId());
+        const finalDocs = [];
 
         if(!fs.existsSync(splitDir)){
             console.log(`Split dir ${ConsoleService.color().magentaBright(splitDir)} doesn't exist. Splitting docs...`);
@@ -142,10 +163,9 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
         logConvo(`After the split we have ${splitDocs.length} documents more than the original ${orgDocs.length}.`);
         logConvo(`Average length among ${orgDocs.length} documents (after split) is ${avgCharCountPost} characters.`);
 
-        this.docs = splitDocs;
-
         let i = 0;
-        this.docs.forEach((doc: Document) => {
+        splitDocs.forEach((doc: Document) => {
+            finalDocs.push(doc);
             fs.writeFileSync(this.debugSplitFile(i), doc.pageContent);
             i++;
         });
@@ -154,67 +174,17 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
 
         for(const filePath of splitFiles) {
             const txt = fs.readFileSync(splitDir + '/' + filePath, 'utf-8');
-            this.docs.push(new Document({ pageContent: txt }));
+            finalDocs.push(new Document({ pageContent: txt }));
         }
     }
 
-        this.store = await this.vectorStoreService.createStore(this.docs, await this.embeddings.generateEmbeddings());
-    }
-
-    getId(): string {
-        return this.convo_id;
+        return await this.vectorStoreService.createStore(finalDocs, await this.embeddings.generateEmbeddings());
     }
 
-    getDocs(): VectorDocType
-    {
-        return this.docs;
-    }
-    getStore(): RWSVectorStore
-    {
-        return this.store;
-    }
-
-    isInitiated(): boolean
-    {
-        return this._initiated;
-    }
-
-    setPrompt(prompt: RWSPrompt): ConvoLoader<LLMChat>
-    {
-        this.thePrompt = prompt;
-
-        this.llmChat = new this.chatConstructor({
-            streaming: true,
-            region: this.configService.get('aws_bedrock_region'),
-            credentials: {
-                accessKeyId: this.configService.get('aws_access_key'),
-                secretAccessKey: this.configService.get('aws_secret_key'),
-            },
-            model: 'anthropic.claude-v2',
-            maxTokens: prompt.getHyperParameter<number>('max_tokens_to_sample'),
-            temperature: prompt.getHyperParameter<number>('temperature'),
-            modelKwargs: {
-                top_p: prompt.getHyperParameter<number>('top_p'),
-                top_k: prompt.getHyperParameter<number>('top_k'),
-            }
-        });
-
-        return this;
-    }
-
-    getChat(): LLMChat
-    {
-        return this.llmChat;
-    }
-
-    private avgDocLength = (documents: Document[]): number => {
-        return documents.reduce((sum, doc: Document) => sum + doc.pageContent.length, 0) / documents.length;
-    };
-
-    async similaritySearch(query: string, splitCount: number): Promise<string>
+    async similaritySearch(query: string, splitCount: number, store: RWSVectorStore): Promise<string>
     {
         console.log('Store is ready. Searching for embedds...');
-        const texts = await this.getStore().getFaiss().similaritySearchWithScore(`${query}`, splitCount);
+        const texts = await store.getFaiss().similaritySearchWithScore(`${query}`, splitCount);
         console.log('Found best parts: ' + texts.length);
         return texts.map(([doc, score]: [any, number]) => `${doc['pageContent']}`).join('\n\n');
     }
@@ -240,7 +210,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     }
 
 
-    async waitForInit(): Promise<ConvoLoader<LLMChat> | null>
+    async waitForInit(): Promise<EmbedLoader<LLMChat> | null>
    {
         const _self = this;
         return new Promise((resolve, reject)=>{
@@ -262,6 +232,10 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
         });
     }
 
+    async setPrompt(prompt: RWSPrompt){
+        this.thePrompt = prompt;
+    }
+
     private parseXML(xml: string, callback: (err: Error, result: any) => void): xml2js.Parser
     {
         const parser = new xml2js.Parser();
@@ -279,11 +253,11 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
     }
 
     public debugConvoFile(){
-        return `${ConvoLoader.debugConvoDir(this.getId())}/conversation.xml`;
+        return `${EmbedLoader.debugConvoDir(this.getId())}/conversation.xml`;
     }
 
     public debugSplitFile(i: number){
-        return `${ConvoLoader.debugSplitDir(this.getId())}/${i}.splitfile`;
+        return `${EmbedLoader.debugSplitDir(this.getId())}/${i}.splitfile`;
     }
 
     private initDebugFile(): IConvoDebugXMLOutput
@@ -291,7 +265,7 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
         let xmlContent: string;
         let debugXML: IConvoDebugXMLData = null;
 
-        const convoDir = ConvoLoader.debugConvoDir(this.getId());
+        const convoDir = EmbedLoader.debugConvoDir(this.getId());
 
         if(!fs.existsSync(convoDir)){
             fs.mkdirSync(convoDir, { recursive: true });
@@ -326,5 +300,4 @@ class ConvoLoader<LLMChat extends BaseChatModel> {
 
 }
 
-export default ConvoLoader;
-export { IChainCallOutput, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams, IBaseLangchainHyperParams };
+export { EmbedLoader, IChainCallOutput, IConvoDebugXMLData, IEmbeddingsHandler, ISplitterParams, IBaseLangchainHyperParams };
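
The practical effect of this refactor: the loader no longer caches the vector store on itself. `splitDocs()` now returns the `RWSVectorStore`, `similaritySearch()` takes the store as an explicit argument, and `getStore()` plus the Bedrock-specific chat wiring in the old `setPrompt()` are gone. A minimal sketch of the new flow, assuming an already-constructed loader (the `EmbedLoader` constructor is not part of this diff, so the instance and splitter params are only declared here):

```ts
import { RWSEmbed, RWSVectorStore, ISplitterParams } from '@rws-framework/ai-tools';

// Construction is elided: the constructor signature is outside this diff.
declare const embedLoader: RWSEmbed<any>;
declare const params: ISplitterParams;

async function findRelevantChunks(filePath: string, query: string): Promise<string> {
    // 2.0.0: splitDocs() returns the store instead of stashing it on the loader
    const store: RWSVectorStore = await embedLoader.splitDocs(filePath, params);

    // 2.0.0: the store is passed in explicitly; getStore() no longer exists
    return embedLoader.similaritySearch(query, 5, store);
}
```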
package/src/models/convo/VectorStore.ts CHANGED
@@ -26,6 +26,11 @@ export default class RWSVectorStore
     {
         return this.faiss;
     }
+
+    getDocs()
+    {
+        return this.docs;
+    }
 }
 
 export {
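
`RWSVectorStore` gains a `getDocs()` accessor alongside the existing `getFaiss()`. A one-line sketch (the store instance is assumed, e.g. as returned by `splitDocs()` above):

```ts
import { RWSVectorStore } from '@rws-framework/ai-tools';

declare const store: RWSVectorStore;
const docs = store.getDocs(); // new in 2.0.0; complements store.getFaiss()
```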
package/src/models/prompts/_prompt.ts CHANGED
@@ -1,90 +1,47 @@
 import { Readable } from 'stream';
 import { PromptTemplate } from '@langchain/core/prompts';
-import ConvoLoader, { IChainCallOutput } from '../../models/convo/ConvoLoader';
-import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
-import { IterableReadableStream } from '@langchain/core/utils/stream';
-import { ChainValues } from '@langchain/core/utils/types';
-import { IContextToken } from '../../types/IContextToken';
+import { EmbedLoader, IChainCallOutput } from '../convo/EmbedLoader';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { IToolCall } from '../../types/IPrompt'
+import {
+    IPromptSender,
+    IPromptEnchantment,
+    IPromptParams,
+    IPromptHyperParameters,
+    IRWSPromptRequestExecutor,
+    IRWSPromptStreamExecutor,
+    IRWSSinglePromptRequestExecutor,
+    IRWSPromptJSON,
+    ChainStreamType,
+    ILLMChunk,
+    IAITool,
+    IRWSHistoryMessage,
+    InputType,
+    CompoundInput,
+    ToolHandler
+} from '../../types/IPrompt';
+import { IContextToken } from '../../types/IContextToken';
 
-interface IPromptHyperParameters {
-    temperature: number,
-    top_k?: number,
-    top_p?: number,
-    [key: string]: number
-}
-
-interface IRWSHistoryMessage {
-    content: string, creator: string
-}
-
-interface ILLMChunk {
-    content: string
-    status: string
-}
-
-interface IPromptParams {
-    hyperParameters?: IPromptHyperParameters;
-    input?: string;
-    modelId: string;
-    modelType: string;
-}
-
-interface IPromptEnchantment {
-    enhancementId: string,
-    enhancementName: string,
-    enhancementParams: any,
-    input: string
-    output: string
-}
-
-type IPromptSender = (prompt: RWSPrompt) => Promise<void>;
-
-interface IRWSPromptRequestExecutor {
-    promptRequest: (prompt: RWSPrompt, contextToken?: IContextToken | null, intruderPrompt?: string | null, debugVars?: any) => Promise<RWSPrompt>
-}
-
-
-interface IRWSSinglePromptRequestExecutor {
-    singlePromptRequest: (prompt: RWSPrompt, contextToken?: IContextToken | null, intruderPrompt?: string | null, ensureJson?: boolean, debugVars?: any) => Promise<RWSPrompt>
-}
-
-
-interface IRWSPromptStreamExecutor {
-    promptStream: (prompt: RWSPrompt, read: (chunk: ILLMChunk) => void, end: () => void, debugVars?: any) => Promise<RWSPrompt>
-}
-
-interface IRWSPromptJSON {
-    input: string;
-    enhancedInput: IPromptEnchantment[];
-    sentInput: string;
-    originalInput: string;
-    output: string;
-    modelId: string;
-    modelType: string;
-    multiTemplate: PromptTemplate;
-    convo: { id: string };
-    hyperParameters: IPromptHyperParameters;
-    created_at: string;
-    var_storage: any;
+type EntryParams = {
+    modelId: string,
+    body: string,
 }
 
-type ChainStreamType = AsyncGenerator<IterableReadableStream<ChainValues>>;
 
 class RWSPrompt {
     public _stream: ChainStreamType;
-    private input: string = '';
+    private input: CompoundInput[] = [];
     private enhancedInput: IPromptEnchantment[];
-    private sentInput: string = '';
-    private originalInput: string = '';
+    private sentInput: CompoundInput[] = [];
+    private originalInput: CompoundInput[] = [];
     private output: string = '';
     private modelId: string;
     private modelType: string;
     private multiTemplate: PromptTemplate;
-    private convo: ConvoLoader<any>;
+    private embedLoader: EmbedLoader<any>;
     private hyperParameters: IPromptHyperParameters;
     private created_at: Date;
-
+    private toolHandlers: Map<string, ToolHandler> = new Map();
     private varStorage: any = {};
 
     private onStream = (chunk: string) => {
@@ -123,7 +80,7 @@ class RWSPrompt {
     addEnchantment(enchantment: IPromptEnchantment): void
     {
         this.enhancedInput.push(enchantment);
-        this.input = enchantment.input;
+        this.input.push(enchantment.input);
     }
 
     getEnchantedInput(): string | null
@@ -136,23 +93,23 @@ class RWSPrompt {
         return this.modelId;
     }
 
-    readSentInput(): string
+    readSentInput(): CompoundInput[]
     {
         return this.sentInput;
     }
 
-    readInput(): string
+    readInput(): CompoundInput[]
     {
         return this.input;
     }
 
 
-    readBaseInput(): string
+    readBaseInput(): CompoundInput[]
     {
         return this.originalInput;
     }
 
-    setBaseInput(input: string): RWSPrompt
+    setBaseInput(input: CompoundInput[]): RWSPrompt
     {
         this.originalInput = input;
 
@@ -214,16 +171,16 @@ class RWSPrompt {
         return this.multiTemplate;
     }
 
-    setConvo(convo: ConvoLoader<BaseChatModel>): RWSPrompt
+    setEmbedLoader(embedLoader: EmbedLoader<BaseChatModel>): RWSPrompt
     {
-        this.convo = convo.setPrompt(this);
+        this.embedLoader = embedLoader;
 
         return this;
     }
 
-    getConvo<T extends BaseChatModel>(): ConvoLoader<T>
+    getEmbedLoader<T extends BaseChatModel>(): EmbedLoader<T>
     {
-        return this.convo;
+        return this.embedLoader;
     }
 
     replacePromptVar(key: string, val: string)
@@ -236,28 +193,28 @@ class RWSPrompt {
         return [this.modelType, this.modelId];
     }
 
-    async requestWith(executor: IRWSPromptRequestExecutor, intruderPrompt: string = null, debugVars: any = {}): Promise<void>
+    async requestWith(executor: IRWSPromptRequestExecutor, intruderPrompt: string = null, debugVars: any = {}, tools?: IAITool[]): Promise<void>
     {
         this.sentInput = this.input;
-        const returnedRWS = await executor.promptRequest(this, null, intruderPrompt, debugVars);
+        const returnedRWS = await executor.promptRequest(this, { intruderPrompt, debugVars, tools });
         this.output = returnedRWS.readOutput();
     }
 
-    async singleRequestWith(executor: IRWSSinglePromptRequestExecutor, intruderPrompt: string = null, ensureJson: boolean = false): Promise<void>
+    async singleRequestWith(executor: IRWSSinglePromptRequestExecutor, intruderPrompt: string = null, ensureJson: boolean = false, tools?: IAITool[]): Promise<void>
     {
-        await executor.singlePromptRequest(this, null, intruderPrompt, ensureJson);
+        await executor.singlePromptRequest(this, { intruderPrompt, ensureJson, tools });
         this.sentInput = this.input;
     }
 
-    async streamWith(executor: IRWSPromptStreamExecutor, read: (chunk: ILLMChunk) => void, end: () => void = () => {}, debugVars: any = {}): Promise<RWSPrompt>
+    async streamWith(executor: IRWSPromptStreamExecutor, read: (chunk: ILLMChunk) => void, end: () => void = () => {}, debugVars: any = {}, tools?: IAITool[]): Promise<RWSPrompt>
     {
         this.sentInput = this.input;
-        return executor.promptStream(this, read, end, debugVars);
+        return executor.promptStream(this, read, end, { debugVars, tools });
     }
 
-    setInput(content: string): RWSPrompt
+    addInput(content: CompoundInput): RWSPrompt
     {
-        this.input = content;
+        this.input.push(content);
         return this;
     }
 
@@ -272,35 +229,6 @@ class RWSPrompt {
         return this;
     }
 
-    async _oldreadStream(stream: Readable, react: (chunk: string) => void): Promise<void>
-    {
-        let first = true;
-        const chunks: string[] = []; // Replace 'any' with the actual type of your chunks
-
-        for await (const event of stream) {
-            // Assuming 'event' has a specific structure. Adjust according to actual event structure.
-            if ('chunk' in event && event.chunk.bytes) {
-                const chunk = JSON.parse(Buffer.from(event.chunk.bytes).toString('utf-8'));
-                if(first){
-                    console.log('chunk', chunk);
-                    first = false;
-                }
-
-                react(chunk.completion);
-
-                chunks.push(chunk.completion || chunk.generation ); // Use the actual property of 'chunk' you're interested in
-            } else if (
-                'internalServerException' in event ||
-                'modelStreamErrorException' in event ||
-                'throttlingException' in event ||
-                'validationException' in event
-            ) {
-                console.error(event);
-                break;
-            }
-        }
-    }
-
     private async isChainStreamType(source: any): Promise<boolean> {
         if (source && typeof source[Symbol.asyncIterator] === 'function') {
             const asyncIterator = source[Symbol.asyncIterator]();
@@ -319,7 +247,7 @@ class RWSPrompt {
         return false;
     }
 
-    async readStreamAsText(readableStream: ReadableStream, callback: (txt: string) => void) {
+    async readStreamAsText(readableStream: ReadableStream, callback: (txt: string) => void) {
         const reader = readableStream.getReader();
 
         let readResult: any;
@@ -346,8 +274,35 @@ class RWSPrompt {
         if(callback){
             callback(messages, prompt);
         }else{
-            this.input = prompt + this.input;
+            this.input = [{type: 'text', text: prompt}, ...this.input];
+        }
+    }
+
+    registerToolHandlers(toolHandlers: { [key: string]: ToolHandler }){
+        for(const key of Object.keys(toolHandlers)){
+            this.toolHandlers.set(key, toolHandlers[key]);
+        }
+    }
+
+    async callTools<T = unknown>(tools: IToolCall[]): Promise<T[]>
+    {
+        const results: T[] = [];
+        for(const tool of tools){
+            if(this.toolHandlers.has(tool.name)){
+                const result = await this.callAiTool<T>(tool);
+                if(result){
+                    results.push(result);
+                }
+            }
         }
+
+        return results;
+    }
+
+    private async callAiTool<T>(tool: IToolCall): Promise<T>
+    {
+        const handler = this.toolHandlers.get(tool.name);
+        return await handler(tool.arguments);
     }
 
     toJSON(): IRWSPromptJSON
@@ -361,8 +316,8 @@ class RWSPrompt {
             modelId: this.modelId,
             modelType: this.modelType,
             multiTemplate: this.multiTemplate,
-            convo: {
-                id: this.convo.getId()
+            embed: {
+                id: this.embedLoader.getId()
             },
             hyperParameters: this.hyperParameters,
             var_storage: this.varStorage,
@@ -373,16 +328,4 @@ class RWSPrompt {
 
 export default RWSPrompt;
 
-export {
-    IPromptSender,
-    IPromptEnchantment,
-    IPromptParams,
-    IPromptHyperParameters,
-    IRWSPromptRequestExecutor,
-    IRWSPromptStreamExecutor,
-    IRWSSinglePromptRequestExecutor,
-    IRWSPromptJSON,
-    IChainCallOutput,
-    ChainStreamType,
-    ILLMChunk
-};
+export { IChainCallOutput };
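
For `RWSPrompt`, the breaking changes are: string inputs became `CompoundInput[]` (`setInput(string)` is replaced by `addInput(CompoundInput)`), executor calls forward an `IAIRequestOptions` object instead of positional arguments, and tool handling is new. A sketch of the 2.0.0 surface; the prompt construction and a concrete executor are outside this diff, and the `get_weather` tool and its handler are hypothetical (the tool definition appears in the IPrompt.ts example below):

```ts
import { RWSPrompt, IToolCall, ToolHandler, IAITool, IRWSPromptRequestExecutor } from '@rws-framework/ai-tools';

// Assumptions: how these are obtained is not shown in this diff.
declare const prompt: RWSPrompt;
declare const executor: IRWSPromptRequestExecutor;
declare const weatherTool: IAITool;

async function demo(): Promise<void> {
    // input is now a list of CompoundInput parts (text and/or image)
    prompt.addInput({ type: 'text', text: 'What is the weather in Warsaw?' });

    // tool handlers are async functions keyed by tool name (hypothetical handler)
    const handlers: { [key: string]: ToolHandler } = {
        get_weather: async (args: Record<string, any>) => ({ city: args.city, tempC: 21 })
    };
    prompt.registerToolHandlers(handlers);

    // requestWith() still takes positional intruderPrompt/debugVars plus optional
    // tools, and wraps them into an IAIRequestOptions object for the executor
    await prompt.requestWith(executor, null, {}, [weatherTool]);

    // if the model requested tool calls, they can be dispatched by name
    const calls: IToolCall[] = [{ id: 'call_1', name: 'get_weather', arguments: { city: 'Warsaw' } }];
    const results = await prompt.callTools(calls);
    console.log(results);
}
```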
package/src/types/IPrompt.ts ADDED
@@ -0,0 +1,161 @@
+import { PromptTemplate } from '@langchain/core/prompts';
+import { IterableReadableStream } from '@langchain/core/utils/stream';
+import { ChainValues } from '@langchain/core/utils/types';
+import { IContextToken } from './IContextToken';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+
+// General tool interfaces for AI models
+interface IAIToolParameterBase {
+    type: string;
+    description?: string;
+    enum?: string[];
+    required?: boolean;
+}
+
+interface IAIToolParameterObject extends IAIToolParameterBase {
+    type: 'object';
+    properties: Record<string, IAIToolParameter>;
+}
+
+interface IAIToolParameterArray extends IAIToolParameterBase {
+    type: 'array';
+    items: IAIToolParameter;
+}
+
+interface IAIToolParameterPrimitive extends IAIToolParameterBase {
+    type: 'string' | 'number' | 'boolean' | 'integer';
+}
+
+type IAIToolParameter = IAIToolParameterObject | IAIToolParameterArray | IAIToolParameterPrimitive;
+
+interface IAIToolSchema {
+    type: 'object';
+    properties: Record<string, IAIToolParameter>;
+    required?: string[];
+}
+
+interface IToolCall {
+    id: string;
+    name: string;
+    arguments: Record<string, any>;
+}
+
+interface IAITool {
+    name: string;
+    description: string;
+    input_schema: IAIToolSchema;
+}
+
+interface IPromptHyperParameters {
+    temperature: number,
+    top_k?: number,
+    top_p?: number,
+    [key: string]: number
+}
+
+interface IRWSHistoryMessage {
+    content: string, creator: string
+}
+
+interface ILLMChunk {
+    content: string
+    status: string
+}
+
+interface IPromptParams {
+    hyperParameters?: IPromptHyperParameters;
+    input: CompoundInput[];
+    modelId: string;
+    modelType: string;
+}
+
+type InputType = 'text' | 'image';
+
+interface IPromptEnchantment {
+    enhancementId: string,
+    enhancementName: string,
+    enhancementParams: any,
+    input: CompoundInput
+    output: string
+}
+
+// Forward reference to RWSPrompt to avoid circular dependencies
+type RWSPrompt = import('../models/prompts/_prompt').default;
+
+type IPromptSender = (prompt: RWSPrompt) => Promise<void>;
+
+interface IAIRequestOptions {
+    contextToken?: IContextToken | null,
+    intruderPrompt?: string | null,
+    ensureJson?: boolean,
+    debugVars?: any,
+    tools?: IAITool[]
+}
+
+interface IRWSPromptRequestExecutor {
+    promptRequest: (prompt: RWSPrompt, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+interface IRWSSinglePromptRequestExecutor {
+    singlePromptRequest: (prompt: RWSPrompt, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+interface IRWSPromptStreamExecutor {
+    promptStream: (prompt: RWSPrompt, read: (chunk: ILLMChunk) => void, end: () => void, aiOptions?: IAIRequestOptions) => Promise<RWSPrompt>
+}
+
+type ToolHandler<T = any> = (...args: any) => Promise<T>;
+
+interface IRWSPromptJSON {
+    input: CompoundInput[];
+    enhancedInput: IPromptEnchantment[];
+    sentInput: CompoundInput[];
+    originalInput: CompoundInput[];
+    output: string;
+    modelId: string;
+    modelType: string;
+    multiTemplate: PromptTemplate;
+    embed?: { id: string };
+    hyperParameters: IPromptHyperParameters;
+    created_at: string;
+    var_storage: any;
+}
+
+type ChainStreamType = AsyncGenerator<IterableReadableStream<ChainValues>>;
+
+interface CompoundInput {
+    type: InputType,
+    text?: string,
+    source?: {
+        type: string,
+        media_type: string,
+        data: string
+    }
+}
+
+export {
+    IPromptSender,
+    IPromptEnchantment,
+    IPromptParams,
+    IPromptHyperParameters,
+    IRWSPromptRequestExecutor,
+    IRWSPromptStreamExecutor,
+    IRWSSinglePromptRequestExecutor,
+    IRWSPromptJSON,
+    ChainStreamType,
+    ILLMChunk,
+    IAIRequestOptions,
+    IAITool,
+    IAIToolSchema,
+    IAIToolParameter,
+    IAIToolParameterBase,
+    IAIToolParameterObject,
+    IAIToolParameterArray,
+    IAIToolParameterPrimitive,
+    IRWSHistoryMessage,
+    InputType,
+    CompoundInput,
+    IToolCall,
+    ToolHandler
+};
+
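
The `input_schema` shape mirrors JSON-Schema-style tool declarations (as used by, e.g., Anthropic's tool-use API). A hypothetical tool definition that type-checks against these interfaces; the name, description, and fields are illustrative only:

```ts
import { IAITool } from '@rws-framework/ai-tools';

// Hypothetical weather tool matching the IAITool / IAIToolSchema shapes above.
const weatherTool: IAITool = {
    name: 'get_weather',
    description: 'Returns the current weather for a given city',
    input_schema: {
        type: 'object',
        properties: {
            city: { type: 'string', description: 'City name' },
            units: { type: 'string', enum: ['metric', 'imperial'] }
        },
        required: ['city']
    }
};

export { weatherTool };
```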