cactus-react-native 0.1.1 → 0.1.3
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +872 -146
- package/android/src/main/CMakeLists.txt +1 -1
- package/android/src/main/jniLibs/arm64-v8a/libcactus.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/libcactus_v8.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/libcactus_v8_2.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/libcactus_v8_2_dotprod.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/libcactus_v8_2_dotprod_i8mm.so +0 -0
- package/android/src/main/jniLibs/arm64-v8a/libcactus_v8_2_i8mm.so +0 -0
- package/android/src/main/jniLibs/x86_64/libcactus.so +0 -0
- package/android/src/main/jniLibs/x86_64/libcactus_x86_64.so +0 -0
- package/ios/CMakeLists.txt +6 -6
- package/ios/cactus.xcframework/ios-arm64/cactus.framework/Headers/cactus.h +12 -0
- package/ios/cactus.xcframework/ios-arm64/cactus.framework/cactus +0 -0
- package/ios/cactus.xcframework/ios-arm64_x86_64-simulator/cactus.framework/Headers/cactus.h +12 -0
- package/ios/cactus.xcframework/ios-arm64_x86_64-simulator/cactus.framework/cactus +0 -0
- package/ios/cactus.xcframework/tvos-arm64/cactus.framework/Headers/cactus.h +12 -0
- package/ios/cactus.xcframework/tvos-arm64/cactus.framework/cactus +0 -0
- package/ios/cactus.xcframework/tvos-arm64_x86_64-simulator/cactus.framework/Headers/cactus.h +12 -0
- package/ios/cactus.xcframework/tvos-arm64_x86_64-simulator/cactus.framework/cactus +0 -0
- package/lib/commonjs/index.js.map +1 -1
- package/lib/commonjs/lm.js.map +1 -0
- package/lib/commonjs/tts.js.map +1 -0
- package/lib/commonjs/vlm.js.map +0 -0
- package/lib/module/index.js.map +1 -1
- package/lib/module/lm.js.map +0 -0
- package/lib/module/tts.js.map +1 -0
- package/lib/module/vlm.js.map +1 -0
- package/lib/typescript/index.d.ts +5 -1
- package/lib/typescript/index.d.ts.map +1 -1
- package/lib/typescript/lm.d.ts +41 -0
- package/lib/typescript/lm.d.ts.map +1 -0
- package/lib/typescript/tts.d.ts +10 -0
- package/lib/typescript/tts.d.ts.map +1 -0
- package/lib/typescript/vlm.d.ts +44 -0
- package/lib/typescript/vlm.d.ts.map +1 -0
- package/package.json +2 -1
- package/src/index.ts +11 -1
- package/src/lm.ts +49 -0
- package/src/tts.ts +45 -0
- package/src/vlm.ts +70 -0
- package/lib/commonjs/NativeCactus.js +0 -10
- package/lib/commonjs/chat.js +0 -37
- package/lib/commonjs/grammar.js +0 -560
- package/lib/commonjs/index.js +0 -412
- package/lib/commonjs/tools.js +0 -118
- package/lib/commonjs/tools.js.map +0 -1
- package/lib/module/NativeCactus.js +0 -8
- package/lib/module/chat.js +0 -33
- package/lib/module/grammar.js +0 -553
- package/lib/module/index.js +0 -363
- package/lib/module/tools.js +0 -110
- package/lib/module/tools.js.map +0 -1
package/lib/module/index.js
DELETED
@@ -1,363 +0,0 @@
````js
"use strict";

import { NativeEventEmitter, DeviceEventEmitter, Platform } from 'react-native';
import Cactus from './NativeCactus';
import { SchemaGrammarConverter, convertJsonSchemaToGrammar } from './grammar';
import { formatChat } from './chat';
import { Tools, injectToolsIntoMessages, parseAndExecuteTool, updateMessagesWithToolCall } from './tools';
export { SchemaGrammarConverter, convertJsonSchemaToGrammar, Tools };
const EVENT_ON_INIT_CONTEXT_PROGRESS = '@Cactus_onInitContextProgress';
const EVENT_ON_TOKEN = '@Cactus_onToken';
const EVENT_ON_NATIVE_LOG = '@Cactus_onNativeLog';
let EventEmitter;
if (Platform.OS === 'ios') {
  // @ts-ignore
  EventEmitter = new NativeEventEmitter(Cactus);
}
if (Platform.OS === 'android') {
  EventEmitter = DeviceEventEmitter;
}
const logListeners = [];

// @ts-ignore
if (EventEmitter) {
  EventEmitter.addListener(EVENT_ON_NATIVE_LOG, evt => {
    logListeners.forEach(listener => listener(evt.level, evt.text));
  });
  // Trigger unset to use default log callback
  Cactus?.toggleNativeLog?.(false)?.catch?.(() => {});
}
const getJsonSchema = responseFormat => {
  if (responseFormat?.type === 'json_schema') {
    return responseFormat.json_schema?.schema;
  }
  if (responseFormat?.type === 'json_object') {
    return responseFormat.schema || {};
  }
  return null;
};
export class LlamaContext {
  gpu = false;
  reasonNoGPU = '';
  constructor({ contextId, gpu, reasonNoGPU, model }) {
    this.id = contextId;
    this.gpu = gpu;
    this.reasonNoGPU = reasonNoGPU;
    this.model = model;
  }

  /**
   * Load cached prompt & completion state from a file.
   */
  async loadSession(filepath) {
    let path = filepath;
    if (path.startsWith('file://')) path = path.slice(7);
    return Cactus.loadSession(this.id, path);
  }

  /**
   * Save current cached prompt & completion state to a file.
   */
  async saveSession(filepath, options) {
    return Cactus.saveSession(this.id, filepath, options?.tokenSize || -1);
  }
  isLlamaChatSupported() {
    return !!this.model.chatTemplates.llamaChat;
  }
  isJinjaSupported() {
    const { minja } = this.model.chatTemplates;
    return !!minja?.toolUse || !!minja?.default;
  }
  async getFormattedChat(messages, template, params) {
    const chat = formatChat(messages);
    const useJinja = this.isJinjaSupported() && params?.jinja;
    let tmpl = this.isLlamaChatSupported() || useJinja ? undefined : 'chatml';
    if (template) tmpl = template; // Force replace if provided
    const jsonSchema = getJsonSchema(params?.response_format);
    return Cactus.getFormattedChat(this.id, JSON.stringify(chat), tmpl, {
      jinja: useJinja,
      json_schema: jsonSchema ? JSON.stringify(jsonSchema) : undefined,
      tools: params?.tools ? JSON.stringify(params.tools) : undefined,
      parallel_tool_calls: params?.parallel_tool_calls ? JSON.stringify(params.parallel_tool_calls) : undefined,
      tool_choice: params?.tool_choice
    });
  }
  async completionWithTools(params, callback, recursionCount = 0, recursionLimit = 3) {
    if (!params.messages) {
      // tool calling only works with messages
      return this.completion(params, callback);
    }
    if (!params.tools) {
      // no tools => default completion
      return this.completion(params, callback);
    }
    if (recursionCount >= recursionLimit) {
      return this.completion(params, callback);
    }
    let messages = [...(params?.messages || [])]; // avoid mutating the original

    if (recursionCount === 0) {
      messages = injectToolsIntoMessages(messages, params.tools);
    }
    const result = await this.completion({ ...params, messages }, callback);
    const { toolCalled, toolName, toolInput, toolOutput } = await parseAndExecuteTool(result, params.tools);
    if (toolCalled && toolName && toolInput) {
      const newMessages = updateMessagesWithToolCall(messages, toolName, toolInput, toolOutput);
      return await this.completionWithTools({ ...params, messages: newMessages }, callback, recursionCount + 1, recursionLimit);
    }
    return result;
  }
  async completion(params, callback) {
    const nativeParams = {
      ...params,
      prompt: params.prompt || '',
      emit_partial_completion: !!callback
    };
    if (params.messages) {
      // messages always win
      const formattedResult = await this.getFormattedChat(params.messages, params.chat_template || params.chatTemplate, {
        jinja: params.jinja,
        tools: params.tools,
        parallel_tool_calls: params.parallel_tool_calls,
        tool_choice: params.tool_choice
      });
      if (typeof formattedResult === 'string') {
        nativeParams.prompt = formattedResult || '';
      } else {
        nativeParams.prompt = formattedResult.prompt || '';
        if (typeof formattedResult.chat_format === 'number') nativeParams.chat_format = formattedResult.chat_format;
        if (formattedResult.grammar) nativeParams.grammar = formattedResult.grammar;
        if (typeof formattedResult.grammar_lazy === 'boolean') nativeParams.grammar_lazy = formattedResult.grammar_lazy;
        if (formattedResult.grammar_triggers) nativeParams.grammar_triggers = formattedResult.grammar_triggers;
        if (formattedResult.preserved_tokens) nativeParams.preserved_tokens = formattedResult.preserved_tokens;
        if (formattedResult.additional_stops) {
          if (!nativeParams.stop) nativeParams.stop = [];
          nativeParams.stop.push(...formattedResult.additional_stops);
        }
      }
    } else {
      nativeParams.prompt = params.prompt || '';
    }
    if (nativeParams.response_format && !nativeParams.grammar) {
      const jsonSchema = getJsonSchema(params.response_format);
      if (jsonSchema) nativeParams.json_schema = JSON.stringify(jsonSchema);
    }
    let tokenListener = callback && EventEmitter.addListener(EVENT_ON_TOKEN, evt => {
      const { contextId, tokenResult } = evt;
      if (contextId !== this.id) return;
      callback(tokenResult);
    });
    if (!nativeParams.prompt) throw new Error('Prompt is required');
    const promise = Cactus.completion(this.id, nativeParams);
    return promise.then(completionResult => {
      tokenListener?.remove();
      tokenListener = null;
      return completionResult;
    }).catch(err => {
      tokenListener?.remove();
      tokenListener = null;
      throw err;
    });
  }
  stopCompletion() {
    return Cactus.stopCompletion(this.id);
  }
  tokenize(text) {
    return Cactus.tokenize(this.id, text);
  }
  detokenize(tokens) {
    return Cactus.detokenize(this.id, tokens);
  }
  embedding(text, params) {
    return Cactus.embedding(this.id, text, params || {});
  }
  async bench(pp, tg, pl, nr) {
    const result = await Cactus.bench(this.id, pp, tg, pl, nr);
    const [modelDesc, modelSize, modelNParams, ppAvg, ppStd, tgAvg, tgStd] = JSON.parse(result);
    return { modelDesc, modelSize, modelNParams, ppAvg, ppStd, tgAvg, tgStd };
  }
  async applyLoraAdapters(loraList) {
    let loraAdapters = [];
    if (loraList) loraAdapters = loraList.map(l => ({
      path: l.path.replace(/file:\/\//, ''),
      scaled: l.scaled
    }));
    return Cactus.applyLoraAdapters(this.id, loraAdapters);
  }
  async removeLoraAdapters() {
    return Cactus.removeLoraAdapters(this.id);
  }
  async getLoadedLoraAdapters() {
    return Cactus.getLoadedLoraAdapters(this.id);
  }
  async release() {
    return Cactus.releaseContext(this.id);
  }
}
export async function toggleNativeLog(enabled) {
  return Cactus.toggleNativeLog(enabled);
}
export function addNativeLogListener(listener) {
  logListeners.push(listener);
  return {
    remove: () => {
      logListeners.splice(logListeners.indexOf(listener), 1);
    }
  };
}
export async function setContextLimit(limit) {
  return Cactus.setContextLimit(limit);
}
let contextIdCounter = 0;
const contextIdRandom = () => process.env.NODE_ENV === 'test' ? 0 : Math.floor(Math.random() * 100000);
const modelInfoSkip = [
  // Large fields
  'tokenizer.ggml.tokens', 'tokenizer.ggml.token_type', 'tokenizer.ggml.merges', 'tokenizer.ggml.scores'];
export async function loadLlamaModelInfo(model) {
  let path = model;
  if (path.startsWith('file://')) path = path.slice(7);
  return Cactus.modelInfo(path, modelInfoSkip);
}
const poolTypeMap = {
  // -1 is unspecified as undefined
  none: 0,
  mean: 1,
  cls: 2,
  last: 3,
  rank: 4
};
export async function initLlama({
  model,
  is_model_asset: isModelAsset,
  pooling_type: poolingType,
  lora,
  lora_list: loraList,
  ...rest
}, onProgress) {
  let path = model;
  if (path.startsWith('file://')) path = path.slice(7);
  let loraPath = lora;
  if (loraPath?.startsWith('file://')) loraPath = loraPath.slice(7);
  let loraAdapters = [];
  if (loraList) loraAdapters = loraList.map(l => ({
    path: l.path.replace(/file:\/\//, ''),
    scaled: l.scaled
  }));
  const contextId = contextIdCounter + contextIdRandom();
  contextIdCounter += 1;
  let removeProgressListener = null;
  if (onProgress) {
    removeProgressListener = EventEmitter.addListener(EVENT_ON_INIT_CONTEXT_PROGRESS, evt => {
      if (evt.contextId !== contextId) return;
      onProgress(evt.progress);
    });
  }
  const poolType = poolTypeMap[poolingType];
  const {
    gpu,
    reasonNoGPU,
    model: modelDetails,
    androidLib
  } = await Cactus.initContext(contextId, {
    model: path,
    is_model_asset: !!isModelAsset,
    use_progress_callback: !!onProgress,
    pooling_type: poolType,
    lora: loraPath,
    lora_list: loraAdapters,
    ...rest
  }).catch(err => {
    removeProgressListener?.remove();
    throw err;
  });
  removeProgressListener?.remove();
  return new LlamaContext({
    contextId,
    gpu,
    reasonNoGPU,
    model: modelDetails,
    androidLib
  });
}
export async function releaseAllLlama() {
  return Cactus.releaseAllContexts();
}
export const initContext = async params => {
  return await Cactus.initContext(contextIdCounter++, params);
};
export const initMultimodal = async (contextId, mmprojPath, useGpu = false) => {
  return await Cactus.initMultimodal(contextId, mmprojPath, useGpu);
};
export const isMultimodalEnabled = async contextId => {
  return await Cactus.isMultimodalEnabled(contextId);
};
export const isMultimodalSupportVision = async contextId => {
  return await Cactus.isMultimodalSupportVision(contextId);
};
export const isMultimodalSupportAudio = async contextId => {
  return await Cactus.isMultimodalSupportAudio(contextId);
};
export const releaseMultimodal = async contextId => {
  return await Cactus.releaseMultimodal(contextId);
};
export const multimodalCompletion = async (contextId, prompt, mediaPaths, params) => {
  return await Cactus.multimodalCompletion(contextId, prompt, mediaPaths, params);
};
export const initVocoder = async (contextId, vocoderModelPath) => {
  return await Cactus.initVocoder(contextId, vocoderModelPath);
};
export const isVocoderEnabled = async contextId => {
  return await Cactus.isVocoderEnabled(contextId);
};
export const getTTSType = async contextId => {
  return await Cactus.getTTSType(contextId);
};
export const getFormattedAudioCompletion = async (contextId, speakerJsonStr, textToSpeak) => {
  return await Cactus.getFormattedAudioCompletion(contextId, speakerJsonStr, textToSpeak);
};
export const getAudioCompletionGuideTokens = async (contextId, textToSpeak) => {
  return await Cactus.getAudioCompletionGuideTokens(contextId, textToSpeak);
};
export const decodeAudioTokens = async (contextId, tokens) => {
  return await Cactus.decodeAudioTokens(contextId, tokens);
};
export const releaseVocoder = async contextId => {
  return await Cactus.releaseVocoder(contextId);
};
export const tokenize = async (contextId, text, mediaPaths) => {
  if (mediaPaths && mediaPaths.length > 0) {
    return await Cactus.tokenize(contextId, text, mediaPaths);
  } else {
    return await Cactus.tokenize(contextId, text);
  }
};
//# sourceMappingURL=index.js.map
````
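For orientation, here is a minimal sketch of how this removed 0.1.1 surface was driven from application code. It relies only on the exports visible in the deleted file above (`initLlama`, `addNativeLogListener`, `LlamaContext.completion` with a streaming token callback, `release`); the model path and the `n_ctx` option are hypothetical placeholders, and the 0.1.3 replacement surface is not shown here beyond the new `lm`/`tts`/`vlm` files in the summary.

```ts
// Hypothetical usage of the removed 0.1.1 API; paths and extra options are placeholders.
import { initLlama, addNativeLogListener } from 'cactus-react-native';

async function demo() {
  const logSub = addNativeLogListener((level, text) => console.log(`[native:${level}] ${text}`));

  // initLlama strips an optional file:// prefix, assigns a context id, and
  // forwards the remaining options to the native initContext call.
  const context = await initLlama(
    { model: 'file:///data/models/model.gguf', n_ctx: 2048 },   // placeholder path; n_ctx assumed pass-through
    progress => console.log(`load progress: ${progress}`),      // EVENT_ON_INIT_CONTEXT_PROGRESS
  );

  // With a callback present, emit_partial_completion is set and each
  // @Cactus_onToken event for this context id is forwarded to the callback.
  const result = await context.completion(
    { messages: [{ role: 'user', content: 'Hello!' }] },
    tokenResult => console.log(tokenResult),
  );
  console.log('final result:', result);

  await context.release();
  logSub.remove();
}
```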
package/lib/module/tools.js
DELETED
@@ -1,110 +0,0 @@
````js
"use strict";

export class Tools {
  tools = new Map();
  add(func, description, parameters) {
    this.tools.set(func.name, {
      func,
      description,
      parameters,
      required: Object.entries(parameters).filter(([_, param]) => param.required).map(([key, _]) => key)
    });
    return func;
  }
  getSchemas() {
    return Array.from(this.tools.entries()).map(([name, { description, parameters, required }]) => ({
      type: "function",
      function: {
        name,
        description,
        parameters: {
          type: "object",
          properties: parameters,
          required
        }
      }
    }));
  }
  async execute(name, args) {
    const tool = this.tools.get(name);
    if (!tool) throw new Error(`Tool ${name} not found`);
    return await tool.func(...Object.values(args));
  }
}
export function injectToolsIntoMessages(messages, tools) {
  const newMessages = [...messages];
  const toolsSchemas = tools.getSchemas();
  const promptToolInjection = `You have access to the following functions. Use them if required -
${JSON.stringify(toolsSchemas, null, 2)}
Only use an available tool if needed. If a tool is chosen, respond ONLY with a JSON object matching the following schema:
\`\`\`json
{
"tool_name": "<name of the tool>",
"tool_input": {
"<parameter_name>": "<parameter_value>",
...
}
}
\`\`\`
Remember, if you are calling a tool, you must respond with the JSON object and the JSON object ONLY!
If no tool is needed, respond normally.
`;
  const systemMessage = newMessages.find(m => m.role === 'system');
  if (!systemMessage) {
    newMessages.unshift({
      role: 'system',
      content: promptToolInjection
    });
  } else {
    systemMessage.content = `${systemMessage.content}\n\n${promptToolInjection}`;
  }
  return newMessages;
}
export async function parseAndExecuteTool(result, tools) {
  const match = result.content.match(/```json\s*([\s\S]*?)\s*```/);
  if (!match || !match[1]) return {
    toolCalled: false
  };
  try {
    const jsonContent = JSON.parse(match[1]);
    const { tool_name, tool_input } = jsonContent;
    // console.log('Calling tool:', tool_name, tool_input);
    const toolOutput = (await tools.execute(tool_name, tool_input)) || true;
    // console.log('Tool called result:', toolOutput);

    return {
      toolCalled: true,
      toolName: tool_name,
      toolInput: tool_input,
      toolOutput
    };
  } catch (error) {
    // console.error('Error parsing JSON:', match, error);
    return {
      toolCalled: false
    };
  }
}
export function updateMessagesWithToolCall(messages, toolName, toolInput, toolOutput) {
  const newMessages = [...messages];
  newMessages.push({
    role: 'function-call',
    content: JSON.stringify({
      name: toolName,
      arguments: toolInput
    }, null, 2)
  });
  newMessages.push({
    role: 'function-response',
    content: JSON.stringify(toolOutput, null, 2)
  });
  return newMessages;
}
//# sourceMappingURL=tools.js.map
````
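The prompt-injection flow above pairs with `LlamaContext.completionWithTools` from the deleted index.js: tool schemas are appended to the system message, the model is asked to answer with a fenced JSON `tool_name`/`tool_input` object, and the parsed call is executed and fed back for up to three rounds. Below is a hedged sketch of that flow using only the 0.1.1 exports visible in this diff; the weather tool and model path are made-up placeholders.

```ts
// Hypothetical 0.1.1-era tool calling; getWeather and the model path are placeholders.
import { Tools, initLlama } from 'cactus-react-native';

const tools = new Tools();

// Tools.add keys the registry by the function's name and records which
// parameters carry required: true for the generated schema.
function getWeather(city: string) {
  return { city, forecast: 'sunny' }; // placeholder implementation
}
tools.add(getWeather, 'Look up the weather for a city', {
  city: { type: 'string', description: 'City name', required: true },
});

async function askWithTools() {
  const context = await initLlama({ model: 'file:///data/models/model.gguf' }); // placeholder path

  // completionWithTools injects the schemas on the first pass; when the reply
  // contains a fenced JSON tool call, parseAndExecuteTool runs the tool and the
  // method recurses with the tool output (recursionLimit defaults to 3).
  const answer = await context.completionWithTools({
    messages: [{ role: 'user', content: 'What is the weather in Lagos?' }],
    tools,
  });
  return answer;
}
```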
package/lib/module/tools.js.map
DELETED
@@ -1 +0,0 @@
````json
{"version":3,"names":["Tools","tools","Map","add","func","description","parameters","set","name","required","Object","entries","filter","_","param","map","key","getSchemas","Array","from","type","function","properties","execute","args","tool","get","Error","values","injectToolsIntoMessages","messages","newMessages","toolsSchemas","promptToolInjection","JSON","stringify","systemMessage","find","m","role","unshift","content","parseAndExecuteTool","result","match","toolCalled","jsonContent","parse","tool_name","tool_input","toolOutput","toolName","toolInput","error","updateMessagesWithToolCall","push","arguments"],"sourceRoot":"../../src","sources":["tools.ts"],"mappings":";;AAgBA,OAAO,MAAMA,KAAK,CAAC;EACTC,KAAK,GAAG,IAAIC,GAAG,CAAe,CAAC;EAEvCC,GAAGA,CACCC,IAAc,EACdC,WAAmB,EACnBC,UAAsC,EACtC;IACA,IAAI,CAACL,KAAK,CAACM,GAAG,CAACH,IAAI,CAACI,IAAI,EAAE;MACxBJ,IAAI;MACJC,WAAW;MACXC,UAAU;MACVG,QAAQ,EAAEC,MAAM,CAACC,OAAO,CAACL,UAAU,CAAC,CACjCM,MAAM,CAAC,CAAC,CAACC,CAAC,EAAEC,KAAK,CAAC,KAAKA,KAAK,CAACL,QAAQ,CAAC,CACtCM,GAAG,CAAC,CAAC,CAACC,GAAG,EAAEH,CAAC,CAAC,KAAKG,GAAG;IAC1B,CAAC,CAAC;IACF,OAAOZ,IAAI;EACb;EAEFa,UAAUA,CAAA,EAAG;IACT,OAAOC,KAAK,CAACC,IAAI,CAAC,IAAI,CAAClB,KAAK,CAACU,OAAO,CAAC,CAAC,CAAC,CAACI,GAAG,CAAC,CAAC,CAACP,IAAI,EAAE;MAAEH,WAAW;MAAEC,UAAU;MAAEG;IAAS,CAAC,CAAC,MAAM;MAC9FW,IAAI,EAAE,UAAU;MAChBC,QAAQ,EAAE;QACRb,IAAI;QACJH,WAAW;QACXC,UAAU,EAAE;UACVc,IAAI,EAAE,QAAQ;UACdE,UAAU,EAAEhB,UAAU;UACtBG;QACF;MACF;IACF,CAAC,CAAC,CAAC;EACL;EAEF,MAAMc,OAAOA,CAACf,IAAY,EAAEgB,IAAS,EAAE;IACnC,MAAMC,IAAI,GAAG,IAAI,CAACxB,KAAK,CAACyB,GAAG,CAAClB,IAAI,CAAC;IACjC,IAAI,CAACiB,IAAI,EAAE,MAAM,IAAIE,KAAK,CAAC,QAAQnB,IAAI,YAAY,CAAC;IACpD,OAAO,MAAMiB,IAAI,CAACrB,IAAI,CAAC,GAAGM,MAAM,CAACkB,MAAM,CAACJ,IAAI,CAAC,CAAC;EAClD;AACF;AAEA,OAAO,SAASK,uBAAuBA,CAACC,QAAsC,EAAE7B,KAAY,EAAgC;EAC1H,MAAM8B,WAAW,GAAG,CAAC,GAAGD,QAAQ,CAAC;EACjC,MAAME,YAAY,GAAG/B,KAAK,CAACgB,UAAU,CAAC,CAAC;EACvC,MAAMgB,mBAAmB,GAAG;AAC9B,EAAEC,IAAI,CAACC,SAAS,CAACH,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;AACvC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;EAED,MAAMI,aAAa,GAAGL,WAAW,CAACM,IAAI,CAACC,CAAC,IAAIA,CAAC,CAACC,IAAI,KAAK,QAAQ,CAAC;EAChE,IAAI,CAACH,aAAa,EAAE;IAChBL,WAAW,CAACS,OAAO,CAAC;MAChBD,IAAI,EAAE,QAAQ;MACdE,OAAO,EAAER;IACb,CAAC,CAAC;EACN,CAAC,MAAM;IACHG,aAAa,CAACK,OAAO,GAAG,GAAGL,aAAa,CAACK,OAAO,OAAOR,mBAAmB,EAAE;EAChF;EAEA,OAAOF,WAAW;AACpB;AAEA,OAAO,eAAeW,mBAAmBA,CAACC,MAA8B,EAAE1C,KAAY,EAAwF;EAC5K,MAAM2C,KAAK,GAAGD,MAAM,CAACF,OAAO,CAACG,KAAK,CAAC,4BAA4B,CAAC;EAEhE,IAAI,CAACA,KAAK,IAAI,CAACA,KAAK,CAAC,CAAC,CAAC,EAAE,OAAO;IAACC,UAAU,EAAE;EAAK,CAAC;EAEnD,IAAI;IACA,MAAMC,WAAW,GAAGZ,IAAI,CAACa,KAAK,CAACH,KAAK,CAAC,CAAC,CAAC,CAAC;IACxC,MAAM;MAAEI,SAAS;MAAEC;IAAW,CAAC,GAAGH,WAAW;IAC7C;IACA,MAAMI,UAAU,GAAG,OAAMjD,KAAK,CAACsB,OAAO,CAACyB,SAAS,EAAEC,UAAU,CAAC,KAAI,IAAI;IACrE;;IAEA,OAAO;MACHJ,UAAU,EAAE,IAAI;MAChBM,QAAQ,EAAEH,SAAS;MACnBI,SAAS,EAAEH,UAAU;MACrBC;IACJ,CAAC;EACL,CAAC,CAAC,OAAOG,KAAK,EAAE;IACZ;IACA,OAAO;MAACR,UAAU,EAAE;IAAK,CAAC;EAC9B;AACF;AAEA,OAAO,SAASS,0BAA0BA,CAACxB,QAAsC,EAAEqB,QAAgB,EAAEC,SAAc,EAAEF,UAAe,EAAgC;EAClK,MAAMnB,WAAW,GAAG,CAAC,GAAGD,QAAQ,CAAC;EAEjCC,WAAW,CAACwB,IAAI,CAAC;IACbhB,IAAI,EAAE,eAAe;IACrBE,OAAO,EAAEP,IAAI,CAACC,SAAS,CAAC;MAAC3B,IAAI,EAAE2C,QAAQ;MAAEK,SAAS,EAAEJ;IAAS,CAAC,EAAE,IAAI,EAAE,CAAC;EAC3E,CAAC,CAAC;EACFrB,WAAW,CAACwB,IAAI,CAAC;IACbhB,IAAI,EAAE,mBAAmB;IACzBE,OAAO,EAAEP,IAAI,CAACC,SAAS,CAACe,UAAU,EAAE,IAAI,EAAE,CAAC;EAC/C,CAAC,CAAC;EAEF,OAAOnB,WAAW;AACpB","ignoreList":[]}
````