@kernel.chat/kbot 2.16.0 → 2.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,308 @@
1
+ // K:BOT Embedded Inference Engine
2
+ // Runs GGUF models directly via node-llama-cpp — no Ollama, no external service needed.
3
+ // GPU-accelerated on Mac (Metal), Linux (CUDA/Vulkan), Windows (CUDA/Vulkan).
4
+ //
5
+ // node-llama-cpp is an OPTIONAL dependency — kbot works fine without it.
6
+ // All imports are dynamic to avoid compile errors when it's not installed.
7
+ import { homedir } from 'node:os';
8
+ import { join, basename } from 'node:path';
9
+ import { existsSync, mkdirSync, readdirSync, statSync, unlinkSync } from 'node:fs';
10
// Directory where downloaded GGUF model files are stored.
const MODELS_DIR = join(homedir(), '.kbot', 'models');

// ── Dynamic import helper (avoids TS errors for optional dep) ──
// Use a variable to prevent TypeScript from resolving the optional module at compile time
const LLAMA_MODULE = 'node-llama-cpp';

/**
 * Dynamically imports the optional `node-llama-cpp` dependency.
 *
 * @returns {Promise<object>} The node-llama-cpp module namespace.
 * @throws {Error} When the package cannot be imported. The original import
 *   failure is preserved as `cause` instead of being silently swallowed,
 *   so callers can distinguish "not installed" from a broken native build.
 */
async function importLlama() {
  try {
    return await import(LLAMA_MODULE);
  } catch (err) {
    throw new Error('node-llama-cpp is not installed. Install it with:\n' +
      ' npm install -g node-llama-cpp\n' +
      'Or use Ollama instead: kbot local', { cause: err });
  }
}
24
// ── Default models for auto-download ──

/**
 * Preset model catalog: short alias → Hugging Face GGUF URI, a
 * human-readable description, and the approximate download size.
 * Aliases are accepted by `downloadModel` in place of a full hf: URI.
 */
export const DEFAULT_MODELS = {
  'llama3.1-8b': {
    hf: 'hf:mradermacher/Meta-Llama-3.1-8B-Instruct-GGUF:Q4_K_M',
    description: 'General-purpose, great balance of speed and quality',
    size: '~4.9 GB',
  },
  'qwen2.5-coder-7b': {
    hf: 'hf:Qwen/Qwen2.5-Coder-7B-Instruct-GGUF:qwen2.5-coder-7b-instruct-q4_k_m.gguf',
    description: 'Code-specialized, excellent for programming tasks',
    size: '~4.7 GB',
  },
  'deepseek-r1-8b': {
    hf: 'hf:mradermacher/DeepSeek-R1-Distill-Qwen-7B-GGUF:Q4_K_M',
    description: 'Reasoning-specialized, chain-of-thought capable',
    size: '~4.7 GB',
  },
  'gemma3-4b': {
    hf: 'hf:google/gemma-3-4b-it-qat-q4_0-gguf:gemma-3-4b-it-q4_0.gguf',
    description: 'Lightweight and fast, good for quick tasks',
    size: '~2.5 GB',
  },
  'phi4-14b': {
    hf: 'hf:mradermacher/phi-4-GGUF:Q4_K_M',
    description: 'Microsoft Phi-4, strong reasoning for its size',
    size: '~8.4 GB',
  },
};
52
// ── Lazy-loaded engine state ──
// Module-level singletons for the embedded engine. All start null and are
// populated by loadModel(); unloadModel() resets every one of them back to null.
let _llama = null;            // node-llama-cpp runtime handle from getLlama()
let _model = null;            // currently loaded model object
let _context = null;          // inference context created from _model
let _session = null;          // chat session bound to _context's sequence
let _loadedModelPath = null;  // absolute path of the loaded .gguf file
let _loadedModelName = null;  // file name with the '.gguf' suffix removed
59
// ── Ensure models directory exists ──

/**
 * Creates the models directory on first use.
 *
 * @returns {string} Absolute path to the models directory.
 */
export function ensureModelsDir() {
  if (existsSync(MODELS_DIR)) {
    return MODELS_DIR;
  }
  mkdirSync(MODELS_DIR, { recursive: true });
  return MODELS_DIR;
}
66
// ── Model management ──

/**
 * Lists the .gguf model files present in the models directory.
 *
 * @returns {Array<{name: string, path: string, size: string, modified: string}>}
 *   One entry per model: name (file name without the .gguf extension),
 *   absolute path, size in GB with one decimal, and modification date (YYYY-MM-DD).
 */
export function listLocalModels() {
  ensureModelsDir();
  const files = readdirSync(MODELS_DIR).filter(f => f.endsWith('.gguf'));
  return files.map(f => {
    const fullPath = join(MODELS_DIR, f);
    const stat = statSync(fullPath);
    const sizeGB = (stat.size / (1024 * 1024 * 1024)).toFixed(1);
    return {
      // Strip only the trailing extension. replace('.gguf', '') would also
      // clobber a '.gguf' occurring mid-name (e.g. 'my.gguf.backup.gguf').
      name: f.slice(0, -'.gguf'.length),
      path: fullPath,
      size: `${sizeGB} GB`,
      modified: stat.mtime.toISOString().slice(0, 10),
    };
  });
}
82
/**
 * Downloads a model by preset alias or raw Hugging Face URI.
 *
 * @param {string} nameOrHf - A key of DEFAULT_MODELS or a full hf: URI.
 * @param {(percent: number) => void} [onProgress] - Optional 0-100 progress callback.
 * @returns {Promise<string>} Local path of the resolved model file.
 */
export async function downloadModel(nameOrHf, onProgress) {
  const llama = await importLlama();
  const modelsDir = ensureModelsDir();
  // Known preset aliases expand to their full hf: URI; anything else passes through.
  const hfUri = DEFAULT_MODELS[nameOrHf]?.hf ?? nameOrHf;
  const progressHandler = onProgress
    ? ({ downloadedSize, totalSize }) => {
        if (totalSize > 0) {
          onProgress(Math.round((downloadedSize / totalSize) * 100));
        }
      }
    : undefined;
  return llama.resolveModelFile(hfUri, modelsDir, { onProgress: progressHandler });
}
98
/**
 * Deletes the first local .gguf model whose file name contains `name`
 * (case-insensitive substring match).
 *
 * @param {string} name - Substring to match against model file names.
 * @returns {boolean} True when a matching model file was deleted.
 */
export function removeModel(name) {
  const modelsDir = ensureModelsDir();
  const needle = name.toLowerCase();
  const match = readdirSync(modelsDir)
    .filter(f => f.endsWith('.gguf'))
    .find(f => f.toLowerCase().includes(needle));
  if (!match) {
    return false;
  }
  unlinkSync(join(modelsDir, match));
  return true;
}
108
// ── Engine lifecycle ──

/**
 * Loads a GGUF model into the embedded engine, replacing any previously
 * loaded model. When called without a path, auto-selects the largest model
 * available locally.
 *
 * @param {string} [modelPath] - Path to a .gguf file; auto-selected if omitted.
 * @throws {Error} When no path is given and no local models exist.
 */
export async function loadModel(modelPath) {
  const llama = await importLlama();

  // Requested model is already loaded — nothing to do.
  if (_model && _loadedModelPath === modelPath) {
    return;
  }

  // Release whatever was loaded before.
  await unloadModel();

  let targetPath = modelPath;
  if (!targetPath) {
    const models = listLocalModels();
    if (models.length === 0) {
      throw new Error('No models found. Run `kbot models pull llama3.1-8b` to download one.');
    }
    // Prefer the largest model; parseFloat reads the leading number of "X.X GB".
    const [largest] = models.sort((a, b) => parseFloat(b.size) - parseFloat(a.size));
    targetPath = largest.path;
  }

  _llama = await llama.getLlama();
  _model = await _llama.loadModel({ modelPath: targetPath });
  _context = await _model.createContext();
  _session = new llama.LlamaChatSession({ contextSequence: _context.getSequence() });
  _loadedModelPath = targetPath;
  _loadedModelName = basename(targetPath).replace('.gguf', '');
}
135
/**
 * Disposes the active context and model (best-effort — dispose failures are
 * ignored) and resets all engine state back to its unloaded defaults.
 */
export async function unloadModel() {
  // Dispose the context before the model, tolerating failures in either.
  for (const resource of [_context, _model]) {
    if (!resource) {
      continue;
    }
    try {
      await resource.dispose();
    } catch {
      // best-effort cleanup — nothing useful to do on failure
    }
  }
  _llama = null;
  _model = null;
  _context = null;
  _session = null;
  _loadedModelPath = null;
  _loadedModelName = null;
}
155
/**
 * @returns {string|null} Name of the currently loaded model (file name
 *   without the .gguf extension), or null when nothing is loaded.
 */
export function getLoadedModelName() {
  return _loadedModelName;
}
158
/**
 * @returns {boolean} True when a model is currently loaded into the engine.
 */
export function isModelLoaded() {
  return _model !== null;
}
161
/**
 * Runs one chat turn against the embedded model, auto-loading a model first
 * if none is active.
 *
 * @param {string} systemPrompt - Optional system prompt; prepended to the user message.
 * @param {Array<{role: string, content: string}>} messages - Conversation history.
 *   NOTE: only the LAST user message is actually sent — prior turns live in the
 *   session's own context, not in this prompt.
 * @param {Array<{name: string, description: string, input_schema: object}>} [tools]
 *   Tool definitions exposed to the model via node-llama-cpp function calling.
 * @param {(chunk: string) => void} [onChunk] - Streaming callback for text chunks.
 * @returns {Promise<object>} Result object: { content, model, usage, stop_reason,
 *   tool_calls? } — usage token counts are rough chars/4 estimates, not real counts.
 */
export async function chatCompletion(systemPrompt, messages, tools, onChunk) {
  if (!_session || !_model) {
    await loadModel();
  }
  // Build the prompt from messages — only the most recent user turn is used.
  const lastUserMsg = messages.filter(m => m.role === 'user').pop();
  if (!lastUserMsg) {
    // No user input at all: return an empty, zero-usage result.
    return {
      content: '',
      model: _loadedModelName || 'embedded',
      usage: { input_tokens: 0, output_tokens: 0 },
    };
  }
  const prompt = systemPrompt
    ? `${systemPrompt}\n\n${lastUserMsg.content}`
    : lastUserMsg.content;
  // Set up function calling if tools are provided. Each handler does NOT run the
  // tool — it returns a JSON marker string that parseToolCallsFromResponse later
  // extracts, so tool execution stays with the caller.
  let functions;
  if (tools && tools.length > 0) {
    const llama = await importLlama();
    functions = {};
    for (const tool of tools) {
      functions[tool.name] = llama.defineChatSessionFunction({
        description: tool.description,
        params: tool.input_schema,
        async handler(params) {
          return JSON.stringify({ __kbot_tool_call: true, name: tool.name, arguments: params });
        },
      });
    }
  }
  let outputTokens = 0;
  const response = await _session.prompt(prompt, {
    maxTokens: 8192,
    functions,
    // When streaming, count output tokens incrementally (chars/4 heuristic).
    onTextChunk: onChunk
      ? (chunk) => {
          outputTokens += Math.ceil(chunk.length / 4);
          onChunk(chunk);
        }
      : undefined,
  });
  // chars/4 is a crude token estimate; good enough for usage reporting.
  const inputTokens = Math.ceil(prompt.length / 4);
  if (!outputTokens)
    outputTokens = Math.ceil(response.length / 4);
  const result = {
    content: response,
    model: _loadedModelName || 'embedded',
    usage: { input_tokens: inputTokens, output_tokens: outputTokens },
    stop_reason: 'end_turn',
  };
  // Parse tool calls from the response, then scrub their JSON from the visible
  // content. These three regexes mirror the three patterns the parser recognizes
  // (fenced code blocks, __kbot_tool_call markers, raw {"name": ...} objects).
  if (tools && tools.length > 0) {
    const toolCalls = parseToolCallsFromResponse(response, tools.map(t => t.name));
    if (toolCalls.length > 0) {
      result.tool_calls = toolCalls;
      result.content = response
        .replace(/```(?:json)?\s*\{[\s\S]*?\}\s*```/g, '')
        .replace(/\{[\s\S]*?"__kbot_tool_call"[\s\S]*?\}/g, '')
        .replace(/\{[\s\S]*?"name"\s*:\s*"[a-z_]+"[\s\S]*?\}/g, '')
        .trim();
    }
  }
  return result;
}
226
/** Generates a unique id for an extracted tool call (emb_<ts>_<rand>). */
function makeToolCallId() {
  return `emb_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`;
}

/**
 * Extracts tool calls that the model emitted as JSON in its text output.
 * Tries three patterns in order and returns as soon as one of them yields
 * matches: (1) __kbot_tool_call marker objects produced by the function-calling
 * handlers, (2) JSON inside fenced code blocks, (3) raw JSON objects naming a
 * known tool. Malformed JSON candidates are skipped silently by design.
 *
 * @param {string} content - The model's raw text response.
 * @param {string[]} knownTools - Tool names eligible for matching.
 * @returns {Array<{id: string, name: string, arguments: object}>} Extracted calls.
 */
function parseToolCallsFromResponse(content, knownTools) {
  const calls = [];
  // Pattern 1: kbot-captured tool calls via handler. [^{}] restricts matches to
  // flat objects — nested argument objects are handled by the other patterns.
  const kbotPattern = /\{[^{}]*"__kbot_tool_call"\s*:\s*true[^{}]*\}/g;
  let match;
  while ((match = kbotPattern.exec(content)) !== null) {
    try {
      const obj = JSON.parse(match[0]);
      if (obj.name && knownTools.includes(obj.name)) {
        calls.push({
          id: makeToolCallId(),
          name: obj.name,
          arguments: obj.arguments || {},
        });
      }
    }
    catch { /* skip malformed JSON */ }
  }
  if (calls.length > 0)
    return calls;
  // Pattern 2: Code blocks with JSON; accepts several argument key spellings.
  const codeBlockPattern = /```(?:json)?\s*(\{[\s\S]*?\})\s*```/g;
  while ((match = codeBlockPattern.exec(content)) !== null) {
    try {
      const obj = JSON.parse(match[1]);
      const name = obj.name || obj.function?.name;
      if (name && knownTools.includes(name)) {
        const args = obj.arguments || obj.parameters || obj.input || {};
        calls.push({
          id: makeToolCallId(),
          name,
          arguments: typeof args === 'string' ? JSON.parse(args) : args,
        });
      }
    }
    catch { /* skip malformed JSON */ }
  }
  if (calls.length > 0)
    return calls;
  // Pattern 3: Raw JSON with known tool names (lowercase/underscore names only,
  // matching the scrub regex used by chatCompletion).
  const rawPattern = /\{[^{}]*"name"\s*:\s*"([a-z_]+)"[^{}]*\}/g;
  while ((match = rawPattern.exec(content)) !== null) {
    if (knownTools.includes(match[1])) {
      try {
        const obj = JSON.parse(match[0]);
        calls.push({
          id: makeToolCallId(),
          name: obj.name,
          arguments: obj.arguments || obj.parameters || {},
        });
      }
      catch { /* skip malformed JSON */ }
    }
  }
  return calls;
}
282
// ── Reset session ──

/**
 * Starts a fresh chat session on the existing context, discarding the
 * conversation history. No-op when no model/context is active.
 */
export async function resetSession() {
  if (!_context || !_model) {
    return;
  }
  const llama = await importLlama();
  _session = new llama.LlamaChatSession({ contextSequence: _context.getSequence() });
}
289
// ── Health check ──

/**
 * @returns {Promise<boolean>} True when node-llama-cpp can be imported,
 *   i.e. the embedded engine is usable on this install.
 */
export async function isEmbeddedAvailable() {
  return importLlama().then(
    () => true,
    () => false,
  );
}
299
// ── Model info ──

/**
 * @returns {{name: string|null, path: string|null, modelsDir: string,
 *   availableModels: number}} Snapshot of the engine's model state: loaded
 *   model name/path (null when unloaded), models directory, and the count
 *   of locally available .gguf files.
 */
export function getModelInfo() {
  const availableModels = listLocalModels().length;
  return {
    name: _loadedModelName,
    path: _loadedModelPath,
    modelsDir: MODELS_DIR,
    availableModels,
  };
}
308
+ //# sourceMappingURL=inference.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"inference.js","sourceRoot":"","sources":["../src/inference.ts"],"names":[],"mappings":"AAAA,kCAAkC;AAClC,wFAAwF;AACxF,8EAA8E;AAC9E,EAAE;AACF,yEAAyE;AACzE,2EAA2E;AAE3E,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAA;AACjC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,WAAW,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AAElF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAA;AAErD,kEAAkE;AAElE,0FAA0F;AAC1F,MAAM,YAAY,GAAG,gBAAgB,CAAA;AAErC,KAAK,UAAU,WAAW;IACxB,IAAI,CAAC;QACH,OAAO,MAAM,MAAM,CAAC,YAAY,CAAC,CAAA;IACnC,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CACb,qDAAqD;YACrD,mCAAmC;YACnC,mCAAmC,CACpC,CAAA;IACH,CAAC;AACH,CAAC;AAED,yCAAyC;AAEzC,MAAM,CAAC,MAAM,cAAc,GAAsE;IAC/F,aAAa,EAAE;QACb,EAAE,EAAE,wDAAwD;QAC5D,WAAW,EAAE,qDAAqD;QAClE,IAAI,EAAE,SAAS;KAChB;IACD,kBAAkB,EAAE;QAClB,EAAE,EAAE,8EAA8E;QAClF,WAAW,EAAE,mDAAmD;QAChE,IAAI,EAAE,SAAS;KAChB;IACD,gBAAgB,EAAE;QAChB,EAAE,EAAE,yDAAyD;QAC7D,WAAW,EAAE,iDAAiD;QAC9D,IAAI,EAAE,SAAS;KAChB;IACD,WAAW,EAAE;QACX,EAAE,EAAE,+DAA+D;QACnE,WAAW,EAAE,4CAA4C;QACzD,IAAI,EAAE,SAAS;KAChB;IACD,UAAU,EAAE;QACV,EAAE,EAAE,mCAAmC;QACvC,WAAW,EAAE,gDAAgD;QAC7D,IAAI,EAAE,SAAS;KAChB;CACF,CAAA;AAED,iCAAiC;AAEjC,IAAI,MAAM,GAAQ,IAAI,CAAA;AACtB,IAAI,MAAM,GAAQ,IAAI,CAAA;AACtB,IAAI,QAAQ,GAAQ,IAAI,CAAA;AACxB,IAAI,QAAQ,GAAQ,IAAI,CAAA;AACxB,IAAI,gBAAgB,GAAkB,IAAI,CAAA;AAC1C,IAAI,gBAAgB,GAAkB,IAAI,CAAA;AAE1C,uCAAuC;AAEvC,MAAM,UAAU,eAAe;IAC7B,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC5B,SAAS,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAA;IAC5C,CAAC;IACD,OAAO,UAAU,CAAA;AACnB,CAAC;AAED,yBAAyB;AAEzB,MAAM,UAAU,eAAe;IAC7B,eAAe,EAAE,CAAA;IACjB,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAA;IACtE,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAA;QAC/B,MAAM,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAC5
D,OAAO;YACL,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC5B,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,GAAG,MAAM,KAAK;YACpB,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC;SAChD,CAAA;IACH,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,QAAgB,EAChB,UAAkC;IAElC,MAAM,KAAK,GAAG,MAAM,WAAW,EAAE,CAAA;IACjC,MAAM,SAAS,GAAG,eAAe,EAAE,CAAA;IAEnC,+BAA+B;IAC/B,MAAM,MAAM,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAA;IACvC,MAAM,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAA;IAE3C,MAAM,SAAS,GAAG,MAAM,KAAK,CAAC,gBAAgB,CAAC,KAAK,EAAE,SAAS,EAAE;QAC/D,UAAU,EAAE,UAAU;YACpB,CAAC,CAAC,CAAC,EAAE,cAAc,EAAE,SAAS,EAAiD,EAAE,EAAE;gBAC/E,IAAI,SAAS,GAAG,CAAC;oBAAE,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,cAAc,GAAG,SAAS,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA;YAC/E,CAAC;YACH,CAAC,CAAC,SAAS;KACd,CAAC,CAAA;IAEF,OAAO,SAAS,CAAA;AAClB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,MAAM,SAAS,GAAG,eAAe,EAAE,CAAA;IACnC,MAAM,KAAK,GAAG,WAAW,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAA;IACrE,MAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC,CAAA;IAC3E,IAAI,KAAK,EAAE,CAAC;QACV,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAA;QAClC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAED,yBAAyB;AAEzB,MAAM,CAAC,KAAK,UAAU,SAAS,CAAC,SAAkB;IAChD,MAAM,KAAK,GAAG,MAAM,WAAW,EAAE,CAAA;IAEjC,wCAAwC;IACxC,IAAI,MAAM,IAAI,gBAAgB,KAAK,SAAS;QAAE,OAAM;IAEpD,+BAA+B;IAC/B,MAAM,WAAW,EAAE,CAAA;IAEnB,qBAAqB;IACrB,IAAI,UAAU,GAAG,SAAS,CAAA;IAC1B,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,8CAA8C;QAC9C,MAAM,MAAM,GAAG,eAAe,EAAE,CAAA;QAChC,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,sEAAsE,CAAC,CAAA;QACzF,CAAC;QACD,6CAA6C;QAC7C,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;QAC7E,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IAC7B,CAAC;IAED,MAAM,GAAG,MAAM,KAAK,CAAC,QAAQ,EAAE,CAAA;IAC/B,MAAM,GAA
G,MAAM,MAAM,CAAC,SAAS,CAAC,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC,CAAA;IAC1D,QAAQ,GAAG,MAAM,MAAM,CAAC,aAAa,EAAE,CAAA;IACvC,QAAQ,GAAG,IAAI,KAAK,CAAC,gBAAgB,CAAC,EAAE,eAAe,EAAE,QAAQ,CAAC,WAAW,EAAE,EAAE,CAAC,CAAA;IAClF,gBAAgB,GAAG,UAAU,CAAA;IAC7B,gBAAgB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAA;AAC9D,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,WAAW;IAC/B,IAAI,QAAQ,EAAE,CAAC;QACb,IAAI,CAAC;YAAC,MAAM,QAAQ,CAAC,OAAO,EAAE,CAAA;QAAC,CAAC;QAAC,MAAM,CAAC,CAAC,YAAY,CAAC,CAAC;IACzD,CAAC;IACD,IAAI,MAAM,EAAE,CAAC;QACX,IAAI,CAAC;YAAC,MAAM,MAAM,CAAC,OAAO,EAAE,CAAA;QAAC,CAAC;QAAC,MAAM,CAAC,CAAC,YAAY,CAAC,CAAC;IACvD,CAAC;IACD,MAAM,GAAG,IAAI,CAAA;IACb,MAAM,GAAG,IAAI,CAAA;IACb,QAAQ,GAAG,IAAI,CAAA;IACf,QAAQ,GAAG,IAAI,CAAA;IACf,gBAAgB,GAAG,IAAI,CAAA;IACvB,gBAAgB,GAAG,IAAI,CAAA;AACzB,CAAC;AAED,MAAM,UAAU,kBAAkB;IAChC,OAAO,gBAAgB,CAAA;AACzB,CAAC;AAED,MAAM,UAAU,aAAa;IAC3B,OAAO,MAAM,KAAK,IAAI,CAAA;AACxB,CAAC;AAYD,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,YAAoB,EACpB,QAAkD,EAClD,KAA2F,EAC3F,OAAgC;IAEhC,IAAI,CAAC,QAAQ,IAAI,CAAC,MAAM,EAAE,CAAC;QACzB,MAAM,SAAS,EAAE,CAAA;IACnB,CAAC;IAED,iCAAiC;IACjC,MAAM,WAAW,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,GAAG,EAAE,CAAA;IACjE,IAAI,CAAC,WAAW,EAAE,CAAC;QACjB,OAAO;YACL,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,gBAAgB,IAAI,UAAU;YACrC,KAAK,EAAE,EAAE,YAAY,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE;SAC7C,CAAA;IACH,CAAC;IAED,MAAM,MAAM,GAAG,YAAY;QACzB,CAAC,CAAC,GAAG,YAAY,OAAO,WAAW,CAAC,OAAO,EAAE;QAC7C,CAAC,CAAC,WAAW,CAAC,OAAO,CAAA;IAEvB,gDAAgD;IAChD,IAAI,SAA0C,CAAA;IAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC9B,MAAM,KAAK,GAAG,MAAM,WAAW,EAAE,CAAA;QACjC,SAAS,GAAG,EAAE,CAAA;QACd,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,yBAAyB,CAAC;gBACrD,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,MAAM,EAAE,IAAI,CAAC,YAAmB;gBAChC,KAAK,CAAC,OAAO,CAAC,MAAW;oBACvB,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,gBAAgB,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC,CAAA;gBACvF,CAAC;aACF,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,IAAI
,YAAY,GAAG,CAAC,CAAA;IAEpB,MAAM,QAAQ,GAAG,MAAM,QAAS,CAAC,MAAM,CAAC,MAAM,EAAE;QAC9C,SAAS,EAAE,IAAI;QACf,SAAS;QACT,WAAW,EAAE,OAAO;YAClB,CAAC,CAAC,CAAC,KAAa,EAAE,EAAE;gBAChB,YAAY,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC3C,OAAO,CAAC,KAAK,CAAC,CAAA;YAChB,CAAC;YACH,CAAC,CAAC,SAAS;KACd,CAAC,CAAA;IAEF,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IAChD,IAAI,CAAC,YAAY;QAAE,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IAEhE,MAAM,MAAM,GAAmB;QAC7B,OAAO,EAAE,QAAQ;QACjB,KAAK,EAAE,gBAAgB,IAAI,UAAU;QACrC,KAAK,EAAE,EAAE,YAAY,EAAE,WAAW,EAAE,aAAa,EAAE,YAAY,EAAE;QACjE,WAAW,EAAE,UAAU;KACxB,CAAA;IAED,qCAAqC;IACrC,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC9B,MAAM,SAAS,GAAG,0BAA0B,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;QAC9E,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACzB,MAAM,CAAC,UAAU,GAAG,SAAS,CAAA;YAC7B,MAAM,CAAC,OAAO,GAAG,QAAQ;iBACtB,OAAO,CAAC,oCAAoC,EAAE,EAAE,CAAC;iBACjD,OAAO,CAAC,yCAAyC,EAAE,EAAE,CAAC;iBACtD,OAAO,CAAC,6CAA6C,EAAE,EAAE,CAAC;iBAC1D,IAAI,EAAE,CAAA;QACX,CAAC;IACH,CAAC;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAED,SAAS,0BAA0B,CACjC,OAAe,EACf,UAAoB;IAEpB,MAAM,KAAK,GAA4E,EAAE,CAAA;IAEzF,kDAAkD;IAClD,MAAM,WAAW,GAAG,gDAAgD,CAAA;IACpE,IAAI,KAAK,CAAA;IACT,OAAO,CAAC,KAAK,GAAG,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,KAAK,IAAI,EAAE,CAAC;QACpD,IAAI,CAAC;YACH,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;YAChC,IAAI,GAAG,CAAC,IAAI,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC9C,KAAK,CAAC,IAAI,CAAC;oBACT,EAAE,EAAE,OAAO,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;oBACjE,IAAI,EAAE,GAAG,CAAC,IAAI;oBACd,SAAS,EAAE,GAAG,CAAC,SAAS,IAAI,EAAE;iBAC/B,CAAC,CAAA;YACJ,CAAC;QACH,CAAC;QAAC,MAAM,CAAC,CAAC,UAAU,CAAC,CAAC;IACxB,CAAC;IACD,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAElC,mCAAmC;IACnC,MAAM,gBAAgB,GAAG,sCAAsC,CAAA;IAC/D,OAAO,CAAC,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,KAA
K,IAAI,EAAE,CAAC;QACzD,IAAI,CAAC;YACH,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;YAChC,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,QAAQ,EAAE,IAAI,CAAA;YAC3C,IAAI,IAAI,IAAI,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;gBACtC,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,KAAK,IAAI,EAAE,CAAA;gBAC/D,KAAK,CAAC,IAAI,CAAC;oBACT,EAAE,EAAE,OAAO,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;oBACjE,IAAI;oBACJ,SAAS,EAAE,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI;iBAC9D,CAAC,CAAA;YACJ,CAAC;QACH,CAAC;QAAC,MAAM,CAAC,CAAC,UAAU,CAAC,CAAC;IACxB,CAAC;IACD,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAElC,4CAA4C;IAC5C,MAAM,UAAU,GAAG,2CAA2C,CAAA;IAC9D,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,KAAK,IAAI,EAAE,CAAC;QACnD,IAAI,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAClC,IAAI,CAAC;gBACH,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;gBAChC,KAAK,CAAC,IAAI,CAAC;oBACT,EAAE,EAAE,OAAO,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;oBACjE,IAAI,EAAE,GAAG,CAAC,IAAI;oBACd,SAAS,EAAE,GAAG,CAAC,SAAS,IAAI,GAAG,CAAC,UAAU,IAAI,EAAE;iBACjD,CAAC,CAAA;YACJ,CAAC;YAAC,MAAM,CAAC,CAAC,UAAU,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAED,sBAAsB;AAEtB,MAAM,CAAC,KAAK,UAAU,YAAY;IAChC,IAAI,QAAQ,IAAI,MAAM,EAAE,CAAC;QACvB,MAAM,KAAK,GAAG,MAAM,WAAW,EAAE,CAAA;QACjC,QAAQ,GAAG,IAAI,KAAK,CAAC,gBAAgB,CAAC,EAAE,eAAe,EAAE,QAAQ,CAAC,WAAW,EAAE,EAAE,CAAC,CAAA;IACpF,CAAC;AACH,CAAC;AAED,qBAAqB;AAErB,MAAM,CAAC,KAAK,UAAU,mBAAmB;IACvC,IAAI,CAAC;QACH,MAAM,WAAW,EAAE,CAAA;QACnB,OAAO,IAAI,CAAA;IACb,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC;AAED,mBAAmB;AAEnB,MAAM,UAAU,YAAY;IAM1B,OAAO;QACL,IAAI,EAAE,gBAAgB;QACtB,IAAI,EAAE,gBAAgB;QACtB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE,eAAe,EAAE,CAAC,MAAM;KAC1C,CAAA;AACH,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@kernel.chat/kbot",
3
- "version": "2.16.0",
4
- "description": "Universal AI agent for your terminal. 39 specialists, 216 tools, 19 providers. Covers every code ecosystem npm, PyPI, CRAN, Cargo, HuggingFace, arXiv, Docker, and more. VFX tools for Houdini VEX, GLSL shaders, FFmpeg, ImageMagick, Blender. Self-evolving, learns your patterns, runs offline with Ollama.",
3
+ "version": "2.17.0",
4
+ "description": "Universal AI agent for your terminal. 39 specialists, 216 tools, 20 providers. Embedded llama.cpp engineruns GGUF models directly, no Ollama needed. Covers every code ecosystem. VFX, research, containers, creative coding. Self-evolving, learns your patterns.",
5
5
  "type": "module",
6
6
  "repository": {
7
7
  "type": "git",
@@ -109,6 +109,9 @@
109
109
  "readline": "^1.3.0",
110
110
  "zod": "^3.23.0"
111
111
  },
112
+ "optionalDependencies": {
113
+ "node-llama-cpp": "^3.0.0"
114
+ },
112
115
  "devDependencies": {
113
116
  "@types/node": "^22.0.0",
114
117
  "tsx": "^4.19.0",