@bike4mind/cli 0.2.44-feat-cli-ollama-host-flag.20587 → 0.2.44-feat-playwright-setup.20601
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/bike4mind-cli.mjs +0 -7
- package/dist/{chunk-6ELP6ZPO.js → chunk-2CPSGARV.js} +7 -8
- package/dist/{chunk-357RJTWI.js → chunk-ABHBER4L.js} +2 -2
- package/dist/{chunk-E2I7MPL4.js → chunk-HK3UOA3W.js} +1 -1
- package/dist/{chunk-WOXLDWVH.js → chunk-KMEJIQYC.js} +1 -1
- package/dist/{chunk-7PMOD7GR.js → chunk-R4EINN7L.js} +1 -1
- package/dist/{chunk-WVVSPMXX.js → chunk-SSIEEQNS.js} +51 -100
- package/dist/commands/doctorCommand.js +1 -1
- package/dist/commands/headlessCommand.js +5 -5
- package/dist/commands/updateCommand.js +1 -1
- package/dist/{create-2WDOOUTP.js → create-IUA6762E.js} +2 -2
- package/dist/index.js +9 -61
- package/dist/{mementoService-XUXVOI5D.js → mementoService-6IHJK4UA.js} +2 -2
- package/dist/{src-BIPZW6CA.js → src-UTUFIV6C.js} +1 -1
- package/dist/{subtractCredits-2CBO265A.js → subtractCredits-OA6YAHP6.js} +2 -2
- package/package.json +7 -8
package/bin/bike4mind-cli.mjs
CHANGED
@@ -67,10 +67,6 @@ const argv = await yargs(hideBin(process.argv))
 description: 'When using -p, auto-allow all tool permission prompts (use with caution in CI/CD)',
 default: false,
 })
-.option('ollama-host', {
-type: 'string',
-description: 'Add local Ollama models to the model picker (e.g. http://localhost:11434)',
-})
 .command('mcp', 'Manage MCP (Model Context Protocol) servers', (yargs) => {
 return yargs
 .command('list', 'List configured MCP servers', {}, async () => {

@@ -139,9 +135,6 @@ if (argv['add-dir'] && argv['add-dir'].length > 0) {
 const resolvedDirs = argv['add-dir'].map(d => resolve(d));
 process.env.B4M_ADDITIONAL_DIRS = JSON.stringify(resolvedDirs);
 }
-if (argv['ollama-host']) {
-process.env.B4M_OLLAMA_HOST = argv['ollama-host'];
-}
 
 // Auto-detect environment: prefer production mode when dist exists
 const distPath = join(__dirname, '../dist/index.js');

@@ -3,7 +3,7 @@
 // package.json
 var package_default = {
 name: "@bike4mind/cli",
-version: "0.2.44-feat-
+version: "0.2.44-feat-playwright-setup.20601+f0cecf089",
 type: "module",
 description: "Interactive CLI tool for Bike4Mind with ReAct agents",
 license: "UNLICENSED",

@@ -92,7 +92,7 @@ var package_default = {
 mathjs: "^14.2.0",
 "mime-types": "^2.1.35",
 mongoose: "^8.8.3",
-ollama: "^0.
+ollama: "^0.5.12",
 open: "^11.0.0",
 openai: "^6.18.0",
 "p-limit": "^6.2.0",

@@ -104,7 +104,6 @@ var package_default = {
 tiktoken: "^1.0.16",
 "tree-sitter-wasms": "^0.1.13",
 turndown: "^7.2.0",
-undici: "^7.0.0",
 unpdf: "^0.10.0",
 uuid: "^9.0.1",
 voyageai: "^0.0.4",

@@ -118,10 +117,10 @@ var package_default = {
 },
 devDependencies: {
 "@bike4mind/agents": "0.1.0",
-"@bike4mind/common": "2.63.1-feat-
-"@bike4mind/mcp": "1.33.6-feat-
-"@bike4mind/services": "2.59.1-feat-
-"@bike4mind/utils": "2.13.2-feat-
+"@bike4mind/common": "2.63.1-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/mcp": "1.33.6-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/services": "2.59.1-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/utils": "2.13.2-feat-playwright-setup.20601+f0cecf089",
 "@types/better-sqlite3": "^7.6.13",
 "@types/diff": "^5.0.9",
 "@types/jsonwebtoken": "^9.0.4",

@@ -139,7 +138,7 @@ var package_default = {
 optionalDependencies: {
 "@vscode/ripgrep": "^1.17.0"
 },
-gitHead: "
+gitHead: "f0cecf089ffa89fa6b150aedd721eae6bfe84ed4"
 };
 
 // src/utils/updateChecker.ts

@@ -4,7 +4,7 @@ import {
 getOpenWeatherKey,
 getSerperKey,
 getWolframAlphaKey
-} from "./chunk-
+} from "./chunk-R4EINN7L.js";
 import {
 BFLImageService,
 BaseStorage,

@@ -16,7 +16,7 @@ import {
 OpenAIBackend,
 OpenAIImageService,
 XAIImageService
-} from "./chunk-
+} from "./chunk-SSIEEQNS.js";
 import {
 Logger
 } from "./chunk-PFBYGCOW.js";

@@ -6543,14 +6543,11 @@ var GeminiBackend = class {
 
 // ../../b4m-core/packages/utils/dist/src/llm/ollamaBackend.js
 import { Ollama } from "ollama";
-import { Agent as Agent2 } from "undici";
 var OllamaBackend = class {
 _host;
 _api;
-_logger;
 currentModel = "";
-constructor(host
-this._logger = logger ?? new Logger();
+constructor(host) {
 this._host = host ?? "http://localhost:11434";
 const url = new URL(this._host);
 const headers = {};

@@ -6559,12 +6556,7 @@ var OllamaBackend = class {
 url.username = "";
 url.password = "";
 }
-
-const fetchWithTimeout = (input, init) => globalThis.fetch(input, {
-...init,
-dispatcher: agent
-});
-this._api = new Ollama({ host: url.toString(), headers, fetch: fetchWithTimeout });
+this._api = new Ollama({ host: url.toString(), headers });
 }
 async getModelInfo() {
 try {

@@ -6597,17 +6589,18 @@ var OllamaBackend = class {
 if (error instanceof Error && error.message.includes("503 Service Temporarily Unavailable")) {
 errorMessage = "Ollama server is temporarily unavailable. Please try again later.";
 }
-
+console.warn("[OllamaBackend] Error fetching model info from Ollama:", errorMessage);
 return [];
 }
 }
 async complete(model, messages, options, callback) {
 this.currentModel = model;
-const formattedTools = this.formatTools(options.tools ?? []);
 const baseRequest = {
 model,
-messages:
-
+messages: messages.map((msg) => ({
+role: msg.role,
+content: msg.content.toString()
+}))
 };
 try {
 if (options.stream) {

@@ -6618,112 +6611,37 @@ var OllamaBackend = class {
 let inputTokens = 0;
 let outputTokens = 0;
 let stoppedThinking = false;
-const accumulatedToolCalls = [];
 for await (const chunk of response) {
-
-accumulatedToolCalls.push(...chunk.message.tool_calls);
-}
+const streamedText = [];
 let content = chunk.message.content || "";
 stoppedThinking = stoppedThinking || content.includes("</think>");
 if (chunk.done && !stoppedThinking) {
 content = `${content}</think>`;
 }
+streamedText[0] = content;
 inputTokens = Math.max(inputTokens, chunk.prompt_eval_count || 0);
 outputTokens += chunk.eval_count || 0;
-
-if (chunk.done && accumulatedToolCalls.length > 0) {
-completionInfo.toolsUsed = accumulatedToolCalls.map((tc, i) => ({
-name: tc.function.name,
-arguments: JSON.stringify(tc.function.arguments),
-id: `ollama-tool-${i}-${tc.function.name}`
-}));
-}
-await callback([content], completionInfo);
+await callback(streamedText, { inputTokens, outputTokens });
 }
 } else {
 const response = await this._api.chat({
 ...baseRequest,
 stream: false
 });
-
+console.log("[OllamaBackend] Received response from Ollama:", response);
+const streamedText = [];
+streamedText[0] = response.message.content || "";
 const completionInfo = {
 inputTokens: response.prompt_eval_count || 0,
 outputTokens: response.eval_count || 0
 };
-
-completionInfo.toolsUsed = toolCalls.map((tc, i) => ({
-name: tc.function.name,
-arguments: JSON.stringify(tc.function.arguments),
-id: `ollama-tool-${i}-${tc.function.name}`
-}));
-}
-await callback([response.message.content || ""], completionInfo);
+await callback(streamedText, completionInfo);
 }
 } catch (error) {
-
+console.error("[OllamaBackend] Error during Ollama API call:", error);
 throw error;
 }
 }
-pushToolMessages(messages, tool, result, _thinkingBlocks) {
-let argumentsObj;
-try {
-argumentsObj = JSON.parse(tool.parameters);
-} catch {
-argumentsObj = { _raw: tool.parameters };
-}
-messages.push({
-content: "",
-role: "assistant",
-tool_calls: [
-{
-function: {
-name: tool.name,
-arguments: argumentsObj
-}
-}
-]
-});
-messages.push({
-role: "tool",
-tool_name: tool.name,
-content: result
-});
-}
-/**
-* Convert ICompletionOptionTools into Ollama's Tool schema format.
-*/
-formatTools(tools) {
-return tools.map((tool) => ({
-type: "function",
-function: {
-...tool.toolSchema,
-parameters: {
-...tool.toolSchema.parameters,
-required: tool.toolSchema.parameters.required ?? []
-}
-}
-}));
-}
-/**
-* Map IMessage[] to Ollama's Message[], preserving tool_calls for multi-turn
-* tool conversations (added by pushToolMessages).
-*/
-buildMessages(messages) {
-return messages.map((msg) => {
-const raw = msg;
-const mapped = {
-role: msg.role,
-content: msg.content != null ? String(msg.content) : ""
-};
-if (Array.isArray(raw.tool_calls)) {
-mapped.tool_calls = raw.tool_calls;
-}
-if (typeof raw.tool_name === "string") {
-mapped.tool_name = raw.tool_name;
-}
-return mapped;
-});
-}
 formatMessages(messages) {
 return messages.map((msg) => {
 let content = "";

@@ -6744,14 +6662,47 @@ var OllamaBackend = class {
 };
 });
 }
+pushToolMessages(messages, tool, result, _thinkingBlocks) {
+messages.push({
+content: null,
+role: "assistant",
+tool_calls: [
+{
+id: tool.id,
+type: "function",
+function: {
+name: tool.name,
+arguments: tool.parameters
+}
+}
+]
+});
+messages.push({
+role: "tool",
+content: result,
+tool_call_id: tool.id
+});
+}
+// private formatTools(tools: ICompletionOptionTools[]): Tool[] {
+// return tools.map(tool => ({
+// type: 'function' as const,
+// function: {
+// ...tool.toolSchema,
+// parameters: {
+// ...tool.toolSchema.parameters,
+// required: tool.toolSchema.parameters.required || [],
+// },
+// },
+// }));
+// }
 async listModels() {
 try {
-
+console.log("[OllamaBackend] Listing models from Ollama");
 const response = await this._api.list();
-
+console.log("[OllamaBackend] Models listed from Ollama:", JSON.stringify(response.models, null, 2));
 return response.models;
 } catch (error) {
-
+console.error("[OllamaBackend] Error listing models from Ollama:", error);
 if (error.message?.includes("ECONNREFUSED") || error.message?.includes("Failed to fetch")) {
 throw new Error(`Could not connect to Ollama. Please make sure it is running at ${this._host}`);
 }

@@ -36,13 +36,13 @@ import {
 isReadOnlyTool,
 loadContextFiles,
 setWebSocketToolExecutor
-} from "../chunk-
+} from "../chunk-ABHBER4L.js";
 import "../chunk-BDQBOLYG.js";
-import "../chunk-
+import "../chunk-R4EINN7L.js";
 import "../chunk-GQGOWACU.js";
-import "../chunk-
-import "../chunk-
-import "../chunk-
+import "../chunk-HK3UOA3W.js";
+import "../chunk-KMEJIQYC.js";
+import "../chunk-SSIEEQNS.js";
 import "../chunk-PFBYGCOW.js";
 import "../chunk-BPFEGDC7.js";
 import {

package/dist/index.js
CHANGED
@@ -46,15 +46,13 @@ import {
 setWebSocketToolExecutor,
 substituteArguments,
 warmFileCache
-} from "./chunk-
+} from "./chunk-ABHBER4L.js";
 import "./chunk-BDQBOLYG.js";
-import "./chunk-
+import "./chunk-R4EINN7L.js";
 import "./chunk-GQGOWACU.js";
-import "./chunk-
-import "./chunk-
-import 
-OllamaBackend
-} from "./chunk-WVVSPMXX.js";
+import "./chunk-HK3UOA3W.js";
+import "./chunk-KMEJIQYC.js";
+import "./chunk-SSIEEQNS.js";
 import "./chunk-PFBYGCOW.js";
 import "./chunk-BPFEGDC7.js";
 import {

@@ -64,7 +62,7 @@ import {
 import {
 checkForUpdate,
 package_default
-} from "./chunk-
+} from "./chunk-2CPSGARV.js";
 import {
 selectActiveBackgroundAgents,
 useCliStore

@@ -2480,31 +2478,6 @@ Please acknowledge these background agent results and incorporate them into your
 }
 };
 
-// src/llm/MultiLlmBackend.ts
-var MultiLlmBackend = class {
-constructor(serverBackend, ollamaBackend, serverModels, ollamaModels, initialModel) {
-this.serverBackend = serverBackend;
-this.ollamaBackend = ollamaBackend;
-this.serverModels = serverModels;
-this.ollamaModels = ollamaModels;
-this.currentModel = initialModel;
-this.ollamaModelIds = new Set(ollamaModels.map((m) => m.id));
-}
-get activeBackend() {
-return this.ollamaModelIds.has(this.currentModel) ? this.ollamaBackend : this.serverBackend;
-}
-async complete(model, messages, options, callback) {
-const backend = this.ollamaModelIds.has(model) ? this.ollamaBackend : this.serverBackend;
-return backend.complete(model, messages, options, callback);
-}
-pushToolMessages(messages, tool, result, thinkingBlocks) {
-this.activeBackend.pushToolMessages(messages, tool, result, thinkingBlocks);
-}
-async getModelInfo() {
-return [...this.serverModels, ...this.ollamaModels];
-}
-};
-
 // src/index.tsx
 import { isAxiosError } from "axios";
 

@@ -2876,34 +2849,9 @@ function CliApp() {
 model: config.defaultModel
 });
 }
-const 
-
-
-const ollamaBackend = new OllamaBackend(ollamaHost, {
-debug: (...args) => logger.debug(String(args[0])),
-info: (...args) => logger.info(String(args[0])),
-warn: (...args) => logger.warn(String(args[0])),
-error: (...args) => logger.error(String(args[0]))
-});
-const [serverModels, ollamaModels] = await Promise.all([llm.getModelInfo(), ollamaBackend.getModelInfo()]);
-if (serverModels.length === 0 && ollamaModels.length === 0) {
-throw new Error(
-`No models available from server or Ollama at ${ollamaHost}.
-Pull a model: ollama pull qwen2.5:14b`
-);
-}
-if (ollamaModels.length === 0) {
-startupLog.push(`\u26A0\uFE0F No models found in Ollama at ${ollamaHost}. Pull one with: ollama pull qwen2.5:14b`);
-}
-const serverBackend = llm;
-llm = new MultiLlmBackend(serverBackend, ollamaBackend, serverModels, ollamaModels, config.defaultModel);
-models = await llm.getModelInfo();
-startupLog.push(`\u{1F999} Self-hosted Ollama: ${ollamaModels.length} model(s) added to picker`);
-} else {
-models = await llm.getModelInfo();
-if (models.length === 0) {
-throw new Error("No models available from server.");
-}
+const models = await llm.getModelInfo();
+if (models.length === 0) {
+throw new Error("No models available from server.");
 }
 logger.debug(`\u{1F4CB} Available models: ${models.map((m) => m.id).join(", ")}`);
 const modelInfo = models.find((m) => m.id === config.defaultModel) || models[0];

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@bike4mind/cli",
-"version": "0.2.44-feat-
+"version": "0.2.44-feat-playwright-setup.20601+f0cecf089",
 "type": "module",
 "description": "Interactive CLI tool for Bike4Mind with ReAct agents",
 "license": "UNLICENSED",

@@ -89,7 +89,7 @@
 "mathjs": "^14.2.0",
 "mime-types": "^2.1.35",
 "mongoose": "^8.8.3",
-"ollama": "^0.
+"ollama": "^0.5.12",
 "open": "^11.0.0",
 "openai": "^6.18.0",
 "p-limit": "^6.2.0",

@@ -101,7 +101,6 @@
 "tiktoken": "^1.0.16",
 "tree-sitter-wasms": "^0.1.13",
 "turndown": "^7.2.0",
-"undici": "^7.0.0",
 "unpdf": "^0.10.0",
 "uuid": "^9.0.1",
 "voyageai": "^0.0.4",

@@ -115,10 +114,10 @@
 },
 "devDependencies": {
 "@bike4mind/agents": "0.1.0",
-"@bike4mind/common": "2.63.1-feat-
-"@bike4mind/mcp": "1.33.6-feat-
-"@bike4mind/services": "2.59.1-feat-
-"@bike4mind/utils": "2.13.2-feat-
+"@bike4mind/common": "2.63.1-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/mcp": "1.33.6-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/services": "2.59.1-feat-playwright-setup.20601+f0cecf089",
+"@bike4mind/utils": "2.13.2-feat-playwright-setup.20601+f0cecf089",
 "@types/better-sqlite3": "^7.6.13",
 "@types/diff": "^5.0.9",
 "@types/jsonwebtoken": "^9.0.4",

@@ -136,5 +135,5 @@
 "optionalDependencies": {
 "@vscode/ripgrep": "^1.17.0"
 },
-"gitHead": "
+"gitHead": "f0cecf089ffa89fa6b150aedd721eae6bfe84ed4"
 }