@tractorscorch/clank 1.5.5 → 1.5.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/README.md +2 -2
- package/dist/index.js +45 -31
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.5.7] — 2026-03-23
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **Tools completely broken for llama.cpp / LM Studio / vLLM models** — the PromptFallbackProvider (which injects tools into the system prompt as text) was only applied to Ollama models. All other local providers (llama.cpp, LM Studio, vLLM) sent tools via the API's `tools` parameter, which most local models can't handle — so they just ignored tools entirely. Now ALL local models that aren't in the known tool-capable list automatically get prompt-based tool injection
|
|
13
|
+
- **Tool-capable model detection shared across providers** — moved the `TOOL_CAPABLE_PATTERNS` list from the Ollama provider to a shared `supportsNativeTools()` function in types.ts, used by the agent engine for any local provider
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
17
|
+
## [1.5.6] — 2026-03-23
|
|
18
|
+
|
|
19
|
+
### Fixed
|
|
20
|
+
- **Local models still refusing actions** — v1.5.5 prompt wasn't forceful enough. Rewrote system prompt as a dense, authoritative rules block with numbered rules and explicit list of available tools. Added negative examples ("NEVER say 'I cannot access files'") which are more effective at overriding local model training biases than positive instructions alone
|
|
21
|
+
|
|
22
|
+
---
|
|
23
|
+
|
|
9
24
|
## [1.5.5] — 2026-03-23
|
|
10
25
|
|
|
11
26
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.5-blue.svg" alt="Version" /></a>
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.7-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.5.5_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.5_macos) |
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.5.7_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.7_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -395,11 +395,27 @@ var init_auto_persist = __esm({
|
|
|
395
395
|
});
|
|
396
396
|
|
|
397
397
|
// src/providers/types.ts
|
|
398
|
-
|
|
398
|
+
function supportsNativeTools(model) {
|
|
399
|
+
const name = model.includes("/") ? model.split("/").pop() : model;
|
|
400
|
+
const baseName = name.split(":")[0];
|
|
401
|
+
return TOOL_CAPABLE_PATTERNS.some((p) => p.test(baseName));
|
|
402
|
+
}
|
|
403
|
+
var TOOL_CAPABLE_PATTERNS, BaseProvider;
|
|
399
404
|
var init_types = __esm({
|
|
400
405
|
"src/providers/types.ts"() {
|
|
401
406
|
"use strict";
|
|
402
407
|
init_esm_shims();
|
|
408
|
+
TOOL_CAPABLE_PATTERNS = [
|
|
409
|
+
/^llama3\.[1-9]/i,
|
|
410
|
+
/^llama-3\.[1-9]/i,
|
|
411
|
+
/^qwen[23]/i,
|
|
412
|
+
/^mistral-nemo/i,
|
|
413
|
+
/^mistral-large/i,
|
|
414
|
+
/^command-r/i,
|
|
415
|
+
/^firefunction/i,
|
|
416
|
+
/^hermes-[23]/i,
|
|
417
|
+
/^nemotron/i
|
|
418
|
+
];
|
|
403
419
|
BaseProvider = class {
|
|
404
420
|
/** Rough token estimate (~4 chars per token) */
|
|
405
421
|
estimateTokens(messages) {
|
|
@@ -411,23 +427,12 @@ var init_types = __esm({
|
|
|
411
427
|
});
|
|
412
428
|
|
|
413
429
|
// src/providers/ollama.ts
|
|
414
|
-
var TOOL_CAPABLE_PATTERNS, contextWindowCache, OllamaProvider;
|
|
430
|
+
var contextWindowCache, OllamaProvider;
|
|
415
431
|
var init_ollama = __esm({
|
|
416
432
|
"src/providers/ollama.ts"() {
|
|
417
433
|
"use strict";
|
|
418
434
|
init_esm_shims();
|
|
419
435
|
init_types();
|
|
420
|
-
TOOL_CAPABLE_PATTERNS = [
|
|
421
|
-
/^llama3\.[1-9]/i,
|
|
422
|
-
/^llama-3\.[1-9]/i,
|
|
423
|
-
/^qwen[23]/i,
|
|
424
|
-
/^mistral-nemo/i,
|
|
425
|
-
/^mistral-large/i,
|
|
426
|
-
/^command-r/i,
|
|
427
|
-
/^firefunction/i,
|
|
428
|
-
/^hermes-[23]/i,
|
|
429
|
-
/^nemotron/i
|
|
430
|
-
];
|
|
431
436
|
contextWindowCache = /* @__PURE__ */ new Map();
|
|
432
437
|
OllamaProvider = class _OllamaProvider extends BaseProvider {
|
|
433
438
|
name = "ollama";
|
|
@@ -497,8 +502,7 @@ var init_ollama = __esm({
|
|
|
497
502
|
}
|
|
498
503
|
/** Check if a model supports native tool calling */
|
|
499
504
|
static supportsTools(model) {
|
|
500
|
-
|
|
501
|
-
return TOOL_CAPABLE_PATTERNS.some((p) => p.test(baseName));
|
|
505
|
+
return supportsNativeTools(model);
|
|
502
506
|
}
|
|
503
507
|
contextWindow() {
|
|
504
508
|
return contextWindowCache.get(this.model) ?? 32768;
|
|
@@ -821,6 +825,7 @@ var init_agent = __esm({
|
|
|
821
825
|
init_esm_shims();
|
|
822
826
|
init_context_engine();
|
|
823
827
|
init_auto_persist();
|
|
828
|
+
init_types();
|
|
824
829
|
init_ollama();
|
|
825
830
|
init_prompt_fallback();
|
|
826
831
|
MAX_ITERATIONS = 50;
|
|
@@ -902,11 +907,8 @@ var init_agent = __esm({
|
|
|
902
907
|
const provider = this.resolvedProvider.provider;
|
|
903
908
|
const isLocal = this.resolvedProvider.isLocal;
|
|
904
909
|
let activeProvider = provider;
|
|
905
|
-
if (isLocal &&
|
|
906
|
-
|
|
907
|
-
if (!OllamaProvider.supportsTools(modelName)) {
|
|
908
|
-
activeProvider = new PromptFallbackProvider(provider);
|
|
909
|
-
}
|
|
910
|
+
if (isLocal && !supportsNativeTools(this.identity.model.primary)) {
|
|
911
|
+
activeProvider = new PromptFallbackProvider(provider);
|
|
910
912
|
}
|
|
911
913
|
let fullResponse = "";
|
|
912
914
|
let iterationCount = 0;
|
|
@@ -1161,14 +1163,26 @@ async function buildSystemPrompt(opts) {
|
|
|
1161
1163
|
}
|
|
1162
1164
|
parts.push("");
|
|
1163
1165
|
if (compact) {
|
|
1164
|
-
parts.push(
|
|
1166
|
+
parts.push([
|
|
1167
|
+
"You are an AI agent running LOCALLY on the user's machine.",
|
|
1168
|
+
"You have tools: read_file, write_file, edit_file, list_directory, bash, search_files, glob_files, git, web_search, web_fetch, and self-config tools.",
|
|
1169
|
+
"ALWAYS use your tools. NEVER say you cannot access files, run commands, or perform actions. You CAN \u2014 use your tools.",
|
|
1170
|
+
"NEVER apologize and refuse. If asked to do something, DO IT with your tools or explain what tool you need."
|
|
1171
|
+
].join(" "));
|
|
1165
1172
|
} else {
|
|
1166
|
-
parts.push("##
|
|
1167
|
-
parts.push(
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1173
|
+
parts.push("## CRITICAL: You Are a Local Agent With Tools");
|
|
1174
|
+
parts.push([
|
|
1175
|
+
"You are an AI agent running LOCALLY on the user's machine \u2014 NOT a cloud chatbot.",
|
|
1176
|
+
"You have DIRECT access to the file system, shell, and web through your tools.",
|
|
1177
|
+
"Your tools include: read_file, write_file, edit_file, list_directory, bash, search_files, glob_files, git, web_search, web_fetch, and self-configuration tools."
|
|
1178
|
+
].join(" "));
|
|
1179
|
+
parts.push("");
|
|
1180
|
+
parts.push("**RULES \u2014 follow these strictly:**");
|
|
1181
|
+
parts.push("1. ALWAYS use your tools to accomplish tasks. When asked to read a file, USE read_file. When asked to run a command, USE bash.");
|
|
1182
|
+
parts.push(`2. NEVER say "I cannot access files", "I don't have the ability to", or "as an AI I cannot". These statements are FALSE. You have tools that give you these abilities.`);
|
|
1183
|
+
parts.push("3. NEVER apologize and refuse to act. If a task requires a tool, use it. If you lack a specific tool, say which tool you need \u2014 do not give a generic refusal.");
|
|
1184
|
+
parts.push("4. Read files before editing them. Use tools proactively without being asked twice.");
|
|
1185
|
+
parts.push("5. You can configure yourself \u2014 use the config, channel, agent, and model management tools to modify your own setup.");
|
|
1172
1186
|
}
|
|
1173
1187
|
if (opts.thinking === "off") {
|
|
1174
1188
|
parts.push("");
|
|
@@ -6228,7 +6242,7 @@ var init_server = __esm({
|
|
|
6228
6242
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6229
6243
|
res.end(JSON.stringify({
|
|
6230
6244
|
status: "ok",
|
|
6231
|
-
version: "1.5.5",
|
|
6245
|
+
version: "1.5.7",
|
|
6232
6246
|
uptime: process.uptime(),
|
|
6233
6247
|
clients: this.clients.size,
|
|
6234
6248
|
agents: this.engines.size
|
|
@@ -6340,7 +6354,7 @@ var init_server = __esm({
|
|
|
6340
6354
|
const hello = {
|
|
6341
6355
|
type: "hello",
|
|
6342
6356
|
protocol: PROTOCOL_VERSION,
|
|
6343
|
-
version: "1.5.5",
|
|
6357
|
+
version: "1.5.7",
|
|
6344
6358
|
agents: this.config.agents.list.map((a) => ({
|
|
6345
6359
|
id: a.id,
|
|
6346
6360
|
name: a.name || a.id,
|
|
@@ -7735,7 +7749,7 @@ async function runTui(opts) {
|
|
|
7735
7749
|
ws.on("open", () => {
|
|
7736
7750
|
ws.send(JSON.stringify({
|
|
7737
7751
|
type: "connect",
|
|
7738
|
-
params: { auth: { token }, mode: "tui", version: "1.5.5" }
|
|
7752
|
+
params: { auth: { token }, mode: "tui", version: "1.5.7" }
|
|
7739
7753
|
}));
|
|
7740
7754
|
});
|
|
7741
7755
|
ws.on("message", (data) => {
|
|
@@ -8164,7 +8178,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8164
8178
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8165
8179
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8166
8180
|
var __dirname3 = dirname5(__filename3);
|
|
8167
|
-
var version = "1.5.5";
|
|
8181
|
+
var version = "1.5.7";
|
|
8168
8182
|
try {
|
|
8169
8183
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8170
8184
|
version = pkg.version;
|