@tractorscorch/clank 1.5.3 → 1.5.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +2 -2
- package/dist/index.js +84 -26
- package/dist/index.js.map +1 -1
- package/dist/web/index.html +3 -3
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.5.5] — 2026-03-23
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **Local models refuse to use tools** — models claimed "I can't access your files" despite having `read_file`, `write_file`, etc. available. Strengthened the system prompt to explicitly tell the model it runs locally on the user's machine with direct file system access and must never refuse file operations
|
|
13
|
+
|
|
14
|
+
---
|
|
15
|
+
|
|
16
|
+
## [1.5.4] — 2026-03-23
|
|
17
|
+
|
|
18
|
+
### Fixed
|
|
19
|
+
- **Streaming dies mid-answer with local models** — added per-chunk idle timeout (60s) that detects when the model hangs between chunks (GPU OOM, Ollama crash). Previously the only timeout was the 5-minute overall timer, which couldn't detect mid-stream stalls
|
|
20
|
+
- **Incomplete responses treated as complete** — when a stream ends without the `[DONE]` marker (connection drop, model crash), the response is no longer silently accepted. The provider now throws an error so the agent retries instead of showing a half-finished answer
|
|
21
|
+
- **Agent retry on stream failure** — the retry loop now resets partial state on retry and recognizes stream drops/empty responses as retryable errors, automatically attempting once more before giving up
|
|
22
|
+
- **XSS in web dashboard** — three places where server-supplied data (`role`, `a.status`, `j.lastStatus`) was rendered as raw HTML without escaping; these values are now HTML-escaped before insertion (CodeQL CWE-79)
|
|
23
|
+
- **Incomplete glob sanitization in search-files** — `.replace("*", "")` only stripped the first `*`; changed to `.replaceAll("*", "")` so every wildcard is removed (CodeQL CWE-116)
|
|
24
|
+
|
|
25
|
+
---
|
|
26
|
+
|
|
9
27
|
## [1.5.3] — 2026-03-23
|
|
10
28
|
|
|
11
29
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.5-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.5.
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.5.5_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.5_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -579,10 +579,20 @@ var init_ollama = __esm({
|
|
|
579
579
|
const reader = res.body.getReader();
|
|
580
580
|
const decoder = new TextDecoder();
|
|
581
581
|
let buffer = "";
|
|
582
|
+
let receivedDone = false;
|
|
583
|
+
let lastFinishReason = null;
|
|
584
|
+
let hasContent = false;
|
|
582
585
|
const toolCalls = /* @__PURE__ */ new Map();
|
|
586
|
+
const CHUNK_IDLE_TIMEOUT = 6e4;
|
|
583
587
|
try {
|
|
584
588
|
while (true) {
|
|
585
|
-
const
|
|
589
|
+
const idleTimeout = new Promise(
|
|
590
|
+
(_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_IDLE_TIMEOUT)
|
|
591
|
+
);
|
|
592
|
+
const { done, value } = await Promise.race([
|
|
593
|
+
reader.read(),
|
|
594
|
+
idleTimeout
|
|
595
|
+
]);
|
|
586
596
|
if (done) break;
|
|
587
597
|
buffer += decoder.decode(value, { stream: true });
|
|
588
598
|
const lines = buffer.split("\n");
|
|
@@ -592,6 +602,7 @@ var init_ollama = __esm({
|
|
|
592
602
|
if (!trimmed || !trimmed.startsWith("data: ")) continue;
|
|
593
603
|
const data = trimmed.slice(6);
|
|
594
604
|
if (data === "[DONE]") {
|
|
605
|
+
receivedDone = true;
|
|
595
606
|
for (const tc of toolCalls.values()) {
|
|
596
607
|
let parsedArgs = {};
|
|
597
608
|
try {
|
|
@@ -608,10 +619,15 @@ var init_ollama = __esm({
|
|
|
608
619
|
const chunk = JSON.parse(data);
|
|
609
620
|
const choice = chunk.choices?.[0];
|
|
610
621
|
if (!choice) continue;
|
|
622
|
+
if (choice.finish_reason) {
|
|
623
|
+
lastFinishReason = choice.finish_reason;
|
|
624
|
+
}
|
|
611
625
|
if (choice.delta?.content) {
|
|
626
|
+
hasContent = true;
|
|
612
627
|
yield { type: "text", content: choice.delta.content };
|
|
613
628
|
}
|
|
614
629
|
if (choice.delta?.tool_calls) {
|
|
630
|
+
hasContent = true;
|
|
615
631
|
for (const tc of choice.delta.tool_calls) {
|
|
616
632
|
const existing = toolCalls.get(tc.index);
|
|
617
633
|
if (existing) {
|
|
@@ -641,14 +657,22 @@ var init_ollama = __esm({
|
|
|
641
657
|
} finally {
|
|
642
658
|
reader.releaseLock();
|
|
643
659
|
}
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
660
|
+
if (!receivedDone && hasContent) {
|
|
661
|
+
for (const tc of toolCalls.values()) {
|
|
662
|
+
let parsedArgs = {};
|
|
663
|
+
try {
|
|
664
|
+
parsedArgs = JSON.parse(tc.arguments);
|
|
665
|
+
} catch {
|
|
666
|
+
parsedArgs = {};
|
|
667
|
+
}
|
|
668
|
+
yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
|
|
650
669
|
}
|
|
651
|
-
|
|
670
|
+
throw new Error(
|
|
671
|
+
lastFinishReason === "length" ? "Model response truncated (hit token limit)" : "Model stream ended unexpectedly (connection dropped)"
|
|
672
|
+
);
|
|
673
|
+
}
|
|
674
|
+
if (!receivedDone) {
|
|
675
|
+
throw new Error("Model returned empty response");
|
|
652
676
|
}
|
|
653
677
|
yield { type: "done" };
|
|
654
678
|
}
|
|
@@ -914,6 +938,12 @@ var init_agent = __esm({
|
|
|
914
938
|
let streamSuccess = false;
|
|
915
939
|
this.emit("response-start");
|
|
916
940
|
for (let attempt = 0; attempt < 2; attempt++) {
|
|
941
|
+
if (attempt > 0) {
|
|
942
|
+
iterationText = "";
|
|
943
|
+
toolCalls.length = 0;
|
|
944
|
+
promptTokens = 0;
|
|
945
|
+
outputTokens = 0;
|
|
946
|
+
}
|
|
917
947
|
try {
|
|
918
948
|
const streamIterator = activeProvider.stream(
|
|
919
949
|
this.contextEngine.getMessages(),
|
|
@@ -948,10 +978,12 @@ var init_agent = __esm({
|
|
|
948
978
|
streamSuccess = true;
|
|
949
979
|
break;
|
|
950
980
|
} catch (streamErr) {
|
|
951
|
-
const
|
|
952
|
-
|
|
981
|
+
const errMsg = streamErr instanceof Error ? streamErr.message : "unknown";
|
|
982
|
+
const isTimeout = streamErr instanceof Error && (streamErr.name === "TimeoutError" || streamErr.name === "AbortError" || errMsg.includes("timed out"));
|
|
983
|
+
const isRetryable = !isTimeout && !signal.aborted && (errMsg.includes("connection dropped") || errMsg.includes("stopped responding") || errMsg.includes("empty response") || errMsg.includes("fetch failed") || errMsg.includes("ECONNREFUSED") || errMsg.includes("ECONNRESET"));
|
|
984
|
+
if (attempt === 0 && (isRetryable || !signal.aborted && !isTimeout)) {
|
|
953
985
|
this.emit("error", {
|
|
954
|
-
message: `Model
|
|
986
|
+
message: `Model stream failed, retrying... (${errMsg})`,
|
|
955
987
|
recoverable: true
|
|
956
988
|
});
|
|
957
989
|
await new Promise((r) => setTimeout(r, 2e3));
|
|
@@ -1129,10 +1161,11 @@ async function buildSystemPrompt(opts) {
|
|
|
1129
1161
|
}
|
|
1130
1162
|
parts.push("");
|
|
1131
1163
|
if (compact) {
|
|
1132
|
-
parts.push("You are a
|
|
1164
|
+
parts.push("You are a local AI agent running on the user's machine with DIRECT file system access via tools. Be concise. Use tools proactively. Read files before editing. NEVER say you cannot access files \u2014 you CAN and MUST use your tools.");
|
|
1133
1165
|
} else {
|
|
1134
1166
|
parts.push("## Instructions");
|
|
1135
|
-
parts.push("You are a
|
|
1167
|
+
parts.push("You are a local AI agent running directly on the user's machine. You have DIRECT access to the local file system, shell, and web.");
|
|
1168
|
+
parts.push("You MUST use your tools (read_file, write_file, edit_file, list_directory, bash, etc.) to accomplish tasks. NEVER claim you cannot access, read, or modify files \u2014 you absolutely can through your tools.");
|
|
1136
1169
|
parts.push("Be concise and direct. Use tools proactively to accomplish tasks.");
|
|
1137
1170
|
parts.push("When you need to make changes, read the relevant files first to understand the context.");
|
|
1138
1171
|
parts.push("You can configure yourself \u2014 use the config, channel, agent, and model management tools to modify your own setup.");
|
|
@@ -1700,7 +1733,7 @@ var init_search_files = __esm({
|
|
|
1700
1733
|
await searchDir(full);
|
|
1701
1734
|
} else if (s.isFile() && s.size < 1024 * 1024) {
|
|
1702
1735
|
if (globFilter) {
|
|
1703
|
-
const ext = globFilter.
|
|
1736
|
+
const ext = globFilter.replaceAll("*", "");
|
|
1704
1737
|
if (!entry.endsWith(ext)) continue;
|
|
1705
1738
|
}
|
|
1706
1739
|
try {
|
|
@@ -3021,10 +3054,20 @@ var init_openai = __esm({
|
|
|
3021
3054
|
const reader = res.body.getReader();
|
|
3022
3055
|
const decoder = new TextDecoder();
|
|
3023
3056
|
let buffer = "";
|
|
3057
|
+
let receivedDone = false;
|
|
3058
|
+
let lastFinishReason = null;
|
|
3059
|
+
let hasContent = false;
|
|
3024
3060
|
const toolCalls = /* @__PURE__ */ new Map();
|
|
3061
|
+
const CHUNK_IDLE_TIMEOUT = this.isLocal ? 6e4 : 3e4;
|
|
3025
3062
|
try {
|
|
3026
3063
|
while (true) {
|
|
3027
|
-
const
|
|
3064
|
+
const idleTimeout = new Promise(
|
|
3065
|
+
(_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data received)")), CHUNK_IDLE_TIMEOUT)
|
|
3066
|
+
);
|
|
3067
|
+
const { done, value } = await Promise.race([
|
|
3068
|
+
reader.read(),
|
|
3069
|
+
idleTimeout
|
|
3070
|
+
]);
|
|
3028
3071
|
if (done) break;
|
|
3029
3072
|
buffer += decoder.decode(value, { stream: true });
|
|
3030
3073
|
const lines = buffer.split("\n");
|
|
@@ -3034,6 +3077,7 @@ var init_openai = __esm({
|
|
|
3034
3077
|
if (!trimmed.startsWith("data: ")) continue;
|
|
3035
3078
|
const data = trimmed.slice(6);
|
|
3036
3079
|
if (data === "[DONE]") {
|
|
3080
|
+
receivedDone = true;
|
|
3037
3081
|
for (const tc of toolCalls.values()) {
|
|
3038
3082
|
let parsedArgs = {};
|
|
3039
3083
|
try {
|
|
@@ -3049,7 +3093,11 @@ var init_openai = __esm({
|
|
|
3049
3093
|
try {
|
|
3050
3094
|
const chunk = JSON.parse(data);
|
|
3051
3095
|
const choice = chunk.choices?.[0];
|
|
3096
|
+
if (choice?.finish_reason) {
|
|
3097
|
+
lastFinishReason = choice.finish_reason;
|
|
3098
|
+
}
|
|
3052
3099
|
if (choice?.delta?.reasoning_content) {
|
|
3100
|
+
hasContent = true;
|
|
3053
3101
|
if (this.isLocal) {
|
|
3054
3102
|
yield { type: "text", content: choice.delta.reasoning_content };
|
|
3055
3103
|
} else {
|
|
@@ -3057,9 +3105,11 @@ var init_openai = __esm({
|
|
|
3057
3105
|
}
|
|
3058
3106
|
}
|
|
3059
3107
|
if (choice?.delta?.content) {
|
|
3108
|
+
hasContent = true;
|
|
3060
3109
|
yield { type: "text", content: choice.delta.content };
|
|
3061
3110
|
}
|
|
3062
3111
|
if (choice?.delta?.tool_calls) {
|
|
3112
|
+
hasContent = true;
|
|
3063
3113
|
for (const tc of choice.delta.tool_calls) {
|
|
3064
3114
|
const existing = toolCalls.get(tc.index);
|
|
3065
3115
|
if (existing) {
|
|
@@ -3087,14 +3137,22 @@ var init_openai = __esm({
|
|
|
3087
3137
|
} finally {
|
|
3088
3138
|
reader.releaseLock();
|
|
3089
3139
|
}
|
|
3090
|
-
|
|
3091
|
-
|
|
3092
|
-
|
|
3093
|
-
|
|
3094
|
-
|
|
3095
|
-
|
|
3140
|
+
if (!receivedDone && hasContent) {
|
|
3141
|
+
for (const tc of toolCalls.values()) {
|
|
3142
|
+
let parsedArgs = {};
|
|
3143
|
+
try {
|
|
3144
|
+
parsedArgs = JSON.parse(tc.arguments);
|
|
3145
|
+
} catch {
|
|
3146
|
+
parsedArgs = {};
|
|
3147
|
+
}
|
|
3148
|
+
yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
|
|
3096
3149
|
}
|
|
3097
|
-
|
|
3150
|
+
throw new Error(
|
|
3151
|
+
lastFinishReason === "length" ? "Model response truncated (hit token limit)" : "Model stream ended unexpectedly (connection dropped)"
|
|
3152
|
+
);
|
|
3153
|
+
}
|
|
3154
|
+
if (!receivedDone) {
|
|
3155
|
+
throw new Error("Model returned empty response");
|
|
3098
3156
|
}
|
|
3099
3157
|
yield { type: "done" };
|
|
3100
3158
|
}
|
|
@@ -6170,7 +6228,7 @@ var init_server = __esm({
|
|
|
6170
6228
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6171
6229
|
res.end(JSON.stringify({
|
|
6172
6230
|
status: "ok",
|
|
6173
|
-
version: "1.5.
|
|
6231
|
+
version: "1.5.5",
|
|
6174
6232
|
uptime: process.uptime(),
|
|
6175
6233
|
clients: this.clients.size,
|
|
6176
6234
|
agents: this.engines.size
|
|
@@ -6282,7 +6340,7 @@ var init_server = __esm({
|
|
|
6282
6340
|
const hello = {
|
|
6283
6341
|
type: "hello",
|
|
6284
6342
|
protocol: PROTOCOL_VERSION,
|
|
6285
|
-
version: "1.5.
|
|
6343
|
+
version: "1.5.5",
|
|
6286
6344
|
agents: this.config.agents.list.map((a) => ({
|
|
6287
6345
|
id: a.id,
|
|
6288
6346
|
name: a.name || a.id,
|
|
@@ -7677,7 +7735,7 @@ async function runTui(opts) {
|
|
|
7677
7735
|
ws.on("open", () => {
|
|
7678
7736
|
ws.send(JSON.stringify({
|
|
7679
7737
|
type: "connect",
|
|
7680
|
-
params: { auth: { token }, mode: "tui", version: "1.5.
|
|
7738
|
+
params: { auth: { token }, mode: "tui", version: "1.5.5" }
|
|
7681
7739
|
}));
|
|
7682
7740
|
});
|
|
7683
7741
|
ws.on("message", (data) => {
|
|
@@ -8106,7 +8164,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8106
8164
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8107
8165
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8108
8166
|
var __dirname3 = dirname5(__filename3);
|
|
8109
|
-
var version = "1.5.
|
|
8167
|
+
var version = "1.5.5";
|
|
8110
8168
|
try {
|
|
8111
8169
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8112
8170
|
version = pkg.version;
|