@tractorscorch/clank 1.5.3 → 1.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -6,6 +6,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
6
6
 
7
7
  ---
8
8
 
9
+ ## [1.5.4] — 2026-03-23
10
+
11
+ ### Fixed
12
+ - **Streaming dies mid-answer with local models** — added per-chunk idle timeout (60s) that detects when the model hangs between chunks (GPU OOM, Ollama crash). Previously the only timeout was the 5-minute overall timer, which couldn't detect mid-stream stalls
13
+ - **Incomplete responses treated as complete** — when a stream ends without the `[DONE]` marker (connection drop, model crash), the response is no longer silently accepted. The provider now throws an error so the agent retries instead of showing a half-finished answer
14
+ - **Agent retry on stream failure** — the retry loop now resets partial state before each attempt and recognizes stream drops/empty responses as retryable errors, automatically attempting once more before giving up
15
+ - **XSS in web dashboard** — server data (`role`, `a.status`, `j.lastStatus`) was rendered as raw HTML without escaping in 3 places; it is now HTML-escaped before insertion (CodeQL CWE-79)
16
+ - **Incomplete glob sanitization in search-files** — `.replace("*", "")` only stripped the first `*`; changed to `.replaceAll()` (CodeQL CWE-116)
17
+
18
+ ---
19
+
9
20
  ## [1.5.3] — 2026-03-23
10
21
 
11
22
  ### Fixed
package/README.md CHANGED
@@ -9,7 +9,7 @@
9
9
  </p>
10
10
 
11
11
  <p align="center">
12
- <a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.3-blue.svg" alt="Version" /></a>
12
+ <a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.4-blue.svg" alt="Version" /></a>
13
13
  <a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
14
14
  <a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
15
15
  <a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
75
75
  | Platform | Download |
76
76
  |----------|----------|
77
77
  | **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
78
- | **macOS** (Apple Silicon) | [Clank_1.5.3_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.3_macos) |
78
+ | **macOS** (Apple Silicon) | [Clank_1.5.4_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.4_macos) |
79
79
 
80
80
  ## Features
81
81
 
package/dist/index.js CHANGED
@@ -579,10 +579,20 @@ var init_ollama = __esm({
579
579
  const reader = res.body.getReader();
580
580
  const decoder = new TextDecoder();
581
581
  let buffer = "";
582
+ let receivedDone = false;
583
+ let lastFinishReason = null;
584
+ let hasContent = false;
582
585
  const toolCalls = /* @__PURE__ */ new Map();
586
+ const CHUNK_IDLE_TIMEOUT = 6e4;
583
587
  try {
584
588
  while (true) {
585
- const { done, value } = await reader.read();
589
+ const idleTimeout = new Promise(
590
+ (_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_IDLE_TIMEOUT)
591
+ );
592
+ const { done, value } = await Promise.race([
593
+ reader.read(),
594
+ idleTimeout
595
+ ]);
586
596
  if (done) break;
587
597
  buffer += decoder.decode(value, { stream: true });
588
598
  const lines = buffer.split("\n");
@@ -592,6 +602,7 @@ var init_ollama = __esm({
592
602
  if (!trimmed || !trimmed.startsWith("data: ")) continue;
593
603
  const data = trimmed.slice(6);
594
604
  if (data === "[DONE]") {
605
+ receivedDone = true;
595
606
  for (const tc of toolCalls.values()) {
596
607
  let parsedArgs = {};
597
608
  try {
@@ -608,10 +619,15 @@ var init_ollama = __esm({
608
619
  const chunk = JSON.parse(data);
609
620
  const choice = chunk.choices?.[0];
610
621
  if (!choice) continue;
622
+ if (choice.finish_reason) {
623
+ lastFinishReason = choice.finish_reason;
624
+ }
611
625
  if (choice.delta?.content) {
626
+ hasContent = true;
612
627
  yield { type: "text", content: choice.delta.content };
613
628
  }
614
629
  if (choice.delta?.tool_calls) {
630
+ hasContent = true;
615
631
  for (const tc of choice.delta.tool_calls) {
616
632
  const existing = toolCalls.get(tc.index);
617
633
  if (existing) {
@@ -641,14 +657,22 @@ var init_ollama = __esm({
641
657
  } finally {
642
658
  reader.releaseLock();
643
659
  }
644
- for (const tc of toolCalls.values()) {
645
- let parsedArgs = {};
646
- try {
647
- parsedArgs = JSON.parse(tc.arguments);
648
- } catch {
649
- parsedArgs = {};
660
+ if (!receivedDone && hasContent) {
661
+ for (const tc of toolCalls.values()) {
662
+ let parsedArgs = {};
663
+ try {
664
+ parsedArgs = JSON.parse(tc.arguments);
665
+ } catch {
666
+ parsedArgs = {};
667
+ }
668
+ yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
650
669
  }
651
- yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
670
+ throw new Error(
671
+ lastFinishReason === "length" ? "Model response truncated (hit token limit)" : "Model stream ended unexpectedly (connection dropped)"
672
+ );
673
+ }
674
+ if (!receivedDone) {
675
+ throw new Error("Model returned empty response");
652
676
  }
653
677
  yield { type: "done" };
654
678
  }
@@ -914,6 +938,12 @@ var init_agent = __esm({
914
938
  let streamSuccess = false;
915
939
  this.emit("response-start");
916
940
  for (let attempt = 0; attempt < 2; attempt++) {
941
+ if (attempt > 0) {
942
+ iterationText = "";
943
+ toolCalls.length = 0;
944
+ promptTokens = 0;
945
+ outputTokens = 0;
946
+ }
917
947
  try {
918
948
  const streamIterator = activeProvider.stream(
919
949
  this.contextEngine.getMessages(),
@@ -948,10 +978,12 @@ var init_agent = __esm({
948
978
  streamSuccess = true;
949
979
  break;
950
980
  } catch (streamErr) {
951
- const isTimeout = streamErr instanceof Error && (streamErr.name === "TimeoutError" || streamErr.name === "AbortError" || streamErr.message.includes("timed out"));
952
- if (attempt === 0 && !signal.aborted && !isTimeout) {
981
+ const errMsg = streamErr instanceof Error ? streamErr.message : "unknown";
982
+ const isTimeout = streamErr instanceof Error && (streamErr.name === "TimeoutError" || streamErr.name === "AbortError" || errMsg.includes("timed out"));
983
+ const isRetryable = !isTimeout && !signal.aborted && (errMsg.includes("connection dropped") || errMsg.includes("stopped responding") || errMsg.includes("empty response") || errMsg.includes("fetch failed") || errMsg.includes("ECONNREFUSED") || errMsg.includes("ECONNRESET"));
984
+ if (attempt === 0 && (isRetryable || !signal.aborted && !isTimeout)) {
953
985
  this.emit("error", {
954
- message: `Model connection failed, retrying... (${streamErr instanceof Error ? streamErr.message : "unknown"})`,
986
+ message: `Model stream failed, retrying... (${errMsg})`,
955
987
  recoverable: true
956
988
  });
957
989
  await new Promise((r) => setTimeout(r, 2e3));
@@ -1700,7 +1732,7 @@ var init_search_files = __esm({
1700
1732
  await searchDir(full);
1701
1733
  } else if (s.isFile() && s.size < 1024 * 1024) {
1702
1734
  if (globFilter) {
1703
- const ext = globFilter.replace("*", "");
1735
+ const ext = globFilter.replaceAll("*", "");
1704
1736
  if (!entry.endsWith(ext)) continue;
1705
1737
  }
1706
1738
  try {
@@ -3021,10 +3053,20 @@ var init_openai = __esm({
3021
3053
  const reader = res.body.getReader();
3022
3054
  const decoder = new TextDecoder();
3023
3055
  let buffer = "";
3056
+ let receivedDone = false;
3057
+ let lastFinishReason = null;
3058
+ let hasContent = false;
3024
3059
  const toolCalls = /* @__PURE__ */ new Map();
3060
+ const CHUNK_IDLE_TIMEOUT = this.isLocal ? 6e4 : 3e4;
3025
3061
  try {
3026
3062
  while (true) {
3027
- const { done, value } = await reader.read();
3063
+ const idleTimeout = new Promise(
3064
+ (_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data received)")), CHUNK_IDLE_TIMEOUT)
3065
+ );
3066
+ const { done, value } = await Promise.race([
3067
+ reader.read(),
3068
+ idleTimeout
3069
+ ]);
3028
3070
  if (done) break;
3029
3071
  buffer += decoder.decode(value, { stream: true });
3030
3072
  const lines = buffer.split("\n");
@@ -3034,6 +3076,7 @@ var init_openai = __esm({
3034
3076
  if (!trimmed.startsWith("data: ")) continue;
3035
3077
  const data = trimmed.slice(6);
3036
3078
  if (data === "[DONE]") {
3079
+ receivedDone = true;
3037
3080
  for (const tc of toolCalls.values()) {
3038
3081
  let parsedArgs = {};
3039
3082
  try {
@@ -3049,7 +3092,11 @@ var init_openai = __esm({
3049
3092
  try {
3050
3093
  const chunk = JSON.parse(data);
3051
3094
  const choice = chunk.choices?.[0];
3095
+ if (choice?.finish_reason) {
3096
+ lastFinishReason = choice.finish_reason;
3097
+ }
3052
3098
  if (choice?.delta?.reasoning_content) {
3099
+ hasContent = true;
3053
3100
  if (this.isLocal) {
3054
3101
  yield { type: "text", content: choice.delta.reasoning_content };
3055
3102
  } else {
@@ -3057,9 +3104,11 @@ var init_openai = __esm({
3057
3104
  }
3058
3105
  }
3059
3106
  if (choice?.delta?.content) {
3107
+ hasContent = true;
3060
3108
  yield { type: "text", content: choice.delta.content };
3061
3109
  }
3062
3110
  if (choice?.delta?.tool_calls) {
3111
+ hasContent = true;
3063
3112
  for (const tc of choice.delta.tool_calls) {
3064
3113
  const existing = toolCalls.get(tc.index);
3065
3114
  if (existing) {
@@ -3087,14 +3136,22 @@ var init_openai = __esm({
3087
3136
  } finally {
3088
3137
  reader.releaseLock();
3089
3138
  }
3090
- for (const tc of toolCalls.values()) {
3091
- let parsedArgs = {};
3092
- try {
3093
- parsedArgs = JSON.parse(tc.arguments);
3094
- } catch {
3095
- parsedArgs = {};
3139
+ if (!receivedDone && hasContent) {
3140
+ for (const tc of toolCalls.values()) {
3141
+ let parsedArgs = {};
3142
+ try {
3143
+ parsedArgs = JSON.parse(tc.arguments);
3144
+ } catch {
3145
+ parsedArgs = {};
3146
+ }
3147
+ yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
3096
3148
  }
3097
- yield { type: "tool_call", id: tc.id, name: tc.name, arguments: parsedArgs };
3149
+ throw new Error(
3150
+ lastFinishReason === "length" ? "Model response truncated (hit token limit)" : "Model stream ended unexpectedly (connection dropped)"
3151
+ );
3152
+ }
3153
+ if (!receivedDone) {
3154
+ throw new Error("Model returned empty response");
3098
3155
  }
3099
3156
  yield { type: "done" };
3100
3157
  }
@@ -6170,7 +6227,7 @@ var init_server = __esm({
6170
6227
  res.writeHead(200, { "Content-Type": "application/json" });
6171
6228
  res.end(JSON.stringify({
6172
6229
  status: "ok",
6173
- version: "1.5.3",
6230
+ version: "1.5.4",
6174
6231
  uptime: process.uptime(),
6175
6232
  clients: this.clients.size,
6176
6233
  agents: this.engines.size
@@ -6282,7 +6339,7 @@ var init_server = __esm({
6282
6339
  const hello = {
6283
6340
  type: "hello",
6284
6341
  protocol: PROTOCOL_VERSION,
6285
- version: "1.5.3",
6342
+ version: "1.5.4",
6286
6343
  agents: this.config.agents.list.map((a) => ({
6287
6344
  id: a.id,
6288
6345
  name: a.name || a.id,
@@ -7677,7 +7734,7 @@ async function runTui(opts) {
7677
7734
  ws.on("open", () => {
7678
7735
  ws.send(JSON.stringify({
7679
7736
  type: "connect",
7680
- params: { auth: { token }, mode: "tui", version: "1.5.3" }
7737
+ params: { auth: { token }, mode: "tui", version: "1.5.4" }
7681
7738
  }));
7682
7739
  });
7683
7740
  ws.on("message", (data) => {
@@ -8106,7 +8163,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
8106
8163
  import { dirname as dirname5, join as join19 } from "path";
8107
8164
  var __filename3 = fileURLToPath5(import.meta.url);
8108
8165
  var __dirname3 = dirname5(__filename3);
8109
- var version = "1.5.3";
8166
+ var version = "1.5.4";
8110
8167
  try {
8111
8168
  const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
8112
8169
  version = pkg.version;