@tractorscorch/clank 1.5.0 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -6,6 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
6
6
 
7
7
  ---
8
8
 
9
+ ## [1.5.1] — 2026-03-23
10
+
11
+ ### Fixed
12
+ - **Local models timing out on tool calls** — removed per-chunk read timeout that was killing legitimate slow processing; a 35B quantized model can take minutes for prefill on large contexts — that's normal, not a hang
13
+ - **Local model timeout increased to 5 minutes** — was 120s (too short for large quantized models doing prefill on big contexts with tool results)
14
+ - **Memory budget reduced for local models** — memory injection now uses 1.5K chars (was 4K) to avoid eating the limited context window of local models (8K-32K vs 128K+ for cloud)
15
+
16
+ ---
17
+
9
18
  ## [1.5.0] — 2026-03-23
10
19
 
11
20
  ### Fixed
package/README.md CHANGED
@@ -9,7 +9,7 @@
9
9
  </p>
10
10
 
11
11
  <p align="center">
12
- <a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.0-blue.svg" alt="Version" /></a>
12
+ <a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.1-blue.svg" alt="Version" /></a>
13
13
  <a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
14
14
  <a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
15
15
  <a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
75
75
  | Platform | Download |
76
76
  |----------|----------|
77
77
  | **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
78
- | **macOS** (Apple Silicon) | [Clank_1.5.0_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.0_macos) |
78
+ | **macOS** (Apple Silicon) | [Clank_1.5.1_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.1_macos) |
79
79
 
80
80
  ## Features
81
81
 
package/dist/index.js CHANGED
@@ -561,7 +561,7 @@ var init_ollama = __esm({
561
561
  if (this.maxResponseTokens) {
562
562
  body.max_tokens = this.maxResponseTokens;
563
563
  }
564
- const timeoutSignal = AbortSignal.timeout(12e4);
564
+ const timeoutSignal = AbortSignal.timeout(3e5);
565
565
  const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
566
566
  const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
567
567
  method: "POST",
@@ -580,14 +580,9 @@ var init_ollama = __esm({
580
580
  const decoder = new TextDecoder();
581
581
  let buffer = "";
582
582
  const toolCalls = /* @__PURE__ */ new Map();
583
- const CHUNK_TIMEOUT = 6e4;
584
583
  try {
585
584
  while (true) {
586
- const readPromise = reader.read();
587
- const timeoutPromise = new Promise(
588
- (_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_TIMEOUT)
589
- );
590
- const { done, value } = await Promise.race([readPromise, timeoutPromise]);
585
+ const { done, value } = await reader.read();
591
586
  if (done) break;
592
587
  buffer += decoder.decode(value, { stream: true });
593
588
  const lines = buffer.split("\n");
@@ -3004,7 +2999,7 @@ var init_openai = __esm({
3004
2999
  if (this.apiKey) {
3005
3000
  headers["Authorization"] = `Bearer ${this.apiKey}`;
3006
3001
  }
3007
- const timeoutMs = this.isLocal ? 12e4 : 9e4;
3002
+ const timeoutMs = this.isLocal ? 3e5 : 9e4;
3008
3003
  const timeoutSignal = AbortSignal.timeout(timeoutMs);
3009
3004
  const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
3010
3005
  const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
@@ -3022,14 +3017,9 @@ var init_openai = __esm({
3022
3017
  const decoder = new TextDecoder();
3023
3018
  let buffer = "";
3024
3019
  const toolCalls = /* @__PURE__ */ new Map();
3025
- const CHUNK_TIMEOUT = 6e4;
3026
3020
  try {
3027
3021
  while (true) {
3028
- const readPromise = reader.read();
3029
- const timeoutPromise = new Promise(
3030
- (_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_TIMEOUT)
3031
- );
3032
- const { done, value } = await Promise.race([readPromise, timeoutPromise]);
3022
+ const { done, value } = await reader.read();
3033
3023
  if (done) break;
3034
3024
  buffer += decoder.decode(value, { stream: true });
3035
3025
  const lines = buffer.split("\n");
@@ -6165,7 +6155,7 @@ var init_server = __esm({
6165
6155
  res.writeHead(200, { "Content-Type": "application/json" });
6166
6156
  res.end(JSON.stringify({
6167
6157
  status: "ok",
6168
- version: "1.5.0",
6158
+ version: "1.5.1",
6169
6159
  uptime: process.uptime(),
6170
6160
  clients: this.clients.size,
6171
6161
  agents: this.engines.size
@@ -6277,7 +6267,7 @@ var init_server = __esm({
6277
6267
  const hello = {
6278
6268
  type: "hello",
6279
6269
  protocol: PROTOCOL_VERSION,
6280
- version: "1.5.0",
6270
+ version: "1.5.1",
6281
6271
  agents: this.config.agents.list.map((a) => ({
6282
6272
  id: a.id,
6283
6273
  name: a.name || a.id,
@@ -6519,7 +6509,8 @@ var init_server = __esm({
6519
6509
  compact,
6520
6510
  thinking
6521
6511
  });
6522
- const memoryBlock = await this.memoryManager.buildMemoryBlock("", identity.workspace);
6512
+ const memoryBudget = resolved.isLocal ? 1500 : 4e3;
6513
+ const memoryBlock = await this.memoryManager.buildMemoryBlock("", identity.workspace, memoryBudget);
6523
6514
  const fullPrompt = memoryBlock ? systemPrompt + "\n\n---\n\n" + memoryBlock : systemPrompt;
6524
6515
  engine = new AgentEngine({
6525
6516
  identity,
@@ -7671,7 +7662,7 @@ async function runTui(opts) {
7671
7662
  ws.on("open", () => {
7672
7663
  ws.send(JSON.stringify({
7673
7664
  type: "connect",
7674
- params: { auth: { token }, mode: "tui", version: "1.5.0" }
7665
+ params: { auth: { token }, mode: "tui", version: "1.5.1" }
7675
7666
  }));
7676
7667
  });
7677
7668
  ws.on("message", (data) => {
@@ -8100,7 +8091,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
8100
8091
  import { dirname as dirname5, join as join19 } from "path";
8101
8092
  var __filename3 = fileURLToPath5(import.meta.url);
8102
8093
  var __dirname3 = dirname5(__filename3);
8103
- var version = "1.5.0";
8094
+ var version = "1.5.1";
8104
8095
  try {
8105
8096
  const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
8106
8097
  version = pkg.version;