@tractorscorch/clank 1.4.9 → 1.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/README.md +2 -2
- package/dist/index.js +22 -10
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.5.1] — 2026-03-23
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **Local models timing out on tool calls** — removed per-chunk read timeout that was killing legitimate slow processing; a 35B quantized model can take minutes for prefill on large contexts, that's normal not a hang
|
|
13
|
+
- **Local model timeout increased to 5 minutes** — was 120s (too short for large quantized models doing prefill on big contexts with tool results)
|
|
14
|
+
- **Memory budget reduced for local models** — memory injection now uses 1.5K chars (was 4K) to avoid eating the limited context window of local models (8K-32K vs 128K+ for cloud)
|
|
15
|
+
|
|
16
|
+
---
|
|
17
|
+
|
|
18
|
+
## [1.5.0] — 2026-03-23
|
|
19
|
+
|
|
20
|
+
### Fixed
|
|
21
|
+
- **Model hangs forever on large prompts/tool calls** — the connection-level timeout (120s) only covers the initial HTTP request; once streaming starts, `reader.read()` waits indefinitely for the next chunk. Added per-chunk 60s timeout via `Promise.race` — if the model stops sending data mid-stream (OOM, stuck processing), Clank detects it and reports an error instead of hanging forever
|
|
22
|
+
- **Debug logging for Telegram** — added request/response lifecycle logging to diagnose message handling issues
|
|
23
|
+
|
|
24
|
+
---
|
|
25
|
+
|
|
9
26
|
## [1.4.9] — 2026-03-22
|
|
10
27
|
|
|
11
28
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.9-blue.svg" alt="Version" /></a>
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.1-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.4.9_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.9_macos) |
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.5.1_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.1_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -561,7 +561,7 @@ var init_ollama = __esm({
|
|
|
561
561
|
if (this.maxResponseTokens) {
|
|
562
562
|
body.max_tokens = this.maxResponseTokens;
|
|
563
563
|
}
|
|
564
|
-
const timeoutSignal = AbortSignal.timeout(12e4);
|
|
564
|
+
const timeoutSignal = AbortSignal.timeout(3e5);
|
|
565
565
|
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
566
566
|
const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
|
|
567
567
|
method: "POST",
|
|
@@ -2999,7 +2999,7 @@ var init_openai = __esm({
|
|
|
2999
2999
|
if (this.apiKey) {
|
|
3000
3000
|
headers["Authorization"] = `Bearer ${this.apiKey}`;
|
|
3001
3001
|
}
|
|
3002
|
-
const timeoutMs = this.isLocal ? 12e4 : 9e4;
|
|
3002
|
+
const timeoutMs = this.isLocal ? 3e5 : 9e4;
|
|
3003
3003
|
const timeoutSignal = AbortSignal.timeout(timeoutMs);
|
|
3004
3004
|
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
3005
3005
|
const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
|
|
@@ -5296,6 +5296,7 @@ var init_telegram = __esm({
|
|
|
5296
5296
|
const processMessage = async () => {
|
|
5297
5297
|
if (!this.gateway) return;
|
|
5298
5298
|
try {
|
|
5299
|
+
console.log(` Telegram: processing message from ${userId} in ${chatId}`);
|
|
5299
5300
|
await ctx.api.sendChatAction(chatId, "typing");
|
|
5300
5301
|
let streamMsgId = null;
|
|
5301
5302
|
let sendingInitial = false;
|
|
@@ -5365,13 +5366,17 @@ var init_telegram = __esm({
|
|
|
5365
5366
|
await ctx.api.sendMessage(chatId, chunk);
|
|
5366
5367
|
}
|
|
5367
5368
|
}
|
|
5369
|
+
console.log(` Telegram: response complete (${response?.length || 0} chars)`);
|
|
5368
5370
|
} catch (err) {
|
|
5369
5371
|
const errMsg = err instanceof Error ? err.message : String(err);
|
|
5370
|
-
|
|
5372
|
+
console.error(` Telegram: message handler error \u2014 ${errMsg}`);
|
|
5373
|
+
await ctx.api.sendMessage(chatId, `Error: ${errMsg.slice(0, 200)}`).catch(() => {
|
|
5374
|
+
});
|
|
5371
5375
|
}
|
|
5372
5376
|
};
|
|
5373
5377
|
const prev = chatLocks.get(chatId) || Promise.resolve();
|
|
5374
|
-
const next = prev.then(processMessage).catch(() => {
|
|
5378
|
+
const next = prev.then(processMessage).catch((err) => {
|
|
5379
|
+
console.error(` Telegram: queue error \u2014 ${err instanceof Error ? err.message : err}`);
|
|
5375
5380
|
});
|
|
5376
5381
|
chatLocks.set(chatId, next);
|
|
5377
5382
|
});
|
|
@@ -6104,7 +6109,13 @@ var init_server = __esm({
|
|
|
6104
6109
|
listeners.push(["error", fn]);
|
|
6105
6110
|
}
|
|
6106
6111
|
try {
|
|
6107
|
-
|
|
6112
|
+
console.log(` Streaming: sending message to engine (session: ${sessionKey})`);
|
|
6113
|
+
const result = await engine.sendMessage(text);
|
|
6114
|
+
console.log(` Streaming: engine returned (${result?.length || 0} chars)`);
|
|
6115
|
+
return result;
|
|
6116
|
+
} catch (err) {
|
|
6117
|
+
console.error(` Streaming: engine error \u2014 ${err instanceof Error ? err.message : err}`);
|
|
6118
|
+
throw err;
|
|
6108
6119
|
} finally {
|
|
6109
6120
|
for (const [event, fn] of listeners) {
|
|
6110
6121
|
engine.removeListener(event, fn);
|
|
@@ -6144,7 +6155,7 @@ var init_server = __esm({
|
|
|
6144
6155
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6145
6156
|
res.end(JSON.stringify({
|
|
6146
6157
|
status: "ok",
|
|
6147
|
-
version: "1.4.9",
|
|
6158
|
+
version: "1.5.1",
|
|
6148
6159
|
uptime: process.uptime(),
|
|
6149
6160
|
clients: this.clients.size,
|
|
6150
6161
|
agents: this.engines.size
|
|
@@ -6256,7 +6267,7 @@ var init_server = __esm({
|
|
|
6256
6267
|
const hello = {
|
|
6257
6268
|
type: "hello",
|
|
6258
6269
|
protocol: PROTOCOL_VERSION,
|
|
6259
|
-
version: "1.4.9",
|
|
6270
|
+
version: "1.5.1",
|
|
6260
6271
|
agents: this.config.agents.list.map((a) => ({
|
|
6261
6272
|
id: a.id,
|
|
6262
6273
|
name: a.name || a.id,
|
|
@@ -6498,7 +6509,8 @@ var init_server = __esm({
|
|
|
6498
6509
|
compact,
|
|
6499
6510
|
thinking
|
|
6500
6511
|
});
|
|
6501
|
-
const memoryBlock = await this.memoryManager.buildMemoryBlock("", identity.workspace, 4e3);
|
|
6512
|
+
const memoryBudget = resolved.isLocal ? 1500 : 4e3;
|
|
6513
|
+
const memoryBlock = await this.memoryManager.buildMemoryBlock("", identity.workspace, memoryBudget);
|
|
6502
6514
|
const fullPrompt = memoryBlock ? systemPrompt + "\n\n---\n\n" + memoryBlock : systemPrompt;
|
|
6503
6515
|
engine = new AgentEngine({
|
|
6504
6516
|
identity,
|
|
@@ -7650,7 +7662,7 @@ async function runTui(opts) {
|
|
|
7650
7662
|
ws.on("open", () => {
|
|
7651
7663
|
ws.send(JSON.stringify({
|
|
7652
7664
|
type: "connect",
|
|
7653
|
-
params: { auth: { token }, mode: "tui", version: "1.4.9" }
|
|
7665
|
+
params: { auth: { token }, mode: "tui", version: "1.5.1" }
|
|
7654
7666
|
}));
|
|
7655
7667
|
});
|
|
7656
7668
|
ws.on("message", (data) => {
|
|
@@ -8079,7 +8091,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8079
8091
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8080
8092
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8081
8093
|
var __dirname3 = dirname5(__filename3);
|
|
8082
|
-
var version = "1.4.9";
|
|
8094
|
+
var version = "1.5.1";
|
|
8083
8095
|
try {
|
|
8084
8096
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8085
8097
|
version = pkg.version;
|