@tractorscorch/clank 1.4.9 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +2 -2
- package/dist/index.js +30 -9
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.5.0] — 2026-03-23
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **Model hangs forever on large prompts/tool calls** — the connection-level timeout (120s) only covers the initial HTTP request; once streaming starts, `reader.read()` waits indefinitely for the next chunk. Added per-chunk 60s timeout via `Promise.race` — if the model stops sending data mid-stream (OOM, stuck processing), Clank detects it and reports an error instead of hanging forever
|
|
13
|
+
- **Debug logging for Telegram** — added request/response lifecycle logging to diagnose message handling issues
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
9
17
|
## [1.4.9] — 2026-03-22
|
|
10
18
|
|
|
11
19
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.9-blue.svg" alt="Version" /></a>
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.5.0-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.4.9_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.9_macos) |
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.5.0_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.5.0_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -580,9 +580,14 @@ var init_ollama = __esm({
|
|
|
580
580
|
const decoder = new TextDecoder();
|
|
581
581
|
let buffer = "";
|
|
582
582
|
const toolCalls = /* @__PURE__ */ new Map();
|
|
583
|
+
const CHUNK_TIMEOUT = 6e4;
|
|
583
584
|
try {
|
|
584
585
|
while (true) {
|
|
585
|
-
const { done, value } = await reader.read();
|
|
586
|
+
const readPromise = reader.read();
|
|
587
|
+
const timeoutPromise = new Promise(
|
|
588
|
+
(_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_TIMEOUT)
|
|
589
|
+
);
|
|
590
|
+
const { done, value } = await Promise.race([readPromise, timeoutPromise]);
|
|
586
591
|
if (done) break;
|
|
587
592
|
buffer += decoder.decode(value, { stream: true });
|
|
588
593
|
const lines = buffer.split("\n");
|
|
@@ -3017,9 +3022,14 @@ var init_openai = __esm({
|
|
|
3017
3022
|
const decoder = new TextDecoder();
|
|
3018
3023
|
let buffer = "";
|
|
3019
3024
|
const toolCalls = /* @__PURE__ */ new Map();
|
|
3025
|
+
const CHUNK_TIMEOUT = 6e4;
|
|
3020
3026
|
try {
|
|
3021
3027
|
while (true) {
|
|
3022
|
-
const { done, value } = await reader.read();
|
|
3028
|
+
const readPromise = reader.read();
|
|
3029
|
+
const timeoutPromise = new Promise(
|
|
3030
|
+
(_, reject) => setTimeout(() => reject(new Error("Model stopped responding (no data for 60s)")), CHUNK_TIMEOUT)
|
|
3031
|
+
);
|
|
3032
|
+
const { done, value } = await Promise.race([readPromise, timeoutPromise]);
|
|
3023
3033
|
if (done) break;
|
|
3024
3034
|
buffer += decoder.decode(value, { stream: true });
|
|
3025
3035
|
const lines = buffer.split("\n");
|
|
@@ -5296,6 +5306,7 @@ var init_telegram = __esm({
|
|
|
5296
5306
|
const processMessage = async () => {
|
|
5297
5307
|
if (!this.gateway) return;
|
|
5298
5308
|
try {
|
|
5309
|
+
console.log(` Telegram: processing message from ${userId} in ${chatId}`);
|
|
5299
5310
|
await ctx.api.sendChatAction(chatId, "typing");
|
|
5300
5311
|
let streamMsgId = null;
|
|
5301
5312
|
let sendingInitial = false;
|
|
@@ -5365,13 +5376,17 @@ var init_telegram = __esm({
|
|
|
5365
5376
|
await ctx.api.sendMessage(chatId, chunk);
|
|
5366
5377
|
}
|
|
5367
5378
|
}
|
|
5379
|
+
console.log(` Telegram: response complete (${response?.length || 0} chars)`);
|
|
5368
5380
|
} catch (err) {
|
|
5369
5381
|
const errMsg = err instanceof Error ? err.message : String(err);
|
|
5370
|
-
|
|
5382
|
+
console.error(` Telegram: message handler error \u2014 ${errMsg}`);
|
|
5383
|
+
await ctx.api.sendMessage(chatId, `Error: ${errMsg.slice(0, 200)}`).catch(() => {
|
|
5384
|
+
});
|
|
5371
5385
|
}
|
|
5372
5386
|
};
|
|
5373
5387
|
const prev = chatLocks.get(chatId) || Promise.resolve();
|
|
5374
|
-
const next = prev.then(processMessage).catch(() => {
|
|
5388
|
+
const next = prev.then(processMessage).catch((err) => {
|
|
5389
|
+
console.error(` Telegram: queue error \u2014 ${err instanceof Error ? err.message : err}`);
|
|
5375
5390
|
});
|
|
5376
5391
|
chatLocks.set(chatId, next);
|
|
5377
5392
|
});
|
|
@@ -6104,7 +6119,13 @@ var init_server = __esm({
|
|
|
6104
6119
|
listeners.push(["error", fn]);
|
|
6105
6120
|
}
|
|
6106
6121
|
try {
|
|
6107
|
-
|
|
6122
|
+
console.log(` Streaming: sending message to engine (session: ${sessionKey})`);
|
|
6123
|
+
const result = await engine.sendMessage(text);
|
|
6124
|
+
console.log(` Streaming: engine returned (${result?.length || 0} chars)`);
|
|
6125
|
+
return result;
|
|
6126
|
+
} catch (err) {
|
|
6127
|
+
console.error(` Streaming: engine error \u2014 ${err instanceof Error ? err.message : err}`);
|
|
6128
|
+
throw err;
|
|
6108
6129
|
} finally {
|
|
6109
6130
|
for (const [event, fn] of listeners) {
|
|
6110
6131
|
engine.removeListener(event, fn);
|
|
@@ -6144,7 +6165,7 @@ var init_server = __esm({
|
|
|
6144
6165
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6145
6166
|
res.end(JSON.stringify({
|
|
6146
6167
|
status: "ok",
|
|
6147
|
-
version: "1.4.9",
|
|
6168
|
+
version: "1.5.0",
|
|
6148
6169
|
uptime: process.uptime(),
|
|
6149
6170
|
clients: this.clients.size,
|
|
6150
6171
|
agents: this.engines.size
|
|
@@ -6256,7 +6277,7 @@ var init_server = __esm({
|
|
|
6256
6277
|
const hello = {
|
|
6257
6278
|
type: "hello",
|
|
6258
6279
|
protocol: PROTOCOL_VERSION,
|
|
6259
|
-
version: "1.4.9",
|
|
6280
|
+
version: "1.5.0",
|
|
6260
6281
|
agents: this.config.agents.list.map((a) => ({
|
|
6261
6282
|
id: a.id,
|
|
6262
6283
|
name: a.name || a.id,
|
|
@@ -7650,7 +7671,7 @@ async function runTui(opts) {
|
|
|
7650
7671
|
ws.on("open", () => {
|
|
7651
7672
|
ws.send(JSON.stringify({
|
|
7652
7673
|
type: "connect",
|
|
7653
|
-
params: { auth: { token }, mode: "tui", version: "1.4.9" }
|
|
7674
|
+
params: { auth: { token }, mode: "tui", version: "1.5.0" }
|
|
7654
7675
|
}));
|
|
7655
7676
|
});
|
|
7656
7677
|
ws.on("message", (data) => {
|
|
@@ -8079,7 +8100,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8079
8100
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8080
8101
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8081
8102
|
var __dirname3 = dirname5(__filename3);
|
|
8082
|
-
var version = "1.4.9";
|
|
8103
|
+
var version = "1.5.0";
|
|
8083
8104
|
try {
|
|
8084
8105
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8085
8106
|
version = pkg.version;
|