@tractorscorch/clank 1.4.7 → 1.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +2 -2
- package/dist/index.js +28 -10
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.4.9] — 2026-03-22
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **llama.cpp/local models crashing on tool calls** — OpenAI provider (used for llama.cpp, LM Studio, vLLM) was missing the orphaned tool result filter that Ollama had; orphaned tool results after compaction caused 400 API errors and permanent session corruption
|
|
13
|
+
- **Local model timeout too short** — OpenAI provider used 90s cloud timeout for local models; now uses 120s for local (matching Ollama) since large quantized models need time to process
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
17
|
+
## [1.4.8] — 2026-03-22
|
|
18
|
+
|
|
19
|
+
### Fixed
|
|
20
|
+
- **Model hangs permanently after tool calls** — provider timeout was bypassed when the engine passed its own AbortSignal (always); now uses `AbortSignal.any()` to combine the caller's signal with a hard 120s timeout so hung models are detected and reported instead of blocking forever
|
|
21
|
+
- **No retry on timeout** — engine no longer retries when a model times out (was doubling the wait to 240s with no chance of success); timeouts propagate immediately as errors
|
|
22
|
+
|
|
23
|
+
---
|
|
24
|
+
|
|
9
25
|
## [1.4.7] — 2026-03-22
|
|
10
26
|
|
|
11
27
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.7-blue.svg" alt="Version" /></a>
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.9-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.4.7_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.7_macos) |
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.4.9_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.9_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -561,7 +561,8 @@ var init_ollama = __esm({
|
|
|
561
561
|
if (this.maxResponseTokens) {
|
|
562
562
|
body.max_tokens = this.maxResponseTokens;
|
|
563
563
|
}
|
|
564
|
-
const
|
|
564
|
+
const timeoutSignal = AbortSignal.timeout(12e4);
|
|
565
|
+
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
565
566
|
const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
|
|
566
567
|
method: "POST",
|
|
567
568
|
headers: { "Content-Type": "application/json" },
|
|
@@ -947,7 +948,8 @@ var init_agent = __esm({
|
|
|
947
948
|
streamSuccess = true;
|
|
948
949
|
break;
|
|
949
950
|
} catch (streamErr) {
|
|
950
|
-
|
|
951
|
+
const isTimeout = streamErr instanceof Error && (streamErr.name === "TimeoutError" || streamErr.name === "AbortError" || streamErr.message.includes("timed out"));
|
|
952
|
+
if (attempt === 0 && !signal.aborted && !isTimeout) {
|
|
951
953
|
this.emit("error", {
|
|
952
954
|
message: `Model connection failed, retrying... (${streamErr instanceof Error ? streamErr.message : "unknown"})`,
|
|
953
955
|
recoverable: true
|
|
@@ -2757,7 +2759,8 @@ var init_anthropic = __esm({
|
|
|
2757
2759
|
if (tools.length > 0) {
|
|
2758
2760
|
body.tools = this.formatTools(tools);
|
|
2759
2761
|
}
|
|
2760
|
-
const
|
|
2762
|
+
const timeoutSignal = AbortSignal.timeout(9e4);
|
|
2763
|
+
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
2761
2764
|
const res = await fetch(`${this.baseUrl}/v1/messages`, {
|
|
2762
2765
|
method: "POST",
|
|
2763
2766
|
headers: {
|
|
@@ -2966,9 +2969,21 @@ var init_openai = __esm({
|
|
|
2966
2969
|
return result;
|
|
2967
2970
|
}
|
|
2968
2971
|
async *stream(messages, systemPrompt, tools, signal) {
|
|
2972
|
+
const toolCallIds = /* @__PURE__ */ new Set();
|
|
2973
|
+
for (const msg of messages) {
|
|
2974
|
+
if (msg.role === "assistant" && msg.tool_calls) {
|
|
2975
|
+
for (const tc of msg.tool_calls) toolCallIds.add(tc.id);
|
|
2976
|
+
}
|
|
2977
|
+
}
|
|
2978
|
+
const sanitized = messages.filter((msg) => {
|
|
2979
|
+
if (msg.role === "tool" && msg.tool_call_id && !toolCallIds.has(msg.tool_call_id)) {
|
|
2980
|
+
return false;
|
|
2981
|
+
}
|
|
2982
|
+
return true;
|
|
2983
|
+
});
|
|
2969
2984
|
const body = {
|
|
2970
2985
|
model: this.model,
|
|
2971
|
-
messages: this.prepareMessages(messages, systemPrompt),
|
|
2986
|
+
messages: this.prepareMessages(sanitized, systemPrompt),
|
|
2972
2987
|
stream: true,
|
|
2973
2988
|
stream_options: { include_usage: true }
|
|
2974
2989
|
};
|
|
@@ -2984,7 +2999,9 @@ var init_openai = __esm({
|
|
|
2984
2999
|
if (this.apiKey) {
|
|
2985
3000
|
headers["Authorization"] = `Bearer ${this.apiKey}`;
|
|
2986
3001
|
}
|
|
2987
|
-
const
|
|
3002
|
+
const timeoutMs = this.isLocal ? 12e4 : 9e4;
|
|
3003
|
+
const timeoutSignal = AbortSignal.timeout(timeoutMs);
|
|
3004
|
+
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
2988
3005
|
const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
|
|
2989
3006
|
method: "POST",
|
|
2990
3007
|
headers,
|
|
@@ -3211,7 +3228,8 @@ var init_google = __esm({
|
|
|
3211
3228
|
body.tools = this.formatTools(tools);
|
|
3212
3229
|
}
|
|
3213
3230
|
const url = `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:streamGenerateContent?key=${this.apiKey}&alt=sse`;
|
|
3214
|
-
const
|
|
3231
|
+
const timeoutSignal = AbortSignal.timeout(9e4);
|
|
3232
|
+
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
3215
3233
|
const res = await fetch(url, {
|
|
3216
3234
|
method: "POST",
|
|
3217
3235
|
headers: { "Content-Type": "application/json" },
|
|
@@ -6126,7 +6144,7 @@ var init_server = __esm({
|
|
|
6126
6144
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6127
6145
|
res.end(JSON.stringify({
|
|
6128
6146
|
status: "ok",
|
|
6129
|
-
version: "1.4.7",
|
|
6147
|
+
version: "1.4.9",
|
|
6130
6148
|
uptime: process.uptime(),
|
|
6131
6149
|
clients: this.clients.size,
|
|
6132
6150
|
agents: this.engines.size
|
|
@@ -6238,7 +6256,7 @@ var init_server = __esm({
|
|
|
6238
6256
|
const hello = {
|
|
6239
6257
|
type: "hello",
|
|
6240
6258
|
protocol: PROTOCOL_VERSION,
|
|
6241
|
-
version: "1.4.7",
|
|
6259
|
+
version: "1.4.9",
|
|
6242
6260
|
agents: this.config.agents.list.map((a) => ({
|
|
6243
6261
|
id: a.id,
|
|
6244
6262
|
name: a.name || a.id,
|
|
@@ -7632,7 +7650,7 @@ async function runTui(opts) {
|
|
|
7632
7650
|
ws.on("open", () => {
|
|
7633
7651
|
ws.send(JSON.stringify({
|
|
7634
7652
|
type: "connect",
|
|
7635
|
-
params: { auth: { token }, mode: "tui", version: "1.4.7" }
|
|
7653
|
+
params: { auth: { token }, mode: "tui", version: "1.4.9" }
|
|
7636
7654
|
}));
|
|
7637
7655
|
});
|
|
7638
7656
|
ws.on("message", (data) => {
|
|
@@ -8061,7 +8079,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8061
8079
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8062
8080
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8063
8081
|
var __dirname3 = dirname5(__filename3);
|
|
8064
|
-
var version = "1.4.7";
|
|
8082
|
+
var version = "1.4.9";
|
|
8065
8083
|
try {
|
|
8066
8084
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8067
8085
|
version = pkg.version;
|