@tractorscorch/clank 1.4.8 → 1.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +2 -2
- package/dist/index.js +19 -6
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -6,6 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
|
|
6
6
|
|
|
7
7
|
---
|
|
8
8
|
|
|
9
|
+
## [1.4.9] — 2026-03-22
|
|
10
|
+
|
|
11
|
+
### Fixed
|
|
12
|
+
- **llama.cpp/local models crashing on tool calls** — OpenAI provider (used for llama.cpp, LM Studio, vLLM) was missing the orphaned tool result filter that Ollama had; orphaned tool results after compaction caused 400 API errors and permanent session corruption
|
|
13
|
+
- **Local model timeout too short** — OpenAI provider used 90s cloud timeout for local models; now uses 120s for local (matching Ollama) since large quantized models need time to process
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
9
17
|
## [1.4.8] — 2026-03-22
|
|
10
18
|
|
|
11
19
|
### Fixed
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
</p>
|
|
10
10
|
|
|
11
11
|
<p align="center">
|
|
12
|
-
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.8-blue.svg" alt="Version" /></a>
|
|
12
|
+
<a href="https://github.com/ItsTrag1c/Clank/releases/latest"><img src="https://img.shields.io/badge/version-1.4.9-blue.svg" alt="Version" /></a>
|
|
13
13
|
<a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License" /></a>
|
|
14
14
|
<a href="https://www.npmjs.com/package/@tractorscorch/clank"><img src="https://img.shields.io/npm/v/@tractorscorch/clank.svg" alt="npm" /></a>
|
|
15
15
|
<a href="https://github.com/ItsTrag1c/Clank/stargazers"><img src="https://img.shields.io/github/stars/ItsTrag1c/Clank.svg" alt="Stars" /></a>
|
|
@@ -75,7 +75,7 @@ That's it. Setup auto-detects your local models, configures the gateway, and get
|
|
|
75
75
|
| Platform | Download |
|
|
76
76
|
|----------|----------|
|
|
77
77
|
| **npm** (all platforms) | `npm install -g @tractorscorch/clank` |
|
|
78
|
-
| **macOS** (Apple Silicon) | [Clank_1.4.8_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.8_macos) |
|
|
78
|
+
| **macOS** (Apple Silicon) | [Clank_1.4.9_macos](https://github.com/ItsTrag1c/Clank/releases/latest/download/Clank_1.4.9_macos) |
|
|
79
79
|
|
|
80
80
|
## Features
|
|
81
81
|
|
package/dist/index.js
CHANGED
|
@@ -2969,9 +2969,21 @@ var init_openai = __esm({
|
|
|
2969
2969
|
return result;
|
|
2970
2970
|
}
|
|
2971
2971
|
async *stream(messages, systemPrompt, tools, signal) {
|
|
2972
|
+
const toolCallIds = /* @__PURE__ */ new Set();
|
|
2973
|
+
for (const msg of messages) {
|
|
2974
|
+
if (msg.role === "assistant" && msg.tool_calls) {
|
|
2975
|
+
for (const tc of msg.tool_calls) toolCallIds.add(tc.id);
|
|
2976
|
+
}
|
|
2977
|
+
}
|
|
2978
|
+
const sanitized = messages.filter((msg) => {
|
|
2979
|
+
if (msg.role === "tool" && msg.tool_call_id && !toolCallIds.has(msg.tool_call_id)) {
|
|
2980
|
+
return false;
|
|
2981
|
+
}
|
|
2982
|
+
return true;
|
|
2983
|
+
});
|
|
2972
2984
|
const body = {
|
|
2973
2985
|
model: this.model,
|
|
2974
|
-
messages: this.prepareMessages(messages, systemPrompt),
|
|
2986
|
+
messages: this.prepareMessages(sanitized, systemPrompt),
|
|
2975
2987
|
stream: true,
|
|
2976
2988
|
stream_options: { include_usage: true }
|
|
2977
2989
|
};
|
|
@@ -2987,7 +2999,8 @@ var init_openai = __esm({
|
|
|
2987
2999
|
if (this.apiKey) {
|
|
2988
3000
|
headers["Authorization"] = `Bearer ${this.apiKey}`;
|
|
2989
3001
|
}
|
|
2990
|
-
const timeoutSignal = AbortSignal.timeout(9e4);
|
|
3002
|
+
const timeoutMs = this.isLocal ? 12e4 : 9e4;
|
|
3003
|
+
const timeoutSignal = AbortSignal.timeout(timeoutMs);
|
|
2991
3004
|
const effectiveSignal = signal ? AbortSignal.any([signal, timeoutSignal]) : timeoutSignal;
|
|
2992
3005
|
const res = await fetch(`${this.baseUrl}/v1/chat/completions`, {
|
|
2993
3006
|
method: "POST",
|
|
@@ -6131,7 +6144,7 @@ var init_server = __esm({
|
|
|
6131
6144
|
res.writeHead(200, { "Content-Type": "application/json" });
|
|
6132
6145
|
res.end(JSON.stringify({
|
|
6133
6146
|
status: "ok",
|
|
6134
|
-
version: "1.4.8",
|
|
6147
|
+
version: "1.4.9",
|
|
6135
6148
|
uptime: process.uptime(),
|
|
6136
6149
|
clients: this.clients.size,
|
|
6137
6150
|
agents: this.engines.size
|
|
@@ -6243,7 +6256,7 @@ var init_server = __esm({
|
|
|
6243
6256
|
const hello = {
|
|
6244
6257
|
type: "hello",
|
|
6245
6258
|
protocol: PROTOCOL_VERSION,
|
|
6246
|
-
version: "1.4.8",
|
|
6259
|
+
version: "1.4.9",
|
|
6247
6260
|
agents: this.config.agents.list.map((a) => ({
|
|
6248
6261
|
id: a.id,
|
|
6249
6262
|
name: a.name || a.id,
|
|
@@ -7637,7 +7650,7 @@ async function runTui(opts) {
|
|
|
7637
7650
|
ws.on("open", () => {
|
|
7638
7651
|
ws.send(JSON.stringify({
|
|
7639
7652
|
type: "connect",
|
|
7640
|
-
params: { auth: { token }, mode: "tui", version: "1.4.8" }
|
|
7653
|
+
params: { auth: { token }, mode: "tui", version: "1.4.9" }
|
|
7641
7654
|
}));
|
|
7642
7655
|
});
|
|
7643
7656
|
ws.on("message", (data) => {
|
|
@@ -8066,7 +8079,7 @@ import { fileURLToPath as fileURLToPath5 } from "url";
|
|
|
8066
8079
|
import { dirname as dirname5, join as join19 } from "path";
|
|
8067
8080
|
var __filename3 = fileURLToPath5(import.meta.url);
|
|
8068
8081
|
var __dirname3 = dirname5(__filename3);
|
|
8069
|
-
var version = "1.4.8";
|
|
8082
|
+
var version = "1.4.9";
|
|
8070
8083
|
try {
|
|
8071
8084
|
const pkg = JSON.parse(readFileSync(join19(__dirname3, "..", "package.json"), "utf-8"));
|
|
8072
8085
|
version = pkg.version;
|