mohdel 0.105.1 → 0.105.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -216,13 +216,27 @@ export async function * anthropic (envelope, deps = {}) {
|
|
|
216
216
|
// output_tokens and what actually streamed as visible output (text +
|
|
217
217
|
// tool input JSON) — catches redacted_thinking blocks (opus 4.7 default)
|
|
218
218
|
// that consume output tokens but emit no streaming deltas.
|
|
219
|
+
//
|
|
220
|
+
// When the caller explicitly disabled thinking via `outputEffort: 'none'`,
|
|
221
|
+
// we did NOT send `request.thinking` (see buildRequest) — Anthropic
|
|
222
|
+
// won't emit thinking content, redacted or otherwise — so the fallback
|
|
223
|
+
// heuristic would only attribute the natural chars/4 estimation gap
|
|
224
|
+
// (Anthropic packs denser than 4 chars/token on most content) to a
|
|
225
|
+
// non-existent thinking budget. Trust the explicit opt-out and report
|
|
226
|
+
// zero.
|
|
219
227
|
const streamedOutput = currentOutput()
|
|
220
228
|
const streamedOutputChars = streamedOutput.length +
|
|
221
229
|
[...toolBlocks.values()].reduce((s, b) => s + b.inputJson.length, 0)
|
|
222
230
|
const streamedOutputTokens = Math.ceil(streamedOutputChars / ANTHROPIC_THINKING_CHARS_PER_TOKEN)
|
|
223
|
-
const
|
|
224
|
-
|
|
225
|
-
|
|
231
|
+
const thinkingDisabled = envelope.outputEffort === 'none'
|
|
232
|
+
let estimatedThinkingTokens
|
|
233
|
+
if (thinkingDisabled) {
|
|
234
|
+
estimatedThinkingTokens = 0
|
|
235
|
+
} else if (thinkingChars > 0) {
|
|
236
|
+
estimatedThinkingTokens = Math.min(Math.ceil(thinkingChars / ANTHROPIC_THINKING_CHARS_PER_TOKEN), outputTokens)
|
|
237
|
+
} else {
|
|
238
|
+
estimatedThinkingTokens = Math.max(0, outputTokens - streamedOutputTokens)
|
|
239
|
+
}
|
|
226
240
|
const messageOutputTokens = Math.max(0, outputTokens - estimatedThinkingTokens)
|
|
227
241
|
|
|
228
242
|
/** @type {import('#core/events.js').DoneEvent} */
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mohdel",
|
|
3
|
-
"version": "0.105.1",
|
|
3
|
+
"version": "0.105.2",
|
|
4
4
|
"license": "MIT",
|
|
5
5
|
"author": {
|
|
6
6
|
"name": "Christophe Le Bars",
|
|
@@ -87,7 +87,7 @@
|
|
|
87
87
|
"@opentelemetry/exporter-trace-otlp-grpc": "^0.218.0",
|
|
88
88
|
"@opentelemetry/sdk-node": "^0.218.0",
|
|
89
89
|
"chalk": "^5.4.0",
|
|
90
|
-
"mohdel-thin-gate-linux-x64-gnu": "0.105.1"
|
|
90
|
+
"mohdel-thin-gate-linux-x64-gnu": "0.105.2"
|
|
91
91
|
},
|
|
92
92
|
"dependencies": {
|
|
93
93
|
"@anthropic-ai/sdk": "^0.95.2",
|