reasonix 0.0.4 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +133 -58
- package/dist/cli/index.js +716 -54
- package/dist/cli/index.js.map +1 -1
- package/dist/index.d.ts +96 -6
- package/dist/index.js +460 -20
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-Y7L6L5QS.js +0 -262
- package/dist/chunk-Y7L6L5QS.js.map +0 -1
- package/dist/cli/chunk-T2ODXAJP.js +0 -263
- package/dist/cli/chunk-T2ODXAJP.js.map +0 -1
- package/dist/cli/client-RIVGDOJP.js +0 -10
- package/dist/cli/client-RIVGDOJP.js.map +0 -1
- package/dist/client-KEA2D52Q.js +0 -9
- package/dist/client-KEA2D52Q.js.map +0 -1
package/dist/index.js
CHANGED
|
@@ -1,8 +1,258 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
1
|
+
// src/client.ts
|
|
2
|
+
import { createParser } from "eventsource-parser";
|
|
3
|
+
|
|
4
|
+
// src/retry.ts
|
|
5
|
+
// HTTP status codes treated as transient by default: timeout, rate limit,
// and the common 5xx gateway/server failures.
var DEFAULT_RETRYABLE_STATUSES = [408, 429, 500, 502, 503, 504];
/**
 * Fetch with exponential backoff and jitter.
 *
 * Retries on retryable HTTP statuses and on network errors, honoring the
 * server's `Retry-After` header when present. Abort errors (and an already
 * aborted `opts.signal`) are never retried — they propagate immediately.
 *
 * @param {Function} fetchFn fetch-compatible function to invoke
 * @param {string} url request URL
 * @param {object} init fetch init (method, headers, body, signal, ...)
 * @param {object} [opts] maxAttempts, initialBackoffMs, maxBackoffMs,
 *   retryableStatuses, signal, onRetry({attempt, reason, waitMs})
 * @returns {Promise<Response>} the first successful or non-retryable response
 */
async function fetchWithRetry(fetchFn, url, init, opts = {}) {
  const attempts = opts.maxAttempts ?? 4;
  const baseDelay = opts.initialBackoffMs ?? 500;
  const delayCap = opts.maxBackoffMs ?? 1e4;
  const retryableSet = new Set(opts.retryableStatuses ?? DEFAULT_RETRYABLE_STATUSES);
  let lastError;
  let attempt = 0;
  while (attempt < attempts) {
    if (opts.signal?.aborted) throw new Error("aborted");
    const isLast = attempt === attempts - 1;
    try {
      const response = await fetchFn(url, init);
      const shouldRetry = !response.ok && retryableSet.has(response.status);
      // Success, a non-retryable failure, or the final attempt: hand the
      // response back to the caller as-is.
      if (!shouldRetry || isLast) return response;
      // Drain the body so the connection can be reused; ignore read errors.
      await response.text().catch(() => void 0);
      const waitMs = computeWait(attempt, baseDelay, delayCap, response.headers.get("Retry-After"));
      opts.onRetry?.({ attempt: attempt + 1, reason: `http ${response.status}`, waitMs });
      await sleep(waitMs, opts.signal);
    } catch (err) {
      lastError = err;
      // Cancellation is never retried.
      if (isAbortError(err) || opts.signal?.aborted) throw err;
      if (isLast) throw err;
      const waitMs = computeWait(attempt, baseDelay, delayCap, null);
      opts.onRetry?.({
        attempt: attempt + 1,
        reason: `network: ${messageOf(err)}`,
        waitMs
      });
      await sleep(waitMs, opts.signal);
    }
    attempt += 1;
  }
  // Unreachable in practice: every iteration either returns or throws.
  throw lastError ?? new Error("fetchWithRetry: loop exited unexpectedly");
}
|
|
37
|
+
/**
 * Compute how long to wait before the next retry attempt.
 *
 * Honors `Retry-After` in both forms allowed by RFC 9110 §10.2.3:
 * delta-seconds ("120") and HTTP-date ("Wed, 21 Oct 2015 07:28:00 GMT").
 * Otherwise falls back to exponential backoff with ±25% jitter.
 *
 * @param {number} attempt zero-based attempt index
 * @param {number} initial base backoff in ms
 * @param {number} cap maximum wait in ms
 * @param {string|null} retryAfter raw Retry-After header value, if any
 * @returns {number} wait in milliseconds, never exceeding `cap`
 */
function computeWait(attempt, initial, cap, retryAfter) {
  if (retryAfter) {
    const seconds = Number.parseFloat(retryAfter);
    if (Number.isFinite(seconds) && seconds > 0) {
      return Math.min(seconds * 1e3, cap);
    }
    // HTTP-date form always starts with a day name; only attempt date
    // parsing for non-numeric values so strings like "0" keep falling
    // through to the backoff path.
    if (/^[A-Za-z]/.test(retryAfter)) {
      const dateMs = Date.parse(retryAfter);
      if (Number.isFinite(dateMs)) {
        const deltaMs = dateMs - Date.now();
        if (deltaMs > 0) return Math.min(deltaMs, cap);
      }
    }
  }
  // Exponential backoff: initial * 2^attempt, jittered into [0.75x, 1.25x].
  const exp = initial * 2 ** attempt;
  const jitter = exp * (0.75 + Math.random() * 0.5);
  return Math.min(Math.max(jitter, 0), cap);
}
|
|
48
|
+
/**
 * Abortable sleep.
 *
 * Resolves after `ms` milliseconds, or rejects with Error("aborted") if the
 * signal fires first. Resolves immediately for ms <= 0.
 *
 * Fix: the abort listener is detached when the timer fires, so repeated
 * sleeps against one long-lived signal (as in a retry loop) no longer
 * accumulate one leaked "abort" handler per call.
 *
 * @param {number} ms duration in milliseconds
 * @param {AbortSignal} [signal] optional cancellation signal
 * @returns {Promise<void>}
 */
function sleep(ms, signal) {
  if (ms <= 0) return Promise.resolve();
  return new Promise((resolve2, reject) => {
    let onAbort;
    const timer = setTimeout(() => {
      // Detach so the signal does not retain one handler per sleep() call.
      if (onAbort && signal) signal.removeEventListener("abort", onAbort);
      resolve2();
    }, ms);
    if (signal) {
      onAbort = () => {
        clearTimeout(timer);
        reject(new Error("aborted"));
      };
      if (signal.aborted) onAbort();
      else signal.addEventListener("abort", onAbort, { once: true });
    }
  });
}
|
|
62
|
+
/**
 * True when `err` is an object whose `name` is "AbortError"
 * (the DOM/fetch cancellation error shape).
 *
 * @param {unknown} err
 * @returns {boolean}
 */
function isAbortError(err) {
  return typeof err === "object" && err !== null && err.name === "AbortError";
}
|
|
67
|
+
/**
 * Best-effort human-readable message for an arbitrary thrown value.
 *
 * Errors yield their `.message`; anything else is coerced with String(),
 * guarding against values whose coercion itself throws.
 *
 * @param {unknown} err
 * @returns {string}
 */
function messageOf(err) {
  if (err instanceof Error) {
    return err.message;
  }
  let text;
  try {
    text = String(err);
  } catch {
    text = "unknown error";
  }
  return text;
}
|
|
75
|
+
|
|
76
|
+
// src/client.ts
|
|
77
|
+
/**
 * Token accounting for a single API response, including prompt-cache
 * hit/miss counts. All counters default to 0.
 */
var Usage = class _Usage {
  promptTokens;
  completionTokens;
  totalTokens;
  promptCacheHitTokens;
  promptCacheMissTokens;
  constructor(promptTokens = 0, completionTokens = 0, totalTokens = 0, promptCacheHitTokens = 0, promptCacheMissTokens = 0) {
    this.promptTokens = promptTokens;
    this.completionTokens = completionTokens;
    this.totalTokens = totalTokens;
    this.promptCacheHitTokens = promptCacheHitTokens;
    this.promptCacheMissTokens = promptCacheMissTokens;
  }
  /** Fraction of prompt-cache lookups that hit; 0 when no cache traffic. */
  get cacheHitRatio() {
    const lookups = this.promptCacheHitTokens + this.promptCacheMissTokens;
    if (lookups === 0) return 0;
    return this.promptCacheHitTokens / lookups;
  }
  /**
   * Build a Usage from a raw snake_case API `usage` object.
   * Tolerates null/undefined input and missing fields.
   */
  static fromApi(raw) {
    const source = raw ?? {};
    return new _Usage(
      source.prompt_tokens ?? 0,
      source.completion_tokens ?? 0,
      source.total_tokens ?? 0,
      source.prompt_cache_hit_tokens ?? 0,
      source.prompt_cache_miss_tokens ?? 0
    );
  }
};
|
|
105
|
+
/**
 * Minimal DeepSeek chat-completions client.
 *
 * Supports one-shot requests (`chat`) and SSE streaming (`stream`), both
 * routed through `fetchWithRetry`. The API key comes from options or the
 * DEEPSEEK_API_KEY environment variable; the base URL from options,
 * DEEPSEEK_BASE_URL, or the public default.
 */
var DeepSeekClient = class {
  apiKey;
  baseUrl;
  timeoutMs;
  // Retry options forwarded to fetchWithRetry on every request.
  retry;
  // Injectable fetch implementation (defaults to the global fetch).
  _fetch;
  constructor(opts = {}) {
    const apiKey = opts.apiKey ?? process.env.DEEPSEEK_API_KEY;
    if (!apiKey) {
      throw new Error(
        "DEEPSEEK_API_KEY is not set. Put it in .env or pass apiKey to DeepSeekClient."
      );
    }
    this.apiKey = apiKey;
    // Strip trailing slashes so path joins below never double up.
    this.baseUrl = (opts.baseUrl ?? process.env.DEEPSEEK_BASE_URL ?? "https://api.deepseek.com").replace(/\/+$/, "");
    this.timeoutMs = opts.timeoutMs ?? 12e4;
    // Bind to globalThis so the default fetch keeps its required receiver.
    this._fetch = opts.fetch ?? globalThis.fetch.bind(globalThis);
    this.retry = opts.retry ?? {};
  }
  /**
   * Translate camelCase request options into the snake_case wire payload.
   * Optional fields are only included when explicitly provided.
   */
  buildPayload(opts, stream) {
    const payload = {
      model: opts.model,
      messages: opts.messages,
      stream
    };
    if (opts.tools?.length) payload.tools = opts.tools;
    if (opts.temperature !== void 0) payload.temperature = opts.temperature;
    if (opts.maxTokens !== void 0) payload.max_tokens = opts.maxTokens;
    if (opts.responseFormat) payload.response_format = opts.responseFormat;
    return payload;
  }
  /**
   * One-shot (non-streaming) chat completion.
   *
   * Returns { content, reasoningContent, toolCalls, usage, raw }; missing
   * response fields default to ""/null/[] respectively.
   * Throws on non-OK responses after retries are exhausted.
   *
   * NOTE(review): when opts.signal is provided it fully replaces the internal
   * timeout controller's signal, so this.timeoutMs does not abort the request
   * in that case — confirm whether that is intended.
   */
  async chat(opts) {
    const ctrl = new AbortController();
    const timer = setTimeout(() => ctrl.abort(), this.timeoutMs);
    const signal = opts.signal ?? ctrl.signal;
    try {
      const resp = await fetchWithRetry(
        this._fetch,
        `${this.baseUrl}/chat/completions`,
        {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.apiKey}`,
            "Content-Type": "application/json"
          },
          body: JSON.stringify(this.buildPayload(opts, false)),
          signal
        },
        { ...this.retry, signal }
      );
      if (!resp.ok) {
        throw new Error(`DeepSeek ${resp.status}: ${await resp.text()}`);
      }
      const data = await resp.json();
      const choice = data.choices?.[0]?.message ?? {};
      return {
        content: choice.content ?? "",
        reasoningContent: choice.reasoning_content ?? null,
        toolCalls: choice.tool_calls ?? [],
        usage: Usage.fromApi(data.usage),
        raw: data
      };
    } finally {
      // Always cancel the timeout, on success and on every error path.
      clearTimeout(timer);
    }
  }
  /**
   * Streaming chat completion as an async generator of chunk objects.
   *
   * Each yielded chunk carries { raw, finishReason } plus, when present in
   * the SSE delta: contentDelta, reasoningDelta, toolCallDelta, usage.
   * Only the first entry of delta.tool_calls is surfaced per chunk.
   * Ends when the server sends an empty/[DONE] event or closes the body.
   *
   * NOTE(review): same timeout caveat as chat() when opts.signal is given.
   */
  async *stream(opts) {
    const ctrl = new AbortController();
    const timer = setTimeout(() => ctrl.abort(), this.timeoutMs);
    const signal = opts.signal ?? ctrl.signal;
    let resp;
    try {
      resp = await fetchWithRetry(
        this._fetch,
        `${this.baseUrl}/chat/completions`,
        {
          method: "POST",
          headers: {
            Authorization: `Bearer ${this.apiKey}`,
            "Content-Type": "application/json",
            Accept: "text/event-stream"
          },
          body: JSON.stringify(this.buildPayload(opts, true)),
          signal
        },
        { ...this.retry, signal }
      );
    } catch (err) {
      clearTimeout(timer);
      throw err;
    }
    if (!resp.ok || !resp.body) {
      clearTimeout(timer);
      throw new Error(`DeepSeek ${resp.status}: ${await resp.text().catch(() => "")}`);
    }
    // The SSE parser pushes parsed chunks here; the read loop below drains it.
    const queue = [];
    let done = false;
    const parser = createParser({
      onEvent: (ev) => {
        // Empty data or the [DONE] sentinel terminates the stream.
        if (!ev.data || ev.data === "[DONE]") {
          done = true;
          return;
        }
        try {
          const json = JSON.parse(ev.data);
          const delta = json.choices?.[0]?.delta ?? {};
          const finishReason = json.choices?.[0]?.finish_reason ?? void 0;
          const chunk = { raw: json, finishReason };
          if (typeof delta.content === "string" && delta.content.length > 0) {
            chunk.contentDelta = delta.content;
          }
          if (typeof delta.reasoning_content === "string" && delta.reasoning_content.length > 0) {
            chunk.reasoningDelta = delta.reasoning_content;
          }
          if (Array.isArray(delta.tool_calls) && delta.tool_calls.length > 0) {
            const tc = delta.tool_calls[0];
            chunk.toolCallDelta = {
              index: tc.index ?? 0,
              id: tc.id,
              name: tc.function?.name,
              argumentsDelta: tc.function?.arguments
            };
          }
          if (json.usage) {
            chunk.usage = Usage.fromApi(json.usage);
          }
          queue.push(chunk);
        } catch {
          // Malformed SSE data lines are silently skipped by design.
        }
      }
    });
    const reader = resp.body.getReader();
    const decoder = new TextDecoder();
    try {
      while (true) {
        // Yield everything already parsed before reading more bytes.
        if (queue.length > 0) {
          yield queue.shift();
          continue;
        }
        if (done) break;
        const { value, done: streamDone } = await reader.read();
        if (streamDone) break;
        // stream:true keeps multi-byte UTF-8 sequences intact across reads.
        parser.feed(decoder.decode(value, { stream: true }));
      }
      // Flush chunks parsed during the final feed.
      while (queue.length > 0) yield queue.shift();
    } finally {
      clearTimeout(timer);
      reader.releaseLock();
    }
  }
};
|
|
6
256
|
|
|
7
257
|
// src/harvest.ts
|
|
8
258
|
function emptyPlanState() {
|
|
@@ -94,6 +344,66 @@ function sanitizeArray(raw, maxItems, maxItemLen) {
|
|
|
94
344
|
return out;
|
|
95
345
|
}
|
|
96
346
|
|
|
347
|
+
// src/consistency.ts
|
|
348
|
+
/**
 * Default branch selector: prefer the sample with the fewest harvested
 * uncertainties, breaking ties by shorter response content.
 *
 * @throws {Error} when `samples` is empty
 */
var defaultSelector = (samples) => {
  if (samples.length === 0) throw new Error("defaultSelector: samples is empty");
  // Sort a copy so the caller's array order is untouched.
  const ranked = [...samples];
  ranked.sort((left, right) => {
    const byUncertainty = left.planState.uncertainties.length - right.planState.uncertainties.length;
    if (byUncertainty !== 0) return byUncertainty;
    const leftLen = left.response.content?.length ?? 0;
    const rightLen = right.response.content?.length ?? 0;
    return leftLen - rightLen;
  });
  return ranked[0];
};
|
|
358
|
+
/**
 * Run `budget` parallel chat samples at spread temperatures, harvest each
 * one's reasoning into a plan state, and pick a winner with the selector.
 *
 * `opts.onSampleDone` is invoked per completed sample; its exceptions are
 * deliberately swallowed so observer bugs cannot break branching.
 *
 * @returns {Promise<{chosen: object, samples: object[]}>}
 */
async function runBranches(client, request, opts = {}) {
  const budget = Math.max(1, opts.budget ?? 1);
  const temps = resolveTemperatures(budget, opts.temperatures);
  const pick = opts.selector ?? defaultSelector;
  const runOne = async (temperature, index) => {
    const response = await client.chat({ ...request, temperature });
    const planState = await harvest(response.reasoningContent, client, opts.harvestOptions);
    const sample = { index, temperature, response, planState };
    try {
      opts.onSampleDone?.(sample);
    } catch {
    }
    return sample;
  };
  // All branches run concurrently; Promise.all preserves input order.
  const samples = await Promise.all(temps.map(runOne));
  return { chosen: pick(samples), samples };
}
|
|
376
|
+
/**
 * Sum token usage across all branch samples.
 *
 * @param {Array<{response: {usage: object}}>} samples
 * @returns {{promptTokens: number, completionTokens: number, totalTokens: number,
 *   promptCacheHitTokens: number, promptCacheMissTokens: number}} plain totals object
 */
function aggregateBranchUsage(samples) {
  const totals = {
    promptTokens: 0,
    completionTokens: 0,
    totalTokens: 0,
    promptCacheHitTokens: 0,
    promptCacheMissTokens: 0
  };
  for (const sample of samples) {
    const { usage } = sample.response;
    totals.promptTokens += usage.promptTokens;
    totals.completionTokens += usage.completionTokens;
    totals.totalTokens += usage.totalTokens;
    totals.promptCacheHitTokens += usage.promptCacheHitTokens;
    totals.promptCacheMissTokens += usage.promptCacheMissTokens;
  }
  return totals;
}
|
|
397
|
+
/**
 * Produce `budget` sampling temperatures.
 *
 * Uses the caller's list when it covers the whole budget (truncated to
 * size); otherwise spreads values evenly across [0, 1], rounded to two
 * decimals. A budget of 1 always yields [0].
 *
 * @param {number} budget number of temperatures required
 * @param {number[]} [custom] caller-supplied temperatures
 * @returns {number[]}
 */
function resolveTemperatures(budget, custom) {
  if (custom && custom.length >= budget) {
    return custom.slice(0, budget);
  }
  if (budget === 1) {
    return [0];
  }
  const span = budget - 1;
  return Array.from({ length: budget }, (_, i) => Number((i / span).toFixed(2)));
}
|
|
406
|
+
|
|
97
407
|
// src/memory.ts
|
|
98
408
|
import { createHash } from "crypto";
|
|
99
409
|
var ImmutablePrefix = class {
|
|
@@ -611,28 +921,73 @@ var CacheFirstLoop = class {
|
|
|
611
921
|
client;
|
|
612
922
|
prefix;
|
|
613
923
|
tools;
|
|
614
|
-
model;
|
|
615
924
|
maxToolIters;
|
|
616
|
-
stream;
|
|
617
|
-
harvestEnabled;
|
|
618
|
-
harvestOptions;
|
|
619
925
|
log = new AppendOnlyLog();
|
|
620
926
|
scratch = new VolatileScratch();
|
|
621
927
|
stats = new SessionStats();
|
|
622
928
|
repair;
|
|
929
|
+
// Mutable via configure() — slash commands in the TUI / library callers tweak
|
|
930
|
+
// these mid-session so users don't have to restart to try harvest or branch.
|
|
931
|
+
model;
|
|
932
|
+
stream;
|
|
933
|
+
harvestEnabled;
|
|
934
|
+
harvestOptions;
|
|
935
|
+
branchEnabled;
|
|
936
|
+
branchOptions;
|
|
623
937
|
_turn = 0;
|
|
938
|
+
_streamPreference;
|
|
624
939
|
constructor(opts) {
|
|
625
940
|
this.client = opts.client;
|
|
626
941
|
this.prefix = opts.prefix;
|
|
627
942
|
this.tools = opts.tools ?? new ToolRegistry();
|
|
628
943
|
this.model = opts.model ?? "deepseek-chat";
|
|
629
944
|
this.maxToolIters = opts.maxToolIters ?? 8;
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
945
|
+
if (typeof opts.branch === "number") {
|
|
946
|
+
this.branchOptions = { budget: opts.branch };
|
|
947
|
+
} else if (opts.branch && typeof opts.branch === "object") {
|
|
948
|
+
this.branchOptions = opts.branch;
|
|
949
|
+
} else {
|
|
950
|
+
this.branchOptions = {};
|
|
951
|
+
}
|
|
952
|
+
this.branchEnabled = (this.branchOptions.budget ?? 1) > 1;
|
|
953
|
+
const harvestForced = this.branchEnabled;
|
|
954
|
+
this.harvestEnabled = harvestForced || opts.harvest === true || typeof opts.harvest === "object" && opts.harvest !== null;
|
|
955
|
+
this.harvestOptions = typeof opts.harvest === "object" && opts.harvest !== null ? opts.harvest : this.branchOptions.harvestOptions ?? {};
|
|
956
|
+
this._streamPreference = opts.stream ?? true;
|
|
957
|
+
this.stream = this.branchEnabled ? false : this._streamPreference;
|
|
633
958
|
const allowedNames = /* @__PURE__ */ new Set([...this.prefix.toolSpecs.map((s) => s.function.name)]);
|
|
634
959
|
this.repair = new ToolCallRepair({ allowedToolNames: allowedNames });
|
|
635
960
|
}
|
|
961
|
+
/**
|
|
962
|
+
* Reconfigure model/harvest/branch/stream mid-session. The loop's log,
|
|
963
|
+
* scratch, and stats are preserved — only the per-turn behavior changes.
|
|
964
|
+
* Used by the TUI's slash commands and by library callers who want to
|
|
965
|
+
* flip a knob between turns.
|
|
966
|
+
*/
|
|
967
|
+
configure(opts) {
|
|
968
|
+
if (opts.model !== void 0) this.model = opts.model;
|
|
969
|
+
if (opts.stream !== void 0) this._streamPreference = opts.stream;
|
|
970
|
+
if (opts.branch !== void 0) {
|
|
971
|
+
if (typeof opts.branch === "number") {
|
|
972
|
+
this.branchOptions = { budget: opts.branch };
|
|
973
|
+
} else if (opts.branch && typeof opts.branch === "object") {
|
|
974
|
+
this.branchOptions = opts.branch;
|
|
975
|
+
} else {
|
|
976
|
+
this.branchOptions = {};
|
|
977
|
+
}
|
|
978
|
+
this.branchEnabled = (this.branchOptions.budget ?? 1) > 1;
|
|
979
|
+
}
|
|
980
|
+
if (opts.harvest !== void 0) {
|
|
981
|
+
const want = opts.harvest === true || typeof opts.harvest === "object" && opts.harvest !== null;
|
|
982
|
+
this.harvestEnabled = want || this.branchEnabled;
|
|
983
|
+
if (typeof opts.harvest === "object" && opts.harvest !== null) {
|
|
984
|
+
this.harvestOptions = opts.harvest;
|
|
985
|
+
}
|
|
986
|
+
} else if (this.branchEnabled) {
|
|
987
|
+
this.harvestEnabled = true;
|
|
988
|
+
}
|
|
989
|
+
this.stream = this.branchEnabled ? false : this._streamPreference;
|
|
990
|
+
}
|
|
636
991
|
buildMessages(pendingUser) {
|
|
637
992
|
const msgs = [...this.prefix.toMessages(), ...this.log.toMessages()];
|
|
638
993
|
if (pendingUser !== null) msgs.push({ role: "user", content: pendingUser });
|
|
@@ -649,8 +1004,85 @@ var CacheFirstLoop = class {
|
|
|
649
1004
|
let reasoningContent = "";
|
|
650
1005
|
let toolCalls = [];
|
|
651
1006
|
let usage = null;
|
|
1007
|
+
let branchSummary;
|
|
1008
|
+
let preHarvestedPlanState;
|
|
652
1009
|
try {
|
|
653
|
-
if (this.
|
|
1010
|
+
if (this.branchEnabled) {
|
|
1011
|
+
const budget = this.branchOptions.budget ?? 1;
|
|
1012
|
+
yield {
|
|
1013
|
+
turn: this._turn,
|
|
1014
|
+
role: "branch_start",
|
|
1015
|
+
content: "",
|
|
1016
|
+
branchProgress: {
|
|
1017
|
+
completed: 0,
|
|
1018
|
+
total: budget,
|
|
1019
|
+
latestIndex: -1,
|
|
1020
|
+
latestTemperature: -1,
|
|
1021
|
+
latestUncertainties: -1
|
|
1022
|
+
}
|
|
1023
|
+
};
|
|
1024
|
+
const queue = [];
|
|
1025
|
+
let waiter = null;
|
|
1026
|
+
const onSampleDone = (sample) => {
|
|
1027
|
+
if (waiter) {
|
|
1028
|
+
const w = waiter;
|
|
1029
|
+
waiter = null;
|
|
1030
|
+
w(sample);
|
|
1031
|
+
} else {
|
|
1032
|
+
queue.push(sample);
|
|
1033
|
+
}
|
|
1034
|
+
};
|
|
1035
|
+
const branchPromise = runBranches(
|
|
1036
|
+
this.client,
|
|
1037
|
+
{
|
|
1038
|
+
model: this.model,
|
|
1039
|
+
messages,
|
|
1040
|
+
tools: toolSpecs.length ? toolSpecs : void 0
|
|
1041
|
+
},
|
|
1042
|
+
{
|
|
1043
|
+
...this.branchOptions,
|
|
1044
|
+
harvestOptions: this.harvestOptions,
|
|
1045
|
+
onSampleDone
|
|
1046
|
+
}
|
|
1047
|
+
);
|
|
1048
|
+
for (let k = 0; k < budget; k++) {
|
|
1049
|
+
const sample = queue.shift() ?? await new Promise((resolve2) => {
|
|
1050
|
+
waiter = resolve2;
|
|
1051
|
+
});
|
|
1052
|
+
yield {
|
|
1053
|
+
turn: this._turn,
|
|
1054
|
+
role: "branch_progress",
|
|
1055
|
+
content: "",
|
|
1056
|
+
branchProgress: {
|
|
1057
|
+
completed: k + 1,
|
|
1058
|
+
total: budget,
|
|
1059
|
+
latestIndex: sample.index,
|
|
1060
|
+
latestTemperature: sample.temperature,
|
|
1061
|
+
latestUncertainties: sample.planState.uncertainties.length
|
|
1062
|
+
}
|
|
1063
|
+
};
|
|
1064
|
+
}
|
|
1065
|
+
const result = await branchPromise;
|
|
1066
|
+
assistantContent = result.chosen.response.content;
|
|
1067
|
+
reasoningContent = result.chosen.response.reasoningContent ?? "";
|
|
1068
|
+
toolCalls = result.chosen.response.toolCalls;
|
|
1069
|
+
const agg = aggregateBranchUsage(result.samples);
|
|
1070
|
+
usage = new Usage(
|
|
1071
|
+
agg.promptTokens,
|
|
1072
|
+
agg.completionTokens,
|
|
1073
|
+
agg.totalTokens,
|
|
1074
|
+
agg.promptCacheHitTokens,
|
|
1075
|
+
agg.promptCacheMissTokens
|
|
1076
|
+
);
|
|
1077
|
+
preHarvestedPlanState = result.chosen.planState;
|
|
1078
|
+
branchSummary = summarizeBranch(result.chosen, result.samples);
|
|
1079
|
+
yield {
|
|
1080
|
+
turn: this._turn,
|
|
1081
|
+
role: "branch_done",
|
|
1082
|
+
content: "",
|
|
1083
|
+
branch: branchSummary
|
|
1084
|
+
};
|
|
1085
|
+
} else if (this.stream) {
|
|
654
1086
|
const callBuf = /* @__PURE__ */ new Map();
|
|
655
1087
|
for await (const chunk of this.client.stream({
|
|
656
1088
|
model: this.model,
|
|
@@ -710,17 +1142,13 @@ var CacheFirstLoop = class {
|
|
|
710
1142
|
};
|
|
711
1143
|
return;
|
|
712
1144
|
}
|
|
713
|
-
const turnStats = this.stats.record(
|
|
714
|
-
this._turn,
|
|
715
|
-
this.model,
|
|
716
|
-
usage ?? new (await import("./client-KEA2D52Q.js")).Usage()
|
|
717
|
-
);
|
|
1145
|
+
const turnStats = this.stats.record(this._turn, this.model, usage ?? new Usage());
|
|
718
1146
|
if (pendingUser !== null) {
|
|
719
1147
|
this.log.append({ role: "user", content: pendingUser });
|
|
720
1148
|
pendingUser = null;
|
|
721
1149
|
}
|
|
722
1150
|
this.scratch.reasoning = reasoningContent || null;
|
|
723
|
-
const planState = this.harvestEnabled ? await harvest(reasoningContent || null, this.client, this.harvestOptions) : emptyPlanState();
|
|
1151
|
+
const planState = preHarvestedPlanState ? preHarvestedPlanState : this.harvestEnabled ? await harvest(reasoningContent || null, this.client, this.harvestOptions) : emptyPlanState();
|
|
724
1152
|
const { calls: repairedCalls, report } = this.repair.process(
|
|
725
1153
|
toolCalls,
|
|
726
1154
|
reasoningContent || null
|
|
@@ -732,7 +1160,8 @@ var CacheFirstLoop = class {
|
|
|
732
1160
|
content: assistantContent,
|
|
733
1161
|
stats: turnStats,
|
|
734
1162
|
planState,
|
|
735
|
-
repair: report
|
|
1163
|
+
repair: report,
|
|
1164
|
+
branch: branchSummary
|
|
736
1165
|
};
|
|
737
1166
|
if (repairedCalls.length === 0) {
|
|
738
1167
|
yield { turn: this._turn, role: "done", content: assistantContent };
|
|
@@ -768,6 +1197,14 @@ var CacheFirstLoop = class {
|
|
|
768
1197
|
return msg;
|
|
769
1198
|
}
|
|
770
1199
|
};
|
|
1200
|
+
/**
 * Build a compact summary of a completed branch run: the budget, which
 * sample won, and per-sample uncertainty counts and temperatures.
 *
 * @param {object} chosen the winning sample
 * @param {object[]} samples all samples, in index order
 * @returns {{budget: number, chosenIndex: number, uncertainties: number[], temperatures: number[]}}
 */
function summarizeBranch(chosen, samples) {
  const uncertainties = samples.map((sample) => sample.planState.uncertainties.length);
  const temperatures = samples.map((sample) => sample.temperature);
  return {
    budget: samples.length,
    chosenIndex: chosen.index,
    uncertainties,
    temperatures
  };
}
|
|
771
1208
|
|
|
772
1209
|
// src/env.ts
|
|
773
1210
|
import { readFileSync } from "fs";
|
|
@@ -850,10 +1287,12 @@ export {
|
|
|
850
1287
|
Usage,
|
|
851
1288
|
VERSION,
|
|
852
1289
|
VolatileScratch,
|
|
1290
|
+
aggregateBranchUsage,
|
|
853
1291
|
analyzeSchema,
|
|
854
1292
|
claudeEquivalentCost,
|
|
855
1293
|
costUsd,
|
|
856
1294
|
defaultConfigPath,
|
|
1295
|
+
defaultSelector,
|
|
857
1296
|
emptyPlanState,
|
|
858
1297
|
fetchWithRetry,
|
|
859
1298
|
flattenSchema,
|
|
@@ -866,6 +1305,7 @@ export {
|
|
|
866
1305
|
readConfig,
|
|
867
1306
|
redactKey,
|
|
868
1307
|
repairTruncatedJson,
|
|
1308
|
+
runBranches,
|
|
869
1309
|
saveApiKey,
|
|
870
1310
|
scavengeToolCalls,
|
|
871
1311
|
writeConfig
|