@jaex/dstsx 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2089 -0
- package/dist/index.cjs +2883 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +1597 -0
- package/dist/index.d.ts +1597 -0
- package/dist/index.js +2781 -0
- package/dist/index.js.map +1 -0
- package/package.json +113 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,2781 @@
|
|
|
1
|
+
// src/signatures/Field.ts
/**
 * Create an input-field descriptor.
 * Defaults `type` to "string"; any key in `meta` (including `type`) overrides it.
 */
function InputField(meta = {}) {
  const descriptor = Object.assign({ type: "string" }, meta);
  return descriptor;
}
|
|
5
|
+
/**
 * Create an output-field descriptor.
 * Defaults `type` to "string"; any key in `meta` (including `type`) overrides it.
 */
function OutputField(meta = {}) {
  const descriptor = Object.assign({ type: "string" }, meta);
  return descriptor;
}
|
|
8
|
+
|
|
9
|
+
// src/signatures/Signature.ts
/**
 * A typed prompt signature: named input fields, named output fields, and
 * optional free-form instructions. Instances are treated as immutable —
 * every mutation helper returns a new Signature.
 */
var Signature = class _Signature {
  inputs;
  outputs;
  instructions;
  constructor(meta) {
    this.inputs = meta.inputs;
    this.outputs = meta.outputs;
    this.instructions = meta.instructions;
  }
  // ---------------------------------------------------------------------------
  // Factory helpers
  // ---------------------------------------------------------------------------
  /**
   * Parse a shorthand signature string of the form:
   * `"field1, field2? -> out1, out2"`
   *
   * A trailing `?` marks the field as optional.
   *
   * @throws {Error} if the shorthand does not contain exactly one `->`.
   */
  static from(shorthand, instructions) {
    const parts = shorthand.split("->").map((s) => s.trim());
    // Require exactly one arrow: previously a second "->" was silently dropped,
    // so "a -> b -> c" quietly parsed as "a -> b".
    if (parts.length !== 2) {
      throw new Error(
        `Invalid signature shorthand "${shorthand}". Expected "inputs -> outputs".`
      );
    }
    const [inputPart, outputPart] = parts;
    // Parse a comma-separated field list into a Map using the given field factory.
    const parseFields = (part, makeField) => {
      const map = /* @__PURE__ */ new Map();
      for (const raw of part.split(",")) {
        const trimmed = raw.trim();
        const isOptional = trimmed.endsWith("?");
        const name = trimmed.replace(/\?$/, "");
        if (name.length === 0) continue;
        map.set(name, makeField(isOptional ? { optional: true } : {}));
      }
      return map;
    };
    const inputs = parseFields(inputPart, InputField);
    const outputs = parseFields(outputPart, OutputField);
    return new _Signature({ inputs, outputs, instructions });
  }
  // ---------------------------------------------------------------------------
  // Mutation helpers (return new Signature; never mutates)
  // ---------------------------------------------------------------------------
  /**
   * Return a new Signature with additional or overridden fields / instructions.
   * Provided Maps are defensively copied so later external mutation of the
   * caller's Map cannot leak into the new Signature.
   */
  with(overrides) {
    return new _Signature({
      inputs: new Map(overrides.inputs ?? this.inputs),
      outputs: new Map(overrides.outputs ?? this.outputs),
      instructions: overrides.instructions ?? this.instructions
    });
  }
  /**
   * Append an extra input field and return a new Signature.
   */
  withInput(name, meta = {}) {
    const inputs = new Map(this.inputs);
    inputs.set(name, InputField(meta));
    return this.with({ inputs });
  }
  /**
   * Append an extra output field and return a new Signature.
   */
  withOutput(name, meta = {}) {
    const outputs = new Map(this.outputs);
    outputs.set(name, OutputField(meta));
    return this.with({ outputs });
  }
  // ---------------------------------------------------------------------------
  // Serialization
  // ---------------------------------------------------------------------------
  /** Serialize to a plain JSON-compatible object (Maps become objects). */
  toJSON() {
    return {
      inputs: Object.fromEntries(this.inputs),
      outputs: Object.fromEntries(this.outputs),
      instructions: this.instructions
    };
  }
  /** Rebuild a Signature from the shape produced by `toJSON`. */
  static fromJSON(json) {
    // Tolerate missing/invalid sections by falling back to an empty Map.
    const toMap = (obj) => {
      if (typeof obj !== "object" || obj === null) return /* @__PURE__ */ new Map();
      return new Map(Object.entries(obj));
    };
    return new _Signature({
      inputs: toMap(json["inputs"]),
      outputs: toMap(json["outputs"]),
      instructions: typeof json["instructions"] === "string" ? json["instructions"] : void 0
    });
  }
};
|
|
104
|
+
|
|
105
|
+
// src/primitives/Example.ts
/**
 * An immutable record of key/value pairs used as a training / few-shot
 * example. The backing record is shallow-frozen at construction time.
 */
var Example = class _Example {
  #data;
  constructor(data) {
    this.#data = Object.freeze({ ...data });
  }
  /** Return the value for `key`, or `undefined` if absent. */
  get(key) {
    return this.#data[key];
  }
  /** Return the (shallow-frozen) underlying data record. */
  toDict() {
    return this.#data;
  }
  /**
   * Return a new Example with the provided key-value pairs merged in
   * (override keys win over existing ones).
   */
  with(overrides) {
    return new _Example({ ...this.#data, ...overrides });
  }
  /**
   * Return a new Example containing only the keys listed in `keys`.
   */
  inputs(keys) {
    const picked = {};
    for (const key of keys) {
      if (key in this.#data) picked[key] = this.#data[key];
    }
    return new _Example(picked);
  }
  /**
   * Return a new Example containing only the keys NOT listed in `inputKeys`
   * (i.e. the label / output keys).
   */
  labels(inputKeys) {
    const excluded = new Set(inputKeys);
    const rest = {};
    for (const [key, value] of Object.entries(this.#data)) {
      if (!excluded.has(key)) rest[key] = value;
    }
    return new _Example(rest);
  }
  // ---------------------------------------------------------------------------
  // Serialization
  // ---------------------------------------------------------------------------
  /** Return a mutable shallow copy of the data for JSON serialization. */
  toJSON() {
    return { ...this.#data };
  }
  /** Construct an Example from a plain record. */
  static fromDict(data) {
    return new _Example(data);
  }
};
|
|
154
|
+
|
|
155
|
+
// src/primitives/Prediction.ts
/**
 * An Example produced by a language model. Carries the full list of candidate
 * completions alongside the primary record.
 */
var Prediction = class extends Example {
  /** All candidate completions when `n > 1` was requested (frozen copy). */
  completions;
  constructor(data, completions = []) {
    super(data);
    this.completions = Object.freeze([...completions]);
  }
  /** Typed accessor — casts the value to `T` (caller is responsible for type safety). */
  getTyped(key) {
    return this.get(key);
  }
  // ---------------------------------------------------------------------------
  // Serialization
  // ---------------------------------------------------------------------------
  /** Serialize the record plus its candidate completions. */
  toJSON() {
    const base = super.toJSON();
    return { ...base, completions: this.completions };
  }
};
|
|
177
|
+
|
|
178
|
+
// src/primitives/majority.ts
/**
 * Build a reducer that picks the most common value of `field` among a set of
 * predictions (majority vote). Values are compared by their JSON
 * serialization; ties are broken in favor of the value seen first.
 *
 * @throws {Error} when the reducer is called with an empty array.
 */
function majority(field = "answer") {
  return (predictions) => {
    if (predictions.length === 0) {
      throw new Error("majority: empty predictions array");
    }
    const tally = /* @__PURE__ */ new Map();
    for (const prediction of predictions) {
      const serialized = JSON.stringify(prediction.get(field));
      tally.set(serialized, (tally.get(serialized) ?? 0) + 1);
    }
    // Map iteration follows insertion order, so strict ">" keeps the
    // first-seen value on ties — matching first-past-the-post semantics.
    let winner = "";
    let winnerCount = 0;
    tally.forEach((count, serialized) => {
      if (count > winnerCount) {
        winnerCount = count;
        winner = serialized;
      }
    });
    return predictions.find((p) => JSON.stringify(p.get(field)) === winner);
  };
}
|
|
200
|
+
|
|
201
|
+
// src/primitives/Image.ts
|
|
202
|
+
import { readFileSync } from "fs";
|
|
203
|
+
/**
 * A multimodal image value, backed by either a remote URL or inline base64
 * data plus a MIME type.
 */
var Image = class _Image {
  url;
  base64;
  mimeType;
  constructor(init) {
    this.url = init.url;
    this.base64 = init.base64;
    this.mimeType = init.mimeType;
  }
  /** Create an Image from a URL. */
  static fromURL(url) {
    return new _Image({ url });
  }
  /** Create an Image from base64-encoded data. */
  static fromBase64(data, mimeType = "image/jpeg") {
    return new _Image({ base64: data, mimeType });
  }
  /** Create an Image by reading a local file synchronously. */
  static fromFile(path, mimeType) {
    const base64 = readFileSync(path).toString("base64");
    const extension = path.split(".").pop()?.toLowerCase();
    // Infer a MIME type from the extension; anything unknown falls back to JPEG.
    let fallbackMime;
    switch (extension) {
      case "png":
        fallbackMime = "image/png";
        break;
      case "gif":
        fallbackMime = "image/gif";
        break;
      case "webp":
        fallbackMime = "image/webp";
        break;
      default:
        fallbackMime = "image/jpeg";
    }
    return new _Image({ base64, mimeType: mimeType ?? fallbackMime });
  }
  /** Serialize to an OpenAI-compatible image_url content part. */
  toOpenAIContentPart() {
    if (this.url) {
      return { type: "image_url", image_url: { url: this.url } };
    }
    if (this.base64 && this.mimeType) {
      return { type: "image_url", image_url: { url: `data:${this.mimeType};base64,${this.base64}` } };
    }
    throw new Error("Image: no url or base64 data available");
  }
  /** Serialize to an Anthropic-compatible image content block. */
  toAnthropicContentBlock() {
    if (this.url) {
      return { type: "image", source: { type: "url", url: this.url } };
    }
    if (this.base64 && this.mimeType) {
      return { type: "image", source: { type: "base64", media_type: this.mimeType, data: this.base64 } };
    }
    throw new Error("Image: no url or base64 data available");
  }
  /** Returns a string representation (used when Image is serialized in prompts). */
  toString() {
    if (this.url) {
      return `[Image: ${this.url}]`;
    }
    if (this.base64) {
      return `[Image: base64 data, ${this.mimeType ?? "unknown type"}]`;
    }
    return "[Image]";
  }
};
|
|
255
|
+
|
|
256
|
+
// src/lm/cache.ts
/**
 * A fixed-capacity least-recently-used cache with per-entry TTL.
 *
 * Recency is tracked via Map insertion order: a `get` hit re-inserts the entry
 * at the tail, so the head of the map is always the least recently used key.
 */
var LRUCache = class {
  #maxSize;
  #ttlMs;
  #map = /* @__PURE__ */ new Map();
  /**
   * @param maxSize maximum number of live entries (default 512)
   * @param ttlMs   per-entry time-to-live in ms (default 1 hour)
   */
  constructor(maxSize = 512, ttlMs = 6e4 * 60) {
    this.#maxSize = maxSize;
    this.#ttlMs = ttlMs;
  }
  /** Return the cached value and refresh its recency; `undefined` on miss or expiry. */
  get(key) {
    const entry = this.#map.get(key);
    if (!entry) return void 0;
    if (Date.now() > entry.expiresAt) {
      // Lazy expiry: drop the stale entry on access.
      this.#map.delete(key);
      return void 0;
    }
    // Re-insert to mark this key as most recently used.
    this.#map.delete(key);
    this.#map.set(key, entry);
    return entry.value;
  }
  /** Insert or replace `key`, evicting the least recently used entry if at capacity. */
  set(key, value) {
    if (this.#map.has(key)) this.#map.delete(key);
    else if (this.#map.size >= this.#maxSize) {
      const oldest = this.#map.keys().next().value;
      if (oldest !== void 0) this.#map.delete(oldest);
    }
    this.#map.set(key, { value, expiresAt: Date.now() + this.#ttlMs });
  }
  /**
   * Report whether `key` holds a live (non-expired) entry.
   *
   * Unlike `get`, this does NOT refresh recency — previously `has` delegated
   * to `get`, so merely probing a key promoted it in LRU order (keeping
   * cold-but-probed keys alive at the expense of hot ones). Expired entries
   * are still dropped lazily.
   */
  has(key) {
    const entry = this.#map.get(key);
    if (!entry) return false;
    if (Date.now() > entry.expiresAt) {
      this.#map.delete(key);
      return false;
    }
    return true;
  }
  /** Remove `key` if present. */
  delete(key) {
    this.#map.delete(key);
  }
  /** Drop every entry. */
  clear() {
    this.#map.clear();
  }
  /** Number of entries currently stored (may include not-yet-collected expired ones). */
  get size() {
    return this.#map.size;
  }
};
|
|
297
|
+
|
|
298
|
+
// src/lm/DiskCache.ts
|
|
299
|
+
import { createHash } from "crypto";
|
|
300
|
+
import {
|
|
301
|
+
readFileSync as readFileSync2,
|
|
302
|
+
writeFileSync,
|
|
303
|
+
readdirSync,
|
|
304
|
+
unlinkSync,
|
|
305
|
+
mkdirSync,
|
|
306
|
+
statSync
|
|
307
|
+
} from "fs";
|
|
308
|
+
import { join } from "path";
|
|
309
|
+
/**
 * A JSON-file-per-entry disk cache with optional TTL and mtime-based
 * oldest-first eviction. Keys are hashed (sha256, 16 hex chars) into file
 * names. All filesystem errors are deliberately swallowed: the cache is
 * strictly best-effort.
 */
var DiskCache = class {
  #cacheDir;
  #maxSize;
  #ttlMs;
  /**
   * @param cacheDir directory for cache files (created if missing)
   * @param maxSize  maximum number of cache files kept on disk (default 500)
   * @param ttlMs    optional per-entry time-to-live in ms (no expiry when omitted)
   */
  constructor(cacheDir, maxSize = 500, ttlMs) {
    this.#cacheDir = cacheDir;
    this.#maxSize = maxSize;
    this.#ttlMs = ttlMs;
    mkdirSync(cacheDir, { recursive: true });
  }
  /** Read the cached value for `key`; `undefined` on miss, expiry, or any I/O error. */
  get(key) {
    const filePath = this.#pathFor(key);
    try {
      const entry = JSON.parse(readFileSync2(filePath, "utf8"));
      const expired = entry.expiresAt !== null && Date.now() > entry.expiresAt;
      if (expired) {
        unlinkSync(filePath);
        return void 0;
      }
      return entry.value;
    } catch {
      // Missing file, bad JSON, or unlink failure all read as a cache miss.
      return void 0;
    }
  }
  /** Persist `value` under `key`, evicting oldest files first when at capacity. */
  set(key, value) {
    this.#evictIfNeeded();
    const expiresAt = this.#ttlMs != null ? Date.now() + this.#ttlMs : null;
    writeFileSync(this.#pathFor(key), JSON.stringify({ key, value, expiresAt }), "utf8");
  }
  /** Delete every `.json` cache file; errors are ignored. */
  clear() {
    try {
      for (const file of readdirSync(this.#cacheDir)) {
        if (!file.endsWith(".json")) continue;
        try {
          unlinkSync(join(this.#cacheDir, file));
        } catch {
          // Best-effort: skip files we cannot remove.
        }
      }
    } catch {
      // Directory unreadable — nothing to clear.
    }
  }
  /** Map a cache key to its on-disk path via a truncated sha256 digest. */
  #pathFor(key) {
    const digest = createHash("sha256").update(key).digest("hex").slice(0, 16);
    return join(this.#cacheDir, `${digest}.json`);
  }
  /** Remove the oldest files (by mtime) so one more entry fits under maxSize. */
  #evictIfNeeded() {
    let files;
    try {
      files = readdirSync(this.#cacheDir).filter((f) => f.endsWith(".json")).map((f) => {
        const fullPath = join(this.#cacheDir, f);
        try {
          return { name: f, mtime: statSync(fullPath).mtimeMs };
        } catch {
          // Unstat-able files sort first and get evicted eagerly.
          return { name: f, mtime: 0 };
        }
      });
    } catch {
      return;
    }
    if (files.length < this.#maxSize) return;
    files.sort((a, b) => a.mtime - b.mtime);
    const surplus = files.length - this.#maxSize + 1;
    for (const stale of files.slice(0, surplus)) {
      try {
        unlinkSync(join(this.#cacheDir, stale.name));
      } catch {
        // Ignore: another process may have removed it already.
      }
    }
  }
};
|
|
384
|
+
|
|
385
|
+
// src/lm/LM.ts
/**
 * Abstract base class for language-model adapters.
 *
 * Handles response caching (in-memory LRU plus an optional disk cache),
 * request counting, and token-usage accounting. Subclasses implement `_call`
 * and may override `stream` with real streaming.
 */
var LM = class {
  /** Human-readable name / identifier for this model instance. */
  model;
  #cache;
  #diskCache;
  #requestCount = 0;
  #tokenUsage = { promptTokens: 0, completionTokens: 0, totalTokens: 0 };
  constructor(model, cacheOptions = {}) {
    this.model = model;
    this.#cache = new LRUCache(cacheOptions.maxSize, cacheOptions.ttlMs);
    this.#diskCache = cacheOptions.cacheDir !== void 0 ? new DiskCache(cacheOptions.cacheDir, cacheOptions.maxSize, cacheOptions.ttlMs) : void 0;
  }
  // ---------------------------------------------------------------------------
  // Public API
  // ---------------------------------------------------------------------------
  /**
   * Call the language model with either a plain string prompt or a list of
   * chat messages. Responses are served from cache when possible; only
   * uncached calls increment `requestCount` and token usage.
   */
  async call(prompt, config = {}) {
    const cacheKey = config.cacheKey ?? this.#buildCacheKey(prompt, config);
    // Explicit `!== void 0` checks instead of truthiness: a falsy-but-present
    // cached response must still count as a hit.
    const cached = this.#cache.get(cacheKey);
    if (cached !== void 0) return cached;
    if (this.#diskCache) {
      const diskCached = this.#diskCache.get(cacheKey);
      if (diskCached !== void 0) {
        // Promote the disk hit into the in-memory cache for faster re-reads.
        this.#cache.set(cacheKey, diskCached);
        return diskCached;
      }
    }
    const response = await this._call(prompt, config);
    this.#cache.set(cacheKey, response);
    if (this.#diskCache) {
      this.#diskCache.set(cacheKey, response);
    }
    this.#requestCount += 1;
    if (response.usage) {
      this.#tokenUsage.promptTokens += response.usage.promptTokens;
      this.#tokenUsage.completionTokens += response.usage.completionTokens;
      this.#tokenUsage.totalTokens += response.usage.totalTokens;
    }
    return response;
  }
  /** Total number of (non-cached) API calls made. */
  get requestCount() {
    return this.#requestCount;
  }
  /** Accumulated token usage across all (non-cached) calls (defensive copy). */
  get tokenUsage() {
    return { ...this.#tokenUsage };
  }
  /** Clear the in-memory response cache. */
  clearCache() {
    this.#cache.clear();
  }
  /**
   * Stream the language model response token by token.
   *
   * Returns an `AsyncIterable<StreamChunk>`. The last chunk has `done: true`.
   * Subclasses override this to provide real streaming; the base implementation
   * falls back to calling {@link LM.call} and yielding the full response as a
   * single chunk.
   */
  async *stream(prompt, config = {}) {
    const response = await this.call(prompt, config);
    yield { delta: response.text, done: true, raw: response.raw };
  }
  // ---------------------------------------------------------------------------
  // Private helpers
  // ---------------------------------------------------------------------------
  /** Build a deterministic cache key from the prompt plus the config knobs that affect output. */
  #buildCacheKey(prompt, config) {
    const promptStr = typeof prompt === "string" ? prompt : JSON.stringify(prompt);
    const configStr = JSON.stringify({
      model: config.model ?? this.model,
      temperature: config.temperature,
      maxTokens: config.maxTokens,
      stop: config.stop,
      n: config.n
    });
    return `${promptStr}|||${configStr}`;
  }
};
|
|
468
|
+
|
|
469
|
+
// src/lm/adapters/OpenAI.ts
/**
 * LM adapter for the OpenAI Chat Completions API.
 * Requires the optional `openai` package at runtime (loaded lazily).
 */
var OpenAI = class extends LM {
  #options;
  constructor(options = {}) {
    super(options.model ?? "gpt-4o");
    this.#options = options;
  }
  /**
   * Dynamically import the `openai` SDK and build a configured client.
   * Extracted because `_call` and `stream` previously duplicated this
   * construction (and its error message) verbatim.
   */
  async #makeClient() {
    const { default: OpenAIClient } = await import("openai").catch(() => {
      throw new Error(
        "The `openai` package is required for the OpenAI adapter.\nInstall it with: npm install openai"
      );
    });
    return new OpenAIClient({
      apiKey: this.#options.apiKey ?? process.env["OPENAI_API_KEY"],
      baseURL: this.#options.baseURL,
      maxRetries: this.#options.maxRetries ?? 3
    });
  }
  /** Normalize a string prompt into a single-message chat array. */
  #toMessages(prompt) {
    return typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
  }
  /** One-shot (non-streaming) completion; returns all candidate texts plus usage. */
  async _call(prompt, config) {
    const client = await this.#makeClient();
    const messages = this.#toMessages(prompt);
    const response = await client.chat.completions.create({
      model: config.model ?? this.model,
      messages,
      temperature: config.temperature,
      max_tokens: config.maxTokens,
      stop: config.stop,
      n: config.n ?? 1,
      ...config.extra ?? {}
    });
    const texts = (response.choices ?? []).map(
      (c) => c.message?.content ?? ""
    );
    return {
      text: texts[0] ?? "",
      texts,
      usage: response.usage ? {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens,
        totalTokens: response.usage.total_tokens
      } : null,
      raw: response
    };
  }
  /** Real token-by-token streaming via the SSE chat-completions stream. */
  async *stream(prompt, config = {}) {
    const client = await this.#makeClient();
    const messages = this.#toMessages(prompt);
    const stream = await client.chat.completions.create({
      model: config.model ?? this.model,
      messages,
      temperature: config.temperature,
      max_tokens: config.maxTokens,
      stop: config.stop,
      stream: true,
      ...config.extra ?? {}
    });
    for await (const chunk of stream) {
      const choice = chunk.choices?.[0];
      const delta = choice?.delta?.content ?? "";
      const done = choice?.finish_reason != null;
      yield { delta, done, raw: chunk };
      if (done) break;
    }
  }
};
|
|
541
|
+
|
|
542
|
+
// src/lm/adapters/Anthropic.ts
/**
 * LM adapter for the Anthropic Messages API.
 * Requires the optional `@anthropic-ai/sdk` package at runtime (loaded lazily).
 */
var Anthropic = class extends LM {
  #options;
  constructor(options = {}) {
    super(options.model ?? "claude-3-5-sonnet-20241022");
    this.#options = options;
  }
  /**
   * Dynamically import the `@anthropic-ai/sdk` package and build a configured
   * client. Extracted because `_call` and `stream` previously duplicated this
   * construction (and its error message) verbatim.
   */
  async #makeClient() {
    const { default: Anthropic2 } = await import("@anthropic-ai/sdk").catch(() => {
      throw new Error(
        "The `@anthropic-ai/sdk` package is required for the Anthropic adapter.\nInstall it with: npm install @anthropic-ai/sdk"
      );
    });
    return new Anthropic2({
      apiKey: this.#options.apiKey ?? process.env["ANTHROPIC_API_KEY"],
      maxRetries: this.#options.maxRetries ?? 3
    });
  }
  /**
   * Split a prompt into the shape the Messages API expects: the first system
   * message (if any) as a top-level `system` string, all non-system turns as
   * the `messages` array.
   */
  #splitMessages(prompt) {
    const msgs = typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
    const systemMsg = msgs.find((m) => m.role === "system");
    const userMsgs = msgs.filter((m) => m.role !== "system");
    return { systemMsg, userMsgs };
  }
  /** One-shot (non-streaming) completion; concatenates all text blocks. */
  async _call(prompt, config) {
    const client = await this.#makeClient();
    const { systemMsg, userMsgs } = this.#splitMessages(prompt);
    const response = await client.messages.create({
      model: config.model ?? this.model,
      max_tokens: config.maxTokens ?? 1024,
      system: systemMsg?.content,
      messages: userMsgs.map((m) => ({ role: m.role, content: m.content })),
      temperature: config.temperature,
      ...config.extra ?? {}
    });
    const text = response.content.filter((b) => b.type === "text").map((b) => b.text ?? "").join("") ?? "";
    return {
      text,
      texts: [text],
      usage: response.usage ? {
        promptTokens: response.usage.input_tokens,
        completionTokens: response.usage.output_tokens,
        totalTokens: response.usage.input_tokens + response.usage.output_tokens
      } : null,
      raw: response
    };
  }
  /** Real streaming via the Messages streaming events (text deltas + stop event). */
  async *stream(prompt, config = {}) {
    const client = await this.#makeClient();
    const { systemMsg, userMsgs } = this.#splitMessages(prompt);
    const stream = client.messages.stream({
      model: config.model ?? this.model,
      max_tokens: config.maxTokens ?? 1024,
      system: systemMsg?.content,
      messages: userMsgs.map((m) => ({ role: m.role, content: m.content })),
      ...config.extra ?? {}
    });
    for await (const event of stream) {
      const e = event;
      if (e.type === "content_block_delta" && e.delta?.type === "text_delta") {
        yield { delta: e.delta.text ?? "", done: false, raw: event };
      } else if (e.type === "message_stop") {
        yield { delta: "", done: true, raw: event };
        break;
      }
    }
  }
};
|
|
613
|
+
|
|
614
|
+
// src/lm/adapters/Cohere.ts
/**
 * LM adapter for the Cohere chat API.
 * Requires the optional `cohere-ai` package at runtime. Chat-message prompts
 * are flattened into a single "role: content" text block.
 */
var Cohere = class extends LM {
  #options;
  constructor(options = {}) {
    super(options.model ?? "command-r-plus");
    this.#options = options;
  }
  async _call(prompt, config) {
    const { CohereClient } = await import("cohere-ai").catch(() => {
      throw new Error(
        "The `cohere-ai` package is required for the Cohere adapter.\nInstall it with: npm install cohere-ai"
      );
    });
    const client = new CohereClient({
      token: this.#options.apiKey ?? process.env["COHERE_API_KEY"]
    });
    const message = typeof prompt === "string" ? prompt : this.#messagesToText(prompt);
    const response = await client.chat({
      model: config.model ?? this.model,
      message,
      temperature: config.temperature,
      maxTokens: config.maxTokens,
      ...config.extra ?? {}
    });
    const responseText = response.text ?? "";
    const tokens = response.meta?.tokens;
    return {
      text: responseText,
      texts: [responseText],
      usage: tokens ? {
        promptTokens: tokens.inputTokens ?? 0,
        completionTokens: tokens.outputTokens ?? 0,
        totalTokens: (tokens.inputTokens ?? 0) + (tokens.outputTokens ?? 0)
      } : null,
      raw: response
    };
  }
  /** Flatten chat messages into "role: content" lines. */
  #messagesToText(messages) {
    const lines = messages.map((m) => `${m.role}: ${m.content}`);
    return lines.join("\n");
  }
};
|
|
654
|
+
|
|
655
|
+
// src/lm/adapters/GoogleAI.ts
/**
 * LM adapter for Google Generative AI (Gemini).
 * Requires the optional `@google/generative-ai` package at runtime.
 * Chat-message prompts are flattened into a single user turn.
 */
var GoogleAI = class extends LM {
  #options;
  constructor(options = {}) {
    super(options.model ?? "gemini-1.5-pro");
    this.#options = options;
  }
  async _call(prompt, config) {
    const { GoogleGenerativeAI } = await import("@google/generative-ai").catch(() => {
      throw new Error(
        "The `@google/generative-ai` package is required for the GoogleAI adapter.\nInstall it with: npm install @google/generative-ai"
      );
    });
    const apiKey = this.#options.apiKey ?? process.env["GOOGLE_API_KEY"] ?? "";
    const client = new GoogleGenerativeAI(apiKey);
    const genModel = client.getGenerativeModel({ model: config.model ?? this.model });
    const text = typeof prompt === "string" ? prompt : this.#messagesToText(prompt);
    const result = await genModel.generateContent({
      contents: [{ role: "user", parts: [{ text }] }],
      generationConfig: {
        temperature: config.temperature,
        maxOutputTokens: config.maxTokens,
        stopSequences: config.stop,
        candidateCount: config.n ?? 1
      }
    });
    const candidates = result.response.candidates ?? [];
    const texts = candidates.map(
      (candidate) => candidate.content?.parts?.map((part) => part.text ?? "").join("") ?? ""
    );
    return {
      text: texts[0] ?? "",
      texts,
      // No normalized token counts are read from the SDK response here.
      usage: null,
      raw: result
    };
  }
  /** Flatten chat messages into "role: content" lines. */
  #messagesToText(messages) {
    return messages.map((m) => `${m.role}: ${m.content}`).join("\n");
  }
};
|
|
696
|
+
|
|
697
|
+
// src/lm/adapters/Ollama.ts
/**
 * LM adapter for a local Ollama server (default http://localhost:11434),
 * using the non-streaming /api/chat endpoint via global fetch.
 */
var Ollama = class extends LM {
  #baseURL;
  constructor(options = {}) {
    super(options.model ?? "llama3");
    this.#baseURL = options.baseURL ?? "http://localhost:11434";
  }
  async _call(prompt, config) {
    const messages = typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
    const body = JSON.stringify({
      model: config.model ?? this.model,
      messages,
      stream: false,
      options: {
        temperature: config.temperature,
        num_predict: config.maxTokens,
        stop: config.stop,
        ...config.extra ?? {}
      }
    });
    const response = await fetch(`${this.#baseURL}/api/chat`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body
    });
    if (!response.ok) {
      throw new Error(`Ollama request failed: ${response.status} ${response.statusText}`);
    }
    const data = await response.json();
    const text = data.message?.content ?? "";
    const hasUsage = data.prompt_eval_count != null;
    return {
      text,
      texts: [text],
      usage: hasUsage ? {
        promptTokens: data.prompt_eval_count,
        completionTokens: data.eval_count ?? 0,
        totalTokens: (data.prompt_eval_count ?? 0) + (data.eval_count ?? 0)
      } : null,
      raw: data
    };
  }
};
|
|
738
|
+
|
|
739
|
+
// src/lm/adapters/LMStudio.ts
/**
 * LM adapter for an LM Studio server exposing an OpenAI-compatible
 * /chat/completions endpoint (default http://localhost:1234/v1).
 */
var LMStudio = class extends LM {
  #baseURL;
  constructor(options = {}) {
    super(options.model ?? "local-model");
    this.#baseURL = options.baseURL ?? "http://localhost:1234/v1";
  }
  async _call(prompt, config) {
    const messages = typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
    const payload = {
      model: config.model ?? this.model,
      messages,
      temperature: config.temperature,
      max_tokens: config.maxTokens,
      stop: config.stop,
      n: config.n ?? 1,
      ...config.extra ?? {}
    };
    const response = await fetch(`${this.#baseURL}/chat/completions`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      throw new Error(`LMStudio request failed: ${response.status} ${response.statusText}`);
    }
    const data = await response.json();
    const texts = (data.choices ?? []).map((choice) => choice.message?.content ?? "");
    return {
      text: texts[0] ?? "",
      texts,
      usage: data.usage ? {
        promptTokens: data.usage.prompt_tokens,
        completionTokens: data.usage.completion_tokens,
        totalTokens: data.usage.total_tokens
      } : null,
      raw: data
    };
  }
};
|
|
778
|
+
|
|
779
|
+
// src/lm/adapters/HuggingFace.ts
/**
 * LM adapter for the Hugging Face Inference API (or a custom endpoint URL).
 * Authenticates with the HF_API_KEY environment variable when no apiKey
 * option is given; unauthenticated requests are sent without a bearer token.
 */
var HuggingFace = class extends LM {
  #options;
  constructor(options = {}) {
    super(options.model ?? "mistralai/Mistral-7B-Instruct-v0.3");
    this.#options = options;
  }
  async _call(prompt, config) {
    const apiKey = this.#options.apiKey ?? process.env["HF_API_KEY"];
    const model = config.model ?? this.model;
    const url = this.#options.endpointURL ?? `https://api-inference.huggingface.co/models/${model}`;
    const inputText = typeof prompt === "string" ? prompt : this.#messagesToText(prompt);
    const headers = { "Content-Type": "application/json" };
    if (apiKey) headers.Authorization = `Bearer ${apiKey}`;
    const response = await fetch(url, {
      method: "POST",
      headers,
      body: JSON.stringify({
        inputs: inputText,
        parameters: {
          temperature: config.temperature,
          max_new_tokens: config.maxTokens,
          stop: config.stop,
          ...config.extra ?? {}
        }
      })
    });
    if (!response.ok) {
      throw new Error(`HuggingFace request failed: ${response.status} ${response.statusText}`);
    }
    const data = await response.json();
    // The API returns either an array of generations or a single object.
    let generated;
    if (Array.isArray(data)) {
      generated = data[0]?.generated_text ?? "";
    } else {
      generated = data.generated_text ?? "";
    }
    return {
      text: generated,
      texts: [generated],
      // The inference API response is not read for token usage here.
      usage: null,
      raw: data
    };
  }
  /** Flatten chat messages into "role: content" lines. */
  #messagesToText(messages) {
    return messages.map((m) => `${m.role}: ${m.content}`).join("\n");
  }
};
|
|
823
|
+
|
|
824
|
+
// src/lm/adapters/MockLM.ts
var MockLM = class extends LM {
  // Map of exact prompt text -> canned response.
  #responses;
  // Fallback response used when no exact match exists.
  #defaultResponse;
  constructor(responses = {}, defaultResponse) {
    super("mock");
    this.#responses = new Map(Object.entries(responses));
    this.#defaultResponse = defaultResponse;
  }
  /**
   * Look up the canned response for the given prompt. String prompts are
   * matched directly; message arrays are matched on their JSON serialization.
   * @throws when neither a match nor a default response is available.
   */
  async _call(prompt, config) {
    const key = typeof prompt === "string" ? prompt : JSON.stringify(prompt);
    const text = this.#responses.get(key) ?? this.#defaultResponse;
    if (text == null) {
      throw new Error(`MockLM: no response configured for prompt: "${key}"`);
    }
    const n = config.n ?? 1;
    const texts = [];
    for (let i = 0; i < n; i++) {
      texts.push(text);
    }
    return {
      text,
      texts,
      usage: null,
      raw: null
    };
  }
  /** Register (or overwrite) a prompt → response mapping at runtime. */
  addResponse(prompt, response) {
    this.#responses.set(prompt, response);
  }
};
|
|
851
|
+
|
|
852
|
+
// src/modules/Module.ts
var Module = class _Module {
  /**
   * Recursively discover all {@link Predict} sub-modules by walking the own
   * enumerable properties of this instance. Returns `[dottedName, module]`
   * pairs, e.g. `"outer.inner"` for nested modules.
   */
  namedPredictors() {
    const found = [];
    for (const [name, child] of Object.entries(this)) {
      if (!(child instanceof _Module)) continue;
      found.push([name, child]);
      for (const [nestedName, nested] of child.namedPredictors()) {
        found.push([`${name}.${nestedName}`, nested]);
      }
    }
    return found;
  }
  /**
   * Serialize the module's learnable parameters (e.g. `Predict.demos`) to a
   * plain JSON-compatible object keyed by dotted predictor name.
   */
  dump() {
    const state = {};
    this.namedPredictors().forEach(([name, predictor]) => {
      state[name] = predictor.dump();
    });
    return state;
  }
  /**
   * Restore learnable parameters from a plain object previously produced by
   * {@link Module.dump}. Entries that are missing or not objects are skipped.
   */
  load(state) {
    this.namedPredictors().forEach(([name, predictor]) => {
      const sub = state[name];
      if (sub !== null && typeof sub === "object") {
        predictor.load(sub);
      }
    });
  }
  /**
   * Create a deep clone of this module.
   *
   * Returns a new module with the same prototype. All sub-{@link Module}
   * properties are recursively cloned so that mutating the clone's learnable
   * parameters (e.g. `Predict.demos`) does **not** affect the original.
   * Array properties are shallow-copied (their elements are not cloned).
   * All other properties are copied by reference.
   */
  clone() {
    const copy = Object.create(Object.getPrototypeOf(this));
    for (const [key, value] of Object.entries(this)) {
      if (value instanceof _Module) {
        copy[key] = value.clone();
      } else if (Array.isArray(value)) {
        copy[key] = value.slice();
      } else {
        copy[key] = value;
      }
    }
    return copy;
  }
};
|
|
915
|
+
|
|
916
|
+
// src/settings/Settings.ts
import { AsyncLocalStorage } from "async_hooks";
// Holds per-async-scope overrides created by Settings.context().
var contextStore = new AsyncLocalStorage();
var Settings = class {
  // Process-wide settings, mutated via configure()/reset().
  #global = {};
  /**
   * The effective settings: async-context overrides (when inside a
   * `context()` scope) layered on top of the globals.
   */
  get #current() {
    const overrides = contextStore.getStore();
    if (overrides === void 0) {
      return this.#global;
    }
    return { ...this.#global, ...overrides };
  }
  // -- Accessors -------------------------------------------------------------
  get lm() {
    return this.#current.lm;
  }
  get rm() {
    return this.#current.rm;
  }
  get lmConfig() {
    return this.#current.lmConfig;
  }
  get logLevel() {
    return this.#current.logLevel ?? "warn";
  }
  get cacheDir() {
    return this.#current.cacheDir;
  }
  // -- Mutation --------------------------------------------------------------
  /**
   * Merge `options` into the global settings. Existing keys are overwritten;
   * omitted keys are unchanged. This does NOT affect currently running
   * {@link Settings.context} scopes.
   */
  configure(options) {
    this.#global = Object.assign({}, this.#global, options);
  }
  /** Reset all global settings to their defaults. */
  reset() {
    this.#global = {};
  }
  /**
   * Return a frozen (shallow) snapshot of the currently effective settings
   * (respects any active async-context overrides).
   */
  inspect() {
    return Object.freeze({ ...this.#current });
  }
  /**
   * Run `fn` inside an async-context-local settings scope.
   *
   * The `overrides` are merged on top of the current global settings and
   * stored in an `AsyncLocalStorage` context. Concurrent calls each get
   * their own isolated snapshot — they never overwrite each other's settings.
   *
   * @example
   * ```ts
   * // In an Express/Fastify handler:
   * await settings.context({ lm: perRequestLM }, () => program.forward(inputs));
   * ```
   */
  async context(overrides, fn) {
    const scoped = Object.assign({}, this.#global, overrides);
    return contextStore.run(scoped, fn);
  }
};
// Shared singleton used throughout the library.
var settings = new Settings();
|
|
989
|
+
|
|
990
|
+
// src/modules/Predict.ts
var Predict = class extends Module {
  /** The signature describing input/output fields. */
  signature;
  /** Few-shot demonstration examples (learnable parameter). */
  demos;
  /** System instruction override (learnable parameter). */
  instructions;
  constructor(signature) {
    super();
    this.signature = typeof signature === "string" ? Signature.from(signature) : signature;
    this.demos = [];
    this.instructions = this.signature.instructions;
  }
  // ---------------------------------------------------------------------------
  // Forward pass
  // ---------------------------------------------------------------------------
  /**
   * Build a prompt from instructions, demos, and inputs, call the configured
   * LM, and parse the completion(s) into a {@link Prediction}.
   * @throws when no LM has been configured via `settings.configure({ lm })`.
   */
  async forward(inputs) {
    const lm = settings.lm;
    if (!lm) {
      throw new Error(
        "No LM configured. Call settings.configure({ lm }) before using Predict."
      );
    }
    const prompt = this.#buildPrompt(inputs);
    const config = settings.lmConfig ?? {};
    const response = await lm.call(prompt, config);
    const outputs = this.#parseCompletion(response.text);
    const completions = response.texts.map((t) => this.#parseCompletion(t));
    return new Prediction(outputs, completions);
  }
  /**
   * Stream the LM response token by token.
   * Returns an `AsyncGenerator<StreamChunk>`.
   */
  async *stream(inputs) {
    const lm = settings.lm;
    if (!lm) throw new Error("No LM configured. Call settings.configure({ lm }) before using Predict.");
    const prompt = this.#buildPrompt(inputs);
    const config = settings.lmConfig ?? {};
    yield* lm.stream(prompt, config);
  }
  // ---------------------------------------------------------------------------
  // Serialization
  // ---------------------------------------------------------------------------
  /** Serialize the learnable parameters (signature, demos, instructions). */
  dump() {
    return {
      signature: this.signature.toJSON(),
      demos: this.demos.map((d) => d.toJSON()),
      instructions: this.instructions
    };
  }
  /** Restore demos/instructions from a state object produced by dump(). */
  load(state) {
    if (Array.isArray(state["demos"])) {
      this.demos = state["demos"].map(
        (d) => new Example(d)
      );
    }
    if (typeof state["instructions"] === "string") {
      this.instructions = state["instructions"];
    }
  }
  // ---------------------------------------------------------------------------
  // Private helpers
  // ---------------------------------------------------------------------------
  // Assemble the full prompt: instructions, demos separated by "---", the
  // current inputs, then one "name:" cue line per expected output field.
  #buildPrompt(inputs) {
    const lines = [];
    if (this.instructions) {
      lines.push(this.instructions, "");
    }
    for (const demo of this.demos) {
      lines.push(this.#formatExample(demo.toDict()));
      lines.push("---");
    }
    lines.push(this.#formatInputs(inputs));
    for (const [name] of this.signature.outputs) {
      lines.push(`${name}:`);
    }
    return lines.join("\n");
  }
  // Render a demo as "name: value" lines for every input and output field.
  #formatExample(data) {
    return [...this.signature.inputs, ...this.signature.outputs].map(([name]) => `${name}: ${String(data[name] ?? "")}`).join("\n");
  }
  // Render the current inputs as "name: value" lines.
  #formatInputs(inputs) {
    return [...this.signature.inputs].map(([name]) => `${name}: ${String(inputs[name] ?? "")}`).join("\n");
  }
  /**
   * Parse a raw completion string into a map of field name → value.
   *
   * Looks for `fieldName: <value>` lines in the completion. When the
   * signature has exactly one output field and no labeled line was found,
   * the whole completion (trimmed) is used as that field's value.
   */
  #parseCompletion(text) {
    const result = {};
    const outputKeys = [...this.signature.outputs.keys()];
    for (const key of outputKeys) {
      // FIX: escape the field name before interpolating it into the pattern —
      // a name containing regex metacharacters (e.g. "cost($)") would
      // otherwise produce a broken or wrong regular expression.
      const escaped = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const regex = new RegExp(`^${escaped}:\\s*(.*)$`, "mi");
      const match = regex.exec(text);
      if (match) {
        result[key] = (match[1] ?? "").trim();
      }
    }
    if (outputKeys.length === 1 && !(outputKeys[0] in result)) {
      result[outputKeys[0]] = text.trim();
    }
    return result;
  }
};
|
|
1098
|
+
|
|
1099
|
+
// src/modules/ChainOfThought.ts
var ChainOfThought = class extends Predict {
  constructor(signature, options = {}) {
    const base = typeof signature === "string" ? Signature.from(signature) : signature;
    // Add a "rationale" output and move it to the front of the output map so
    // the model produces its reasoning before the final answer fields.
    const withRationale = base.withOutput("rationale", {
      description: options.rationaleDescription ?? "Think step by step to reason through the problem",
      prefix: "Reasoning:"
    });
    const outputs = new Map();
    outputs.set("rationale", withRationale.outputs.get("rationale"));
    for (const [name, field] of withRationale.outputs) {
      outputs.set(name, field);
    }
    super(new Signature({
      inputs: withRationale.inputs,
      outputs,
      instructions: withRationale.instructions
    }));
  }
  /** Runs the predictor, then strips the internal rationale from the result. */
  async forward(inputs) {
    const prediction = await super.forward(inputs);
    const data = { ...prediction.toDict() };
    delete data.rationale;
    return new Prediction(data, prediction.completions);
  }
};
|
|
1125
|
+
|
|
1126
|
+
// src/modules/ChainOfThoughtWithHint.ts
/**
 * A {@link ChainOfThought} whose signature carries an extra optional
 * "hint" input that can steer the model's reasoning.
 */
var ChainOfThoughtWithHint = class extends ChainOfThought {
  constructor(signature, options = {}) {
    // FIX: removed a dead no-op ternary (`typeof signature === "string"
    // ? signature : signature`) left over from compiled type casts.
    super(signature, options);
    // Extend the signature built by ChainOfThought with an optional hint.
    this.signature = this.signature.withInput("hint", {
      description: "An optional hint to guide the reasoning",
      optional: true
    });
  }
  async forward(inputs) {
    return super.forward(inputs);
  }
};
|
|
1141
|
+
|
|
1142
|
+
// src/modules/MultiChainComparison.ts
var MultiChainComparison = class extends Module {
  /** Number of independent reasoning chains to sample. */
  M;
  #cot;
  #aggregator;
  constructor(signature, M = 3) {
    super();
    this.M = M;
    this.#cot = new ChainOfThought(signature);
    this.#aggregator = new Predict(
      "completions -> answer"
    );
  }
  /**
   * Run the chain-of-thought predictor M times (sequentially), collect the
   * non-rationale answers, and ask the aggregator to choose among the
   * numbered options.
   */
  async forward(inputs) {
    const options = [];
    for (let attempt = 0; attempt < this.M; attempt++) {
      const chain = await this.#cot.forward(inputs);
      let answerKey;
      for (const key of this.#cot.signature.outputs.keys()) {
        if (key !== "rationale") {
          answerKey = key;
          break;
        }
      }
      options.push(String(chain.get(answerKey ?? "answer") ?? ""));
    }
    const numbered = options.map((text, index) => `Option ${index + 1}: ${text}`);
    return this.#aggregator.forward({
      completions: numbered.join("\n")
    });
  }
};
|
|
1167
|
+
|
|
1168
|
+
// src/modules/ReAct.ts
var ReAct = class extends Module {
  /** Registered tools, keyed by tool name. */
  tools;
  /** Maximum number of thought/action iterations before giving up. */
  maxIter;
  #predictor;
  constructor(signature, tools, maxIter = 5) {
    super();
    this.tools = new Map(tools.map((t) => [t.name, t]));
    this.maxIter = maxIter;
    const toolDescriptions = tools.map((t) => `${t.name}: ${t.description}`).join("\n");
    // FIX: removed two dead no-op ternaries (`typeof x === "string" ? x : x`)
    // left over from compiled type casts.
    const instructions = `You are an agent. Use the following tools:
${toolDescriptions}

Respond in the format:
Thought: <reasoning>
Action: <tool>[<args>]
Observation: <result>
...
Finish[<answer>]`;
    this.#predictor = new Predict(signature);
    this.#predictor.instructions = instructions;
  }
  /**
   * Run the ReAct loop: predict, look for a Finish[...] marker, otherwise
   * execute the requested tool and append its observation to the trajectory.
   * Stops after maxIter iterations; falls back to the last predictor output
   * when no Finish[...] was produced.
   */
  async forward(inputs) {
    const trajectory = [];
    let finalAnswer = "";
    for (let step = 0; step < this.maxIter; step++) {
      const augmented = {
        ...inputs,
        trajectory: trajectory.join("\n")
      };
      const result = await this.#predictor.forward(augmented);
      const firstOutputKey = [...this.#predictor.signature.outputs.keys()][0] ?? "";
      const text = String(result.get(firstOutputKey) ?? "");
      trajectory.push(text);
      const finishMatch = /Finish\[(.+)\]/i.exec(text);
      if (finishMatch) {
        finalAnswer = finishMatch[1] ?? "";
        break;
      }
      const actionMatch = /Action:\s*(\w+)\[(.+?)\]/i.exec(text);
      if (actionMatch) {
        const toolName = actionMatch[1] ?? "";
        const toolArgs = actionMatch[2] ?? "";
        const tool = this.tools.get(toolName);
        const observation = tool ? await tool.fn(toolArgs) : `Tool "${toolName}" not found.`;
        trajectory.push(`Observation: ${observation}`);
      }
    }
    const outputKey = [...this.#predictor.signature.outputs.keys()][0] ?? "answer";
    return new Prediction({
      [outputKey]: finalAnswer || trajectory.at(-1) || "",
      trajectory: trajectory.join("\n")
    });
  }
};
|
|
1225
|
+
|
|
1226
|
+
// src/modules/ProgramOfThought.ts
var ProgramOfThought = class extends Module {
  // Maximum generate/execute attempts: the first attempt generates fresh
  // code, later attempts ask the corrector to repair the failing code.
  maxAttempts;
  /** Wall-clock timeout (ms) for each code execution attempt. */
  timeoutMs;
  // Execution strategy: "worker" (separate worker thread with timeout),
  // "none" (in-process, no timeout), or "function" (default: in-process,
  // raced against a wall-clock timeout).
  sandbox;
  // Predict that writes JavaScript code for the task.
  #codeGenerator;
  // Predict that repairs code given the previous error message.
  #corrector;
  // First output field of the caller's signature; the execution result is
  // stored under this key ("answer" when the signature has no outputs).
  #outputKey;
  constructor(signature, maxAttempts = 3, timeoutMs = 5e3, sandbox = "function") {
    super();
    this.maxAttempts = maxAttempts;
    this.timeoutMs = timeoutMs;
    this.sandbox = sandbox;
    const base = typeof signature === "string" ? Signature.from(signature) : signature;
    // Generation signature: the task's inputs plus an optional
    // "instructions" input, producing a single "code" output.
    const genSig = new Signature({
      inputs: new Map([
        ...base.inputs,
        [
          "instructions",
          InputField({
            description: "Task instructions for code generation",
            optional: true
          })
        ]
      ]),
      outputs: /* @__PURE__ */ new Map([
        [
          "code",
          OutputField({
            description: "JavaScript code that computes and returns the answer via a `return` statement"
          })
        ]
      ]),
      instructions: base.instructions
    });
    this.#codeGenerator = new Predict(genSig);
    this.#corrector = new Predict("code, error -> fixed_code");
    this.#outputKey = [...base.outputs.keys()][0] ?? "answer";
  }
  /**
   * Generate code for the task, execute it, and return the stringified
   * result. On execution failure the error message is fed back to the
   * corrector predictor for up to maxAttempts total attempts. When every
   * attempt fails, the output field is the empty string.
   *
   * SECURITY NOTE: executes model-generated code via `new Function` — the
   * "none" and "function" sandboxes provide no isolation; prefer
   * sandbox: "worker" for untrusted model output.
   */
  async forward(inputs) {
    let code = "";
    let result;
    let lastError = "";
    for (let attempt = 0; attempt < this.maxAttempts; attempt++) {
      // Attempt 0 generates fresh code; later attempts repair the previous
      // code using the captured error message.
      const genInputs = attempt === 0 ? {
        ...inputs,
        instructions: "Write JavaScript code to compute the answer. Use a `return` statement for the final value."
      } : { code, error: lastError };
      const generated = attempt === 0 ? await this.#codeGenerator.forward(genInputs) : await this.#corrector.forward(genInputs);
      code = String(generated.get("code") ?? generated.get("fixed_code") ?? "");
      try {
        if (this.sandbox === "worker") {
          result = await this.#executeInWorker(code, this.timeoutMs);
        } else if (this.sandbox === "none") {
          // No timeout: the generated code runs unbounded in-process.
          const fn = new Function(`return (async () => { ${code} })()`);
          result = await fn();
        } else {
          // Default "function" sandbox: in-process but raced against a timer.
          const fn = new Function(`return (async () => { ${code} })()`);
          result = await this.#executeWithTimeout(fn(), this.timeoutMs);
        }
        break;
      } catch (err) {
        lastError = err instanceof Error ? err.message : String(err);
        result = void 0;
      }
    }
    return new Prediction({
      [this.#outputKey]: result !== void 0 ? String(result) : "",
      code
    });
  }
  // Run the generated code in a dedicated worker thread, terminating the
  // worker (and rejecting) if it does not report back within timeoutMs.
  async #executeInWorker(code, timeoutMs) {
    const { Worker } = await import("worker_threads");
    // Bootstrap script evaluated by the worker: wraps the generated code in
    // an async function and posts either { result } or { error } back.
    const WORKER_CODE = `
const { workerData, parentPort } = require('node:worker_threads');
const { code } = workerData;
(async () => {
  try {
    const fn = new Function('return (async () => { ' + code + ' })()');
    const result = await fn();
    parentPort.postMessage({ result: String(result ?? '') });
  } catch (err) {
    parentPort.postMessage({ error: err.message ?? String(err) });
  }
})();
`;
    return new Promise((resolve, reject) => {
      const worker = new Worker(WORKER_CODE, {
        eval: true,
        workerData: { code }
      });
      // Hard kill on timeout — unlike #executeWithTimeout, this actually
      // stops the runaway computation.
      const timer = setTimeout(() => {
        void worker.terminate();
        reject(new Error("ProgramOfThought: worker execution timed out"));
      }, timeoutMs);
      worker.on("message", (msg) => {
        clearTimeout(timer);
        void worker.terminate();
        if (msg.error) reject(new Error(msg.error));
        else resolve(msg.result ?? "");
      });
      worker.on("error", (err) => {
        clearTimeout(timer);
        reject(err);
      });
    });
  }
  /**
   * Race `promise` against a wall-clock timer.
   * The underlying async work is not cancelled on timeout (no true abort), but
   * the returned Promise rejects promptly.
   */
  #executeWithTimeout(promise, timeoutMs) {
    return new Promise((resolve, reject) => {
      const timer = setTimeout(
        () => reject(
          new Error(
            `ProgramOfThought: code execution timed out after ${timeoutMs}ms`
          )
        ),
        timeoutMs
      );
      promise.then(
        (value) => {
          clearTimeout(timer);
          resolve(value);
        },
        (err) => {
          clearTimeout(timer);
          reject(err);
        }
      );
    });
  }
};
|
|
1362
|
+
|
|
1363
|
+
// src/modules/Retrieve.ts
var Retrieve = class extends Module {
  /** Number of passages to fetch per query. */
  k;
  constructor(k = 3) {
    super();
    this.k = k;
  }
  /**
   * Fetch the top-k passages for `query` from the configured retriever and
   * wrap them in a Prediction alongside the original query.
   * @throws when no retriever has been configured via settings.configure({ rm }).
   */
  async forward(query) {
    const retriever = settings.rm;
    if (!retriever) {
      throw new Error(
        "No retriever configured. Call settings.configure({ rm }) before using Retrieve."
      );
    }
    const passages = await retriever.retrieve(query, this.k);
    return new Prediction({ passages, query });
  }
};
|
|
1381
|
+
|
|
1382
|
+
// src/assertions/Assert.ts
/**
 * Error thrown by {@link Assert} when its condition does not hold.
 */
var AssertionError = class extends Error {
  constructor(message = "Assertion failed") {
    super(message);
    this.name = "AssertionError";
  }
};
/**
 * Hard assertion: throws an {@link AssertionError} when `condition` is falsy.
 */
function Assert(condition, message) {
  if (condition) {
    return;
  }
  throw new AssertionError(message ?? "Assertion failed");
}
|
|
1394
|
+
|
|
1395
|
+
// src/assertions/Suggest.ts
/**
 * Soft assertion: logs a console warning (instead of throwing) when
 * `condition` is falsy.
 */
function Suggest(condition, message) {
  if (condition) {
    return;
  }
  console.warn(`[DSTsx Suggest] ${message ?? "Condition not met"}`);
}
|
|
1401
|
+
|
|
1402
|
+
// src/modules/Retry.ts
var Retry = class extends Module {
  #inner;
  /** Maximum number of forward attempts before giving up. */
  maxAttempts;
  constructor(inner, maxAttempts = 3) {
    super();
    this.#inner = inner;
    this.maxAttempts = maxAttempts;
  }
  /**
   * Call the wrapped module, retrying on {@link AssertionError}. The failed
   * assertion's message is written into the first (object) argument as
   * `feedback` so the next attempt can see it. Non-assertion errors are
   * rethrown immediately; the last AssertionError is rethrown when all
   * attempts are exhausted.
   */
  async forward(...args) {
    let lastError;
    for (let attempt = 0; attempt < this.maxAttempts; attempt++) {
      try {
        return await this.#inner.forward(...args);
      } catch (err) {
        if (!(err instanceof AssertionError)) {
          throw err;
        }
        lastError = err;
        const firstArg = args[0];
        if (firstArg && typeof firstArg === "object") {
          firstArg["feedback"] = err.message;
        }
      }
    }
    throw lastError ?? new Error("Retry: all attempts exhausted");
  }
};
|
|
1431
|
+
|
|
1432
|
+
// src/modules/BestOfN.ts
var BestOfN = class extends Module {
  /** Number of parallel samples to draw from the wrapped module. */
  N;
  #inner;
  #reduce;
  constructor(inner, N = 3, reduceFunc) {
    super();
    this.#inner = inner;
    this.N = N;
    // Default reducer: first prediction wins (empty prediction if none).
    this.#reduce = reduceFunc ?? ((preds) => preds[0] ?? new Prediction({}));
  }
  /** Run the wrapped module N times in parallel and reduce the results. */
  async forward(...args) {
    const attempts = [];
    for (let i = 0; i < this.N; i++) {
      attempts.push(this.#inner.forward(...args));
    }
    const predictions = await Promise.all(attempts);
    return this.#reduce(predictions);
  }
};
|
|
1453
|
+
|
|
1454
|
+
// src/modules/Ensemble.ts
var Ensemble = class extends Module {
  #modules;
  #reduce;
  constructor(modules, reduceFunc) {
    super();
    this.#modules = modules;
    // Default reducer: first prediction wins (empty prediction if none).
    this.#reduce = reduceFunc ?? ((preds) => preds[0] ?? new Prediction({}));
  }
  /** Run every member module in parallel on the same args, then reduce. */
  async forward(...args) {
    const tasks = [];
    for (const member of this.#modules) {
      tasks.push(member.forward(...args));
    }
    const predictions = await Promise.all(tasks);
    return this.#reduce(predictions);
  }
};
|
|
1472
|
+
|
|
1473
|
+
// src/modules/TypedPredictor.ts
/**
 * A {@link Prediction} that additionally carries the schema-validated
 * (typed) value parsed from the raw string outputs.
 */
var TypedPrediction = class extends Prediction {
  // The parsed (and, when a schema was given, validated) value.
  typed;
  constructor(data, typed, completions = []) {
    super(data, completions);
    this.typed = typed;
  }
};
|
|
1481
|
+
var TypedPredictor = class _TypedPredictor extends Predict {
  // Optional schema with a `.parse(value)` method (e.g. a Zod schema);
  // when absent the parsed JSON is returned unvalidated.
  #schema;
  // Number of additional forward attempts after the first one fails.
  #maxRetries;
  constructor(signature, schema, options = {}) {
    super(signature);
    this.#schema = schema;
    this.#maxRetries = options.maxRetries ?? 3;
  }
  /**
   * Run the predictor, parse the first non-empty string output as JSON
   * (stripping a surrounding ``` fence if present), and validate it against
   * the schema when one was provided. Retries the whole predict+parse cycle
   * up to maxRetries extra times; rethrows the last error on exhaustion.
   * The JSON-format instruction suffix is appended only for the duration of
   * the call and restored in the `finally` block.
   */
  async forward(inputs) {
    const origInstructions = this.instructions;
    const jsonSuffix = "\n\nRespond with a JSON object matching the output schema.";
    this.instructions = (origInstructions ?? "") + jsonSuffix;
    let lastError;
    try {
      for (let attempt = 0; attempt <= this.#maxRetries; attempt++) {
        try {
          const prediction = await super.forward(inputs);
          const dict = prediction.toDict();
          let parsed;
          let found = false;
          let lastParseError;
          // Try each output field in signature order until one parses.
          for (const key of this.signature.outputs.keys()) {
            const val = dict[key];
            if (typeof val === "string" && val.length > 0) {
              try {
                parsed = _TypedPredictor.#parseJSON(val);
                found = true;
                break;
              } catch (parseErr) {
                lastParseError = parseErr;
              }
            }
          }
          if (!found) {
            // A field existed but failed to parse -> retry via outer catch.
            if (lastParseError !== void 0) {
              throw lastParseError;
            }
            // No parseable string fields at all: fall back to the raw dict.
            parsed = dict;
          }
          let typed;
          if (this.#schema) {
            typed = this.#schema.parse(parsed);
          } else {
            typed = parsed;
          }
          return new TypedPrediction(
            dict,
            typed,
            prediction.completions
          );
        } catch (err) {
          lastError = err;
        }
      }
    } finally {
      // Always restore the caller-visible instructions.
      this.instructions = origInstructions;
    }
    throw lastError;
  }
  // Parse a JSON payload, tolerating a surrounding ```/```json code fence.
  static #parseJSON(raw) {
    if (typeof raw !== "string") return raw;
    let text = raw.trim();
    const fence = /^```(?:json)?\s*([\s\S]*?)\s*```$/m.exec(text);
    if (fence) text = (fence[1] ?? "").trim();
    return JSON.parse(text);
  }
};
|
|
1548
|
+
var TypedChainOfThought = class extends TypedPredictor {
  constructor(signature, schema, options = {}) {
    const base = typeof signature === "string" ? Signature.from(signature) : signature;
    // Add a "rationale" output and move it to the front of the output map so
    // the model reasons before producing the final answer fields.
    const withRationale = base.withOutput("rationale", {
      description: "Think step by step to reason through the problem",
      prefix: "Reasoning:"
    });
    const outputs = new Map();
    outputs.set("rationale", withRationale.outputs.get("rationale"));
    for (const [name, field] of withRationale.outputs) {
      outputs.set(name, field);
    }
    super(new Signature({
      inputs: withRationale.inputs,
      outputs,
      instructions: withRationale.instructions
    }), schema, options);
  }
  /** Runs the typed predictor, then strips the internal rationale. */
  async forward(inputs) {
    const result = await super.forward(inputs);
    const data = { ...result.toDict() };
    delete data.rationale;
    return new TypedPrediction(
      data,
      result.typed,
      result.completions
    );
  }
};
|
|
1576
|
+
|
|
1577
|
+
// src/modules/Parallel.ts
var Parallel = class extends Module {
  #modules;
  // Optional per-task timeout in milliseconds; undefined disables timeouts.
  #timeoutMs;
  constructor(modules, options = {}) {
    super();
    this.#modules = modules;
    this.#timeoutMs = options.timeoutMs;
  }
  /**
   * Run all modules in parallel and return all predictions.
   * When a timeout is configured, each task is raced against its own timer;
   * a task that misses the deadline rejects with "Parallel: timeout".
   * Rejects with the first failure (Promise.all fail-fast semantics).
   */
  async run(...args) {
    const tasks = this.#modules.map(
      (m) => m.forward(...args)
    );
    if (this.#timeoutMs === void 0) {
      return Promise.all(tasks);
    }
    const timeoutMs = this.#timeoutMs;
    const withTimeout = tasks.map(
      (t) => new Promise((resolve, reject) => {
        // FIX: clear the timer once the task settles. Previously the timer
        // was never cancelled, so even a fast task left a pending timeout
        // keeping the event loop alive for up to timeoutMs.
        const timer = setTimeout(
          () => reject(new Error("Parallel: timeout")),
          timeoutMs
        );
        t.then(
          (value) => {
            clearTimeout(timer);
            resolve(value);
          },
          (err) => {
            clearTimeout(timer);
            reject(err);
          }
        );
      })
    );
    return Promise.all(withTimeout);
  }
  /** For Module interface compatibility — returns first prediction. */
  async forward(...args) {
    const results = await this.run(...args);
    return results[0];
  }
};
|
|
1611
|
+
|
|
1612
|
+
// src/modules/Refine.ts
|
|
1613
|
+
// Iterative self-refinement wrapper: runs an inner module, asks an LM critic
// whether the output is satisfactory, and re-runs the inner module with the
// critic's feedback injected until satisfied or out of budget.
var Refine = class extends Module {
  #inner;
  #maxRefinements;
  #feedbackField;
  #stopCondition;
  #critic;
  /**
   * @param inner - Module to refine.
   * @param options - `maxRefinements` (default 2), `feedbackField` key under
   *   which critique text is injected into the inputs (default "feedback"),
   *   and an optional `stopCondition(prediction)` short-circuit.
   */
  constructor(inner, options = {}) {
    super();
    this.#inner = inner;
    this.#maxRefinements = options.maxRefinements ?? 2;
    this.#feedbackField = options.feedbackField ?? "feedback";
    this.#stopCondition = options.stopCondition;
    // LM-based critic: judges the serialized output and emits a critique.
    this.#critic = new Predict("output -> critique, is_satisfactory");
  }
  async forward(...args) {
    const innerForward = this.#inner.forward.bind(this.#inner);
    let prediction = await innerForward(...args);
    for (let i = 0; i < this.#maxRefinements; i++) {
      // Caller-supplied early exit takes precedence over the critic.
      if (this.#stopCondition?.(prediction)) break;
      const outputStr = JSON.stringify(prediction.toDict());
      let critique;
      try {
        critique = await this.#critic.forward({ output: outputStr });
      } catch {
        // Best-effort: if the critic fails, keep the current prediction.
        break;
      }
      // Accept loose LM phrasing ("Yes", "true ") as satisfactory.
      const isSatisfactory = String(
        critique.get("is_satisfactory") ?? ""
      ).toLowerCase().trim();
      if (isSatisfactory === "yes" || isSatisfactory === "true") break;
      const feedback = String(critique.get("critique") ?? "");
      // Inject feedback into the first positional arg when it is an object
      // (the usual inputs dict); other call shapes are re-run unchanged.
      const newArgs = [...args];
      if (newArgs.length > 0 && typeof newArgs[0] === "object" && newArgs[0] !== null) {
        newArgs[0] = {
          ...newArgs[0],
          [this.#feedbackField]: feedback
        };
      }
      try {
        prediction = await innerForward(...newArgs);
      } catch {
        // Keep the last successful prediction if the refinement run fails.
        break;
      }
    }
    return prediction;
  }
};
|
|
1660
|
+
|
|
1661
|
+
// src/modules/NativeReAct.ts
|
|
1662
|
+
// ReAct agent that relies on the provider's native tool-calling protocol
// instead of text-parsing: the LM is offered tool schemas and the loop
// executes any tool_calls it emits, feeding observations back as messages.
var NativeReAct = class extends Module {
  tools;
  maxIter;
  #signatureStr;
  #outputKey;
  /**
   * @param signatureStr - Shorthand signature; its first output key names the
   *   final-answer field (falls back to "answer").
   * @param tools - Array of { name, description, fn } tool descriptors.
   * @param maxIter - Maximum LM round-trips before giving up (default 5).
   */
  constructor(signatureStr, tools, maxIter = 5) {
    super();
    this.#signatureStr = signatureStr;
    this.tools = new Map(tools.map((t) => [t.name, t]));
    this.maxIter = maxIter;
    const sig = Signature.from(signatureStr);
    this.#outputKey = [...sig.outputs.keys()][0] ?? "answer";
  }
  async forward(inputs) {
    const lm = settings.lm;
    if (!lm) throw new Error("No LM configured.");
    // Every tool is exposed with a single string "args" parameter; the tool
    // itself is responsible for parsing JSON or plain-text arguments.
    const toolSchemas = [...this.tools.values()].map((t) => ({
      type: "function",
      function: {
        name: t.name,
        description: t.description,
        parameters: {
          type: "object",
          properties: { args: { type: "string", description: "Tool arguments as JSON or plain string" } },
          required: ["args"]
        }
      }
    }));
    const inputStr = Object.entries(inputs).map(([k, v]) => `${k}: ${String(v)}`).join("\n");
    const messages = [
      {
        role: "system",
        content: `You are a helpful assistant. Use tools when needed.
Signature: ${this.#signatureStr}
Tools: ${[...this.tools.keys()].join(", ")}`
      },
      { role: "user", content: inputStr }
    ];
    let finalAnswer = "";
    const trajectory = [];
    for (let i = 0; i < this.maxIter; i++) {
      const response = await lm.call(messages, {
        extra: { tools: toolSchemas, tool_choice: "auto" }
      });
      const raw = response.raw;
      const choices = raw?.["choices"] ?? [];
      const choice = choices[0];
      const toolCalls = choice?.["message"]?.["tool_calls"];
      if (toolCalls && toolCalls.length > 0) {
        for (const tc of toolCalls) {
          const toolName = tc.function.name;
          const args = tc.function.arguments;
          const tool = this.tools.get(toolName);
          let observation;
          if (tool) {
            try {
              // `await` accepts both sync return values and Promises. The
              // previous `tool.fn(args).catch(...)` threw a TypeError when a
              // tool fn returned a non-Promise and let synchronous throws
              // escape the loop entirely.
              observation = await tool.fn(args);
            } catch (e) {
              observation = String(e);
            }
          } else {
            observation = `Unknown tool: ${toolName}`;
          }
          trajectory.push({
            thought: `Using tool: ${toolName}`,
            action: `${toolName}(${args})`,
            observation
          });
          messages.push({ role: "assistant", content: `Tool: ${toolName}
Args: ${args}` });
          messages.push({ role: "user", content: `Observation: ${observation}` });
        }
      } else {
        // No tool calls: the model's text is the final answer.
        finalAnswer = response.text;
        break;
      }
    }
    return new Prediction({ [this.#outputKey]: finalAnswer, trajectory: JSON.stringify(trajectory) });
  }
};
|
|
1733
|
+
|
|
1734
|
+
// src/retrieve/Retriever.ts
|
|
1735
|
+
// Abstract base for retrieval backends. Subclasses implement
// `retrieve(query, k)` and return up to `k` passage strings.
var Retriever = class {
};
|
|
1737
|
+
|
|
1738
|
+
// src/retrieve/backends/ColBERTv2.ts
|
|
1739
|
+
// Retrieval backend for a ColBERTv2 HTTP search server.
var ColBERTv2 = class extends Retriever {
  #url;
  /** @param options - Server base URL string, or an object with a `url` property. */
  constructor(options) {
    super();
    this.#url = typeof options === "string" ? options : options.url;
  }
  /**
   * Query the server's /search endpoint and return up to `k` passage strings.
   * @throws Error when the HTTP response is not OK.
   */
  async retrieve(query, k) {
    const endpoint = new URL("/search", this.#url);
    endpoint.searchParams.set("query", query);
    endpoint.searchParams.set("k", String(k));
    const res = await fetch(endpoint.toString());
    if (!res.ok) {
      throw new Error(`ColBERTv2 request failed: ${res.status} ${res.statusText}`);
    }
    const body = await res.json();
    const topk = body.topk ?? [];
    return topk.map((p) => p.content ?? "").filter(Boolean);
  }
};
|
|
1757
|
+
|
|
1758
|
+
// src/retrieve/backends/PineconeRM.ts
|
|
1759
|
+
// Retrieval backend backed by a Pinecone vector index. The Pinecone client
// is loaded lazily so the dependency stays optional.
var PineconeRM = class extends Retriever {
  #options;
  /**
   * @param options - Expects `indexName`, `embeddingFn(text) -> number[]`,
   *   and optionally `apiKey` (falls back to PINECONE_API_KEY) and `namespace`.
   */
  constructor(options) {
    super();
    this.#options = options;
  }
  async retrieve(query, k) {
    // Lazy optional dependency: fail with an actionable install hint.
    const { Pinecone } = await import("@pinecone-database/pinecone").catch(() => {
      throw new Error(
        "The `@pinecone-database/pinecone` package is required.\nInstall it with: npm install @pinecone-database/pinecone"
      );
    });
    const client = new Pinecone({
      apiKey: this.#options.apiKey ?? process.env["PINECONE_API_KEY"] ?? ""
    });
    const index = client.index(this.#options.indexName);
    const embedding = await this.#options.embeddingFn(query);
    const queryResponse = await index.namespace(this.#options.namespace ?? "").query({
      vector: embedding,
      topK: k,
      includeMetadata: true
    });
    // Passage text is read from the "text" or "content" metadata field;
    // matches without either are dropped.
    return (queryResponse.matches ?? []).map(
      (m) => String(m.metadata?.["text"] ?? m.metadata?.["content"] ?? "")
    ).filter(Boolean);
  }
};
|
|
1786
|
+
|
|
1787
|
+
// src/retrieve/backends/WeaviateRM.ts
|
|
1788
|
+
// Retrieval backend backed by a Weaviate collection, using near-vector search
// with a caller-supplied embedding function. The client is loaded lazily.
var WeaviateRM = class extends Retriever {
  #options;
  /**
   * @param options - Expects `url`, `className`, `embeddingFn(text) -> number[]`,
   *   and optionally `apiKey` and `textProperty` (default "text").
   */
  constructor(options) {
    super();
    this.#options = options;
  }
  async retrieve(query, k) {
    // Lazy optional dependency: fail with an actionable install hint.
    const weaviate = await import("weaviate-client").catch(() => {
      throw new Error(
        "The `weaviate-client` package is required.\nInstall it with: npm install weaviate-client"
      );
    });
    // NOTE(review): a new connection is opened per retrieve() call and never
    // explicitly closed — confirm the client does not leak sockets.
    const client = await weaviate.default.connectToCustom({
      httpHost: this.#options.url,
      ...this.#options.apiKey ? { authCredentials: new weaviate.default.ApiKey(this.#options.apiKey) } : {}
    });
    const embedding = await this.#options.embeddingFn(query);
    const textProp = this.#options.textProperty ?? "text";
    const result = await client.collections.get(this.#options.className).query.nearVector(embedding, { limit: k, returnProperties: [textProp] });
    return (result.objects ?? []).map(
      (obj) => String(obj.properties?.[textProp] ?? "")
    ).filter(Boolean);
  }
};
|
|
1812
|
+
|
|
1813
|
+
// src/retrieve/backends/ChromadbRM.ts
|
|
1814
|
+
// Retrieval backend backed by a ChromaDB collection. Uses Chroma's own
// server-side embedding via `queryTexts` (no embeddingFn needed here).
var ChromadbRM = class extends Retriever {
  #options;
  /**
   * @param options - Expects `collectionName` and optionally `url`
   *   (default http://localhost:8000).
   */
  constructor(options) {
    super();
    this.#options = options;
  }
  async retrieve(query, k) {
    // Lazy optional dependency: fail with an actionable install hint.
    const { ChromaClient } = await import("chromadb").catch(() => {
      throw new Error(
        "The `chromadb` package is required.\nInstall it with: npm install chromadb"
      );
    });
    const client = new ChromaClient({ path: this.#options.url ?? "http://localhost:8000" });
    const collection = await client.getCollection({ name: this.#options.collectionName });
    const results = await collection.query({
      queryTexts: [query],
      nResults: k
    });
    // Chroma returns one document list per query text; we sent exactly one.
    return (results.documents?.[0] ?? []).filter((d) => d !== null);
  }
};
|
|
1835
|
+
|
|
1836
|
+
// src/retrieve/backends/QdrantRM.ts
|
|
1837
|
+
// Retrieval backend backed by a Qdrant collection, using vector search with a
// caller-supplied embedding function. The client is loaded lazily.
var QdrantRM = class extends Retriever {
  #options;
  /**
   * @param options - Expects `collectionName`, `embeddingFn(text) -> number[]`,
   *   and optionally `url` (default http://localhost:6333), `apiKey`, and
   *   `textField` (default "text").
   */
  constructor(options) {
    super();
    this.#options = options;
  }
  async retrieve(query, k) {
    // Lazy optional dependency: fail with an actionable install hint.
    const { QdrantClient } = await import("@qdrant/js-client-rest").catch(() => {
      throw new Error(
        "The `@qdrant/js-client-rest` package is required.\nInstall it with: npm install @qdrant/js-client-rest"
      );
    });
    const client = new QdrantClient({
      url: this.#options.url ?? "http://localhost:6333",
      apiKey: this.#options.apiKey
    });
    const embedding = await this.#options.embeddingFn(query);
    const textField = this.#options.textField ?? "text";
    const result = await client.search(this.#options.collectionName, {
      vector: embedding,
      limit: k,
      // Only fetch the payload field we need.
      with_payload: [textField]
    });
    return result.map(
      (hit) => String(hit.payload?.[textField] ?? "")
    ).filter(Boolean);
  }
};
|
|
1865
|
+
|
|
1866
|
+
// src/retrieve/backends/FaissRM.ts
|
|
1867
|
+
// In-memory dense retriever: embeds passages once, then ranks them against a
// query embedding by cosine similarity (brute-force, no FAISS index needed).
var FaissRM = class extends Retriever {
  #passages;
  #precomputedEmbeddings;
  #embeddingFn;
  /** Lazily populated passage-embedding cache. */
  #cachedEmbeddings;
  /**
   * @param options - `passages` (string[]), `embeddingFn(text) -> number[]`,
   *   and optionally `embeddings` (precomputed vectors, parallel to passages).
   */
  constructor(options) {
    super();
    this.#passages = options.passages;
    this.#precomputedEmbeddings = options.embeddings;
    this.#embeddingFn = options.embeddingFn;
  }
  /** Embed the query, rank all passages by cosine similarity, return top `k`. */
  async retrieve(query, k) {
    const queryVec = await this.#embeddingFn(query);
    if (!this.#cachedEmbeddings) {
      // Prefer caller-supplied vectors; otherwise embed every passage once.
      this.#cachedEmbeddings = this.#precomputedEmbeddings
        ? [...this.#precomputedEmbeddings]
        : await Promise.all(this.#passages.map((p) => this.#embeddingFn(p)));
    }
    const ranked = this.#cachedEmbeddings
      .map((vec, idx) => ({ idx, sim: this.#cosineSimilarity(queryVec, vec) }))
      .sort((x, y) => y.sim - x.sim);
    return ranked.slice(0, k).map((r) => this.#passages[r.idx] ?? "");
  }
  /** Cosine similarity of two dense vectors; 0 when either has zero norm. */
  #cosineSimilarity(a, b) {
    let dot = 0;
    let magA = 0;
    let magB = 0;
    for (let i = 0; i < a.length; i++) {
      const x = a[i] ?? 0;
      const y = b[i] ?? 0;
      dot += x * y;
      magA += x * x;
      magB += y * y;
    }
    if (magA === 0 || magB === 0) return 0;
    return dot / (Math.sqrt(magA) * Math.sqrt(magB));
  }
};
|
|
1903
|
+
|
|
1904
|
+
// src/retrieve/backends/YouRM.ts
|
|
1905
|
+
// Retrieval backend for the You.com web-search index.
var YouRM = class extends Retriever {
  #options;
  /** @param options - Optional `apiKey` (falls back to YDC_API_KEY). */
  constructor(options = {}) {
    super();
    this.#options = options;
  }
  /**
   * Search the index and return up to `k` result snippets.
   * @throws Error when the HTTP response is not OK.
   */
  async retrieve(query, k) {
    const apiKey = this.#options.apiKey ?? process.env["YDC_API_KEY"];
    const endpoint = new URL("https://api.ydc-index.io/search");
    endpoint.searchParams.set("query", query);
    endpoint.searchParams.set("num_web_results", String(k));
    const headers = apiKey ? { "X-API-Key": apiKey } : {};
    const response = await fetch(endpoint.toString(), { headers });
    if (!response.ok) {
      throw new Error(`YouRM request failed: ${response.status} ${response.statusText}`);
    }
    const payload = await response.json();
    // Each hit may carry several snippets; flatten and cap at k overall.
    const snippets = (payload.hits ?? []).flatMap((hit) => hit.snippets ?? []);
    return snippets.slice(0, k);
  }
};
|
|
1928
|
+
|
|
1929
|
+
// src/retrieve/backends/MockRetriever.ts
|
|
1930
|
+
// Test double for retrieval: serves from a fixed in-memory corpus.
var MockRetriever = class extends Retriever {
  #passages;
  /** @param passages - Fixed corpus to serve results from. */
  constructor(passages) {
    super();
    this.#passages = passages;
  }
  /**
   * Case-insensitive substring match against the corpus; when nothing
   * matches, falls back to the full corpus. Returns at most `k` passages.
   */
  async retrieve(query, k) {
    const needle = query.toLowerCase();
    const hits = this.#passages.filter((p) => p.toLowerCase().includes(needle));
    if (hits.length > 0) {
      return hits.slice(0, k);
    }
    return this.#passages.slice(0, k);
  }
};
|
|
1943
|
+
|
|
1944
|
+
// src/optimizers/Optimizer.ts
|
|
1945
|
+
// Abstract base for optimizers. Subclasses implement
// `compile(student, trainset, metric)` and return an improved module.
var Optimizer = class {
};
|
|
1947
|
+
|
|
1948
|
+
// src/optimizers/LabeledFewShot.ts
|
|
1949
|
+
// Simplest optimizer: attach labeled training examples directly as few-shot
// demos, without any bootstrapping or metric-driven selection.
var LabeledFewShot = class extends Optimizer {
  #k;
  /**
   * @param k - Maximum number of demos to assign per predictor (default: 16).
   */
  constructor(k = 16) {
    super();
    this.#k = k;
  }
  /**
   * Clone `student` and give every Predict predictor the first `k` training
   * examples verbatim as demos. The metric parameter is unused.
   */
  async compile(student, trainset, _metric) {
    const compiled = student.clone();
    for (const [, pred] of compiled.namedPredictors()) {
      if (!(pred instanceof Predict)) continue;
      // Fresh slice per predictor so demo arrays are not shared.
      pred.demos = trainset.slice(0, this.#k);
    }
    return compiled;
  }
};
|
|
1968
|
+
|
|
1969
|
+
// src/optimizers/BootstrapFewShot.ts
|
|
1970
|
+
// Bootstraps few-shot demos by running a teacher module over the trainset and
// keeping only the examples whose predictions pass the metric.
var BootstrapFewShot = class extends Optimizer {
  #opts;
  /**
   * @param options - `maxBootstrappedDemos` (default 4), `maxLabeledDemos`
   *   (default 16; stored but not used in this method — subclasses receive it
   *   via `options`), and an optional `teacher` module (defaults to the
   *   student itself).
   */
  constructor(options = {}) {
    super();
    this.#opts = {
      maxBootstrappedDemos: options.maxBootstrappedDemos ?? 4,
      maxLabeledDemos: options.maxLabeledDemos ?? 16,
      teacher: options.teacher ?? null
    };
  }
  async compile(student, trainset, metric) {
    const teacher = this.#opts.teacher ?? student;
    const demos = [];
    for (const example of trainset) {
      // Stop as soon as we have enough validated demos.
      if (demos.length >= this.#opts.maxBootstrappedDemos) break;
      try {
        const inputs = example.toDict();
        const prediction = await teacher.forward(inputs);
        // Metric may return a boolean or a numeric score; any score > 0 passes.
        const raw = metric(example, prediction);
        const passed = typeof raw === "boolean" ? raw : raw > 0;
        if (passed) {
          // Keep the example augmented with the teacher's (validated) outputs.
          demos.push(example.with(prediction.toDict()));
        }
      } catch {
        // Best-effort: a failing teacher run just skips this example.
      }
    }
    const optimized = student.clone();
    for (const [, predictor] of optimized.namedPredictors()) {
      if (predictor instanceof Predict) {
        predictor.demos = demos.slice(0, this.#opts.maxBootstrappedDemos);
      }
    }
    return optimized;
  }
};
|
|
2005
|
+
|
|
2006
|
+
// src/evaluate/evaluate.ts
|
|
2007
|
+
/**
 * Score a program against a set of examples.
 *
 * Each example's dict is fed to `program.forward`; a forward failure counts
 * as an empty prediction. Boolean metric results are coerced to 0/1, and an
 * example "passes" when its score is > 0.
 *
 * @param program - Object with an async `forward(inputs)` method.
 * @param examples - Examples exposing `toDict()`.
 * @param metric - `(example, prediction) -> number | boolean`.
 * @param options - `numThreads` (batch size for concurrent runs, default 1)
 *   and `displayProgress` (console logging, default false).
 * @returns `{ score, numPassed, total, results }` where `score` is the mean.
 */
async function evaluate(program, examples, metric, options = {}) {
  const { numThreads = 1, displayProgress = false } = options;
  const scoreOne = async (example) => {
    let prediction;
    try {
      prediction = await program.forward(example.toDict());
    } catch {
      prediction = new Prediction({});
    }
    const raw = metric(example, prediction);
    const value = typeof raw === "boolean" ? (raw ? 1 : 0) : raw;
    return { example, prediction, score: value, passed: value > 0 };
  };
  const results = [];
  if (numThreads <= 1) {
    for (let i = 0; i < examples.length; i++) {
      const outcome = await scoreOne(examples[i]);
      results.push(outcome);
      if (displayProgress) {
        console.log(`[${i + 1}/${examples.length}] score=${outcome.score.toFixed(2)}`);
      }
    }
  } else {
    for (let start = 0; start < examples.length; start += numThreads) {
      const chunk = examples.slice(start, start + numThreads);
      const chunkOutcomes = await Promise.all(chunk.map(scoreOne));
      results.push(...chunkOutcomes);
      if (displayProgress) {
        console.log(`[${start + chunkOutcomes.length}/${examples.length}]`);
      }
    }
  }
  const total = results.length;
  const numPassed = results.filter((r) => r.passed).length;
  const score = total > 0 ? results.reduce((sum, r) => sum + r.score, 0) / total : 0;
  return { score, numPassed, total, results };
}
|
|
2045
|
+
|
|
2046
|
+
// src/evaluate/metrics.ts
|
|
2047
|
+
/**
 * Metric factory: exact string equality on a single field.
 * @param field - Field compared on both example and prediction (default "answer").
 * @param caseSensitive - When false (default), compares lowercased values.
 * @returns `(example, prediction) -> boolean`.
 */
function exactMatch(field = "answer", caseSensitive = false) {
  return (example, prediction) => {
    const want = String(example.get(field) ?? "");
    const got = String(prediction.get(field) ?? "");
    if (caseSensitive) {
      return want === got;
    }
    return want.toLowerCase() === got.toLowerCase();
  };
}
|
|
2054
|
+
/**
 * Metric factory: token-overlap F1 on a single field.
 * Overlap is counted over UNIQUE tokens, while precision/recall denominators
 * use total token counts — a deliberate simplification of multiset F1.
 * @returns `(example, prediction) -> number` in [0, 1].
 */
function f1(field = "answer") {
  return (example, prediction) => {
    const goldTokens = tokenize(String(example.get(field) ?? ""));
    const predTokens = tokenize(String(prediction.get(field) ?? ""));
    // Both empty counts as a perfect match; one-sided empty is a miss.
    if (goldTokens.length === 0 && predTokens.length === 0) return 1;
    if (goldTokens.length === 0 || predTokens.length === 0) return 0;
    const predSet = new Set(predTokens);
    let overlap = 0;
    for (const token of new Set(goldTokens)) {
      if (predSet.has(token)) overlap += 1;
    }
    const precision = overlap / predTokens.length;
    const recall = overlap / goldTokens.length;
    if (precision + recall === 0) return 0;
    return (2 * precision * recall) / (precision + recall);
  };
}
|
|
2069
|
+
/**
 * Metric factory: pass@k — succeeds if ANY of the top-k candidates
 * (the primary prediction plus up to k-1 alternate completions) satisfies
 * `innerMetric`.
 * @returns `(example, prediction, trace) -> 0 | 1`.
 */
function passAtK(innerMetric, k) {
  return (example, prediction, trace) => {
    // Candidate 0 is the prediction itself; the rest come from completions.
    const limit = Math.min(k, prediction.completions.length + 1);
    for (let attempt = 0; attempt < limit; attempt++) {
      const candidate = attempt === 0
        ? prediction
        : buildPrediction(prediction.completions[attempt - 1] ?? {});
      const verdict = innerMetric(example, candidate, trace);
      const passed = verdict === true || (typeof verdict === "number" && verdict > 0);
      if (passed) return 1;
    }
    return 0;
  };
}
|
|
2079
|
+
/**
 * Metric factory: simplified BLEU on a single field — geometric mean of
 * unigram and bigram set-membership precision (no brevity penalty, no
 * clipping). Single-token hypotheses are scored on unigrams alone.
 * @returns `(example, prediction) -> number` in [0, 1].
 */
function bleu(field = "answer") {
  const bigramsOf = (tokens) => tokens.slice(0, -1).map((t, i) => `${t} ${tokens[i + 1] ?? ""}`);
  return (example, prediction) => {
    const reference = tokenize(String(example.get(field) ?? ""));
    const hypothesis = tokenize(String(prediction.get(field) ?? ""));
    if (hypothesis.length === 0) return 0;
    const refUnigrams = new Set(reference);
    let uniHits = 0;
    for (const t of hypothesis) {
      if (refUnigrams.has(t)) uniHits += 1;
    }
    const uni = uniHits / hypothesis.length;
    if (hypothesis.length < 2) return uni;
    const refBigrams = new Set(bigramsOf(reference));
    const hypBigrams = bigramsOf(hypothesis);
    const bi = hypBigrams.filter((b) => refBigrams.has(b)).length / hypBigrams.length;
    return Math.sqrt(uni * bi);
  };
}
|
|
2095
|
+
/**
 * Metric factory: ROUGE-L F1 on a single field, based on the longest common
 * subsequence of the tokenized reference and hypothesis.
 * @returns `(example, prediction) -> number` in [0, 1].
 */
function rouge(field = "answer") {
  return (example, prediction) => {
    const refTokens = tokenize(String(example.get(field) ?? ""));
    const hypTokens = tokenize(String(prediction.get(field) ?? ""));
    // Both empty counts as a perfect match; one-sided empty is a miss.
    if (refTokens.length === 0) return hypTokens.length === 0 ? 1 : 0;
    if (hypTokens.length === 0) return 0;
    const overlap = lcs(refTokens, hypTokens);
    const precision = overlap / hypTokens.length;
    const recall = overlap / refTokens.length;
    const denom = precision + recall;
    return denom === 0 ? 0 : (2 * precision * recall) / denom;
  };
}
|
|
2108
|
+
/**
 * Normalize text into word tokens: lowercase, strip punctuation
 * (non-word, non-space characters), then split on whitespace.
 * @returns Array of non-empty tokens.
 */
function tokenize(text) {
  const cleaned = text.toLowerCase().replace(/[^\w\s]/g, "");
  return cleaned.split(/\s+/).filter(Boolean);
}
|
|
2111
|
+
/**
 * Length of the longest common subsequence of two token arrays.
 * Classic DP recurrence, but with two rolling rows instead of the full
 * (m+1) x (n+1) table — O(m*n) time, O(n) space (was O(m*n) space).
 * @param a - First token sequence.
 * @param b - Second token sequence.
 * @returns LCS length (0 when either sequence is empty).
 */
function lcs(a, b) {
  const n = b.length;
  let prev = new Array(n + 1).fill(0);
  for (let i = 1; i <= a.length; i++) {
    const curr = new Array(n + 1).fill(0);
    for (let j = 1; j <= n; j++) {
      curr[j] = a[i - 1] === b[j - 1]
        ? prev[j - 1] + 1
        : Math.max(prev[j], curr[j - 1]);
    }
    prev = curr;
  }
  return prev[n];
}
|
|
2122
|
+
// Wrap a raw completion record in a Prediction so metrics can treat
// alternate completions uniformly (used by passAtK).
function buildPrediction(data) {
  return new Prediction(data);
}
|
|
2125
|
+
|
|
2126
|
+
// src/optimizers/BootstrapFewShotWithRandomSearch.ts
|
|
2127
|
+
// Bootstraps demos (via the parent optimizer), then random-searches over
// shuffled demo subsets, keeping the candidate program that scores best on
// the validation set (or the trainset when no valset is given).
var BootstrapFewShotWithRandomSearch = class extends BootstrapFewShot {
  #numCandidates;
  #valset;
  /**
   * @param options - Parent BootstrapFewShot options plus
   *   `numCandidatePrograms` (default 8) and optional `valset`.
   */
  constructor(options = {}) {
    super(options);
    this.#numCandidates = options.numCandidatePrograms ?? 8;
    this.#valset = options.valset;
  }
  async compile(student, trainset, metric) {
    const bootstrapped = await super.compile(student, trainset, metric);
    // Collect the demos the parent attached across all predictors.
    const allDemos = [];
    for (const [, pred] of bootstrapped.namedPredictors()) {
      if (pred instanceof Predict) {
        allDemos.push(...pred.demos);
      }
    }
    // Nothing to search over: return the bootstrapped program unchanged.
    if (allDemos.length === 0) return bootstrapped;
    const evalSet = this.#valset ?? trainset;
    let bestScore = -Infinity;
    let bestModule = bootstrapped;
    for (let i = 0; i < this.#numCandidates; i++) {
      const candidate = bootstrapped.clone();
      // Random subset: shuffle, then take a uniformly random prefix length.
      const shuffle = [...allDemos].sort(() => Math.random() - 0.5);
      const k = Math.floor(Math.random() * allDemos.length) + 1;
      for (const [, pred] of candidate.namedPredictors()) {
        if (pred instanceof Predict) {
          pred.demos = shuffle.slice(0, k);
        }
      }
      const { score } = await evaluate(candidate, evalSet, metric);
      if (score > bestScore) {
        bestScore = score;
        bestModule = candidate;
      }
    }
    return bestModule;
  }
};
|
|
2165
|
+
|
|
2166
|
+
// src/optimizers/BootstrapFewShotWithOptuna.ts
|
|
2167
|
+
// Bootstraps demos, then runs an Optuna-inspired (TPE-like) trial loop: demo
// index subsets are sampled by mutating past high-scoring trials, evaluated,
// and the best-scoring candidate program is returned. No actual Optuna
// dependency — the sampler is a lightweight in-process approximation.
var BootstrapFewShotWithOptuna = class extends BootstrapFewShot {
  #numTrials;
  #valset;
  /**
   * @param options - Parent BootstrapFewShot options plus `numTrials`
   *   (default 20) and optional `valset`.
   */
  constructor(options = {}) {
    super(options);
    this.#numTrials = options.numTrials ?? 20;
    this.#valset = options.valset;
  }
  async compile(student, trainset, metric) {
    const bootstrapped = await super.compile(student, trainset, metric);
    // Pool of candidate demos gathered from all predictors.
    const allDemos = [];
    for (const [, predictor] of bootstrapped.namedPredictors()) {
      if (predictor instanceof Predict) {
        allDemos.push(...predictor.demos);
      }
    }
    if (allDemos.length === 0) {
      return bootstrapped;
    }
    const evalSet = this.#valset ?? trainset;
    const maxDemos = Math.max(1, allDemos.length);
    // Top fraction of past trials considered "good" seeds for mutation.
    const TOP_TRIALS_FRACTION = 0.25;
    // Probability of seeding from a good trial rather than a bad one.
    const GOOD_TRIAL_SAMPLING_PROBABILITY = 0.7;
    const trials = [];
    // Mean metric score of `candidate` over the eval set (errors score 0).
    const evaluate2 = async (candidate) => {
      let score = 0;
      for (const example of evalSet) {
        try {
          const inputs = example.toDict();
          const prediction = await candidate.forward(inputs);
          const raw = metric(example, prediction);
          score += typeof raw === "boolean" ? raw ? 1 : 0 : raw;
        } catch {
        }
      }
      return evalSet.length > 0 ? score / evalSet.length : 0;
    };
    // Sample a demo-index subset: mutate a past trial (add or remove one
    // index) when history exists, otherwise draw a random subset.
    const sampleIndices = (goodTrials, badTrials, n) => {
      const useGood = goodTrials.length > 0 && Math.random() < GOOD_TRIAL_SAMPLING_PROBABILITY;
      const pool = useGood ? goodTrials : badTrials.length > 0 ? badTrials : null;
      if (pool !== null && pool.length > 0) {
        const base = pool[Math.floor(Math.random() * pool.length)];
        const result = new Set(base.indices);
        // 50/50: grow by one random index, or shrink by one (if possible).
        if (Math.random() < 0.5 && result.size < maxDemos) {
          result.add(Math.floor(Math.random() * maxDemos));
        } else if (result.size > 1) {
          const arr = [...result];
          result.delete(arr[Math.floor(Math.random() * arr.length)]);
        }
        return [...result].slice(0, n);
      }
      // Cold start: random permutation prefix of the index range.
      const indices = Array.from({ length: maxDemos }, (_, i) => i);
      return indices.sort(() => Math.random() - 0.5).slice(0, Math.min(n, maxDemos));
    };
    let bestScore = -Infinity;
    let bestModule = bootstrapped;
    for (let t = 0; t < this.#numTrials; t++) {
      // Re-rank history each trial; split into good/bad pools.
      const sortedTrials = [...trials].sort((a, b) => b.score - a.score);
      const topK = Math.max(1, Math.floor(sortedTrials.length * TOP_TRIALS_FRACTION));
      const goodTrials = sortedTrials.slice(0, topK);
      const badTrials = sortedTrials.slice(topK);
      // Target subset size: half the pool (at least 1).
      const numDemos = Math.max(1, Math.floor(maxDemos * 0.5));
      const indices = sampleIndices(goodTrials, badTrials, numDemos);
      const selectedDemos = indices.map((i) => allDemos[i]).filter((d) => d !== void 0);
      const candidate = bootstrapped.clone();
      for (const [, predictor] of candidate.namedPredictors()) {
        if (predictor instanceof Predict) {
          predictor.demos = selectedDemos;
        }
      }
      const score = await evaluate2(candidate);
      trials.push({ indices, score });
      if (score > bestScore) {
        bestScore = score;
        bestModule = candidate;
      }
    }
    return bestModule;
  }
};
|
|
2247
|
+
|
|
2248
|
+
// src/optimizers/COPRO.ts
|
|
2249
|
+
// COPRO: instruction optimization. For each predictor, asks the configured LM
// for `breadth` rewritten instructions per round, over `depth` rounds, and
// keeps any rewrite that improves the score on a small eval slice.
var COPRO = class extends Optimizer {
  #breadth;
  #depth;
  /** @param options - `breadth` (default 5) and `depth` (default 3). */
  constructor(options = {}) {
    super();
    this.#breadth = options.breadth ?? 5;
    this.#depth = options.depth ?? 3;
  }
  /** @throws Error when no LM is configured in global settings. */
  async compile(student, trainset, metric) {
    const lm = settings.lm;
    if (!lm) throw new Error("COPRO requires a configured LM.");
    let best = student;
    // Baseline and all candidate evaluations use only the first 10 examples
    // to keep LM cost bounded.
    let bestScore = (await evaluate(student, trainset.slice(0, 10), metric)).score;
    for (let round = 0; round < this.#depth; round++) {
      for (const [name, predictor] of best.namedPredictors()) {
        if (!(predictor instanceof Predict)) continue;
        // Generate `breadth` alternative instructions at high temperature.
        const candidates = [];
        for (let i = 0; i < this.#breadth; i++) {
          const prompt = this.#buildInstructionPrompt(predictor.instructions ?? "", name);
          const resp = await lm.call(prompt, { temperature: 0.9 });
          candidates.push(resp.text.trim());
        }
        // Greedy hill-climb: adopt any candidate that beats the current best.
        for (const candidate of candidates) {
          const clone = best.clone();
          for (const [n, p] of clone.namedPredictors()) {
            if (n === name && p instanceof Predict) {
              p.instructions = candidate;
            }
          }
          const { score } = await evaluate(clone, trainset.slice(0, 10), metric);
          if (score > bestScore) {
            bestScore = score;
            best = clone;
          }
        }
      }
    }
    return best;
  }
  // Meta-prompt asking the LM to rewrite a predictor's instruction.
  #buildInstructionPrompt(currentInstruction, fieldName) {
    return `You are an expert prompt engineer.
Current instruction for the "${fieldName}" field: "${currentInstruction}"

Write an improved, concise instruction for this field that will produce better outputs from a language model. Output only the instruction text.`;
  }
};
|
|
2295
|
+
|
|
2296
|
+
// src/optimizers/MIPRO.ts
|
|
2297
|
+
// MIPRO: two-phase optimizer — instruction tuning (COPRO) followed by demo
// tuning (BootstrapFewShotWithRandomSearch).
var MIPRO = class extends Optimizer {
  #opts;
  /**
   * @param options - `numCandidates` (COPRO breadth, default 5),
   *   `initTemperature` (stored, default 0.9), `numCandidatePrograms`
   *   (random-search width, default 8), `verbose` (default false).
   */
  constructor(options = {}) {
    super();
    this.#opts = {
      numCandidates: options.numCandidates ?? 5,
      initTemperature: options.initTemperature ?? 0.9,
      numCandidatePrograms: options.numCandidatePrograms ?? 8,
      verbose: options.verbose ?? false
    };
  }
  /** Run both phases in sequence and return the compiled program. */
  async compile(student, trainset, metric) {
    const { numCandidates, numCandidatePrograms, verbose } = this.#opts;
    if (verbose) console.log("[MIPRO] Phase 1: Instruction optimization (COPRO)");
    const instructionTuner = new COPRO({ breadth: numCandidates, depth: 2 });
    const tuned = await instructionTuner.compile(student, trainset, metric);
    if (verbose) console.log("[MIPRO] Phase 2: Demo optimization (BootstrapFewShotWithRandomSearch)");
    const demoTuner = new BootstrapFewShotWithRandomSearch({
      numCandidatePrograms
    });
    return demoTuner.compile(tuned, trainset, metric);
  }
};
|
|
2319
|
+
|
|
2320
|
+
// src/optimizers/KNNFewShot.ts
|
|
2321
|
+
// KNN few-shot: embeds the trainset once at compile time, then monkey-patches
// each predictor's `forward` so that, per call, the k nearest training
// examples (by cosine similarity to the query) are installed as demos.
var KNNFewShot = class extends Optimizer {
  #opts;
  /**
   * @param options - `embeddingFn(text) -> number[]` (required), `k`
   *   neighbors (default 3), and `keyField` — the example field to embed
   *   (default: all field values joined with spaces).
   */
  constructor(options) {
    super();
    this.#opts = {
      k: options.k ?? 3,
      embeddingFn: options.embeddingFn,
      keyField: options.keyField ?? ""
    };
  }
  async compile(student, trainset, _metric) {
    const embeddingFn = this.#opts.embeddingFn;
    const k = this.#opts.k;
    // Precompute one embedding per training example.
    const trainEmbeddings = await Promise.all(
      trainset.map(async (ex) => {
        const key = this.#opts.keyField ? String(ex.get(this.#opts.keyField) ?? "") : Object.values(ex.toDict()).join(" ");
        return { ex, embedding: await embeddingFn(key) };
      })
    );
    const optimized = student.clone();
    for (const [, predictor] of optimized.namedPredictors()) {
      if (!(predictor instanceof Predict)) continue;
      const originalForward = predictor.forward.bind(predictor);
      // Replace forward with a demo-selecting wrapper. NOTE(review): this
      // mutates `predictor.demos` on every call, so concurrent forwards on
      // the same predictor may race — confirm callers serialize calls.
      predictor.forward = async (inputs) => {
        const queryText = Object.values(inputs).join(" ");
        const queryEmb = await embeddingFn(queryText);
        const scored = trainEmbeddings.map(({ ex, embedding }) => ({
          ex,
          score: cosineSimilarity(queryEmb, embedding)
        }));
        scored.sort((a, b) => b.score - a.score);
        predictor.demos = scored.slice(0, k).map((s) => s.ex);
        return originalForward(inputs);
      };
    }
    return optimized;
  }
};
|
|
2359
|
+
/**
 * Cosine similarity between two numeric vectors.
 * Iterates over `a`'s indices; missing/undefined entries count as 0.
 * Returns 0 when either vector has zero magnitude (avoids divide-by-zero).
 *
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} similarity in [-1, 1], or 0 for degenerate input
 */
function cosineSimilarity(a, b) {
  let dotProduct = 0;
  let magSqA = 0;
  let magSqB = 0;
  for (const [i, rawA] of a.entries()) {
    const x = rawA ?? 0;
    const y = b[i] ?? 0;
    dotProduct += x * y;
    magSqA += x * x;
    magSqB += y * y;
  }
  if (magSqA === 0 || magSqB === 0) return 0;
  return dotProduct / (Math.sqrt(magSqA) * Math.sqrt(magSqB));
}
|
|
2370
|
+
|
|
2371
|
+
// src/optimizers/Ensemble.ts
var EnsembleOptimizer = class extends Optimizer {
  // Optional reducer applied to the (single-element) prediction list.
  #reduceFunc;
  /**
   * @param {object} [options]
   * @param {(preds: unknown[]) => unknown} [options.reduceFunc] combines predictions
   */
  constructor(options = {}) {
    super();
    this.#reduceFunc = options.reduceFunc;
  }
  /**
   * Wrap the student in a module-like object: namedPredictors/dump/load
   * delegate straight to the student, while forward() runs the student and,
   * when a reduceFunc was supplied, reduces its single prediction.
   * The trainset and metric are unused.
   */
  async compile(student, _trainset, _metric) {
    const reduce = this.#reduceFunc;
    const forward = async (...args) => {
      const pred = await student.forward(...args);
      if (reduce) return reduce([pred]);
      return pred;
    };
    return {
      namedPredictors: student.namedPredictors.bind(student),
      dump: student.dump.bind(student),
      load: student.load.bind(student),
      forward
    };
  }
};
|
|
2394
|
+
|
|
2395
|
+
// src/optimizers/BootstrapFinetune.ts
|
|
2396
|
+
import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2 } from "fs";
|
|
2397
|
+
import { dirname } from "path";
|
|
2398
|
+
var BootstrapFinetune = class extends Optimizer {
  // Destination path for the exported JSONL file.
  #exportPath;
  // "openai" (chat messages) or anything else (prompt/completion pairs).
  #format;
  #maxBootstrappedDemos;
  // Inner optimizer that collects the demos we later export.
  #bootstrap;
  /**
   * @param {object} [options]
   * @param {string} [options.exportPath="./finetune_data.jsonl"]
   * @param {string} [options.format="openai"]
   * @param {number} [options.maxBootstrappedDemos=4]
   */
  constructor(options = {}) {
    super();
    this.#exportPath = options.exportPath ?? "./finetune_data.jsonl";
    this.#format = options.format ?? "openai";
    this.#maxBootstrappedDemos = options.maxBootstrappedDemos ?? 4;
    this.#bootstrap = new BootstrapFewShot({
      maxBootstrappedDemos: this.#maxBootstrappedDemos
    });
  }
  // Serialize one bootstrapped demo into a single JSONL record string,
  // rendering each signature field as "name: value" lines.
  #toRecord(demo, signature) {
    const dict = demo.toDict();
    const render = (keys) => keys.map((key) => `${key}: ${String(dict[key] ?? "")}`).join("\n");
    const prompt = render([...signature.inputs.keys()]);
    const completion = render([...signature.outputs.keys()]);
    if (this.#format === "openai") {
      return JSON.stringify({
        messages: [
          { role: "user", content: prompt },
          { role: "assistant", content: completion }
        ]
      });
    }
    return JSON.stringify({ prompt, completion });
  }
  /**
   * Bootstrap demos via BootstrapFewShot, write every Predict demo to the
   * export path as JSONL fine-tuning data, and return the compiled module.
   */
  async compile(student, trainset, metric) {
    const compiled = await this.#bootstrap.compile(student, trainset, metric);
    const records = [];
    for (const [, predictor] of compiled.namedPredictors()) {
      if (!(predictor instanceof Predict)) continue;
      for (const demo of predictor.demos) {
        records.push(this.#toRecord(demo, predictor.signature));
      }
    }
    // Ensure the parent directory exists before writing.
    mkdirSync2(dirname(this.#exportPath), { recursive: true });
    writeFileSync2(this.#exportPath, records.join("\n"), "utf8");
    return compiled;
  }
};
|
|
2444
|
+
|
|
2445
|
+
// src/optimizers/GRPO.ts
var GRPO = class extends Optimizer {
  #numSteps;
  #groupSize;
  #temperature;
  #maxLabeledDemos;
  /**
   * @param {object} [options]
   * @param {number} [options.numSteps=20] optimization steps
   * @param {number} [options.groupSize=8] candidates sampled per step
   * @param {number} [options.temperature=1] LM temperature for instruction rewrites
   * @param {number} [options.maxLabeledDemos=16] demos for the initial bootstrap
   */
  constructor(options = {}) {
    super();
    this.#numSteps = options.numSteps ?? 20;
    this.#groupSize = options.groupSize ?? 8;
    this.#temperature = options.temperature ?? 1;
    this.#maxLabeledDemos = options.maxLabeledDemos ?? 16;
  }
  // Clone `base` and ask the LM to rewrite every Predict's instructions.
  async #proposeCandidate(lm, base) {
    const candidate = base.clone();
    for (const [, predictor] of candidate.namedPredictors()) {
      if (!(predictor instanceof Predict)) continue;
      const currentInstr = predictor.instructions ?? "";
      const prompt = `You are an expert prompt engineer.
Current instruction: "${currentInstr}"

Write an improved instruction for a language model. Output only the instruction text.`;
      const resp = await lm.call(prompt, { temperature: this.#temperature });
      predictor.instructions = resp.text.trim();
    }
    return candidate;
  }
  /**
   * Group-relative search: bootstrap a starting program, then repeatedly
   * sample a group of instruction-rewritten candidates, z-score their eval
   * scores within the group, and keep the top candidate when it beats the
   * best score so far. Evaluates on (up to) the first 10 trainset examples.
   * @throws {Error} when no LM is configured in settings.
   */
  async compile(student, trainset, metric) {
    const lm = settings.lm;
    if (!lm) throw new Error("GRPO requires a configured LM.");
    const bootstrap = new BootstrapFewShot({
      maxBootstrappedDemos: this.#maxLabeledDemos
    });
    let best = await bootstrap.compile(student, trainset, metric);
    const evalSet = trainset.slice(0, Math.min(10, trainset.length));
    let bestScore = (await evaluate(best, evalSet, metric)).score;
    for (let step = 0; step < this.#numSteps; step++) {
      const candidates = [];
      for (let g = 0; g < this.#groupSize; g++) {
        const candidate = await this.#proposeCandidate(lm, best);
        const { score } = await evaluate(candidate, evalSet, metric);
        candidates.push({ module: candidate, score });
      }
      // Group-relative advantage: z-score each candidate against the group.
      const scores = candidates.map((c) => c.score);
      const mean = scores.reduce((acc, s) => acc + s, 0) / scores.length;
      const variance = scores.reduce((acc, s) => acc + (s - mean) ** 2, 0) / scores.length;
      const std = Math.sqrt(variance) || 1;
      const advantages = scores.map((s) => (s - mean) / std);
      const bestIdx = advantages.indexOf(Math.max(...advantages));
      const topScore = candidates[bestIdx]?.score ?? 0;
      if (topScore > bestScore) {
        bestScore = topScore;
        best = candidates[bestIdx].module;
      }
    }
    return best;
  }
};
|
|
2499
|
+
|
|
2500
|
+
// src/optimizers/SIMBA.ts
var SIMBA = class extends Optimizer {
  #numIter;
  #batchSize;
  #maxBootstrappedDemos;
  /**
   * @param {object} [options]
   * @param {number} [options.numIter=10] hill-climbing iterations
   * @param {number} [options.batchSize=8] examples per evaluation batch
   * @param {number} [options.maxBootstrappedDemos=4] demos for the initial bootstrap
   */
  constructor(options = {}) {
    super();
    this.#numIter = options.numIter ?? 10;
    this.#batchSize = options.batchSize ?? 8;
    this.#maxBootstrappedDemos = options.maxBootstrappedDemos ?? 4;
  }
  // Fisher-Yates shuffle of a copy of the trainset, then take the first
  // batchSize examples (the trainset itself is never mutated).
  #sampleBatch(trainset) {
    const pool = [...trainset];
    for (let i = pool.length - 1; i > 0; i--) {
      const j = Math.floor(Math.random() * (i + 1));
      [pool[i], pool[j]] = [pool[j], pool[i]];
    }
    return pool.slice(0, Math.min(this.#batchSize, pool.length));
  }
  /**
   * Stochastic demo pruning: bootstrap a starting program, then each
   * iteration clone the incumbent, drop one random demo from every Predict
   * that has more than one, and keep the clone if it scores at least as
   * well on a fresh random batch (ties favor the newer, smaller program).
   */
  async compile(student, trainset, metric) {
    const bootstrap = new BootstrapFewShot({
      maxBootstrappedDemos: this.#maxBootstrappedDemos
    });
    let best = await bootstrap.compile(student, trainset, metric);
    const evalBatch = trainset.slice(0, Math.min(this.#batchSize, trainset.length));
    let bestScore = (await evaluate(best, evalBatch, metric)).score;
    for (let iter = 0; iter < this.#numIter; iter++) {
      const batch = this.#sampleBatch(trainset);
      const candidate = best.clone();
      for (const [, predictor] of candidate.namedPredictors()) {
        if (!(predictor instanceof Predict)) continue;
        if (predictor.demos.length <= 1) continue;
        const dropIdx = Math.floor(Math.random() * predictor.demos.length);
        predictor.demos = predictor.demos.filter((_, i) => i !== dropIdx);
      }
      const { score } = await evaluate(candidate, batch, metric);
      if (score >= bestScore) {
        bestScore = score;
        best = candidate;
      }
    }
    return best;
  }
};
|
|
2541
|
+
|
|
2542
|
+
// src/optimizers/AvatarOptimizer.ts
// Hill-climbs per-predictor "persona" prefixes: for each Predict it asks the
// LM for several candidate personas, prepends each to the predictor's
// instructions on a clone, and keeps any clone that beats the best score.
var AvatarOptimizer = class extends Optimizer {
  // Number of persona candidates generated per predictor.
  #numAvatars;
  // Upper bound on evaluation-set size (first N trainset examples).
  #maxLabeledDemos;
  constructor(options = {}) {
    super();
    this.#numAvatars = options.numAvatars ?? 4;
    this.#maxLabeledDemos = options.maxLabeledDemos ?? 8;
  }
  // Returns the best-scoring module found; throws if settings.lm is unset.
  async compile(student, trainset, metric) {
    const lm = settings.lm;
    if (!lm) throw new Error("AvatarOptimizer requires a configured LM.");
    let best = student.clone();
    const evalSet = trainset.slice(0, Math.min(this.#maxLabeledDemos, trainset.length));
    let bestScore = (await evaluate(best, evalSet, metric)).score;
    // NOTE(review): this loop iterates the predictors of the ORIGINAL clone
    // while `best` may be reassigned below, so later candidates are cloned
    // from an already-prefixed winner — persona prefixes can stack across
    // predictors/avatars. Looks intentional (greedy accumulation) — confirm.
    for (const [name, predictor] of best.namedPredictors()) {
      if (!(predictor instanceof Predict)) continue;
      // Phase 1: generate #numAvatars persona strings for this predictor.
      const avatarCandidates = [];
      for (let i = 0; i < this.#numAvatars; i++) {
        const prompt = `You are an expert at designing AI personas.
Task field: "${name}"
Current instruction: "${predictor.instructions ?? ""}"

Write a concise role/persona prefix (1-2 sentences) for an AI assistant that excels at this task. Output only the persona description.`;
        const resp = await lm.call(prompt, { temperature: 0.9 });
        avatarCandidates.push(resp.text.trim());
      }
      // Phase 2: try each persona as a prefix on a clone of the incumbent;
      // strictly better scores replace the incumbent.
      for (const avatar of avatarCandidates) {
        const clone = best.clone();
        for (const [n, p] of clone.namedPredictors()) {
          if (n === name && p instanceof Predict) {
            const base = p.instructions ?? "";
            // Persona, blank line, then the existing instruction text.
            p.instructions = `${avatar}

${base}`.trim();
          }
        }
        const { score } = await evaluate(clone, evalSet, metric);
        if (score > bestScore) {
          bestScore = score;
          best = clone;
        }
      }
    }
    return best;
  }
};
|
|
2589
|
+
|
|
2590
|
+
// src/mcp/MCPAdapter.ts
var MCPToolAdapter = class {
  // Raw constructor options ({ tools, callHandler } for the offline path).
  #options;
  // Lazily-built, cached tool list.
  #tools;
  constructor(options = {}) {
    this.#options = options;
  }
  /**
   * Build (once) and return the adapted tool list. When both `tools` and
   * `callHandler` were supplied, each tool's fn() parses its string argument
   * as JSON (falling back to { input: args }), dispatches to callHandler,
   * and stringifies non-string results. Otherwise a live MCP connection
   * would be needed, which is not implemented yet.
   * @throws {Error} when the SDK is missing or no tools/callHandler given.
   */
  async getTools() {
    if (this.#tools) return this.#tools;
    const { tools, callHandler } = this.#options;
    if (tools && callHandler) {
      this.#tools = tools.map((tool) => ({
        name: tool.name,
        description: tool.description,
        fn: async (args) => {
          let parsed;
          try {
            parsed = JSON.parse(args);
          } catch {
            // Not valid JSON — pass the raw string through under `input`.
            parsed = { input: args };
          }
          const result = await callHandler(tool.name, parsed);
          if (typeof result === "string") return result;
          return JSON.stringify(result);
        }
      }));
      return this.#tools;
    }
    // Probe for the optional SDK purely to produce a helpful install hint.
    await import("@modelcontextprotocol/sdk/client/index.js").catch(() => {
      throw new Error(
        "The `@modelcontextprotocol/sdk` package is required for MCPToolAdapter.\nInstall it with: npm install @modelcontextprotocol/sdk"
      );
    });
    throw new Error(
      "Live MCP connection not yet implemented. Use tools+callHandler for now."
    );
  }
};
|
|
2627
|
+
|
|
2628
|
+
// src/mcp/DSTsxMCPServer.ts
var DSTsxMCPServer = class {
  // tool name -> { name, description, inputSchema, handler }
  #tools = /* @__PURE__ */ new Map();
  /**
   * Expose a module as an MCP-style tool. Every input field is declared as
   * a required string in the generated JSON schema; the handler runs the
   * module's forward() and returns result.toJSON().
   * @returns this, for chaining
   */
  registerModule(name, description, module, inputFields) {
    const properties = Object.fromEntries(
      inputFields.map((field) => [field, { type: "string" }])
    );
    const handler = async (inputs) => {
      const result = await module.forward(inputs);
      return result.toJSON();
    };
    this.#tools.set(name, {
      name,
      description,
      inputSchema: { type: "object", properties, required: inputFields },
      handler
    });
    return this;
  }
  /** All registered tool definitions, in registration order. */
  getToolDefinitions() {
    return [...this.#tools.values()];
  }
  /**
   * Invoke a registered tool by name.
   * @throws {Error} when no tool with that name was registered
   */
  async callTool(name, inputs) {
    const tool = this.#tools.get(name);
    if (!tool) throw new Error(`Tool "${name}" not found.`);
    return tool.handler(inputs);
  }
  /**
   * Stdio transport is not implemented; this only probes for the optional
   * SDK to give an actionable install message, then always throws.
   */
  async createStdioServer() {
    await import("@modelcontextprotocol/sdk/server/index.js").catch(() => {
      throw new Error(
        "The `@modelcontextprotocol/sdk` package is required.\nInstall it with: npm install @modelcontextprotocol/sdk"
      );
    });
    throw new Error(
      "createStdioServer requires @modelcontextprotocol/sdk to be installed."
    );
  }
};
|
|
2666
|
+
|
|
2667
|
+
// src/tracking/Tracker.ts
// Abstract base class for experiment trackers. It declares no members here;
// concrete trackers in this file (ConsoleTracker, JsonFileTracker) extend it
// and provide log(event) plus an async flush().
var Tracker = class {
};
|
|
2670
|
+
|
|
2671
|
+
// src/tracking/ConsoleTracker.ts
var ConsoleTracker = class extends Tracker {
  /**
   * Print one tracking event as a single console line:
   * "[TYPE] step=<n> score=<x.xxxx> <metadata json>", with the optional
   * segments included only when present on the event.
   */
  log(event) {
    let line = `[${event.type.toUpperCase()}]`;
    if (event.step !== undefined) line += ` step=${event.step}`;
    if (event.score !== undefined) line += ` score=${event.score.toFixed(4)}`;
    if (event.metadata) line += ` ${JSON.stringify(event.metadata)}`;
    console.log(line);
  }
  /** Nothing is buffered, so flushing is a no-op. */
  async flush() {
  }
};
|
|
2683
|
+
|
|
2684
|
+
// src/tracking/JsonFileTracker.ts
|
|
2685
|
+
import { mkdirSync as mkdirSync3 } from "fs";
|
|
2686
|
+
import { appendFile } from "fs/promises";
|
|
2687
|
+
import { dirname as dirname2 } from "path";
|
|
2688
|
+
var JsonFileTracker = class extends Tracker {
  // Destination JSONL file path.
  #path;
  // In-memory queue of serialized events awaiting flush().
  #buffer = [];
  /** Creates the parent directory eagerly so flush() can append later. */
  constructor(path) {
    super();
    this.#path = path;
    mkdirSync3(dirname2(path), { recursive: true });
  }
  /** Buffer the event (with an ISO timestamp added as `ts`) as one JSON line. */
  log(event) {
    const stamped = { ...event, ts: new Date().toISOString() };
    this.#buffer.push(JSON.stringify(stamped));
  }
  /**
   * Append all buffered lines (newline-terminated) to the file. The buffer
   * is drained before the await so concurrent log() calls during the write
   * land in the next flush instead of being lost or duplicated.
   */
  async flush() {
    if (this.#buffer.length === 0) return;
    const payload = this.#buffer.splice(0).join("\n") + "\n";
    await appendFile(this.#path, payload, "utf8");
  }
};
|
|
2706
|
+
export {
|
|
2707
|
+
Anthropic,
|
|
2708
|
+
Assert,
|
|
2709
|
+
AssertionError,
|
|
2710
|
+
AvatarOptimizer,
|
|
2711
|
+
BestOfN,
|
|
2712
|
+
BootstrapFewShot,
|
|
2713
|
+
BootstrapFewShotWithOptuna,
|
|
2714
|
+
BootstrapFewShotWithRandomSearch,
|
|
2715
|
+
BootstrapFinetune,
|
|
2716
|
+
COPRO,
|
|
2717
|
+
ChainOfThought,
|
|
2718
|
+
ChainOfThoughtWithHint,
|
|
2719
|
+
ChromadbRM,
|
|
2720
|
+
Cohere,
|
|
2721
|
+
ColBERTv2,
|
|
2722
|
+
ConsoleTracker,
|
|
2723
|
+
DSTsxMCPServer,
|
|
2724
|
+
DiskCache,
|
|
2725
|
+
Ensemble,
|
|
2726
|
+
EnsembleOptimizer,
|
|
2727
|
+
Example,
|
|
2728
|
+
FaissRM,
|
|
2729
|
+
GRPO,
|
|
2730
|
+
GoogleAI,
|
|
2731
|
+
HuggingFace,
|
|
2732
|
+
Image,
|
|
2733
|
+
InputField,
|
|
2734
|
+
JsonFileTracker,
|
|
2735
|
+
KNNFewShot,
|
|
2736
|
+
LM,
|
|
2737
|
+
LMStudio,
|
|
2738
|
+
LRUCache,
|
|
2739
|
+
LabeledFewShot,
|
|
2740
|
+
MCPToolAdapter,
|
|
2741
|
+
MIPRO,
|
|
2742
|
+
MockLM,
|
|
2743
|
+
MockRetriever,
|
|
2744
|
+
Module,
|
|
2745
|
+
MultiChainComparison,
|
|
2746
|
+
NativeReAct,
|
|
2747
|
+
Ollama,
|
|
2748
|
+
OpenAI,
|
|
2749
|
+
Optimizer,
|
|
2750
|
+
OutputField,
|
|
2751
|
+
Parallel,
|
|
2752
|
+
PineconeRM,
|
|
2753
|
+
Predict,
|
|
2754
|
+
Prediction,
|
|
2755
|
+
ProgramOfThought,
|
|
2756
|
+
QdrantRM,
|
|
2757
|
+
ReAct,
|
|
2758
|
+
Refine,
|
|
2759
|
+
Retrieve,
|
|
2760
|
+
Retriever,
|
|
2761
|
+
Retry,
|
|
2762
|
+
SIMBA,
|
|
2763
|
+
Settings,
|
|
2764
|
+
Signature,
|
|
2765
|
+
Suggest,
|
|
2766
|
+
Tracker,
|
|
2767
|
+
TypedChainOfThought,
|
|
2768
|
+
TypedPrediction,
|
|
2769
|
+
TypedPredictor,
|
|
2770
|
+
WeaviateRM,
|
|
2771
|
+
YouRM,
|
|
2772
|
+
bleu,
|
|
2773
|
+
evaluate,
|
|
2774
|
+
exactMatch,
|
|
2775
|
+
f1,
|
|
2776
|
+
majority,
|
|
2777
|
+
passAtK,
|
|
2778
|
+
rouge,
|
|
2779
|
+
settings
|
|
2780
|
+
};
|
|
2781
|
+
//# sourceMappingURL=index.js.map
|