@probeo/anymodel 0.5.0 → 0.5.1
- package/README.md +1 -0
- package/dist/cli.cjs +24 -13
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +24 -13
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +24 -13
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +5 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +24 -13
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.d.cts
CHANGED
@@ -456,6 +456,7 @@ declare class BatchManager {
     private syncNativeBatchStatus;
     /**
      * Process batch requests concurrently (fallback path).
+     * Streams requests from disk to avoid holding them all in memory.
      */
     private processConcurrentBatch;
 }
@@ -575,6 +576,10 @@ declare class BatchStore {
      * List all batch IDs.
      */
     listBatches(): Promise<string[]>;
+    /**
+     * Stream requests from JSONL one line at a time (memory-efficient).
+     */
+    streamRequests(id: string): AsyncGenerator<unknown>;
     /**
      * Check if a batch exists.
      */
package/dist/index.d.ts
CHANGED
@@ -456,6 +456,7 @@ declare class BatchManager {
     private syncNativeBatchStatus;
     /**
      * Process batch requests concurrently (fallback path).
+     * Streams requests from disk to avoid holding them all in memory.
      */
     private processConcurrentBatch;
 }
@@ -575,6 +576,10 @@ declare class BatchStore {
      * List all batch IDs.
      */
     listBatches(): Promise<string[]>;
+    /**
+     * Stream requests from JSONL one line at a time (memory-efficient).
+     */
+    streamRequests(id: string): AsyncGenerator<unknown>;
     /**
      * Check if a batch exists.
      */
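Both declaration files pick up the same additions: a doc-comment line on processConcurrentBatch, plus the new streamRequests method on BatchStore, typed as an async generator. That shape means consumers iterate with for await instead of receiving a fully materialized array. A minimal consumption sketch in TypeScript — the import path, and the assumption that BatchStore is exported and already constructed, are hypothetical and not shown by this diff:

// Hypothetical usage of the new API; the import path and how a
// BatchStore instance is obtained are assumptions, not part of this diff.
import { BatchStore } from "@probeo/anymodel";

async function countRequests(store: BatchStore, batchId: string): Promise<number> {
  let count = 0;
  // streamRequests yields one parsed JSONL record per iteration, so the
  // consumer never holds the full request array, and records left
  // unconsumed are never JSON-parsed.
  for await (const _request of store.streamRequests(batchId)) {
    count += 1;
  }
  return count;
}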
package/dist/index.js
CHANGED
@@ -1953,6 +1953,17 @@ var BatchStore = class {
     const entries = await readDirQueued(this.dir);
     return entries.filter((d) => d.isDirectory()).map((d) => d.name).sort();
   }
+  /**
+   * Stream requests from JSONL one line at a time (memory-efficient).
+   */
+  async *streamRequests(id) {
+    const p = joinPath(this.batchDir(id), "requests.jsonl");
+    if (!await fileExistsQueued(p)) return;
+    const raw = await readFileQueued(p, "utf8");
+    for (const line of raw.split("\n")) {
+      if (line.trim()) yield JSON.parse(line);
+    }
+  }
   /**
    * Check if a batch exists.
    */
@@ -2017,7 +2028,7 @@ var BatchManager = class {
       this.processNativeBatch(id, request, native.adapter).catch(() => {
       });
     } else {
-      this.processConcurrentBatch(id, request).catch(() => {
+      this.processConcurrentBatch(id, request.model, request.options).catch(() => {
       });
     }
     return batch;
@@ -2197,28 +2208,28 @@ var BatchManager = class {
   }
   /**
    * Process batch requests concurrently (fallback path).
+   * Streams requests from disk to avoid holding them all in memory.
    */
-  async processConcurrentBatch(batchId, request) {
+  async processConcurrentBatch(batchId, model, options) {
     const batch = await this.store.getMeta(batchId);
     if (!batch) return;
     batch.status = "processing";
     await this.store.updateMeta(batch);
-    const items = request.requests;
     const active = /* @__PURE__ */ new Set();
     const processItem = async (item) => {
       const current = await this.store.getMeta(batchId);
       if (current?.status === "cancelled") return;
       const chatRequest = {
-        model: request.model,
+        model,
         messages: item.messages,
-        max_tokens: item.max_tokens ?? request.options?.max_tokens,
-        temperature: item.temperature ?? request.options?.temperature,
-        top_p: item.top_p ?? request.options?.top_p,
-        top_k: item.top_k ?? request.options?.top_k,
-        stop: item.stop ?? request.options?.stop,
-        response_format: item.response_format ?? request.options?.response_format,
-        tools: item.tools ?? request.options?.tools,
-        tool_choice: item.tool_choice ?? request.options?.tool_choice
+        max_tokens: item.max_tokens ?? options?.max_tokens,
+        temperature: item.temperature ?? options?.temperature,
+        top_p: item.top_p ?? options?.top_p,
+        top_k: item.top_k ?? options?.top_k,
+        stop: item.stop ?? options?.stop,
+        response_format: item.response_format ?? options?.response_format,
+        tools: item.tools ?? options?.tools,
+        tool_choice: item.tool_choice ?? options?.tool_choice
       };
       let result;
       try {
@@ -2249,7 +2260,7 @@ var BatchManager = class {
         await this.store.updateMeta(meta);
       }
     };
-    for (const item of items) {
+    for await (const item of this.store.streamRequests(batchId)) {
       const current = await this.store.getMeta(batchId);
       if (current?.status === "cancelled") break;
       if (active.size >= this.concurrencyLimit) {
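A note on the implementation as shipped: streamRequests still reads the whole requests.jsonl into memory via readFileQueued and only defers the JSON.parse calls, while the async-generator interface leaves room for the loading itself to become incremental later without changing callers. For comparison, a fully line-at-a-time variant might look like the sketch below, which substitutes Node's built-in fs and node:readline modules for the package's readFileQueued/fileExistsQueued helpers — an illustrative sketch, not the package's code:

import { createReadStream, existsSync } from "node:fs";
import { createInterface } from "node:readline";

// Sketch: yield one parsed JSONL record per line without buffering the
// whole file. Node built-ins stand in for the package's queued helpers.
async function* streamJsonl(path: string): AsyncGenerator<unknown> {
  if (!existsSync(path)) return;
  const lines = createInterface({
    input: createReadStream(path, { encoding: "utf8" }),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });
  for await (const line of lines) {
    if (line.trim()) yield JSON.parse(line);
  }
}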