@probeo/anymodel 0.2.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -142,7 +142,7 @@ async function withRetry(fn, options = {}) {
142
142
  throw error;
143
143
  }
144
144
  const delay = computeDelay(attempt, opts, error);
145
- await new Promise((resolve3) => setTimeout(resolve3, delay));
145
+ await new Promise((resolve2) => setTimeout(resolve2, delay));
146
146
  }
147
147
  }
148
148
  throw lastError;
@@ -501,8 +501,8 @@ var SUPPORTED_PARAMS = /* @__PURE__ */ new Set([
501
501
  ]);
502
502
  function createOpenAIAdapter(apiKey, baseURL) {
503
503
  const base = baseURL || OPENAI_API_BASE;
504
- async function makeRequest(path, body, method = "POST") {
505
- const res = await fetch(`${base}${path}`, {
504
+ async function makeRequest(path2, body, method = "POST") {
505
+ const res = await fetch(`${base}${path2}`, {
506
506
  method,
507
507
  headers: {
508
508
  "Content-Type": "application/json",
@@ -653,6 +653,9 @@ function createOpenAIAdapter(apiKey, baseURL) {
653
653
  supportsParameter(param) {
654
654
  return SUPPORTED_PARAMS.has(param);
655
655
  },
656
+ supportsBatch() {
657
+ return true;
658
+ },
656
659
  async sendRequest(request) {
657
660
  const body = buildRequestBody(request);
658
661
  const res = await makeRequest("/chat/completions", body);
@@ -707,8 +710,8 @@ var FALLBACK_MODELS = [
707
710
  { id: "anthropic/claude-3-5-haiku-20241022", name: "Claude 3.5 Haiku", created: 0, description: "Legacy fast model", context_length: 2e5, pricing: { prompt: "0.0000008", completion: "0.000004" }, architecture: { modality: "text+image->text", input_modalities: ["text", "image"], output_modalities: ["text"], tokenizer: "claude" }, top_provider: { context_length: 2e5, max_completion_tokens: 8192, is_moderated: false }, supported_parameters: Array.from(SUPPORTED_PARAMS2) }
708
711
  ];
709
712
  function createAnthropicAdapter(apiKey) {
710
- async function makeRequest(path, body, stream = false) {
711
- const res = await fetch(`${ANTHROPIC_API_BASE}${path}`, {
713
+ async function makeRequest(path2, body, stream = false) {
714
+ const res = await fetch(`${ANTHROPIC_API_BASE}${path2}`, {
712
715
  method: "POST",
713
716
  headers: {
714
717
  "Content-Type": "application/json",
@@ -1002,6 +1005,9 @@ ${body.system}` : jsonInstruction;
1002
1005
  supportsParameter(param) {
1003
1006
  return SUPPORTED_PARAMS2.has(param);
1004
1007
  },
1008
+ supportsBatch() {
1009
+ return true;
1010
+ },
1005
1011
  async sendRequest(request) {
1006
1012
  const body = translateRequest(request);
1007
1013
  const res = await makeRequest("/messages", body);
@@ -1281,6 +1287,9 @@ function createGoogleAdapter(apiKey) {
1281
1287
  supportsParameter(param) {
1282
1288
  return SUPPORTED_PARAMS3.has(param);
1283
1289
  },
1290
+ supportsBatch() {
1291
+ return false;
1292
+ },
1284
1293
  async sendRequest(request) {
1285
1294
  const body = translateRequest(request);
1286
1295
  const url = getModelEndpoint(request.model, false);
@@ -1339,6 +1348,9 @@ function createCustomAdapter(name, config) {
1339
1348
  return {
1340
1349
  ...openaiAdapter,
1341
1350
  name,
1351
+ supportsBatch() {
1352
+ return false;
1353
+ },
1342
1354
  async listModels() {
1343
1355
  if (config.models && config.models.length > 0) {
1344
1356
  return config.models.map((modelId) => ({
@@ -1404,10 +1416,10 @@ function interpolateDeep(obj) {
1404
1416
  }
1405
1417
  return obj;
1406
1418
  }
1407
- function loadJsonFile(path) {
1408
- if (!existsSync(path)) return null;
1419
+ function loadJsonFile(path2) {
1420
+ if (!existsSync(path2)) return null;
1409
1421
  try {
1410
- const raw = readFileSync(path, "utf-8");
1422
+ const raw = readFileSync(path2, "utf-8");
1411
1423
  const parsed = JSON.parse(raw);
1412
1424
  return interpolateDeep(parsed);
1413
1425
  } catch {
@@ -1508,93 +1520,237 @@ var GenerationStatsStore = class {
1508
1520
  }
1509
1521
  };
1510
1522
 
1523
+ // src/utils/fs-io.ts
1524
+ import { mkdir, open, readFile as fsReadFile, rename, writeFile as fsWriteFile, readdir as fsReaddir, stat as fsStat } from "fs/promises";
1525
+ import { createWriteStream } from "fs";
1526
+ import path from "path";
1527
+ import PQueue from "p-queue";
1528
+ var writeQueue = new PQueue({ concurrency: 10 });
1529
+ var readQueue = new PQueue({ concurrency: 20 });
1530
+ function configureFsIO(options) {
1531
+ if (options.readConcurrency !== void 0) {
1532
+ readQueue.concurrency = options.readConcurrency;
1533
+ }
1534
+ if (options.writeConcurrency !== void 0) {
1535
+ writeQueue.concurrency = options.writeConcurrency;
1536
+ }
1537
+ }
1538
+ var ensuredDirs = /* @__PURE__ */ new Set();
1539
+ var joinPathCache = /* @__PURE__ */ new Map();
1540
+ var dirnameCache = /* @__PURE__ */ new Map();
1541
+ var resolvePathCache = /* @__PURE__ */ new Map();
1542
+ async function ensureDir(dir) {
1543
+ if (!dir) return;
1544
+ if (ensuredDirs.has(dir)) return;
1545
+ await mkdir(dir, { recursive: true });
1546
+ ensuredDirs.add(dir);
1547
+ }
1548
+ async function readFileQueued(filePath, encoding = "utf8") {
1549
+ return readQueue.add(async () => {
1550
+ return fsReadFile(filePath, encoding);
1551
+ });
1552
+ }
1553
+ async function readJsonQueued(filePath) {
1554
+ const raw = await readFileQueued(filePath, "utf8");
1555
+ return JSON.parse(raw);
1556
+ }
1557
+ async function readDirQueued(dirPath) {
1558
+ return readQueue.add(async () => {
1559
+ return fsReaddir(dirPath, { withFileTypes: true });
1560
+ });
1561
+ }
1562
+ async function pathExistsQueued(p) {
1563
+ return readQueue.add(async () => {
1564
+ try {
1565
+ await fsStat(p);
1566
+ return true;
1567
+ } catch {
1568
+ return false;
1569
+ }
1570
+ });
1571
+ }
1572
+ async function fileExistsQueued(filePath) {
1573
+ return readQueue.add(async () => {
1574
+ try {
1575
+ const s = await fsStat(filePath);
1576
+ return s.isFile();
1577
+ } catch {
1578
+ return false;
1579
+ }
1580
+ });
1581
+ }
1582
+ async function writeFileQueued(filePath, data) {
1583
+ await writeQueue.add(async () => {
1584
+ const dir = dirnameOf(filePath);
1585
+ await ensureDir(dir);
1586
+ await fsWriteFile(filePath, data);
1587
+ });
1588
+ }
1589
+ async function appendFileQueued(filePath, data) {
1590
+ await writeQueue.add(async () => {
1591
+ const dir = dirnameOf(filePath);
1592
+ await ensureDir(dir);
1593
+ await fsWriteFile(filePath, data, { flag: "a" });
1594
+ });
1595
+ }
1596
+ async function writeFileFlushedQueued(filePath, data) {
1597
+ await writeQueue.add(async () => {
1598
+ const dir = dirnameOf(filePath);
1599
+ await ensureDir(dir);
1600
+ const tmpPath = joinPath(
1601
+ dir,
1602
+ `.${path.basename(filePath)}.${Date.now()}.${Math.random().toString(16).slice(2)}.tmp`
1603
+ );
1604
+ const fh = await open(tmpPath, "w");
1605
+ try {
1606
+ await fh.writeFile(data);
1607
+ await fh.sync();
1608
+ } finally {
1609
+ await fh.close();
1610
+ }
1611
+ await rename(tmpPath, filePath);
1612
+ try {
1613
+ const dh = await open(dir, "r");
1614
+ try {
1615
+ await dh.sync();
1616
+ } finally {
1617
+ await dh.close();
1618
+ }
1619
+ } catch {
1620
+ }
1621
+ });
1622
+ }
1623
+ function joinPath(...segments) {
1624
+ const key = segments.join("\0");
1625
+ const cached = joinPathCache.get(key);
1626
+ if (cached !== void 0) return cached;
1627
+ const out = path.join(...segments);
1628
+ joinPathCache.set(key, out);
1629
+ return out;
1630
+ }
1631
+ function dirnameOf(p) {
1632
+ const cached = dirnameCache.get(p);
1633
+ if (cached !== void 0) return cached;
1634
+ const out = path.dirname(p);
1635
+ dirnameCache.set(p, out);
1636
+ return out;
1637
+ }
1638
+ function resolvePath(...segments) {
1639
+ const key = segments.join("\0");
1640
+ const cached = resolvePathCache.get(key);
1641
+ if (cached !== void 0) return cached;
1642
+ const out = path.resolve(...segments);
1643
+ resolvePathCache.set(key, out);
1644
+ return out;
1645
+ }
1646
+ function getFsQueueStatus() {
1647
+ return {
1648
+ read: { size: readQueue.size, pending: readQueue.pending },
1649
+ write: { size: writeQueue.size, pending: writeQueue.pending }
1650
+ };
1651
+ }
1652
+ async function waitForFsQueuesIdle() {
1653
+ await Promise.all([writeQueue.onIdle(), readQueue.onIdle()]);
1654
+ }
1655
+
1511
1656
  // src/batch/store.ts
1512
- import { mkdirSync, writeFileSync, readFileSync as readFileSync2, existsSync as existsSync2, readdirSync, appendFileSync } from "fs";
1513
- import { join as join2, resolve as resolve2 } from "path";
1514
- import { homedir as homedir2 } from "os";
1515
- var DEFAULT_BATCH_DIR = join2(homedir2(), ".anymodel", "batches");
1657
+ var DEFAULT_BATCH_DIR = joinPath(process.cwd(), ".anymodel", "batches");
1516
1658
  var BatchStore = class {
1517
1659
  dir;
1660
+ initialized = false;
1518
1661
  constructor(dir) {
1519
- this.dir = resolve2(dir || DEFAULT_BATCH_DIR);
1520
- mkdirSync(this.dir, { recursive: true });
1662
+ this.dir = resolvePath(dir || DEFAULT_BATCH_DIR);
1663
+ }
1664
+ async init() {
1665
+ if (this.initialized) return;
1666
+ await ensureDir(this.dir);
1667
+ this.initialized = true;
1521
1668
  }
1522
1669
  batchDir(id) {
1523
- return join2(this.dir, id);
1670
+ return joinPath(this.dir, id);
1524
1671
  }
1525
1672
  /**
1526
1673
  * Create a new batch directory and save initial metadata.
1527
1674
  */
1528
- create(batch) {
1675
+ async create(batch) {
1676
+ await this.init();
1529
1677
  const dir = this.batchDir(batch.id);
1530
- mkdirSync(dir, { recursive: true });
1531
- writeFileSync(join2(dir, "meta.json"), JSON.stringify(batch, null, 2));
1678
+ await ensureDir(dir);
1679
+ await writeFileFlushedQueued(joinPath(dir, "meta.json"), JSON.stringify(batch, null, 2));
1532
1680
  }
1533
1681
  /**
1534
- * Update batch metadata.
1682
+ * Update batch metadata (atomic write).
1535
1683
  */
1536
- updateMeta(batch) {
1537
- const dir = this.batchDir(batch.id);
1538
- writeFileSync(join2(dir, "meta.json"), JSON.stringify(batch, null, 2));
1684
+ async updateMeta(batch) {
1685
+ await writeFileFlushedQueued(
1686
+ joinPath(this.batchDir(batch.id), "meta.json"),
1687
+ JSON.stringify(batch, null, 2)
1688
+ );
1539
1689
  }
1540
1690
  /**
1541
1691
  * Save requests as JSONL.
1542
1692
  */
1543
- saveRequests(id, requests) {
1544
- const dir = this.batchDir(id);
1693
+ async saveRequests(id, requests) {
1545
1694
  const lines = requests.map((r) => JSON.stringify(r)).join("\n") + "\n";
1546
- writeFileSync(join2(dir, "requests.jsonl"), lines);
1695
+ await writeFileQueued(joinPath(this.batchDir(id), "requests.jsonl"), lines);
1547
1696
  }
1548
1697
  /**
1549
1698
  * Append a result to results.jsonl.
1550
1699
  */
1551
- appendResult(id, result) {
1552
- const dir = this.batchDir(id);
1553
- appendFileSync(join2(dir, "results.jsonl"), JSON.stringify(result) + "\n");
1700
+ async appendResult(id, result) {
1701
+ await appendFileQueued(
1702
+ joinPath(this.batchDir(id), "results.jsonl"),
1703
+ JSON.stringify(result) + "\n"
1704
+ );
1554
1705
  }
1555
1706
  /**
1556
1707
  * Save provider-specific state (e.g., provider batch ID).
1557
1708
  */
1558
- saveProviderState(id, state) {
1559
- const dir = this.batchDir(id);
1560
- writeFileSync(join2(dir, "provider.json"), JSON.stringify(state, null, 2));
1709
+ async saveProviderState(id, state) {
1710
+ await writeFileFlushedQueued(
1711
+ joinPath(this.batchDir(id), "provider.json"),
1712
+ JSON.stringify(state, null, 2)
1713
+ );
1561
1714
  }
1562
1715
  /**
1563
1716
  * Load provider state.
1564
1717
  */
1565
- loadProviderState(id) {
1566
- const path = join2(this.batchDir(id), "provider.json");
1567
- if (!existsSync2(path)) return null;
1568
- return JSON.parse(readFileSync2(path, "utf-8"));
1718
+ async loadProviderState(id) {
1719
+ const p = joinPath(this.batchDir(id), "provider.json");
1720
+ if (!await fileExistsQueued(p)) return null;
1721
+ return readJsonQueued(p);
1569
1722
  }
1570
1723
  /**
1571
1724
  * Get batch metadata.
1572
1725
  */
1573
- getMeta(id) {
1574
- const path = join2(this.batchDir(id), "meta.json");
1575
- if (!existsSync2(path)) return null;
1576
- return JSON.parse(readFileSync2(path, "utf-8"));
1726
+ async getMeta(id) {
1727
+ const p = joinPath(this.batchDir(id), "meta.json");
1728
+ if (!await fileExistsQueued(p)) return null;
1729
+ return readJsonQueued(p);
1577
1730
  }
1578
1731
  /**
1579
1732
  * Get all results for a batch.
1580
1733
  */
1581
- getResults(id) {
1582
- const path = join2(this.batchDir(id), "results.jsonl");
1583
- if (!existsSync2(path)) return [];
1584
- return readFileSync2(path, "utf-8").trim().split("\n").filter(Boolean).map((line) => JSON.parse(line));
1734
+ async getResults(id) {
1735
+ const p = joinPath(this.batchDir(id), "results.jsonl");
1736
+ if (!await fileExistsQueued(p)) return [];
1737
+ const raw = await readFileQueued(p, "utf8");
1738
+ return raw.trim().split("\n").filter(Boolean).map((line) => JSON.parse(line));
1585
1739
  }
1586
1740
  /**
1587
1741
  * List all batch IDs.
1588
1742
  */
1589
- listBatches() {
1590
- if (!existsSync2(this.dir)) return [];
1591
- return readdirSync(this.dir, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => d.name).sort();
1743
+ async listBatches() {
1744
+ await this.init();
1745
+ if (!await pathExistsQueued(this.dir)) return [];
1746
+ const entries = await readDirQueued(this.dir);
1747
+ return entries.filter((d) => d.isDirectory()).map((d) => d.name).sort();
1592
1748
  }
1593
1749
  /**
1594
1750
  * Check if a batch exists.
1595
1751
  */
1596
- exists(id) {
1597
- return existsSync2(join2(this.batchDir(id), "meta.json"));
1752
+ async exists(id) {
1753
+ return fileExistsQueued(joinPath(this.batchDir(id), "meta.json"));
1598
1754
  }
1599
1755
  };
1600
1756
 
@@ -1603,10 +1759,27 @@ var BatchManager = class {
1603
1759
  store;
1604
1760
  router;
1605
1761
  concurrencyLimit;
1762
+ defaultPollInterval;
1763
+ batchAdapters = /* @__PURE__ */ new Map();
1606
1764
  constructor(router, options) {
1607
1765
  this.store = new BatchStore(options?.dir);
1608
1766
  this.router = router;
1609
1767
  this.concurrencyLimit = options?.concurrency ?? 5;
1768
+ this.defaultPollInterval = options?.pollInterval ?? 5e3;
1769
+ }
1770
+ /**
1771
+ * Register a native batch adapter for a provider.
1772
+ */
1773
+ registerBatchAdapter(providerName, adapter) {
1774
+ this.batchAdapters.set(providerName, adapter);
1775
+ }
1776
+ /**
1777
+ * Check if a provider has native batch support.
1778
+ */
1779
+ getNativeBatchAdapter(model) {
1780
+ const providerName = model.split("/")[0];
1781
+ const adapter = this.batchAdapters.get(providerName);
1782
+ return adapter ? { adapter, providerName } : null;
1610
1783
  }
1611
1784
  /**
1612
1785
  * Create a batch and return immediately (no polling).
@@ -1614,13 +1787,16 @@ var BatchManager = class {
1614
1787
  async create(request) {
1615
1788
  const id = generateId("batch");
1616
1789
  const now = (/* @__PURE__ */ new Date()).toISOString();
1790
+ const providerName = request.model.split("/")[0] || "unknown";
1791
+ const native = this.getNativeBatchAdapter(request.model);
1792
+ const batchMode = native ? "native" : "concurrent";
1617
1793
  const batch = {
1618
1794
  id,
1619
1795
  object: "batch",
1620
1796
  status: "pending",
1621
1797
  model: request.model,
1622
- provider_name: request.model.split("/")[0] || "unknown",
1623
- batch_mode: "concurrent",
1798
+ provider_name: providerName,
1799
+ batch_mode: batchMode,
1624
1800
  total: request.requests.length,
1625
1801
  completed: 0,
1626
1802
  failed: 0,
@@ -1628,10 +1804,15 @@ var BatchManager = class {
1628
1804
  completed_at: null,
1629
1805
  expires_at: null
1630
1806
  };
1631
- this.store.create(batch);
1632
- this.store.saveRequests(id, request.requests);
1633
- this.processBatch(id, request).catch(() => {
1634
- });
1807
+ await this.store.create(batch);
1808
+ await this.store.saveRequests(id, request.requests);
1809
+ if (native) {
1810
+ this.processNativeBatch(id, request, native.adapter).catch(() => {
1811
+ });
1812
+ } else {
1813
+ this.processConcurrentBatch(id, request).catch(() => {
1814
+ });
1815
+ }
1635
1816
  return batch;
1636
1817
  }
1637
1818
  /**
@@ -1645,14 +1826,19 @@ var BatchManager = class {
1645
1826
  * Poll an existing batch until completion.
1646
1827
  */
1647
1828
  async poll(id, options = {}) {
1648
- const interval = options.interval ?? 5e3;
1829
+ const interval = options.interval ?? this.defaultPollInterval;
1649
1830
  const timeout = options.timeout ?? 0;
1650
1831
  const startTime = Date.now();
1651
1832
  while (true) {
1652
- const batch = this.store.getMeta(id);
1833
+ let batch = await this.store.getMeta(id);
1653
1834
  if (!batch) {
1654
1835
  throw new AnyModelError(404, `Batch ${id} not found`);
1655
1836
  }
1837
+ if (batch.batch_mode === "native" && batch.status === "processing") {
1838
+ await this.syncNativeBatchStatus(id);
1839
+ batch = await this.store.getMeta(id);
1840
+ if (!batch) throw new AnyModelError(404, `Batch ${id} not found`);
1841
+ }
1656
1842
  if (options.onProgress) {
1657
1843
  options.onProgress(batch);
1658
1844
  }
@@ -1662,24 +1848,24 @@ var BatchManager = class {
1662
1848
  if (timeout > 0 && Date.now() - startTime > timeout) {
1663
1849
  throw new AnyModelError(408, `Batch ${id} timed out after ${timeout}ms`);
1664
1850
  }
1665
- await new Promise((resolve3) => setTimeout(resolve3, interval));
1851
+ await new Promise((resolve2) => setTimeout(resolve2, interval));
1666
1852
  }
1667
1853
  }
1668
1854
  /**
1669
1855
  * Get the current status of a batch.
1670
1856
  */
1671
- get(id) {
1857
+ async get(id) {
1672
1858
  return this.store.getMeta(id);
1673
1859
  }
1674
1860
  /**
1675
1861
  * Get results for a completed batch.
1676
1862
  */
1677
- getResults(id) {
1678
- const batch = this.store.getMeta(id);
1863
+ async getResults(id) {
1864
+ const batch = await this.store.getMeta(id);
1679
1865
  if (!batch) {
1680
1866
  throw new AnyModelError(404, `Batch ${id} not found`);
1681
1867
  }
1682
- const results = this.store.getResults(id);
1868
+ const results = await this.store.getResults(id);
1683
1869
  const usage = {
1684
1870
  total_prompt_tokens: 0,
1685
1871
  total_completion_tokens: 0,
@@ -1701,37 +1887,119 @@ var BatchManager = class {
1701
1887
  /**
1702
1888
  * List all batches.
1703
1889
  */
1704
- list() {
1705
- return this.store.listBatches().map((id) => this.store.getMeta(id)).filter((b) => b !== null);
1890
+ async list() {
1891
+ const ids = await this.store.listBatches();
1892
+ const batches = [];
1893
+ for (const id of ids) {
1894
+ const meta = await this.store.getMeta(id);
1895
+ if (meta) batches.push(meta);
1896
+ }
1897
+ return batches;
1706
1898
  }
1707
1899
  /**
1708
1900
  * Cancel a batch.
1709
1901
  */
1710
- cancel(id) {
1711
- const batch = this.store.getMeta(id);
1902
+ async cancel(id) {
1903
+ const batch = await this.store.getMeta(id);
1712
1904
  if (!batch) {
1713
1905
  throw new AnyModelError(404, `Batch ${id} not found`);
1714
1906
  }
1715
1907
  if (batch.status === "completed" || batch.status === "cancelled") {
1716
1908
  return batch;
1717
1909
  }
1910
+ if (batch.batch_mode === "native") {
1911
+ const providerState = await this.store.loadProviderState(id);
1912
+ const adapter = this.batchAdapters.get(batch.provider_name);
1913
+ if (adapter && providerState?.providerBatchId) {
1914
+ try {
1915
+ await adapter.cancelBatch(providerState.providerBatchId);
1916
+ } catch {
1917
+ }
1918
+ }
1919
+ }
1718
1920
  batch.status = "cancelled";
1719
1921
  batch.completed_at = (/* @__PURE__ */ new Date()).toISOString();
1720
- this.store.updateMeta(batch);
1922
+ await this.store.updateMeta(batch);
1721
1923
  return batch;
1722
1924
  }
1723
1925
  /**
1724
- * Process batch requests concurrently.
1926
+ * Process batch via native provider batch API.
1725
1927
  */
1726
- async processBatch(batchId, request) {
1727
- const batch = this.store.getMeta(batchId);
1928
+ async processNativeBatch(batchId, request, adapter) {
1929
+ const batch = await this.store.getMeta(batchId);
1930
+ if (!batch) return;
1931
+ try {
1932
+ const model = request.model.includes("/") ? request.model.split("/").slice(1).join("/") : request.model;
1933
+ const { providerBatchId, metadata } = await adapter.createBatch(
1934
+ model,
1935
+ request.requests,
1936
+ request.options
1937
+ );
1938
+ await this.store.saveProviderState(batchId, {
1939
+ providerBatchId,
1940
+ providerName: batch.provider_name,
1941
+ ...metadata
1942
+ });
1943
+ batch.status = "processing";
1944
+ await this.store.updateMeta(batch);
1945
+ } catch (err) {
1946
+ batch.status = "failed";
1947
+ batch.completed_at = (/* @__PURE__ */ new Date()).toISOString();
1948
+ await this.store.updateMeta(batch);
1949
+ throw err;
1950
+ }
1951
+ }
1952
+ /**
1953
+ * Sync native batch status from provider.
1954
+ */
1955
+ async syncNativeBatchStatus(batchId) {
1956
+ const batch = await this.store.getMeta(batchId);
1957
+ if (!batch) return;
1958
+ const providerState = await this.store.loadProviderState(batchId);
1959
+ if (!providerState?.providerBatchId) return;
1960
+ const adapter = this.batchAdapters.get(batch.provider_name);
1961
+ if (!adapter) return;
1962
+ try {
1963
+ const status = await adapter.pollBatch(providerState.providerBatchId);
1964
+ batch.total = status.total || batch.total;
1965
+ batch.completed = status.completed;
1966
+ batch.failed = status.failed;
1967
+ if (status.status === "completed" || status.status === "failed" || status.status === "cancelled") {
1968
+ batch.status = status.status;
1969
+ batch.completed_at = (/* @__PURE__ */ new Date()).toISOString();
1970
+ if (status.status === "completed" || status.status === "failed") {
1971
+ try {
1972
+ const results = await adapter.getBatchResults(providerState.providerBatchId);
1973
+ for (const result of results) {
1974
+ await this.store.appendResult(batchId, result);
1975
+ }
1976
+ batch.completed = results.filter((r) => r.status === "success").length;
1977
+ batch.failed = results.filter((r) => r.status === "error").length;
1978
+ } catch {
1979
+ if (batch.status !== "failed") {
1980
+ batch.status = "failed";
1981
+ }
1982
+ }
1983
+ }
1984
+ } else {
1985
+ batch.status = "processing";
1986
+ }
1987
+ await this.store.updateMeta(batch);
1988
+ } catch {
1989
+ }
1990
+ }
1991
+ /**
1992
+ * Process batch requests concurrently (fallback path).
1993
+ */
1994
+ async processConcurrentBatch(batchId, request) {
1995
+ const batch = await this.store.getMeta(batchId);
1996
+ if (!batch) return;
1728
1997
  batch.status = "processing";
1729
- this.store.updateMeta(batch);
1998
+ await this.store.updateMeta(batch);
1730
1999
  const items = request.requests;
1731
- const queue = [...items];
1732
2000
  const active = /* @__PURE__ */ new Set();
1733
2001
  const processItem = async (item) => {
1734
- const current = this.store.getMeta(batchId);
2002
+ const current = await this.store.getMeta(batchId);
1735
2003
  if (current?.status === "cancelled") return;
1736
2004
  const chatRequest = {
1737
2005
  model: request.model,
@@ -1763,17 +2031,19 @@ var BatchManager = class {
1763
2031
  error: { code: error.code, message: error.message }
1764
2032
  };
1765
2033
  }
1766
- this.store.appendResult(batchId, result);
1767
- const meta = this.store.getMeta(batchId);
1768
- if (result.status === "success") {
1769
- meta.completed++;
1770
- } else {
1771
- meta.failed++;
2034
+ await this.store.appendResult(batchId, result);
2035
+ const meta = await this.store.getMeta(batchId);
2036
+ if (meta) {
2037
+ if (result.status === "success") {
2038
+ meta.completed++;
2039
+ } else {
2040
+ meta.failed++;
2041
+ }
2042
+ await this.store.updateMeta(meta);
1772
2043
  }
1773
- this.store.updateMeta(meta);
1774
2044
  };
1775
- for (const item of queue) {
1776
- const current = this.store.getMeta(batchId);
2045
+ for (const item of items) {
2046
+ const current = await this.store.getMeta(batchId);
1777
2047
  if (current?.status === "cancelled") break;
1778
2048
  if (active.size >= this.concurrencyLimit) {
1779
2049
  await Promise.race(active);
@@ -1784,15 +2054,411 @@ var BatchManager = class {
1784
2054
  active.add(promise);
1785
2055
  }
1786
2056
  await Promise.all(active);
1787
- const finalMeta = this.store.getMeta(batchId);
1788
- if (finalMeta.status !== "cancelled") {
2057
+ const finalMeta = await this.store.getMeta(batchId);
2058
+ if (finalMeta && finalMeta.status !== "cancelled") {
1789
2059
  finalMeta.status = finalMeta.failed === finalMeta.total ? "failed" : "completed";
1790
2060
  finalMeta.completed_at = (/* @__PURE__ */ new Date()).toISOString();
1791
- this.store.updateMeta(finalMeta);
2061
+ await this.store.updateMeta(finalMeta);
1792
2062
  }
1793
2063
  }
1794
2064
  };
1795
2065
 
2066
+ // src/providers/openai-batch.ts
2067
+ var OPENAI_API_BASE2 = "https://api.openai.com/v1";
2068
+ function createOpenAIBatchAdapter(apiKey) {
2069
+ async function apiRequest(path2, options = {}) {
2070
+ const headers = {
2071
+ "Authorization": `Bearer ${apiKey}`
2072
+ };
2073
+ let fetchBody;
2074
+ if (options.formData) {
2075
+ fetchBody = options.formData;
2076
+ } else if (options.body) {
2077
+ headers["Content-Type"] = "application/json";
2078
+ fetchBody = JSON.stringify(options.body);
2079
+ }
2080
+ const res = await fetch(`${OPENAI_API_BASE2}${path2}`, {
2081
+ method: options.method || "GET",
2082
+ headers,
2083
+ body: fetchBody
2084
+ });
2085
+ if (!res.ok) {
2086
+ let errorBody;
2087
+ try {
2088
+ errorBody = await res.json();
2089
+ } catch {
2090
+ errorBody = { message: res.statusText };
2091
+ }
2092
+ const msg = errorBody?.error?.message || errorBody?.message || res.statusText;
2093
+ throw new AnyModelError(res.status >= 500 ? 502 : res.status, msg, {
2094
+ provider_name: "openai",
2095
+ raw: errorBody
2096
+ });
2097
+ }
2098
+ return res;
2099
+ }
2100
+ function buildJSONL(model, requests) {
2101
+ return requests.map((req) => {
2102
+ const body = {
2103
+ model,
2104
+ messages: req.messages
2105
+ };
2106
+ if (req.max_tokens !== void 0) body.max_tokens = req.max_tokens;
2107
+ if (req.temperature !== void 0) body.temperature = req.temperature;
2108
+ if (req.top_p !== void 0) body.top_p = req.top_p;
2109
+ if (req.stop !== void 0) body.stop = req.stop;
2110
+ if (req.response_format !== void 0) body.response_format = req.response_format;
2111
+ if (req.tools !== void 0) body.tools = req.tools;
2112
+ if (req.tool_choice !== void 0) body.tool_choice = req.tool_choice;
2113
+ return JSON.stringify({
2114
+ custom_id: req.custom_id,
2115
+ method: "POST",
2116
+ url: "/v1/chat/completions",
2117
+ body
2118
+ });
2119
+ }).join("\n");
2120
+ }
2121
+ function rePrefixId(id) {
2122
+ if (id && id.startsWith("chatcmpl-")) {
2123
+ return `gen-${id.substring(9)}`;
2124
+ }
2125
+ return id.startsWith("gen-") ? id : `gen-${id}`;
2126
+ }
2127
+ function translateOpenAIResponse(body) {
2128
+ return {
2129
+ id: rePrefixId(body.id || generateId()),
2130
+ object: "chat.completion",
2131
+ created: body.created || Math.floor(Date.now() / 1e3),
2132
+ model: `openai/${body.model}`,
2133
+ choices: body.choices,
2134
+ usage: body.usage
2135
+ };
2136
+ }
2137
+ function mapStatus(openaiStatus) {
2138
+ switch (openaiStatus) {
2139
+ case "validating":
2140
+ case "finalizing":
2141
+ return "processing";
2142
+ case "in_progress":
2143
+ return "processing";
2144
+ case "completed":
2145
+ return "completed";
2146
+ case "failed":
2147
+ return "failed";
2148
+ case "expired":
2149
+ return "failed";
2150
+ case "cancelled":
2151
+ case "cancelling":
2152
+ return "cancelled";
2153
+ default:
2154
+ return "pending";
2155
+ }
2156
+ }
2157
+ return {
2158
+ async createBatch(model, requests, options) {
2159
+ const jsonlContent = buildJSONL(model, requests);
2160
+ const blob = new Blob([jsonlContent], { type: "application/jsonl" });
2161
+ const formData = new FormData();
2162
+ formData.append("purpose", "batch");
2163
+ formData.append("file", blob, "batch_input.jsonl");
2164
+ const uploadRes = await apiRequest("/files", { method: "POST", formData });
2165
+ const fileData = await uploadRes.json();
2166
+ const inputFileId = fileData.id;
2167
+ const batchRes = await apiRequest("/batches", {
2168
+ method: "POST",
2169
+ body: {
2170
+ input_file_id: inputFileId,
2171
+ endpoint: "/v1/chat/completions",
2172
+ completion_window: "24h",
2173
+ metadata: options?.metadata
2174
+ }
2175
+ });
2176
+ const batchData = await batchRes.json();
2177
+ return {
2178
+ providerBatchId: batchData.id,
2179
+ metadata: {
2180
+ input_file_id: inputFileId,
2181
+ openai_status: batchData.status
2182
+ }
2183
+ };
2184
+ },
2185
+ async pollBatch(providerBatchId) {
2186
+ const res = await apiRequest(`/batches/${providerBatchId}`);
2187
+ const data = await res.json();
2188
+ const requestCounts = data.request_counts || {};
2189
+ return {
2190
+ status: mapStatus(data.status),
2191
+ total: requestCounts.total || 0,
2192
+ completed: requestCounts.completed || 0,
2193
+ failed: requestCounts.failed || 0
2194
+ };
2195
+ },
2196
+ async getBatchResults(providerBatchId) {
2197
+ const batchRes = await apiRequest(`/batches/${providerBatchId}`);
2198
+ const batchData = await batchRes.json();
2199
+ const results = [];
2200
+ if (batchData.output_file_id) {
2201
+ const outputRes = await apiRequest(`/files/${batchData.output_file_id}/content`);
2202
+ const outputText = await outputRes.text();
2203
+ for (const line of outputText.trim().split("\n")) {
2204
+ if (!line) continue;
2205
+ const item = JSON.parse(line);
2206
+ if (item.response?.status_code === 200) {
2207
+ results.push({
2208
+ custom_id: item.custom_id,
2209
+ status: "success",
2210
+ response: translateOpenAIResponse(item.response.body),
2211
+ error: null
2212
+ });
2213
+ } else {
2214
+ results.push({
2215
+ custom_id: item.custom_id,
2216
+ status: "error",
2217
+ response: null,
2218
+ error: {
2219
+ code: item.response?.status_code || 500,
2220
+ message: item.error?.message || item.response?.body?.error?.message || "Unknown error"
2221
+ }
2222
+ });
2223
+ }
2224
+ }
2225
+ }
2226
+ if (batchData.error_file_id) {
2227
+ const errorRes = await apiRequest(`/files/${batchData.error_file_id}/content`);
2228
+ const errorText = await errorRes.text();
2229
+ for (const line of errorText.trim().split("\n")) {
2230
+ if (!line) continue;
2231
+ const item = JSON.parse(line);
2232
+ const existing = results.find((r) => r.custom_id === item.custom_id);
2233
+ if (!existing) {
2234
+ results.push({
2235
+ custom_id: item.custom_id,
2236
+ status: "error",
2237
+ response: null,
2238
+ error: {
2239
+ code: item.response?.status_code || 500,
2240
+ message: item.error?.message || "Batch item error"
2241
+ }
2242
+ });
2243
+ }
2244
+ }
2245
+ }
2246
+ return results;
2247
+ },
2248
+ async cancelBatch(providerBatchId) {
2249
+ await apiRequest(`/batches/${providerBatchId}/cancel`, { method: "POST" });
2250
+ }
2251
+ };
2252
+ }
2253
+
2254
// src/providers/anthropic-batch.ts
var ANTHROPIC_API_BASE2 = "https://api.anthropic.com/v1";
var ANTHROPIC_VERSION2 = "2023-06-01";
var DEFAULT_MAX_TOKENS2 = 4096;

/**
 * Create a batch adapter backed by the Anthropic Message Batches API.
 *
 * The adapter exposes createBatch / pollBatch / getBatchResults / cancelBatch
 * and translates between this library's OpenAI-style request/response shapes
 * and Anthropic's native Messages format.
 *
 * @param {string} apiKey - Anthropic API key, sent via the `x-api-key` header.
 * @returns {object} Batch adapter object.
 */
function createAnthropicBatchAdapter(apiKey) {
  // Issue an authenticated request; non-2xx responses are converted into
  // AnyModelError (server-side 5xx statuses are normalized to 502).
  async function apiRequest(path2, options = {}) {
    const headers = {
      "x-api-key": apiKey,
      "anthropic-version": ANTHROPIC_VERSION2,
      "Content-Type": "application/json"
    };
    const res = await fetch(`${ANTHROPIC_API_BASE2}${path2}`, {
      method: options.method || "GET",
      headers,
      body: options.body ? JSON.stringify(options.body) : void 0
    });
    if (!res.ok) {
      let errorBody;
      try {
        errorBody = await res.json();
      } catch {
        // Error body was not JSON; fall back to the HTTP status text.
        errorBody = { message: res.statusText };
      }
      const msg = errorBody?.error?.message || errorBody?.message || res.statusText;
      throw new AnyModelError(res.status >= 500 ? 502 : res.status, msg, {
        provider_name: "anthropic",
        raw: errorBody
      });
    }
    return res;
  }
  // Translate an OpenAI-style chat request into Anthropic Messages params.
  function translateToAnthropicParams(model, req) {
    const params = {
      model,
      max_tokens: req.max_tokens || DEFAULT_MAX_TOKENS2
    };
    // Anthropic takes system text as a dedicated field, not a message role.
    const systemMessages = req.messages.filter((m) => m.role === "system");
    const nonSystemMessages = req.messages.filter((m) => m.role !== "system");
    if (systemMessages.length > 0) {
      params.system = systemMessages.map((m) => typeof m.content === "string" ? m.content : "").join("\n");
    }
    // Tool results travel as user messages carrying a tool_result block.
    params.messages = nonSystemMessages.map((m) => ({
      role: m.role === "tool" ? "user" : m.role,
      content: m.tool_call_id ? [{ type: "tool_result", tool_use_id: m.tool_call_id, content: typeof m.content === "string" ? m.content : "" }] : m.content
    }));
    if (req.temperature !== void 0) params.temperature = req.temperature;
    if (req.top_p !== void 0) params.top_p = req.top_p;
    if (req.top_k !== void 0) params.top_k = req.top_k;
    if (req.stop !== void 0) params.stop_sequences = Array.isArray(req.stop) ? req.stop : [req.stop];
    if (req.tools && req.tools.length > 0) {
      params.tools = req.tools.map((t) => ({
        name: t.function.name,
        description: t.function.description || "",
        input_schema: t.function.parameters || { type: "object", properties: {} }
      }));
      if (req.tool_choice) {
        if (req.tool_choice === "auto") {
          params.tool_choice = { type: "auto" };
        } else if (req.tool_choice === "required") {
          // OpenAI "required" maps to Anthropic "any" (must use some tool).
          params.tool_choice = { type: "any" };
        } else if (req.tool_choice === "none") {
          // Anthropic has no "none" choice; omit the tools entirely instead.
          delete params.tools;
        } else if (typeof req.tool_choice === "object") {
          params.tool_choice = { type: "tool", name: req.tool_choice.function.name };
        }
      }
    }
    if (req.response_format) {
      if (req.response_format.type === "json_object" || req.response_format.type === "json_schema") {
        // Anthropic has no native JSON mode; prepend an instruction instead.
        const jsonInstruction = "Respond with valid JSON only. Do not include any text outside the JSON object.";
        params.system = params.system ? `${jsonInstruction}

${params.system}` : jsonInstruction;
      }
    }
    return params;
  }
  // Map Anthropic stop reasons onto OpenAI finish_reason values.
  function mapStopReason(reason) {
    switch (reason) {
      case "end_turn":
        return "stop";
      case "max_tokens":
        return "length";
      case "tool_use":
        return "tool_calls";
      case "stop_sequence":
        return "stop";
      default:
        return "stop";
    }
  }
  // Convert one Anthropic message into an OpenAI-style chat.completion object.
  function translateAnthropicMessage(msg) {
    let content = "";
    const toolCalls = [];
    for (const block of msg.content || []) {
      if (block.type === "text") {
        content += block.text;
      } else if (block.type === "tool_use") {
        toolCalls.push({
          id: block.id,
          type: "function",
          function: {
            name: block.name,
            arguments: JSON.stringify(block.input)
          }
        });
      }
    }
    const message = { role: "assistant", content };
    if (toolCalls.length > 0) {
      message.tool_calls = toolCalls;
    }
    return {
      id: generateId(),
      object: "chat.completion",
      created: Math.floor(Date.now() / 1e3),
      model: `anthropic/${msg.model}`,
      choices: [{
        index: 0,
        message,
        finish_reason: mapStopReason(msg.stop_reason)
      }],
      usage: {
        prompt_tokens: msg.usage?.input_tokens || 0,
        completion_tokens: msg.usage?.output_tokens || 0,
        total_tokens: (msg.usage?.input_tokens || 0) + (msg.usage?.output_tokens || 0)
      }
    };
  }
  return {
    // Submit a batch of chat requests; resolves to the provider batch id
    // plus provider metadata.
    async createBatch(model, requests, _options) {
      const batchRequests = requests.map((req) => ({
        custom_id: req.custom_id,
        params: translateToAnthropicParams(model, req)
      }));
      const res = await apiRequest("/messages/batches", {
        method: "POST",
        body: { requests: batchRequests }
      });
      const data = await res.json();
      return {
        providerBatchId: data.id,
        metadata: {
          anthropic_type: data.type,
          created_at: data.created_at
        }
      };
    },
    // Report batch progress and a normalized terminal status
    // ("processing" | "completed" | "failed" | "cancelled").
    async pollBatch(providerBatchId) {
      const res = await apiRequest(`/messages/batches/${providerBatchId}`);
      const data = await res.json();
      const counts = data.request_counts || {};
      const total = (counts.processing || 0) + (counts.succeeded || 0) + (counts.errored || 0) + (counts.canceled || 0) + (counts.expired || 0);
      let status;
      if (data.processing_status === "ended") {
        // Check the cancel flag before the failure heuristic: a user-cancelled
        // batch with zero successes must surface as "cancelled", not "failed".
        if (data.cancel_initiated_at) {
          status = "cancelled";
        } else if (counts.succeeded === 0 && (counts.errored > 0 || counts.expired > 0 || counts.canceled > 0)) {
          status = "failed";
        } else {
          status = "completed";
        }
      } else {
        status = "processing";
      }
      return {
        status,
        total,
        completed: counts.succeeded || 0,
        failed: (counts.errored || 0) + (counts.expired || 0) + (counts.canceled || 0)
      };
    },
    // Download the per-request results (JSONL stream) and normalize each line
    // into { custom_id, status, response, error }.
    async getBatchResults(providerBatchId) {
      const res = await apiRequest(`/messages/batches/${providerBatchId}/results`);
      const text = await res.text();
      const results = [];
      for (const line of text.trim().split("\n")) {
        if (!line) continue;
        const item = JSON.parse(line);
        if (item.result?.type === "succeeded") {
          results.push({
            custom_id: item.custom_id,
            status: "success",
            response: translateAnthropicMessage(item.result.message),
            error: null
          });
        } else {
          const errorType = item.result?.type || "unknown";
          const errorMsg = item.result?.error?.message || `Batch item ${errorType}`;
          results.push({
            custom_id: item.custom_id,
            status: "error",
            response: null,
            error: {
              // Expired items map to HTTP 408 (timeout); everything else 500.
              code: errorType === "expired" ? 408 : 500,
              message: errorMsg
            }
          });
        }
      }
      return results;
    },
    // Request cancellation of an in-flight batch (fire-and-forget).
    async cancelBatch(providerBatchId) {
      await apiRequest(`/messages/batches/${providerBatchId}/cancel`, { method: "POST" });
    }
  };
}
2461
+
1796
2462
  // src/client.ts
1797
2463
  var AnyModel = class {
1798
2464
  registry;
@@ -1808,6 +2474,9 @@ var AnyModel = class {
1808
2474
  constructor(config = {}) {
1809
2475
  this.config = resolveConfig(config);
1810
2476
  this.registry = new ProviderRegistry();
2477
+ if (this.config.io) {
2478
+ configureFsIO(this.config.io);
2479
+ }
1811
2480
  this.registerProviders();
1812
2481
  this.router = new Router(this.registry, this.config.aliases, this.config);
1813
2482
  this.chat = {
@@ -1857,8 +2526,10 @@ var AnyModel = class {
1857
2526
  };
1858
2527
  this.batchManager = new BatchManager(this.router, {
1859
2528
  dir: this.config.batch?.dir,
1860
- concurrency: this.config.batch?.concurrencyFallback
2529
+ concurrency: this.config.batch?.concurrencyFallback,
2530
+ pollInterval: this.config.batch?.pollInterval
1861
2531
  });
2532
+ this.registerBatchAdapters();
1862
2533
  this.batches = {
1863
2534
  create: (request) => this.batchManager.create(request),
1864
2535
  createAndPoll: (request, options) => this.batchManager.createAndPoll(request, options),
@@ -1910,6 +2581,17 @@ var AnyModel = class {
1910
2581
  }
1911
2582
  }
1912
2583
  }
2584
+ registerBatchAdapters() {
2585
+ const config = this.config;
2586
+ const openaiKey = config.openai?.apiKey || process.env.OPENAI_API_KEY;
2587
+ if (openaiKey) {
2588
+ this.batchManager.registerBatchAdapter("openai", createOpenAIBatchAdapter(openaiKey));
2589
+ }
2590
+ const anthropicKey = config.anthropic?.apiKey || process.env.ANTHROPIC_API_KEY;
2591
+ if (anthropicKey) {
2592
+ this.batchManager.registerBatchAdapter("anthropic", createAnthropicBatchAdapter(anthropicKey));
2593
+ }
2594
+ }
1913
2595
  applyDefaults(request) {
1914
2596
  const defaults = this.config.defaults;
1915
2597
  if (!defaults) return request;
@@ -1938,10 +2620,10 @@ var AnyModel = class {
1938
2620
  // src/server.ts
1939
2621
  import { createServer } from "http";
1940
2622
// Buffer an incoming HTTP request body and resolve it as a UTF-8 string.
// Rejects if the request stream emits an error.
function parseBody(req) {
  return new Promise((resolve2, reject) => {
    const buffers = [];
    req.on("data", (part) => {
      buffers.push(part);
    });
    req.on("end", () => {
      resolve2(Buffer.concat(buffers).toString());
    });
    req.on("error", reject);
  });
}
@@ -1971,7 +2653,7 @@ function createAnyModelServer(options = {}) {
1971
2653
  const basePath = "/api/v1";
1972
2654
  const server = createServer(async (req, res) => {
1973
2655
  const url = new URL(req.url || "/", `http://${req.headers.host}`);
1974
- const path = url.pathname;
2656
+ const path2 = url.pathname;
1975
2657
  res.setHeader("Access-Control-Allow-Origin", "*");
1976
2658
  res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
1977
2659
  res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization");
@@ -1981,11 +2663,11 @@ function createAnyModelServer(options = {}) {
1981
2663
  return;
1982
2664
  }
1983
2665
  try {
1984
- if (path === "/health" && req.method === "GET") {
2666
+ if (path2 === "/health" && req.method === "GET") {
1985
2667
  sendJSON(res, 200, { status: "ok" });
1986
2668
  return;
1987
2669
  }
1988
- if (path === `${basePath}/chat/completions` && req.method === "POST") {
2670
+ if (path2 === `${basePath}/chat/completions` && req.method === "POST") {
1989
2671
  const body = JSON.parse(await parseBody(req));
1990
2672
  if (body.stream) {
1991
2673
  const stream = await client.chat.completions.create(body);
@@ -1996,14 +2678,14 @@ function createAnyModelServer(options = {}) {
1996
2678
  }
1997
2679
  return;
1998
2680
  }
1999
- if (path === `${basePath}/models` && req.method === "GET") {
2681
+ if (path2 === `${basePath}/models` && req.method === "GET") {
2000
2682
  const provider = url.searchParams.get("provider") || void 0;
2001
2683
  const models = await client.models.list({ provider });
2002
2684
  sendJSON(res, 200, { object: "list", data: models });
2003
2685
  return;
2004
2686
  }
2005
- if (path.startsWith(`${basePath}/generation/`) && req.method === "GET") {
2006
- const id = path.substring(`${basePath}/generation/`.length);
2687
+ if (path2.startsWith(`${basePath}/generation/`) && req.method === "GET") {
2688
+ const id = path2.substring(`${basePath}/generation/`.length);
2007
2689
  const stats = client.generation.get(id);
2008
2690
  if (!stats) {
2009
2691
  sendError(res, 404, `Generation ${id} not found`);
@@ -2012,26 +2694,26 @@ function createAnyModelServer(options = {}) {
2012
2694
  sendJSON(res, 200, stats);
2013
2695
  return;
2014
2696
  }
2015
- if (path === `${basePath}/batches` && req.method === "POST") {
2697
+ if (path2 === `${basePath}/batches` && req.method === "POST") {
2016
2698
  const body = JSON.parse(await parseBody(req));
2017
2699
  const batch = await client.batches.create(body);
2018
2700
  sendJSON(res, 201, batch);
2019
2701
  return;
2020
2702
  }
2021
- if (path === `${basePath}/batches` && req.method === "GET") {
2022
- const batches = client.batches.list();
2703
+ if (path2 === `${basePath}/batches` && req.method === "GET") {
2704
+ const batches = await client.batches.list();
2023
2705
  sendJSON(res, 200, { object: "list", data: batches });
2024
2706
  return;
2025
2707
  }
2026
- if (path.startsWith(`${basePath}/batches/`) && req.method === "GET") {
2027
- const parts = path.substring(`${basePath}/batches/`.length).split("/");
2708
+ if (path2.startsWith(`${basePath}/batches/`) && req.method === "GET") {
2709
+ const parts = path2.substring(`${basePath}/batches/`.length).split("/");
2028
2710
  const id = parts[0];
2029
2711
  if (parts[1] === "results") {
2030
- const results = client.batches.results(id);
2712
+ const results = await client.batches.results(id);
2031
2713
  sendJSON(res, 200, results);
2032
2714
  return;
2033
2715
  }
2034
- const batch = client.batches.get(id);
2716
+ const batch = await client.batches.get(id);
2035
2717
  if (!batch) {
2036
2718
  sendError(res, 404, `Batch ${id} not found`);
2037
2719
  return;
@@ -2039,16 +2721,16 @@ function createAnyModelServer(options = {}) {
2039
2721
  sendJSON(res, 200, batch);
2040
2722
  return;
2041
2723
  }
2042
- if (path.startsWith(`${basePath}/batches/`) && req.method === "POST") {
2043
- const parts = path.substring(`${basePath}/batches/`.length).split("/");
2724
+ if (path2.startsWith(`${basePath}/batches/`) && req.method === "POST") {
2725
+ const parts = path2.substring(`${basePath}/batches/`.length).split("/");
2044
2726
  const id = parts[0];
2045
2727
  if (parts[1] === "cancel") {
2046
- const batch = client.batches.cancel(id);
2728
+ const batch = await client.batches.cancel(id);
2047
2729
  sendJSON(res, 200, batch);
2048
2730
  return;
2049
2731
  }
2050
2732
  }
2051
- sendError(res, 404, `Not found: ${path}`);
2733
+ sendError(res, 404, `Not found: ${path2}`);
2052
2734
  } catch (err) {
2053
2735
  const code = err?.code || 500;
2054
2736
  const message = err?.message || "Internal server error";
@@ -2083,8 +2765,19 @@ export {
2083
2765
  BatchManager,
2084
2766
  BatchStore,
2085
2767
  GenerationStatsStore,
2768
+ appendFileQueued,
2769
+ configureFsIO,
2770
+ createAnthropicBatchAdapter,
2086
2771
  createAnyModelServer,
2772
+ createOpenAIBatchAdapter,
2773
+ ensureDir,
2774
+ getFsQueueStatus,
2775
+ joinPath,
2776
+ readFileQueued,
2087
2777
  resolveConfig,
2088
- startServer
2778
+ startServer,
2779
+ waitForFsQueuesIdle,
2780
+ writeFileFlushedQueued,
2781
+ writeFileQueued
2089
2782
  };
2090
2783
  //# sourceMappingURL=index.js.map