@yourgpt/llm-sdk 0.1.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +61 -40
- package/dist/adapters/index.d.mts +4 -258
- package/dist/adapters/index.d.ts +4 -258
- package/dist/adapters/index.js +0 -113
- package/dist/adapters/index.js.map +1 -1
- package/dist/adapters/index.mjs +1 -112
- package/dist/adapters/index.mjs.map +1 -1
- package/dist/base-D_FyHFKj.d.mts +235 -0
- package/dist/base-D_FyHFKj.d.ts +235 -0
- package/dist/index.d.mts +209 -451
- package/dist/index.d.ts +209 -451
- package/dist/index.js +1905 -311
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1895 -309
- package/dist/index.mjs.map +1 -1
- package/dist/providers/anthropic/index.d.mts +61 -0
- package/dist/providers/anthropic/index.d.ts +61 -0
- package/dist/providers/anthropic/index.js +939 -0
- package/dist/providers/anthropic/index.js.map +1 -0
- package/dist/providers/anthropic/index.mjs +934 -0
- package/dist/providers/anthropic/index.mjs.map +1 -0
- package/dist/providers/azure/index.d.mts +38 -0
- package/dist/providers/azure/index.d.ts +38 -0
- package/dist/providers/azure/index.js +380 -0
- package/dist/providers/azure/index.js.map +1 -0
- package/dist/providers/azure/index.mjs +377 -0
- package/dist/providers/azure/index.mjs.map +1 -0
- package/dist/providers/google/index.d.mts +72 -0
- package/dist/providers/google/index.d.ts +72 -0
- package/dist/providers/google/index.js +790 -0
- package/dist/providers/google/index.js.map +1 -0
- package/dist/providers/google/index.mjs +785 -0
- package/dist/providers/google/index.mjs.map +1 -0
- package/dist/providers/ollama/index.d.mts +24 -0
- package/dist/providers/ollama/index.d.ts +24 -0
- package/dist/providers/ollama/index.js +235 -0
- package/dist/providers/ollama/index.js.map +1 -0
- package/dist/providers/ollama/index.mjs +232 -0
- package/dist/providers/ollama/index.mjs.map +1 -0
- package/dist/providers/openai/index.d.mts +82 -0
- package/dist/providers/openai/index.d.ts +82 -0
- package/dist/providers/openai/index.js +679 -0
- package/dist/providers/openai/index.js.map +1 -0
- package/dist/providers/openai/index.mjs +674 -0
- package/dist/providers/openai/index.mjs.map +1 -0
- package/dist/providers/xai/index.d.mts +78 -0
- package/dist/providers/xai/index.d.ts +78 -0
- package/dist/providers/xai/index.js +671 -0
- package/dist/providers/xai/index.js.map +1 -0
- package/dist/providers/xai/index.mjs +666 -0
- package/dist/providers/xai/index.mjs.map +1 -0
- package/dist/types-BBCZ3Fxy.d.mts +308 -0
- package/dist/types-CdORv1Yu.d.mts +338 -0
- package/dist/types-CdORv1Yu.d.ts +338 -0
- package/dist/types-DcoCaVVC.d.ts +308 -0
- package/package.json +34 -3
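
The biggest structural change in 1.0.0 is the set of per-provider entry points under `dist/providers/*` (anthropic, azure, google, ollama, openai, xai), each exporting a model factory in the "modern pattern" described by the bundled JSDoc. A minimal sketch of that pattern, copied from the `@example` blocks embedded in the sources later in this diff (the model ID, subpath, and option names are as documented there, not independently verified):

```ts
import { generateText } from "@yourgpt/llm-sdk";
import { openai } from "@yourgpt/llm-sdk/openai";

// openai() returns a LanguageModel; the API key defaults to OPENAI_API_KEY.
const result = await generateText({
  model: openai("gpt-4o"),
  prompt: "Hello!",
});

// Custom credentials/endpoint, per OpenAIProviderOptions in the embedded source:
const custom = openai("gpt-4o", {
  apiKey: "sk-...",
  baseURL: "https://custom-endpoint.com/v1",
});
```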
package/dist/providers/openai/index.mjs.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../../../src/providers/openai/provider.ts","../../../src/adapters/base.ts","../../../src/adapters/openai.ts","../../../src/providers/openai/index.ts"],"file":"index.mjs", …}
(single-line generated source map; the minified "mappings" string and the embedded "sourcesContent" for the new openai provider, base adapter, openai adapter, and provider index are elided)
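
One detail worth noting from the provider source embedded in that map: streamed tool calls arrive as argument fragments, and the bundled `openai()` provider accumulates them per call and only parses the JSON once the call is complete. A condensed, standalone sketch of that accumulation pattern (adapted from the embedded source and written directly against the `openai` client that the bundle lazy-imports; the `get_weather` tool is hypothetical, for illustration only):

```ts
import OpenAI from "openai";

// A chunk carrying an `id` starts a new tool call; later chunks append to
// `function.arguments`; the accumulated JSON is parsed once finish_reason arrives.
const client = new OpenAI(); // uses OPENAI_API_KEY from the environment

const stream = await client.chat.completions.create({
  model: "gpt-4o",
  stream: true,
  messages: [{ role: "user", content: "What is the weather in Paris?" }],
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather", // hypothetical tool, for illustration only
        description: "Look up current weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
});

let current: { id: string; name: string; args: string } | null = null;

for await (const chunk of stream) {
  const choice = chunk.choices[0];

  // Plain text deltas stream alongside tool-call fragments.
  if (choice?.delta?.content) process.stdout.write(choice.delta.content);

  for (const tc of choice?.delta?.tool_calls ?? []) {
    if (tc.id) {
      // A new id means a new call; flush any previous one first.
      if (current) console.log(current.name, JSON.parse(current.args || "{}"));
      current = { id: tc.id, name: tc.function?.name ?? "", args: "" };
    }
    if (current && tc.function?.arguments) {
      current.args += tc.function.arguments; // accumulate the JSON fragments
    }
  }

  if (choice?.finish_reason && current) {
    console.log(current.name, JSON.parse(current.args || "{}")); // completed call
    current = null;
  }
}
```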
package/dist/providers/xai/index.d.mts
@@ -0,0 +1,78 @@
+import { L as LanguageModel } from '../../types-CdORv1Yu.mjs';
+import { X as XAIProviderConfig, A as AIProvider } from '../../types-BBCZ3Fxy.mjs';
+import 'zod';
+import '@yourgpt/copilot-sdk/core';
+import '../../base-D_FyHFKj.mjs';
+
+/**
+ * xAI Provider - Modern Pattern
+ *
+ * Returns a LanguageModel instance that can be used directly with
+ * generateText() and streamText().
+ *
+ * @example
+ * ```ts
+ * import { xai } from '@yourgpt/llm-sdk/xai';
+ * import { generateText } from '@yourgpt/llm-sdk';
+ *
+ * const result = await generateText({
+ *   model: xai('grok-3-fast-beta'),
+ *   prompt: 'Hello!',
+ * });
+ * ```
+ */
+
+interface XAIProviderOptions {
+    /** API key (defaults to XAI_API_KEY env var) */
+    apiKey?: string;
+    /** Base URL for API (defaults to https://api.x.ai/v1) */
+    baseURL?: string;
+}
+/**
+ * Create an xAI language model
+ *
+ * @param modelId - Model ID (e.g., 'grok-3-fast-beta', 'grok-4')
+ * @param options - Provider options
+ * @returns LanguageModel instance
+ *
+ * @example
+ * ```ts
+ * // Basic usage
+ * const model = xai('grok-3-fast-beta');
+ *
+ * // With custom options
+ * const model = xai('grok-4', {
+ *   apiKey: 'xai-...',
+ * });
+ * ```
+ */
+declare function xai(modelId: string, options?: XAIProviderOptions): LanguageModel;
+
+/**
+ * xAI Provider
+ *
+ * xAI's Grok models are cutting-edge AI models with vision and tool support.
+ *
+ * Features:
+ * - Vision (images)
+ * - Tools/Function calling
+ * - Real-time information (trained on X/Twitter data)
+ * - Ultra-fast inference
+ */
+
+/**
+ * Create an xAI provider
+ *
+ * @example
+ * ```typescript
+ * const xai = createXAI({
+ *   apiKey: '...',
+ * });
+ * const adapter = xai.languageModel('grok-2');
+ * const caps = xai.getCapabilities('grok-2');
+ * ```
+ */
+declare function createXAI(config?: XAIProviderConfig): AIProvider;
+declare const createXAIProvider: typeof createXAI;
+
+export { type XAIProviderOptions, createXAI, xai as createXAIModel, createXAIProvider, xai };
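
Taken together, the declarations above expose both usage styles. A combined sketch, adapted from the `@example` blocks in this file (assuming the `@yourgpt/llm-sdk/xai` subpath export matches the JSDoc; the API key value is a placeholder):

```ts
import { generateText } from "@yourgpt/llm-sdk";
import { createXAI, xai } from "@yourgpt/llm-sdk/xai";

// Modern pattern: xai(modelId) returns a LanguageModel for generateText()/streamText().
const result = await generateText({
  model: xai("grok-3-fast-beta"),
  prompt: "Hello!",
});

// Legacy pattern: createXAI() returns an AIProvider exposing languageModel()
// and getCapabilities(), as shown in the JSDoc above.
const provider = createXAI({ apiKey: "xai-..." });
const adapter = provider.languageModel("grok-2");
const caps = provider.getCapabilities("grok-2");
```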
package/dist/providers/xai/index.d.ts
@@ -0,0 +1,78 @@
+import { L as LanguageModel } from '../../types-CdORv1Yu.js';
+import { X as XAIProviderConfig, A as AIProvider } from '../../types-DcoCaVVC.js';
+import 'zod';
+import '@yourgpt/copilot-sdk/core';
+import '../../base-D_FyHFKj.js';
+
+/**
+ * xAI Provider - Modern Pattern
+ *
+ * Returns a LanguageModel instance that can be used directly with
+ * generateText() and streamText().
+ *
+ * @example
+ * ```ts
+ * import { xai } from '@yourgpt/llm-sdk/xai';
+ * import { generateText } from '@yourgpt/llm-sdk';
+ *
+ * const result = await generateText({
+ *   model: xai('grok-3-fast-beta'),
+ *   prompt: 'Hello!',
+ * });
+ * ```
+ */
+
+interface XAIProviderOptions {
+    /** API key (defaults to XAI_API_KEY env var) */
+    apiKey?: string;
+    /** Base URL for API (defaults to https://api.x.ai/v1) */
+    baseURL?: string;
+}
+/**
+ * Create an xAI language model
+ *
+ * @param modelId - Model ID (e.g., 'grok-3-fast-beta', 'grok-4')
+ * @param options - Provider options
+ * @returns LanguageModel instance
+ *
+ * @example
+ * ```ts
+ * // Basic usage
+ * const model = xai('grok-3-fast-beta');
+ *
+ * // With custom options
+ * const model = xai('grok-4', {
+ *   apiKey: 'xai-...',
+ * });
+ * ```
+ */
+declare function xai(modelId: string, options?: XAIProviderOptions): LanguageModel;
+
+/**
+ * xAI Provider
+ *
+ * xAI's Grok models are cutting-edge AI models with vision and tool support.
+ *
+ * Features:
+ * - Vision (images)
+ * - Tools/Function calling
+ * - Real-time information (trained on X/Twitter data)
+ * - Ultra-fast inference
+ */
+
+/**
+ * Create an xAI provider
+ *
+ * @example
+ * ```typescript
+ * const xai = createXAI({
+ *   apiKey: '...',
+ * });
+ * const adapter = xai.languageModel('grok-2');
+ * const caps = xai.getCapabilities('grok-2');
+ * ```
+ */
+declare function createXAI(config?: XAIProviderConfig): AIProvider;
+declare const createXAIProvider: typeof createXAI;
+
+export { type XAIProviderOptions, createXAI, xai as createXAIModel, createXAIProvider, xai };