seacloud-sdk 0.6.0 → 0.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +3 -15
- package/dist/cli.js.map +1 -1
- package/package.json +1 -1
package/dist/cli.js
CHANGED
@@ -546,12 +546,8 @@ function parseArgs(args) {
   return options;
 }
 async function testModel(model, options) {
-  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;
+  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || "";
   const baseUrl = options.baseUrl || process.env.API_BASE_URL || "http://localhost:8080";
-  if (!apiKey) {
-    console.error("Error: API key not provided. Use --api-key or set API_SERVICE_TOKEN env var");
-    process.exit(1);
-  }
   if (!options.params) {
     console.error("Error: --params required. Provide JSON parameters for the model");
     process.exit(1);
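Net effect of this hunk: testModel no longer prints its own "API key not provided" message and exits; a missing key becomes an empty string that is handed on to whatever client code runs next. Judging from the TypeScript sources embedded in the cli.js.map diff further down, the SDK still rejects an empty key in validateConfig, so the failure moves from a CLI-formatted message to a thrown error. A minimal sketch of that new failure path, assuming SeacloudClient is exported from the package root (the constructor call is illustrative, not copied from testModel):

```js
// Illustrative sketch only. validateConfig (see config.ts embedded in cli.js.map
// below) throws when apiKey is empty, so client construction still fails fast.
const { SeacloudClient } = require("seacloud-sdk"); // assumed root export

const apiKey = process.env.API_SERVICE_TOKEN || ""; // 0.6.1: no CLI-level guard
const baseUrl = process.env.API_BASE_URL || "http://localhost:8080";

try {
  const client = new SeacloudClient({ apiKey, baseUrl });
  console.log("client configured:", client.getConfig().baseUrl);
} catch (err) {
  // "API key is required. Set API_SERVICE_TOKEN environment variable or pass apiKey in config."
  console.error(err.message);
  process.exit(1);
}
```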
@@ -631,12 +627,8 @@ async function runLlm(prompt, args) {
     else if (arg === "--api-key") options.apiKey = args[++i];
     else if (arg === "--base-url") options.baseUrl = args[++i];
   }
-  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;
+  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || "";
   const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || "http://proxy.sg.seaverse.dev";
-  if (!apiKey) {
-    console.error("Error: API key not provided. Use --api-key or set API_SERVICE_TOKEN env var");
-    process.exit(1);
-  }
   initSeacloud(apiKey, { baseUrl, timeout: 12e4 });
   console.log(`Model: ${options.model}`);
   console.log(`Prompt: ${prompt}
@@ -682,12 +674,8 @@ async function runAgent(prompt, args) {
     else if (arg === "--api-key") options.apiKey = args[++i];
     else if (arg === "--base-url") options.baseUrl = args[++i];
   }
-  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;
+  const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || "";
   const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || "http://proxy.sg.seaverse.dev";
-  if (!apiKey) {
-    console.error("Error: API key not provided. Use --api-key or set API_SERVICE_TOKEN env var");
-    process.exit(1);
-  }
   initSeacloud(apiKey, { baseUrl, timeout: 3e5 });
   console.log(`Agent: ${options.agentId}`);
   console.log(`Model: ${options.model}`);
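The runLlm and runAgent hunks make the same change: the CLI-level key check is gone and the (possibly empty) key goes straight into initSeacloud. Per the global-config.ts source embedded in the cli.js.map diff below, initSeacloud throws "API key is required. Please provide a valid API key." for a falsy key, so running the llm or agent subcommands without a token still aborts, just via the SDK's error instead of the CLI's message. A hedged sketch of that path, assuming initSeacloud is re-exported from the package root:

```js
// Sketch of the 0.6.1 failure path for the llm/agent subcommands without a key.
// initSeacloud (see global-config.ts embedded in cli.js.map below) throws on a falsy key.
const { initSeacloud } = require("seacloud-sdk"); // assumed root export

const apiKey = process.env.API_SERVICE_TOKEN || ""; // guard removed in 0.6.1
const baseUrl = process.env.SEACLOUD_BASE_URL || "http://proxy.sg.seaverse.dev";

try {
  initSeacloud(apiKey, { baseUrl, timeout: 12e4 }); // runAgent uses 3e5 (5 minutes)
} catch (err) {
  // "API key is required. Please provide a valid API key."
  console.error(err.message);
  process.exit(1);
}
```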
package/dist/cli.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/core/types.ts","../src/core/config.ts","../src/core/client.ts","../src/core/global-config.ts","../src/api/llm_chat_completions.ts","../src/api/agent_chat_completions.ts","../src/cli.ts"],"names":["__filename"],"mappings":";;;;;AA4EO,IAAM,aAAA,GAAN,cAA4B,KAAA,CAAM;AAAA,EACvC,WAAA,CACE,OAAA,EACO,UAAA,EACA,QAAA,EACP;AACA,IAAA,KAAA,CAAM,OAAO,CAAA;AAHN,IAAA,IAAA,CAAA,UAAA,GAAA,UAAA;AACA,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AAGP,IAAA,IAAA,CAAK,IAAA,GAAO,eAAA;AAAA,EACd;AACF,CAAA;;;AC9EO,SAAS,YAAA,CAAa,OAAA,GAA0B,EAAC,EAA6B;AAEnF,EAAA,MAAM,MAAA,GAAS,QAAQ,MAAA,KACP,OAAO,YAAY,WAAA,GAAc,OAAA,CAAQ,GAAA,EAAK,iBAAA,GAAoB,MAAA,CAAA,IACnE,EAAA;AAGf,EAAA,MAAM,OAAA,GAAU,QAAQ,OAAA,KACP,OAAO,YAAY,WAAA,GAAc,OAAA,CAAQ,GAAA,EAAK,iBAAA,GAAoB,MAAA,CAAA,IACnE,8BAAA;AAGhB,EAAA,MAAM,SAAA,GAAY,OAAA,CAAQ,KAAA,IAAS,UAAA,CAAW,KAAA;AAE9C,EAAA,IAAI,CAAC,SAAA,EAAW;AACd,IAAA,MAAM,IAAI,MAAM,mGAAmG,CAAA;AAAA,EACrH;AAEA,EAAA,OAAO;AAAA,IACL,MAAA;AAAA,IACA,OAAA;AAAA,IACA,KAAA,EAAO,SAAA;AAAA,IACP,OAAA,EAAS,QAAQ,OAAA,IAAW;AAAA,GAC9B;AACF;AAOO,SAAS,eAAe,MAAA,EAAwC;AACrE,EAAA,IAAI,CAAC,OAAO,MAAA,EAAQ;AAClB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAM,IAAI,MAAM,qBAAqB,CAAA;AAAA,EACvC;AAEA,EAAA,IAAI,OAAO,MAAA,CAAO,KAAA,KAAU,UAAA,EAAY;AACtC,IAAA,MAAM,IAAI,MAAM,0BAA0B,CAAA;AAAA,EAC5C;AACF;;;AC5CO,IAAM,iBAAN,MAAqB;AAAA,EAG1B,WAAA,CAAY,MAAA,GAAyB,EAAC,EAAG;AACvC,IAAA,IAAA,CAAK,MAAA,GAAS,aAAa,MAAM,CAAA;AACjC,IAAA,cAAA,CAAe,KAAK,MAAM,CAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,UAAA,CAAW,QAAA,EAAkB,IAAA,EAAgD;AACjF,IAAA,MAAM,MAAM,CAAA,EAAG,IAAA,CAAK,MAAA,CAAO,OAAO,GAAG,QAAQ,CAAA,CAAA;AAE7C,IAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,IAAA,MAAM,SAAA,GAAY,WAAW,MAAM,UAAA,CAAW,OAAM,EAAG,IAAA,CAAK,OAAO,OAAO,CAAA;AAE1E,IAAA,IAAI;AACF,MAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,MAAA,CAAO,MAAM,GAAA,EAAK;AAAA,QAC5C,MAAA,EAAQ,MAAA;AAAA,QACR,OAAA,EAAS;AAAA,UACP,cAAA,EAAgB,kBAAA;AAAA,UAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AAAA,SAC/C;AAAA,QACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AAAA,QACzB,QAAQ,UAAA,CAAW;AAAA,OACpB,CAAA;AAED,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,QAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,QAAA,MAAM,IAAI,aAAA;AAAA,UACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,UACrC,QAAA,CAAS,MAAA;AAAA,UACT;AAAA,SACF;AAAA,MACF;AAEA,MAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,MAAA,OAAO,MAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,QAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,MAC1E;AAEA,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,QAC3C,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aAAA,CAAc,QAAA,EAAkB,MAAA,EAAqC;AACzE,IAAA,MAAM,GAAA,GAAM,GAAG,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAG,QAAQ,SAAS,MAAM,CAAA,CAAA;AAE5D,IAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,IAAA,MAAM,SAAA,GAAY,WAAW,MAAM,UAAA,CAAW,OAAM,EAAG,IAAA,CAAK,OAAO,OAAO,CAAA;AAE1E,IAAA,IAAI;AACF,MAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,MAAA,CAAO,MAAM,GAAA,EAAK;AAAA,QAC5C,MAAA,EAAQ,KAAA;AAAA,QACR,OAAA,EAAS;AAAA,UACP,eAAA,EAAiB,CAAA,OAAA,EAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AAAA,SAC/C;AAAA,QACA,QAAQ,UAAA,CAAW;AAAA,OACpB,CAAA;AAED,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,QAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,QAAA,MAAM,IAAI,aAAA;AAAA,UACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,C
AAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,UACrC,QAAA,CAAS,MAAA;AAAA,UACT;AAAA,SACF;AAAA,MACF;AAEA,MAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,MAAA,OAAO,MAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,QAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,MAC1E;AAEA,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,QAC3C,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAA,GAAgD;AAC9C,IAAA,OAAO,EAAE,GAAG,IAAA,CAAK,MAAA,EAAO;AAAA,EAC1B;AACF,CAAA;;;ACpHA,IAAM,YAAA,GAA6B;AAAA,EACjC,MAAA,EAAQ,IAAA;AAAA,EACR,qBAAA,EAAuB;AAAA,IACrB,UAAA,EAAY,GAAA;AAAA;AAAA,IACZ,WAAA,EAAa;AAAA;AAAA;AAEjB,CAAA;AAOO,SAAS,YAAA,CAAa,QAAgB,OAAA,EAK1C;AACD,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,MAAM,sDAAsD,CAAA;AAAA,EACxE;AAGA,EAAA,YAAA,CAAa,MAAA,GAAS,IAAI,cAAA,CAAO;AAAA,IAC/B,MAAA;AAAA,IACA,SAAS,OAAA,EAAS,OAAA;AAAA,IAClB,SAAS,OAAA,EAAS;AAAA,GACnB,CAAA;AAGD,EAAA,IAAI,OAAA,EAAS,eAAe,MAAA,EAAW;AACrC,IAAA,YAAA,CAAa,qBAAA,CAAsB,aAAa,OAAA,CAAQ,UAAA;AAAA,EAC1D;AACA,EAAA,IAAI,OAAA,EAAS,gBAAgB,MAAA,EAAW;AACtC,IAAA,YAAA,CAAa,qBAAA,CAAsB,cAAc,OAAA,CAAQ,WAAA;AAAA,EAC3D;AAEA,EAAA,OAAO,YAAA,CAAa,MAAA;AACtB;AAMO,SAAS,SAAA,GAA4B;AAC1C,EAAA,IAAI,CAAC,aAAa,MAAA,EAAQ;AACxB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACA,EAAA,OAAO,YAAA,CAAa,MAAA;AACtB;;;ACwRA,eAAsB,mBACpB,MAAA,EACsE;AACtE,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAM,MAAA,GAAS,OAAO,SAAA,EAAU;AAEhC,EAAA,MAAM,GAAA,GAAM,CAAA,EAAG,MAAA,CAAO,OAAO,CAAA,qBAAA,CAAA;AAE7B,EAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,EAAA,MAAM,YAAY,UAAA,CAAW,MAAM,WAAW,KAAA,EAAM,EAAG,OAAO,OAAO,CAAA;AAErE,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,MAAM,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK;AAAA,MACvC,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA;AAAA,OAC1C;AAAA,MACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AAAA,MAC3B,QAAQ,UAAA,CAAW;AAAA,KACpB,CAAA;AAED,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,QACrC,QAAA,CAAS,MAAA;AAAA,QACT;AAAA,OACF;AAAA,IACF;AAGA,IAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,MAAA,OAAO,uBAAuB,QAAQ,CAAA;AAAA,IACxC;AAGA,IAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,IAAA,OAAO,MAAA;AAAA,EACT,SAAS,KAAA,EAAO;AACd,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,MAAA,MAAM,KAAA;AAAA,IACR;AAEA,IAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,MAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,IACrE;AAEA,IAAA,MAAM,IAAI,aAAA;AAAA,MACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,MAC3C,MAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACF;AAKA,gBAAgB,uBACd,QAAA,EACoC;AACpC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAE9B,QAAA,IAAI,CAAC,WAAA,EAAa;AAClB,QAAA,IAAI,gBAAgB,cAAA,EAAgB;AAEpC,QAAA,IAAI,WAAA,CAAY,UAAA,
CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,OAAA,GAAU,WAAA,CAAY,KAAA,CAAM,CAAC,CAAA;AACnC,UAAA,IAAI;AACF,YAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAChC,YAAA,MAAM,KAAA;AAAA,UACR,SAAS,KAAA,EAAO;AACd,YAAA,OAAA,CAAQ,IAAA,CAAK,8BAA8B,OAAO,CAAA;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AACF;;;ACgcA,eAAsB,qBACpB,MAAA,EACgF;AAChF,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAM,MAAA,GAAS,OAAO,SAAA,EAAU;AAEhC,EAAA,MAAM,GAAA,GAAM,CAAA,EAAG,MAAA,CAAO,OAAO,CAAA,8BAAA,CAAA;AAY7B,EAAA,MAAM,KAAA,GAAQ,OAAO,KAAA,IAAS,2CAAA;AAS9B,EAAA,MAAM,kBAAA,GAAqB,OAAO,MAAA,KAAW,KAAA;AAC7C,EAAA,MAAM,WAAA,GAAc;AAAA,IAClB,GAAG,MAAA;AAAA,IACH,KAAA;AAAA,IACA,MAAA,EAAQ;AAAA;AAAA,GACV;AAcA,EAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,EAAA,MAAM,YAAY,UAAA,CAAW,MAAM,WAAW,KAAA,EAAM,EAAG,OAAO,OAAO,CAAA;AAErE,EAAA,IAAI;AAOF,IAAA,MAAM,QAAA,GAAW,MAAM,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK;AAAA,MACvC,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,QACxC,WAAA,EAAa;AAAA;AAAA,OACf;AAAA,MACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,WAAW,CAAA;AAAA,MAChC,QAAQ,UAAA,CAAW;AAAA,KACpB,CAAA;AAED,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,QACrC,QAAA,CAAS,MAAA;AAAA,QACT;AAAA,OACF;AAAA,IACF;AAgBA,IAAA,IAAI,kBAAA,EAAoB;AACtB,MAAA,OAAO,4BAA4B,QAAQ,CAAA;AAAA,IAC7C;AAGA,IAAA,OAAO,MAAM,+BAA+B,QAAQ,CAAA;AAAA,EACtD,SAAS,KAAA,EAAO;AACd,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,MAAA,MAAM,KAAA;AAAA,IACR;AAEA,IAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,MAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,IACrE;AAEA,IAAA,MAAM,IAAI,aAAA;AAAA,MACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,MAC3C,MAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACF;AAKA,eAAe,+BACb,QAAA,EACsC;AACtC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AACb,EAAA,IAAI,WAAA,GAAc,EAAA;AAClB,EAAA,IAAI,YAA6B,EAAC;AAClC,EAAA,IAAI,YAAA,GAAe,EAAA;AACnB,EAAA,IAAI,SAAA,GAAY,EAAA;AAChB,EAAA,IAAI,KAAA,GAAQ,EAAA;AACZ,EAAA,IAAI,SAAA,GAA6C,IAAA;AACjD,EAAA,IAAI,KAAA;AAEJ,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAE9B,QAAA,IAAI,CAAC,WAAA,EAAa;AAClB,QAAA,IAAI,gBAAgB,cAAA,EAAgB;AACpC,QAAA,IAAI,gBAAgB,kBAAA,EAAoB;AAExC,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,IAAA,GAAO,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAEvC,UAAA,IAAI;AACF,YAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC9B,YAAA,SAAA,GAAY,MAAA;AAEZ,YAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,OAAA,GAAU,CAAC,CAAA,EAAG,KAAA;AAEnC,YAAA,IAAI,OAAO,OAAA,EAAS;AAClB,cAAA,IAAI,OAAO,KAAA,CAAM,OAAA,KAAY,QAAA,EAAU;AACrC,gBAAA,WAAA,IAAe,KAAA,CAAM,OAAA;AAAA,cACvB,CAAA,MAAA,IAAW,KAAA,CAAM,OAAA,CAAQ,KAAA,CAAM,OAAO,CAAA,EAAG;AACvC,gBAAA,KAAA,MAAW,IAAA,IAAQ,MAAM,OAAA,EAAS;AAChC,kBAAA,IAAI,IAAA,CAAK,IAAA,KAAS,MAAA,IAAU,IAAA,CAAK,IAAA,EAAM;AACrC,oBAAA,WAAA,IAAe,IAAA,CAAK,IAAA;AAAA,kBACtB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,YAAA,IAAI,OAAO,SAAA,EAAW;AACpB,cAA
A,SAAA,CAAU,IAAA,CAAK,GAAG,KAAA,CAAM,SAAS,CAAA;AAAA,YACnC;AAEA,YAAA,IAAI,MAAA,CAAO,OAAA,GAAU,CAAC,CAAA,EAAG,aAAA,EAAe;AACtC,cAAA,YAAA,GAAe,MAAA,CAAO,OAAA,CAAQ,CAAC,CAAA,CAAE,aAAA;AAAA,YACnC;AAEA,YAAA,IAAI,OAAO,UAAA,EAAY;AACrB,cAAA,SAAA,GAAY,MAAA,CAAO,UAAA;AAAA,YACrB;AAEA,YAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,cAAA,KAAA,GAAQ,MAAA,CAAO,MAAA;AAAA,YACjB;AAEA,YAAA,IAAI,OAAO,KAAA,EAAO;AAChB,cAAA,KAAA,GAAQ,MAAA,CAAO,KAAA;AAAA,YACjB;AAAA,UACF,SAAS,CAAA,EAAG;AAEV,YAAA,OAAA,CAAQ,KAAK,4BAAA,EAA8B,IAAA,CAAK,SAAA,CAAU,CAAA,EAAG,GAAG,CAAC,CAAA;AAAA,UACnE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AAEA,EAAA,IAAI,CAAC,SAAA,EAAW;AACd,IAAA,MAAM,IAAI,cAAc,mCAAmC,CAAA;AAAA,EAC7D;AAEA,EAAA,OAAO;AAAA,IACL,IAAI,SAAA,CAAU,EAAA;AAAA,IACd,MAAA,EAAQ,iBAAA;AAAA,IACR,SAAS,SAAA,CAAU,OAAA;AAAA,IACnB,OAAO,SAAA,CAAU,KAAA;AAAA,IACjB,oBAAoB,SAAA,CAAU,kBAAA;AAAA,IAC9B,OAAA,EAAS;AAAA,MACP;AAAA,QACE,KAAA,EAAO,CAAA;AAAA,QACP,OAAA,EAAS;AAAA,UACP,IAAA,EAAM,WAAA;AAAA,UACN,OAAA,EAAS;AAAA,SACX;AAAA,QACA,eAAe,YAAA,IAAgB;AAAA;AACjC,KACF;AAAA,IACA,KAAA;AAAA,IACA,YAAY,SAAA,IAAa,MAAA;AAAA,IACzB,QAAQ,KAAA,IAAS,MAAA;AAAA,IACjB,SAAA,EAAW,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY;AAAA,GAChD;AACF;AAKA,gBAAgB,4BACd,QAAA,EACyC;AACzC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAG9B,QAAA,IAAI,CAAC,WAAA,IAAe,WAAA,CAAY,UAAA,CAAW,GAAG,CAAA,EAAG;AAGjD,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,SAAS,CAAA,EAAG;AACrC,UAAA,MAAM,SAAA,GAAY,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAC5C,UAAA,IAAI,cAAc,WAAA,EAAa;AAE7B,YAAA;AAAA,UACF;AAAA,QACF;AAGA,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,IAAA,GAAO,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAGvC,UAAA,IAAI,SAAS,QAAA,EAAU;AACrB,YAAA;AAAA,UACF;AAEA,UAAA,IAAI;AACF,YAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC7B,YAAA,MAAM,KAAA;AAAA,UACR,SAAS,KAAA,EAAO;AACd,YAAA,OAAA,CAAQ,IAAA,CAAK,8BAA8B,IAAI,CAAA;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AACF;AAcO,SAAS,iBAAA,CAAkB,MAAwB,IAAA,EAA4B;AACpF,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,SAAS,CAAC,EAAE,IAAA,EAAM,MAAA,EAAQ,MAAM;AAAA,GAClC;AACF;AC5rCA,IAAMA,YAAA,GAAa,aAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAC9B,QAAQA,YAAU;AASpC,SAAS,QAAA,GAAW;AAClB,EAAA,OAAA,CAAQ,GAAA,CAAI;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,CAqDb,CAAA;AACD;AAEA,SAAS,UAAU,IAAA,EAA4B;AAC7C,EAAA,MAAM,UAAsB,EAAC;AAE7B,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAElB,IAAA,QAAQ,GAAA;AAAK,MACX,KAAK,WAAA;AACH,QAAA,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AACzB,QAAA;AAAA,MACF,KAAK,YAAA;AACH,QAAA,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAC1B,QAAA;AAAA,MACF,KAAK,SAAA;AACH,QAAA,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AACxB,QAAA;AAAA,MACF,KAAK,UAAA;AACH,QAAA,IAAI;AACF,UAAA,OAAA,CAAQ,SAAS,IAAA,CAAK,KAAA,CAAM,IAAA,C
AAK,EAAE,CAAC,CAAC,CAAA;AAAA,QACvC,SAAS,CAAA,EAAG;AACV,UAAA,OAAA,CAAQ,MAAM,kCAAkC,CAAA;AAChD,UAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,QAChB;AACA,QAAA;AAAA;AACJ,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAEA,eAAe,SAAA,CAAU,OAAe,OAAA,EAAqB;AAC3D,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,iBAAA;AAC7C,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,YAAA,IAAgB,uBAAA;AAE/D,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAA,CAAQ,MAAM,6EAA6E,CAAA;AAC3F,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,IAAI,CAAC,QAAQ,MAAA,EAAQ;AACnB,IAAA,OAAA,CAAQ,MAAM,iEAAiE,CAAA;AAC/E,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,eAAA,EAAkB,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,OAAO,CAAA,CAAE,CAAA;AAClC,EAAA,OAAA,CAAQ,GAAA,CAAI,eAAe,IAAA,CAAK,SAAA,CAAU,QAAQ,MAAA,EAAQ,IAAA,EAAM,CAAC,CAAC,CAAA;AAClE,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAEd,EAAA,MAAM,SAAS,IAAI,cAAA,CAAe,EAAE,MAAA,EAAQ,SAAS,CAAA;AAErD,EAAA,IAAI;AACF,IAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAC9B,IAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,UAAA,CAAW,sBAAA,EAAwB;AAAA,MAC3D,KAAA;AAAA,MACA,OAAO,CAAC,EAAE,MAAA,EAAQ,OAAA,CAAQ,QAAQ;AAAA,KACnC,CAAA;AAED,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,cAAA,EAAiB,IAAA,CAAK,EAAE,CAAA,CAAE,CAAA;AACtC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,IAAA,CAAK,MAAM,CAAA,CAAE,CAAA;AAC5C,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAEd,IAAA,IAAI,IAAA,CAAK,WAAW,QAAA,EAAU;AAC5B,MAAA,OAAA,CAAQ,KAAA,CAAM,0BAAA,EAA4B,IAAA,CAAK,KAAK,CAAA;AACpD,MAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,IAChB;AAEA,IAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,MAAM,WAAA,GAAc,GAAA;AACpB,IAAA,MAAM,UAAA,GAAa,GAAA;AAEnB,IAAA,OAAO,UAAU,WAAA,EAAa;AAC5B,MAAA,OAAA,EAAA;AAEA,MAAA,MAAM,SAAS,MAAM,MAAA,CAAO,aAAA,CAAc,sBAAA,EAAwB,KAAK,EAAE,CAAA;AACzE,MAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,CAAA,UAAA,EAAa,OAAO,IAAI,WAAW,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,IAAA,CAAM,CAAA;AAEzF,MAAA,IAAI,MAAA,CAAO,WAAW,WAAA,EAAa;AACjC,QAAA,OAAA,CAAQ,IAAI,qBAAqB,CAAA;AACjC,QAAA,OAAA,CAAQ,IAAI,YAAY,CAAA;AACxB,QAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,OAAO,MAAA,EAAQ,IAAA,EAAM,CAAC,CAAC,CAAA;AAGlD,QAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,UAAA,MAAM,OAAiB,EAAC;AACxB,UAAA,KAAA,MAAW,IAAA,IAAQ,OAAO,MAAA,EAAQ;AAChC,YAAA,IAAI,KAAK,OAAA,EAAS;AAChB,cAAA,KAAA,MAAW,QAAA,IAAY,KAAK,OAAA,EAAS;AACnC,gBAAA,IAAI,SAAS,GAAA,EAAK;AAChB,kBAAA,IAAA,CAAK,IAAA,CAAK,SAAS,GAAG,CAAA;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAEA,UAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,YAAA,OAAA,CAAQ,IAAI,mBAAmB,CAAA;AAC/B,YAAA,IAAA,CAAK,OAAA,CAAQ,CAAC,GAAA,EAAK,CAAA,KAAM;AACvB,cAAA,OAAA,CAAQ,IAAI,CAAA,EAAA,EAAK,CAAA,GAAI,CAAC,CAAA,EAAA,EAAK,GAAG,CAAA,CAAE,CAAA;AAAA,YAClC,CAAC,CAAA;AAAA,UACH;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,IAAI,MAAA,CAAO,WAAW,QAAA,EAAU;AAC9B,QAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAC9B,QAAA,OAAA,CAAQ,KAAA,CAAM,QAAA,EAAU,MAAA,CAAO,KAAK,CAAA;AACpC,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,MAAM,IAAI,OAAA,CAAQ,CAAA,OAAA,KAAW,UAAA,CAAW,OAAA,EAAS,UAAU,CAAC,CAAA;AAAA,IAC9D;AAEA,IAAA,OAAA,CAAQ,IAAI,0DAA0D,CAAA;AACtE,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAEhB,SAAS,KAAA,EAAY;AACnB,IAAA,OAAA,CAAQ,KAAA,CAAM,UAAA,EAAY,KAAA,CAAM,OAAO,CAAA;AACvC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF;AAEA,eAAe,MAAA,CAAO,QAAgB,IAAA,EAAgB;AACpD,EAAA,MAAM,UAAe,EAAE,MAAA,EAAQ,OAAO,KAAA,EAAO,uBAAA,EAAyB,aAAa,GAAA,EAAI;AAEvF,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAClB,IAAA,IAAI,QAAQ,SAAA,EAAW,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACtC,GAAA,KAAQ,UAAA,EAAY,OAAA,CAAQ,MAAA,GAAS,IAAA;AAAA,SAAA,IACrC,GAAA,KAAQ,iBAAiB,OAAA,CAA
Q,WAAA,GAAc,WAAW,IAAA,CAAK,EAAE,CAAC,CAAC,CAAA;AAAA,SAAA,IACnE,GAAA,KAAQ,gBAAgB,OAAA,CAAQ,SAAA,GAAY,SAAS,IAAA,CAAK,EAAE,CAAC,CAAC,CAAA;AAAA,SAAA,IAC9D,QAAQ,WAAA,EAAa,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAC9C,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,iBAAA;AAC7C,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,iBAAA,IAAqB,8BAAA;AAEpE,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAA,CAAQ,MAAM,6EAA6E,CAAA;AAC3F,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,YAAA,CAAa,MAAA,EAAQ,EAAE,OAAA,EAAS,OAAA,EAAS,MAAQ,CAAA;AAEjD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,WAAW,MAAM;AAAA,CAAI,CAAA;AAEjC,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,kBAAA,CAAmB;AAAA,MACtC,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,QAAQ,CAAA;AAAA,MAC5C,MAAA,EAAQ,IAAA;AAAA,MACR,aAAa,OAAA,CAAQ,WAAA;AAAA,MACrB,YAAY,OAAA,CAAQ;AAAA,KACrB,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,YAAY,CAAA;AACjC,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,OAAA,CAAQ,CAAC,GAAG,KAAA,EAAO,OAAA;AACzC,MAAA,IAAI,OAAO,OAAA,KAAY,QAAA,EAAU,OAAA,CAAQ,MAAA,CAAO,MAAM,OAAO,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,IAAI,IAAI,CAAA;AAAA,EAClB,CAAA,MAAO;AACL,IAAA,MAAM,QAAA,GAAW,MAAM,kBAAA,CAAmB;AAAA,MACxC,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,QAAQ,CAAA;AAAA,MAC5C,MAAA,EAAQ,KAAA;AAAA,MACR,aAAa,OAAA,CAAQ,WAAA;AAAA,MACrB,YAAY,OAAA,CAAQ;AAAA,KACrB,CAAA;AAED,IAAA,OAAA,CAAQ,IAAI,WAAA,EAAa,QAAA,CAAS,QAAQ,CAAC,CAAA,CAAE,QAAQ,OAAO,CAAA;AAC5D,IAAA,OAAA,CAAQ,GAAA,CAAI,UAAA,EAAY,QAAA,CAAS,KAAK,CAAA;AAAA,EACxC;AACF;AAEA,eAAe,QAAA,CAAS,QAAgB,IAAA,EAAgB;AACtD,EAAA,MAAM,OAAA,GAAe;AAAA,IACnB,MAAA,EAAQ,KAAA;AAAA,IACR,KAAA,EAAO,QAAA;AAAA,IACP,OAAA,EAAS;AAAA,GACX;AAEA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAClB,IAAA,IAAI,QAAQ,SAAA,EAAW,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACtC,GAAA,KAAQ,UAAA,EAAY,OAAA,CAAQ,MAAA,GAAS,IAAA;AAAA,SAAA,IACrC,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAChD,QAAQ,cAAA,EAAgB,OAAA,CAAQ,SAAA,GAAY,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACpD,QAAQ,WAAA,EAAa,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAC9C,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,GAAA,CAAI,iBAAA;AAC7C,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,iBAAA,IAAqB,8BAAA;AAEpE,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAA,CAAQ,MAAM,6EAA6E,CAAA;AAC3F,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,YAAA,CAAa,MAAA,EAAQ,EAAE,OAAA,EAAS,OAAA,EAAS,KAAQ,CAAA;AAEjD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,OAAO,CAAA,CAAE,CAAA;AACvC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,WAAW,MAAM;AAAA,CAAI,CAAA;AAEjC,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,oBAAA,CAAqB;AAAA,MACxC,UAAU,OAAA,CAAQ,OAAA;AAAA,MAClB,QAAA,EAAU,CAAC,iBAAA,CAAkB,MAAA,EAAQ,MAAM,CAAC,CAAA;AAAA,MAC5C,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,MAAA,EAAQ,IAAA;AAAA,MACR,YAAY,OAAA,CAAQ,SAAA;AAAA,MACpB,GAAA,EAAK;AAAA,KACN,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,YAAY,CAAA;AACjC,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,OAAA,CAAQ,CAAC,GAAG,KAAA,EAAO,OAAA;AACzC,MAAA,IAAI,OAAO,OAAA,KAAY,QAAA,EAAU,OAAA,CAAQ,MAAA,CAAO,MAAM,OAAO,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,IAAI,IAAI,CAAA;AAAA,EAClB,CAAA,MAAO;AACL,I
AAA,MAAM,QAAA,GAAW,MAAM,oBAAA,CAAqB;AAAA,MAC1C,UAAU,OAAA,CAAQ,OAAA;AAAA,MAClB,QAAA,EAAU,CAAC,iBAAA,CAAkB,MAAA,EAAQ,MAAM,CAAC,CAAA;AAAA,MAC5C,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,MAAA,EAAQ,KAAA;AAAA,MACR,YAAY,OAAA,CAAQ,SAAA;AAAA,MACpB,GAAA,EAAK;AAAA,KACN,CAAA;AAED,IAAA,OAAA,CAAQ,IAAI,WAAA,EAAa,QAAA,CAAS,QAAQ,CAAC,CAAA,CAAE,QAAQ,OAAO,CAAA;AAE5D,IAAA,IAAI,QAAA,CAAS,SAAA,IAAa,QAAA,CAAS,SAAA,CAAU,SAAS,CAAA,EAAG;AACvD,MAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,MAAA,QAAA,CAAS,SAAA,CAAU,OAAA,CAAQ,CAAC,QAAA,EAAU,CAAA,KAAM;AAC1C,QAAA,OAAA,CAAQ,IAAI,CAAA,EAAA,EAAK,CAAA,GAAI,CAAC,CAAA,EAAA,EAAK,QAAA,CAAS,IAAI,CAAA,CAAE,CAAA;AAC1C,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,QAAA,CAAS,GAAG,CAAA,CAAE,CAAA;AAAA,MACzC,CAAC,CAAA;AAAA,IACH;AAEA,IAAA,OAAA,CAAQ,GAAA,CAAI,eAAA,EAAiB,QAAA,CAAS,UAAU,CAAA;AAChD,IAAA,OAAA,CAAQ,GAAA,CAAI,aAAA,EAAe,QAAA,CAAS,MAAM,CAAA;AAAA,EAC5C;AACF;AAEA,eAAe,IAAA,GAAO;AACpB,EAAA,MAAM,IAAA,GAAO,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA;AAEjC,EAAA,IAAI,IAAA,CAAK,MAAA,KAAW,CAAA,IAAK,IAAA,CAAK,CAAC,MAAM,QAAA,IAAY,IAAA,CAAK,CAAC,CAAA,KAAM,IAAA,EAAM;AACjE,IAAA,QAAA,EAAS;AACT,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,MAAM,OAAA,GAAU,KAAK,CAAC,CAAA;AAEtB,EAAA,IAAI;AACF,IAAA,IAAI,YAAY,KAAA,EAAO;AACrB,MAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,QAAA,OAAA,CAAQ,MAAM,wCAAwC,CAAA;AACtD,QAAA,OAAA,CAAQ,IAAI,0CAA0C,CAAA;AACtD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AACA,MAAA,MAAM,OAAO,IAAA,CAAK,CAAC,GAAG,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AAAA,IACrC,CAAA,MAAA,IAAW,YAAY,OAAA,EAAS;AAC9B,MAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,QAAA,OAAA,CAAQ,MAAM,0CAA0C,CAAA;AACxD,QAAA,OAAA,CAAQ,IAAI,4CAA4C,CAAA;AACxD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AACA,MAAA,MAAM,SAAS,IAAA,CAAK,CAAC,GAAG,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AAAA,IACvC,CAAA,MAAO;AAEL,MAAA,MAAM,KAAA,GAAQ,OAAA;AAEd,MAAA,IAAI,KAAA,CAAM,UAAA,CAAW,IAAI,CAAA,EAAG;AAC1B,QAAA,OAAA,CAAQ,MAAM,uCAAuC,CAAA;AACrD,QAAA,OAAA,CAAQ,IAAI,qCAAqC,CAAA;AACjD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,MAAM,OAAA,GAAU,SAAA,CAAU,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AACvC,MAAA,MAAM,SAAA,CAAU,OAAO,OAAO,CAAA;AAAA,IAChC;AAAA,EACF,SAAS,KAAA,EAAY;AACnB,IAAA,OAAA,CAAQ,KAAA,CAAM,UAAA,EAAY,KAAA,CAAM,OAAO,CAAA;AACvC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF;AAEA,IAAA,EAAK,CAAE,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA","file":"cli.js","sourcesContent":["/**\n * SeaCloud SDK 核心类型定义\n */\n\n/**\n * SDK 配置选项\n */\nexport interface SeacloudConfig {\n /** API Service Token - 从环境变量 API_SERVICE_TOKEN 读取或直接传入 */\n apiKey?: string;\n /** 代理服务器基础 URL,默认为 http://localhost:8080 */\n baseUrl?: string;\n /** 自定义 fetch 实现(可选,用于 Node.js < 18 或测试) */\n fetch?: typeof fetch;\n /** 请求超时时间(毫秒),默认 30000 */\n timeout?: number;\n}\n\n/**\n * 任务状态\n */\nexport type TaskStatus = 'pending' | 'processing' | 'in_progress' | 'completed' | 'failed';\n\n/**\n * 任务错误信息\n */\nexport interface TaskError {\n code: string;\n message: string;\n details?: any;\n}\n\n/**\n * 任务结果\n */\nexport interface TaskResult {\n /** 任务 ID */\n id: string;\n /** 创建时间戳 */\n created_at: number;\n /** 任务状态 */\n status: TaskStatus;\n /** 错误信息(如果失败) */\n error?: TaskError | null;\n /** 模型名称 */\n model: string;\n /** 输出结果 */\n output?: Array<{\n content?: Array<{\n type?: string;\n url?: string;\n size?: number;\n jobId?: string;\n [key: string]: any;\n }>;\n [key: string]: any;\n }>;\n /** 其他字段 */\n [key: string]: any;\n}\n\n/**\n * 任务轮询选项\n */\nexport interface PollingOptions {\n /** 轮询间隔(毫秒),默认 3000 */\n intervalMs?: number;\n /** 最大尝试次数,默认 120 */\n maxAttempts?: number;\n /** 进度回调函数 */\n onProgress?: (attempt: number, status: TaskStatus) => 
void;\n}\n\n/**\n * HTTP 响应错误\n */\nexport class SeacloudError extends Error {\n constructor(\n message: string,\n public statusCode?: number,\n public response?: any,\n ) {\n super(message);\n this.name = 'SeacloudError';\n }\n}\n","import type { SeacloudConfig } from './types.js';\n\n/**\n * 创建 SDK 配置\n * @param options 配置选项\n * @returns 完整的配置对象\n */\nexport function createConfig(options: SeacloudConfig = {}): Required<SeacloudConfig> {\n // 优先级:传入的 apiKey > 环境变量 API_SERVICE_TOKEN\n const apiKey = options.apiKey ||\n (typeof process !== 'undefined' ? process.env?.API_SERVICE_TOKEN : undefined) ||\n '';\n\n // 使用环境变量或传入的 baseUrl\n const baseUrl = options.baseUrl ||\n (typeof process !== 'undefined' ? process.env?.SEACLOUD_BASE_URL : undefined) ||\n 'http://proxy.sg.seaverse.dev';\n\n // 使用全局 fetch 或自定义实现\n const fetchImpl = options.fetch || globalThis.fetch;\n\n if (!fetchImpl) {\n throw new Error('fetch is not available. Please provide a fetch implementation in config or upgrade to Node.js 18+');\n }\n\n return {\n apiKey,\n baseUrl,\n fetch: fetchImpl,\n timeout: options.timeout || 30000,\n };\n}\n\n/**\n * 验证配置\n * @param config 配置对象\n * @throws 如果配置无效\n */\nexport function validateConfig(config: Required<SeacloudConfig>): void {\n if (!config.apiKey) {\n throw new Error(\n 'API key is required. Set API_SERVICE_TOKEN environment variable or pass apiKey in config.'\n );\n }\n\n if (!config.baseUrl) {\n throw new Error('baseUrl is required');\n }\n\n if (typeof config.fetch !== 'function') {\n throw new Error('fetch must be a function');\n }\n}\n","import type { SeacloudConfig, TaskResult } from './types.js';\nimport { SeacloudError } from './types.js';\nimport { createConfig, validateConfig } from './config.js';\n\n/**\n * SeaCloud HTTP 客户端\n * 用于创建任务和查询任务状态\n */\nexport class SeacloudClient {\n private config: Required<SeacloudConfig>;\n\n constructor(config: SeacloudConfig = {}) {\n this.config = createConfig(config);\n validateConfig(this.config);\n }\n\n /**\n * 创建一个新任务\n * @param endpoint API 端点路径(例如:/model/tasks)\n * @param body 请求体\n * @returns 任务结果\n */\n async createTask(endpoint: string, body: Record<string, any>): Promise<TaskResult> {\n const url = `${this.config.baseUrl}${endpoint}`;\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);\n\n try {\n const response = await this.config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this.config.apiKey}`,\n },\n body: JSON.stringify(body),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n const result = await response.json();\n return result as TaskResult;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${this.config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n }\n\n /**\n * 查询任务状态\n * @param endpoint API 端点路径(例如:/model/v1/generation)\n * @param taskId 任务 ID\n * @returns 任务结果\n */\n async getTaskStatus(endpoint: string, taskId: string): Promise<TaskResult> {\n const url = `${this.config.baseUrl}${endpoint}/task/${taskId}`;\n\n const controller = new 
AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);\n\n try {\n const response = await this.config.fetch(url, {\n method: 'GET',\n headers: {\n 'Authorization': `Bearer ${this.config.apiKey}`,\n },\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n const result = await response.json();\n return result as TaskResult;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${this.config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n }\n\n /**\n * 获取当前配置\n */\n getConfig(): Readonly<Required<SeacloudConfig>> {\n return { ...this.config };\n }\n}\n","/**\n * 全局配置管理\n */\n\nimport type { SeacloudClient } from './client.js';\nimport type { PollingOptions } from './types.js';\nimport { SeacloudClient as Client } from './client.js';\n\n// 全局配置状态\ninterface GlobalConfig {\n client: SeacloudClient | null;\n defaultPollingOptions: PollingOptions;\n}\n\nconst globalConfig: GlobalConfig = {\n client: null,\n defaultPollingOptions: {\n intervalMs: 3000, // 3秒轮询间隔\n maxAttempts: 100, // 最多尝试100次 (约5分钟)\n },\n};\n\n/**\n * 初始化 SDK\n * @param apiKey API 密钥\n * @param options 可选配置\n */\nexport function initSeacloud(apiKey: string, options?: {\n baseUrl?: string;\n intervalMs?: number; // 轮询间隔(毫秒)\n maxAttempts?: number; // 最大尝试次数\n timeout?: number;\n}) {\n if (!apiKey) {\n throw new Error('API key is required. Please provide a valid API key.');\n }\n\n // 创建客户端\n globalConfig.client = new Client({\n apiKey,\n baseUrl: options?.baseUrl,\n timeout: options?.timeout,\n });\n\n // 设置默认轮询选项\n if (options?.intervalMs !== undefined) {\n globalConfig.defaultPollingOptions.intervalMs = options.intervalMs;\n }\n if (options?.maxAttempts !== undefined) {\n globalConfig.defaultPollingOptions.maxAttempts = options.maxAttempts;\n }\n\n return globalConfig.client;\n}\n\n/**\n * 获取全局客户端\n * @throws 如果未初始化\n */\nexport function getClient(): SeacloudClient {\n if (!globalConfig.client) {\n throw new Error(\n 'SeaCloud SDK is not initialized. 
Please call initSeacloud(apiKey) first.'\n );\n }\n return globalConfig.client;\n}\n\n/**\n * 获取默认轮询选项\n */\nexport function getDefaultPollingOptions(): PollingOptions {\n return { ...globalConfig.defaultPollingOptions };\n}\n\n/**\n * 更新默认轮询选项\n */\nexport function setDefaultPollingOptions(options: Partial<PollingOptions>) {\n Object.assign(globalConfig.defaultPollingOptions, options);\n}\n\n/**\n * 重置配置(主要用于测试)\n */\nexport function resetConfig() {\n globalConfig.client = null;\n globalConfig.defaultPollingOptions = {\n intervalMs: 3000,\n maxAttempts: 100,\n };\n}\n","import { getClient } from '../core/global-config.js';\nimport { SeacloudError } from '../core/types.js';\n\n/**\n * Chat message role\n */\nexport type ChatMessageRole = 'system' | 'user' | 'assistant';\n\n/**\n * Chat message interface\n */\nexport interface ChatMessage {\n /**\n * Message role\n */\n role: ChatMessageRole;\n /**\n * Message content\n */\n content: string;\n}\n\n/**\n * LLM chat completions request parameters\n */\nexport interface LlmChatCompletionsParams {\n /**\n * Model name to use for chat completion\n * Supports 100+ models including:\n * - Claude: seaart-mix-sonnet-4-5, shaseng-claude-4.5, seacloud-claude-haiku-4.5, seacloud-claude-sonnet-4, seacloud-claude-opus-4.5\n * - Kimi: kimi-k2, kimi-k2-thinking, kimi-k2-250905\n * - Grok: grok-4, grok-code-fast-1\n * - DeepSeek: deepseek-v3.1, deepseek-v3-0324, deepseek-r1, deepseek-r1-0528\n * - Gemini: gemini-2.5-pro, gemini-2.5-flash, gemini-2.0-flash, gemini-3-pro-preview, gemini-3-flash-preview\n * - Qwen: qwen-plus, qwen-plus-character, qwen3-coder-plus\n * - GLM: glm-4.5-air, glm-4.5, glm-4.6, glm-4-plus\n * - GPT: gpt-4o, gpt-4o-mini, gpt-4.1, gpt-4.1-mini, gpt-4.1-nano, gpt-5, gpt-5.1, gpt-5.2\n * - Hunyuan: hunyuan-turbos-latest, hunyuan-large-role-20250822, hunyuan-large-role-plus-20250718\n * - Mistral: mistral-small-3.2-24b-instruct, mistral-small-3.1-24b-instruct, mistral-nemo\n * - Hermes: hermes-4-70b, hermes-4-405b, hermes-3-llama-3.1-405b\n * - Doubao: doubao-romantic, doubao-seed-1-6, doubao-seed-code-preview-251028\n * - Ernie: ernie-5.0-thinking-latest, ernie-5.0-thinking-preview\n * - WizardLM: wizardlm-2-8x22b\n * - O1/O3: o1-pre, o3-pre\n * - LongCat: LongCat-Flash-Chat, LongCat-Flash-Thinking\n * - And many more...\n *\n * @example \"seaart-mix-sonnet-4-5\"\n * @example \"kimi-k2\"\n * @example \"deepseek-v3.1\"\n */\n model: string;\n\n /**\n * Array of messages comprising the conversation so far\n */\n messages: ChatMessage[];\n\n /**\n * Maximum number of tokens to generate in the completion\n * @default undefined (model default)\n */\n max_tokens?: number;\n\n /**\n * Sampling temperature (0-2)\n * Higher values make output more random, lower values more deterministic\n * @default undefined (model default, usually 1.0)\n */\n temperature?: number;\n\n /**\n * Nucleus sampling parameter (0-1)\n * Alternative to temperature sampling\n * @default undefined (model default, usually 1.0)\n */\n top_p?: number;\n\n /**\n * Whether to stream the response\n * If true, returns an async iterator of chunks\n * If false, waits for complete response\n * @default false\n */\n stream?: boolean;\n\n /**\n * Number of completions to generate\n * @default 1\n */\n n?: number;\n\n /**\n * Sequences where the API will stop generating further tokens\n */\n stop?: string | string[];\n\n /**\n * Penalty for new tokens based on whether they appear in the text so far (-2.0 to 2.0)\n * @default 0\n */\n presence_penalty?: number;\n\n /**\n * Penalty for 
new tokens based on their existing frequency in the text so far (-2.0 to 2.0)\n * @default 0\n */\n frequency_penalty?: number;\n\n /**\n * A unique identifier representing your end-user\n */\n user?: string;\n}\n\n/**\n * Usage statistics for the completion\n */\nexport interface ChatCompletionUsage {\n /**\n * Number of tokens in the prompt\n */\n prompt_tokens: number;\n\n /**\n * Number of tokens in the generated completion\n */\n completion_tokens: number;\n\n /**\n * Total number of tokens used\n */\n total_tokens: number;\n\n /**\n * Cost in USD (if available)\n */\n cost?: number;\n\n /**\n * Additional usage details\n */\n [key: string]: any;\n}\n\n/**\n * A single chat completion choice\n */\nexport interface ChatCompletionChoice {\n /**\n * The index of this choice\n */\n index: number;\n\n /**\n * The message generated by the model\n */\n message: ChatMessage;\n\n /**\n * The reason the model stopped generating tokens\n * @values stop | length | content_filter | function_call\n */\n finish_reason: string | null;\n\n /**\n * Additional choice-specific fields\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion response (non-streaming)\n */\nexport interface ChatCompletionResponse {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * The object type, always \"chat.completion\"\n */\n object: string;\n\n /**\n * Unix timestamp of when the completion was created\n */\n created: number;\n\n /**\n * The model used for completion\n */\n model: string;\n\n /**\n * List of completion choices\n */\n choices: ChatCompletionChoice[];\n\n /**\n * Usage statistics for the completion\n */\n usage?: ChatCompletionUsage;\n\n /**\n * Additional response fields\n */\n [key: string]: any;\n}\n\n/**\n * Delta message for streaming chunks\n */\nexport interface ChatCompletionChunkDelta {\n /**\n * The role of the message (only in first chunk)\n */\n role?: ChatMessageRole;\n\n /**\n * The content delta for this chunk\n */\n content?: string;\n}\n\n/**\n * A single streaming chunk choice\n */\nexport interface ChatCompletionChunkChoice {\n /**\n * The index of this choice\n */\n index: number;\n\n /**\n * The delta message for this chunk\n */\n delta: ChatCompletionChunkDelta;\n\n /**\n * The reason the model stopped generating tokens (only in last chunk)\n */\n finish_reason: string | null;\n\n /**\n * Additional choice-specific fields\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion streaming chunk\n */\nexport interface ChatCompletionChunk {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * The object type, always \"chat.completion.chunk\"\n */\n object: string;\n\n /**\n * Unix timestamp of when the chunk was created\n */\n created: number;\n\n /**\n * The model used for completion\n */\n model: string;\n\n /**\n * List of chunk choices\n */\n choices: ChatCompletionChunkChoice[];\n\n /**\n * Additional chunk fields\n */\n [key: string]: any;\n}\n\n/**\n * Call LLM chat completions API (non-streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, llmChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({ apiKey: 'your-api-key' });\n *\n * const response = await llmChatCompletions({\n * model: 'seaart-mix-sonnet-4-5',\n * messages: [\n * { role: 'user', content: 'Hello!' 
}\n * ],\n * max_tokens: 100\n * });\n *\n * console.log(response.choices[0].message.content);\n * ```\n *\n * @param params Request parameters\n * @returns Chat completion response\n */\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams & { stream?: false }\n): Promise<ChatCompletionResponse>;\n\n/**\n * Call LLM chat completions API (streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, llmChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({ apiKey: 'your-api-key' });\n *\n * const stream = await llmChatCompletions({\n * model: 'seaart-mix-sonnet-4-5',\n * messages: [\n * { role: 'user', content: 'Tell me a story' }\n * ],\n * stream: true\n * });\n *\n * for await (const chunk of stream) {\n * const content = chunk.choices[0]?.delta?.content;\n * if (content) {\n * process.stdout.write(content);\n * }\n * }\n * ```\n *\n * @param params Request parameters with stream: true\n * @returns Async iterator of chat completion chunks\n */\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams & { stream: true }\n): Promise<AsyncIterable<ChatCompletionChunk>>;\n\n// Implementation\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams\n): Promise<ChatCompletionResponse | AsyncIterable<ChatCompletionChunk>> {\n const client = getClient();\n const config = client.getConfig();\n\n const url = `${config.baseUrl}/llm/chat/completions`;\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), config.timeout);\n\n try {\n const response = await config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.apiKey}`,\n },\n body: JSON.stringify(params),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n // Streaming response\n if (params.stream) {\n return parseStreamingResponse(response);\n }\n\n // Non-streaming response\n const result = await response.json();\n return result as ChatCompletionResponse;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n}\n\n/**\n * Parse streaming response into async iterable\n */\nasync function* parseStreamingResponse(\n response: Response\n): AsyncIterable<ChatCompletionChunk> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n if (!trimmedLine) continue;\n if (trimmedLine === 'data: [DONE]') continue;\n\n if (trimmedLine.startsWith('data: ')) {\n const jsonStr = trimmedLine.slice(6);\n try {\n const chunk = JSON.parse(jsonStr) as ChatCompletionChunk;\n yield chunk;\n } catch (error) {\n console.warn('Failed to 
parse SSE chunk:', jsonStr);\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n","/**\n * ============================================================================\n * SeaCloud Fast Agent Chat Completions API\n * ============================================================================\n *\n * This module provides a comprehensive API for interacting with SeaCloud's\n * Fast Agent system, which supports:\n *\n * - Multi-turn conversations with session management\n * - Tool calling for artifact generation (images, videos, music, etc.)\n * - Streaming and non-streaming response modes\n * - Multiple content types (text, image, video, audio, file)\n * - Flexible model selection (Claude, GPT, Gemini, etc.)\n *\n * IMPORTANT NOTES:\n * ================\n *\n * 1. REQUIRED PARAMETERS:\n * - agent_id: Caller-provided agent identifier\n * - messages: Array of conversation messages\n *\n * 2. TIMEOUT CONFIGURATION:\n * - Default timeout: 30 seconds (may be too short for tool calls)\n * - Recommended for agent operations: 120 seconds or more\n * - Set via initSeacloud({ timeout: 120000 })\n *\n * 3. STREAMING MODE:\n * - API always returns Server-Sent Events (SSE) format\n * - params.stream controls how SDK consumes the response\n * - For tool calls (image/video generation), streaming is RECOMMENDED\n *\n * 4. AUTHENTICATION:\n * - Requires 'X-Project: SeaArt' header (automatically added)\n * - Uses Bearer token authentication\n *\n * 5. SESSION MANAGEMENT:\n * - Use session_id to maintain multi-turn conversations\n * - Increment seq for each message in the session\n *\n * @see AGENT_API.md for detailed documentation and examples\n */\n\nimport { getClient } from '../core/global-config.js';\nimport { SeacloudError } from '../core/types.js';\n\n/**\n * Message content type\n */\nexport type AgentMessageContentType = 'text' | 'image' | 'video' | 'audio' | 'file';\n\n/**\n * Base message content\n */\ninterface BaseMessageContent {\n type: AgentMessageContentType;\n}\n\n/**\n * Text message content\n */\ninterface TextMessageContent extends BaseMessageContent {\n type: 'text';\n text: string;\n}\n\n/**\n * Image message content\n */\ninterface ImageMessageContent extends BaseMessageContent {\n type: 'image';\n image_url: string;\n}\n\n/**\n * Video message content\n */\ninterface VideoMessageContent extends BaseMessageContent {\n type: 'video';\n video_url: string;\n}\n\n/**\n * Audio message content\n */\ninterface AudioMessageContent extends BaseMessageContent {\n type: 'audio';\n audio_url: string;\n}\n\n/**\n * File message content\n */\ninterface FileMessageContent extends BaseMessageContent {\n type: 'file';\n file_url: string;\n}\n\n/**\n * Message content item (union type for type safety)\n */\nexport type AgentMessageContent =\n | TextMessageContent\n | ImageMessageContent\n | VideoMessageContent\n | AudioMessageContent\n | FileMessageContent;\n\n/**\n * Message role type\n */\nexport type AgentMessageRole = 'developer' | 'user' | 'assistant' | 'tool';\n\n/**\n * Chat message\n */\nexport interface AgentMessage {\n /**\n * Message role\n * - developer: System instructions/prompts (equivalent to \"system\" role)\n * - user: User input messages\n * - assistant: AI assistant responses (for multi-turn conversations)\n * - tool: Tool execution results\n */\n role: AgentMessageRole;\n\n /**\n * Message content array\n */\n content: AgentMessageContent[];\n\n /**\n * Tool call ID (for role: 'tool')\n */\n tool_call_id?: string;\n\n /**\n * Tool name (for role: 'tool')\n */\n name?: 
string;\n}\n\n/**\n * Tool function definition\n */\nexport interface AgentToolFunction {\n /**\n * Tool name\n * Available tools:\n *\n * Image Generation:\n * - seagen_text2image_flux1d_artifact_tool\n * - seagen_text2image_seedream40_artifact_tool\n * - seagen_text2image_google_gemini3_pro_image_artifact_tool\n * - seagen_blackforestlabs_flux_2_pro_tool\n * - mm_volces_seedream_4_5_gateway_tool\n *\n * Image Editing:\n * - seagen_edit_image_google_artifact_tool\n * - seagen_blackforestlabs_flux_2_pro_edit_tool\n *\n * Video Generation:\n * - seagen_image2video_wanx26_artifact_tool\n * - seagen_text2video_wanx26_artifact_tool\n * - seagen_image2video_seedance_pro_fast_artifact_tool\n * - mm_text2video_kling_v2_6_gateway_tool\n * - mm_image2video_kling_v2_6_i2v_gateway_tool\n *\n * Music & Audio:\n * - seagen_text2song_mureka_artifact_tool\n * - seagen_text2lyrics_mureka_artifact_tool\n */\n name: string;\n\n /**\n * Tool description (optional)\n */\n description?: string;\n\n /**\n * Tool parameters schema (optional)\n */\n parameters?: Record<string, any>;\n}\n\n/**\n * Tool definition\n */\nexport interface AgentTool {\n /**\n * Tool type (always 'function')\n */\n type: 'function';\n\n /**\n * Function definition\n */\n function: AgentToolFunction;\n}\n\n/**\n * Agent chat completions request parameters\n */\nexport interface AgentChatCompletionsParams {\n /**\n * Agent ID to invoke\n * @example \"seaverse_agent\"\n * @example \"seagen_agent\"\n */\n agent_id: string;\n\n /**\n * Array of messages comprising the conversation\n */\n messages: AgentMessage[];\n\n /**\n * AI model to use for processing\n * @default \"custom_openai/vertex-ai-claude-sonnet-4.5\"\n * @example \"gpt-4o\"\n * @example \"custom_openai/vertex-ai-claude-sonnet-4.5\"\n */\n model?: string;\n\n /**\n * Sampling temperature for creativity control (0.0 to 1.0)\n * - 0.0: Focused and deterministic responses\n * - 1.0: Maximum creativity and randomness\n * @default 0.7\n */\n temperature?: number;\n\n /**\n * Session ID for multi-turn conversations\n * Used to maintain conversation context across multiple requests\n * Server returns this in the response; use it in subsequent calls\n */\n session_id?: string;\n\n /**\n * User identifier for tracking and analytics\n * Optional field for associating requests with specific users\n */\n user_id?: string;\n\n /**\n * Message ID for SSE reconnection (ADVANCED USE)\n * Only use this when resuming an interrupted streaming response\n * Server returns msg_id in response; use it to resume from disconnect point\n *\n * WARNING: Do NOT use for multi-turn conversations\n * For multi-turn, just include full message history + session_id\n *\n * @example\n * // DON'T do this for multi-turn:\n * // { msg_id: previousResponse.msg_id, seq: 1 }\n *\n * // DO this instead:\n * // { session_id: previousResponse.session_id, messages: [...allMessages] }\n */\n msg_id?: string;\n\n /**\n * Sequence number for message resumption (ADVANCED USE)\n * Only used with msg_id for resuming interrupted streams\n * For regular multi-turn conversations, omit this parameter\n *\n * @default 0\n */\n seq?: number;\n\n /**\n * Business-specific parameters\n * Key-value pairs passed through to backend systems\n * Common use cases:\n * - aspect_ratio: Video/image aspect ratio (e.g., \"16:9\", \"1:1\")\n * - duration: Video duration in seconds\n * - resolution: Output resolution (e.g., \"1080p\", \"4k\")\n */\n metadata?: Record<string, any>;\n\n /**\n * List of tools available to the agent\n * Agent 
automatically selects appropriate tool based on user request\n */\n tools?: AgentTool[];\n\n /**\n * Enable Server-Sent Events streaming\n * @default true\n */\n stream?: boolean;\n\n /**\n * Maximum number of tokens to generate\n */\n max_tokens?: number;\n\n /**\n * Number of completions to generate\n * @default 1\n */\n n?: number;\n\n /**\n * Sequences where the API will stop generating\n */\n stop?: string | string[];\n\n /**\n * Penalty for new tokens based on presence (-2.0 to 2.0)\n * @default 0\n */\n presence_penalty?: number;\n\n /**\n * Penalty for new tokens based on frequency (-2.0 to 2.0)\n * @default 0\n */\n frequency_penalty?: number;\n\n /**\n * Top-p nucleus sampling (0-1)\n */\n top_p?: number;\n}\n\n/**\n * Artifact (generated file)\n */\nexport interface AgentArtifact {\n /**\n * File name\n * @example \"image.webp\"\n */\n name: string;\n\n /**\n * CDN URL to access the file\n */\n url: string;\n\n /**\n * Content type\n * @example \"image\" | \"video\" | \"audio\"\n */\n type: string;\n\n /**\n * MIME type\n * @example \"image/webp\" | \"video/mp4\" | \"audio/mp3\"\n */\n mime_type?: string;\n\n /**\n * File size in bytes\n */\n size?: number;\n\n /**\n * Duration in seconds (for video/audio)\n */\n duration?: number;\n\n /**\n * Width in pixels (for image/video)\n */\n width?: number;\n\n /**\n * Height in pixels (for image/video)\n */\n height?: number;\n\n /**\n * Additional metadata\n */\n [key: string]: any;\n}\n\n/**\n * Tool call information\n */\nexport interface AgentToolCall {\n /**\n * Tool call ID\n */\n id: string;\n\n /**\n * Tool type\n */\n type: 'function';\n\n /**\n * Function call details\n */\n function: {\n /**\n * Function name\n */\n name: string;\n\n /**\n * Function arguments (JSON string)\n */\n arguments: string;\n };\n\n /**\n * Tool call index\n */\n index?: number;\n}\n\n/**\n * Delta content for streaming chunks\n */\nexport interface AgentChatCompletionDelta {\n /**\n * Message role (only in first chunk)\n */\n role?: AgentMessageRole;\n\n /**\n * Content delta (string or array)\n */\n content?: string | AgentMessageContent[];\n\n /**\n * Artifacts (generated files)\n */\n artifacts?: AgentArtifact[];\n\n /**\n * Tool calls\n */\n tool_calls?: AgentToolCall[];\n\n /**\n * Refusal message\n */\n refusal?: string | null;\n\n /**\n * Function call (deprecated, use tool_calls)\n */\n function_call?: any;\n\n /**\n * Audio content\n */\n audio?: any;\n\n /**\n * Additional provider-specific fields\n */\n provider_specific_fields?: any;\n}\n\n/**\n * Content filter result\n */\nexport interface AgentContentFilterResult {\n /**\n * Error information\n */\n error?: {\n /**\n * Error code\n */\n code: string;\n\n /**\n * Error message\n */\n message: string;\n };\n\n /**\n * Additional filter results\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion choice (streaming)\n */\nexport interface AgentChatCompletionChunkChoice {\n /**\n * Choice index\n */\n index: number;\n\n /**\n * Delta message for this chunk\n */\n delta: AgentChatCompletionDelta;\n\n /**\n * Finish reason (only in last chunk)\n * - \"stop\": Natural completion\n * - \"length\": Maximum token limit reached\n * - \"tool_calls\": Tool execution triggered\n * - \"content_filter\": Content filtered\n */\n finish_reason: string | null;\n\n /**\n * Log probabilities\n */\n logprobs?: any;\n\n /**\n * Content filter results\n */\n content_filter_results?: AgentContentFilterResult;\n\n /**\n * Content filter result (singular)\n */\n content_filter_result?: 
AgentContentFilterResult;\n}\n\n/**\n * Usage statistics\n */\nexport interface AgentUsage {\n /**\n * Prompt tokens\n */\n prompt_tokens: number;\n\n /**\n * Completion tokens\n */\n completion_tokens: number;\n\n /**\n * Total tokens\n */\n total_tokens: number;\n\n /**\n * Completion tokens details\n */\n completion_tokens_details?: {\n /**\n * Accepted prediction tokens\n */\n accepted_prediction_tokens?: number;\n\n /**\n * Audio tokens\n */\n audio_tokens?: number;\n\n /**\n * Reasoning tokens\n */\n reasoning_tokens?: number;\n\n /**\n * Rejected prediction tokens\n */\n rejected_prediction_tokens?: number;\n };\n\n /**\n * Prompt tokens details\n */\n prompt_tokens_details?: {\n /**\n * Audio tokens\n */\n audio_tokens?: number;\n\n /**\n * Cached tokens\n */\n cached_tokens?: number;\n\n /**\n * Text tokens\n */\n text_tokens?: number | null;\n\n /**\n * Image tokens\n */\n image_tokens?: number | null;\n };\n\n /**\n * Cost in USD\n */\n cost?: number;\n}\n\n/**\n * Chat completion streaming chunk\n */\nexport interface AgentChatCompletionChunk {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * Object type (always \"chat.completion.chunk\")\n */\n object: string;\n\n /**\n * Unix timestamp\n */\n created: number;\n\n /**\n * Model used\n */\n model: string;\n\n /**\n * System fingerprint\n */\n system_fingerprint?: string;\n\n /**\n * List of choices\n */\n choices: AgentChatCompletionChunkChoice[];\n\n /**\n * Usage statistics (usually in last chunk)\n */\n usage?: AgentUsage;\n\n /**\n * Session ID for conversation continuation\n */\n session_id?: string;\n\n /**\n * Message ID\n */\n msg_id?: string;\n\n /**\n * Provider-specific fields\n */\n provider_specific_fields?: any;\n\n /**\n * Citations\n */\n citations?: any;\n\n /**\n * Service tier\n */\n service_tier?: string | null;\n\n /**\n * Obfuscation\n */\n obfuscation?: string;\n}\n\n/**\n * Message in non-streaming response\n */\nexport interface AgentResponseMessage {\n /**\n * Message role\n */\n role: AgentMessageRole;\n\n /**\n * Message content\n */\n content: string;\n\n /**\n * Tool calls\n */\n tool_calls?: AgentToolCall[];\n\n /**\n * Refusal message\n */\n refusal?: string | null;\n\n /**\n * Function call (deprecated)\n */\n function_call?: any;\n\n /**\n * Audio content\n */\n audio?: any;\n}\n\n/**\n * Chat completion choice (non-streaming)\n */\nexport interface AgentChatCompletionChoice {\n /**\n * Choice index\n */\n index: number;\n\n /**\n * Message\n */\n message: AgentResponseMessage;\n\n /**\n * Finish reason\n */\n finish_reason: string | null;\n\n /**\n * Log probabilities\n */\n logprobs?: any;\n\n /**\n * Content filter results\n */\n content_filter_results?: AgentContentFilterResult;\n}\n\n/**\n * Chat completion response (non-streaming)\n */\nexport interface AgentChatCompletionResponse {\n /**\n * Unique identifier\n */\n id: string;\n\n /**\n * Object type (always \"chat.completion\")\n */\n object: string;\n\n /**\n * Unix timestamp\n */\n created: number;\n\n /**\n * Model used\n */\n model: string;\n\n /**\n * System fingerprint\n */\n system_fingerprint?: string;\n\n /**\n * List of choices\n */\n choices: AgentChatCompletionChoice[];\n\n /**\n * Usage statistics\n */\n usage?: AgentUsage;\n\n /**\n * Session ID\n */\n session_id?: string;\n\n /**\n * Message ID\n */\n msg_id?: string;\n\n /**\n * Provider-specific fields\n */\n provider_specific_fields?: any;\n\n /**\n * Service tier\n */\n service_tier?: string | null;\n\n /**\n * Generated artifacts\n */\n 
artifacts?: AgentArtifact[];\n}\n\n/**\n * Call SeaCloud Fast Agent API (non-streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, agentChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({\n * apiKey: 'your-api-key',\n * baseUrl: 'http://proxy.sg.seaverse.dev',\n * });\n *\n * // Simple chat\n * const response = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Hello!' }]\n * }\n * ],\n * model: 'gpt-4o',\n * });\n *\n * console.log(response.choices[0].message.content);\n * ```\n *\n * @example\n * ```typescript\n * // Generate an image\n * const response = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Generate a cute puppy photo' }]\n * }\n * ],\n * model: 'gpt-4o',\n * tools: [\n * {\n * type: 'function',\n * function: { name: 'seagen_text2image_flux1d_artifact_tool' }\n * }\n * ]\n * });\n *\n * // Access generated image\n * const imageUrl = response.artifacts?.[0]?.url;\n * ```\n *\n * @param params Request parameters\n * @returns Chat completion response\n */\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams & { stream?: false }\n): Promise<AgentChatCompletionResponse>;\n\n/**\n * Call SeaCloud Fast Agent API (streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, agentChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({\n * apiKey: 'your-api-key',\n * baseUrl: 'http://proxy.sg.seaverse.dev',\n * });\n *\n * const stream = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Tell me a story' }]\n * }\n * ],\n * model: 'gpt-4o',\n * stream: true,\n * });\n *\n * for await (const chunk of stream) {\n * const content = chunk.choices[0]?.delta?.content;\n * if (typeof content === 'string') {\n * process.stdout.write(content);\n * }\n *\n * // Handle artifacts\n * const artifacts = chunk.choices[0]?.delta?.artifacts;\n * if (artifacts) {\n * console.log('Generated artifacts:', artifacts);\n * }\n * }\n * ```\n *\n * @param params Request parameters with stream: true\n * @returns Async iterator of chat completion chunks\n */\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams & { stream: true }\n): Promise<AsyncIterable<AgentChatCompletionChunk>>;\n\n// Implementation\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams\n): Promise<AgentChatCompletionResponse | AsyncIterable<AgentChatCompletionChunk>> {\n const client = getClient();\n const config = client.getConfig();\n\n const url = `${config.baseUrl}/agent/api/v1/chat/completions`;\n\n // ============================================================================\n // PARAMETER PROCESSING LOGIC\n // ============================================================================\n\n /**\n * Default Model:\n * - If not specified, uses 'custom_openai/vertex-ai-claude-sonnet-4.5'\n * - This is Claude Sonnet 4.5, optimized for agent operations\n * - You can override with any supported model (gpt-4o, claude-4, etc.)\n */\n const model = params.model || 'custom_openai/vertex-ai-claude-sonnet-4.5';\n\n /**\n * Stream Mode:\n * - Agent API ALWAYS returns Server-Sent Events (SSE) format\n * - Even if params.stream=false, API returns SSE chunks\n * - We force stream: true in API request for consistency\n * - For non-streaming 
(params.stream=false), we collect all chunks internally\n */\n const userWantsStreaming = params.stream !== false; // Default to true\n const requestBody = {\n ...params,\n model,\n stream: true, // Always request SSE from API\n };\n\n // ============================================================================\n // TIMEOUT & REQUEST HANDLING\n // ============================================================================\n\n /**\n * Timeout Configuration:\n * - Default: 30 seconds (set in config)\n * - For agent operations (especially with tool calls like image generation):\n * Recommend setting timeout to 120000ms (2 minutes) or more\n * - Initialize SDK with custom timeout:\n * initSeacloud({ apiKey: '...', timeout: 120000 })\n */\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), config.timeout);\n\n try {\n /**\n * Request Headers:\n * - Content-Type: application/json (standard)\n * - Authorization: Bearer token for authentication\n * - X-Project: SeaArt (REQUIRED for agent API access)\n */\n const response = await config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.apiKey}`,\n 'X-Project': 'SeaArt', // Required header for agent API\n },\n body: JSON.stringify(requestBody),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n // ============================================================================\n // RESPONSE HANDLING\n // ============================================================================\n\n /**\n * Response Routing:\n * - If user wants streaming (params.stream !== false):\n * Return async iterator that yields chunks as they arrive\n *\n * - If user wants non-streaming (params.stream === false):\n * Internally collect all SSE chunks and return complete response\n *\n * Note: Both paths read SSE format from API, just different consumption patterns\n */\n if (userWantsStreaming) {\n return parseAgentStreamingResponse(response);\n }\n\n // Non-streaming: Collect all chunks and return complete response\n return await parseAgentNonStreamingResponse(response);\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n}\n\n/**\n * Parse non-streaming response (API returns SSE even with stream: false)\n */\nasync function parseAgentNonStreamingResponse(\n response: Response\n): Promise<AgentChatCompletionResponse> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let fullContent = '';\n let artifacts: AgentArtifact[] = [];\n let finishReason = '';\n let sessionId = '';\n let msgId = '';\n let lastChunk: AgentChatCompletionChunk | null = null;\n let usage: AgentUsage | undefined;\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || 
'';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n if (!trimmedLine) continue;\n if (trimmedLine === 'data: [DONE]') continue;\n if (trimmedLine === 'event: heartbeat') continue;\n\n if (trimmedLine.startsWith('data: ')) {\n const data = trimmedLine.slice(6).trim();\n\n try {\n const parsed = JSON.parse(data) as AgentChatCompletionChunk;\n lastChunk = parsed;\n\n const delta = parsed.choices?.[0]?.delta;\n\n if (delta?.content) {\n if (typeof delta.content === 'string') {\n fullContent += delta.content;\n } else if (Array.isArray(delta.content)) {\n for (const item of delta.content) {\n if (item.type === 'text' && item.text) {\n fullContent += item.text;\n }\n }\n }\n }\n\n if (delta?.artifacts) {\n artifacts.push(...delta.artifacts);\n }\n\n if (parsed.choices?.[0]?.finish_reason) {\n finishReason = parsed.choices[0].finish_reason;\n }\n\n if (parsed.session_id) {\n sessionId = parsed.session_id;\n }\n\n if (parsed.msg_id) {\n msgId = parsed.msg_id;\n }\n\n if (parsed.usage) {\n usage = parsed.usage;\n }\n } catch (e) {\n // Skip invalid JSON\n console.warn('Failed to parse SSE chunk:', data.substring(0, 100));\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n\n if (!lastChunk) {\n throw new SeacloudError('No valid response chunks received');\n }\n\n return {\n id: lastChunk.id,\n object: 'chat.completion',\n created: lastChunk.created,\n model: lastChunk.model,\n system_fingerprint: lastChunk.system_fingerprint,\n choices: [\n {\n index: 0,\n message: {\n role: 'assistant',\n content: fullContent,\n },\n finish_reason: finishReason || null,\n },\n ],\n usage,\n session_id: sessionId || undefined,\n msg_id: msgId || undefined,\n artifacts: artifacts.length > 0 ? artifacts : undefined,\n };\n}\n\n/**\n * Parse streaming response into async iterable\n */\nasync function* parseAgentStreamingResponse(\n response: Response\n): AsyncIterable<AgentChatCompletionChunk> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n // Skip empty lines and comments\n if (!trimmedLine || trimmedLine.startsWith(':')) continue;\n\n // Handle event type declarations (e.g., \"event: heartbeat\")\n if (trimmedLine.startsWith('event: ')) {\n const eventType = trimmedLine.slice(7).trim();\n if (eventType === 'heartbeat') {\n // Heartbeat events can be logged but don't yield\n continue;\n }\n }\n\n // Handle data events\n if (trimmedLine.startsWith('data: ')) {\n const data = trimmedLine.slice(6).trim();\n\n // Check for stream completion signal\n if (data === '[DONE]') {\n break;\n }\n\n try {\n const chunk = JSON.parse(data) as AgentChatCompletionChunk;\n yield chunk;\n } catch (error) {\n console.warn('Failed to parse SSE chunk:', data);\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n\n/**\n * Helper function to create a text message\n *\n * @param role - Message role ('user' | 'assistant' | 'developer' | 'tool')\n * @param text - Text content\n * @returns Agent message with text content\n *\n * @example\n * ```typescript\n * const message = createTextMessage('user', 'Hello!');\n * ```\n */\nexport 
function createTextMessage(role: AgentMessageRole, text: string): AgentMessage {\n return {\n role,\n content: [{ type: 'text', text }],\n };\n}\n\n/**\n * Helper function to create a message with image\n *\n * @param role - Message role ('user' | 'assistant' | 'developer' | 'tool')\n * @param text - Text content\n * @param imageUrl - Image URL\n * @returns Agent message with text and image content\n *\n * @example\n * ```typescript\n * const message = createImageMessage('user', 'What is this?', 'https://example.com/image.jpg');\n * ```\n */\nexport function createImageMessage(role: AgentMessageRole, text: string, imageUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'image', image_url: imageUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a message with video\n *\n * @param role - Message role\n * @param text - Text content\n * @param videoUrl - Video URL\n * @returns Agent message with text and video content\n */\nexport function createVideoMessage(role: AgentMessageRole, text: string, videoUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'video', video_url: videoUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a message with audio\n *\n * @param role - Message role\n * @param text - Text content\n * @param audioUrl - Audio URL\n * @returns Agent message with text and audio content\n */\nexport function createAudioMessage(role: AgentMessageRole, text: string, audioUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'audio', audio_url: audioUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a tool\n */\nexport function createTool(toolName: string): AgentTool {\n return {\n type: 'function',\n function: {\n name: toolName,\n },\n };\n}\n","#!/usr/bin/env node\n\nimport { SeacloudClient } from './core/index.js';\nimport { initSeacloud } from './core/global-config.js';\nimport { llmChatCompletions } from './api/llm_chat_completions.js';\nimport { agentChatCompletions, createTextMessage } from './api/agent_chat_completions.js';\nimport { readFileSync } from 'fs';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\ninterface CliOptions {\n apiKey?: string;\n baseUrl?: string;\n model?: string;\n params?: Record<string, any>;\n}\n\nfunction showHelp() {\n console.log(`\nSeaCloud CLI - Test AI models, LLM, and Agent from command line\n\nUsage:\n seacloud <command> [options]\n\nCommands:\n llm <prompt> Chat with LLM models\n agent <prompt> Chat with Fast Agent (supports image/video generation)\n <model> Test specific model generation\n\nLLM Options:\n --model <name> Model name (default: seaart-mix-sonnet-4-5)\n --stream Enable streaming mode\n --temperature <n> Temperature 0-2 (default: 0.7)\n --max-tokens <n> Max tokens to generate\n\nAgent Options:\n --agent-id <id> Agent ID (default: seagen_agent)\n --model <name> Model name (default: gpt-4o)\n --stream Enable streaming mode\n --session-id <id> Session ID for multi-turn conversation\n\nModel Generation Options:\n --api-key <key> API key (or set API_SERVICE_TOKEN env var)\n --base-url <url> Base URL (default: http://proxy.sg.seaverse.dev)\n --params <json> JSON parameters for the model\n\nExamples:\n # Chat with LLM (non-streaming)\n seacloud llm \"What is the capital of France?\"\n\n # Chat with LLM (streaming)\n seacloud llm \"Tell me a story\" --stream\n\n # 
Chat with LLM using specific model\n seacloud llm \"Hello\" --model deepseek-v3.1 --temperature 1.0\n\n # Chat with Agent (can generate images)\n seacloud agent \"Generate an image of a sunset\"\n\n # Chat with Agent (streaming)\n seacloud agent \"Create a cat image\" --stream\n\n # Test model generation\n seacloud flux_1_1_pro --params '{\"prompt\":\"a beautiful sunset\"}'\n\n # Use custom API key\n seacloud llm \"Hello\" --api-key sa-xxx\n\nEnvironment Variables:\n API_SERVICE_TOKEN API authentication token (required)\n SEACLOUD_BASE_URL Base URL for API endpoints\n`);\n}\n\nfunction parseArgs(args: string[]): CliOptions {\n const options: CliOptions = {};\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n\n switch (arg) {\n case '--api-key':\n options.apiKey = args[++i];\n break;\n case '--base-url':\n options.baseUrl = args[++i];\n break;\n case '--model':\n options.model = args[++i];\n break;\n case '--params':\n try {\n options.params = JSON.parse(args[++i]);\n } catch (e) {\n console.error('Error: Invalid JSON for --params');\n process.exit(1);\n }\n break;\n }\n }\n\n return options;\n}\n\nasync function testModel(model: string, options: CliOptions) {\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;\n const baseUrl = options.baseUrl || process.env.API_BASE_URL || 'http://localhost:8080';\n\n if (!apiKey) {\n console.error('Error: API key not provided. Use --api-key or set API_SERVICE_TOKEN env var');\n process.exit(1);\n }\n\n if (!options.params) {\n console.error('Error: --params required. Provide JSON parameters for the model');\n process.exit(1);\n }\n\n console.log(`Testing model: ${model}`);\n console.log(`Base URL: ${baseUrl}`);\n console.log(`Parameters:`, JSON.stringify(options.params, null, 2));\n console.log('');\n\n const client = new SeacloudClient({ apiKey, baseUrl });\n\n try {\n console.log('Creating task...');\n const task = await client.createTask('/model/v1/generation', {\n model,\n input: [{ params: options.params }],\n });\n\n console.log(`Task created: ${task.id}`);\n console.log(`Initial status: ${task.status}`);\n console.log('');\n\n if (task.status === 'failed') {\n console.error('Task failed immediately:', task.error);\n process.exit(1);\n }\n\n console.log('Polling for results...');\n let attempt = 0;\n const maxAttempts = 120;\n const intervalMs = 3000;\n\n while (attempt < maxAttempts) {\n attempt++;\n\n const result = await client.getTaskStatus('/model/v1/generation', task.id);\n process.stdout.write(`\\rAttempt ${attempt}/${maxAttempts} - Status: ${result.status} `);\n\n if (result.status === 'completed') {\n console.log('\\n\\nTask completed!');\n console.log('\\nResults:');\n console.log(JSON.stringify(result.output, null, 2));\n\n // Extract URLs if available\n if (result.output) {\n const urls: string[] = [];\n for (const item of result.output) {\n if (item.content) {\n for (const resource of item.content) {\n if (resource.url) {\n urls.push(resource.url);\n }\n }\n }\n }\n\n if (urls.length > 0) {\n console.log('\\nGenerated URLs:');\n urls.forEach((url, i) => {\n console.log(` ${i + 1}. 
${url}`);\n });\n }\n }\n\n process.exit(0);\n }\n\n if (result.status === 'failed') {\n console.log('\\n\\nTask failed!');\n console.error('Error:', result.error);\n process.exit(1);\n }\n\n await new Promise(resolve => setTimeout(resolve, intervalMs));\n }\n\n console.log('\\n\\nTimeout: Task did not complete within the time limit');\n process.exit(1);\n\n } catch (error: any) {\n console.error('\\nError:', error.message);\n process.exit(1);\n }\n}\n\nasync function runLlm(prompt: string, args: string[]) {\n const options: any = { stream: false, model: 'seaart-mix-sonnet-4-5', temperature: 0.7 };\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n if (arg === '--model') options.model = args[++i];\n else if (arg === '--stream') options.stream = true;\n else if (arg === '--temperature') options.temperature = parseFloat(args[++i]);\n else if (arg === '--max-tokens') options.maxTokens = parseInt(args[++i]);\n else if (arg === '--api-key') options.apiKey = args[++i];\n else if (arg === '--base-url') options.baseUrl = args[++i];\n }\n\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;\n const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev';\n\n if (!apiKey) {\n console.error('Error: API key not provided. Use --api-key or set API_SERVICE_TOKEN env var');\n process.exit(1);\n }\n\n initSeacloud(apiKey, { baseUrl, timeout: 120000 });\n\n console.log(`Model: ${options.model}`);\n console.log(`Prompt: ${prompt}\\n`);\n\n if (options.stream) {\n const stream = await llmChatCompletions({\n model: options.model,\n messages: [{ role: 'user', content: prompt }],\n stream: true,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n });\n\n process.stdout.write('Response: ');\n for await (const chunk of stream) {\n const content = chunk.choices[0]?.delta?.content;\n if (typeof content === 'string') process.stdout.write(content);\n }\n console.log('\\n');\n } else {\n const response = await llmChatCompletions({\n model: options.model,\n messages: [{ role: 'user', content: prompt }],\n stream: false,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n });\n\n console.log('Response:', response.choices[0].message.content);\n console.log('\\nUsage:', response.usage);\n }\n}\n\nasync function runAgent(prompt: string, args: string[]) {\n const options: any = {\n stream: false,\n model: 'gpt-4o',\n agentId: 'seagen_agent',\n };\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n if (arg === '--model') options.model = args[++i];\n else if (arg === '--stream') options.stream = true;\n else if (arg === '--agent-id') options.agentId = args[++i];\n else if (arg === '--session-id') options.sessionId = args[++i];\n else if (arg === '--api-key') options.apiKey = args[++i];\n else if (arg === '--base-url') options.baseUrl = args[++i];\n }\n\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN;\n const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev';\n\n if (!apiKey) {\n console.error('Error: API key not provided. 
Use --api-key or set API_SERVICE_TOKEN env var');\n process.exit(1);\n }\n\n initSeacloud(apiKey, { baseUrl, timeout: 300000 }); // 5 min for agent\n\n console.log(`Agent: ${options.agentId}`);\n console.log(`Model: ${options.model}`);\n console.log(`Prompt: ${prompt}\\n`);\n\n if (options.stream) {\n const stream = await agentChatCompletions({\n agent_id: options.agentId,\n messages: [createTextMessage('user', prompt)],\n model: options.model,\n stream: true,\n session_id: options.sessionId,\n seq: 0,\n });\n\n process.stdout.write('Response: ');\n for await (const chunk of stream) {\n const content = chunk.choices[0]?.delta?.content;\n if (typeof content === 'string') process.stdout.write(content);\n }\n console.log('\\n');\n } else {\n const response = await agentChatCompletions({\n agent_id: options.agentId,\n messages: [createTextMessage('user', prompt)],\n model: options.model,\n stream: false,\n session_id: options.sessionId,\n seq: 0,\n });\n\n console.log('Response:', response.choices[0].message.content);\n\n if (response.artifacts && response.artifacts.length > 0) {\n console.log('\\nGenerated Artifacts:');\n response.artifacts.forEach((artifact, i) => {\n console.log(` ${i + 1}. ${artifact.name}`);\n console.log(` URL: ${artifact.url}`);\n });\n }\n\n console.log('\\nSession ID:', response.session_id);\n console.log('Message ID:', response.msg_id);\n }\n}\n\nasync function main() {\n const args = process.argv.slice(2);\n\n if (args.length === 0 || args[0] === '--help' || args[0] === '-h') {\n showHelp();\n process.exit(0);\n }\n\n const command = args[0];\n\n try {\n if (command === 'llm') {\n if (args.length < 2) {\n console.error('Error: prompt required for llm command');\n console.log('Usage: seacloud llm \"<prompt>\" [options]');\n process.exit(1);\n }\n await runLlm(args[1], args.slice(2));\n } else if (command === 'agent') {\n if (args.length < 2) {\n console.error('Error: prompt required for agent command');\n console.log('Usage: seacloud agent \"<prompt>\" [options]');\n process.exit(1);\n }\n await runAgent(args[1], args.slice(2));\n } else {\n // Model generation command\n const model = command;\n\n if (model.startsWith('--')) {\n console.error('Error: command or model name required');\n console.log('Usage: seacloud <command> [options]');\n process.exit(1);\n }\n\n const options = parseArgs(args.slice(1));\n await testModel(model, options);\n }\n } catch (error: any) {\n console.error('\\nError:', error.message);\n process.exit(1);\n }\n}\n\nmain().catch(console.error);\n"]}
|
|
1
|
+
{"version":3,"sources":["../src/core/types.ts","../src/core/config.ts","../src/core/client.ts","../src/core/global-config.ts","../src/api/llm_chat_completions.ts","../src/api/agent_chat_completions.ts","../src/cli.ts"],"names":["__filename"],"mappings":";;;;;AA4EO,IAAM,aAAA,GAAN,cAA4B,KAAA,CAAM;AAAA,EACvC,WAAA,CACE,OAAA,EACO,UAAA,EACA,QAAA,EACP;AACA,IAAA,KAAA,CAAM,OAAO,CAAA;AAHN,IAAA,IAAA,CAAA,UAAA,GAAA,UAAA;AACA,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AAGP,IAAA,IAAA,CAAK,IAAA,GAAO,eAAA;AAAA,EACd;AACF,CAAA;;;AC9EO,SAAS,YAAA,CAAa,OAAA,GAA0B,EAAC,EAA6B;AAEnF,EAAA,MAAM,MAAA,GAAS,QAAQ,MAAA,KACP,OAAO,YAAY,WAAA,GAAc,OAAA,CAAQ,GAAA,EAAK,iBAAA,GAAoB,MAAA,CAAA,IACnE,EAAA;AAGf,EAAA,MAAM,OAAA,GAAU,QAAQ,OAAA,KACP,OAAO,YAAY,WAAA,GAAc,OAAA,CAAQ,GAAA,EAAK,iBAAA,GAAoB,MAAA,CAAA,IACnE,8BAAA;AAGhB,EAAA,MAAM,SAAA,GAAY,OAAA,CAAQ,KAAA,IAAS,UAAA,CAAW,KAAA;AAE9C,EAAA,IAAI,CAAC,SAAA,EAAW;AACd,IAAA,MAAM,IAAI,MAAM,mGAAmG,CAAA;AAAA,EACrH;AAEA,EAAA,OAAO;AAAA,IACL,MAAA;AAAA,IACA,OAAA;AAAA,IACA,KAAA,EAAO,SAAA;AAAA,IACP,OAAA,EAAS,QAAQ,OAAA,IAAW;AAAA,GAC9B;AACF;AAOO,SAAS,eAAe,MAAA,EAAwC;AACrE,EAAA,IAAI,CAAC,OAAO,MAAA,EAAQ;AAClB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAEA,EAAA,IAAI,CAAC,OAAO,OAAA,EAAS;AACnB,IAAA,MAAM,IAAI,MAAM,qBAAqB,CAAA;AAAA,EACvC;AAEA,EAAA,IAAI,OAAO,MAAA,CAAO,KAAA,KAAU,UAAA,EAAY;AACtC,IAAA,MAAM,IAAI,MAAM,0BAA0B,CAAA;AAAA,EAC5C;AACF;;;AC5CO,IAAM,iBAAN,MAAqB;AAAA,EAG1B,WAAA,CAAY,MAAA,GAAyB,EAAC,EAAG;AACvC,IAAA,IAAA,CAAK,MAAA,GAAS,aAAa,MAAM,CAAA;AACjC,IAAA,cAAA,CAAe,KAAK,MAAM,CAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,UAAA,CAAW,QAAA,EAAkB,IAAA,EAAgD;AACjF,IAAA,MAAM,MAAM,CAAA,EAAG,IAAA,CAAK,MAAA,CAAO,OAAO,GAAG,QAAQ,CAAA,CAAA;AAE7C,IAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,IAAA,MAAM,SAAA,GAAY,WAAW,MAAM,UAAA,CAAW,OAAM,EAAG,IAAA,CAAK,OAAO,OAAO,CAAA;AAE1E,IAAA,IAAI;AACF,MAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,MAAA,CAAO,MAAM,GAAA,EAAK;AAAA,QAC5C,MAAA,EAAQ,MAAA;AAAA,QACR,OAAA,EAAS;AAAA,UACP,cAAA,EAAgB,kBAAA;AAAA,UAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AAAA,SAC/C;AAAA,QACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI,CAAA;AAAA,QACzB,QAAQ,UAAA,CAAW;AAAA,OACpB,CAAA;AAED,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,QAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,QAAA,MAAM,IAAI,aAAA;AAAA,UACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,UACrC,QAAA,CAAS,MAAA;AAAA,UACT;AAAA,SACF;AAAA,MACF;AAEA,MAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,MAAA,OAAO,MAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,QAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,MAC1E;AAEA,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,QAC3C,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aAAA,CAAc,QAAA,EAAkB,MAAA,EAAqC;AACzE,IAAA,MAAM,GAAA,GAAM,GAAG,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAG,QAAQ,SAAS,MAAM,CAAA,CAAA;AAE5D,IAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,IAAA,MAAM,SAAA,GAAY,WAAW,MAAM,UAAA,CAAW,OAAM,EAAG,IAAA,CAAK,OAAO,OAAO,CAAA;AAE1E,IAAA,IAAI;AACF,MAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,MAAA,CAAO,MAAM,GAAA,EAAK;AAAA,QAC5C,MAAA,EAAQ,KAAA;AAAA,QACR,OAAA,EAAS;AAAA,UACP,eAAA,EAAiB,CAAA,OAAA,EAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA;AAAA,SAC/C;AAAA,QACA,QAAQ,UAAA,CAAW;AAAA,OACpB,CAAA;AAED,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,QAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,QAAA,MAAM,IAAI,aAAA;AAAA,UACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,C
AAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,UACrC,QAAA,CAAS,MAAA;AAAA,UACT;AAAA,SACF;AAAA,MACF;AAEA,MAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,MAAA,OAAO,MAAA;AAAA,IACT,SAAS,KAAA,EAAO;AACd,MAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,MAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,QAAA,MAAM,KAAA;AAAA,MACR;AAEA,MAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,QAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,IAAA,CAAK,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,MAC1E;AAEA,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,QAC3C,MAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAA,GAAgD;AAC9C,IAAA,OAAO,EAAE,GAAG,IAAA,CAAK,MAAA,EAAO;AAAA,EAC1B;AACF,CAAA;;;ACpHA,IAAM,YAAA,GAA6B;AAAA,EACjC,MAAA,EAAQ,IAAA;AAAA,EACR,qBAAA,EAAuB;AAAA,IACrB,UAAA,EAAY,GAAA;AAAA;AAAA,IACZ,WAAA,EAAa;AAAA;AAAA;AAEjB,CAAA;AAOO,SAAS,YAAA,CAAa,QAAgB,OAAA,EAK1C;AACD,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,MAAM,sDAAsD,CAAA;AAAA,EACxE;AAGA,EAAA,YAAA,CAAa,MAAA,GAAS,IAAI,cAAA,CAAO;AAAA,IAC/B,MAAA;AAAA,IACA,SAAS,OAAA,EAAS,OAAA;AAAA,IAClB,SAAS,OAAA,EAAS;AAAA,GACnB,CAAA;AAGD,EAAA,IAAI,OAAA,EAAS,eAAe,MAAA,EAAW;AACrC,IAAA,YAAA,CAAa,qBAAA,CAAsB,aAAa,OAAA,CAAQ,UAAA;AAAA,EAC1D;AACA,EAAA,IAAI,OAAA,EAAS,gBAAgB,MAAA,EAAW;AACtC,IAAA,YAAA,CAAa,qBAAA,CAAsB,cAAc,OAAA,CAAQ,WAAA;AAAA,EAC3D;AAEA,EAAA,OAAO,YAAA,CAAa,MAAA;AACtB;AAMO,SAAS,SAAA,GAA4B;AAC1C,EAAA,IAAI,CAAC,aAAa,MAAA,EAAQ;AACxB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACA,EAAA,OAAO,YAAA,CAAa,MAAA;AACtB;;;ACwRA,eAAsB,mBACpB,MAAA,EACsE;AACtE,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAM,MAAA,GAAS,OAAO,SAAA,EAAU;AAEhC,EAAA,MAAM,GAAA,GAAM,CAAA,EAAG,MAAA,CAAO,OAAO,CAAA,qBAAA,CAAA;AAE7B,EAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,EAAA,MAAM,YAAY,UAAA,CAAW,MAAM,WAAW,KAAA,EAAM,EAAG,OAAO,OAAO,CAAA;AAErE,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,MAAM,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK;AAAA,MACvC,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA;AAAA,OAC1C;AAAA,MACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,MAAM,CAAA;AAAA,MAC3B,QAAQ,UAAA,CAAW;AAAA,KACpB,CAAA;AAED,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,QACrC,QAAA,CAAS,MAAA;AAAA,QACT;AAAA,OACF;AAAA,IACF;AAGA,IAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,MAAA,OAAO,uBAAuB,QAAQ,CAAA;AAAA,IACxC;AAGA,IAAA,MAAM,MAAA,GAAS,MAAM,QAAA,CAAS,IAAA,EAAK;AACnC,IAAA,OAAO,MAAA;AAAA,EACT,SAAS,KAAA,EAAO;AACd,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,MAAA,MAAM,KAAA;AAAA,IACR;AAEA,IAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,MAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,IACrE;AAEA,IAAA,MAAM,IAAI,aAAA;AAAA,MACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,MAC3C,MAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACF;AAKA,gBAAgB,uBACd,QAAA,EACoC;AACpC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAE9B,QAAA,IAAI,CAAC,WAAA,EAAa;AAClB,QAAA,IAAI,gBAAgB,cAAA,EAAgB;AAEpC,QAAA,IAAI,WAAA,CAAY,UAAA,
CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,OAAA,GAAU,WAAA,CAAY,KAAA,CAAM,CAAC,CAAA;AACnC,UAAA,IAAI;AACF,YAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAChC,YAAA,MAAM,KAAA;AAAA,UACR,SAAS,KAAA,EAAO;AACd,YAAA,OAAA,CAAQ,IAAA,CAAK,8BAA8B,OAAO,CAAA;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AACF;;;ACgcA,eAAsB,qBACpB,MAAA,EACgF;AAChF,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAM,MAAA,GAAS,OAAO,SAAA,EAAU;AAEhC,EAAA,MAAM,GAAA,GAAM,CAAA,EAAG,MAAA,CAAO,OAAO,CAAA,8BAAA,CAAA;AAY7B,EAAA,MAAM,KAAA,GAAQ,OAAO,KAAA,IAAS,2CAAA;AAS9B,EAAA,MAAM,kBAAA,GAAqB,OAAO,MAAA,KAAW,KAAA;AAC7C,EAAA,MAAM,WAAA,GAAc;AAAA,IAClB,GAAG,MAAA;AAAA,IACH,KAAA;AAAA,IACA,MAAA,EAAQ;AAAA;AAAA,GACV;AAcA,EAAA,MAAM,UAAA,GAAa,IAAI,eAAA,EAAgB;AACvC,EAAA,MAAM,YAAY,UAAA,CAAW,MAAM,WAAW,KAAA,EAAM,EAAG,OAAO,OAAO,CAAA;AAErE,EAAA,IAAI;AAOF,IAAA,MAAM,QAAA,GAAW,MAAM,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK;AAAA,MACvC,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,eAAA,EAAiB,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,QACxC,WAAA,EAAa;AAAA;AAAA,OACf;AAAA,MACA,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,WAAW,CAAA;AAAA,MAChC,QAAQ,UAAA,CAAW;AAAA,KACpB,CAAA;AAED,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAY,MAAM,QAAA,CAAS,IAAA,EAAK;AACtC,MAAA,MAAM,IAAI,aAAA;AAAA,QACR,CAAA,KAAA,EAAQ,QAAA,CAAS,MAAM,CAAA,EAAA,EAAK,SAAS,CAAA,CAAA;AAAA,QACrC,QAAA,CAAS,MAAA;AAAA,QACT;AAAA,OACF;AAAA,IACF;AAgBA,IAAA,IAAI,kBAAA,EAAoB;AACtB,MAAA,OAAO,4BAA4B,QAAQ,CAAA;AAAA,IAC7C;AAGA,IAAA,OAAO,MAAM,+BAA+B,QAAQ,CAAA;AAAA,EACtD,SAAS,KAAA,EAAO;AACd,IAAA,YAAA,CAAa,SAAS,CAAA;AAEtB,IAAA,IAAI,iBAAiB,aAAA,EAAe;AAClC,MAAA,MAAM,KAAA;AAAA,IACR;AAEA,IAAA,IAAK,KAAA,CAAgB,SAAS,YAAA,EAAc;AAC1C,MAAA,MAAM,IAAI,aAAA,CAAc,CAAA,sBAAA,EAAyB,MAAA,CAAO,OAAO,CAAA,EAAA,CAAI,CAAA;AAAA,IACrE;AAEA,IAAA,MAAM,IAAI,aAAA;AAAA,MACR,CAAA,gBAAA,EAAoB,MAAgB,OAAO,CAAA,CAAA;AAAA,MAC3C,MAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACF;AAKA,eAAe,+BACb,QAAA,EACsC;AACtC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AACb,EAAA,IAAI,WAAA,GAAc,EAAA;AAClB,EAAA,IAAI,YAA6B,EAAC;AAClC,EAAA,IAAI,YAAA,GAAe,EAAA;AACnB,EAAA,IAAI,SAAA,GAAY,EAAA;AAChB,EAAA,IAAI,KAAA,GAAQ,EAAA;AACZ,EAAA,IAAI,SAAA,GAA6C,IAAA;AACjD,EAAA,IAAI,KAAA;AAEJ,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAE9B,QAAA,IAAI,CAAC,WAAA,EAAa;AAClB,QAAA,IAAI,gBAAgB,cAAA,EAAgB;AACpC,QAAA,IAAI,gBAAgB,kBAAA,EAAoB;AAExC,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,IAAA,GAAO,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAEvC,UAAA,IAAI;AACF,YAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC9B,YAAA,SAAA,GAAY,MAAA;AAEZ,YAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,OAAA,GAAU,CAAC,CAAA,EAAG,KAAA;AAEnC,YAAA,IAAI,OAAO,OAAA,EAAS;AAClB,cAAA,IAAI,OAAO,KAAA,CAAM,OAAA,KAAY,QAAA,EAAU;AACrC,gBAAA,WAAA,IAAe,KAAA,CAAM,OAAA;AAAA,cACvB,CAAA,MAAA,IAAW,KAAA,CAAM,OAAA,CAAQ,KAAA,CAAM,OAAO,CAAA,EAAG;AACvC,gBAAA,KAAA,MAAW,IAAA,IAAQ,MAAM,OAAA,EAAS;AAChC,kBAAA,IAAI,IAAA,CAAK,IAAA,KAAS,MAAA,IAAU,IAAA,CAAK,IAAA,EAAM;AACrC,oBAAA,WAAA,IAAe,IAAA,CAAK,IAAA;AAAA,kBACtB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,YAAA,IAAI,OAAO,SAAA,EAAW;AACpB,cAA
A,SAAA,CAAU,IAAA,CAAK,GAAG,KAAA,CAAM,SAAS,CAAA;AAAA,YACnC;AAEA,YAAA,IAAI,MAAA,CAAO,OAAA,GAAU,CAAC,CAAA,EAAG,aAAA,EAAe;AACtC,cAAA,YAAA,GAAe,MAAA,CAAO,OAAA,CAAQ,CAAC,CAAA,CAAE,aAAA;AAAA,YACnC;AAEA,YAAA,IAAI,OAAO,UAAA,EAAY;AACrB,cAAA,SAAA,GAAY,MAAA,CAAO,UAAA;AAAA,YACrB;AAEA,YAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,cAAA,KAAA,GAAQ,MAAA,CAAO,MAAA;AAAA,YACjB;AAEA,YAAA,IAAI,OAAO,KAAA,EAAO;AAChB,cAAA,KAAA,GAAQ,MAAA,CAAO,KAAA;AAAA,YACjB;AAAA,UACF,SAAS,CAAA,EAAG;AAEV,YAAA,OAAA,CAAQ,KAAK,4BAAA,EAA8B,IAAA,CAAK,SAAA,CAAU,CAAA,EAAG,GAAG,CAAC,CAAA;AAAA,UACnE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AAEA,EAAA,IAAI,CAAC,SAAA,EAAW;AACd,IAAA,MAAM,IAAI,cAAc,mCAAmC,CAAA;AAAA,EAC7D;AAEA,EAAA,OAAO;AAAA,IACL,IAAI,SAAA,CAAU,EAAA;AAAA,IACd,MAAA,EAAQ,iBAAA;AAAA,IACR,SAAS,SAAA,CAAU,OAAA;AAAA,IACnB,OAAO,SAAA,CAAU,KAAA;AAAA,IACjB,oBAAoB,SAAA,CAAU,kBAAA;AAAA,IAC9B,OAAA,EAAS;AAAA,MACP;AAAA,QACE,KAAA,EAAO,CAAA;AAAA,QACP,OAAA,EAAS;AAAA,UACP,IAAA,EAAM,WAAA;AAAA,UACN,OAAA,EAAS;AAAA,SACX;AAAA,QACA,eAAe,YAAA,IAAgB;AAAA;AACjC,KACF;AAAA,IACA,KAAA;AAAA,IACA,YAAY,SAAA,IAAa,MAAA;AAAA,IACzB,QAAQ,KAAA,IAAS,MAAA;AAAA,IACjB,SAAA,EAAW,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY;AAAA,GAChD;AACF;AAKA,gBAAgB,4BACd,QAAA,EACyC;AACzC,EAAA,MAAM,MAAA,GAAS,QAAA,CAAS,IAAA,EAAM,SAAA,EAAU;AACxC,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,cAAc,+BAA+B,CAAA;AAAA,EACzD;AAEA,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,EAAA,IAAI,MAAA,GAAS,EAAA;AAEb,EAAA,IAAI;AACF,IAAA,OAAO,IAAA,EAAM;AACX,MAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,MAAA,IAAI,IAAA,EAAM;AACR,QAAA;AAAA,MACF;AAEA,MAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,MAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAG/B,MAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,QAAA,MAAM,WAAA,GAAc,KAAK,IAAA,EAAK;AAG9B,QAAA,IAAI,CAAC,WAAA,IAAe,WAAA,CAAY,UAAA,CAAW,GAAG,CAAA,EAAG;AAGjD,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,SAAS,CAAA,EAAG;AACrC,UAAA,MAAM,SAAA,GAAY,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAC5C,UAAA,IAAI,cAAc,WAAA,EAAa;AAE7B,YAAA;AAAA,UACF;AAAA,QACF;AAGA,QAAA,IAAI,WAAA,CAAY,UAAA,CAAW,QAAQ,CAAA,EAAG;AACpC,UAAA,MAAM,IAAA,GAAO,WAAA,CAAY,KAAA,CAAM,CAAC,EAAE,IAAA,EAAK;AAGvC,UAAA,IAAI,SAAS,QAAA,EAAU;AACrB,YAAA;AAAA,UACF;AAEA,UAAA,IAAI;AACF,YAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC7B,YAAA,MAAM,KAAA;AAAA,UACR,SAAS,KAAA,EAAO;AACd,YAAA,OAAA,CAAQ,IAAA,CAAK,8BAA8B,IAAI,CAAA;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAA,SAAE;AACA,IAAA,MAAA,CAAO,WAAA,EAAY;AAAA,EACrB;AACF;AAcO,SAAS,iBAAA,CAAkB,MAAwB,IAAA,EAA4B;AACpF,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,SAAS,CAAC,EAAE,IAAA,EAAM,MAAA,EAAQ,MAAM;AAAA,GAClC;AACF;AC5rCA,IAAMA,YAAA,GAAa,aAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAC9B,QAAQA,YAAU;AASpC,SAAS,QAAA,GAAW;AAClB,EAAA,OAAA,CAAQ,GAAA,CAAI;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,CAqDb,CAAA;AACD;AAEA,SAAS,UAAU,IAAA,EAA4B;AAC7C,EAAA,MAAM,UAAsB,EAAC;AAE7B,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAElB,IAAA,QAAQ,GAAA;AAAK,MACX,KAAK,WAAA;AACH,QAAA,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AACzB,QAAA;AAAA,MACF,KAAK,YAAA;AACH,QAAA,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAC1B,QAAA;AAAA,MACF,KAAK,SAAA;AACH,QAAA,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AACxB,QAAA;AAAA,MACF,KAAK,UAAA;AACH,QAAA,IAAI;AACF,UAAA,OAAA,CAAQ,SAAS,IAAA,CAAK,KAAA,CAAM,IAAA,C
AAK,EAAE,CAAC,CAAC,CAAA;AAAA,QACvC,SAAS,CAAA,EAAG;AACV,UAAA,OAAA,CAAQ,MAAM,kCAAkC,CAAA;AAChD,UAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,QAChB;AACA,QAAA;AAAA;AACJ,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAEA,eAAe,SAAA,CAAU,OAAe,OAAA,EAAqB;AAC3D,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,IAAI,iBAAA,IAAqB,EAAA;AAClE,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,YAAA,IAAgB,uBAAA;AAE/D,EAAA,IAAI,CAAC,QAAQ,MAAA,EAAQ;AACnB,IAAA,OAAA,CAAQ,MAAM,iEAAiE,CAAA;AAC/E,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,eAAA,EAAkB,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,OAAO,CAAA,CAAE,CAAA;AAClC,EAAA,OAAA,CAAQ,GAAA,CAAI,eAAe,IAAA,CAAK,SAAA,CAAU,QAAQ,MAAA,EAAQ,IAAA,EAAM,CAAC,CAAC,CAAA;AAClE,EAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAEd,EAAA,MAAM,SAAS,IAAI,cAAA,CAAe,EAAE,MAAA,EAAQ,SAAS,CAAA;AAErD,EAAA,IAAI;AACF,IAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAC9B,IAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,UAAA,CAAW,sBAAA,EAAwB;AAAA,MAC3D,KAAA;AAAA,MACA,OAAO,CAAC,EAAE,MAAA,EAAQ,OAAA,CAAQ,QAAQ;AAAA,KACnC,CAAA;AAED,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,cAAA,EAAiB,IAAA,CAAK,EAAE,CAAA,CAAE,CAAA;AACtC,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,gBAAA,EAAmB,IAAA,CAAK,MAAM,CAAA,CAAE,CAAA;AAC5C,IAAA,OAAA,CAAQ,IAAI,EAAE,CAAA;AAEd,IAAA,IAAI,IAAA,CAAK,WAAW,QAAA,EAAU;AAC5B,MAAA,OAAA,CAAQ,KAAA,CAAM,0BAAA,EAA4B,IAAA,CAAK,KAAK,CAAA;AACpD,MAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,IAChB;AAEA,IAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,MAAM,WAAA,GAAc,GAAA;AACpB,IAAA,MAAM,UAAA,GAAa,GAAA;AAEnB,IAAA,OAAO,UAAU,WAAA,EAAa;AAC5B,MAAA,OAAA,EAAA;AAEA,MAAA,MAAM,SAAS,MAAM,MAAA,CAAO,aAAA,CAAc,sBAAA,EAAwB,KAAK,EAAE,CAAA;AACzE,MAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,CAAA,UAAA,EAAa,OAAO,IAAI,WAAW,CAAA,WAAA,EAAc,MAAA,CAAO,MAAM,CAAA,IAAA,CAAM,CAAA;AAEzF,MAAA,IAAI,MAAA,CAAO,WAAW,WAAA,EAAa;AACjC,QAAA,OAAA,CAAQ,IAAI,qBAAqB,CAAA;AACjC,QAAA,OAAA,CAAQ,IAAI,YAAY,CAAA;AACxB,QAAA,OAAA,CAAQ,IAAI,IAAA,CAAK,SAAA,CAAU,OAAO,MAAA,EAAQ,IAAA,EAAM,CAAC,CAAC,CAAA;AAGlD,QAAA,IAAI,OAAO,MAAA,EAAQ;AACjB,UAAA,MAAM,OAAiB,EAAC;AACxB,UAAA,KAAA,MAAW,IAAA,IAAQ,OAAO,MAAA,EAAQ;AAChC,YAAA,IAAI,KAAK,OAAA,EAAS;AAChB,cAAA,KAAA,MAAW,QAAA,IAAY,KAAK,OAAA,EAAS;AACnC,gBAAA,IAAI,SAAS,GAAA,EAAK;AAChB,kBAAA,IAAA,CAAK,IAAA,CAAK,SAAS,GAAG,CAAA;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAEA,UAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,YAAA,OAAA,CAAQ,IAAI,mBAAmB,CAAA;AAC/B,YAAA,IAAA,CAAK,OAAA,CAAQ,CAAC,GAAA,EAAK,CAAA,KAAM;AACvB,cAAA,OAAA,CAAQ,IAAI,CAAA,EAAA,EAAK,CAAA,GAAI,CAAC,CAAA,EAAA,EAAK,GAAG,CAAA,CAAE,CAAA;AAAA,YAClC,CAAC,CAAA;AAAA,UACH;AAAA,QACF;AAEA,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,IAAI,MAAA,CAAO,WAAW,QAAA,EAAU;AAC9B,QAAA,OAAA,CAAQ,IAAI,kBAAkB,CAAA;AAC9B,QAAA,OAAA,CAAQ,KAAA,CAAM,QAAA,EAAU,MAAA,CAAO,KAAK,CAAA;AACpC,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,MAAM,IAAI,OAAA,CAAQ,CAAA,OAAA,KAAW,UAAA,CAAW,OAAA,EAAS,UAAU,CAAC,CAAA;AAAA,IAC9D;AAEA,IAAA,OAAA,CAAQ,IAAI,0DAA0D,CAAA;AACtE,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAEhB,SAAS,KAAA,EAAY;AACnB,IAAA,OAAA,CAAQ,KAAA,CAAM,UAAA,EAAY,KAAA,CAAM,OAAO,CAAA;AACvC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF;AAEA,eAAe,MAAA,CAAO,QAAgB,IAAA,EAAgB;AACpD,EAAA,MAAM,UAAe,EAAE,MAAA,EAAQ,OAAO,KAAA,EAAO,uBAAA,EAAyB,aAAa,GAAA,EAAI;AAEvF,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAClB,IAAA,IAAI,QAAQ,SAAA,EAAW,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACtC,GAAA,KAAQ,UAAA,EAAY,OAAA,CAAQ,MAAA,GAAS,IAAA;AAAA,SAAA,IACrC,GAAA,KAAQ,iBAAiB,OAAA,CAAQ,WAAA,GAAc,WAAW,IAAA,CAAK,EAAE,CAAC,CAAC,CAAA;AAAA,SAAA,IACnE,GAAA,KAAQ,gBAAgB,OAAA,CAAQ,SAAA,GAAY,SAAS,IAA
A,CAAK,EAAE,CAAC,CAAC,CAAA;AAAA,SAAA,IAC9D,QAAQ,WAAA,EAAa,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAC9C,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,IAAI,iBAAA,IAAqB,EAAA;AAClE,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,iBAAA,IAAqB,8BAAA;AAEpE,EAAA,YAAA,CAAa,MAAA,EAAQ,EAAE,OAAA,EAAS,OAAA,EAAS,MAAQ,CAAA;AAEjD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,WAAW,MAAM;AAAA,CAAI,CAAA;AAEjC,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,kBAAA,CAAmB;AAAA,MACtC,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,QAAQ,CAAA;AAAA,MAC5C,MAAA,EAAQ,IAAA;AAAA,MACR,aAAa,OAAA,CAAQ,WAAA;AAAA,MACrB,YAAY,OAAA,CAAQ;AAAA,KACrB,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,YAAY,CAAA;AACjC,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,OAAA,CAAQ,CAAC,GAAG,KAAA,EAAO,OAAA;AACzC,MAAA,IAAI,OAAO,OAAA,KAAY,QAAA,EAAU,OAAA,CAAQ,MAAA,CAAO,MAAM,OAAO,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,IAAI,IAAI,CAAA;AAAA,EAClB,CAAA,MAAO;AACL,IAAA,MAAM,QAAA,GAAW,MAAM,kBAAA,CAAmB;AAAA,MACxC,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,QAAQ,CAAA;AAAA,MAC5C,MAAA,EAAQ,KAAA;AAAA,MACR,aAAa,OAAA,CAAQ,WAAA;AAAA,MACrB,YAAY,OAAA,CAAQ;AAAA,KACrB,CAAA;AAED,IAAA,OAAA,CAAQ,IAAI,WAAA,EAAa,QAAA,CAAS,QAAQ,CAAC,CAAA,CAAE,QAAQ,OAAO,CAAA;AAC5D,IAAA,OAAA,CAAQ,GAAA,CAAI,UAAA,EAAY,QAAA,CAAS,KAAK,CAAA;AAAA,EACxC;AACF;AAEA,eAAe,QAAA,CAAS,QAAgB,IAAA,EAAgB;AACtD,EAAA,MAAM,OAAA,GAAe;AAAA,IACnB,MAAA,EAAQ,KAAA;AAAA,IACR,KAAA,EAAO,QAAA;AAAA,IACP,OAAA,EAAS;AAAA,GACX;AAEA,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,QAAQ,CAAA,EAAA,EAAK;AACpC,IAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAClB,IAAA,IAAI,QAAQ,SAAA,EAAW,OAAA,CAAQ,KAAA,GAAQ,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACtC,GAAA,KAAQ,UAAA,EAAY,OAAA,CAAQ,MAAA,GAAS,IAAA;AAAA,SAAA,IACrC,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAChD,QAAQ,cAAA,EAAgB,OAAA,CAAQ,SAAA,GAAY,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IACpD,QAAQ,WAAA,EAAa,OAAA,CAAQ,MAAA,GAAS,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,SAAA,IAC9C,QAAQ,YAAA,EAAc,OAAA,CAAQ,OAAA,GAAU,IAAA,CAAK,EAAE,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,MAAA,IAAU,OAAA,CAAQ,IAAI,iBAAA,IAAqB,EAAA;AAClE,EAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,OAAA,IAAW,OAAA,CAAQ,IAAI,iBAAA,IAAqB,8BAAA;AAEpE,EAAA,YAAA,CAAa,MAAA,EAAQ,EAAE,OAAA,EAAS,OAAA,EAAS,KAAQ,CAAA;AAEjD,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,OAAO,CAAA,CAAE,CAAA;AACvC,EAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,OAAA,EAAU,OAAA,CAAQ,KAAK,CAAA,CAAE,CAAA;AACrC,EAAA,OAAA,CAAQ,GAAA,CAAI,WAAW,MAAM;AAAA,CAAI,CAAA;AAEjC,EAAA,IAAI,QAAQ,MAAA,EAAQ;AAClB,IAAA,MAAM,MAAA,GAAS,MAAM,oBAAA,CAAqB;AAAA,MACxC,UAAU,OAAA,CAAQ,OAAA;AAAA,MAClB,QAAA,EAAU,CAAC,iBAAA,CAAkB,MAAA,EAAQ,MAAM,CAAC,CAAA;AAAA,MAC5C,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,MAAA,EAAQ,IAAA;AAAA,MACR,YAAY,OAAA,CAAQ,SAAA;AAAA,MACpB,GAAA,EAAK;AAAA,KACN,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,MAAM,YAAY,CAAA;AACjC,IAAA,WAAA,MAAiB,SAAS,MAAA,EAAQ;AAChC,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,OAAA,CAAQ,CAAC,GAAG,KAAA,EAAO,OAAA;AACzC,MAAA,IAAI,OAAO,OAAA,KAAY,QAAA,EAAU,OAAA,CAAQ,MAAA,CAAO,MAAM,OAAO,CAAA;AAAA,IAC/D;AACA,IAAA,OAAA,CAAQ,IAAI,IAAI,CAAA;AAAA,EAClB,CAAA,MAAO;AACL,IAAA,MAAM,QAAA,GAAW,MAAM,oBAAA,CAAqB;AAAA,MAC1C,UAAU,OAAA,CAAQ,OAAA;AAAA,MAClB,QAAA,EAAU,CAAC,iBAAA,CAAkB,MAAA,EAAQ,MAAM,CAAC,CAAA;AAAA,MAC5C,OAAO,OAAA,CAAQ,KAAA;AAAA,MACf,MAAA,EAAQ,KAAA;AAAA,MACR,YAAY,OAAA,CAAQ,SAAA;AAAA,MACpB,GAAA,EAAK;AAAA,KACN,CAAA;AAED,IAAA,OAAA,CAAQ,IAAI,WAAA,EAAa,QAAA,CAAS,QAAQ,CAAC,CAAA,CAAE,QAAQ,OA
AO,CAAA;AAE5D,IAAA,IAAI,QAAA,CAAS,SAAA,IAAa,QAAA,CAAS,SAAA,CAAU,SAAS,CAAA,EAAG;AACvD,MAAA,OAAA,CAAQ,IAAI,wBAAwB,CAAA;AACpC,MAAA,QAAA,CAAS,SAAA,CAAU,OAAA,CAAQ,CAAC,QAAA,EAAU,CAAA,KAAM;AAC1C,QAAA,OAAA,CAAQ,IAAI,CAAA,EAAA,EAAK,CAAA,GAAI,CAAC,CAAA,EAAA,EAAK,QAAA,CAAS,IAAI,CAAA,CAAE,CAAA;AAC1C,QAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,UAAA,EAAa,QAAA,CAAS,GAAG,CAAA,CAAE,CAAA;AAAA,MACzC,CAAC,CAAA;AAAA,IACH;AAEA,IAAA,OAAA,CAAQ,GAAA,CAAI,eAAA,EAAiB,QAAA,CAAS,UAAU,CAAA;AAChD,IAAA,OAAA,CAAQ,GAAA,CAAI,aAAA,EAAe,QAAA,CAAS,MAAM,CAAA;AAAA,EAC5C;AACF;AAEA,eAAe,IAAA,GAAO;AACpB,EAAA,MAAM,IAAA,GAAO,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA;AAEjC,EAAA,IAAI,IAAA,CAAK,MAAA,KAAW,CAAA,IAAK,IAAA,CAAK,CAAC,MAAM,QAAA,IAAY,IAAA,CAAK,CAAC,CAAA,KAAM,IAAA,EAAM;AACjE,IAAA,QAAA,EAAS;AACT,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AAEA,EAAA,MAAM,OAAA,GAAU,KAAK,CAAC,CAAA;AAEtB,EAAA,IAAI;AACF,IAAA,IAAI,YAAY,KAAA,EAAO;AACrB,MAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,QAAA,OAAA,CAAQ,MAAM,wCAAwC,CAAA;AACtD,QAAA,OAAA,CAAQ,IAAI,0CAA0C,CAAA;AACtD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AACA,MAAA,MAAM,OAAO,IAAA,CAAK,CAAC,GAAG,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AAAA,IACrC,CAAA,MAAA,IAAW,YAAY,OAAA,EAAS;AAC9B,MAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,QAAA,OAAA,CAAQ,MAAM,0CAA0C,CAAA;AACxD,QAAA,OAAA,CAAQ,IAAI,4CAA4C,CAAA;AACxD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AACA,MAAA,MAAM,SAAS,IAAA,CAAK,CAAC,GAAG,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AAAA,IACvC,CAAA,MAAO;AAEL,MAAA,MAAM,KAAA,GAAQ,OAAA;AAEd,MAAA,IAAI,KAAA,CAAM,UAAA,CAAW,IAAI,CAAA,EAAG;AAC1B,QAAA,OAAA,CAAQ,MAAM,uCAAuC,CAAA;AACrD,QAAA,OAAA,CAAQ,IAAI,qCAAqC,CAAA;AACjD,QAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,MAChB;AAEA,MAAA,MAAM,OAAA,GAAU,SAAA,CAAU,IAAA,CAAK,KAAA,CAAM,CAAC,CAAC,CAAA;AACvC,MAAA,MAAM,SAAA,CAAU,OAAO,OAAO,CAAA;AAAA,IAChC;AAAA,EACF,SAAS,KAAA,EAAY;AACnB,IAAA,OAAA,CAAQ,KAAA,CAAM,UAAA,EAAY,KAAA,CAAM,OAAO,CAAA;AACvC,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,EAChB;AACF;AAEA,IAAA,EAAK,CAAE,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA","file":"cli.js","sourcesContent":["/**\n * SeaCloud SDK 核心类型定义\n */\n\n/**\n * SDK 配置选项\n */\nexport interface SeacloudConfig {\n /** API Service Token - 从环境变量 API_SERVICE_TOKEN 读取或直接传入 */\n apiKey?: string;\n /** 代理服务器基础 URL,默认为 http://localhost:8080 */\n baseUrl?: string;\n /** 自定义 fetch 实现(可选,用于 Node.js < 18 或测试) */\n fetch?: typeof fetch;\n /** 请求超时时间(毫秒),默认 30000 */\n timeout?: number;\n}\n\n/**\n * 任务状态\n */\nexport type TaskStatus = 'pending' | 'processing' | 'in_progress' | 'completed' | 'failed';\n\n/**\n * 任务错误信息\n */\nexport interface TaskError {\n code: string;\n message: string;\n details?: any;\n}\n\n/**\n * 任务结果\n */\nexport interface TaskResult {\n /** 任务 ID */\n id: string;\n /** 创建时间戳 */\n created_at: number;\n /** 任务状态 */\n status: TaskStatus;\n /** 错误信息(如果失败) */\n error?: TaskError | null;\n /** 模型名称 */\n model: string;\n /** 输出结果 */\n output?: Array<{\n content?: Array<{\n type?: string;\n url?: string;\n size?: number;\n jobId?: string;\n [key: string]: any;\n }>;\n [key: string]: any;\n }>;\n /** 其他字段 */\n [key: string]: any;\n}\n\n/**\n * 任务轮询选项\n */\nexport interface PollingOptions {\n /** 轮询间隔(毫秒),默认 3000 */\n intervalMs?: number;\n /** 最大尝试次数,默认 120 */\n maxAttempts?: number;\n /** 进度回调函数 */\n onProgress?: (attempt: number, status: TaskStatus) => void;\n}\n\n/**\n * HTTP 响应错误\n */\nexport class SeacloudError extends Error {\n constructor(\n message: string,\n public statusCode?: number,\n public response?: any,\n ) {\n super(message);\n this.name = 'SeacloudError';\n }\n}\n","import type { SeacloudConfig } from './types.js';\n\n/**\n * 创建 SDK 配置\n * @param options 
配置选项\n * @returns 完整的配置对象\n */\nexport function createConfig(options: SeacloudConfig = {}): Required<SeacloudConfig> {\n // 优先级:传入的 apiKey > 环境变量 API_SERVICE_TOKEN\n const apiKey = options.apiKey ||\n (typeof process !== 'undefined' ? process.env?.API_SERVICE_TOKEN : undefined) ||\n '';\n\n // 使用环境变量或传入的 baseUrl\n const baseUrl = options.baseUrl ||\n (typeof process !== 'undefined' ? process.env?.SEACLOUD_BASE_URL : undefined) ||\n 'http://proxy.sg.seaverse.dev';\n\n // 使用全局 fetch 或自定义实现\n const fetchImpl = options.fetch || globalThis.fetch;\n\n if (!fetchImpl) {\n throw new Error('fetch is not available. Please provide a fetch implementation in config or upgrade to Node.js 18+');\n }\n\n return {\n apiKey,\n baseUrl,\n fetch: fetchImpl,\n timeout: options.timeout || 30000,\n };\n}\n\n/**\n * 验证配置\n * @param config 配置对象\n * @throws 如果配置无效\n */\nexport function validateConfig(config: Required<SeacloudConfig>): void {\n if (!config.apiKey) {\n throw new Error(\n 'API key is required. Set API_SERVICE_TOKEN environment variable or pass apiKey in config.'\n );\n }\n\n if (!config.baseUrl) {\n throw new Error('baseUrl is required');\n }\n\n if (typeof config.fetch !== 'function') {\n throw new Error('fetch must be a function');\n }\n}\n","import type { SeacloudConfig, TaskResult } from './types.js';\nimport { SeacloudError } from './types.js';\nimport { createConfig, validateConfig } from './config.js';\n\n/**\n * SeaCloud HTTP 客户端\n * 用于创建任务和查询任务状态\n */\nexport class SeacloudClient {\n private config: Required<SeacloudConfig>;\n\n constructor(config: SeacloudConfig = {}) {\n this.config = createConfig(config);\n validateConfig(this.config);\n }\n\n /**\n * 创建一个新任务\n * @param endpoint API 端点路径(例如:/model/tasks)\n * @param body 请求体\n * @returns 任务结果\n */\n async createTask(endpoint: string, body: Record<string, any>): Promise<TaskResult> {\n const url = `${this.config.baseUrl}${endpoint}`;\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);\n\n try {\n const response = await this.config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this.config.apiKey}`,\n },\n body: JSON.stringify(body),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n const result = await response.json();\n return result as TaskResult;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${this.config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n }\n\n /**\n * 查询任务状态\n * @param endpoint API 端点路径(例如:/model/v1/generation)\n * @param taskId 任务 ID\n * @returns 任务结果\n */\n async getTaskStatus(endpoint: string, taskId: string): Promise<TaskResult> {\n const url = `${this.config.baseUrl}${endpoint}/task/${taskId}`;\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);\n\n try {\n const response = await this.config.fetch(url, {\n method: 'GET',\n headers: {\n 'Authorization': `Bearer ${this.config.apiKey}`,\n },\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const 
errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n const result = await response.json();\n return result as TaskResult;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${this.config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n }\n\n /**\n * 获取当前配置\n */\n getConfig(): Readonly<Required<SeacloudConfig>> {\n return { ...this.config };\n }\n}\n","/**\n * 全局配置管理\n */\n\nimport type { SeacloudClient } from './client.js';\nimport type { PollingOptions } from './types.js';\nimport { SeacloudClient as Client } from './client.js';\n\n// 全局配置状态\ninterface GlobalConfig {\n client: SeacloudClient | null;\n defaultPollingOptions: PollingOptions;\n}\n\nconst globalConfig: GlobalConfig = {\n client: null,\n defaultPollingOptions: {\n intervalMs: 3000, // 3秒轮询间隔\n maxAttempts: 100, // 最多尝试100次 (约5分钟)\n },\n};\n\n/**\n * 初始化 SDK\n * @param apiKey API 密钥\n * @param options 可选配置\n */\nexport function initSeacloud(apiKey: string, options?: {\n baseUrl?: string;\n intervalMs?: number; // 轮询间隔(毫秒)\n maxAttempts?: number; // 最大尝试次数\n timeout?: number;\n}) {\n if (!apiKey) {\n throw new Error('API key is required. Please provide a valid API key.');\n }\n\n // 创建客户端\n globalConfig.client = new Client({\n apiKey,\n baseUrl: options?.baseUrl,\n timeout: options?.timeout,\n });\n\n // 设置默认轮询选项\n if (options?.intervalMs !== undefined) {\n globalConfig.defaultPollingOptions.intervalMs = options.intervalMs;\n }\n if (options?.maxAttempts !== undefined) {\n globalConfig.defaultPollingOptions.maxAttempts = options.maxAttempts;\n }\n\n return globalConfig.client;\n}\n\n/**\n * 获取全局客户端\n * @throws 如果未初始化\n */\nexport function getClient(): SeacloudClient {\n if (!globalConfig.client) {\n throw new Error(\n 'SeaCloud SDK is not initialized. 
Please call initSeacloud(apiKey) first.'\n );\n }\n return globalConfig.client;\n}\n\n/**\n * 获取默认轮询选项\n */\nexport function getDefaultPollingOptions(): PollingOptions {\n return { ...globalConfig.defaultPollingOptions };\n}\n\n/**\n * 更新默认轮询选项\n */\nexport function setDefaultPollingOptions(options: Partial<PollingOptions>) {\n Object.assign(globalConfig.defaultPollingOptions, options);\n}\n\n/**\n * 重置配置(主要用于测试)\n */\nexport function resetConfig() {\n globalConfig.client = null;\n globalConfig.defaultPollingOptions = {\n intervalMs: 3000,\n maxAttempts: 100,\n };\n}\n","import { getClient } from '../core/global-config.js';\nimport { SeacloudError } from '../core/types.js';\n\n/**\n * Chat message role\n */\nexport type ChatMessageRole = 'system' | 'user' | 'assistant';\n\n/**\n * Chat message interface\n */\nexport interface ChatMessage {\n /**\n * Message role\n */\n role: ChatMessageRole;\n /**\n * Message content\n */\n content: string;\n}\n\n/**\n * LLM chat completions request parameters\n */\nexport interface LlmChatCompletionsParams {\n /**\n * Model name to use for chat completion\n * Supports 100+ models including:\n * - Claude: seaart-mix-sonnet-4-5, shaseng-claude-4.5, seacloud-claude-haiku-4.5, seacloud-claude-sonnet-4, seacloud-claude-opus-4.5\n * - Kimi: kimi-k2, kimi-k2-thinking, kimi-k2-250905\n * - Grok: grok-4, grok-code-fast-1\n * - DeepSeek: deepseek-v3.1, deepseek-v3-0324, deepseek-r1, deepseek-r1-0528\n * - Gemini: gemini-2.5-pro, gemini-2.5-flash, gemini-2.0-flash, gemini-3-pro-preview, gemini-3-flash-preview\n * - Qwen: qwen-plus, qwen-plus-character, qwen3-coder-plus\n * - GLM: glm-4.5-air, glm-4.5, glm-4.6, glm-4-plus\n * - GPT: gpt-4o, gpt-4o-mini, gpt-4.1, gpt-4.1-mini, gpt-4.1-nano, gpt-5, gpt-5.1, gpt-5.2\n * - Hunyuan: hunyuan-turbos-latest, hunyuan-large-role-20250822, hunyuan-large-role-plus-20250718\n * - Mistral: mistral-small-3.2-24b-instruct, mistral-small-3.1-24b-instruct, mistral-nemo\n * - Hermes: hermes-4-70b, hermes-4-405b, hermes-3-llama-3.1-405b\n * - Doubao: doubao-romantic, doubao-seed-1-6, doubao-seed-code-preview-251028\n * - Ernie: ernie-5.0-thinking-latest, ernie-5.0-thinking-preview\n * - WizardLM: wizardlm-2-8x22b\n * - O1/O3: o1-pre, o3-pre\n * - LongCat: LongCat-Flash-Chat, LongCat-Flash-Thinking\n * - And many more...\n *\n * @example \"seaart-mix-sonnet-4-5\"\n * @example \"kimi-k2\"\n * @example \"deepseek-v3.1\"\n */\n model: string;\n\n /**\n * Array of messages comprising the conversation so far\n */\n messages: ChatMessage[];\n\n /**\n * Maximum number of tokens to generate in the completion\n * @default undefined (model default)\n */\n max_tokens?: number;\n\n /**\n * Sampling temperature (0-2)\n * Higher values make output more random, lower values more deterministic\n * @default undefined (model default, usually 1.0)\n */\n temperature?: number;\n\n /**\n * Nucleus sampling parameter (0-1)\n * Alternative to temperature sampling\n * @default undefined (model default, usually 1.0)\n */\n top_p?: number;\n\n /**\n * Whether to stream the response\n * If true, returns an async iterator of chunks\n * If false, waits for complete response\n * @default false\n */\n stream?: boolean;\n\n /**\n * Number of completions to generate\n * @default 1\n */\n n?: number;\n\n /**\n * Sequences where the API will stop generating further tokens\n */\n stop?: string | string[];\n\n /**\n * Penalty for new tokens based on whether they appear in the text so far (-2.0 to 2.0)\n * @default 0\n */\n presence_penalty?: number;\n\n /**\n * Penalty for 
new tokens based on their existing frequency in the text so far (-2.0 to 2.0)\n * @default 0\n */\n frequency_penalty?: number;\n\n /**\n * A unique identifier representing your end-user\n */\n user?: string;\n}\n\n/**\n * Usage statistics for the completion\n */\nexport interface ChatCompletionUsage {\n /**\n * Number of tokens in the prompt\n */\n prompt_tokens: number;\n\n /**\n * Number of tokens in the generated completion\n */\n completion_tokens: number;\n\n /**\n * Total number of tokens used\n */\n total_tokens: number;\n\n /**\n * Cost in USD (if available)\n */\n cost?: number;\n\n /**\n * Additional usage details\n */\n [key: string]: any;\n}\n\n/**\n * A single chat completion choice\n */\nexport interface ChatCompletionChoice {\n /**\n * The index of this choice\n */\n index: number;\n\n /**\n * The message generated by the model\n */\n message: ChatMessage;\n\n /**\n * The reason the model stopped generating tokens\n * @values stop | length | content_filter | function_call\n */\n finish_reason: string | null;\n\n /**\n * Additional choice-specific fields\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion response (non-streaming)\n */\nexport interface ChatCompletionResponse {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * The object type, always \"chat.completion\"\n */\n object: string;\n\n /**\n * Unix timestamp of when the completion was created\n */\n created: number;\n\n /**\n * The model used for completion\n */\n model: string;\n\n /**\n * List of completion choices\n */\n choices: ChatCompletionChoice[];\n\n /**\n * Usage statistics for the completion\n */\n usage?: ChatCompletionUsage;\n\n /**\n * Additional response fields\n */\n [key: string]: any;\n}\n\n/**\n * Delta message for streaming chunks\n */\nexport interface ChatCompletionChunkDelta {\n /**\n * The role of the message (only in first chunk)\n */\n role?: ChatMessageRole;\n\n /**\n * The content delta for this chunk\n */\n content?: string;\n}\n\n/**\n * A single streaming chunk choice\n */\nexport interface ChatCompletionChunkChoice {\n /**\n * The index of this choice\n */\n index: number;\n\n /**\n * The delta message for this chunk\n */\n delta: ChatCompletionChunkDelta;\n\n /**\n * The reason the model stopped generating tokens (only in last chunk)\n */\n finish_reason: string | null;\n\n /**\n * Additional choice-specific fields\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion streaming chunk\n */\nexport interface ChatCompletionChunk {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * The object type, always \"chat.completion.chunk\"\n */\n object: string;\n\n /**\n * Unix timestamp of when the chunk was created\n */\n created: number;\n\n /**\n * The model used for completion\n */\n model: string;\n\n /**\n * List of chunk choices\n */\n choices: ChatCompletionChunkChoice[];\n\n /**\n * Additional chunk fields\n */\n [key: string]: any;\n}\n\n/**\n * Call LLM chat completions API (non-streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, llmChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({ apiKey: 'your-api-key' });\n *\n * const response = await llmChatCompletions({\n * model: 'seaart-mix-sonnet-4-5',\n * messages: [\n * { role: 'user', content: 'Hello!' 
}\n * ],\n * max_tokens: 100\n * });\n *\n * console.log(response.choices[0].message.content);\n * ```\n *\n * @param params Request parameters\n * @returns Chat completion response\n */\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams & { stream?: false }\n): Promise<ChatCompletionResponse>;\n\n/**\n * Call LLM chat completions API (streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, llmChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({ apiKey: 'your-api-key' });\n *\n * const stream = await llmChatCompletions({\n * model: 'seaart-mix-sonnet-4-5',\n * messages: [\n * { role: 'user', content: 'Tell me a story' }\n * ],\n * stream: true\n * });\n *\n * for await (const chunk of stream) {\n * const content = chunk.choices[0]?.delta?.content;\n * if (content) {\n * process.stdout.write(content);\n * }\n * }\n * ```\n *\n * @param params Request parameters with stream: true\n * @returns Async iterator of chat completion chunks\n */\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams & { stream: true }\n): Promise<AsyncIterable<ChatCompletionChunk>>;\n\n// Implementation\nexport async function llmChatCompletions(\n params: LlmChatCompletionsParams\n): Promise<ChatCompletionResponse | AsyncIterable<ChatCompletionChunk>> {\n const client = getClient();\n const config = client.getConfig();\n\n const url = `${config.baseUrl}/llm/chat/completions`;\n\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), config.timeout);\n\n try {\n const response = await config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.apiKey}`,\n },\n body: JSON.stringify(params),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n // Streaming response\n if (params.stream) {\n return parseStreamingResponse(response);\n }\n\n // Non-streaming response\n const result = await response.json();\n return result as ChatCompletionResponse;\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n}\n\n/**\n * Parse streaming response into async iterable\n */\nasync function* parseStreamingResponse(\n response: Response\n): AsyncIterable<ChatCompletionChunk> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n if (!trimmedLine) continue;\n if (trimmedLine === 'data: [DONE]') continue;\n\n if (trimmedLine.startsWith('data: ')) {\n const jsonStr = trimmedLine.slice(6);\n try {\n const chunk = JSON.parse(jsonStr) as ChatCompletionChunk;\n yield chunk;\n } catch (error) {\n console.warn('Failed to 
parse SSE chunk:', jsonStr);\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n","/**\n * ============================================================================\n * SeaCloud Fast Agent Chat Completions API\n * ============================================================================\n *\n * This module provides a comprehensive API for interacting with SeaCloud's\n * Fast Agent system, which supports:\n *\n * - Multi-turn conversations with session management\n * - Tool calling for artifact generation (images, videos, music, etc.)\n * - Streaming and non-streaming response modes\n * - Multiple content types (text, image, video, audio, file)\n * - Flexible model selection (Claude, GPT, Gemini, etc.)\n *\n * IMPORTANT NOTES:\n * ================\n *\n * 1. REQUIRED PARAMETERS:\n * - agent_id: Caller-provided agent identifier\n * - messages: Array of conversation messages\n *\n * 2. TIMEOUT CONFIGURATION:\n * - Default timeout: 30 seconds (may be too short for tool calls)\n * - Recommended for agent operations: 120 seconds or more\n * - Set via initSeacloud({ timeout: 120000 })\n *\n * 3. STREAMING MODE:\n * - API always returns Server-Sent Events (SSE) format\n * - params.stream controls how SDK consumes the response\n * - For tool calls (image/video generation), streaming is RECOMMENDED\n *\n * 4. AUTHENTICATION:\n * - Requires 'X-Project: SeaArt' header (automatically added)\n * - Uses Bearer token authentication\n *\n * 5. SESSION MANAGEMENT:\n * - Use session_id to maintain multi-turn conversations\n * - Increment seq for each message in the session\n *\n * @see AGENT_API.md for detailed documentation and examples\n */\n\nimport { getClient } from '../core/global-config.js';\nimport { SeacloudError } from '../core/types.js';\n\n/**\n * Message content type\n */\nexport type AgentMessageContentType = 'text' | 'image' | 'video' | 'audio' | 'file';\n\n/**\n * Base message content\n */\ninterface BaseMessageContent {\n type: AgentMessageContentType;\n}\n\n/**\n * Text message content\n */\ninterface TextMessageContent extends BaseMessageContent {\n type: 'text';\n text: string;\n}\n\n/**\n * Image message content\n */\ninterface ImageMessageContent extends BaseMessageContent {\n type: 'image';\n image_url: string;\n}\n\n/**\n * Video message content\n */\ninterface VideoMessageContent extends BaseMessageContent {\n type: 'video';\n video_url: string;\n}\n\n/**\n * Audio message content\n */\ninterface AudioMessageContent extends BaseMessageContent {\n type: 'audio';\n audio_url: string;\n}\n\n/**\n * File message content\n */\ninterface FileMessageContent extends BaseMessageContent {\n type: 'file';\n file_url: string;\n}\n\n/**\n * Message content item (union type for type safety)\n */\nexport type AgentMessageContent =\n | TextMessageContent\n | ImageMessageContent\n | VideoMessageContent\n | AudioMessageContent\n | FileMessageContent;\n\n/**\n * Message role type\n */\nexport type AgentMessageRole = 'developer' | 'user' | 'assistant' | 'tool';\n\n/**\n * Chat message\n */\nexport interface AgentMessage {\n /**\n * Message role\n * - developer: System instructions/prompts (equivalent to \"system\" role)\n * - user: User input messages\n * - assistant: AI assistant responses (for multi-turn conversations)\n * - tool: Tool execution results\n */\n role: AgentMessageRole;\n\n /**\n * Message content array\n */\n content: AgentMessageContent[];\n\n /**\n * Tool call ID (for role: 'tool')\n */\n tool_call_id?: string;\n\n /**\n * Tool name (for role: 'tool')\n */\n name?: 
string;\n}\n\n/**\n * Tool function definition\n */\nexport interface AgentToolFunction {\n /**\n * Tool name\n * Available tools:\n *\n * Image Generation:\n * - seagen_text2image_flux1d_artifact_tool\n * - seagen_text2image_seedream40_artifact_tool\n * - seagen_text2image_google_gemini3_pro_image_artifact_tool\n * - seagen_blackforestlabs_flux_2_pro_tool\n * - mm_volces_seedream_4_5_gateway_tool\n *\n * Image Editing:\n * - seagen_edit_image_google_artifact_tool\n * - seagen_blackforestlabs_flux_2_pro_edit_tool\n *\n * Video Generation:\n * - seagen_image2video_wanx26_artifact_tool\n * - seagen_text2video_wanx26_artifact_tool\n * - seagen_image2video_seedance_pro_fast_artifact_tool\n * - mm_text2video_kling_v2_6_gateway_tool\n * - mm_image2video_kling_v2_6_i2v_gateway_tool\n *\n * Music & Audio:\n * - seagen_text2song_mureka_artifact_tool\n * - seagen_text2lyrics_mureka_artifact_tool\n */\n name: string;\n\n /**\n * Tool description (optional)\n */\n description?: string;\n\n /**\n * Tool parameters schema (optional)\n */\n parameters?: Record<string, any>;\n}\n\n/**\n * Tool definition\n */\nexport interface AgentTool {\n /**\n * Tool type (always 'function')\n */\n type: 'function';\n\n /**\n * Function definition\n */\n function: AgentToolFunction;\n}\n\n/**\n * Agent chat completions request parameters\n */\nexport interface AgentChatCompletionsParams {\n /**\n * Agent ID to invoke\n * @example \"seaverse_agent\"\n * @example \"seagen_agent\"\n */\n agent_id: string;\n\n /**\n * Array of messages comprising the conversation\n */\n messages: AgentMessage[];\n\n /**\n * AI model to use for processing\n * @default \"custom_openai/vertex-ai-claude-sonnet-4.5\"\n * @example \"gpt-4o\"\n * @example \"custom_openai/vertex-ai-claude-sonnet-4.5\"\n */\n model?: string;\n\n /**\n * Sampling temperature for creativity control (0.0 to 1.0)\n * - 0.0: Focused and deterministic responses\n * - 1.0: Maximum creativity and randomness\n * @default 0.7\n */\n temperature?: number;\n\n /**\n * Session ID for multi-turn conversations\n * Used to maintain conversation context across multiple requests\n * Server returns this in the response; use it in subsequent calls\n */\n session_id?: string;\n\n /**\n * User identifier for tracking and analytics\n * Optional field for associating requests with specific users\n */\n user_id?: string;\n\n /**\n * Message ID for SSE reconnection (ADVANCED USE)\n * Only use this when resuming an interrupted streaming response\n * Server returns msg_id in response; use it to resume from disconnect point\n *\n * WARNING: Do NOT use for multi-turn conversations\n * For multi-turn, just include full message history + session_id\n *\n * @example\n * // DON'T do this for multi-turn:\n * // { msg_id: previousResponse.msg_id, seq: 1 }\n *\n * // DO this instead:\n * // { session_id: previousResponse.session_id, messages: [...allMessages] }\n */\n msg_id?: string;\n\n /**\n * Sequence number for message resumption (ADVANCED USE)\n * Only used with msg_id for resuming interrupted streams\n * For regular multi-turn conversations, omit this parameter\n *\n * @default 0\n */\n seq?: number;\n\n /**\n * Business-specific parameters\n * Key-value pairs passed through to backend systems\n * Common use cases:\n * - aspect_ratio: Video/image aspect ratio (e.g., \"16:9\", \"1:1\")\n * - duration: Video duration in seconds\n * - resolution: Output resolution (e.g., \"1080p\", \"4k\")\n */\n metadata?: Record<string, any>;\n\n /**\n * List of tools available to the agent\n * Agent 
automatically selects appropriate tool based on user request\n */\n tools?: AgentTool[];\n\n /**\n * Enable Server-Sent Events streaming\n * @default true\n */\n stream?: boolean;\n\n /**\n * Maximum number of tokens to generate\n */\n max_tokens?: number;\n\n /**\n * Number of completions to generate\n * @default 1\n */\n n?: number;\n\n /**\n * Sequences where the API will stop generating\n */\n stop?: string | string[];\n\n /**\n * Penalty for new tokens based on presence (-2.0 to 2.0)\n * @default 0\n */\n presence_penalty?: number;\n\n /**\n * Penalty for new tokens based on frequency (-2.0 to 2.0)\n * @default 0\n */\n frequency_penalty?: number;\n\n /**\n * Top-p nucleus sampling (0-1)\n */\n top_p?: number;\n}\n\n/**\n * Artifact (generated file)\n */\nexport interface AgentArtifact {\n /**\n * File name\n * @example \"image.webp\"\n */\n name: string;\n\n /**\n * CDN URL to access the file\n */\n url: string;\n\n /**\n * Content type\n * @example \"image\" | \"video\" | \"audio\"\n */\n type: string;\n\n /**\n * MIME type\n * @example \"image/webp\" | \"video/mp4\" | \"audio/mp3\"\n */\n mime_type?: string;\n\n /**\n * File size in bytes\n */\n size?: number;\n\n /**\n * Duration in seconds (for video/audio)\n */\n duration?: number;\n\n /**\n * Width in pixels (for image/video)\n */\n width?: number;\n\n /**\n * Height in pixels (for image/video)\n */\n height?: number;\n\n /**\n * Additional metadata\n */\n [key: string]: any;\n}\n\n/**\n * Tool call information\n */\nexport interface AgentToolCall {\n /**\n * Tool call ID\n */\n id: string;\n\n /**\n * Tool type\n */\n type: 'function';\n\n /**\n * Function call details\n */\n function: {\n /**\n * Function name\n */\n name: string;\n\n /**\n * Function arguments (JSON string)\n */\n arguments: string;\n };\n\n /**\n * Tool call index\n */\n index?: number;\n}\n\n/**\n * Delta content for streaming chunks\n */\nexport interface AgentChatCompletionDelta {\n /**\n * Message role (only in first chunk)\n */\n role?: AgentMessageRole;\n\n /**\n * Content delta (string or array)\n */\n content?: string | AgentMessageContent[];\n\n /**\n * Artifacts (generated files)\n */\n artifacts?: AgentArtifact[];\n\n /**\n * Tool calls\n */\n tool_calls?: AgentToolCall[];\n\n /**\n * Refusal message\n */\n refusal?: string | null;\n\n /**\n * Function call (deprecated, use tool_calls)\n */\n function_call?: any;\n\n /**\n * Audio content\n */\n audio?: any;\n\n /**\n * Additional provider-specific fields\n */\n provider_specific_fields?: any;\n}\n\n/**\n * Content filter result\n */\nexport interface AgentContentFilterResult {\n /**\n * Error information\n */\n error?: {\n /**\n * Error code\n */\n code: string;\n\n /**\n * Error message\n */\n message: string;\n };\n\n /**\n * Additional filter results\n */\n [key: string]: any;\n}\n\n/**\n * Chat completion choice (streaming)\n */\nexport interface AgentChatCompletionChunkChoice {\n /**\n * Choice index\n */\n index: number;\n\n /**\n * Delta message for this chunk\n */\n delta: AgentChatCompletionDelta;\n\n /**\n * Finish reason (only in last chunk)\n * - \"stop\": Natural completion\n * - \"length\": Maximum token limit reached\n * - \"tool_calls\": Tool execution triggered\n * - \"content_filter\": Content filtered\n */\n finish_reason: string | null;\n\n /**\n * Log probabilities\n */\n logprobs?: any;\n\n /**\n * Content filter results\n */\n content_filter_results?: AgentContentFilterResult;\n\n /**\n * Content filter result (singular)\n */\n content_filter_result?: 
AgentContentFilterResult;\n}\n\n/**\n * Usage statistics\n */\nexport interface AgentUsage {\n /**\n * Prompt tokens\n */\n prompt_tokens: number;\n\n /**\n * Completion tokens\n */\n completion_tokens: number;\n\n /**\n * Total tokens\n */\n total_tokens: number;\n\n /**\n * Completion tokens details\n */\n completion_tokens_details?: {\n /**\n * Accepted prediction tokens\n */\n accepted_prediction_tokens?: number;\n\n /**\n * Audio tokens\n */\n audio_tokens?: number;\n\n /**\n * Reasoning tokens\n */\n reasoning_tokens?: number;\n\n /**\n * Rejected prediction tokens\n */\n rejected_prediction_tokens?: number;\n };\n\n /**\n * Prompt tokens details\n */\n prompt_tokens_details?: {\n /**\n * Audio tokens\n */\n audio_tokens?: number;\n\n /**\n * Cached tokens\n */\n cached_tokens?: number;\n\n /**\n * Text tokens\n */\n text_tokens?: number | null;\n\n /**\n * Image tokens\n */\n image_tokens?: number | null;\n };\n\n /**\n * Cost in USD\n */\n cost?: number;\n}\n\n/**\n * Chat completion streaming chunk\n */\nexport interface AgentChatCompletionChunk {\n /**\n * Unique identifier for the completion\n */\n id: string;\n\n /**\n * Object type (always \"chat.completion.chunk\")\n */\n object: string;\n\n /**\n * Unix timestamp\n */\n created: number;\n\n /**\n * Model used\n */\n model: string;\n\n /**\n * System fingerprint\n */\n system_fingerprint?: string;\n\n /**\n * List of choices\n */\n choices: AgentChatCompletionChunkChoice[];\n\n /**\n * Usage statistics (usually in last chunk)\n */\n usage?: AgentUsage;\n\n /**\n * Session ID for conversation continuation\n */\n session_id?: string;\n\n /**\n * Message ID\n */\n msg_id?: string;\n\n /**\n * Provider-specific fields\n */\n provider_specific_fields?: any;\n\n /**\n * Citations\n */\n citations?: any;\n\n /**\n * Service tier\n */\n service_tier?: string | null;\n\n /**\n * Obfuscation\n */\n obfuscation?: string;\n}\n\n/**\n * Message in non-streaming response\n */\nexport interface AgentResponseMessage {\n /**\n * Message role\n */\n role: AgentMessageRole;\n\n /**\n * Message content\n */\n content: string;\n\n /**\n * Tool calls\n */\n tool_calls?: AgentToolCall[];\n\n /**\n * Refusal message\n */\n refusal?: string | null;\n\n /**\n * Function call (deprecated)\n */\n function_call?: any;\n\n /**\n * Audio content\n */\n audio?: any;\n}\n\n/**\n * Chat completion choice (non-streaming)\n */\nexport interface AgentChatCompletionChoice {\n /**\n * Choice index\n */\n index: number;\n\n /**\n * Message\n */\n message: AgentResponseMessage;\n\n /**\n * Finish reason\n */\n finish_reason: string | null;\n\n /**\n * Log probabilities\n */\n logprobs?: any;\n\n /**\n * Content filter results\n */\n content_filter_results?: AgentContentFilterResult;\n}\n\n/**\n * Chat completion response (non-streaming)\n */\nexport interface AgentChatCompletionResponse {\n /**\n * Unique identifier\n */\n id: string;\n\n /**\n * Object type (always \"chat.completion\")\n */\n object: string;\n\n /**\n * Unix timestamp\n */\n created: number;\n\n /**\n * Model used\n */\n model: string;\n\n /**\n * System fingerprint\n */\n system_fingerprint?: string;\n\n /**\n * List of choices\n */\n choices: AgentChatCompletionChoice[];\n\n /**\n * Usage statistics\n */\n usage?: AgentUsage;\n\n /**\n * Session ID\n */\n session_id?: string;\n\n /**\n * Message ID\n */\n msg_id?: string;\n\n /**\n * Provider-specific fields\n */\n provider_specific_fields?: any;\n\n /**\n * Service tier\n */\n service_tier?: string | null;\n\n /**\n * Generated artifacts\n */\n 
artifacts?: AgentArtifact[];\n}\n\n/**\n * Call SeaCloud Fast Agent API (non-streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, agentChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({\n * apiKey: 'your-api-key',\n * baseUrl: 'http://proxy.sg.seaverse.dev',\n * });\n *\n * // Simple chat\n * const response = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Hello!' }]\n * }\n * ],\n * model: 'gpt-4o',\n * });\n *\n * console.log(response.choices[0].message.content);\n * ```\n *\n * @example\n * ```typescript\n * // Generate an image\n * const response = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Generate a cute puppy photo' }]\n * }\n * ],\n * model: 'gpt-4o',\n * tools: [\n * {\n * type: 'function',\n * function: { name: 'seagen_text2image_flux1d_artifact_tool' }\n * }\n * ]\n * });\n *\n * // Access generated image\n * const imageUrl = response.artifacts?.[0]?.url;\n * ```\n *\n * @param params Request parameters\n * @returns Chat completion response\n */\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams & { stream?: false }\n): Promise<AgentChatCompletionResponse>;\n\n/**\n * Call SeaCloud Fast Agent API (streaming)\n *\n * @example\n * ```typescript\n * import { initSeacloud, agentChatCompletions } from 'seacloud-sdk';\n *\n * initSeacloud({\n * apiKey: 'your-api-key',\n * baseUrl: 'http://proxy.sg.seaverse.dev',\n * });\n *\n * const stream = await agentChatCompletions({\n * agent_id: 'seaverse_agent',\n * messages: [\n * {\n * role: 'user',\n * content: [{ type: 'text', text: 'Tell me a story' }]\n * }\n * ],\n * model: 'gpt-4o',\n * stream: true,\n * });\n *\n * for await (const chunk of stream) {\n * const content = chunk.choices[0]?.delta?.content;\n * if (typeof content === 'string') {\n * process.stdout.write(content);\n * }\n *\n * // Handle artifacts\n * const artifacts = chunk.choices[0]?.delta?.artifacts;\n * if (artifacts) {\n * console.log('Generated artifacts:', artifacts);\n * }\n * }\n * ```\n *\n * @param params Request parameters with stream: true\n * @returns Async iterator of chat completion chunks\n */\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams & { stream: true }\n): Promise<AsyncIterable<AgentChatCompletionChunk>>;\n\n// Implementation\nexport async function agentChatCompletions(\n params: AgentChatCompletionsParams\n): Promise<AgentChatCompletionResponse | AsyncIterable<AgentChatCompletionChunk>> {\n const client = getClient();\n const config = client.getConfig();\n\n const url = `${config.baseUrl}/agent/api/v1/chat/completions`;\n\n // ============================================================================\n // PARAMETER PROCESSING LOGIC\n // ============================================================================\n\n /**\n * Default Model:\n * - If not specified, uses 'custom_openai/vertex-ai-claude-sonnet-4.5'\n * - This is Claude Sonnet 4.5, optimized for agent operations\n * - You can override with any supported model (gpt-4o, claude-4, etc.)\n */\n const model = params.model || 'custom_openai/vertex-ai-claude-sonnet-4.5';\n\n /**\n * Stream Mode:\n * - Agent API ALWAYS returns Server-Sent Events (SSE) format\n * - Even if params.stream=false, API returns SSE chunks\n * - We force stream: true in API request for consistency\n * - For non-streaming 
(params.stream=false), we collect all chunks internally\n */\n const userWantsStreaming = params.stream !== false; // Default to true\n const requestBody = {\n ...params,\n model,\n stream: true, // Always request SSE from API\n };\n\n // ============================================================================\n // TIMEOUT & REQUEST HANDLING\n // ============================================================================\n\n /**\n * Timeout Configuration:\n * - Default: 30 seconds (set in config)\n * - For agent operations (especially with tool calls like image generation):\n * Recommend setting timeout to 120000ms (2 minutes) or more\n * - Initialize SDK with custom timeout:\n * initSeacloud({ apiKey: '...', timeout: 120000 })\n */\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), config.timeout);\n\n try {\n /**\n * Request Headers:\n * - Content-Type: application/json (standard)\n * - Authorization: Bearer token for authentication\n * - X-Project: SeaArt (REQUIRED for agent API access)\n */\n const response = await config.fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.apiKey}`,\n 'X-Project': 'SeaArt', // Required header for agent API\n },\n body: JSON.stringify(requestBody),\n signal: controller.signal,\n });\n\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new SeacloudError(\n `HTTP ${response.status}: ${errorBody}`,\n response.status,\n errorBody,\n );\n }\n\n // ============================================================================\n // RESPONSE HANDLING\n // ============================================================================\n\n /**\n * Response Routing:\n * - If user wants streaming (params.stream !== false):\n * Return async iterator that yields chunks as they arrive\n *\n * - If user wants non-streaming (params.stream === false):\n * Internally collect all SSE chunks and return complete response\n *\n * Note: Both paths read SSE format from API, just different consumption patterns\n */\n if (userWantsStreaming) {\n return parseAgentStreamingResponse(response);\n }\n\n // Non-streaming: Collect all chunks and return complete response\n return await parseAgentNonStreamingResponse(response);\n } catch (error) {\n clearTimeout(timeoutId);\n\n if (error instanceof SeacloudError) {\n throw error;\n }\n\n if ((error as Error).name === 'AbortError') {\n throw new SeacloudError(`Request timeout after ${config.timeout}ms`);\n }\n\n throw new SeacloudError(\n `Request failed: ${(error as Error).message}`,\n undefined,\n error,\n );\n }\n}\n\n/**\n * Parse non-streaming response (API returns SSE even with stream: false)\n */\nasync function parseAgentNonStreamingResponse(\n response: Response\n): Promise<AgentChatCompletionResponse> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n let fullContent = '';\n let artifacts: AgentArtifact[] = [];\n let finishReason = '';\n let sessionId = '';\n let msgId = '';\n let lastChunk: AgentChatCompletionChunk | null = null;\n let usage: AgentUsage | undefined;\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || 
'';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n if (!trimmedLine) continue;\n if (trimmedLine === 'data: [DONE]') continue;\n if (trimmedLine === 'event: heartbeat') continue;\n\n if (trimmedLine.startsWith('data: ')) {\n const data = trimmedLine.slice(6).trim();\n\n try {\n const parsed = JSON.parse(data) as AgentChatCompletionChunk;\n lastChunk = parsed;\n\n const delta = parsed.choices?.[0]?.delta;\n\n if (delta?.content) {\n if (typeof delta.content === 'string') {\n fullContent += delta.content;\n } else if (Array.isArray(delta.content)) {\n for (const item of delta.content) {\n if (item.type === 'text' && item.text) {\n fullContent += item.text;\n }\n }\n }\n }\n\n if (delta?.artifacts) {\n artifacts.push(...delta.artifacts);\n }\n\n if (parsed.choices?.[0]?.finish_reason) {\n finishReason = parsed.choices[0].finish_reason;\n }\n\n if (parsed.session_id) {\n sessionId = parsed.session_id;\n }\n\n if (parsed.msg_id) {\n msgId = parsed.msg_id;\n }\n\n if (parsed.usage) {\n usage = parsed.usage;\n }\n } catch (e) {\n // Skip invalid JSON\n console.warn('Failed to parse SSE chunk:', data.substring(0, 100));\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n\n if (!lastChunk) {\n throw new SeacloudError('No valid response chunks received');\n }\n\n return {\n id: lastChunk.id,\n object: 'chat.completion',\n created: lastChunk.created,\n model: lastChunk.model,\n system_fingerprint: lastChunk.system_fingerprint,\n choices: [\n {\n index: 0,\n message: {\n role: 'assistant',\n content: fullContent,\n },\n finish_reason: finishReason || null,\n },\n ],\n usage,\n session_id: sessionId || undefined,\n msg_id: msgId || undefined,\n artifacts: artifacts.length > 0 ? artifacts : undefined,\n };\n}\n\n/**\n * Parse streaming response into async iterable\n */\nasync function* parseAgentStreamingResponse(\n response: Response\n): AsyncIterable<AgentChatCompletionChunk> {\n const reader = response.body?.getReader();\n if (!reader) {\n throw new SeacloudError('Response body is not readable');\n }\n\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const trimmedLine = line.trim();\n\n // Skip empty lines and comments\n if (!trimmedLine || trimmedLine.startsWith(':')) continue;\n\n // Handle event type declarations (e.g., \"event: heartbeat\")\n if (trimmedLine.startsWith('event: ')) {\n const eventType = trimmedLine.slice(7).trim();\n if (eventType === 'heartbeat') {\n // Heartbeat events can be logged but don't yield\n continue;\n }\n }\n\n // Handle data events\n if (trimmedLine.startsWith('data: ')) {\n const data = trimmedLine.slice(6).trim();\n\n // Check for stream completion signal\n if (data === '[DONE]') {\n break;\n }\n\n try {\n const chunk = JSON.parse(data) as AgentChatCompletionChunk;\n yield chunk;\n } catch (error) {\n console.warn('Failed to parse SSE chunk:', data);\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n\n/**\n * Helper function to create a text message\n *\n * @param role - Message role ('user' | 'assistant' | 'developer' | 'tool')\n * @param text - Text content\n * @returns Agent message with text content\n *\n * @example\n * ```typescript\n * const message = createTextMessage('user', 'Hello!');\n * ```\n */\nexport 
function createTextMessage(role: AgentMessageRole, text: string): AgentMessage {\n return {\n role,\n content: [{ type: 'text', text }],\n };\n}\n\n/**\n * Helper function to create a message with image\n *\n * @param role - Message role ('user' | 'assistant' | 'developer' | 'tool')\n * @param text - Text content\n * @param imageUrl - Image URL\n * @returns Agent message with text and image content\n *\n * @example\n * ```typescript\n * const message = createImageMessage('user', 'What is this?', 'https://example.com/image.jpg');\n * ```\n */\nexport function createImageMessage(role: AgentMessageRole, text: string, imageUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'image', image_url: imageUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a message with video\n *\n * @param role - Message role\n * @param text - Text content\n * @param videoUrl - Video URL\n * @returns Agent message with text and video content\n */\nexport function createVideoMessage(role: AgentMessageRole, text: string, videoUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'video', video_url: videoUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a message with audio\n *\n * @param role - Message role\n * @param text - Text content\n * @param audioUrl - Audio URL\n * @returns Agent message with text and audio content\n */\nexport function createAudioMessage(role: AgentMessageRole, text: string, audioUrl: string): AgentMessage {\n return {\n role,\n content: [\n { type: 'text', text },\n { type: 'audio', audio_url: audioUrl },\n ],\n };\n}\n\n/**\n * Helper function to create a tool\n */\nexport function createTool(toolName: string): AgentTool {\n return {\n type: 'function',\n function: {\n name: toolName,\n },\n };\n}\n","#!/usr/bin/env node\n\nimport { SeacloudClient } from './core/index.js';\nimport { initSeacloud } from './core/global-config.js';\nimport { llmChatCompletions } from './api/llm_chat_completions.js';\nimport { agentChatCompletions, createTextMessage } from './api/agent_chat_completions.js';\nimport { readFileSync } from 'fs';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\ninterface CliOptions {\n apiKey?: string;\n baseUrl?: string;\n model?: string;\n params?: Record<string, any>;\n}\n\nfunction showHelp() {\n console.log(`\nSeaCloud CLI - Test AI models, LLM, and Agent from command line\n\nUsage:\n seacloud <command> [options]\n\nCommands:\n llm <prompt> Chat with LLM models\n agent <prompt> Chat with Fast Agent (supports image/video generation)\n <model> Test specific model generation\n\nLLM Options:\n --model <name> Model name (default: seaart-mix-sonnet-4-5)\n --stream Enable streaming mode\n --temperature <n> Temperature 0-2 (default: 0.7)\n --max-tokens <n> Max tokens to generate\n\nAgent Options:\n --agent-id <id> Agent ID (default: seagen_agent)\n --model <name> Model name (default: gpt-4o)\n --stream Enable streaming mode\n --session-id <id> Session ID for multi-turn conversation\n\nModel Generation Options:\n --api-key <key> API key (or set API_SERVICE_TOKEN env var)\n --base-url <url> Base URL (default: http://proxy.sg.seaverse.dev)\n --params <json> JSON parameters for the model\n\nExamples:\n # Chat with LLM (non-streaming)\n seacloud llm \"What is the capital of France?\"\n\n # Chat with LLM (streaming)\n seacloud llm \"Tell me a story\" --stream\n\n # 
Chat with LLM using specific model\n seacloud llm \"Hello\" --model deepseek-v3.1 --temperature 1.0\n\n # Chat with Agent (can generate images)\n seacloud agent \"Generate an image of a sunset\"\n\n # Chat with Agent (streaming)\n seacloud agent \"Create a cat image\" --stream\n\n # Test model generation\n seacloud flux_1_1_pro --params '{\"prompt\":\"a beautiful sunset\"}'\n\n # Use custom API key\n seacloud llm \"Hello\" --api-key sa-xxx\n\nEnvironment Variables:\n API_SERVICE_TOKEN API authentication token (required)\n SEACLOUD_BASE_URL Base URL for API endpoints\n`);\n}\n\nfunction parseArgs(args: string[]): CliOptions {\n const options: CliOptions = {};\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n\n switch (arg) {\n case '--api-key':\n options.apiKey = args[++i];\n break;\n case '--base-url':\n options.baseUrl = args[++i];\n break;\n case '--model':\n options.model = args[++i];\n break;\n case '--params':\n try {\n options.params = JSON.parse(args[++i]);\n } catch (e) {\n console.error('Error: Invalid JSON for --params');\n process.exit(1);\n }\n break;\n }\n }\n\n return options;\n}\n\nasync function testModel(model: string, options: CliOptions) {\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || '';\n const baseUrl = options.baseUrl || process.env.API_BASE_URL || 'http://localhost:8080';\n\n if (!options.params) {\n console.error('Error: --params required. Provide JSON parameters for the model');\n process.exit(1);\n }\n\n console.log(`Testing model: ${model}`);\n console.log(`Base URL: ${baseUrl}`);\n console.log(`Parameters:`, JSON.stringify(options.params, null, 2));\n console.log('');\n\n const client = new SeacloudClient({ apiKey, baseUrl });\n\n try {\n console.log('Creating task...');\n const task = await client.createTask('/model/v1/generation', {\n model,\n input: [{ params: options.params }],\n });\n\n console.log(`Task created: ${task.id}`);\n console.log(`Initial status: ${task.status}`);\n console.log('');\n\n if (task.status === 'failed') {\n console.error('Task failed immediately:', task.error);\n process.exit(1);\n }\n\n console.log('Polling for results...');\n let attempt = 0;\n const maxAttempts = 120;\n const intervalMs = 3000;\n\n while (attempt < maxAttempts) {\n attempt++;\n\n const result = await client.getTaskStatus('/model/v1/generation', task.id);\n process.stdout.write(`\\rAttempt ${attempt}/${maxAttempts} - Status: ${result.status} `);\n\n if (result.status === 'completed') {\n console.log('\\n\\nTask completed!');\n console.log('\\nResults:');\n console.log(JSON.stringify(result.output, null, 2));\n\n // Extract URLs if available\n if (result.output) {\n const urls: string[] = [];\n for (const item of result.output) {\n if (item.content) {\n for (const resource of item.content) {\n if (resource.url) {\n urls.push(resource.url);\n }\n }\n }\n }\n\n if (urls.length > 0) {\n console.log('\\nGenerated URLs:');\n urls.forEach((url, i) => {\n console.log(` ${i + 1}. 
${url}`);\n });\n }\n }\n\n process.exit(0);\n }\n\n if (result.status === 'failed') {\n console.log('\\n\\nTask failed!');\n console.error('Error:', result.error);\n process.exit(1);\n }\n\n await new Promise(resolve => setTimeout(resolve, intervalMs));\n }\n\n console.log('\\n\\nTimeout: Task did not complete within the time limit');\n process.exit(1);\n\n } catch (error: any) {\n console.error('\\nError:', error.message);\n process.exit(1);\n }\n}\n\nasync function runLlm(prompt: string, args: string[]) {\n const options: any = { stream: false, model: 'seaart-mix-sonnet-4-5', temperature: 0.7 };\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n if (arg === '--model') options.model = args[++i];\n else if (arg === '--stream') options.stream = true;\n else if (arg === '--temperature') options.temperature = parseFloat(args[++i]);\n else if (arg === '--max-tokens') options.maxTokens = parseInt(args[++i]);\n else if (arg === '--api-key') options.apiKey = args[++i];\n else if (arg === '--base-url') options.baseUrl = args[++i];\n }\n\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || '';\n const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev';\n\n initSeacloud(apiKey, { baseUrl, timeout: 120000 });\n\n console.log(`Model: ${options.model}`);\n console.log(`Prompt: ${prompt}\\n`);\n\n if (options.stream) {\n const stream = await llmChatCompletions({\n model: options.model,\n messages: [{ role: 'user', content: prompt }],\n stream: true,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n });\n\n process.stdout.write('Response: ');\n for await (const chunk of stream) {\n const content = chunk.choices[0]?.delta?.content;\n if (typeof content === 'string') process.stdout.write(content);\n }\n console.log('\\n');\n } else {\n const response = await llmChatCompletions({\n model: options.model,\n messages: [{ role: 'user', content: prompt }],\n stream: false,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n });\n\n console.log('Response:', response.choices[0].message.content);\n console.log('\\nUsage:', response.usage);\n }\n}\n\nasync function runAgent(prompt: string, args: string[]) {\n const options: any = {\n stream: false,\n model: 'gpt-4o',\n agentId: 'seagen_agent',\n };\n\n for (let i = 0; i < args.length; i++) {\n const arg = args[i];\n if (arg === '--model') options.model = args[++i];\n else if (arg === '--stream') options.stream = true;\n else if (arg === '--agent-id') options.agentId = args[++i];\n else if (arg === '--session-id') options.sessionId = args[++i];\n else if (arg === '--api-key') options.apiKey = args[++i];\n else if (arg === '--base-url') options.baseUrl = args[++i];\n }\n\n const apiKey = options.apiKey || process.env.API_SERVICE_TOKEN || '';\n const baseUrl = options.baseUrl || process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev';\n\n initSeacloud(apiKey, { baseUrl, timeout: 300000 }); // 5 min for agent\n\n console.log(`Agent: ${options.agentId}`);\n console.log(`Model: ${options.model}`);\n console.log(`Prompt: ${prompt}\\n`);\n\n if (options.stream) {\n const stream = await agentChatCompletions({\n agent_id: options.agentId,\n messages: [createTextMessage('user', prompt)],\n model: options.model,\n stream: true,\n session_id: options.sessionId,\n seq: 0,\n });\n\n process.stdout.write('Response: ');\n for await (const chunk of stream) {\n const content = chunk.choices[0]?.delta?.content;\n if (typeof content === 'string') 
process.stdout.write(content);\n }\n console.log('\\n');\n } else {\n const response = await agentChatCompletions({\n agent_id: options.agentId,\n messages: [createTextMessage('user', prompt)],\n model: options.model,\n stream: false,\n session_id: options.sessionId,\n seq: 0,\n });\n\n console.log('Response:', response.choices[0].message.content);\n\n if (response.artifacts && response.artifacts.length > 0) {\n console.log('\\nGenerated Artifacts:');\n response.artifacts.forEach((artifact, i) => {\n console.log(` ${i + 1}. ${artifact.name}`);\n console.log(` URL: ${artifact.url}`);\n });\n }\n\n console.log('\\nSession ID:', response.session_id);\n console.log('Message ID:', response.msg_id);\n }\n}\n\nasync function main() {\n const args = process.argv.slice(2);\n\n if (args.length === 0 || args[0] === '--help' || args[0] === '-h') {\n showHelp();\n process.exit(0);\n }\n\n const command = args[0];\n\n try {\n if (command === 'llm') {\n if (args.length < 2) {\n console.error('Error: prompt required for llm command');\n console.log('Usage: seacloud llm \"<prompt>\" [options]');\n process.exit(1);\n }\n await runLlm(args[1], args.slice(2));\n } else if (command === 'agent') {\n if (args.length < 2) {\n console.error('Error: prompt required for agent command');\n console.log('Usage: seacloud agent \"<prompt>\" [options]');\n process.exit(1);\n }\n await runAgent(args[1], args.slice(2));\n } else {\n // Model generation command\n const model = command;\n\n if (model.startsWith('--')) {\n console.error('Error: command or model name required');\n console.log('Usage: seacloud <command> [options]');\n process.exit(1);\n }\n\n const options = parseArgs(args.slice(1));\n await testModel(model, options);\n }\n } catch (error: any) {\n console.error('\\nError:', error.message);\n process.exit(1);\n }\n}\n\nmain().catch(console.error);\n"]}
|
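
In the bundled `cli.ts` above, the key resolves as `options.apiKey || process.env.API_SERVICE_TOKEN || ''`, while the help text still lists `API_SERVICE_TOKEN` as required; with an empty key the request is sent anyway and a missing token only surfaces as an error from the endpoint. For callers who prefer to fail fast, here is a minimal sketch of a caller-side guard. It is not part of the package: `resolveApiKey` is an illustrative helper, and the `initSeacloud(apiKey, options)` form is taken from the CLI call sites in the map above (the embedded doc examples show an object form, so check your version's signature).

```typescript
import { initSeacloud, llmChatCompletions } from 'seacloud-sdk';

// Hypothetical helper: throw early instead of letting an empty Bearer token reach the server.
function resolveApiKey(explicit?: string): string {
  const key = explicit || process.env.API_SERVICE_TOKEN || '';
  if (!key) {
    throw new Error('API key not provided. Use --api-key or set API_SERVICE_TOKEN.');
  }
  return key;
}

async function main() {
  initSeacloud(resolveApiKey(), {
    baseUrl: process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev',
    timeout: 120_000,
  });

  const res = await llmChatCompletions({
    model: 'seaart-mix-sonnet-4-5',
    messages: [{ role: 'user', content: 'ping' }],
    stream: false,
  });
  console.log(res.choices[0].message.content);
}

main().catch(console.error);
```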
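
The embedded `agent_chat_completions.ts` notes recommend streaming for tool calls such as image generation, together with a timeout well above the 30-second default. A sketch of that pattern follows, assuming the `seaverse_agent` agent id and the `seagen_text2image_flux1d_artifact_tool` tool listed in those notes are enabled on your deployment, and that `createTextMessage`/`createTool` are re-exported from the package root like the functions in the bundled examples.

```typescript
import { initSeacloud, agentChatCompletions, createTextMessage, createTool } from 'seacloud-sdk';

// Signature follows the CLI call sites in the map above; adjust if your version differs.
initSeacloud(process.env.API_SERVICE_TOKEN ?? '', {
  baseUrl: process.env.SEACLOUD_BASE_URL || 'http://proxy.sg.seaverse.dev',
  timeout: 300_000, // tool calls (image/video generation) can take minutes
});

async function generateImage(prompt: string): Promise<string | undefined> {
  const stream = await agentChatCompletions({
    agent_id: 'seaverse_agent',                                     // example id from the bundled docs
    model: 'gpt-4o',
    messages: [createTextMessage('user', prompt)],
    tools: [createTool('seagen_text2image_flux1d_artifact_tool')],  // one of the listed image tools
    stream: true,                                                   // recommended for tool calls
  });

  let firstArtifactUrl: string | undefined;
  for await (const chunk of stream) {
    const delta = chunk.choices[0]?.delta;
    if (typeof delta?.content === 'string') process.stdout.write(delta.content);
    // Generated files arrive as delta.artifacts with CDN URLs.
    if (!firstArtifactUrl && delta?.artifacts?.length) {
      firstArtifactUrl = delta.artifacts[0].url;
    }
  }
  return firstArtifactUrl;
}

generateImage('Generate a photo of a lighthouse at dawn')
  .then((url) => console.log('\nArtifact URL:', url ?? '(none)'))
  .catch(console.error);
```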
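
The same source distinguishes `session_id` (multi-turn context, returned by the server and echoed back on later calls with the full message history) from `msg_id`/`seq` (SSE resumption only). A sketch of a two-turn conversation under those rules; the agent id is again illustrative and the init call mirrors the CLI.

```typescript
import { initSeacloud, agentChatCompletions, createTextMessage } from 'seacloud-sdk';

initSeacloud(process.env.API_SERVICE_TOKEN ?? '', { timeout: 120_000 });

async function main() {
  // Turn 1: no session_id yet; the server returns one in the response.
  const history = [createTextMessage('user', 'Suggest a name for a sea-themed mascot.')];
  const first = await agentChatCompletions({
    agent_id: 'seaverse_agent',
    messages: history,
    stream: false,
  });
  console.log('Assistant:', first.choices[0].message.content);

  // Turn 2: resend the full history plus the new user message and the returned
  // session_id. Per the notes above, msg_id/seq are NOT used here; they are only
  // for resuming an interrupted stream.
  history.push(
    createTextMessage('assistant', first.choices[0].message.content),
    createTextMessage('user', 'Now write a one-line slogan for it.'),
  );
  const second = await agentChatCompletions({
    agent_id: 'seaverse_agent',
    messages: history,
    session_id: first.session_id,
    stream: false,
  });
  console.log('Assistant:', second.choices[0].message.content);
}

main().catch(console.error);
```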