deepbox 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +344 -0
- package/dist/CSRMatrix-CwGwQRea.d.cts +219 -0
- package/dist/CSRMatrix-KzNt6QpS.d.ts +219 -0
- package/dist/Tensor-BQLk1ltW.d.cts +147 -0
- package/dist/Tensor-g8mUClel.d.ts +147 -0
- package/dist/chunk-4S73VUBD.js +677 -0
- package/dist/chunk-4S73VUBD.js.map +1 -0
- package/dist/chunk-5R4S63PF.js +2925 -0
- package/dist/chunk-5R4S63PF.js.map +1 -0
- package/dist/chunk-6AE5FKKQ.cjs +9264 -0
- package/dist/chunk-6AE5FKKQ.cjs.map +1 -0
- package/dist/chunk-AD436M45.js +3854 -0
- package/dist/chunk-AD436M45.js.map +1 -0
- package/dist/chunk-ALS7ETWZ.cjs +4263 -0
- package/dist/chunk-ALS7ETWZ.cjs.map +1 -0
- package/dist/chunk-AU7XHGKJ.js +2092 -0
- package/dist/chunk-AU7XHGKJ.js.map +1 -0
- package/dist/chunk-B5TNKUEY.js +1481 -0
- package/dist/chunk-B5TNKUEY.js.map +1 -0
- package/dist/chunk-BCR7G3A6.js +9136 -0
- package/dist/chunk-BCR7G3A6.js.map +1 -0
- package/dist/chunk-C4PKXY74.cjs +1917 -0
- package/dist/chunk-C4PKXY74.cjs.map +1 -0
- package/dist/chunk-DWZY6PIP.cjs +6400 -0
- package/dist/chunk-DWZY6PIP.cjs.map +1 -0
- package/dist/chunk-E3EU5FZO.cjs +2113 -0
- package/dist/chunk-E3EU5FZO.cjs.map +1 -0
- package/dist/chunk-F3JWBINJ.js +1054 -0
- package/dist/chunk-F3JWBINJ.js.map +1 -0
- package/dist/chunk-FJYLIGJX.js +1940 -0
- package/dist/chunk-FJYLIGJX.js.map +1 -0
- package/dist/chunk-JSCDE774.cjs +729 -0
- package/dist/chunk-JSCDE774.cjs.map +1 -0
- package/dist/chunk-LWECRCW2.cjs +2412 -0
- package/dist/chunk-LWECRCW2.cjs.map +1 -0
- package/dist/chunk-MLBMYKCG.js +6379 -0
- package/dist/chunk-MLBMYKCG.js.map +1 -0
- package/dist/chunk-OX6QXFMV.cjs +3874 -0
- package/dist/chunk-OX6QXFMV.cjs.map +1 -0
- package/dist/chunk-PHV2DKRS.cjs +1072 -0
- package/dist/chunk-PHV2DKRS.cjs.map +1 -0
- package/dist/chunk-PL7TAYKI.js +4056 -0
- package/dist/chunk-PL7TAYKI.js.map +1 -0
- package/dist/chunk-PR647I7R.js +1898 -0
- package/dist/chunk-PR647I7R.js.map +1 -0
- package/dist/chunk-QERHVCHC.cjs +2960 -0
- package/dist/chunk-QERHVCHC.cjs.map +1 -0
- package/dist/chunk-XEG44RF6.cjs +1514 -0
- package/dist/chunk-XEG44RF6.cjs.map +1 -0
- package/dist/chunk-XMWVME2W.js +2377 -0
- package/dist/chunk-XMWVME2W.js.map +1 -0
- package/dist/chunk-ZB75FESB.cjs +1979 -0
- package/dist/chunk-ZB75FESB.cjs.map +1 -0
- package/dist/chunk-ZLW62TJG.cjs +4061 -0
- package/dist/chunk-ZLW62TJG.cjs.map +1 -0
- package/dist/chunk-ZXKBDFP3.js +4235 -0
- package/dist/chunk-ZXKBDFP3.js.map +1 -0
- package/dist/core/index.cjs +204 -0
- package/dist/core/index.cjs.map +1 -0
- package/dist/core/index.d.cts +2 -0
- package/dist/core/index.d.ts +2 -0
- package/dist/core/index.js +3 -0
- package/dist/core/index.js.map +1 -0
- package/dist/dataframe/index.cjs +22 -0
- package/dist/dataframe/index.cjs.map +1 -0
- package/dist/dataframe/index.d.cts +3 -0
- package/dist/dataframe/index.d.ts +3 -0
- package/dist/dataframe/index.js +5 -0
- package/dist/dataframe/index.js.map +1 -0
- package/dist/datasets/index.cjs +134 -0
- package/dist/datasets/index.cjs.map +1 -0
- package/dist/datasets/index.d.cts +3 -0
- package/dist/datasets/index.d.ts +3 -0
- package/dist/datasets/index.js +5 -0
- package/dist/datasets/index.js.map +1 -0
- package/dist/index-74AB8Cyh.d.cts +1126 -0
- package/dist/index-9oQx1HgV.d.cts +1180 -0
- package/dist/index-BJY2SI4i.d.ts +483 -0
- package/dist/index-BWGhrDlr.d.ts +733 -0
- package/dist/index-B_DK4FKY.d.cts +242 -0
- package/dist/index-BbA2Gxfl.d.ts +456 -0
- package/dist/index-BgHYAoSS.d.cts +837 -0
- package/dist/index-BndMbqsM.d.ts +1439 -0
- package/dist/index-C1mfVYoo.d.ts +2517 -0
- package/dist/index-CCvlwAmL.d.cts +809 -0
- package/dist/index-CDw5CnOU.d.ts +785 -0
- package/dist/index-Cn3SdB0O.d.ts +1126 -0
- package/dist/index-CrqLlS-a.d.ts +776 -0
- package/dist/index-D61yaSMY.d.cts +483 -0
- package/dist/index-D9Loo1_A.d.cts +2517 -0
- package/dist/index-DIT_OO9C.d.cts +785 -0
- package/dist/index-DIp_RrRt.d.ts +242 -0
- package/dist/index-DbultU6X.d.cts +1427 -0
- package/dist/index-DmEg_LCm.d.cts +776 -0
- package/dist/index-DoPWVxPo.d.cts +1439 -0
- package/dist/index-DuCxd-8d.d.ts +837 -0
- package/dist/index-Dx42TZaY.d.ts +809 -0
- package/dist/index-DyZ4QQf5.d.cts +456 -0
- package/dist/index-GFAVyOWO.d.ts +1427 -0
- package/dist/index-WHQLn0e8.d.cts +733 -0
- package/dist/index-ZtI1Iy4L.d.ts +1180 -0
- package/dist/index-eJgeni9c.d.cts +1911 -0
- package/dist/index-tk4lSYod.d.ts +1911 -0
- package/dist/index.cjs +72 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +17 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +15 -0
- package/dist/index.js.map +1 -0
- package/dist/linalg/index.cjs +86 -0
- package/dist/linalg/index.cjs.map +1 -0
- package/dist/linalg/index.d.cts +3 -0
- package/dist/linalg/index.d.ts +3 -0
- package/dist/linalg/index.js +5 -0
- package/dist/linalg/index.js.map +1 -0
- package/dist/metrics/index.cjs +158 -0
- package/dist/metrics/index.cjs.map +1 -0
- package/dist/metrics/index.d.cts +3 -0
- package/dist/metrics/index.d.ts +3 -0
- package/dist/metrics/index.js +5 -0
- package/dist/metrics/index.js.map +1 -0
- package/dist/ml/index.cjs +87 -0
- package/dist/ml/index.cjs.map +1 -0
- package/dist/ml/index.d.cts +3 -0
- package/dist/ml/index.d.ts +3 -0
- package/dist/ml/index.js +6 -0
- package/dist/ml/index.js.map +1 -0
- package/dist/ndarray/index.cjs +501 -0
- package/dist/ndarray/index.cjs.map +1 -0
- package/dist/ndarray/index.d.cts +5 -0
- package/dist/ndarray/index.d.ts +5 -0
- package/dist/ndarray/index.js +4 -0
- package/dist/ndarray/index.js.map +1 -0
- package/dist/nn/index.cjs +142 -0
- package/dist/nn/index.cjs.map +1 -0
- package/dist/nn/index.d.cts +6 -0
- package/dist/nn/index.d.ts +6 -0
- package/dist/nn/index.js +5 -0
- package/dist/nn/index.js.map +1 -0
- package/dist/optim/index.cjs +77 -0
- package/dist/optim/index.cjs.map +1 -0
- package/dist/optim/index.d.cts +4 -0
- package/dist/optim/index.d.ts +4 -0
- package/dist/optim/index.js +4 -0
- package/dist/optim/index.js.map +1 -0
- package/dist/plot/index.cjs +114 -0
- package/dist/plot/index.cjs.map +1 -0
- package/dist/plot/index.d.cts +6 -0
- package/dist/plot/index.d.ts +6 -0
- package/dist/plot/index.js +5 -0
- package/dist/plot/index.js.map +1 -0
- package/dist/preprocess/index.cjs +82 -0
- package/dist/preprocess/index.cjs.map +1 -0
- package/dist/preprocess/index.d.cts +4 -0
- package/dist/preprocess/index.d.ts +4 -0
- package/dist/preprocess/index.js +5 -0
- package/dist/preprocess/index.js.map +1 -0
- package/dist/random/index.cjs +74 -0
- package/dist/random/index.cjs.map +1 -0
- package/dist/random/index.d.cts +3 -0
- package/dist/random/index.d.ts +3 -0
- package/dist/random/index.js +5 -0
- package/dist/random/index.js.map +1 -0
- package/dist/stats/index.cjs +142 -0
- package/dist/stats/index.cjs.map +1 -0
- package/dist/stats/index.d.cts +3 -0
- package/dist/stats/index.d.ts +3 -0
- package/dist/stats/index.js +5 -0
- package/dist/stats/index.js.map +1 -0
- package/dist/tensor-B96jjJLQ.d.cts +205 -0
- package/dist/tensor-B96jjJLQ.d.ts +205 -0
- package/package.json +226 -0
{"version":3,"sources":["../src/optim/index.ts","../src/optim/Optimizer.ts","../src/optim/_internal.ts","../src/optim/optimizers/adadelta.ts","../src/optim/optimizers/adagrad.ts","../src/optim/optimizers/adam.ts","../src/optim/optimizers/adamw.ts","../src/optim/optimizers/nadam.ts","../src/optim/optimizers/rmsprop.ts","../src/optim/optimizers/sgd.ts","../src/optim/schedulers.ts"],"names":["isRecord"],"mappings":";;;AAAA,IAAA,aAAA,GAAA;AAAA,QAAA,CAAA,aAAA,EAAA;AAAA,EAAA,QAAA,EAAA,MAAA,QAAA;AAAA,EAAA,OAAA,EAAA,MAAA,OAAA;AAAA,EAAA,IAAA,EAAA,MAAA,IAAA;AAAA,EAAA,KAAA,EAAA,MAAA,KAAA;AAAA,EAAA,iBAAA,EAAA,MAAA,iBAAA;AAAA,EAAA,aAAA,EAAA,MAAA,aAAA;AAAA,EAAA,WAAA,EAAA,MAAA,WAAA;AAAA,EAAA,QAAA,EAAA,MAAA,QAAA;AAAA,EAAA,WAAA,EAAA,MAAA,WAAA;AAAA,EAAA,KAAA,EAAA,MAAA,KAAA;AAAA,EAAA,UAAA,EAAA,MAAA,UAAA;AAAA,EAAA,SAAA,EAAA,MAAA,SAAA;AAAA,EAAA,OAAA,EAAA,MAAA,OAAA;AAAA,EAAA,iBAAA,EAAA,MAAA,iBAAA;AAAA,EAAA,GAAA,EAAA,MAAA,GAAA;AAAA,EAAA,MAAA,EAAA,MAAA,MAAA;AAAA,EAAA,QAAA,EAAA,MAAA;AAAA,CAAA,CAAA;;;AC6DA,SAAS,SAAS,KAAA,EAAkD;AAClE,EAAA,OAAO,OAAO,KAAA,KAAU,QAAA,IAAY,KAAA,KAAU,IAAA;AAChD;AAEA,SAAS,YAAA,CAAa,OAAgB,OAAA,EAAiB;AACrD,EAAA,IAAI,CAAC,QAAA,CAAS,KAAK,CAAA,EAAG;AACpB,IAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,EAAG,OAAO,CAAA,kBAAA,CAAoB,CAAA;AAAA,EAC9D;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,cAAc,KAAA,EAAkD;AACvE,EAAA,OAAO,SAAS,KAAK,CAAA;AACvB;AAEA,SAAS,kBAAA,CAAmB,OAAgB,OAAA,EAAiB;AAC3D,EAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,EAAG;AACzB,IAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,EAAG,OAAO,CAAA,6BAAA,CAA+B,CAAA;AAAA,EACzE;AACA,EAAA,MAAM,SAAmB,EAAC;AAC1B,EAAA,KAAA,MAAW,SAAS,KAAA,EAAO;AACzB,IAAA,IAAI,CAAC,MAAA,CAAO,SAAA,CAAU,KAAK,CAAA,EAAG;AAC5B,MAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,EAAG,OAAO,CAAA,2BAAA,CAA6B,CAAA;AAAA,IACvE;AACA,IAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AAAA,EACnB;AACA,EAAA,OAAO,MAAA;AACT;AAYA,SAAS,kBACP,MAAA,EAC8C;AAE9C,EAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,MAAM,GAAG,OAAO,KAAA;AAEnC,EAAA,IAAI,MAAA,CAAO,MAAA,KAAW,CAAA,EAAG,OAAO,IAAA;AAEhC,EAAA,MAAM,KAAA,GAAQ,OAAO,CAAC,CAAA;AACtB,EAAA,IAAI,CAAC,KAAA,IAAS,OAAO,KAAA,KAAU,UAAU,OAAO,KAAA;AAChD,EAAA,OAAO,QAAA,IAAY,KAAA;AACrB;AAQO,IAAe,YAAf,MAGL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0BA,WAAA,CACE,QACmB,QAAA,EACnB;AADmB,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AAGnB,IAAA,IAAA,CAAK,cAAc,EAAC;AAGpB,IAAA,IAAI,CAAC,iBAAA,CAA2B,MAAM,CAAA,EAAG;AAEvC,MAAA,IAAA,CAAK,YAAY,IAAA,CAAK;AAAA,QACpB,MAAA,EAAQ,KAAA,CAAM,IAAA,CAAK,MAAM,CAAA;AAAA;AAAA,QACzB,OAAA,EAAS,EAAE,GAAG,QAAA;AAAS;AAAA,OACxB,CAAA;AAAA,IACH,CAAA,MAAO;AAEL,MAAA,KAAA,MAAW,SAAS,MAAA,EAAQ;AAE1B,QAAA,MAAM,EAAE,MAAA,EAAQ,WAAA,EAAa,GAAG,cAAa,GAAI,KAAA;AACjD,QAAA,IAAA,CAAK,YAAY,IAAA,CAAK;AAAA,UACpB,MAAA,EAAQ,KAAA,CAAM,IAAA,CAAK,WAAW,CAAA;AAAA;AAAA,UAC9B,OAAA,EAAS,EAAE,GAAG,QAAA,EAAU,GAAG,YAAA;AAAa;AAAA,SACzC,CAAA;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA7CO,WAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASG,KAAA,uBAAoC,GAAA,EAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyElD,QAAA,GAAiB;AAEf,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AAEpC,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,KAAA,CAAM,QAAA,EAAS;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,cAAc,UAAA,EAAuC;AAEnD,IAAA,MAAM,EAAE,MAAA,EAAQ,GAAG,OAAA,EAAQ,GAAI,UAAA;AAE/B,IAAA,IAAA,CAAK,YAAY,IAAA,CAAK;AAAA,MACpB,MAAA,EAAQ,KAAA,CAAM,IAAA,CAAK,MAAM,CAAA;AAAA;AAAA,MACzB,SAAS,EAAE,GAAG,IAAA,CAAK,QAAA,EAAU,GAAG,OAAA;AAAQ;AAAA,KACzC,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgCA,SAAA,GAAY;AACV,IAAA,MAAM,UAAA,uBAAiB,GAAA,EAAwB;AAC/C,IAAA,MAAM,gBAA8B,EAAC;AACrC,IAAA,MAAM,UAAA,GAAa,CAAC,KAAA,KAAsB;AACxC,MAAA,MAAM,QAAA,GAAW,UAAA,CAAW,GAAA,CAAI,KAAK,CAAA;AACrC,MAAA,IAAI,QAAA,KAAa,QAAW,OAAO,QAAA;AACnC,MAAA,MAAM,KAAK,aAAA,CAAc,MAAA;AACzB,MAAA,aAAA,CAAc,KAAK,KAAK,CAAA;AACxB,MAAA,UAAA,CAAW,GAAA,CAAI,OAAO,EAAE,CAAA;AACxB,MAAA,OAAO,EAAA;AAAA,IACT,CAAA;AAEA,IAAA,OAAO;AAAA;AAAA,MAEL,KAAA,EAAO,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,KAAA,CAAM,OAAA,EAAS,CAAA,CAAE,GAAA,CAAI,CAAC,CAAC,KAAA,EAAO,KAAK,CAAA,MAAO;AAAA,QAC/D,OAAA,EAAS,WAAW,KAAK,CAAA;AAAA,QACzB,KAAA;AAAA;AAAA,QACA;AAAA;AAAA,OACF,CAAE,CAAA;AAAA;AAAA,MAEF,WAAA,EAAa,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,CAAC,KAAA,MAAW;AAAA,QAC5C,QAAQ,KAAA,CAAM,MAAA;AAAA;AAAA,QACd,QAAA,EAAU,MAAM,MAAA,CAAO,GAAA,CAAI,CAAC,KAAA,KAAU,UAAA,CAAW,KAAK,CAAC,CAAA;AAAA,QACvD,SAAS,KAAA,CAAM;AAAA;AAAA,OACjB,CAAE;AAAA,KACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,cAAc,SAAA,EAA0C;AACtD,IAAA,MAAM,gBAAgB,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAC,KAAA,KAAU,MAAM,MAAM,CAAA;AACtE,IAAA,MAAM,oBAAoB,aAAA,CAAc,MAAA;AACxC,IAAA,MAAM,WAAA,uBAAkB,GAAA,EAAqB;AAC7C,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,aAAA,CAAc,QAAQ,CAAA,EAAA,EAAK;AAC7C,MAAA,WAAA,CAAY,GAAA,CAAI,aAAA,CAAc,CAAC,CAAA,EAAG,CAAC,CAAA;AAAA,IACrC;AAGA,IAAA,IAAI,MAAA,CAAO,MAAA,CAAO,SAAA,EAAW,aAAa,CAAA,EAAG;AAC3C,MAAA,MAAM,SAAA,GAAY,UAAU,aAAa,CAAA;AACzC,MAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,SAAS,CAAA,EAAG;AAC7B,QAAA,MAAM,IAAI,oBAAoB,8BAA8B,CAAA;AAAA,MAC9D;AACA,MAAA,MAAM,WAAA,GAAyB,SAAA;AAE/B,MAAA,IAAI,WAAA,CAAY,WAAW,CAAA,EAAG;AAC5B,QAAA,IAAI,IAAA,CAAK,WAAA,CAAY,MAAA,KAAW,CAAA,EAAG;AACjC,UAAA,MAAM,IAAI,oBAAoB,6BAA6B,CAAA;AAAA,QAC7D;AACA,QAAA,IAAA,CAAK,cAAc,EAAC;AAAA,MACtB,CAAA,MAAO;AACL,QAAA,IAAI,WAAA,CAAY,MAAA,KAAW,IAAA,CAAK,WAAA,CAAY,MAAA,EAAQ;AAClD,UAAA,MAAM,IAAI,oBAAoB,4BAA4B,CAAA;AAAA,QAC5D;AAEA,QAAA,MAAM,YAAA,uBAAmB,GAAA,EAAY;AACrC,QAAA,IAAI,eAAA,GAAkB,CAAA;AACtB,QAAA,IAAI,WAAA,GAAc,KAAA;AAClB,QAAA,IAAI,aAAA,GAAgB,KAAA;AACpB,QAAA,MAAM,aAID,EAAC;AAEN,QAAA,WAAA,CAAY,OAAA,CAAQ,CAAC,QAAA,EAAU,KAAA,KAAU;AACvC,UAAA,MAAM,WAAA,GAAc,YAAA,CAAa,QAAA,EAAU,CAAA,YAAA,EAAe,KAAK,CAAA,CAAA,CAAG,CAAA;AAClE,UAAA,MAAM,aAAa,YAAA,CAAa,WAAA,CAAY,SAAS,CAAA,EAAG,CAAA,YAAA,EAAe,KAAK,CAAA,SAAA,CAAW,CAAA;AACvF,UAAA,MAAM,OAAA,GAAmB,EAAE,GAAG,IAAA,CAAK,QAAA,EAAS;AAC5C,UAAA,MAAM,aAAA,GAAyC,OAAA;AAC/C,UAAA,MAAM,cAAA,GAA0C,EAAE,GAAG,IAAA,CAAK,QAAA,EAAS;AAEnE,UAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,EAAG;AACrD,YAAA,IAAI,MAAA,CAAO,MAAA,CAAO,cAAA,EAAgB,GAAG,CAAA,EAAG;AACtC,cAAA,MAAM,UAAA,GAAa,eAAe,GAAG,CAAA;AACrC,cAAA,MAAM,eAAe,OAAO,UAAA;AAC5B,cAAA,MAAM,aAAa,OAAO,KAAA;AAE1B,cAAA,IAAI,eAAe,YAAA,EAAc;AAC/B,gBAAA,MAAM,IAAI,mBAAA;AAAA,kBACR,6BAA6B,GAAG,CAAA,iBAAA,EAAoB,KAAK,CAAA,YAAA,EAAe,YAAY,SAAS,UAAU,CAAA;AAAA,iBACzG;AAAA,cACF;AACA,cAAA,aAAA,CAAc,GAAG,CAAA,GAAI,KAAA;AAAA,YACvB;AAAA,UACF;AAEA,UAAA,MAAM,WAAA,GAAc,YAAY,UAAU,CAAA;AAC1C,UAAA,MAAM,SAAA,GAAY,YAAY,QAAQ,CAAA;AACtC,UAAA,IAAI,QAAA;AACJ,UAAA,IAAI,gBAAgB,MAAA,EAAW;AAC7B,YAAA,QAAA,GAAW,kBAAA,CAAmB,WAAA,EAAa,CAAA,YAAA,EAAe,KAAK,CAAA,UAAA,CAAY,CAAA;AAC3E,YAAA,WAAA,GAAc,IAAA;AAAA,UAChB,CAAA,MAAO;AACL,YAAA,aAAA,GAAgB,IAAA;AAAA,UAClB;AAEA,UAAA,IAAI,cAAA;AAEJ,UAAA,IAAI,QAAA,EAAU;AACZ,YAAA,KAAA,MAAW,MAAM,QAAA,EAAU;AACzB,cAAA,IAAI,EAAA,GAAK,CAAA,IAAK,EAAA,IAAM,iBAAA,EAAmB;AACrC,gBAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,gBAAA,EAAmB,EAAE,CAAA,eAAA,CAAiB,CAAA;AAAA,cACtE;AACA,cAAA,IAAI,YAAA,CAAa,GAAA,CAAI,EAAE,CAAA,EAAG;AACxB,gBAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,kBAAA,EAAqB,EAAE,CAAA,eAAA,C
AAiB,CAAA;AAAA,cACxE;AACA,cAAA,YAAA,CAAa,IAAI,EAAE,CAAA;AAAA,YACrB;AACA,YAAA,eAAA,IAAmB,QAAA,CAAS,MAAA;AAC5B,YAAA,cAAA,GAAiB,QAAA,CAAS,GAAA,CAAI,CAAC,EAAA,KAAO;AACpC,cAAA,MAAM,KAAA,GAAQ,cAAc,EAAE,CAAA;AAC9B,cAAA,IAAI,CAAC,KAAA,EAAO;AACV,gBAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,gBAAA,EAAmB,EAAE,CAAA,eAAA,CAAiB,CAAA;AAAA,cACtE;AACA,cAAA,OAAO,KAAA;AAAA,YACT,CAAC,CAAA;AAAA,UACH;AAEA,UAAA,IAAI,cAAc,MAAA,EAAW;AAC3B,YAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,SAAS,CAAA,EAAG;AAC7B,cAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,YAAA,EAAe,KAAK,CAAA,yBAAA,CAA2B,CAAA;AAAA,YAC/E;AACA,YAAA,MAAM,qBAAmC,EAAC;AAC1C,YAAA,IAAI,UAAA,GAAa,KAAA;AACjB,YAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,cAAA,MAAM,UAAA,GAAa,WAAA,CAAY,GAAA,CAAI,QAAQ,CAAA;AAC3C,cAAA,IAAI,eAAe,MAAA,EAAW;AAC5B,gBAAA,UAAA,GAAa,IAAA;AACb,gBAAA;AAAA,cACF;AACA,cAAA,MAAM,KAAA,GAAQ,cAAc,UAAU,CAAA;AACtC,cAAA,IAAI,CAAC,KAAA,EAAO;AACV,gBAAA,UAAA,GAAa,IAAA;AACb,gBAAA;AAAA,cACF;AACA,cAAA,kBAAA,CAAmB,KAAK,KAAK,CAAA;AAAA,YAC/B;AACA,YAAA,IAAI,CAAC,UAAA,EAAY;AACf,cAAA,IAAI,QAAA,IAAY,QAAA,CAAS,MAAA,KAAW,kBAAA,CAAmB,MAAA,EAAQ;AAC7D,gBAAA,MAAM,IAAI,oBAAoB,8CAA8C,CAAA;AAAA,cAC9E;AACA,cAAA,IAAI,CAAC,cAAA,EAAgB;AACnB,gBAAA,cAAA,GAAiB,kBAAA;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAEA,UAAA,IAAI,CAAC,cAAA,EAAgB;AACnB,YAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,YAAA,EAAe,KAAK,CAAA,iCAAA,CAAmC,CAAA;AAAA,UACvF;AAEA,UAAA,IAAI,aAAa,MAAA,EAAW;AAC1B,YAAA,UAAA,CAAW,IAAA,CAAK,EAAE,MAAA,EAAQ,cAAA,EAAgB,SAAS,CAAA;AAAA,UACrD,CAAA,MAAO;AACL,YAAA,UAAA,CAAW,KAAK,EAAE,MAAA,EAAQ,cAAA,EAAgB,OAAA,EAAS,UAAU,CAAA;AAAA,UAC/D;AAAA,QACF,CAAC,CAAA;AAED,QAAA,IAAI,eAAe,aAAA,EAAe;AAChC,UAAA,MAAM,IAAI,oBAAoB,oDAAoD,CAAA;AAAA,QACpF;AAEA,QAAA,IAAI,WAAA,IAAe,oBAAoB,iBAAA,EAAmB;AACxD,UAAA,MAAM,IAAI,mBAAA;AAAA,YACR,CAAA,mCAAA,EAAsC,iBAAiB,CAAA,MAAA,EAAS,eAAe,CAAA;AAAA,WACjF;AAAA,QACF;AAEA,QAAA,IAAA,CAAK,WAAA,GAAc,UAAA,CAAW,GAAA,CAAI,CAAC,KAAA,MAAW;AAAA,UAC5C,QAAQ,KAAA,CAAM,MAAA;AAAA,UACd,SAAS,KAAA,CAAM;AAAA,SACjB,CAAE,CAAA;AAAA,MACJ;AAAA,IACF;AAGA,IAAA,IAAI,MAAA,CAAO,MAAA,CAAO,SAAA,EAAW,OAAO,CAAA,EAAG;AACrC,MAAA,MAAM,QAAA,GAAW,UAAU,OAAO,CAAA;AAClC,MAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,QAAQ,CAAA,EAAG;AAC5B,QAAA,MAAM,IAAI,oBAAoB,wBAAwB,CAAA;AAAA,MACxD;AACA,MAAA,MAAM,UAAA,GAAwB,QAAA;AAE9B,MAAA,IAAA,CAAK,MAAM,KAAA,EAAM;AAEjB,MAAA,UAAA,CAAW,OAAA,CAAQ,CAAC,QAAA,EAAU,KAAA,KAAU;AACtC,QAAA,MAAM,WAAA,GAAc,YAAA,CAAa,QAAA,EAAU,CAAA,MAAA,EAAS,KAAK,CAAA,CAAA,CAAG,CAAA;AAC5D,QAAA,IAAI,CAAC,MAAA,CAAO,MAAA,CAAO,WAAA,EAAa,OAAO,CAAA,EAAG;AACxC,UAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,MAAA,EAAS,KAAK,CAAA,mBAAA,CAAqB,CAAA;AAAA,QACnE;AACA,QAAA,MAAM,kBAAkB,YAAA,CAAa,WAAA,CAAY,OAAO,CAAA,EAAG,CAAA,MAAA,EAAS,KAAK,CAAA,OAAA,CAAS,CAAA;AAClF,QAAA,IAAI,CAAC,aAAA,CAAc,eAAe,CAAA,EAAG;AACnC,UAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,MAAA,EAAS,KAAK,CAAA,yBAAA,CAA2B,CAAA;AAAA,QACzE;AAEA,QAAA,MAAM,UAAA,GAAa,YAAY,SAAS,CAAA;AACxC,QAAA,MAAM,QAAA,GAAW,YAAY,OAAO,CAAA;AAEpC,QAAA,IAAI,aAAA;AAEJ,QAAA,IAAI,eAAe,MAAA,EAAW;AAC5B,UAAA,IACE,UAAA,KAAe,QACf,OAAO,UAAA,KAAe,YACtB,CAAC,MAAA,CAAO,SAAA,CAAU,UAAU,CAAA,EAC5B;AACA,YAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,gBAAA,EAAmB,MAAA,CAAO,UAAU,CAAC,CAAA,SAAA,CAAW,CAAA;AAAA,UAChF;AACA,UAAA,IAAI,UAAA,GAAa,CAAA,IAAK,UAAA,IAAc,iBAAA,EAAmB;AACrD,YAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,gBAAA,EAAmB,UAAU,CAAA,SAAA,CAAW,CAAA;AAAA,UACxE;AACA,UAAA,MAAM,KAAA,GAAQ,cAAc,UAAU,CAAA;AACtC,UAAA,IAAI,CAAC,KAAA,EAAO;AACV,YAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,gBAAA,EAAmB,UAAU,CAAA,SAAA,CAAW,CAAA;AAAA,UACxE;AACA,UAAA,IAAI,aAAa,MAAA,EAAW;AAC1B,YAAA,MAAM,UAAA,GAAa,WAAA,CAAY,GAAA,CAAI,QAAQ,CAAA;AAC3C,YAAA,IAAI,UAAA,KAAe,MAAA,IAAa,UAAA,KAAe,UAAA,EAAY;AACzD,cAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,QAAA,EAAW,UAAU,CAAA,8BAAA,CAAgC,CAAA;AAAA,YACr
F;AAAA,UACF;AACA,UAAA,aAAA,GAAgB,KAAA;AAAA,QAClB,CAAA,MAAO;AACL,UAAA,IAAI,aAAa,MAAA,EAAW;AAC1B,YAAA,MAAM,IAAI,oBAAoB,wCAAwC,CAAA;AAAA,UACxE;AACA,UAAA,MAAM,UAAA,GAAa,WAAA,CAAY,GAAA,CAAI,QAAQ,CAAA;AAC3C,UAAA,IAAI,eAAe,MAAA,EAAW;AAC5B,YAAA,MAAM,IAAI,oBAAoB,wCAAwC,CAAA;AAAA,UACxE;AACA,UAAA,MAAM,KAAA,GAAQ,cAAc,UAAU,CAAA;AACtC,UAAA,IAAI,CAAC,KAAA,EAAO;AACV,YAAA,MAAM,IAAI,oBAAoB,wCAAwC,CAAA;AAAA,UACxE;AACA,UAAA,aAAA,GAAgB,KAAA;AAAA,QAClB;AAEA,QAAA,IAAI,CAAC,aAAA,EAAe;AAClB,UAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,sCAAA,EAAyC,KAAK,CAAA,CAAA,CAAG,CAAA;AAAA,QACjF;AACA,QAAA,IAAI,CAAC,IAAA,CAAK,OAAA,CAAQ,eAAe,CAAA,EAAG;AAClC,UAAA,MAAM,IAAI,mBAAA,CAAoB,CAAA,MAAA,EAAS,KAAK,CAAA,6BAAA,CAA+B,CAAA;AAAA,QAC7E;AACA,QAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,aAAA,EAAe,eAAe,CAAA;AAAA,MAC/C,CAAC,CAAA;AAAA,IACH;AAAA,EACF;AACF;;;AC9gBA,SAAS,kBAAkB,KAAA,EAA0C;AACnE,EAAA,OAAO,KAAA,YAAiB,gBAAgB,KAAA,YAAiB,YAAA;AAC3D;AAYO,SAAS,eAAA,CAAmB,KAAA,EAAqB,KAAA,EAAe,OAAA,EAAoB;AACzF,EAAA,IAAI,KAAA,GAAQ,CAAA,IAAK,KAAA,IAAS,KAAA,CAAM,MAAA,EAAQ;AACtC,IAAA,MAAM,IAAI,WAAW,CAAA,MAAA,EAAS,KAAK,sBAAsB,KAAA,CAAM,MAAM,CAAA,KAAA,EAAQ,OAAO,CAAA,CAAA,EAAI;AAAA,MACtF,KAAA;AAAA,MACA,UAAA,EAAY,CAAC,CAAA,EAAG,KAAA,CAAM,SAAS,CAAC;AAAA,KACjC,CAAA;AAAA,EACH;AACA,EAAA,MAAM,KAAA,GAAQ,MAAM,KAAK,CAAA;AACzB,EAAA,IAAI,UAAU,MAAA,EAAW;AACvB,IAAA,MAAM,IAAI,YAAA,CAAa,CAAA,8BAAA,EAAiC,KAAK,CAAA,IAAA,EAAO,OAAO,CAAA,CAAE,CAAA;AAAA,EAC/E;AACA,EAAA,OAAO,KAAA;AACT;AASO,SAAS,uBAAA,CAAwB,MAAc,KAAA,EAAqB;AACzE,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,IAAK,QAAQ,CAAA,EAAG;AACxC,IAAA,MAAM,IAAI,sBAAsB,CAAA,QAAA,EAAW,IAAI,KAAK,KAAK,CAAA,CAAA,EAAI,MAAM,KAAK,CAAA;AAAA,EAC1E;AACF;AASO,SAAS,oBAAA,CAAqB,MAAc,KAAA,EAAqB;AACtE,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,IAAK,SAAS,CAAA,EAAG;AACzC,IAAA,MAAM,IAAI,sBAAsB,CAAA,QAAA,EAAW,IAAI,KAAK,KAAK,CAAA,cAAA,CAAA,EAAkB,MAAM,KAAK,CAAA;AAAA,EACxF;AACF;AASO,SAAS,YAAA,CAAa,MAAc,KAAA,EAAqB;AAC9D,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AAC3B,IAAA,MAAM,IAAI,sBAAsB,CAAA,QAAA,EAAW,IAAI,KAAK,KAAK,CAAA,CAAA,EAAI,MAAM,KAAK,CAAA;AAAA,EAC1E;AACF;AAWO,SAAS,aAAA,CAAc,IAAA,EAAc,KAAA,EAAe,GAAA,EAAa,GAAA,EAAmB;AACzF,EAAA,IAAI,CAAC,OAAO,QAAA,CAAS,KAAK,KAAK,KAAA,GAAQ,GAAA,IAAO,SAAS,GAAA,EAAK;AAC1D,IAAA,MAAM,IAAI,qBAAA;AAAA,MACR,WAAW,IAAI,CAAA,EAAA,EAAK,KAAK,CAAA,oBAAA,EAAuB,GAAG,KAAK,GAAG,CAAA,EAAA,CAAA;AAAA,MAC3D,IAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACF;AAaO,SAAS,kBAAA,CACd,OACA,aAAA,EAMA;AACA,EAAA,IAAI,CAAC,MAAM,YAAA,EAAc;AACvB,IAAA,MAAM,IAAI,qBAAA;AAAA,MACR,qDAAA;AAAA,MACA,cAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,MAAM,IAAI,KAAA,CAAM,IAAA;AAChB,EAAA,IAAI,CAAC,CAAA,EAAG;AACN,IAAA,MAAM,IAAI,cAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AAEA,EAAA,MAAM,SAAA,GAAY,MAAM,MAAA,CAAO,IAAA;AAC/B,EAAA,MAAM,WAAW,CAAA,CAAE,IAAA;AAEnB,EAAA,IAAI,CAAC,iBAAA,CAAkB,SAAS,KAAK,CAAC,iBAAA,CAAkB,QAAQ,CAAA,EAAG;AACjE,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,GAAG,aAAa,CAAA,qEAAA;AAAA,KAClB;AAAA,EACF;AAEA,EAAA,IAAI,SAAA,CAAU,WAAA,KAAgB,QAAA,CAAS,WAAA,EAAa;AAClD,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,GAAG,aAAa,CAAA,0DAAA;AAAA,KAClB;AAAA,EACF;AAEA,EAAA,IAAI,KAAA,CAAM,MAAA,CAAO,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAChC,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,qDAAqD,KAAA,CAAM,MAAA,CAAO,IAAI,CAAA,QAAA,EAAW,EAAE,IAAI,CAAA,CAAA;AAAA,KACzF;AAAA,EACF;AAEA,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,QAAA;AAAA,IACN,YAAY,CAAA,CAAE,MAAA;AAAA,IACd,KAAA,EAAO,SAAA;AAAA,IACP,WAAA,EAAa,MAAM,MAAA,CAAO;AAAA,GAC5B;AACF;AAUO,SAAS,gBAAA,CACd,MAAA,EACA,YAAA,EACA,UAAA,EACM;AACN,EAAA,IAAI,MAAA,CAAO,WAAW,YAAA,EAAc;AAClC,IAAA,MAAM,IAAI,YAAA;AAAA,MACR,kCAAkC,UAAU,CAAA,WAAA,EAAc,YAAY,CAAA,MAAA,EAAS,OAAO,MAAM,CAAA;AAAA,KAC9F;AAAA,EACF;AACF;;;ACzIO,IAAM,QAAA,GAAN
,cAAuB,SAAA,CAA0C;AAAA,EAC9D,UAAA,GAAa,CAAA;AAAA,EAErB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA,EACA,WAAA,CACE,MAAA,EACA,OAAA,GAKI,EAAC,EACL;AACA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,CAAA;AAAA,MAClB,GAAA,EAAK,QAAQ,GAAA,IAAO,GAAA;AAAA,MACpB,GAAA,EAAK,QAAQ,GAAA,IAAO,IAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe;AAAA,KACtC;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAGtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,aAAA,CAAc,KAAA,EAAO,QAAA,CAAS,GAAA,EAAK,CAAA,EAAG,CAAC,CAAA;AACvC,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA,EAEU,QAAQ,KAAA,EAAwD;AACxE,IAAA,OAAO,MAAM,WAAW,CAAA,YAAa,YAAA,IAAgB,KAAA,CAAM,UAAU,CAAA,YAAa,YAAA;AAAA,EACpF;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAEJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,EAAA;AAEL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,EAAA,EAAI,GAAA,EAAK,GAAA,EAAK,WAAA,KAAgB,KAAA,CAAM,OAAA;AAG5C,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,aAAA,CAAc,KAAA,EAAO,GAAA,EAAK,CAAA,EAAG,CAAC,CAAA;AAC9B,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AAEzD,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA,EAAY,IAAA;AAAA,UACZ,KAAA,EAAO,KAAA;AAAA,UACP,WAAA,EAAa;AAAA,SACf,GAAI,kBAAA,CAAmB,KAAA,EAAO,UAAU,CAAA;AACxC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAG1B,QAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AAChC,QAAA,IAAI,CAAC,KAAA,EAAO;AACV,UAAA,KAAA,GAAQ;AAAA,YACN,SAAA,EAAW,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,YAChC,QAAA,EAAU,IAAI,YAAA,CAAa,IAAI;AAAA,WACjC;AACA,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAAA,QAC7B;AAGA,QAAA,gBAAA,CAAiB,KAAA,CAAM,SAAA,EAAW,IAAA,EAAM,oBAAoB,CAAA;AAC5D,QAAA,gBAAA,CAAiB,KAAA,CAAM,QAAA,EAAU,IAAA,EAAM,mBAAmB,CAAA;AAE1D,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,UAAA,MAAM,GAAA,GAAM,eAAA,CAAgB,QAAA,EAAU,IAAA,GAAO,GAAG,mBAAmB,CAAA;AACnE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,EAAO,IAAA,GAAO,GAAG,oBAAoB,CAAA;AAChE,UAAA,YAAA,CAAa,YAAY,GAAG,CAAA;AAC5B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,MAAM,EAAA,GAAK,WAAA,KAAgB,CAAA,GAAI,GAAA,GAAM,cAAc,EAAA,GAAK,GAAA;AAGxD,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,CAAM,SAAA,EAAW,GAAG,oBAAoB,CAAA;AACnE,UAAA,MAAM,KAAA,GAAQ,GAAA,GAAM,EAAA,GAAA,CAAM,CAAA,GAAI,OAAO,EAAA,GAAK,EAAA;AAC1C,UAAA,KAAA,CAAM,SAAA,CAAU,CAAC,CAAA,GAAI,KAAA;AAGrB,UAAA,MAAM,GAAA,GAAM,IAAA,CAAK,IAAA,CAAK,KAAA,GAAQ,GAAG,CAAA;AAGjC,UAAA,MAAM,IAAA,GAAO,eAAA,CAAgB,KAAA,CAAM,QAAA,EAAU,GAAG,mBAAmB,CAAA;AACnE,UAAA,MAAM,SAAA,GAAY,IAAA,CAAK,IAAA,CAAK,IAAA,GAAO,GAAG,CAAA;AAGtC,UAAA,MAAM,KAAA,GAAS,YAAY,GAAA,GAAO,EAAA;AAGlC,UAAA,KAAA,CAAM,SAAS,CAAC,CAAA,GAAI,MAAM,IAAA,GAAA,CAAQ,CAAA,GAAI,OAAO,KAAA,GAAQ,KAAA;AAGrD,UAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAK,EAAA,GAAK,KAAA;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;AChJO,IAAM,OAAA,GAAN,cAAsB,SAAA,CAAwC;AAAA,EAC3D,UAAA,GAAa,CAAA;AAAA,EAErB,IAAI,SAAA,GAAoB;AACtB
,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA,EACA,WAAA,CACE,MAAA,EACA,OAAA,GAKI,EAAC,EACL;AACA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,GAAA,EAAK,QAAQ,GAAA,IAAO,KAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,OAAA,EAAS,QAAQ,OAAA,IAAW;AAAA,KAC9B;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAEtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAClE,IAAA,uBAAA,CAAwB,UAAA,EAAY,SAAS,OAAO,CAAA;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA,EAEU,QAAQ,KAAA,EAAuD;AACvE,IAAA,OAAO,OAAO,KAAA,CAAM,MAAM,MAAM,QAAA,IAAY,KAAA,CAAM,KAAK,CAAA,YAAa,YAAA;AAAA,EACtE;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAEJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,EAAA;AAEL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,EAAA,EAAI,GAAA,EAAK,WAAA,EAAa,OAAA,KAAY,KAAA,CAAM,OAAA;AAEhD,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AACzD,MAAA,uBAAA,CAAwB,YAAY,OAAO,CAAA;AAE3C,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA,EAAY,IAAA;AAAA,UACZ,KAAA,EAAO,KAAA;AAAA,UACP,WAAA,EAAa;AAAA,SACf,GAAI,kBAAA,CAAmB,KAAA,EAAO,SAAS,CAAA;AACvC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAE1B,QAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AACrC,QAAA,MAAM,KAAA,GACJ,aACC,MAAM;AACL,UAAA,MAAM,IAAA,GAAO;AAAA,YACX,IAAA,EAAM,CAAA;AAAA,YACN,GAAA,EAAK,IAAI,YAAA,CAAa,IAAI;AAAA,WAC5B;AACA,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,IAAI,CAAA;AAC1B,UAAA,OAAO,IAAA;AAAA,QACT,CAAA,GAAG;AAGL,QAAA,gBAAA,CAAiB,KAAA,CAAM,GAAA,EAAK,IAAA,EAAM,aAAa,CAAA;AAE/C,QAAA,KAAA,CAAM,IAAA,IAAQ,CAAA;AAEd,QAAA,MAAM,GAAA,GAAM,EAAA,IAAM,CAAA,GAAA,CAAK,KAAA,CAAM,OAAO,CAAA,IAAK,OAAA,CAAA;AAEzC,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,UAAA,MAAM,GAAA,GAAM,eAAA,CAAgB,QAAA,EAAU,IAAA,GAAO,GAAG,kBAAkB,CAAA;AAClE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,EAAO,IAAA,GAAO,GAAG,mBAAmB,CAAA;AAC/D,UAAA,YAAA,CAAa,YAAY,GAAG,CAAA;AAC5B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAE5B,UAAA,MAAM,EAAA,GAAK,WAAA,KAAgB,CAAA,GAAI,GAAA,GAAM,cAAc,EAAA,GAAK,GAAA;AAExD,UAAA,MAAM,MAAA,GAAS,eAAA,CAAgB,KAAA,CAAM,GAAA,EAAK,GAAG,aAAa,CAAA;AAC1D,UAAA,MAAM,MAAA,GAAS,SAAS,EAAA,GAAK,EAAA;AAC7B,UAAA,KAAA,CAAM,GAAA,CAAI,CAAC,CAAA,GAAI,MAAA;AAEf,UAAA,MAAM,GAAA,GAAM,IAAA,CAAK,IAAA,CAAK,MAAM,CAAA,GAAI,GAAA;AAChC,UAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAK,OAAO,EAAA,GAAK,GAAA,CAAA;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACrIO,IAAM,IAAA,GAAN,cAAmB,SAAA,CAAkC;AAAA,EAClD,UAAA,GAAa,CAAA;AAAA,EAErB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA,EAEA,WAAA,CACE,MAAA,EACA,OAAA,GAOI,EAAC,EACL;AACA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,KAAA,EAAO,QAAQ,KAAA,IAAS,GAAA;AAAA,MACxB,KAAA,EAAO,QAAQ,KAAA,IAAS,KAAA;AAAA,MACxB,GAAA,EAAK,QAAQ,GAAA,IAAO,IAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,OAAA,EAAS,QAAQ,OAAA,IAAW;
AAAA,KAC9B;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAEtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA,EAEU,QAAQ,KAAA,EAAoD;AACpE,IAAA,MAAM,WAAA,GACJ,OAAO,KAAA,CAAM,MAAM,CAAA,KAAM,QAAA,IACzB,KAAA,CAAM,QAAQ,CAAA,YAAa,YAAA,IAC3B,KAAA,CAAM,UAAU,CAAA,YAAa,YAAA;AAC/B,IAAA,IAAI,CAAC,aAAa,OAAO,KAAA;AACzB,IAAA,IAAI,KAAA,CAAM,aAAa,CAAA,KAAM,MAAA,IAAa,EAAE,KAAA,CAAM,aAAa,aAAa,YAAA,CAAA,EAAe;AACzF,MAAA,OAAO,KAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAEJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAEA,IAAA,IAAA,CAAK,UAAA,EAAA;AAEL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,IAAI,KAAA,EAAO,KAAA,EAAO,KAAK,WAAA,EAAa,OAAA,KAAY,KAAA,CAAM,OAAA;AAE9D,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AAEzD,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA;AAAA,UACA,KAAA,EAAO,SAAA;AAAA,UACP;AAAA,SACF,GAAI,kBAAA,CAAmB,KAAA,EAAO,MAAM,CAAA;AACpC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAE1B,QAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AACrC,QAAA,MAAM,KAAA,GACJ,aACC,MAAM;AACL,UAAA,MAAM,IAAA,GAAO;AAAA,YACX,IAAA,EAAM,CAAA;AAAA,YACN,MAAA,EAAQ,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,YAC7B,QAAA,EAAU,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,YAC/B,GAAI,UAAU,EAAE,WAAA,EAAa,IAAI,YAAA,CAAa,IAAI,CAAA,EAAE,GAAI;AAAC,WAC3D;AACA,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,IAAI,CAAA;AAC1B,UAAA,OAAO,IAAA;AAAA,QACT,CAAA,GAAG;AAGL,QAAA,gBAAA,CAAiB,KAAA,CAAM,MAAA,EAAQ,IAAA,EAAM,aAAa,CAAA;AAClD,QAAA,gBAAA,CAAiB,KAAA,CAAM,QAAA,EAAU,IAAA,EAAM,eAAe,CAAA;AACtD,QAAA,IAAI,OAAA,IAAW,MAAM,WAAA,EAAa;AAChC,UAAA,gBAAA,CAAiB,KAAA,CAAM,WAAA,EAAa,IAAA,EAAM,kBAAkB,CAAA;AAAA,QAC9D;AAEA,QAAA,KAAA,CAAM,IAAA,IAAQ,CAAA;AAGd,QAAA,MAAM,eAAA,GAAkB,CAAA,GAAI,KAAA,IAAS,KAAA,CAAM,IAAA;AAC3C,QAAA,MAAM,eAAA,GAAkB,CAAA,GAAI,KAAA,IAAS,KAAA,CAAM,IAAA;AAE3C,QAAA,MAAM,WAAW,EAAA,GAAK,eAAA;AAEtB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,UAAA,MAAM,GAAA,GAAM,eAAA,CAAgB,QAAA,EAAU,UAAA,GAAa,GAAG,eAAe,CAAA;AACrE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,SAAA,EAAW,WAAA,GAAc,GAAG,gBAAgB,CAAA;AACvE,UAAA,YAAA,CAAa,YAAY,GAAG,CAAA;AAC5B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,MAAM,EAAA,GAAK,WAAA,KAAgB,CAAA,GAAI,GAAA,GAAM,cAAc,EAAA,GAAK,GAAA;AAExD,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,MAAA,EAAQ,GAAG,aAAa,CAAA;AACxD,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,QAAA,EAAU,GAAG,eAAe,CAAA;AAE5D,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,KAAA,IAAS,EAAA;AACvC,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,SAAS,EAAA,GAAK,EAAA;AAE5C,UAAA,KAAA,CAAM,MAAA,CA
AO,CAAC,CAAA,GAAI,IAAA;AAClB,UAAA,KAAA,CAAM,QAAA,CAAS,CAAC,CAAA,GAAI,IAAA;AAEpB,UAAA,IAAI,OAAA,GAAU,IAAA;AACd,UAAA,IAAI,OAAA,EAAS;AACX,YAAA,MAAM,SAAS,KAAA,CAAM,WAAA;AACrB,YAAA,IAAI,CAAC,MAAA,EAAQ;AACX,cAAA,MAAM,IAAI,aAAa,4DAA4D,CAAA;AAAA,YACrF;AACA,YAAA,MAAM,IAAA,GAAO,KAAK,GAAA,CAAI,eAAA,CAAgB,QAAQ,CAAA,EAAG,kBAAkB,GAAG,IAAI,CAAA;AAC1E,YAAA,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA;AACZ,YAAA,OAAA,GAAU,IAAA;AAAA,UACZ;AAEA,UAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,IAAA,CAAK,OAAA,GAAU,eAAe,CAAA,GAAI,GAAA;AACrD,UAAA,SAAA,CAAU,WAAA,GAAc,CAAC,CAAA,GAAI,EAAA,GAAK,YAAY,IAAA,GAAO,KAAA,CAAA;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;AChJO,IAAM,KAAA,GAAN,cAAoB,SAAA,CAAoC;AAAA;AAAA,EAErD,UAAA,GAAa,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,WAAA,CACE,MAAA,EACA,OAAA,GAOI,EAAC,EACL;AAEA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,KAAA,EAAO,QAAQ,KAAA,IAAS,GAAA;AAAA,MACxB,KAAA,EAAO,QAAQ,KAAA,IAAS,KAAA;AAAA,MACxB,GAAA,EAAK,QAAQ,GAAA,IAAO,IAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe,IAAA;AAAA;AAAA,MACpC,OAAA,EAAS,QAAQ,OAAA,IAAW;AAAA,KAC9B;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAGtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUU,QAAQ,KAAA,EAAqD;AACrE,IAAA,MAAM,WAAA,GACJ,OAAO,KAAA,CAAM,MAAM,CAAA,KAAM,QAAA,IACzB,KAAA,CAAM,QAAQ,CAAA,YAAa,YAAA,IAC3B,KAAA,CAAM,UAAU,CAAA,YAAa,YAAA;AAC/B,IAAA,IAAI,CAAC,aAAa,OAAO,KAAA;AACzB,IAAA,IAAI,KAAA,CAAM,aAAa,CAAA,KAAM,MAAA,IAAa,EAAE,KAAA,CAAM,aAAa,aAAa,YAAA,CAAA,EAAe;AACzF,MAAA,OAAO,KAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAGJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,EAAA;AAGL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,IAAI,KAAA,EAAO,KAAA,EAAO,KAAK,WAAA,EAAa,OAAA,KAAY,KAAA,CAAM,OAAA;AAG9D,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AAGzD,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAEhC,QAAA,MAAM;AAAA,UACJ,IAAA;AAAA,UACA,UAAA;AAAA,UACA,KAAA,EAAO,KAAA;AAAA,UACP,WAAA,EAAa;AAAA,SACf,GAAI,kBAAA,CAAmB,KAAA,EAAO,OAAO,CAAA;AACrC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAG1B,QAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AACrC,QAAA,MAAM,KAAA,GACJ,aACC,MAAM;AAEL,UAAA,MAAM,IAAA,GAAO;AAAA,YACX,IAAA,EAAM,CAAA;AAAA,YACN,MAAA,EAAQ,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA;AAAA,YAC7B,QAAA,EAAU,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA;AAAA,YAC/B,GAAI,UAAU,EAAE,WAAA,EAAa,IAAI,YAAA,CAAa,IAAI,CAAA,EAAE,GAAI;AAAC;AAAA,WAC3D;AAC
A,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,IAAI,CAAA;AAC1B,UAAA,OAAO,IAAA;AAAA,QACT,CAAA,GAAG;AAGL,QAAA,gBAAA,CAAiB,KAAA,CAAM,MAAA,EAAQ,IAAA,EAAM,cAAc,CAAA;AACnD,QAAA,gBAAA,CAAiB,KAAA,CAAM,QAAA,EAAU,IAAA,EAAM,gBAAgB,CAAA;AACvD,QAAA,IAAI,OAAA,IAAW,MAAM,WAAA,EAAa;AAChC,UAAA,gBAAA,CAAiB,KAAA,CAAM,WAAA,EAAa,IAAA,EAAM,mBAAmB,CAAA;AAAA,QAC/D;AAGA,QAAA,KAAA,CAAM,IAAA,IAAQ,CAAA;AAGd,QAAA,MAAM,eAAA,GAAkB,CAAA,GAAI,KAAA,IAAS,KAAA,CAAM,IAAA;AAC3C,QAAA,MAAM,eAAA,GAAkB,CAAA,GAAI,KAAA,IAAS,KAAA,CAAM,IAAA;AAG3C,QAAA,MAAM,WAAW,EAAA,GAAK,eAAA;AAGtB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAE7B,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,IAAA,EAAM,UAAA,GAAa,GAAG,gBAAgB,CAAA;AACjE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,EAAO,IAAA,GAAO,GAAG,iBAAiB,CAAA;AAG7D,UAAA,YAAA,CAAa,YAAY,EAAE,CAAA;AAC3B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,MAAA,EAAQ,GAAG,cAAc,CAAA;AACzD,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,QAAA,EAAU,GAAG,gBAAgB,CAAA;AAG7D,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,KAAA,IAAS,EAAA;AAGvC,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,SAAS,EAAA,GAAK,EAAA;AAG5C,UAAA,KAAA,CAAM,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA;AAClB,UAAA,KAAA,CAAM,QAAA,CAAS,CAAC,CAAA,GAAI,IAAA;AAGpB,UAAA,IAAI,OAAA,GAAU,IAAA;AACd,UAAA,IAAI,OAAA,EAAS;AACX,YAAA,MAAM,SAAS,KAAA,CAAM,WAAA;AACrB,YAAA,IAAI,CAAC,MAAA,EAAQ;AACX,cAAA,MAAM,IAAI,aAAa,4DAA4D,CAAA;AAAA,YACrF;AAEA,YAAA,MAAM,IAAA,GAAO,KAAK,GAAA,CAAI,eAAA,CAAgB,QAAQ,CAAA,EAAG,mBAAmB,GAAG,IAAI,CAAA;AAC3E,YAAA,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA;AACZ,YAAA,OAAA,GAAU,IAAA;AAAA,UACZ;AAGA,UAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,IAAA,CAAK,OAAA,GAAU,eAAe,CAAA,GAAI,GAAA;AAIrD,UAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAK,YAAY,IAAA,GAAO,KAAA,CAAA,GAAS,KAAK,WAAA,GAAc,EAAA;AAAA,QACxE;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACjPO,IAAM,KAAA,GAAN,cAAoB,SAAA,CAAoC;AAAA,EACrD,UAAA,GAAa,CAAA;AAAA,EAErB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA,EACA,WAAA,CACE,MAAA,EACA,OAAA,GAOI,EAAC,EACL;AACA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,KAAA,EAAO,QAAQ,KAAA,IAAS,GAAA;AAAA,MACxB,KAAA,EAAO,QAAQ,KAAA,IAAS,KAAA;AAAA,MACxB,GAAA,EAAK,QAAQ,GAAA,IAAO,IAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,aAAA,EAAe,QAAQ,aAAA,IAAiB;AAAA,KAC1C;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAGtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,aAAA,CAAc,OAAA,EAAS,QAAA,CAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAC3C,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAClE,IAAA,uBAAA,CAAwB,gBAAA,EAAkB,SAAS,aAAa,CAAA;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA,EAEU,QAAQ,KAAA,EAAqD;AACrE,IAAA,OACE,OAAO,KAAA,CAAM,MAAM,CAAA,KAAM,QAAA,IACzB,MAAM,QAAQ,CAAA,YAAa,YAAA,IAC3B,KAAA,CAAM,UAAU,CAAA,YAAa,YAAA,IAC7B,OAAO,KAAA,CAAM,WAAW,CAAA,KAAM,QAAA;AAAA,EAElC;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAEJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,E
AAA;AAEL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,IAAI,KAAA,EAAO,KAAA,EAAO,KAAK,WAAA,EAAa,aAAA,KAAkB,KAAA,CAAM,OAAA;AAGpE,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,aAAA,CAAc,OAAA,EAAS,KAAA,EAAO,CAAA,EAAG,CAAC,CAAA;AAClC,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AACzD,MAAA,uBAAA,CAAwB,kBAAkB,aAAa,CAAA;AAEvD,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA,EAAY,IAAA;AAAA,UACZ,KAAA,EAAO,KAAA;AAAA,UACP,WAAA,EAAa;AAAA,SACf,GAAI,kBAAA,CAAmB,KAAA,EAAO,OAAO,CAAA;AACrC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAG1B,QAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AAChC,QAAA,IAAI,CAAC,KAAA,EAAO;AACV,UAAA,KAAA,GAAQ;AAAA,YACN,IAAA,EAAM,CAAA;AAAA,YACN,MAAA,EAAQ,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,YAC7B,QAAA,EAAU,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,YAC/B,SAAA,EAAW;AAAA,WACb;AACA,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAAA,QAC7B;AAGA,QAAA,gBAAA,CAAiB,KAAA,CAAM,MAAA,EAAQ,IAAA,EAAM,cAAc,CAAA;AACnD,QAAA,gBAAA,CAAiB,KAAA,CAAM,QAAA,EAAU,IAAA,EAAM,gBAAgB,CAAA;AAEvD,QAAA,KAAA,CAAM,IAAA,EAAA;AACN,QAAA,MAAM,IAAI,KAAA,CAAM,IAAA;AAEhB,QAAA,MAAM,eAAA,GAAkB,IAAI,KAAA,IAAS,CAAA;AACrC,QAAA,MAAM,EAAA,GAAK,KAAA,IAAS,CAAA,GAAI,GAAA,GAAM,SAAS,CAAA,GAAI,aAAA,CAAA,CAAA;AAC3C,QAAA,MAAM,SAAS,KAAA,IAAS,CAAA,GAAI,GAAA,GAAM,IAAA,KAAA,CAAU,IAAI,CAAA,IAAK,aAAA,CAAA,CAAA;AACrD,QAAA,MAAM,SAAA,GAAY,MAAM,SAAA,GAAY,EAAA;AACpC,QAAA,MAAM,gBAAgB,SAAA,GAAY,MAAA;AAClC,QAAA,KAAA,CAAM,SAAA,GAAY,SAAA;AAElB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,UAAA,MAAM,GAAA,GAAM,eAAA,CAAgB,QAAA,EAAU,IAAA,GAAO,GAAG,gBAAgB,CAAA;AAChE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,EAAO,IAAA,GAAO,GAAG,iBAAiB,CAAA;AAC7D,UAAA,YAAA,CAAa,YAAY,GAAG,CAAA;AAC5B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,MAAM,EAAA,GAAK,WAAA,KAAgB,CAAA,GAAI,GAAA,GAAM,cAAc,EAAA,GAAK,GAAA;AAGxD,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,MAAA,EAAQ,GAAG,cAAc,CAAA;AACzD,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,KAAA,IAAS,EAAA;AACvC,UAAA,KAAA,CAAM,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA;AAGlB,UAAA,MAAM,CAAA,GAAI,eAAA,CAAgB,KAAA,CAAM,QAAA,EAAU,GAAG,gBAAgB,CAAA;AAC7D,UAAA,MAAM,IAAA,GAAO,KAAA,GAAQ,CAAA,GAAA,CAAK,CAAA,GAAI,SAAS,EAAA,GAAK,EAAA;AAC5C,UAAA,KAAA,CAAM,QAAA,CAAS,CAAC,CAAA,GAAI,IAAA;AAEpB,UAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,IAAA,CAAK,IAAA,GAAO,eAAe,CAAA,GAAI,GAAA;AAClD,UAAA,MAAM,QAAA,GAAW,QAAQ,CAAA,GAAI,aAAA,CAAA;AAC7B,UAAA,MAAM,IAAA,GAAO,MAAM,CAAA,GAAI,SAAA,CAAA;AACvB,UAAA,MAAM,SAAA,GAAY,MAAA,GAAS,QAAA,GAAA,CAAY,CAAA,GAAI,EAAA,IAAM,IAAA;AACjD,UAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAM,KAAK,SAAA,GAAa,KAAA;AAAA,QAC5C;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;AClJO,IAAM,OAAA,GAAN,cAAsB,SAAA,CAAwC;AAAA;AAAA,EAE3D,UAAA,GAAa,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,WAAA,CACE,MAAA,EACA,OAAA,GAOI,EAAC,EACL;AAEA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,KAAA,EAAO,QAAQ,KAAA,IAAS,IAAA;AAAA,MACxB,GAAA,EAAK,QAAQ,GAAA,IAAO,IAAA;AAAA,MACpB,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,QAAA,EAAU,QAAQ,QAAA,IAAY,CAAA;AAAA,MAC9B,QAAA,EAAU,QAAQ,QAAA,IAAY;AAAA,KAChC;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAGtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,QAAA,CAAS,KAAK,CAAA,IAAK,QAAA,CAAS,KAAA,GAAQ,CAAA,IAAK,QAAA,CAAS,KAAA,GAAQ,CAAA,EAAG;AAChF,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,eAAA,EAAkB,SAAS,KAAK,CAAA,0BAAA
,CAAA;AAAA,QAChC,OAAA;AAAA,QACA,QAAA,CAAS;AAAA,OACX;AAAA,IACF;AACA,IAAA,oBAAA,CAAqB,SAAA,EAAW,SAAS,GAAG,CAAA;AAC5C,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAClE,IAAA,uBAAA,CAAwB,gBAAA,EAAkB,SAAS,QAAQ,CAAA;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AAAA,EAEU,QAAQ,KAAA,EAAuD;AACvE,IAAA,IAAI,EAAE,KAAA,CAAM,WAAW,CAAA,YAAa,eAAe,OAAO,KAAA;AAC1D,IAAA,IACE,KAAA,CAAM,gBAAgB,CAAA,KAAM,MAAA,IAC5B,EAAE,KAAA,CAAM,gBAAgB,aAAa,YAAA,CAAA,EACrC;AACA,MAAA,OAAO,KAAA;AAAA,IACT;AACA,IAAA,IAAI,KAAA,CAAM,SAAS,CAAA,KAAM,MAAA,IAAa,EAAE,KAAA,CAAM,SAAS,aAAa,YAAA,CAAA,EAAe;AACjF,MAAA,OAAO,KAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAGJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,EAAA;AAGL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,IAAI,KAAA,EAAO,GAAA,EAAK,aAAa,QAAA,EAAU,QAAA,KAAa,KAAA,CAAM,OAAA;AAGlE,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,IAAI,CAAC,OAAO,QAAA,CAAS,KAAK,KAAK,KAAA,GAAQ,CAAA,IAAK,QAAQ,CAAA,EAAG;AACrD,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,kBAAkB,KAAK,CAAA,0BAAA,CAAA;AAAA,UACvB,OAAA;AAAA,UACA;AAAA,SACF;AAAA,MACF;AACA,MAAA,oBAAA,CAAqB,WAAW,GAAG,CAAA;AACnC,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AACzD,MAAA,uBAAA,CAAwB,kBAAkB,QAAQ,CAAA;AAGlD,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAEhC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA,EAAY,IAAA;AAAA,UACZ,KAAA,EAAO,KAAA;AAAA,UACP,WAAA,EAAa;AAAA,SACf,GAAI,kBAAA,CAAmB,KAAA,EAAO,SAAS,CAAA;AACvC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAG1B,QAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AAChC,QAAA,IAAI,CAAC,KAAA,EAAO;AACV,UAAA,KAAA,GAAQ;AAAA,YACN,SAAA,EAAW,IAAI,YAAA,CAAa,IAAI;AAAA,WAClC;AACA,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAAA,QAC7B;AAGA,QAAA,IAAI,QAAA,GAAW,CAAA,IAAK,CAAC,KAAA,CAAM,cAAA,EAAgB;AACzC,UAAA,KAAA,CAAM,cAAA,GAAiB,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,QAC9C;AAGA,QAAA,IAAI,QAAA,IAAY,CAAC,KAAA,CAAM,OAAA,EAAS;AAC9B,UAAA,KAAA,CAAM,OAAA,GAAU,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,QACvC;AAGA,QAAA,gBAAA,CAAiB,KAAA,CAAM,SAAA,EAAW,IAAA,EAAM,mBAAmB,CAAA;AAC3D,QAAA,IAAI,QAAA,GAAW,CAAA,IAAK,KAAA,CAAM,cAAA,EAAgB;AACxC,UAAA,gBAAA,CAAiB,KAAA,CAAM,cAAA,EAAgB,IAAA,EAAM,wBAAwB,CAAA;AAAA,QACvE;AACA,QAAA,IAAI,QAAA,IAAY,MAAM,OAAA,EAAS;AAC7B,UAAA,gBAAA,CAAiB,KAAA,CAAM,OAAA,EAAS,IAAA,EAAM,iBAAiB,CAAA;AAAA,QACzD;AAGA,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAE7B,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,QAAA,EAAU,IAAA,GAAO,GAAG,kBAAkB,CAAA;AACjE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,KAAA,EAAO,IAAA,GAAO,GAAG,mBAAmB,CAAA;AAG/D,UAAA,YAAA,CAAa,YAAY,EAAE,CAAA;AAC3B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,IAAI,IAAA,GAAO,EAAA;AACX,UAAA,IAAI,gBAAgB,CAAA,EAAG;AACrB,YAAA,IAAA,GAAO,OAAO,WAAA,GAAc,EAAA;AAAA,UAC9B;AAIA,UAAA,MAAM,KAAA,GAAQ,eAAA,CAAgB,KAAA,CAAM,SAAA,EAAW,GAAG,mBAAmB,CAAA;AACrE,UAAA,MAAM,QAAA,GAAW,KAAA,GAAQ,KAAA,GAAA,CAAS,CAAA,GAAI,SAAS,IAAA,GAAO,IAAA;AACtD,UAAA,KAAA,CAAM,SAAA,CAAU,CAAC,CAAA,GAAI,QAAA;AAGrB,UAAA,IAAI,GAAA,GAAM,QAAA;AAGV,UAAA,IAAI,QAAA,EAAU;AACZ,YAAA,MAAM,IAAA,GAAO,MAAM,OAAA,GAAU,eAAA,CAAgB,MAAM,OAAA,EAAS,CAAA,
EAAG,iBAAiB,CAAA,GAAI,CAAA;AAEpF,YAAA,MAAM,OAAA,GAAU,KAAA,GAAQ,IAAA,GAAA,CAAQ,CAAA,GAAI,KAAA,IAAS,IAAA;AAC7C,YAAA,IAAI,KAAA,CAAM,OAAA,EAAS,KAAA,CAAM,OAAA,CAAQ,CAAC,CAAA,GAAI,OAAA;AAEtC,YAAA,GAAA,GAAM,WAAW,OAAA,GAAU,OAAA;AAAA,UAC7B;AAEA,UAAA,MAAM,KAAA,GAAQ,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,KAAK,GAAA,CAAI,GAAA,EAAK,CAAC,CAAA,GAAI,GAAG,CAAA,GAAI,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA,GAAI,GAAA;AAC9E,UAAA,MAAM,iBAAiB,IAAA,GAAO,KAAA;AAG9B,UAAA,IAAI,WAAW,CAAA,EAAG;AAChB,YAAA,MAAM,GAAA,GAAM,MAAM,cAAA,GACd,eAAA,CAAgB,MAAM,cAAA,EAAgB,CAAA,EAAG,wBAAwB,CAAA,GACjE,CAAA;AACJ,YAAA,MAAM,MAAA,GAAS,WAAW,GAAA,GAAM,cAAA;AAChC,YAAA,IAAI,KAAA,CAAM,cAAA,EAAgB,KAAA,CAAM,cAAA,CAAe,CAAC,CAAA,GAAI,MAAA;AAEpD,YAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAK,EAAA,GAAK,MAAA;AAAA,UAC9B,CAAA,MAAO;AAEL,YAAA,KAAA,CAAM,IAAA,GAAO,CAAC,CAAA,GAAI,EAAA,GAAK,EAAA,GAAK,cAAA;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACxPO,IAAM,GAAA,GAAN,cAAkB,SAAA,CAAgC;AAAA;AAAA,EAE/C,UAAA,GAAa,CAAA;AAAA,EAErB,IAAI,SAAA,GAAoB;AACtB,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,WAAA,CACE,MAAA,EACA,OAAA,GAMI,EAAC,EACL;AACA,IAAA,MAAM,QAAA,GAAW;AAAA,MACf,EAAA,EAAI,QAAQ,EAAA,IAAM,IAAA;AAAA,MAClB,QAAA,EAAU,QAAQ,QAAA,IAAY,CAAA;AAAA,MAC9B,SAAA,EAAW,QAAQ,SAAA,IAAa,CAAA;AAAA,MAChC,WAAA,EAAa,QAAQ,WAAA,IAAe,CAAA;AAAA,MACpC,QAAA,EAAU,QAAQ,QAAA,IAAY;AAAA,KAChC;AAEA,IAAA,KAAA,CAAM,QAAQ,QAAQ,CAAA;AAGtB,IAAA,uBAAA,CAAwB,eAAA,EAAiB,SAAS,EAAE,CAAA;AACpD,IAAA,uBAAA,CAAwB,gBAAA,EAAkB,SAAS,QAAQ,CAAA;AAC3D,IAAA,uBAAA,CAAwB,WAAA,EAAa,SAAS,SAAS,CAAA;AACvD,IAAA,uBAAA,CAAwB,oBAAA,EAAsB,SAAS,WAAW,CAAA;AAClE,IAAA,IAAI,SAAS,QAAA,KAAa,QAAA,CAAS,YAAY,CAAA,IAAK,QAAA,CAAS,cAAc,CAAA,CAAA,EAAI;AAC7E,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,0DAAA;AAAA,QACA,UAAA;AAAA,QACA;AAAA,UACE,UAAU,QAAA,CAAS,QAAA;AAAA,UACnB,WAAW,QAAA,CAAS,SAAA;AAAA,UACpB,UAAU,QAAA,CAAS;AAAA;AACrB,OACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUU,QAAQ,KAAA,EAAmD;AACnE,IAAA,IACE,KAAA,CAAM,gBAAgB,CAAA,KAAM,MAAA,IAC5B,EAAE,KAAA,CAAM,gBAAgB,aAAa,YAAA,CAAA,EACrC;AACA,MAAA,OAAO,KAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,KAAK,OAAA,EAA4C;AAC/C,IAAA,IAAI,IAAA;AAGJ,IAAA,IAAI,OAAA,EAAS;AACX,MAAA,IAAA,GAAO,OAAA,EAAQ;AAAA,IACjB;AAGA,IAAA,IAAA,CAAK,UAAA,EAAA;AAGL,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,MAAM,EAAE,EAAA,EAAI,QAAA,EAAU,WAAW,WAAA,EAAa,QAAA,KAAa,KAAA,CAAM,OAAA;AAEjE,MAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,MAAA,uBAAA,CAAwB,kBAAkB,QAAQ,CAAA;AAClD,MAAA,uBAAA,CAAwB,aAAa,SAAS,CAAA;AAC9C,MAAA,uBAAA,CAAwB,sBAAsB,WAAW,CAAA;AAEzD,MAAA,IAAI,QAAA,KAAa,QAAA,IAAY,CAAA,IAAK,SAAA,KAAc,CAAA,CAAA,EAAI;AAClD,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,0DAAA;AAAA,UACA,UAAA;AAAA,UACA,EAAE,QAAA,EAAU,SAAA,EAAW,QAAA;AAAS,SAClC;AAAA,MACF;AAEA,MAAA,KAAA,MAAW,KAAA,IAAS,MAAM,MAAA,EAAQ;AAChC,QAAA,MAAM;AAAA,UACJ,IAAA,EAAM,QAAA;AAAA,UACN,UAAA;AAAA,UACA,KAAA,EAAO,SAAA;AAAA,UACP;AAAA,SACF,GAAI,kBAAA,CAAmB,KAAA,EAAO,KAAK,CAAA;AACnC,QAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAA;AAE1B,QAAA,IAAI,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAK,CAAA;AAChC,QAAA,IAAI,CAAC,KAAA,EAAO;AACV,UAAA,KAAA,GAAQ,EAAC;AACT,UAAA,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,KAAA,EAAO,KAAK,CAAA;AAAA,QAC7B;AAGA,QAAA,IAAI,cAAA;AACJ,QAAA,IAAI,aAAa,CAAA,EAAG;AAClB,UAAA,IAAI,CAAC,MAAM,cAAA,EAAgB;AACzB,YAAA,KAAA,CAAM,cAAA,GAAiB,IAAI,YAAA,CAAa,IAAI,CAAA;AAAA,UAC9C;AACA,UAAA,cAAA,GAAiB,KAAA,CAAM,cAAA;AAAA,QACzB;AAEA,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,EAAM,CAAA,EAAA,EAAK;AAC7B,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,QAAA,EAAU,UAAA,GAAa,GAAG,cAAc,CAAA;AACnE,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,SAAA,EAA
W,WAAA,GAAc,GAAG,eAAe,CAAA;AACtE,UAAA,YAAA,CAAa,YAAY,EAAE,CAAA;AAC3B,UAAA,YAAA,CAAa,aAAa,EAAE,CAAA;AAG5B,UAAA,IAAI,CAAA,GAAI,EAAA;AACR,UAAA,IAAI,gBAAgB,CAAA,EAAG;AACrB,YAAA,CAAA,GAAI,IAAI,WAAA,GAAc,EAAA;AAAA,UACxB;AAEA,UAAA,IAAI,cAAA,EAAgB;AAClB,YAAA,MAAM,KAAA,GAAQ,eAAA,CAAgB,cAAA,EAAgB,CAAA,EAAG,qBAAqB,CAAA;AACtE,YAAA,MAAM,IAAA,GAAO,QAAA,GAAW,KAAA,GAAA,CAAS,CAAA,GAAI,SAAA,IAAa,CAAA;AAClD,YAAA,cAAA,CAAe,CAAC,CAAA,GAAI,IAAA;AACpB,YAAA,CAAA,GAAI,QAAA,GAAW,CAAA,GAAI,QAAA,GAAW,IAAA,GAAO,IAAA;AAAA,UACvC;AAGA,UAAA,SAAA,CAAU,WAAA,GAAc,CAAC,CAAA,GAAI,EAAA,GAAK,EAAA,GAAK,CAAA;AAAA,QACzC;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAA,CAAgB,WAAW,CAAA,EAAW;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,QAAQ,CAAA;AACvC,IAAA,IAAI,CAAC,KAAA,EAAO;AACV,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,qBAAA,EAAwB,QAAQ,CAAA,mBAAA,EAAsB,IAAA,CAAK,YAAY,MAAM,CAAA,EAAA,CAAA;AAAA,QAC7E,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,OAAO,MAAM,OAAA,CAAQ,EAAA;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,gBAAgB,EAAA,EAAkB;AAChC,IAAA,uBAAA,CAAwB,iBAAiB,EAAE,CAAA;AAC3C,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,WAAA,EAAa;AACpC,MAAA,KAAA,CAAM,QAAQ,EAAA,GAAK,EAAA;AAAA,IACrB;AAAA,EACF;AACF;;;AC3NA,SAASA,UAAS,KAAA,EAAkD;AAClE,EAAA,OAAO,OAAO,KAAA,KAAU,QAAA,IAAY,KAAA,KAAU,IAAA;AAChD;AAEA,SAAS,cAAA,CAAe,OAA4B,KAAA,EAAe;AACjE,EAAA,MAAM,UAAUA,SAAAA,CAAS,KAAA,CAAM,OAAO,CAAA,GAAI,MAAM,OAAA,GAAU,MAAA;AAC1D,EAAA,MAAM,OAAA,GAAU,KAAA,CAAM,EAAA,IAAM,OAAA,GAAU,IAAI,CAAA;AAC1C,EAAA,IAAI,OAAO,YAAY,QAAA,IAAY,CAAC,OAAO,QAAA,CAAS,OAAO,CAAA,IAAK,OAAA,GAAU,CAAA,EAAG;AAC3E,IAAA,MAAM,IAAI,qBAAA;AAAA,MACR,yBAAyB,KAAK,CAAA,4BAAA,CAAA;AAAA,MAC9B,yBAAyB,KAAK,CAAA,IAAA,CAAA;AAAA,MAC9B;AAAA,KACF;AAAA,EACF;AACA,EAAA,OAAO,OAAA;AACT;AAEA,SAAS,UAAA,CAAW,OAA4B,EAAA,EAAY;AAC1D,EAAA,IAAIA,SAAAA,CAAS,KAAA,CAAM,OAAO,CAAA,EAAG;AAC3B,IAAA,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA,GAAI,EAAA;AAAA,EACxB;AACA,EAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,IAAA,KAAA,CAAM,EAAA,GAAK,EAAA;AAAA,EACb;AACA,EAAA,IAAI,EAAE,IAAA,IAAQ,KAAA,CAAA,IAAU,CAACA,SAAAA,CAAS,KAAA,CAAM,OAAO,CAAA,EAAG;AAChD,IAAA,KAAA,CAAM,EAAA,GAAK,EAAA;AAAA,EACb;AACF;AAEA,SAAS,kBAAkB,KAAA,EAAe;AACxC,EAAA,IAAI,CAAC,MAAA,CAAO,SAAA,CAAU,KAAK,CAAA,IAAK,QAAQ,EAAA,EAAI;AAC1C,IAAA,MAAM,IAAI,qBAAA,CAAsB,oCAAA,EAAsC,WAAA,EAAa,KAAK,CAAA;AAAA,EAC1F;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,oBAAA,CAAqB,OAAe,IAAA,EAAc;AACzD,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AAC3B,IAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,EAAG,IAAI,CAAA,eAAA,CAAA,EAAmB,MAAM,KAAK,CAAA;AAAA,EACvE;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,sBAAA,CAAuB,OAAe,IAAA,EAAc;AAC3D,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,IAAK,SAAS,CAAA,EAAG;AACzC,IAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,EAAG,IAAI,CAAA,YAAA,CAAA,EAAgB,MAAM,KAAK,CAAA;AAAA,EACpE;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,uBAAA,CAAwB,OAAe,IAAA,EAAc;AAC5D,EAAA,IAAI,CAAC,MAAA,CAAO,SAAA,CAAU,KAAK,CAAA,IAAK,SAAS,CAAA,EAAG;AAC1C,IAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,EAAG,IAAI,CAAA,2BAAA,CAAA,EAA+B,MAAM,KAAK,CAAA;AAAA,EACnF;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,yBAAA,CAA0B,OAAe,IAAA,EAAc;AAC9D,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,IAAK,QAAQ,CAAA,EAAG;AACxC,IAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,EAAG,IAAI,CAAA,aAAA,CAAA,EAAiB,MAAM,KAAK,CAAA;AAAA,EACrE;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,0BAAA,CAA2B,OAAe,IAAA,EAAc;AAC/D,EAAA,IAAI,CAAC,MAAA,CAAO,SAAA,CAAU,KAAK,CAAA,IAAK,QAAQ,CAAA,EAAG;AACzC,IAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,EAAG,IAAI,CAAA,+BAAA,CAAA,EAAmC,MAAM,KAAK,CAAA;AAAA,EACvF;AACA,EAAA,OAAO,KAAA;AACT;AAEA,SAAS,kBAAkB,SAAA,EAA+B;AACxD,EAAA,IAAI,CAAC,SAAA,IAAa,OAAO,SAAA,KAAc,QAAA,IAAY,CAAC,KAAA,CAAM,OAAA,CAAQ,SAAA,CAAU,WAAW,CAAA,EAAG;AACxF,IAAA,M
AAM,IAAI,qBAAA;AAAA,MACR,yCAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AACA,EAAA,IAAI,SAAA,CAAU,WAAA,CAAY,MAAA,KAAW,CAAA,EAAG;AACtC,IAAA,MAAM,IAAI,qBAAA;AAAA,MACR,uDAAA;AAAA,MACA,uBAAA;AAAA,MACA,SAAA,CAAU;AAAA,KACZ;AAAA,EACF;AACA,EAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,SAAA,CAAU,WAAA,CAAY,QAAQ,CAAA,EAAA,EAAK;AACrD,IAAA,MAAM,KAAA,GAAQ,SAAA,CAAU,WAAA,CAAY,CAAC,CAAA;AACrC,IAAA,IAAI,CAAC,KAAA,IAAS,OAAO,KAAA,KAAU,QAAA,EAAU;AACvC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yBAAyB,CAAC,CAAA,mBAAA,CAAA;AAAA,QAC1B,uBAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,KAAA,CAAM,MAAM,CAAA,EAAG;AAChC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yBAAyB,CAAC,CAAA,yBAAA,CAAA;AAAA,QAC1B,yBAAyB,CAAC,CAAA,QAAA,CAAA;AAAA,QAC1B,KAAA,CAAM;AAAA,OACR;AAAA,IACF;AACA,IAAA,cAAA,CAAe,OAAO,CAAC,CAAA;AAAA,EACzB;AACF;AAEA,SAAS,mBAAmB,UAAA,EAAsB;AAChD,EAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,UAAU,CAAA,IAAK,UAAA,CAAW,WAAW,CAAA,EAAG;AACzD,IAAA,MAAM,IAAI,qBAAA;AAAA,MACR,+DAAA;AAAA,MACA,YAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,MAAM,MAAA,GAAS,CAAC,GAAG,UAAU,CAAA,CAAE,KAAK,CAAC,CAAA,EAAG,CAAA,KAAM,CAAA,GAAI,CAAC,CAAA;AACnD,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,MAAA,CAAO,QAAQ,CAAA,EAAA,EAAK;AACtC,IAAA,MAAM,KAAA,GAAQ,OAAO,CAAC,CAAA;AACtB,IAAA,IAAI,KAAA,KAAU,UAAa,CAAC,MAAA,CAAO,UAAU,KAAK,CAAA,IAAK,QAAQ,CAAA,EAAG;AAChE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,oDAAA;AAAA,QACA,YAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAI,IAAI,CAAA,EAAG;AACT,MAAA,MAAM,IAAA,GAAO,MAAA,CAAO,CAAA,GAAI,CAAC,CAAA;AACzB,MAAA,IAAI,IAAA,KAAS,MAAA,IAAa,KAAA,IAAS,IAAA,EAAM;AACvC,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,wCAAA;AAAA,UACA,YAAA;AAAA,UACA;AAAA,SACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,EAAA,OAAO,MAAA;AACT;AAwBO,IAAe,cAAf,MAA2B;AAAA,EACtB,SAAA;AAAA,EACA,SAAA;AAAA,EACA,OAAA;AAAA,EAEV,WAAA,CAAY,SAAA,EAA+B,SAAA,GAAoB,EAAA,EAAI;AACjE,IAAA,iBAAA,CAAkB,SAAS,CAAA;AAC3B,IAAA,IAAA,CAAK,SAAA,GAAY,kBAAkB,SAAS,CAAA;AAC5C,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAGjB,IAAA,IAAA,CAAK,OAAA,GAAU,SAAA,CAAU,WAAA,CAAY,GAAA,CAAI,CAAC,OAAO,KAAA,KAAU,cAAA,CAAe,KAAA,EAAO,KAAK,CAAC,CAAA;AAAA,EACzF;AAAA,EAEU,wBAAwB,SAAA,EAAyB;AACzD,IAAA,MAAM,SAAA,GAAY,kBAAkB,SAAS,CAAA;AAC7C,IAAA,IAAI,YAAY,CAAA,EAAG;AACjB,MAAA;AAAA,IACF;AACA,IAAA,IAAA,CAAK,SAAA,GAAY,EAAA;AACjB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,IAAK,SAAA,EAAW,CAAA,EAAA,EAAK;AACnC,MAAA,IAAA,CAAK,IAAA,EAAK;AAAA,IACZ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,IAAA,GAAa;AACX,IAAA,IAAA,CAAK,SAAA,EAAA;AACL,IAAA,MAAM,MAAA,GAAS,KAAK,KAAA,EAAM;AAE1B,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAK,SAAA,CAAU,WAAA,CAAY,QAAQ,CAAA,EAAA,EAAK;AAC1D,MAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,SAAA,CAAU,WAAA,CAAY,CAAC,CAAA;AAC1C,MAAA,IAAI,KAAA,EAAO;AACT,QAAA,MAAM,IAAA,GAAO,OAAO,CAAC,CAAA;AACrB,QAAA,IAAI,SAAS,MAAA,EAAW;AACtB,UAAA,UAAA,CAAW,OAAO,IAAI,CAAA;AAAA,QACxB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAA,GAAsB;AACpB,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,WAAA,CAAY,GAAA,CAAI,CAAC,OAAO,KAAA,KAAU,cAAA,CAAe,KAAA,EAAO,KAAK,CAAC,CAAA;AAAA,EACtF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,KAAA,GAAQ;AACV,IAAA,OAAO,IAAA,CAAK,SAAA;AAAA,EACd;AACF;AAkBO,IAAM,MAAA,GAAN,cAAqB,WAAA,CAAY;AAAA,EAC9B,QAAA;AAAA,EACA,KAAA;AAAA,EAER,WAAA,CACE,WACA,OAAA,EACA;AACA,IAAA,MAAM,QAAA,GAAW,uBAAA,CAAwB,OAAA,CAAQ,QAAA,EAAU,UAAU,CAAA;AACrE,IAAA,MAAM,KAAA,GAAQ,sBAAA,CAAuB,OAAA,CAAQ,KAAA,IAAS,KAAK,OAAO,CAAA;AAClE,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,MAAM,MAAA,GAAS,KAAK,KAAA,IAAS,IAAA,CAAK,MAAM,IAAA,CAAK,SA
AA,GAAY,KAAK,QAAQ,CAAA;AACtE,IAAA,OAAO,KAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,EAAA,KAAO,KAAK,MAAM,CAAA;AAAA,EAC7C;AACF;AAgBO,IAAM,aAAA,GAAN,cAA4B,WAAA,CAAY;AAAA,EACrC,KAAA;AAAA,EAER,WAAA,CAAY,WAA+B,OAAA,EAAgD;AACzF,IAAA,MAAM,KAAA,GAAQ,sBAAA,CAAuB,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC3D,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,OAAO,IAAA,CAAK,QAAQ,GAAA,CAAI,CAAC,OAAO,EAAA,GAAK,IAAA,CAAK,KAAA,IAAS,IAAA,CAAK,SAAS,CAAA;AAAA,EACnE;AACF;AAeO,IAAM,iBAAA,GAAN,cAAgC,WAAA,CAAY;AAAA,EACzC,KAAA;AAAA,EACA,MAAA;AAAA,EAER,WAAA,CACE,WACA,OAAA,EACA;AACA,IAAA,MAAM,IAAA,GAAO,uBAAA,CAAwB,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAC3D,IAAA,MAAM,MAAA,GAAS,yBAAA,CAA0B,OAAA,CAAQ,MAAA,IAAU,GAAG,QAAQ,CAAA;AACtE,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,KAAA,GAAQ,IAAA;AACb,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,OAAO,IAAA,CAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,MAAA,KAAW;AAClC,MAAA,OACE,IAAA,CAAK,MAAA,GAAA,CACH,MAAA,GAAS,IAAA,CAAK,WAAW,CAAA,GAAI,IAAA,CAAK,GAAA,CAAK,IAAA,CAAK,EAAA,GAAK,IAAA,CAAK,SAAA,GAAa,IAAA,CAAK,KAAK,CAAA,CAAA,GAAM,CAAA;AAAA,IAEzF,CAAC,CAAA;AAAA,EACH;AACF;AAiBO,IAAM,WAAA,GAAN,cAA0B,WAAA,CAAY;AAAA,EACnC,gBAAA;AAAA,EACA,KAAA;AAAA,EAER,WAAA,CACE,WACA,OAAA,EACA;AACA,IAAA,MAAM,UAAA,GAAa,kBAAA,CAAmB,OAAA,CAAQ,UAAU,CAAA;AACxD,IAAA,MAAM,KAAA,GAAQ,sBAAA,CAAuB,OAAA,CAAQ,KAAA,IAAS,KAAK,OAAO,CAAA;AAClE,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,gBAAA,GAAmB,UAAA;AACxB,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAEhB,IAAA,IAAI,SAAA,GAAY,CAAA;AAChB,IAAA,KAAA,MAAW,SAAA,IAAa,KAAK,gBAAA,EAAkB;AAC7C,MAAA,IAAI,IAAA,CAAK,aAAa,SAAA,EAAW;AAC/B,QAAA,SAAA,EAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,MAAM,MAAA,GAAS,KAAK,KAAA,IAAS,SAAA;AAC7B,IAAA,OAAO,KAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,EAAA,KAAO,KAAK,MAAM,CAAA;AAAA,EAC7C;AACF;AAqBO,IAAM,QAAA,GAAN,cAAuB,WAAA,CAAY;AAAA,EAChC,WAAA;AAAA,EACA,SAAA;AAAA,EACA,UAAA;AAAA,EAER,WAAA,CACE,WACA,OAAA,EAMA;AACA,IAAA,MAAM,cAAc,sBAAA,CAAuB,OAAA,CAAQ,WAAA,IAAe,CAAA,GAAI,GAAG,aAAa,CAAA;AACtF,IAAA,MAAM,SAAA,GAAY,sBAAA,CAAuB,OAAA,CAAQ,SAAA,IAAa,GAAK,WAAW,CAAA;AAC9E,IAAA,MAAM,UAAA,GAAa,uBAAA,CAAwB,OAAA,CAAQ,UAAA,EAAY,YAAY,CAAA;AAC3E,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,WAAA,GAAc,WAAA;AACnB,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,UAAA,GAAa,UAAA;AAClB,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,IAAI,IAAA,CAAK,SAAA,IAAa,IAAA,CAAK,UAAA,EAAY;AACrC,MAAA,OAAO,KAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,EAAA,KAAO,EAAA,GAAK,KAAK,SAAS,CAAA;AAAA,IACrD;AAEA,IAAA,MAAM,MAAA,GACJ,KAAK,WAAA,GAAA,CAAe,IAAA,CAAK,YAAY,IAAA,CAAK,WAAA,KAAgB,IAAA,CAAK,SAAA,GAAY,IAAA,CAAK,UAAA,CAAA;AAClF,IAAA,OAAO,KAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,EAAA,KAAO,KAAK,MAAM,CAAA;AAAA,EAC7C;AACF;AAyBO,IAAM,oBAAN,MAAwB;AAAA,EACrB,SAAA;AAAA,EACA,IAAA;AAAA,EACA,MAAA;AAAA,EACA,QAAA;AAAA,EACA,SAAA;AAAA,EACA,QAAA;AAAA,EACA,KAAA;AAAA,EACA,IAAA;AAAA,EACA,YAAA;AAAA,EACA,eAAA;AAAA,EAER,WAAA,CACE,SAAA,EACA,OAAA,GAOI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,iBAAA,CAAkB,SAAS,CAAA;AAC3B,IAAA,IAAA,CAAK,IAAA,GAAO,QAAQ,IAAA,IAAQ,KAAA;AAC5B,IAAA,IAAI,IAAA,CAAK,IAAA,KAAS,KAAA,IAAS,IAAA,CAAK,SAAS,KAAA,EAAO;AAC9C,MAAA,MAAM,IAAI,qBAAA,CAAsB,6BAAA,EAA+B
,MAAA,EAAQ,QAAQ,IAAI,CAAA;AAAA,IACrF;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,oBAAA,CAAqB,OAAA,CAAQ,MAAA,IAAU,KAAK,QAAQ,CAAA;AAClE,IAAA,IAAI,IAAA,CAAK,MAAA,IAAU,CAAA,IAAK,IAAA,CAAK,UAAU,CAAA,EAAG;AACxC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,uCAAA;AAAA,QACA,QAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAA,CAAK,QAAA,GAAW,0BAAA,CAA2B,OAAA,CAAQ,QAAA,IAAY,IAAI,UAAU,CAAA;AAC7E,IAAA,IAAA,CAAK,SAAA,GAAY,yBAAA,CAA0B,OAAA,CAAQ,SAAA,IAAa,MAAM,WAAW,CAAA;AACjF,IAAA,IAAA,CAAK,QAAA,GAAW,0BAAA,CAA2B,OAAA,CAAQ,QAAA,IAAY,GAAG,UAAU,CAAA;AAC5E,IAAA,IAAA,CAAK,KAAA,GAAQ,yBAAA,CAA0B,OAAA,CAAQ,KAAA,IAAS,GAAG,OAAO,CAAA;AAClE,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA,CAAK,IAAA,KAAS,KAAA,GAAQ,QAAA,GAAW,CAAA,QAAA;AAC7C,IAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AACpB,IAAA,IAAA,CAAK,eAAA,GAAkB,CAAA;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,OAAA,EAA0B;AACzC,IAAA,IAAI,IAAA,CAAK,SAAS,KAAA,EAAO;AACvB,MAAA,OAAO,OAAA,GAAU,IAAA,CAAK,IAAA,GAAO,IAAA,CAAK,SAAA;AAAA,IACpC;AACA,IAAA,OAAO,OAAA,GAAU,IAAA,CAAK,IAAA,GAAO,IAAA,CAAK,SAAA;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,KAAK,MAAA,EAAsB;AACzB,IAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,MAAM,CAAA,EAAG;AAC5B,MAAA,MAAM,IAAI,qBAAA,CAAsB,uBAAA,EAAyB,QAAA,EAAU,MAAM,CAAA;AAAA,IAC3E;AACA,IAAA,IAAI,IAAA,CAAK,kBAAkB,CAAA,EAAG;AAC5B,MAAA,IAAA,CAAK,eAAA,EAAA;AACL,MAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAAA,IACtB;AAEA,IAAA,IAAI,IAAA,CAAK,QAAA,CAAS,MAAM,CAAA,EAAG;AACzB,MAAA,IAAA,CAAK,IAAA,GAAO,MAAA;AACZ,MAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAAA,IACtB,CAAA,MAAA,IAAW,IAAA,CAAK,eAAA,KAAoB,CAAA,EAAG;AACrC,MAAA,IAAA,CAAK,YAAA,EAAA;AAAA,IACP;AAEA,IAAA,IAAI,IAAA,CAAK,YAAA,GAAe,IAAA,CAAK,QAAA,EAAU;AACrC,MAAA,IAAA,CAAK,QAAA,EAAS;AACd,MAAA,IAAA,CAAK,kBAAkB,IAAA,CAAK,QAAA;AAC5B,MAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,QAAA,GAAiB;AACvB,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAK,SAAA,CAAU,WAAA,CAAY,QAAQ,CAAA,EAAA,EAAK;AAC1D,MAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,SAAA,CAAU,WAAA,CAAY,CAAC,CAAA;AAC1C,MAAA,IAAI,CAAC,KAAA,EAAO;AACV,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,yBAAyB,CAAC,CAAA,YAAA,CAAA;AAAA,UAC1B,uBAAA;AAAA,UACA;AAAA,SACF;AAAA,MACF;AACA,MAAA,MAAM,SAAA,GAAY,cAAA,CAAe,KAAA,EAAO,CAAC,CAAA;AACzC,MAAA,MAAM,QAAQ,IAAA,CAAK,GAAA,CAAI,YAAY,IAAA,CAAK,MAAA,EAAQ,KAAK,KAAK,CAAA;AAC1D,MAAA,UAAA,CAAW,OAAO,KAAK,CAAA;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAA,GAAsB;AACpB,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,WAAA,CAAY,GAAA,CAAI,CAAC,OAAO,KAAA,KAAU,cAAA,CAAe,KAAA,EAAO,KAAK,CAAC,CAAA;AAAA,EACtF;AACF;AAcO,IAAM,QAAA,GAAN,cAAuB,WAAA,CAAY;AAAA,EAChC,YAAA;AAAA,EACA,cAAA;AAAA,EAER,WAAA,CACE,SAAA,EACA,cAAA,EACA,OAAA,EACA;AACA,IAAA,MAAM,YAAA,GAAe,uBAAA,CAAwB,OAAA,CAAQ,YAAA,EAAc,cAAc,CAAA;AACjF,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,YAAA,GAAe,YAAA;AACpB,IAAA,IAAA,CAAK,cAAA,GAAiB,cAAA;AACtB,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,IAAI,IAAA,CAAK,SAAA,GAAY,IAAA,CAAK,YAAA,EAAc;AAEtC,MAAA,MAAM,MAAA,GAAA,CAAU,IAAA,CAAK,SAAA,GAAY,CAAA,IAAK,IAAA,CAAK,YAAA;AAC3C,MAAA,OAAO,KAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,EAAA,KAAO,KAAK,MAAM,CAAA;AAAA,IAC7C;AAEA,IAAA,IAAI,KAAK,cAAA,EAAgB;AAEvB,MAAA,OAAO,IAAA,CAAK,eAAe,KAAA,EAAM;AAAA,IACnC;AAEA,IAAA,OAAO,IAAA,CAAK,OAAA;AAAA,EACd;AAAA,EAES,IAAA,GAAa;AACpB,IAAA,KAAA,CAAM,IAAA,EAAK;AAGX,IAAA,IAAI,IAAA,CAAK,SAAA,IAAa,IAAA,CAAK,YAAA,IAAgB,KAAK,cAAA,EAAgB;AAC9D,MAAA,IAAA,CAAK,eAAe,IAAA,EAAK;AAAA,IAC3B;AAAA,EACF;AACF;AAmBO,IAAM,UAAA,GAAN,cAAyB,WAAA,CAAY;AAAA,EAClC,KAAA;AAAA,EACA,UAAA;AAAA,EACA,QAAA;AAAA,EACA,SAAA;AAAA,EACA,cAAA;AAAA,EACA,cAAA;AAAA,EAER,WAAA,CACE,WACA,OAAA,EASA;AACA,IAAA,MAAM,KAAA,GAAQ,sBAAA,CAAuB,OAAA,CAAQ,KAAA,EAAO,OA
AO,CAAA;AAC3D,IAAA,MAAM,UAAA,GAAa,uBAAA,CAAwB,OAAA,CAAQ,UAAA,EAAY,YAAY,CAAA;AAC3E,IAAA,MAAM,QAAA,GAAW,oBAAA,CAAqB,OAAA,CAAQ,QAAA,IAAY,KAAK,UAAU,CAAA;AACzE,IAAA,IAAI,QAAA,IAAY,CAAA,IAAK,QAAA,IAAY,CAAA,EAAG;AAClC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yCAAA;AAAA,QACA,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,MAAM,SAAA,GAAY,sBAAA,CAAuB,OAAA,CAAQ,SAAA,IAAa,IAAI,WAAW,CAAA;AAC7E,IAAA,MAAM,cAAA,GAAiB,sBAAA,CAAuB,OAAA,CAAQ,cAAA,IAAkB,KAAK,gBAAgB,CAAA;AAC7F,IAAA,MAAM,cAAA,GAAiB,QAAQ,cAAA,IAAkB,KAAA;AACjD,IAAA,IAAI,cAAA,KAAmB,KAAA,IAAS,cAAA,KAAmB,QAAA,EAAU;AAC3D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,0CAAA;AAAA,QACA,gBAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,MAAM,SAAA,GAAY,iBAAA,CAAkB,OAAA,CAAQ,SAAA,IAAa,EAAE,CAAA;AAC3D,IAAA,KAAA,CAAM,WAAW,EAAE,CAAA;AACnB,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAA;AACb,IAAA,IAAA,CAAK,UAAA,GAAa,UAAA;AAClB,IAAA,IAAA,CAAK,QAAA,GAAW,QAAA;AAChB,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,cAAA,GAAiB,cAAA;AACtB,IAAA,IAAA,CAAK,cAAA,GAAiB,cAAA;AACtB,IAAA,IAAA,CAAK,wBAAwB,SAAS,CAAA;AAAA,EACxC;AAAA,EAEA,KAAA,GAAkB;AAChB,IAAA,MAAM,UAAU,IAAA,CAAK,SAAA;AACrB,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,IAAA,CAAK,MAAM,IAAA,CAAK,UAAA,GAAa,IAAA,CAAK,QAAQ,CAAC,CAAA;AACvE,IAAA,MAAM,YAAY,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,IAAA,CAAK,aAAa,OAAO,CAAA;AAEvD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,KAAA,GAAQ,IAAA,CAAK,SAAA;AACpC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,GAAQ,IAAA,CAAK,cAAA;AAEhC,IAAA,IAAI,EAAA;AAEJ,IAAA,IAAI,OAAA,IAAW,KAAK,UAAA,EAAY;AAC9B,MAAA,EAAA,GAAK,KAAA;AAAA,IACP,CAAA,MAAA,IAAW,UAAU,OAAA,EAAS;AAE5B,MAAA,MAAM,MAAM,OAAA,GAAU,OAAA;AACtB,MAAA,EAAA,GAAK,SAAA,GAAA,CAAa,IAAA,CAAK,KAAA,GAAQ,SAAA,IAAa,GAAA;AAAA,IAC9C,CAAA,MAAO;AAEL,MAAA,MAAM,GAAA,GAAA,CAAO,UAAU,OAAA,IAAW,SAAA;AAClC,MAAA,IAAI,IAAA,CAAK,mBAAmB,KAAA,EAAO;AACjC,QAAA,EAAA,GAAK,KAAA,GAAA,CAAU,IAAA,CAAK,KAAA,GAAQ,KAAA,KAAU,CAAA,GAAI,KAAK,GAAA,CAAI,IAAA,CAAK,EAAA,GAAK,GAAG,CAAA,CAAA,GAAM,CAAA;AAAA,MACxE,CAAA,MAAO;AACL,QAAA,EAAA,GAAK,IAAA,CAAK,KAAA,GAAA,CAAS,IAAA,CAAK,KAAA,GAAQ,KAAA,IAAS,GAAA;AAAA,MAC3C;AAAA,IACF;AAGA,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,OAAO,IAAA,CAAK,OAAA,CAAQ,GAAA,CAAI,CAAC,MAAA,KAAW;AAClC,MAAA,IAAI,YAAY,CAAA,EAAG;AACjB,QAAA,OAAO,MAAA,KAAW,IAAI,CAAA,GAAI,EAAA;AAAA,MAC5B;AACA,MAAA,OAAO,MAAM,MAAA,GAAS,OAAA,CAAA;AAAA,IACxB,CAAC,CAAA;AAAA,EACH;AACF","file":"chunk-PR647I7R.js","sourcesContent":["// Base optimizer class\n\nexport type { ParamGroup } from \"./Optimizer\";\nexport { Optimizer } from \"./Optimizer\";\nexport { AdaDelta } from \"./optimizers/adadelta\";\nexport { Adagrad } from \"./optimizers/adagrad\";\n// Optimizers - gradient descent variants\nexport { Adam } from \"./optimizers/adam\";\nexport { AdamW } from \"./optimizers/adamw\";\nexport { Nadam } from \"./optimizers/nadam\";\nexport { RMSprop } from \"./optimizers/rmsprop\";\nexport { SGD } from \"./optimizers/sgd\";\n\n// Learning rate schedulers\nexport {\n CosineAnnealingLR,\n ExponentialLR,\n LinearLR,\n LRScheduler,\n MultiStepLR,\n OneCycleLR,\n ReduceLROnPlateau,\n StepLR,\n WarmupLR,\n} from \"./schedulers\";\n","import { DataValidationError } from \"../core\";\nimport type { GradTensor } from \"../ndarray\";\n\n/**\n * Base class for all optimizers.\n *\n * This abstract class provides the foundation for implementing optimization algorithms\n * used in training machine learning models. 
All concrete optimizers (SGD, Adam, etc.)\n * must extend this class and implement the abstract `step()` method.\n *\n * **Key Features:**\n * - Parameter groups with per-group hyperparameters\n * - State management for stateful optimizers (momentum, adaptive learning rates)\n * - Gradient zeroing utilities\n * - State serialization for checkpointing\n *\n * **Design Pattern:**\n * The optimizer maintains a list of parameter groups, where each group can have\n * different hyperparameters (e.g., different learning rates for different layers).\n * This enables fine-grained control over the optimization process.\n *\n * @example\n * ```ts\n * import { SGD } from 'deepbox/optim';\n *\n * const optimizer = new SGD(model.parameters(), { lr: 0.01 });\n *\n * // Training loop\n * for (let epoch = 0; epoch < 100; epoch++) {\n * optimizer.zeroGrad();\n * const loss = computeLoss();\n * loss.backward();\n * optimizer.step();\n * }\n * ```\n *\n * @example\n * ```ts\n * // Using parameter groups with different learning rates\n * const optimizer = new SGD([\n * { params: model.layer1.parameters(), lr: 0.01 },\n * { params: model.layer2.parameters(), lr: 0.001 }\n * ], { lr: 0.01 });\n * ```\n *\n * References:\n * - PyTorch Optimizer: https://pytorch.org/docs/stable/optim.html\n *\n * @category Optimization\n */\n\n/**\n * Represents a group of parameters with optional per-group hyperparameters.\n *\n * @template Options - Type of optimizer-specific options\n * @property params - Iterable of parameters to optimize in this group\n */\nexport type ParamGroup<Options extends Record<string, unknown>> = {\n readonly params: Iterable<GradTensor>;\n} & Partial<Options>;\n\nfunction isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === \"object\" && value !== null;\n}\n\nfunction ensureRecord(value: unknown, context: string) {\n if (!isRecord(value)) {\n throw new DataValidationError(`${context} must be an object`);\n }\n return value;\n}\n\nfunction isStateRecord(value: unknown): value is Record<string, unknown> {\n return isRecord(value);\n}\n\nfunction ensureIntegerArray(value: unknown, context: string) {\n if (!Array.isArray(value)) {\n throw new DataValidationError(`${context} must be an array of integers`);\n }\n const output: number[] = [];\n for (const entry of value) {\n if (!Number.isInteger(entry)) {\n throw new DataValidationError(`${context} must contain integers only`);\n }\n output.push(entry);\n }\n return output;\n}\n\n/**\n * Type guard to determine if params is an array of parameter groups.\n *\n * This function checks whether the provided params argument is a simple iterable\n * of parameters or an array of parameter groups with per-group options.\n *\n * @template Options - Type of optimizer-specific options\n * @param params - Either an iterable of parameters or array of parameter groups\n * @returns True if params is an array of parameter groups, false otherwise\n */\nfunction isParamGroupArray<Options extends Record<string, unknown>>(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<Options>>\n): params is ReadonlyArray<ParamGroup<Options>> {\n // Check if params is an array (parameter groups must be arrays)\n if (!Array.isArray(params)) return false;\n // Empty array is considered a valid parameter group array\n if (params.length === 0) return true;\n // Check if first element has a 'params' property (indicating it's a group)\n const first = params[0];\n if (!first || typeof first !== \"object\") return false;\n return \"params\" in 
first;\n}\n\n/**\n * Abstract base class for all optimization algorithms.\n *\n * @template Options - Type defining optimizer-specific hyperparameters\n * @template State - Type defining per-parameter state (e.g., momentum buffers)\n */\nexport abstract class Optimizer<\n Options extends Record<string, unknown>,\n State extends Record<string, unknown>,\n> {\n /**\n * Groups of parameters with their associated hyperparameters.\n * Each group can have different options (e.g., learning rates).\n * Exposed publicly to enable scheduler integrations.\n */\n public paramGroups: Array<{\n params: GradTensor[];\n options: Options;\n }>;\n\n /**\n * Per-parameter state storage.\n * Maps each parameter to its optimizer-specific state (momentum, adaptive rates, etc.).\n */\n protected state: Map<GradTensor, State> = new Map();\n\n /**\n * Create a new optimizer.\n *\n * Initializes the optimizer with either a simple list of parameters or\n * multiple parameter groups with per-group hyperparameters.\n *\n * @param params - Either an iterable of parameters or array of parameter groups\n * @param defaults - Default hyperparameters applied to all groups\n */\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<Options>>,\n protected readonly defaults: Readonly<Options>\n ) {\n // Initialize empty parameter groups array\n this.paramGroups = [];\n\n // Handle both simple param list and param groups\n if (!isParamGroupArray<Options>(params)) {\n // Simple iterable of parameters - create single group with default options\n this.paramGroups.push({\n params: Array.from(params), // Convert iterable to array for efficient access\n options: { ...defaults }, // Clone defaults to avoid mutation\n });\n } else {\n // Array of parameter groups - create group for each with merged options\n for (const group of params) {\n // Destructure to separate params from group-specific options\n const { params: groupParams, ...groupOptions } = group;\n this.paramGroups.push({\n params: Array.from(groupParams), // Convert to array\n options: { ...defaults, ...groupOptions }, // Merge defaults with group options\n });\n }\n }\n }\n\n /**\n * Perform a single optimization step (parameter update).\n *\n * This abstract method must be implemented by all optimizer subclasses.\n * It applies the optimization algorithm to update all parameters based on\n * their gradients.\n *\n * @param closure - Optional closure that reevaluates the model and returns the loss.\n * Used by some optimizers (e.g., LBFGS) that require multiple\n * function evaluations per step.\n * @returns Loss value if closure is provided, undefined otherwise\n */\n abstract step(closure?: () => number): number | undefined;\n\n /**\n * Zero out the gradients of all optimized parameters.\n *\n * This method should be called at the beginning of each training iteration,\n * before computing new gradients. 
Without this call, gradients would accumulate\n * across iterations, leading to incorrect updates.\n *\n * **Implementation Note:**\n * For parameters wrapped in GradTensor, this calls zeroGrad() on each parameter,\n * which either sets the gradient to zero or initializes it if not yet created.\n *\n * @example\n * ```ts\n * // Typical training loop\n * optimizer.zeroGrad(); // Clear previous gradients\n * const output = model.forward(input);\n * const loss = criterion(output, target);\n * loss.backward(); // Compute new gradients\n * optimizer.step(); // Update parameters\n * ```\n */\n zeroGrad(): void {\n // Iterate through all parameter groups\n for (const group of this.paramGroups) {\n // Zero gradient for each parameter in the group\n for (const param of group.params) {\n param.zeroGrad(); // Delegate to GradTensor's zeroGrad method\n }\n }\n }\n\n /**\n * Add a parameter group to the optimizer.\n *\n * This method allows adding new parameters to optimize after the optimizer\n * has been created. This is particularly useful for:\n * - Fine-tuning: adding pre-trained layers with different learning rates\n * - Progressive training: gradually unfreezing layers\n * - Dynamic architectures: adding parameters while the model grows\n *\n * @param paramGroup - Parameter group to add with optional per-group options\n *\n * @example\n * ```ts\n * const optimizer = new SGD(model.backbone.parameters(), { lr: 0.001 });\n * // Later, add classifier with higher learning rate\n * optimizer.addParamGroup({\n * params: model.classifier.parameters(),\n * lr: 0.01\n * });\n * ```\n */\n addParamGroup(paramGroup: ParamGroup<Options>): void {\n // Destructure to separate params from group-specific options\n const { params, ...options } = paramGroup;\n // Add new group with merged options (defaults + group-specific)\n this.paramGroups.push({\n params: Array.from(params), // Convert iterable to array\n options: { ...this.defaults, ...options }, // Merge with defaults\n });\n }\n\n /**\n * Validate that a given state object matches the optimizer's state type.\n *\n * @param state - The state object to validate\n * @returns True if the state object is valid, false otherwise\n */\n protected abstract isState(state: Record<string, unknown>): state is State;\n\n /**\n * Get the current state of the optimizer.\n *\n * Returns a dictionary containing all optimizer state that needs to be\n * saved for checkpointing. This includes per-parameter state (momentum buffers,\n * adaptive learning rates, etc.) 
and parameter group configurations.\n *\n * **Note:** In a production implementation, parameters would be identified by\n * unique IDs rather than object references for proper serialization.\n *\n * @returns Optimizer state dictionary containing state and parameter groups\n *\n * @example\n * ```ts\n * // Save checkpoint\n * const checkpoint = {\n * model: model.stateDict(),\n * optimizer: optimizer.stateDict(),\n * epoch: currentEpoch\n * };\n * ```\n */\n stateDict() {\n const paramIdMap = new Map<GradTensor, number>();\n const orderedParams: GradTensor[] = [];\n const getParamId = (param: GradTensor) => {\n const existing = paramIdMap.get(param);\n if (existing !== undefined) return existing;\n const id = orderedParams.length;\n orderedParams.push(param);\n paramIdMap.set(param, id);\n return id;\n };\n\n return {\n // Serialize per-parameter state\n state: Array.from(this.state.entries()).map(([param, state]) => ({\n paramId: getParamId(param),\n param: param, // Backward-compatible references\n state, // Optimizer-specific state (momentum, etc.)\n })),\n // Serialize parameter groups and their options\n paramGroups: this.paramGroups.map((group) => ({\n params: group.params, // Backward-compatible references\n paramIds: group.params.map((param) => getParamId(param)),\n options: group.options, // Hyperparameters for this group\n })),\n };\n }\n\n /**\n * Load optimizer state from a state dictionary.\n *\n * Restores the optimizer to a previously saved state, including all\n * per-parameter state and parameter group configurations. This is essential\n * for resuming training from checkpoints.\n *\n * **Important:** The loaded state must be compatible with the current\n * optimizer configuration (same parameters, same optimizer type).\n *\n * @param stateDict - State dictionary previously returned by stateDict()\n *\n * @example\n * ```ts\n * // Resume from checkpoint\n * const checkpoint = loadCheckpoint('checkpoint.json');\n * model.loadStateDict(checkpoint.model);\n * optimizer.loadStateDict(checkpoint.optimizer);\n * ```\n */\n loadStateDict(stateDict: Record<string, unknown>): void {\n const currentParams = this.paramGroups.flatMap((group) => group.params);\n const currentParamCount = currentParams.length;\n const paramLookup = new Map<unknown, number>();\n for (let i = 0; i < currentParams.length; i++) {\n paramLookup.set(currentParams[i], i);\n }\n\n // Validate paramGroups if present\n if (Object.hasOwn(stateDict, \"paramGroups\")) {\n const rawGroups = stateDict[\"paramGroups\"];\n if (!Array.isArray(rawGroups)) {\n throw new DataValidationError(\"paramGroups must be an array\");\n }\n const groupsArray: unknown[] = rawGroups;\n\n if (groupsArray.length === 0) {\n if (this.paramGroups.length !== 0) {\n throw new DataValidationError(\"paramGroups cannot be empty\");\n }\n this.paramGroups = [];\n } else {\n if (groupsArray.length !== this.paramGroups.length) {\n throw new DataValidationError(\"paramGroups count mismatch\");\n }\n\n const seenParamIds = new Set<number>();\n let totalParamCount = 0;\n let sawParamIds = false;\n let sawNoParamIds = false;\n const nextGroups: Array<{\n params: GradTensor[];\n options: Options;\n paramIds?: number[];\n }> = [];\n\n groupsArray.forEach((rawGroup, index) => {\n const groupRecord = ensureRecord(rawGroup, `paramGroups[${index}]`);\n const optionsRaw = ensureRecord(groupRecord[\"options\"], `paramGroups[${index}].options`);\n const options: Options = { ...this.defaults };\n const optionsRecord: Record<string, unknown> = options;\n const 
defaultsRecord: Record<string, unknown> = { ...this.defaults };\n\n for (const [key, value] of Object.entries(optionsRaw)) {\n if (Object.hasOwn(defaultsRecord, key)) {\n const defaultVal = defaultsRecord[key];\n const expectedType = typeof defaultVal;\n const actualType = typeof value;\n\n if (actualType !== expectedType) {\n throw new DataValidationError(\n `Type mismatch for option '${key}' in paramGroups[${index}]: expected ${expectedType}, got ${actualType}`\n );\n }\n optionsRecord[key] = value;\n }\n }\n\n const paramIdsRaw = groupRecord[\"paramIds\"];\n const paramsRaw = groupRecord[\"params\"];\n let paramIds: number[] | undefined;\n if (paramIdsRaw !== undefined) {\n paramIds = ensureIntegerArray(paramIdsRaw, `paramGroups[${index}].paramIds`);\n sawParamIds = true;\n } else {\n sawNoParamIds = true;\n }\n\n let resolvedParams: GradTensor[] | undefined;\n\n if (paramIds) {\n for (const id of paramIds) {\n if (id < 0 || id >= currentParamCount) {\n throw new DataValidationError(`Invalid paramId ${id} in paramGroups`);\n }\n if (seenParamIds.has(id)) {\n throw new DataValidationError(`Duplicate paramId ${id} in paramGroups`);\n }\n seenParamIds.add(id);\n }\n totalParamCount += paramIds.length;\n resolvedParams = paramIds.map((id) => {\n const param = currentParams[id];\n if (!param) {\n throw new DataValidationError(`Invalid paramId ${id} in paramGroups`);\n }\n return param;\n });\n }\n\n if (paramsRaw !== undefined) {\n if (!Array.isArray(paramsRaw)) {\n throw new DataValidationError(`paramGroups[${index}].params must be an array`);\n }\n const resolvedFromParams: GradTensor[] = [];\n let hasUnknown = false;\n for (const paramRef of paramsRaw) {\n const paramIndex = paramLookup.get(paramRef);\n if (paramIndex === undefined) {\n hasUnknown = true;\n continue;\n }\n const param = currentParams[paramIndex];\n if (!param) {\n hasUnknown = true;\n continue;\n }\n resolvedFromParams.push(param);\n }\n if (!hasUnknown) {\n if (paramIds && paramIds.length !== resolvedFromParams.length) {\n throw new DataValidationError(\"paramIds length does not match params length\");\n }\n if (!resolvedParams) {\n resolvedParams = resolvedFromParams;\n }\n }\n }\n\n if (!resolvedParams) {\n throw new DataValidationError(`paramGroups[${index}] must include params or paramIds`);\n }\n\n if (paramIds === undefined) {\n nextGroups.push({ params: resolvedParams, options });\n } else {\n nextGroups.push({ params: resolvedParams, options, paramIds });\n }\n });\n\n if (sawParamIds && sawNoParamIds) {\n throw new DataValidationError(\"paramIds must be provided for all parameter groups\");\n }\n\n if (sawParamIds && totalParamCount !== currentParamCount) {\n throw new DataValidationError(\n `Parameter count mismatch: expected ${currentParamCount}, got ${totalParamCount}`\n );\n }\n\n this.paramGroups = nextGroups.map((group) => ({\n params: group.params,\n options: group.options,\n }));\n }\n }\n\n // Load per-parameter state if present in state dict\n if (Object.hasOwn(stateDict, \"state\")) {\n const rawState = stateDict[\"state\"];\n if (!Array.isArray(rawState)) {\n throw new DataValidationError(\"state must be an array\");\n }\n const stateArray: unknown[] = rawState;\n // Clear existing state before loading\n this.state.clear();\n // Restore each parameter's state\n stateArray.forEach((rawEntry, index) => {\n const entryRecord = ensureRecord(rawEntry, `state[${index}]`);\n if (!Object.hasOwn(entryRecord, \"state\")) {\n throw new DataValidationError(`state[${index}].state is required`);\n }\n const 
entryStateValue = ensureRecord(entryRecord[\"state\"], `state[${index}].state`);\n if (!isStateRecord(entryStateValue)) {\n throw new DataValidationError(`state[${index}].state must be an object`);\n }\n\n const paramIdRaw = entryRecord[\"paramId\"];\n const paramRaw = entryRecord[\"param\"];\n\n let resolvedParam: GradTensor | undefined;\n\n if (paramIdRaw !== undefined) {\n if (\n paramIdRaw === null ||\n typeof paramIdRaw !== \"number\" ||\n !Number.isInteger(paramIdRaw)\n ) {\n throw new DataValidationError(`Invalid paramId ${String(paramIdRaw)} in state`);\n }\n if (paramIdRaw < 0 || paramIdRaw >= currentParamCount) {\n throw new DataValidationError(`Invalid paramId ${paramIdRaw} in state`);\n }\n const param = currentParams[paramIdRaw];\n if (!param) {\n throw new DataValidationError(`Invalid paramId ${paramIdRaw} in state`);\n }\n if (paramRaw !== undefined) {\n const paramIndex = paramLookup.get(paramRaw);\n if (paramIndex === undefined || paramIndex !== paramIdRaw) {\n throw new DataValidationError(`paramId ${paramIdRaw} does not match provided param`);\n }\n }\n resolvedParam = param;\n } else {\n if (paramRaw === undefined) {\n throw new DataValidationError(\"Missing param reference in state entry\");\n }\n const paramIndex = paramLookup.get(paramRaw);\n if (paramIndex === undefined) {\n throw new DataValidationError(\"Unknown param reference in state entry\");\n }\n const param = currentParams[paramIndex];\n if (!param) {\n throw new DataValidationError(\"Unknown param reference in state entry\");\n }\n resolvedParam = param;\n }\n\n if (!resolvedParam) {\n throw new DataValidationError(`Unable to resolve parameter for state[${index}]`);\n }\n if (!this.isState(entryStateValue)) {\n throw new DataValidationError(`state[${index}].state has invalid structure`);\n }\n this.state.set(resolvedParam, entryStateValue);\n });\n }\n }\n}\n","/**\n * Internal utilities for optimizer implementations.\n * This module is not part of the public API.\n *\n * @internal\n */\n\nimport {\n DeepboxError,\n DTypeError,\n IndexError,\n InvalidParameterError,\n NotFittedError,\n ShapeError,\n} from \"../core\";\nimport type { GradTensor } from \"../ndarray\";\n\n/**\n * Supported floating-point typed array types for optimizer parameters.\n */\nexport type FloatTypedArray = Float32Array | Float64Array;\n\nfunction isFloatTypedArray(value: unknown): value is FloatTypedArray {\n return value instanceof Float32Array || value instanceof Float64Array;\n}\n\n/**\n * Safely access an array element with bounds checking.\n *\n * @param array - Array to access\n * @param index - Index to access\n * @param context - Context string for error messages\n * @returns The value at the index\n * @throws {IndexError} If index is out of bounds\n * @throws {DeepboxError} If value is unexpectedly undefined\n */\nexport function safeArrayAccess<T>(array: ArrayLike<T>, index: number, context: string): T {\n if (index < 0 || index >= array.length) {\n throw new IndexError(`Index ${index} out of bounds [0, ${array.length}) in ${context}`, {\n index,\n validRange: [0, array.length - 1],\n });\n }\n const value = array[index];\n if (value === undefined) {\n throw new DeepboxError(`Unexpected undefined at index ${index} in ${context}`);\n }\n return value;\n}\n\n/**\n * Validates that a numeric value is finite and non-negative.\n *\n * @param name - Name of the parameter being validated\n * @param value - Value to validate\n * @throws {InvalidParameterError} If value is not finite or is negative\n */\nexport function 
assertFiniteNonNegative(name: string, value: number): void {\n if (!Number.isFinite(value) || value < 0) {\n throw new InvalidParameterError(`Invalid ${name}: ${value}`, name, value);\n }\n}\n\n/**\n * Validates that a numeric value is finite and positive (> 0).\n *\n * @param name - Name of the parameter being validated\n * @param value - Value to validate\n * @throws {InvalidParameterError} If value is not finite or is not positive\n */\nexport function assertFinitePositive(name: string, value: number): void {\n if (!Number.isFinite(value) || value <= 0) {\n throw new InvalidParameterError(`Invalid ${name}: ${value} (must be > 0)`, name, value);\n }\n}\n\n/**\n * Validates that a numeric value is finite.\n *\n * @param name - Name of the parameter being validated\n * @param value - Value to validate\n * @throws {InvalidParameterError} If value is not finite\n */\nexport function assertFinite(name: string, value: number): void {\n if (!Number.isFinite(value)) {\n throw new InvalidParameterError(`Invalid ${name}: ${value}`, name, value);\n }\n}\n\n/**\n * Validates that a value is in the range [min, max).\n *\n * @param name - Name of the parameter being validated\n * @param value - Value to validate\n * @param min - Minimum value (inclusive)\n * @param max - Maximum value (exclusive)\n * @throws {InvalidParameterError} If value is out of range\n */\nexport function assertInRange(name: string, value: number, min: number, max: number): void {\n if (!Number.isFinite(value) || value < min || value >= max) {\n throw new InvalidParameterError(\n `Invalid ${name}: ${value} (must be in range [${min}, ${max}))`,\n name,\n value\n );\n }\n}\n\n/**\n * Validates that a parameter has a gradient and returns gradient information.\n *\n * @param param - Parameter to validate\n * @param optimizerName - Name of the optimizer for error messages\n * @returns Object containing gradient data, offset, parameter data, and offset\n * @throws {InvalidParameterError} If parameter doesn't require gradients\n * @throws {NotFittedError} If parameter has no gradient\n * @throws {DTypeError} If parameter or gradient has unsupported dtype\n * @throws {ShapeError} If gradient shape doesn't match parameter shape\n */\nexport function assertHasGradFloat(\n param: GradTensor,\n optimizerName: string\n): {\n grad: FloatTypedArray;\n gradOffset: number;\n param: FloatTypedArray;\n paramOffset: number;\n} {\n if (!param.requiresGrad) {\n throw new InvalidParameterError(\n \"Cannot optimize a parameter with requiresGrad=false\",\n \"requiresGrad\",\n false\n );\n }\n\n const g = param.grad;\n if (!g) {\n throw new NotFittedError(\n \"Cannot optimize a parameter without a gradient. 
Did you forget backward()?\"\n );\n }\n\n const paramData = param.tensor.data;\n const gradData = g.data;\n\n if (!isFloatTypedArray(paramData) || !isFloatTypedArray(gradData)) {\n throw new DTypeError(\n `${optimizerName} optimizer supports float32 and float64 parameters and gradients only`\n );\n }\n\n if (paramData.constructor !== gradData.constructor) {\n throw new DTypeError(\n `${optimizerName} optimizer requires parameter and gradient dtypes to match`\n );\n }\n\n if (param.tensor.size !== g.size) {\n throw new ShapeError(\n `Gradient shape must match parameter shape (param: ${param.tensor.size}, grad: ${g.size})`\n );\n }\n\n return {\n grad: gradData,\n gradOffset: g.offset,\n param: paramData,\n paramOffset: param.tensor.offset,\n };\n}\n\n/**\n * Validates that a state buffer has the correct size.\n *\n * @param buffer - State buffer to validate\n * @param expectedSize - Expected size\n * @param bufferName - Name of the buffer for error messages\n * @throws {DeepboxError} If buffer size doesn't match expected size\n */\nexport function assertBufferSize(\n buffer: ArrayLike<number>,\n expectedSize: number,\n bufferName: string\n): void {\n if (buffer.length !== expectedSize) {\n throw new DeepboxError(\n `State buffer size mismatch for ${bufferName}: expected ${expectedSize}, got ${buffer.length}`\n );\n }\n}\n","import { InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n assertInRange,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\ntype AdaDeltaOptions = {\n lr: number;\n readonly rho: number;\n readonly eps: number;\n readonly weightDecay: number;\n};\n\ntype AdaDeltaState = {\n squareAvg: Float64Array;\n accDelta: Float64Array;\n};\n\n/**\n * AdaDelta optimizer.\n *\n * Implements AdaDelta algorithm - an extension of Adagrad that seeks to reduce\n * its aggressive, monotonically decreasing learning rate. AdaDelta adapts learning\n * rates based on a moving window of gradient updates, rather than accumulating all\n * past gradients.\n *\n * @example\n * ```ts\n * import { AdaDelta } from 'deepbox/optim';\n *\n * const optimizer = new AdaDelta(model.parameters(), {\n * lr: 1.0,\n * rho: 0.9,\n * eps: 1e-6\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * optimizer.zeroGrad();\n * // ...\n * optimizer.step();\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class AdaDelta extends Optimizer<AdaDeltaOptions, AdaDeltaState> {\n private _stepCount = 0;\n\n get stepCount(): number {\n return this._stepCount;\n }\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<AdaDeltaOptions>>,\n options: {\n readonly lr?: number;\n readonly rho?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n } = {}\n ) {\n const defaults = {\n lr: options.lr ?? 1.0,\n rho: options.rho ?? 0.9,\n eps: options.eps ?? 1e-6,\n weightDecay: options.weightDecay ?? 
0,\n };\n\n super(params, defaults);\n\n // Validate hyperparameters\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertInRange(\"rho\", defaults.rho, 0, 1);\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n protected isState(state: Record<string, unknown>): state is AdaDeltaState {\n return state[\"squareAvg\"] instanceof Float64Array && state[\"accDelta\"] instanceof Float64Array;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n for (const group of this.paramGroups) {\n const { lr, rho, eps, weightDecay } = group.options;\n\n // Re-validate hyperparameters\n assertFiniteNonNegative(\"learning rate\", lr);\n assertInRange(\"rho\", rho, 0, 1);\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n\n for (const param of group.params) {\n const {\n grad: gradData,\n gradOffset: gOff,\n param: pData,\n paramOffset: pOff,\n } = assertHasGradFloat(param, \"AdaDelta\");\n const size = param.tensor.size;\n\n // Initialize state if needed\n let state = this.state.get(param);\n if (!state) {\n state = {\n squareAvg: new Float64Array(size),\n accDelta: new Float64Array(size),\n };\n this.state.set(param, state);\n }\n\n // Validate state buffer sizes\n assertBufferSize(state.squareAvg, size, \"AdaDelta squareAvg\");\n assertBufferSize(state.accDelta, size, \"AdaDelta accDelta\");\n\n for (let i = 0; i < size; i++) {\n const gi0 = safeArrayAccess(gradData, gOff + i, \"AdaDelta gradient\");\n const pi = safeArrayAccess(pData, pOff + i, \"AdaDelta parameter\");\n assertFinite(\"gradient\", gi0);\n assertFinite(\"parameter\", pi);\n\n // Apply weight decay\n const gi = weightDecay !== 0 ? 
gi0 + weightDecay * pi : gi0;\n\n // Update square average: E[g²](t) = ρ * E[g²](t-1) + (1 - ρ) * g(t)²\n const sq = safeArrayAccess(state.squareAvg, i, \"AdaDelta squareAvg\");\n const sqNew = rho * sq + (1 - rho) * gi * gi;\n state.squareAvg[i] = sqNew;\n\n // Compute RMS[g](t) = √(E[g²](t) + ε)\n const std = Math.sqrt(sqNew + eps);\n\n // Compute RMS[Δθ](t-1) = √(E[Δθ²](t-1) + ε)\n const accD = safeArrayAccess(state.accDelta, i, \"AdaDelta accDelta\");\n const rmsUpdate = Math.sqrt(accD + eps);\n\n // Compute parameter update: Δθ(t) = -RMS[Δθ](t-1) / RMS[g](t) * g(t)\n const delta = (rmsUpdate / std) * gi;\n\n // Update accumulated delta: E[Δθ²](t) = ρ * E[Δθ²](t-1) + (1 - ρ) * Δθ(t)²\n state.accDelta[i] = rho * accD + (1 - rho) * delta * delta;\n\n // Update parameter: θ(t+1) = θ(t) - lr * Δθ(t)\n pData[pOff + i] = pi - lr * delta;\n }\n }\n }\n\n return loss;\n }\n}\n","import { InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\ntype AdagradOptions = {\n lr: number;\n eps: number;\n weightDecay: number;\n lrDecay: number;\n};\n\ntype AdagradState = {\n step: number;\n sum: Float64Array;\n};\n\n/**\n * Adagrad (Adaptive Gradient Algorithm) optimizer.\n *\n * Adagrad adapts the learning rate for each parameter based on the historical\n * sum of squared gradients. Parameters with larger gradients receive smaller\n * effective learning rates, while parameters with smaller gradients receive\n * larger effective learning rates.\n *\n * @example\n * ```ts\n * import { Adagrad } from 'deepbox/optim';\n *\n * const optimizer = new Adagrad(model.parameters(), {\n * lr: 0.01,\n * eps: 1e-10\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * optimizer.zeroGrad();\n * // ...\n * optimizer.step();\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class Adagrad extends Optimizer<AdagradOptions, AdagradState> {\n private _stepCount = 0;\n\n get stepCount(): number {\n return this._stepCount;\n }\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<AdagradOptions>>,\n options: {\n readonly lr?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n readonly lrDecay?: number;\n } = {}\n ) {\n const defaults = {\n lr: options.lr ?? 0.01,\n eps: options.eps ?? 1e-10,\n weightDecay: options.weightDecay ?? 0,\n lrDecay: options.lrDecay ?? 
0,\n };\n\n super(params, defaults);\n\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n assertFiniteNonNegative(\"lr_decay\", defaults.lrDecay);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n protected isState(state: Record<string, unknown>): state is AdagradState {\n return typeof state[\"step\"] === \"number\" && state[\"sum\"] instanceof Float64Array;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n for (const group of this.paramGroups) {\n const { lr, eps, weightDecay, lrDecay } = group.options;\n\n assertFiniteNonNegative(\"learning rate\", lr);\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n assertFiniteNonNegative(\"lr_decay\", lrDecay);\n\n for (const param of group.params) {\n const {\n grad: gradData,\n gradOffset: gOff,\n param: pData,\n paramOffset: pOff,\n } = assertHasGradFloat(param, \"Adagrad\");\n const size = param.tensor.size;\n\n const existing = this.state.get(param);\n const state =\n existing ??\n (() => {\n const next = {\n step: 0,\n sum: new Float64Array(size),\n };\n this.state.set(param, next);\n return next;\n })();\n\n // Validate state buffer size\n assertBufferSize(state.sum, size, \"Adagrad sum\");\n\n state.step += 1;\n\n const clr = lr / (1 + (state.step - 1) * lrDecay);\n\n for (let i = 0; i < size; i++) {\n const gi0 = safeArrayAccess(gradData, gOff + i, \"Adagrad gradient\");\n const pi = safeArrayAccess(pData, pOff + i, \"Adagrad parameter\");\n assertFinite(\"gradient\", gi0);\n assertFinite(\"parameter\", pi);\n\n const gi = weightDecay !== 0 ? 
gi0 + weightDecay * pi : gi0;\n\n const sumVal = safeArrayAccess(state.sum, i, \"Adagrad sum\");\n const sumNew = sumVal + gi * gi;\n state.sum[i] = sumNew;\n\n const std = Math.sqrt(sumNew) + eps;\n pData[pOff + i] = pi - clr * (gi / std);\n }\n }\n }\n\n return loss;\n }\n}\n","import { DeepboxError, InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n assertInRange,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\ntype AdamOptions = {\n lr: number;\n beta1: number;\n beta2: number;\n eps: number;\n weightDecay: number;\n amsgrad: boolean;\n};\n\ntype AdamState = {\n step: number;\n expAvg: Float64Array;\n expAvgSq: Float64Array;\n maxExpAvgSq?: Float64Array;\n};\n\n/**\n * Adam (Adaptive Moment Estimation) optimizer.\n *\n * Computes adaptive learning rates for each parameter by maintaining\n * running averages of both the gradients and their squared values.\n *\n * @example\n * ```ts\n * import { Adam } from 'deepbox/optim';\n *\n * const optimizer = new Adam(model.parameters(), {\n * lr: 0.001,\n * beta1: 0.9,\n * beta2: 0.999\n * });\n * ```\n *\n * @category Optimizers\n */\nexport class Adam extends Optimizer<AdamOptions, AdamState> {\n private _stepCount = 0;\n\n get stepCount(): number {\n return this._stepCount;\n }\n\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<AdamOptions>>,\n options: {\n readonly lr?: number;\n readonly beta1?: number;\n readonly beta2?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n readonly amsgrad?: boolean;\n } = {}\n ) {\n const defaults = {\n lr: options.lr ?? 0.001,\n beta1: options.beta1 ?? 0.9,\n beta2: options.beta2 ?? 0.999,\n eps: options.eps ?? 1e-8,\n weightDecay: options.weightDecay ?? 0,\n amsgrad: options.amsgrad ?? 
false,\n };\n\n super(params, defaults);\n\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertInRange(\"beta1\", defaults.beta1, 0, 1);\n assertInRange(\"beta2\", defaults.beta2, 0, 1);\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n protected isState(state: Record<string, unknown>): state is AdamState {\n const hasRequired =\n typeof state[\"step\"] === \"number\" &&\n state[\"expAvg\"] instanceof Float64Array &&\n state[\"expAvgSq\"] instanceof Float64Array;\n if (!hasRequired) return false;\n if (state[\"maxExpAvgSq\"] !== undefined && !(state[\"maxExpAvgSq\"] instanceof Float64Array)) {\n return false;\n }\n return true;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n if (closure) {\n loss = closure();\n }\n\n this._stepCount++;\n\n for (const group of this.paramGroups) {\n const { lr, beta1, beta2, eps, weightDecay, amsgrad } = group.options;\n\n assertFiniteNonNegative(\"learning rate\", lr);\n assertInRange(\"beta1\", beta1, 0, 1);\n assertInRange(\"beta2\", beta2, 0, 1);\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n\n for (const param of group.params) {\n const {\n grad: gradData,\n gradOffset,\n param: paramData,\n paramOffset,\n } = assertHasGradFloat(param, \"Adam\");\n const size = param.tensor.size;\n\n const existing = this.state.get(param);\n const state =\n existing ??\n (() => {\n const next = {\n step: 0,\n expAvg: new Float64Array(size),\n expAvgSq: new Float64Array(size),\n ...(amsgrad ? { maxExpAvgSq: new Float64Array(size) } : {}),\n };\n this.state.set(param, next);\n return next;\n })();\n\n // Validate state buffer sizes\n assertBufferSize(state.expAvg, size, \"Adam expAvg\");\n assertBufferSize(state.expAvgSq, size, \"Adam expAvgSq\");\n if (amsgrad && state.maxExpAvgSq) {\n assertBufferSize(state.maxExpAvgSq, size, \"Adam maxExpAvgSq\");\n }\n\n state.step += 1;\n\n // Bias correction\n const biasCorrection1 = 1 - beta1 ** state.step;\n const biasCorrection2 = 1 - beta2 ** state.step;\n\n const stepSize = lr / biasCorrection1;\n\n for (let i = 0; i < size; i++) {\n const gi0 = safeArrayAccess(gradData, gradOffset + i, \"Adam gradient\");\n const pi = safeArrayAccess(paramData, paramOffset + i, \"Adam parameter\");\n assertFinite(\"gradient\", gi0);\n assertFinite(\"parameter\", pi);\n\n // Optional L2 weight decay (classic Adam style)\n const gi = weightDecay !== 0 ? 
gi0 + weightDecay * pi : gi0;\n\n const m = safeArrayAccess(state.expAvg, i, \"Adam expAvg\");\n const v = safeArrayAccess(state.expAvgSq, i, \"Adam expAvgSq\");\n\n const mNew = beta1 * m + (1 - beta1) * gi;\n const vNew = beta2 * v + (1 - beta2) * gi * gi;\n\n state.expAvg[i] = mNew;\n state.expAvgSq[i] = vNew;\n\n let denomSq = vNew;\n if (amsgrad) {\n const maxBuf = state.maxExpAvgSq;\n if (!maxBuf) {\n throw new DeepboxError(\"Internal error: AMSGrad enabled but maxExpAvgSq is missing\");\n }\n const maxV = Math.max(safeArrayAccess(maxBuf, i, \"Adam maxExpAvgSq\"), vNew);\n maxBuf[i] = maxV;\n denomSq = maxV;\n }\n\n const denom = Math.sqrt(denomSq / biasCorrection2) + eps;\n paramData[paramOffset + i] = pi - stepSize * (mNew / denom);\n }\n }\n }\n\n return loss;\n }\n}\n","import { DeepboxError, InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n assertInRange,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\n/**\n * Options for the AdamW optimizer.\n *\n * @property lr - Learning rate (step size)\n * @property beta1 - Exponential decay rate for first moment estimates\n * @property beta2 - Exponential decay rate for second moment estimates\n * @property eps - Small constant for numerical stability\n * @property weightDecay - Weight decay coefficient (L2 penalty)\n * @property amsgrad - Whether to use the AMSGrad variant\n */\ntype AdamWOptions = {\n lr: number;\n beta1: number;\n beta2: number;\n eps: number;\n weightDecay: number;\n amsgrad: boolean;\n};\n\n/**\n * State maintained per parameter by AdamW.\n *\n * @property step - Number of optimization steps taken\n * @property expAvg - Exponentially weighted average of gradients (first moment)\n * @property expAvgSq - Exponentially weighted average of squared gradients (second moment)\n * @property maxExpAvgSq - Maximum of exponentially weighted average of squared gradients (AMSGrad only)\n */\ntype AdamWState = {\n step: number;\n expAvg: Float64Array;\n expAvgSq: Float64Array;\n maxExpAvgSq?: Float64Array;\n};\n\n/**\n * AdamW (Adam with decoupled Weight decay) optimizer.\n *\n * AdamW fixes the weight decay implementation in Adam by decoupling it from the\n * gradient-based update. 
This leads to better generalization and is the recommended\n * variant for most applications.\n *\n * @example\n * ```ts\n * import { AdamW } from 'deepbox/optim';\n *\n * const optimizer = new AdamW(model.parameters(), {\n * lr: 0.001,\n * weightDecay: 0.01, // Typical value for AdamW\n * beta1: 0.9,\n * beta2: 0.999\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * optimizer.zeroGrad();\n * // ...\n * optimizer.step();\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class AdamW extends Optimizer<AdamWOptions, AdamWState> {\n /** Internal counter tracking total number of optimization steps */\n private _stepCount = 0;\n\n /**\n * Get the total number of optimization steps performed.\n *\n * @returns Number of steps taken\n */\n get stepCount(): number {\n return this._stepCount;\n }\n\n /**\n * Create a new AdamW optimizer.\n *\n * @param params - Iterable of parameters or parameter groups to optimize\n * @param options - Optimization options\n * @param options.lr - Learning rate (default: 0.001)\n * @param options.beta1 - First moment decay rate (default: 0.9)\n * @param options.beta2 - Second moment decay rate (default: 0.999)\n * @param options.eps - Numerical stability constant (default: 1e-8)\n * @param options.weightDecay - Weight decay coefficient (default: 0.01)\n * @param options.amsgrad - Enable AMSGrad variant (default: false)\n * @throws {InvalidParameterError} If a parameter is invalid\n */\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<AdamWOptions>>,\n options: {\n readonly lr?: number;\n readonly beta1?: number;\n readonly beta2?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n readonly amsgrad?: boolean;\n } = {}\n ) {\n // Set default values for all options\n const defaults = {\n lr: options.lr ?? 0.001,\n beta1: options.beta1 ?? 0.9,\n beta2: options.beta2 ?? 0.999,\n eps: options.eps ?? 1e-8,\n weightDecay: options.weightDecay ?? 0.01, // Higher default than Adam\n amsgrad: options.amsgrad ?? 
false,\n };\n\n super(params, defaults);\n\n // Validate all hyperparameters\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertInRange(\"beta1\", defaults.beta1, 0, 1);\n assertInRange(\"beta2\", defaults.beta2, 0, 1);\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n /**\n * Perform a single optimization step (parameter update).\n *\n * Implements the AdamW update rule with decoupled weight decay.\n *\n * @param closure - Optional closure that reevaluates the model and returns the loss\n * @returns Loss value if closure is provided, undefined otherwise\n */\n protected isState(state: Record<string, unknown>): state is AdamWState {\n const hasRequired =\n typeof state[\"step\"] === \"number\" &&\n state[\"expAvg\"] instanceof Float64Array &&\n state[\"expAvgSq\"] instanceof Float64Array;\n if (!hasRequired) return false;\n if (state[\"maxExpAvgSq\"] !== undefined && !(state[\"maxExpAvgSq\"] instanceof Float64Array)) {\n return false;\n }\n return true;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n // Evaluate closure if provided (for algorithms like LBFGS)\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n // Update each parameter group\n for (const group of this.paramGroups) {\n const { lr, beta1, beta2, eps, weightDecay, amsgrad } = group.options;\n\n // Re-validate hyperparameters (they might have been changed)\n assertFiniteNonNegative(\"learning rate\", lr);\n assertInRange(\"beta1\", beta1, 0, 1);\n assertInRange(\"beta2\", beta2, 0, 1);\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n\n // Update each parameter in the group\n for (const param of group.params) {\n // Get gradient and validate\n const {\n grad,\n gradOffset,\n param: pData,\n paramOffset: pOff,\n } = assertHasGradFloat(param, \"AdamW\");\n const size = param.tensor.size;\n\n // Get or initialize optimizer state for this parameter\n const existing = this.state.get(param);\n const state =\n existing ??\n (() => {\n // Initialize state on first use\n const next = {\n step: 0,\n expAvg: new Float64Array(size), // First moment\n expAvgSq: new Float64Array(size), // Second moment\n ...(amsgrad ? 
{ maxExpAvgSq: new Float64Array(size) } : {}), // AMSGrad buffer\n };\n this.state.set(param, next);\n return next;\n })();\n\n // Validate state buffer sizes\n assertBufferSize(state.expAvg, size, \"AdamW expAvg\");\n assertBufferSize(state.expAvgSq, size, \"AdamW expAvgSq\");\n if (amsgrad && state.maxExpAvgSq) {\n assertBufferSize(state.maxExpAvgSq, size, \"AdamW maxExpAvgSq\");\n }\n\n // Increment per-parameter step counter\n state.step += 1;\n\n // Compute bias correction terms\n const biasCorrection1 = 1 - beta1 ** state.step;\n const biasCorrection2 = 1 - beta2 ** state.step;\n\n // Compute step size with bias correction\n const stepSize = lr / biasCorrection1;\n\n // Update each element of the parameter\n for (let i = 0; i < size; i++) {\n // Get current gradient and parameter values\n const gi = safeArrayAccess(grad, gradOffset + i, \"AdamW gradient\");\n const pi = safeArrayAccess(pData, pOff + i, \"AdamW parameter\");\n\n // Validate values are finite\n assertFinite(\"gradient\", gi);\n assertFinite(\"parameter\", pi);\n\n // Get current moment estimates\n const m = safeArrayAccess(state.expAvg, i, \"AdamW expAvg\");\n const v = safeArrayAccess(state.expAvgSq, i, \"AdamW expAvgSq\");\n\n // Update biased first moment estimate: m(t) = β1 * m(t-1) + (1 - β1) * g(t)\n const mNew = beta1 * m + (1 - beta1) * gi;\n\n // Update biased second raw moment estimate: v(t) = β2 * v(t-1) + (1 - β2) * g(t)^2\n const vNew = beta2 * v + (1 - beta2) * gi * gi;\n\n // Store updated moments\n state.expAvg[i] = mNew;\n state.expAvgSq[i] = vNew;\n\n // Determine which second moment to use (AMSGrad or standard)\n let denomSq = vNew;\n if (amsgrad) {\n const maxBuf = state.maxExpAvgSq;\n if (!maxBuf) {\n throw new DeepboxError(\"Internal error: AMSGrad enabled but maxExpAvgSq is missing\");\n }\n // AMSGrad: use maximum of all past second moments\n const maxV = Math.max(safeArrayAccess(maxBuf, i, \"AdamW maxExpAvgSq\"), vNew);\n maxBuf[i] = maxV;\n denomSq = maxV;\n }\n\n // Compute denominator with bias correction: √(v̂(t)) + ε\n const denom = Math.sqrt(denomSq / biasCorrection2) + eps;\n\n // AdamW update: θ(t+1) = θ(t) - lr * (m̂(t) / denom + λ * θ(t))\n // Note: weight decay is applied directly to parameters (decoupled)\n pData[pOff + i] = pi - stepSize * (mNew / denom) - lr * weightDecay * pi;\n }\n }\n }\n\n return loss;\n }\n}\n","import { InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n assertInRange,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\ntype NadamOptions = {\n lr: number;\n readonly beta1: number;\n readonly beta2: number;\n readonly eps: number;\n readonly weightDecay: number;\n readonly momentumDecay: number;\n};\n\ntype NadamState = {\n step: number;\n expAvg: Float64Array;\n expAvgSq: Float64Array;\n muProduct: number;\n};\n\n/**\n * Nadam (Nesterov-accelerated Adam) optimizer.\n *\n * Implements Nadam algorithm - combines Adam's adaptive learning rates with\n * Nesterov momentum for potentially faster convergence. 
Nadam applies Nesterov\n * acceleration to the momentum term, providing a \"look-ahead\" gradient.\n *\n * @example\n * ```ts\n * import { Nadam } from 'deepbox/optim';\n *\n * const optimizer = new Nadam(model.parameters(), {\n * lr: 0.002,\n * beta1: 0.9,\n * beta2: 0.999\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * optimizer.zeroGrad();\n * // ...\n * optimizer.step();\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class Nadam extends Optimizer<NadamOptions, NadamState> {\n private _stepCount = 0;\n\n get stepCount(): number {\n return this._stepCount;\n }\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<NadamOptions>>,\n options: {\n readonly lr?: number;\n readonly beta1?: number;\n readonly beta2?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n readonly momentumDecay?: number;\n } = {}\n ) {\n const defaults = {\n lr: options.lr ?? 0.002,\n beta1: options.beta1 ?? 0.9,\n beta2: options.beta2 ?? 0.999,\n eps: options.eps ?? 1e-8,\n weightDecay: options.weightDecay ?? 0,\n momentumDecay: options.momentumDecay ?? 0.004,\n };\n\n super(params, defaults);\n\n // Validate hyperparameters\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertInRange(\"beta1\", defaults.beta1, 0, 1);\n assertInRange(\"beta2\", defaults.beta2, 0, 1);\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n assertFiniteNonNegative(\"momentum_decay\", defaults.momentumDecay);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n protected isState(state: Record<string, unknown>): state is NadamState {\n return (\n typeof state[\"step\"] === \"number\" &&\n state[\"expAvg\"] instanceof Float64Array &&\n state[\"expAvgSq\"] instanceof Float64Array &&\n typeof state[\"muProduct\"] === \"number\"\n );\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n for (const group of this.paramGroups) {\n const { lr, beta1, beta2, eps, weightDecay, momentumDecay } = group.options;\n\n // Re-validate hyperparameters\n assertFiniteNonNegative(\"learning rate\", lr);\n assertInRange(\"beta1\", beta1, 0, 1);\n assertInRange(\"beta2\", beta2, 0, 1);\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n assertFiniteNonNegative(\"momentum_decay\", momentumDecay);\n\n for (const param of group.params) {\n const {\n grad: gradData,\n gradOffset: gOff,\n param: pData,\n paramOffset: pOff,\n } = assertHasGradFloat(param, \"Nadam\");\n const size = param.tensor.size;\n\n // Initialize state if needed\n let state = this.state.get(param);\n if (!state) {\n state = {\n step: 0,\n expAvg: new Float64Array(size),\n expAvgSq: new 
Float64Array(size),\n muProduct: 1,\n };\n this.state.set(param, state);\n }\n\n // Validate state buffer sizes\n assertBufferSize(state.expAvg, size, \"Nadam expAvg\");\n assertBufferSize(state.expAvgSq, size, \"Nadam expAvgSq\");\n\n state.step++;\n const t = state.step;\n\n const biasCorrection2 = 1 - beta2 ** t;\n const mu = beta1 * (1 - 0.5 * 0.96 ** (t * momentumDecay));\n const muNext = beta1 * (1 - 0.5 * 0.96 ** ((t + 1) * momentumDecay));\n const muProduct = state.muProduct * mu;\n const muProductNext = muProduct * muNext;\n state.muProduct = muProduct;\n\n for (let i = 0; i < size; i++) {\n const gi0 = safeArrayAccess(gradData, gOff + i, \"Nadam gradient\");\n const pi = safeArrayAccess(pData, pOff + i, \"Nadam parameter\");\n assertFinite(\"gradient\", gi0);\n assertFinite(\"parameter\", pi);\n\n // Apply weight decay\n const gi = weightDecay !== 0 ? gi0 + weightDecay * pi : gi0;\n\n // Update biased first moment estimate: m(t) = β1 * m(t-1) + (1 - β1) * g(t)\n const m = safeArrayAccess(state.expAvg, i, \"Nadam expAvg\");\n const mNew = beta1 * m + (1 - beta1) * gi;\n state.expAvg[i] = mNew;\n\n // Update biased second moment estimate: v(t) = β2 * v(t-1) + (1 - β2) * g(t)²\n const v = safeArrayAccess(state.expAvgSq, i, \"Nadam expAvgSq\");\n const vNew = beta2 * v + (1 - beta2) * gi * gi;\n state.expAvgSq[i] = vNew;\n\n const denom = Math.sqrt(vNew / biasCorrection2) + eps;\n const mHatNext = mNew / (1 - muProductNext);\n const gHat = gi / (1 - muProduct);\n const mNesterov = muNext * mHatNext + (1 - mu) * gHat;\n pData[pOff + i] = pi - (lr * mNesterov) / denom;\n }\n }\n }\n\n return loss;\n }\n}\n","import { InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertBufferSize,\n assertFinite,\n assertFiniteNonNegative,\n assertFinitePositive,\n assertHasGradFloat,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\n/**\n * Options for the RMSprop optimizer.\n *\n * @property lr - Learning rate (step size)\n * @property alpha - Smoothing constant for moving average of squared gradients\n * @property eps - Small constant for numerical stability\n * @property weightDecay - Weight decay coefficient (L2 penalty)\n * @property momentum - Momentum factor\n * @property centered - Whether to use centered RMSprop variant\n */\ntype RMSpropOptions = {\n lr: number;\n alpha: number;\n eps: number;\n weightDecay: number;\n momentum: number;\n centered: boolean;\n};\n\n/**\n * State maintained per parameter by RMSprop.\n *\n * @property squareAvg - Exponentially weighted average of squared gradients\n * @property momentumBuffer - Momentum buffer (if momentum > 0)\n * @property gradAvg - Exponentially weighted average of gradients (centered variant only)\n */\ntype RMSpropState = {\n squareAvg: Float64Array;\n momentumBuffer?: Float64Array;\n gradAvg?: Float64Array;\n};\n\n/**\n * RMSprop (Root Mean Square Propagation) optimizer.\n *\n * RMSprop adapts the learning rate for each parameter by dividing by a running\n * average of recent gradient magnitudes. 
This helps with non-stationary objectives\n * and is particularly effective for RNNs.\n *\n * @example\n * ```ts\n * import { RMSprop } from 'deepbox/optim';\n *\n * const optimizer = new RMSprop(model.parameters(), {\n * lr: 0.01,\n * alpha: 0.99,\n * momentum: 0.9,\n * centered: true\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * optimizer.zeroGrad();\n * // ...\n * optimizer.step();\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class RMSprop extends Optimizer<RMSpropOptions, RMSpropState> {\n /** Internal counter tracking total number of optimization steps */\n private _stepCount = 0;\n\n /**\n * Get the total number of optimization steps performed.\n *\n * @returns Number of steps taken\n */\n get stepCount(): number {\n return this._stepCount;\n }\n /**\n * Create a new RMSprop optimizer.\n *\n * @param params - Iterable of parameters or parameter groups to optimize\n * @param options - Optimization options\n * @param options.lr - Learning rate (default: 0.01)\n * @param options.alpha - Smoothing constant (default: 0.99)\n * @param options.eps - Numerical stability constant (default: 1e-8)\n * @param options.weightDecay - Weight decay coefficient (default: 0)\n * @param options.momentum - Momentum factor (default: 0)\n * @param options.centered - Use centered variant (default: false)\n * @throws {InvalidParameterError} If a parameter is invalid\n */\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<RMSpropOptions>>,\n options: {\n readonly lr?: number;\n readonly alpha?: number;\n readonly eps?: number;\n readonly weightDecay?: number;\n readonly momentum?: number;\n readonly centered?: boolean;\n } = {}\n ) {\n // Set default values for all options\n const defaults = {\n lr: options.lr ?? 0.01,\n alpha: options.alpha ?? 0.99,\n eps: options.eps ?? 1e-8,\n weightDecay: options.weightDecay ?? 0,\n momentum: options.momentum ?? 0,\n centered: options.centered ?? 
false,\n };\n\n super(params, defaults);\n\n // Validate all hyperparameters\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n if (!Number.isFinite(defaults.alpha) || defaults.alpha < 0 || defaults.alpha > 1) {\n throw new InvalidParameterError(\n `Invalid alpha: ${defaults.alpha} (must be in range [0, 1])`,\n \"alpha\",\n defaults.alpha\n );\n }\n assertFinitePositive(\"epsilon\", defaults.eps);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n assertFiniteNonNegative(\"momentum value\", defaults.momentum);\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n\n protected isState(state: Record<string, unknown>): state is RMSpropState {\n if (!(state[\"squareAvg\"] instanceof Float64Array)) return false;\n if (\n state[\"momentumBuffer\"] !== undefined &&\n !(state[\"momentumBuffer\"] instanceof Float64Array)\n ) {\n return false;\n }\n if (state[\"gradAvg\"] !== undefined && !(state[\"gradAvg\"] instanceof Float64Array)) {\n return false;\n }\n return true;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n // Evaluate closure if provided\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n // Update each parameter group\n for (const group of this.paramGroups) {\n const { lr, alpha, eps, weightDecay, momentum, centered } = group.options;\n\n // Re-validate hyperparameters\n assertFiniteNonNegative(\"learning rate\", lr);\n if (!Number.isFinite(alpha) || alpha < 0 || alpha > 1) {\n throw new InvalidParameterError(\n `Invalid alpha: ${alpha} (must be in range [0, 1])`,\n \"alpha\",\n alpha\n );\n }\n assertFinitePositive(\"epsilon\", eps);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n assertFiniteNonNegative(\"momentum value\", momentum);\n\n // Update each parameter in the group\n for (const param of group.params) {\n // Get gradient and validate\n const {\n grad: gradData,\n gradOffset: gOff,\n param: pData,\n paramOffset: pOff,\n } = assertHasGradFloat(param, \"RMSprop\");\n const size = param.tensor.size;\n\n // Get or initialize optimizer state for this parameter\n let state = this.state.get(param);\n if (!state) {\n state = {\n squareAvg: new Float64Array(size),\n };\n this.state.set(param, state);\n }\n\n // Initialize momentum buffer if needed\n if (momentum > 0 && !state.momentumBuffer) {\n state.momentumBuffer = new Float64Array(size);\n }\n\n // Initialize gradient average buffer if centered variant is used\n if (centered && !state.gradAvg) {\n state.gradAvg = new Float64Array(size);\n }\n\n // Validate state buffer sizes\n assertBufferSize(state.squareAvg, size, \"RMSprop squareAvg\");\n if (momentum > 0 && state.momentumBuffer) {\n assertBufferSize(state.momentumBuffer, size, \"RMSprop momentumBuffer\");\n }\n if (centered && state.gradAvg) {\n assertBufferSize(state.gradAvg, size, \"RMSprop 
gradAvg\");\n }\n\n // Update each element of the parameter\n for (let i = 0; i < size; i++) {\n // Get current gradient and parameter values\n const gi = safeArrayAccess(gradData, gOff + i, \"RMSprop gradient\");\n const pi = safeArrayAccess(pData, pOff + i, \"RMSprop parameter\");\n\n // Validate values are finite\n assertFinite(\"gradient\", gi);\n assertFinite(\"parameter\", pi);\n\n // Apply weight decay to gradient if specified\n let grad = gi;\n if (weightDecay !== 0) {\n grad = grad + weightDecay * pi;\n }\n\n // Update exponentially weighted average of squared gradients\n // v(t) = α * v(t-1) + (1 - α) * g(t)^2\n const sqAvg = safeArrayAccess(state.squareAvg, i, \"RMSprop squareAvg\");\n const sqAvgNew = alpha * sqAvg + (1 - alpha) * grad * grad;\n state.squareAvg[i] = sqAvgNew;\n\n // Compute adaptive learning rate denominator\n let avg = sqAvgNew;\n\n // Centered variant: subtract squared mean of gradients\n if (centered) {\n const gAvg = state.gradAvg ? safeArrayAccess(state.gradAvg, i, \"RMSprop gradAvg\") : 0;\n // Update exponentially weighted average of gradients\n const gAvgNew = alpha * gAvg + (1 - alpha) * grad;\n if (state.gradAvg) state.gradAvg[i] = gAvgNew;\n // Use variance instead of second moment\n avg = sqAvgNew - gAvgNew * gAvgNew;\n }\n\n const denom = centered ? Math.sqrt(Math.max(avg, 0) + eps) : Math.sqrt(avg) + eps;\n const normalizedGrad = grad / denom;\n\n // Apply momentum if specified\n if (momentum > 0) {\n const buf = state.momentumBuffer\n ? safeArrayAccess(state.momentumBuffer, i, \"RMSprop momentumBuffer\")\n : 0;\n const bufNew = momentum * buf + normalizedGrad;\n if (state.momentumBuffer) state.momentumBuffer[i] = bufNew;\n // Update parameter with momentum\n pData[pOff + i] = pi - lr * bufNew;\n } else {\n // Update parameter without momentum\n pData[pOff + i] = pi - lr * normalizedGrad;\n }\n }\n }\n }\n\n return loss;\n }\n}\n","import { InvalidParameterError } from \"../../core\";\nimport type { GradTensor } from \"../../ndarray\";\nimport {\n assertFinite,\n assertFiniteNonNegative,\n assertHasGradFloat,\n safeArrayAccess,\n} from \"../_internal\";\nimport { Optimizer, type ParamGroup } from \"../Optimizer\";\n\ntype SGDOptions = {\n lr: number;\n momentum: number;\n dampening: number;\n weightDecay: number;\n nesterov: boolean;\n};\n\ntype SGDState = {\n momentumBuffer?: Float64Array;\n};\n\n/**\n * Stochastic Gradient Descent (SGD) optimizer.\n *\n * Implements vanilla SGD with optional momentum, weight decay, and Nesterov acceleration.\n *\n * @example\n * ```ts\n * import { SGD } from 'deepbox/optim';\n * import { Module } from 'deepbox/nn';\n *\n * const model: Module = ...;\n * const optimizer = new SGD(model.parameters(), {\n * lr: 0.01,\n * momentum: 0.9,\n * weightDecay: 5e-4,\n * nesterov: true\n * });\n *\n * // Training loop\n * for (let epoch = 0; epoch < numEpochs; epoch++) {\n * for (const [inputs, targets] of dataLoader) {\n * optimizer.zeroGrad();\n * const outputs = model.forward(inputs);\n * const loss = criterion(outputs, targets);\n * loss.backward();\n * optimizer.step();\n * }\n * }\n * ```\n *\n * @category Optimizers\n */\nexport class SGD extends Optimizer<SGDOptions, SGDState> {\n /** Internal counter tracking total number of optimization steps */\n private _stepCount = 0;\n\n get stepCount(): number {\n return this._stepCount;\n }\n /**\n * Create a new SGD optimizer.\n *\n * @param params - Iterable of parameters or parameter groups to optimize\n * @param options - Optimization options\n * @param options.lr - 
Learning rate (default: 0.01)\n * @param options.momentum - Momentum factor (default: 0)\n * @param options.dampening - Dampening for momentum (default: 0)\n * @param options.weightDecay - Weight decay (L2 penalty) (default: 0)\n * @param options.nesterov - Enable Nesterov momentum (default: false)\n */\n constructor(\n params: Iterable<GradTensor> | ReadonlyArray<ParamGroup<SGDOptions>>,\n options: {\n readonly lr?: number;\n readonly momentum?: number;\n readonly dampening?: number;\n readonly weightDecay?: number;\n readonly nesterov?: boolean;\n } = {}\n ) {\n const defaults = {\n lr: options.lr ?? 0.01,\n momentum: options.momentum ?? 0,\n dampening: options.dampening ?? 0,\n weightDecay: options.weightDecay ?? 0,\n nesterov: options.nesterov ?? false,\n };\n\n super(params, defaults);\n\n // Validate options\n assertFiniteNonNegative(\"learning rate\", defaults.lr);\n assertFiniteNonNegative(\"momentum value\", defaults.momentum);\n assertFiniteNonNegative(\"dampening\", defaults.dampening);\n assertFiniteNonNegative(\"weight_decay value\", defaults.weightDecay);\n if (defaults.nesterov && (defaults.momentum <= 0 || defaults.dampening !== 0)) {\n throw new InvalidParameterError(\n \"Nesterov momentum requires a momentum and zero dampening\",\n \"nesterov\",\n {\n momentum: defaults.momentum,\n dampening: defaults.dampening,\n nesterov: defaults.nesterov,\n }\n );\n }\n }\n\n /**\n * Perform a single optimization step.\n *\n * Implements the SGD update rule with optional momentum and weight decay.\n *\n * @param closure - Optional closure that reevaluates the model and returns the loss\n * @returns Loss value if closure is provided\n */\n protected isState(state: Record<string, unknown>): state is SGDState {\n if (\n state[\"momentumBuffer\"] !== undefined &&\n !(state[\"momentumBuffer\"] instanceof Float64Array)\n ) {\n return false;\n }\n return true;\n }\n\n step(closure?: () => number): number | undefined {\n let loss: number | undefined;\n\n // Evaluate loss if closure provided\n if (closure) {\n loss = closure();\n }\n\n // Increment global step counter\n this._stepCount++;\n\n // Update each parameter group\n for (const group of this.paramGroups) {\n const { lr, momentum, dampening, weightDecay, nesterov } = group.options;\n\n assertFiniteNonNegative(\"learning rate\", lr);\n assertFiniteNonNegative(\"momentum value\", momentum);\n assertFiniteNonNegative(\"dampening\", dampening);\n assertFiniteNonNegative(\"weight_decay value\", weightDecay);\n\n if (nesterov && (momentum <= 0 || dampening !== 0)) {\n throw new InvalidParameterError(\n \"Nesterov momentum requires a momentum and zero dampening\",\n \"nesterov\",\n { momentum, dampening, nesterov }\n );\n }\n\n for (const param of group.params) {\n const {\n grad: gradData,\n gradOffset,\n param: paramData,\n paramOffset,\n } = assertHasGradFloat(param, \"SGD\");\n const size = param.tensor.size;\n\n let state = this.state.get(param);\n if (!state) {\n state = {};\n this.state.set(param, state);\n }\n\n // Momentum buffer is stored densely (one value per element).\n let momentumBuffer: Float64Array | undefined;\n if (momentum !== 0) {\n if (!state.momentumBuffer) {\n state.momentumBuffer = new Float64Array(size);\n }\n momentumBuffer = state.momentumBuffer;\n }\n\n for (let i = 0; i < size; i++) {\n const gi = safeArrayAccess(gradData, gradOffset + i, \"SGD gradient\");\n const pi = safeArrayAccess(paramData, paramOffset + i, \"SGD parameter\");\n assertFinite(\"gradient\", gi);\n assertFinite(\"parameter\", pi);\n\n // d_p = 
grad + weightDecay * param\n let d = gi;\n if (weightDecay !== 0) {\n d = d + weightDecay * pi;\n }\n\n if (momentumBuffer) {\n const bPrev = safeArrayAccess(momentumBuffer, i, \"SGD momentum buffer\");\n const bNew = momentum * bPrev + (1 - dampening) * d;\n momentumBuffer[i] = bNew;\n d = nesterov ? d + momentum * bNew : bNew;\n }\n\n // param -= lr * d\n paramData[paramOffset + i] = pi - lr * d;\n }\n }\n }\n\n return loss;\n }\n\n /**\n * Get the current learning rate.\n *\n * @param groupIdx - Parameter group index (default: 0)\n * @returns Current learning rate\n */\n getLearningRate(groupIdx = 0): number {\n const group = this.paramGroups[groupIdx];\n if (!group) {\n throw new InvalidParameterError(\n `Invalid group index: ${groupIdx} (valid range: [0, ${this.paramGroups.length}))`,\n \"groupIdx\",\n groupIdx\n );\n }\n return group.options.lr;\n }\n\n /**\n * Set the learning rate for all parameter groups.\n *\n * @param lr - New learning rate\n */\n setLearningRate(lr: number): void {\n assertFiniteNonNegative(\"learning rate\", lr);\n for (const group of this.paramGroups) {\n group.options.lr = lr;\n }\n }\n}\n","// Learning rate schedulers for optimizers\nimport { InvalidParameterError } from \"../core\";\n\n/**\n * Interface for optimizer-like objects that schedulers can work with.\n * This allows schedulers to work with different optimizer implementations.\n * Parameter groups may expose `lr` directly or via `options.lr`.\n */\ninterface SchedulerOptimizer {\n paramGroups: SchedulerParamGroup[];\n}\n\ntype SchedulerParamGroup = {\n params: unknown[];\n lr?: number;\n options?: Record<string, unknown>;\n};\n\nfunction isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === \"object\" && value !== null;\n}\n\nfunction resolveGroupLr(group: SchedulerParamGroup, index: number) {\n const options = isRecord(group.options) ? group.options : undefined;\n const lrValue = group.lr ?? 
options?.[\"lr\"];\n if (typeof lrValue !== \"number\" || !Number.isFinite(lrValue) || lrValue < 0) {\n throw new InvalidParameterError(\n `optimizer.paramGroups[${index}].lr must be finite and >= 0`,\n `optimizer.paramGroups[${index}].lr`,\n lrValue\n );\n }\n return lrValue;\n}\n\nfunction setGroupLr(group: SchedulerParamGroup, lr: number) {\n if (isRecord(group.options)) {\n group.options[\"lr\"] = lr;\n }\n if (\"lr\" in group) {\n group.lr = lr;\n }\n if (!(\"lr\" in group) && !isRecord(group.options)) {\n group.lr = lr;\n }\n}\n\nfunction validateLastEpoch(value: number) {\n if (!Number.isInteger(value) || value < -1) {\n throw new InvalidParameterError(\"lastEpoch must be an integer >= -1\", \"lastEpoch\", value);\n }\n return value;\n}\n\nfunction validateFiniteNumber(value: number, name: string) {\n if (!Number.isFinite(value)) {\n throw new InvalidParameterError(`${name} must be finite`, name, value);\n }\n return value;\n}\n\nfunction validatePositiveNumber(value: number, name: string) {\n if (!Number.isFinite(value) || value <= 0) {\n throw new InvalidParameterError(`${name} must be > 0`, name, value);\n }\n return value;\n}\n\nfunction validatePositiveInteger(value: number, name: string) {\n if (!Number.isInteger(value) || value <= 0) {\n throw new InvalidParameterError(`${name} must be a positive integer`, name, value);\n }\n return value;\n}\n\nfunction validateNonNegativeNumber(value: number, name: string) {\n if (!Number.isFinite(value) || value < 0) {\n throw new InvalidParameterError(`${name} must be >= 0`, name, value);\n }\n return value;\n}\n\nfunction validateNonNegativeInteger(value: number, name: string) {\n if (!Number.isInteger(value) || value < 0) {\n throw new InvalidParameterError(`${name} must be a non-negative integer`, name, value);\n }\n return value;\n}\n\nfunction validateOptimizer(optimizer: SchedulerOptimizer) {\n if (!optimizer || typeof optimizer !== \"object\" || !Array.isArray(optimizer.paramGroups)) {\n throw new InvalidParameterError(\n \"optimizer must expose paramGroups array\",\n \"optimizer\",\n optimizer\n );\n }\n if (optimizer.paramGroups.length === 0) {\n throw new InvalidParameterError(\n \"optimizer.paramGroups must contain at least one group\",\n \"optimizer.paramGroups\",\n optimizer.paramGroups\n );\n }\n for (let i = 0; i < optimizer.paramGroups.length; i++) {\n const group = optimizer.paramGroups[i];\n if (!group || typeof group !== \"object\") {\n throw new InvalidParameterError(\n `optimizer.paramGroups[${i}] must be an object`,\n \"optimizer.paramGroups\",\n group\n );\n }\n if (!Array.isArray(group.params)) {\n throw new InvalidParameterError(\n `optimizer.paramGroups[${i}].params must be an array`,\n `optimizer.paramGroups[${i}].params`,\n group.params\n );\n }\n resolveGroupLr(group, i);\n }\n}\n\nfunction validateMilestones(milestones: number[]) {\n if (!Array.isArray(milestones) || milestones.length === 0) {\n throw new InvalidParameterError(\n \"milestones must be a non-empty array of non-negative integers\",\n \"milestones\",\n milestones\n );\n }\n\n const sorted = [...milestones].sort((a, b) => a - b);\n for (let i = 0; i < sorted.length; i++) {\n const value = sorted[i];\n if (value === undefined || !Number.isInteger(value) || value < 0) {\n throw new InvalidParameterError(\n \"milestones must contain non-negative integers only\",\n \"milestones\",\n milestones\n );\n }\n if (i > 0) {\n const prev = sorted[i - 1];\n if (prev !== undefined && value <= prev) {\n throw new InvalidParameterError(\n \"milestones must be 
strictly increasing\",\n \"milestones\",\n milestones\n );\n }\n }\n }\n return sorted;\n}\n\n/**\n * Base class for learning rate schedulers.\n *\n * Learning rate schedulers adjust the learning rate during training according\n * to a predefined schedule. This can help improve convergence and prevent\n * overshooting optimal solutions.\n *\n * @example\n * ```ts\n * import { SGD, StepLR } from 'deepbox/optim';\n *\n * const optimizer = new SGD(model.parameters(), { lr: 0.1 });\n * const scheduler = new StepLR(optimizer, { stepSize: 10, gamma: 0.1 });\n *\n * for (let epoch = 0; epoch < 100; epoch++) {\n * train();\n * scheduler.step();\n * }\n * ```\n *\n * @category Optimization\n */\nexport abstract class LRScheduler {\n protected optimizer: SchedulerOptimizer;\n protected lastEpoch: number;\n protected baseLrs: number[];\n\n constructor(optimizer: SchedulerOptimizer, lastEpoch: number = -1) {\n validateOptimizer(optimizer);\n this.lastEpoch = validateLastEpoch(lastEpoch);\n this.optimizer = optimizer;\n\n // Store base learning rates from all parameter groups\n this.baseLrs = optimizer.paramGroups.map((group, index) => resolveGroupLr(group, index));\n }\n\n protected initializeFromLastEpoch(lastEpoch: number): void {\n const validated = validateLastEpoch(lastEpoch);\n if (validated < 0) {\n return;\n }\n this.lastEpoch = -1;\n for (let i = 0; i <= validated; i++) {\n this.step();\n }\n }\n\n /**\n * Compute the learning rate for the current epoch.\n * Must be implemented by subclasses.\n *\n * @returns Array of learning rates for each parameter group\n */\n abstract getLr(): number[];\n\n /**\n * Perform a scheduler step, updating learning rates.\n *\n * Should be called once per epoch after the optimizer step.\n */\n step(): void {\n this.lastEpoch++;\n const newLrs = this.getLr();\n\n for (let i = 0; i < this.optimizer.paramGroups.length; i++) {\n const group = this.optimizer.paramGroups[i];\n if (group) {\n const next = newLrs[i];\n if (next !== undefined) {\n setGroupLr(group, next);\n }\n }\n }\n }\n\n /**\n * Get the current learning rates for all parameter groups.\n */\n getLastLr(): number[] {\n return this.optimizer.paramGroups.map((group, index) => resolveGroupLr(group, index));\n }\n\n /**\n * Get current epoch number.\n */\n get epoch() {\n return this.lastEpoch;\n }\n}\n\n/**\n * Step learning rate scheduler.\n *\n * Decays the learning rate by gamma every stepSize epochs.\n * lr = baseLr * gamma^(epoch // stepSize)\n *\n * @example\n * ```ts\n * const scheduler = new StepLR(optimizer, { stepSize: 30, gamma: 0.1 });\n * // lr = 0.1 for epochs 0-29\n * // lr = 0.01 for epochs 30-59\n * // lr = 0.001 for epochs 60-89\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.StepLR.html | PyTorch StepLR}\n */\nexport class StepLR extends LRScheduler {\n private stepSize: number;\n private gamma: number;\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: { stepSize: number; gamma?: number; lastEpoch?: number }\n ) {\n const stepSize = validatePositiveInteger(options.stepSize, \"stepSize\");\n const gamma = validatePositiveNumber(options.gamma ?? 0.1, \"gamma\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? 
-1);\n super(optimizer, -1);\n this.stepSize = stepSize;\n this.gamma = gamma;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n const factor = this.gamma ** Math.floor(this.lastEpoch / this.stepSize);\n return this.baseLrs.map((lr) => lr * factor);\n }\n}\n\n/**\n * Exponential learning rate scheduler.\n *\n * Decays the learning rate exponentially every epoch.\n * lr = baseLr * gamma^epoch\n *\n * @example\n * ```ts\n * const scheduler = new ExponentialLR(optimizer, { gamma: 0.95 });\n * // lr *= 0.95 each epoch\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.ExponentialLR.html | PyTorch ExponentialLR}\n */\nexport class ExponentialLR extends LRScheduler {\n private gamma: number;\n\n constructor(optimizer: SchedulerOptimizer, options: { gamma: number; lastEpoch?: number }) {\n const gamma = validatePositiveNumber(options.gamma, \"gamma\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? -1);\n super(optimizer, -1);\n this.gamma = gamma;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n return this.baseLrs.map((lr) => lr * this.gamma ** this.lastEpoch);\n }\n}\n\n/**\n * Cosine annealing learning rate scheduler.\n *\n * Sets the learning rate using a cosine annealing schedule.\n * lr = etaMin + (baseLr - etaMin) * (1 + cos(π * epoch / T_max)) / 2\n *\n * @example\n * ```ts\n * const scheduler = new CosineAnnealingLR(optimizer, { T_max: 100, etaMin: 0.001 });\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.CosineAnnealingLR.html | PyTorch CosineAnnealingLR}\n */\nexport class CosineAnnealingLR extends LRScheduler {\n private T_max: number;\n private etaMin: number;\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: { T_max: number; etaMin?: number; lastEpoch?: number }\n ) {\n const tMax = validatePositiveInteger(options.T_max, \"T_max\");\n const etaMin = validateNonNegativeNumber(options.etaMin ?? 0, \"etaMin\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? -1);\n super(optimizer, -1);\n this.T_max = tMax;\n this.etaMin = etaMin;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n return this.baseLrs.map((baseLr) => {\n return (\n this.etaMin +\n ((baseLr - this.etaMin) * (1 + Math.cos((Math.PI * this.lastEpoch) / this.T_max))) / 2\n );\n });\n }\n}\n\n/**\n * Multi-step learning rate scheduler.\n *\n * Decays the learning rate by gamma once the epoch reaches one of the milestones.\n *\n * @example\n * ```ts\n * const scheduler = new MultiStepLR(optimizer, { milestones: [30, 80], gamma: 0.1 });\n * // lr = 0.1 for epochs 0-29\n * // lr = 0.01 for epochs 30-79\n * // lr = 0.001 for epochs 80+\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.MultiStepLR.html | PyTorch MultiStepLR}\n */\nexport class MultiStepLR extends LRScheduler {\n private sortedMilestones: number[];\n private gamma: number;\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: { milestones: number[]; gamma?: number; lastEpoch?: number }\n ) {\n const milestones = validateMilestones(options.milestones);\n const gamma = validatePositiveNumber(options.gamma ?? 0.1, \"gamma\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? 
-1);\n super(optimizer, -1);\n this.sortedMilestones = milestones;\n this.gamma = gamma;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n // Count how many milestones we've passed\n let numDecays = 0;\n for (const milestone of this.sortedMilestones) {\n if (this.lastEpoch >= milestone) {\n numDecays++;\n }\n }\n const factor = this.gamma ** numDecays;\n return this.baseLrs.map((lr) => lr * factor);\n }\n}\n\n/**\n * Linear learning rate scheduler.\n *\n * Linearly interpolates the learning rate multiplicative factor from startFactor\n * to endFactor over totalIters epochs. After totalIters, the factor remains at endFactor.\n *\n * lr = baseLr * (startFactor + (endFactor - startFactor) * epoch / totalIters)\n *\n * @example\n * ```ts\n * const scheduler = new LinearLR(optimizer, {\n * startFactor: 0.1,\n * endFactor: 0.01,\n * totalIters: 100\n * });\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.LinearLR.html | PyTorch LinearLR}\n */\nexport class LinearLR extends LRScheduler {\n private startFactor: number;\n private endFactor: number;\n private totalIters: number;\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: {\n startFactor?: number;\n endFactor?: number;\n totalIters: number;\n lastEpoch?: number;\n }\n ) {\n const startFactor = validatePositiveNumber(options.startFactor ?? 1 / 3, \"startFactor\");\n const endFactor = validatePositiveNumber(options.endFactor ?? 1.0, \"endFactor\");\n const totalIters = validatePositiveInteger(options.totalIters, \"totalIters\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? -1);\n super(optimizer, -1);\n this.startFactor = startFactor;\n this.endFactor = endFactor;\n this.totalIters = totalIters;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n if (this.lastEpoch >= this.totalIters) {\n return this.baseLrs.map((lr) => lr * this.endFactor);\n }\n\n const factor =\n this.startFactor + (this.endFactor - this.startFactor) * (this.lastEpoch / this.totalIters);\n return this.baseLrs.map((lr) => lr * factor);\n }\n}\n\n/**\n * Reduce learning rate on plateau.\n *\n * Reduces learning rate when a metric has stopped improving.\n * This scheduler reads a metric value and if no improvement is seen\n * for 'patience' epochs, the learning rate is reduced.\n *\n * @example\n * ```ts\n * const scheduler = new ReduceLROnPlateau(optimizer, {\n * mode: 'min',\n * factor: 0.1,\n * patience: 10\n * });\n *\n * for (let epoch = 0; epoch < 100; epoch++) {\n * const valLoss = validate();\n * scheduler.step(valLoss);\n * }\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.ReduceLROnPlateau.html | PyTorch ReduceLROnPlateau}\n */\nexport class ReduceLROnPlateau {\n private optimizer: SchedulerOptimizer;\n private mode: \"min\" | \"max\";\n private factor: number;\n private patience: number;\n private threshold: number;\n private cooldown: number;\n private minLr: number;\n private best: number;\n private numBadEpochs: number;\n private cooldownCounter: number;\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: {\n mode?: \"min\" | \"max\";\n factor?: number;\n patience?: number;\n threshold?: number;\n cooldown?: number;\n minLr?: number;\n } = {}\n ) {\n this.optimizer = optimizer;\n validateOptimizer(optimizer);\n this.mode = options.mode ?? 
\"min\";\n if (this.mode !== \"min\" && this.mode !== \"max\") {\n throw new InvalidParameterError(\"mode must be 'min' or 'max'\", \"mode\", options.mode);\n }\n this.factor = validateFiniteNumber(options.factor ?? 0.1, \"factor\");\n if (this.factor <= 0 || this.factor >= 1) {\n throw new InvalidParameterError(\n \"factor must be in the interval (0, 1)\",\n \"factor\",\n this.factor\n );\n }\n this.patience = validateNonNegativeInteger(options.patience ?? 10, \"patience\");\n this.threshold = validateNonNegativeNumber(options.threshold ?? 1e-4, \"threshold\");\n this.cooldown = validateNonNegativeInteger(options.cooldown ?? 0, \"cooldown\");\n this.minLr = validateNonNegativeNumber(options.minLr ?? 0, \"minLr\");\n this.best = this.mode === \"min\" ? Infinity : -Infinity;\n this.numBadEpochs = 0;\n this.cooldownCounter = 0;\n }\n\n /**\n * Check if metric improved.\n */\n private isBetter(current: number): boolean {\n if (this.mode === \"min\") {\n return current < this.best - this.threshold;\n }\n return current > this.best + this.threshold;\n }\n\n /**\n * Perform a scheduler step based on the metric value.\n *\n * @param metric - Current value of the metric being monitored\n */\n step(metric: number): void {\n if (!Number.isFinite(metric)) {\n throw new InvalidParameterError(\"metric must be finite\", \"metric\", metric);\n }\n if (this.cooldownCounter > 0) {\n this.cooldownCounter--;\n this.numBadEpochs = 0;\n }\n\n if (this.isBetter(metric)) {\n this.best = metric;\n this.numBadEpochs = 0;\n } else if (this.cooldownCounter === 0) {\n this.numBadEpochs++;\n }\n\n if (this.numBadEpochs > this.patience) {\n this.reduceLr();\n this.cooldownCounter = this.cooldown;\n this.numBadEpochs = 0;\n }\n }\n\n /**\n * Reduce learning rate for all parameter groups.\n */\n private reduceLr(): void {\n for (let i = 0; i < this.optimizer.paramGroups.length; i++) {\n const group = this.optimizer.paramGroups[i];\n if (!group) {\n throw new InvalidParameterError(\n `optimizer.paramGroups[${i}] is missing`,\n \"optimizer.paramGroups\",\n group\n );\n }\n const currentLr = resolveGroupLr(group, i);\n const newLr = Math.max(currentLr * this.factor, this.minLr);\n setGroupLr(group, newLr);\n }\n }\n\n /**\n * Get the current learning rates for all parameter groups.\n */\n getLastLr(): number[] {\n return this.optimizer.paramGroups.map((group, index) => resolveGroupLr(group, index));\n }\n}\n\n/**\n * Warmup scheduler that wraps another scheduler.\n *\n * Linearly increases the learning rate from 0 to the base lr over warmupEpochs,\n * then delegates to the wrapped scheduler.\n *\n * @example\n * ```ts\n * const baseScheduler = new CosineAnnealingLR(optimizer, { T_max: 100 });\n * const scheduler = new WarmupLR(optimizer, baseScheduler, { warmupEpochs: 5 });\n * ```\n */\nexport class WarmupLR extends LRScheduler {\n private warmupEpochs: number;\n private afterScheduler: LRScheduler | null;\n\n constructor(\n optimizer: SchedulerOptimizer,\n afterScheduler: LRScheduler | null,\n options: { warmupEpochs: number; lastEpoch?: number }\n ) {\n const warmupEpochs = validatePositiveInteger(options.warmupEpochs, \"warmupEpochs\");\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? 
-1);\n super(optimizer, -1);\n this.warmupEpochs = warmupEpochs;\n this.afterScheduler = afterScheduler;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n if (this.lastEpoch < this.warmupEpochs) {\n // Linear warmup\n const factor = (this.lastEpoch + 1) / this.warmupEpochs;\n return this.baseLrs.map((lr) => lr * factor);\n }\n\n if (this.afterScheduler) {\n // Delegate to wrapped scheduler\n return this.afterScheduler.getLr();\n }\n\n return this.baseLrs;\n }\n\n override step(): void {\n super.step();\n\n // Also step the after scheduler once warmup is complete\n if (this.lastEpoch >= this.warmupEpochs && this.afterScheduler) {\n this.afterScheduler.step();\n }\n }\n}\n\n/**\n * One-cycle learning rate scheduler.\n *\n * Implements the 1cycle policy: lr starts at maxLr/divFactor, increases to maxLr\n * over pctStart of the training, then decreases to maxLr/finalDivFactor.\n *\n * @example\n * ```ts\n * const scheduler = new OneCycleLR(optimizer, {\n * maxLr: 0.1,\n * totalSteps: 1000,\n * pctStart: 0.3\n * });\n * ```\n *\n * @see {@link https://pytorch.org/docs/stable/generated/torch.optim.lr_scheduler.OneCycleLR.html | PyTorch OneCycleLR}\n */\nexport class OneCycleLR extends LRScheduler {\n private maxLr: number;\n private totalSteps: number;\n private pctStart: number;\n private divFactor: number;\n private finalDivFactor: number;\n private annealStrategy: \"cos\" | \"linear\";\n\n constructor(\n optimizer: SchedulerOptimizer,\n options: {\n maxLr: number;\n totalSteps: number;\n pctStart?: number;\n divFactor?: number;\n finalDivFactor?: number;\n annealStrategy?: \"cos\" | \"linear\";\n lastEpoch?: number;\n }\n ) {\n const maxLr = validatePositiveNumber(options.maxLr, \"maxLr\");\n const totalSteps = validatePositiveInteger(options.totalSteps, \"totalSteps\");\n const pctStart = validateFiniteNumber(options.pctStart ?? 0.3, \"pctStart\");\n if (pctStart <= 0 || pctStart >= 1) {\n throw new InvalidParameterError(\n \"pctStart must be in the interval (0, 1)\",\n \"pctStart\",\n pctStart\n );\n }\n const divFactor = validatePositiveNumber(options.divFactor ?? 25, \"divFactor\");\n const finalDivFactor = validatePositiveNumber(options.finalDivFactor ?? 1e4, \"finalDivFactor\");\n const annealStrategy = options.annealStrategy ?? \"cos\";\n if (annealStrategy !== \"cos\" && annealStrategy !== \"linear\") {\n throw new InvalidParameterError(\n \"annealStrategy must be 'cos' or 'linear'\",\n \"annealStrategy\",\n annealStrategy\n );\n }\n const lastEpoch = validateLastEpoch(options.lastEpoch ?? 
-1);\n super(optimizer, -1);\n this.maxLr = maxLr;\n this.totalSteps = totalSteps;\n this.pctStart = pctStart;\n this.divFactor = divFactor;\n this.finalDivFactor = finalDivFactor;\n this.annealStrategy = annealStrategy;\n this.initializeFromLastEpoch(lastEpoch);\n }\n\n getLr(): number[] {\n const stepNum = this.lastEpoch;\n const upSteps = Math.max(1, Math.floor(this.totalSteps * this.pctStart));\n const downSteps = Math.max(1, this.totalSteps - upSteps);\n\n const initialLr = this.maxLr / this.divFactor;\n const minLr = this.maxLr / this.finalDivFactor;\n\n let lr: number;\n\n if (stepNum >= this.totalSteps) {\n lr = minLr;\n } else if (stepNum < upSteps) {\n // Increasing phase\n const pct = stepNum / upSteps;\n lr = initialLr + (this.maxLr - initialLr) * pct;\n } else {\n // Decreasing phase\n const pct = (stepNum - upSteps) / downSteps;\n if (this.annealStrategy === \"cos\") {\n lr = minLr + ((this.maxLr - minLr) * (1 + Math.cos(Math.PI * pct))) / 2;\n } else {\n lr = this.maxLr - (this.maxLr - minLr) * pct;\n }\n }\n\n // Scale for each param group based on their base lr ratio\n const baseRef = this.baseLrs[0] ?? 0;\n return this.baseLrs.map((baseLr) => {\n if (baseRef === 0) {\n return baseLr === 0 ? 0 : lr;\n }\n return lr * (baseLr / baseRef);\n });\n }\n}\n"]}
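The embedded AdamW source above applies decoupled weight decay directly to the parameter (θ ← θ − stepSize·m̂/denom − lr·λ·θ) instead of folding it into the gradient. As a minimal, standalone sketch of that per-element update rule — not part of the deepbox API; the function and type names below are illustrative only — a single-scalar version looks like this:

```ts
// Hedged sketch of one AdamW element update, mirroring the loop body in the
// sources above. `ScalarAdamWState` and `adamwScalarStep` are hypothetical
// names introduced here for illustration.
type ScalarAdamWState = { step: number; m: number; v: number };

function adamwScalarStep(
  theta: number, // current parameter value
  grad: number, // gradient of the loss w.r.t. theta
  state: ScalarAdamWState,
  lr = 0.001,
  beta1 = 0.9,
  beta2 = 0.999,
  eps = 1e-8,
  weightDecay = 0.01
): number {
  state.step += 1;
  // Biased first and second moment estimates.
  state.m = beta1 * state.m + (1 - beta1) * grad;
  state.v = beta2 * state.v + (1 - beta2) * grad * grad;
  // Bias corrections.
  const biasCorrection1 = 1 - beta1 ** state.step;
  const biasCorrection2 = 1 - beta2 ** state.step;
  const stepSize = lr / biasCorrection1;
  const denom = Math.sqrt(state.v / biasCorrection2) + eps;
  // Decoupled weight decay: applied to the parameter itself, not to the
  // gradient — this is what distinguishes AdamW from Adam with L2 penalty.
  return theta - stepSize * (state.m / denom) - lr * weightDecay * theta;
}

// Minimal usage: a few steps on f(θ) = θ², whose gradient is 2θ.
const state: ScalarAdamWState = { step: 0, m: 0, v: 0 };
let theta = 1.0;
for (let t = 0; t < 3; t++) {
  theta = adamwScalarStep(theta, 2 * theta, state);
}
console.log(theta); // drifts toward 0
```

The packaged implementation performs this same computation per element over `Float64Array` moment buffers, with AMSGrad optionally replacing the second moment by the running maximum of all past second moments.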