deepbox 0.1.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/LICENSE +21 -0
- package/README.md +344 -0
- package/dist/CSRMatrix-CwGwQRea.d.cts +219 -0
- package/dist/CSRMatrix-KzNt6QpS.d.ts +219 -0
- package/dist/Tensor-BQLk1ltW.d.cts +147 -0
- package/dist/Tensor-g8mUClel.d.ts +147 -0
- package/dist/chunk-4S73VUBD.js +677 -0
- package/dist/chunk-4S73VUBD.js.map +1 -0
- package/dist/chunk-5R4S63PF.js +2925 -0
- package/dist/chunk-5R4S63PF.js.map +1 -0
- package/dist/chunk-6AE5FKKQ.cjs +9264 -0
- package/dist/chunk-6AE5FKKQ.cjs.map +1 -0
- package/dist/chunk-AD436M45.js +3854 -0
- package/dist/chunk-AD436M45.js.map +1 -0
- package/dist/chunk-ALS7ETWZ.cjs +4263 -0
- package/dist/chunk-ALS7ETWZ.cjs.map +1 -0
- package/dist/chunk-AU7XHGKJ.js +2092 -0
- package/dist/chunk-AU7XHGKJ.js.map +1 -0
- package/dist/chunk-B5TNKUEY.js +1481 -0
- package/dist/chunk-B5TNKUEY.js.map +1 -0
- package/dist/chunk-BCR7G3A6.js +9136 -0
- package/dist/chunk-BCR7G3A6.js.map +1 -0
- package/dist/chunk-C4PKXY74.cjs +1917 -0
- package/dist/chunk-C4PKXY74.cjs.map +1 -0
- package/dist/chunk-DWZY6PIP.cjs +6400 -0
- package/dist/chunk-DWZY6PIP.cjs.map +1 -0
- package/dist/chunk-E3EU5FZO.cjs +2113 -0
- package/dist/chunk-E3EU5FZO.cjs.map +1 -0
- package/dist/chunk-F3JWBINJ.js +1054 -0
- package/dist/chunk-F3JWBINJ.js.map +1 -0
- package/dist/chunk-FJYLIGJX.js +1940 -0
- package/dist/chunk-FJYLIGJX.js.map +1 -0
- package/dist/chunk-JSCDE774.cjs +729 -0
- package/dist/chunk-JSCDE774.cjs.map +1 -0
- package/dist/chunk-LWECRCW2.cjs +2412 -0
- package/dist/chunk-LWECRCW2.cjs.map +1 -0
- package/dist/chunk-MLBMYKCG.js +6379 -0
- package/dist/chunk-MLBMYKCG.js.map +1 -0
- package/dist/chunk-OX6QXFMV.cjs +3874 -0
- package/dist/chunk-OX6QXFMV.cjs.map +1 -0
- package/dist/chunk-PHV2DKRS.cjs +1072 -0
- package/dist/chunk-PHV2DKRS.cjs.map +1 -0
- package/dist/chunk-PL7TAYKI.js +4056 -0
- package/dist/chunk-PL7TAYKI.js.map +1 -0
- package/dist/chunk-PR647I7R.js +1898 -0
- package/dist/chunk-PR647I7R.js.map +1 -0
- package/dist/chunk-QERHVCHC.cjs +2960 -0
- package/dist/chunk-QERHVCHC.cjs.map +1 -0
- package/dist/chunk-XEG44RF6.cjs +1514 -0
- package/dist/chunk-XEG44RF6.cjs.map +1 -0
- package/dist/chunk-XMWVME2W.js +2377 -0
- package/dist/chunk-XMWVME2W.js.map +1 -0
- package/dist/chunk-ZB75FESB.cjs +1979 -0
- package/dist/chunk-ZB75FESB.cjs.map +1 -0
- package/dist/chunk-ZLW62TJG.cjs +4061 -0
- package/dist/chunk-ZLW62TJG.cjs.map +1 -0
- package/dist/chunk-ZXKBDFP3.js +4235 -0
- package/dist/chunk-ZXKBDFP3.js.map +1 -0
- package/dist/core/index.cjs +204 -0
- package/dist/core/index.cjs.map +1 -0
- package/dist/core/index.d.cts +2 -0
- package/dist/core/index.d.ts +2 -0
- package/dist/core/index.js +3 -0
- package/dist/core/index.js.map +1 -0
- package/dist/dataframe/index.cjs +22 -0
- package/dist/dataframe/index.cjs.map +1 -0
- package/dist/dataframe/index.d.cts +3 -0
- package/dist/dataframe/index.d.ts +3 -0
- package/dist/dataframe/index.js +5 -0
- package/dist/dataframe/index.js.map +1 -0
- package/dist/datasets/index.cjs +134 -0
- package/dist/datasets/index.cjs.map +1 -0
- package/dist/datasets/index.d.cts +3 -0
- package/dist/datasets/index.d.ts +3 -0
- package/dist/datasets/index.js +5 -0
- package/dist/datasets/index.js.map +1 -0
- package/dist/index-74AB8Cyh.d.cts +1126 -0
- package/dist/index-9oQx1HgV.d.cts +1180 -0
- package/dist/index-BJY2SI4i.d.ts +483 -0
- package/dist/index-BWGhrDlr.d.ts +733 -0
- package/dist/index-B_DK4FKY.d.cts +242 -0
- package/dist/index-BbA2Gxfl.d.ts +456 -0
- package/dist/index-BgHYAoSS.d.cts +837 -0
- package/dist/index-BndMbqsM.d.ts +1439 -0
- package/dist/index-C1mfVYoo.d.ts +2517 -0
- package/dist/index-CCvlwAmL.d.cts +809 -0
- package/dist/index-CDw5CnOU.d.ts +785 -0
- package/dist/index-Cn3SdB0O.d.ts +1126 -0
- package/dist/index-CrqLlS-a.d.ts +776 -0
- package/dist/index-D61yaSMY.d.cts +483 -0
- package/dist/index-D9Loo1_A.d.cts +2517 -0
- package/dist/index-DIT_OO9C.d.cts +785 -0
- package/dist/index-DIp_RrRt.d.ts +242 -0
- package/dist/index-DbultU6X.d.cts +1427 -0
- package/dist/index-DmEg_LCm.d.cts +776 -0
- package/dist/index-DoPWVxPo.d.cts +1439 -0
- package/dist/index-DuCxd-8d.d.ts +837 -0
- package/dist/index-Dx42TZaY.d.ts +809 -0
- package/dist/index-DyZ4QQf5.d.cts +456 -0
- package/dist/index-GFAVyOWO.d.ts +1427 -0
- package/dist/index-WHQLn0e8.d.cts +733 -0
- package/dist/index-ZtI1Iy4L.d.ts +1180 -0
- package/dist/index-eJgeni9c.d.cts +1911 -0
- package/dist/index-tk4lSYod.d.ts +1911 -0
- package/dist/index.cjs +72 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +17 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +15 -0
- package/dist/index.js.map +1 -0
- package/dist/linalg/index.cjs +86 -0
- package/dist/linalg/index.cjs.map +1 -0
- package/dist/linalg/index.d.cts +3 -0
- package/dist/linalg/index.d.ts +3 -0
- package/dist/linalg/index.js +5 -0
- package/dist/linalg/index.js.map +1 -0
- package/dist/metrics/index.cjs +158 -0
- package/dist/metrics/index.cjs.map +1 -0
- package/dist/metrics/index.d.cts +3 -0
- package/dist/metrics/index.d.ts +3 -0
- package/dist/metrics/index.js +5 -0
- package/dist/metrics/index.js.map +1 -0
- package/dist/ml/index.cjs +87 -0
- package/dist/ml/index.cjs.map +1 -0
- package/dist/ml/index.d.cts +3 -0
- package/dist/ml/index.d.ts +3 -0
- package/dist/ml/index.js +6 -0
- package/dist/ml/index.js.map +1 -0
- package/dist/ndarray/index.cjs +501 -0
- package/dist/ndarray/index.cjs.map +1 -0
- package/dist/ndarray/index.d.cts +5 -0
- package/dist/ndarray/index.d.ts +5 -0
- package/dist/ndarray/index.js +4 -0
- package/dist/ndarray/index.js.map +1 -0
- package/dist/nn/index.cjs +142 -0
- package/dist/nn/index.cjs.map +1 -0
- package/dist/nn/index.d.cts +6 -0
- package/dist/nn/index.d.ts +6 -0
- package/dist/nn/index.js +5 -0
- package/dist/nn/index.js.map +1 -0
- package/dist/optim/index.cjs +77 -0
- package/dist/optim/index.cjs.map +1 -0
- package/dist/optim/index.d.cts +4 -0
- package/dist/optim/index.d.ts +4 -0
- package/dist/optim/index.js +4 -0
- package/dist/optim/index.js.map +1 -0
- package/dist/plot/index.cjs +114 -0
- package/dist/plot/index.cjs.map +1 -0
- package/dist/plot/index.d.cts +6 -0
- package/dist/plot/index.d.ts +6 -0
- package/dist/plot/index.js +5 -0
- package/dist/plot/index.js.map +1 -0
- package/dist/preprocess/index.cjs +82 -0
- package/dist/preprocess/index.cjs.map +1 -0
- package/dist/preprocess/index.d.cts +4 -0
- package/dist/preprocess/index.d.ts +4 -0
- package/dist/preprocess/index.js +5 -0
- package/dist/preprocess/index.js.map +1 -0
- package/dist/random/index.cjs +74 -0
- package/dist/random/index.cjs.map +1 -0
- package/dist/random/index.d.cts +3 -0
- package/dist/random/index.d.ts +3 -0
- package/dist/random/index.js +5 -0
- package/dist/random/index.js.map +1 -0
- package/dist/stats/index.cjs +142 -0
- package/dist/stats/index.cjs.map +1 -0
- package/dist/stats/index.d.cts +3 -0
- package/dist/stats/index.d.ts +3 -0
- package/dist/stats/index.js +5 -0
- package/dist/stats/index.js.map +1 -0
- package/dist/tensor-B96jjJLQ.d.cts +205 -0
- package/dist/tensor-B96jjJLQ.d.ts +205 -0
- package/package.json +226 -0
@@ -0,0 +1 @@
+ [minified source map added as a single line: a Source Map v3 JSON object for the compiled ml module. Its "sources" array lists ../src/ml/index.ts, ../src/ml/_validation.ts, ../src/ml/clustering/DBSCAN.ts, ../src/ml/clustering/KMeans.ts, ../src/ml/decomposition/index.ts, ../src/ml/tree/DecisionTree.ts, ../src/ml/ensemble/GradientBoosting.ts, ../src/ml/linear/Lasso.ts, ../src/ml/linear/LinearRegression.ts, ../src/ml/linear/LogisticRegression.ts, ../src/ml/linear/Ridge.ts, ../src/ml/manifold/TSNE.ts, ../src/ml/naive_bayes/index.ts, ../src/ml/neighbors/index.ts, ../src/ml/svm/SVM.ts, and ../src/ml/tree/RandomForest.ts; its "names" array records the minified identifiers invM, delta, ez, sumExp, and mean; the rest of the line is base64-VLQ "mappings" data and inlined transpiled code.]
B,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AAGA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IACtC;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,QAAQ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AACzC,MAAA,MAAM,QAAQ,MAAA,CAAO,WAAA,CAAY,KAAK,WAAA,CAAY,MAAA,GAAS,CAAC,CAAC,CAAA;AAC7D,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAC5B,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,UAAU,CAAA,GAAK,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA,GAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,MACtB,gBAAgB,IAAA,CAAK,cAAA;AAAA,MACrB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,aAAa,IAAA,CAAK;AAAA,KACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,mBAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACF;;;AC35BO,IAAM,4BAAN,MAAqD;AAAA;AAAA,EAElD,WAAA;AAAA;AAAA,EAGA,YAAA;AAAA;AAAA,EAGA,QAAA;AAAA;AAAA,EAGA,eAAA;AAAA;AAAA,EAGA,aAAsC,EAAC;AAAA;AAAA,EAGvC,cAAA,GAAiB,CAAA;AAAA;AAAA,EAGjB,SAAA,GAAY,CAAA;AAAA;AAAA,EAGZ,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAKI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,GAAA;AAC1C,IAAA,IAAA,CAAK,YAAA,GAAe,QAAQ,YAAA,IAAgB,GAAA;AAC5C,IAAA,IAAA,CAAK,QAAA,GAAW,QAAQ,QAAA,IAAY,CAAA;AACpC,IAAA,IAAA,CAAK,eAAA,GAAkB,QAAQ,eAAA,IAAmB,CAAA;AAElD,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,eAAe,CAAA,EAAG;AAChE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,wCAAA;AAAA,QACA,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,YAAY,CAAA,IAAK,IAAA,CAAK,gBAAgB,CAAA,EAAG;AACjE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,+BAAA;AAAA,QACA,cAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,QAAQ,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACzD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,kCAAA;AAAA,QACA,UAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,kBAAkB,CAAA,EAAG;AACvE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yCAAA;AAAA,QACA,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAEjB,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAGA,IAAA,IAAA,CAAK,cAAA,GAAiB,MAAM,MAAA,CAAO,CAAC,KAAK,GAAA,KAAQ,GAAA,GAAM,GAAA,EAAK,CAAC,CAAA,GAAI
,QAAA;AAGjE,IAAA,MAAM,cAAc,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,IAAA,CAAK,KAAK,cAAc,CAAA;AAGxE,IAAA,IAAA,CAAK,aAAa,EAAC;AAEnB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AAEzC,MAAA,MAAM,YAAsB,EAAC;AAC7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,IAAA,CAAA,CAAM,MAAM,CAAC,CAAA,IAAK,MAAM,WAAA,CAAY,CAAC,KAAK,CAAA,CAAE,CAAA;AAAA,MACxD;AAGA,MAAA,MAAM,IAAA,GAAO,IAAI,qBAAA,CAAsB;AAAA,QACrC,UAAU,IAAA,CAAK,QAAA;AAAA,QACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,QACtB,cAAA,EAAgB;AAAA,OACjB,CAAA;AACD,MAAA,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,MAAA,CAAO,SAAS,CAAC,CAAA;AAC7B,MAAA,IAAA,CAAK,UAAA,CAAW,KAAK,IAAI,CAAA;AAGzB,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,WAAA,CAAY,CAAC,CAAA,GAAA,CACV,WAAA,CAAY,CAAC,KAAK,CAAA,IAAK,IAAA,CAAK,YAAA,GAAe,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,QAAA,CAAS,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,MACzF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,4DAA4D,CAAA;AAAA,IACvF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,2BAA2B,CAAA;AAEzE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,cAAc,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,IAAA,CAAK,KAAK,cAAc,CAAA;AAExE,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,UAAA,EAAY;AAClC,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,WAAA,CAAY,CAAC,CAAA,GAAA,CACV,WAAA,CAAY,CAAC,KAAK,CAAA,IAAK,IAAA,CAAK,YAAA,GAAe,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,QAAA,CAAS,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,MACzF;AAAA,IACF;AAEA,IAAA,OAAO,OAAO,WAAW,CAAA;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AAEA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IACtC;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,QAAQ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AACzC,MAAA,MAAM,QAAQ,MAAA,CAAO,WAAA,CAAY,KAAK,WAAA,CAAY,MAAA,GAAS,CAAC,CAAC,CAAA;AAC7D,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAC5B,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,UAAU,CAAA,GAAK,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA,GAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,cAAc,IAAA,CAAK,YAAA;AAAA,MACnB,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,iBAAiB,IAAA,CAAK;AAAA,KACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAA
U,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,mBAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACF;AAuBO,IAAM,6BAAN,MAAuD;AAAA;AAAA,EAEpD,WAAA;AAAA;AAAA,EAGA,YAAA;AAAA;AAAA,EAGA,QAAA;AAAA;AAAA,EAGA,eAAA;AAAA;AAAA,EAGA,aAAsC,EAAC;AAAA;AAAA,EAGvC,cAAA,GAAiB,CAAA;AAAA;AAAA,EAGjB,SAAA,GAAY,CAAA;AAAA;AAAA,EAGZ,cAAwB,EAAC;AAAA;AAAA,EAGzB,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAKI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,GAAA;AAC1C,IAAA,IAAA,CAAK,YAAA,GAAe,QAAQ,YAAA,IAAgB,GAAA;AAC5C,IAAA,IAAA,CAAK,QAAA,GAAW,QAAQ,QAAA,IAAY,CAAA;AACpC,IAAA,IAAA,CAAK,eAAA,GAAkB,QAAQ,eAAA,IAAmB,CAAA;AAElD,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,eAAe,CAAA,EAAG;AAChE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,wCAAA;AAAA,QACA,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,YAAY,CAAA,IAAK,IAAA,CAAK,gBAAgB,CAAA,EAAG;AACjE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,+BAAA;AAAA,QACA,cAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,QAAQ,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACzD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,kCAAA;AAAA,QACA,UAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,kBAAkB,CAAA,EAAG;AACvE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yCAAA;AAAA,QACA,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAEjB,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAGA,IAAA,IAAA,CAAK,WAAA,GAAc,CAAC,GAAG,IAAI,GAAA,CAAI,KAAK,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,IAAI,CAAC,CAAA;AAC3D,IAAA,IAAI,IAAA,CAAK,WAAA,CAAY,MAAA,KAAW,CAAA,EAAG;AACjC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,uDAAA;AAAA,QACA,GAAA;AAAA,QACA,KAAK,WAAA,CAAY;AAAA,OACnB;AAAA,IACF;AAGA,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,GAAA,CAAI,CAAC,KAAA,KAAW,KAAA,KAAU,IAAA,CAAK,WAAA,CAAY,CAAC,CAAA,GAAI,CAAA,GAAI,CAAE,CAAA;AAG5E,IAAA,MAAM,WAAW,OAAA,CAAQ,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,KAAM,CAAC,CAAA,CAAE,MAAA;AAChD,IAAA,MAAM,WAAW,QAAA,GAAW,QAAA;AAC5B,IAAA,IAAA,CAAK,iBAAiB,IAAA,CAAK,GAAA,CAAA,CAAK,QAAA,GAAW,CAAA,KAAM,WAAW,CAAA,CAAE,CAAA;AAG9D,IAAA,MAAM,YAAY,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,IAAA,CAAK,KAAK,cAAc,CAAA;AAGtE,IAAA,IAAA,CAAK,aAAa,EAAC;AAEnB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AAEzC,MAAA,MAAM,YAAsB,EAAC;AAC7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,IAAA,GAAO,KAAK,CAAA,GAAI,IAAA,CAAK,IAAI,EAAE,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA,CAAE,CAAA,CAAA;AACnD,QAAA,MAAM,GAAA,GAAM,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA;AAC1B,QAAA,SAAA,CAAU,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,MAC3B;AAGA,MAAA,MAAM,IAAA,GAAO,IAAI,qBAAA,CAAsB;AAAA,QACrC,UAAU,IAAA,CAAK,QAAA;AAAA,QACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,QACtB,cAAA,EAAgB;AAAA,OACjB,CAAA;AACD,MAAA,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,MAAA,CAAO,SAAS,CAAC,CAAA;AAC7B,MAAA,IAAA,CAAK,UAAA,CAAW,KAAK,IAAI,CAAA;AAGzB,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,CAAC,CAAA,GAAA,CACR,SAAA,CAAU,CAAC,KAAK,CAAA,IAAK,IAAA,CAAK,YAAA,GAAe,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,QAAA,CAAS,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,MACvF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK
,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,6DAA6D,CAAA;AAAA,IACxF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,4BAA4B,CAAA;AAE1E,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAE/B,IAAA,MAAM,YAAY,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,IAAA,CAAK,KAAK,cAAc,CAAA;AACtE,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,UAAA,EAAY;AAClC,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,CAAC,CAAA,GAAA,CACR,SAAA,CAAU,CAAC,KAAK,CAAA,IAAK,IAAA,CAAK,YAAA,GAAe,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,QAAA,CAAS,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,MACvF;AAAA,IACF;AAEA,IAAA,MAAM,cAAwB,EAAC;AAC/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,IAAA,GAAO,KAAK,CAAA,GAAI,IAAA,CAAK,IAAI,EAAE,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA,CAAE,CAAA,CAAA;AACnD,MAAA,MAAM,cAAA,GAAiB,QAAQ,GAAA,GAAM,IAAA,CAAK,YAAY,CAAC,CAAA,GAAI,IAAA,CAAK,WAAA,CAAY,CAAC,CAAA;AAC7E,MAAA,WAAA,CAAY,IAAA,CAAK,kBAAkB,CAAC,CAAA;AAAA,IACtC;AAEA,IAAA,OAAO,MAAA,CAAO,WAAA,EAAa,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,6DAA6D,CAAA;AAAA,IACxF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,4BAA4B,CAAA;AAE1E,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,YAAY,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,IAAA,CAAK,KAAK,cAAc,CAAA;AACtE,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,UAAA,EAAY;AAClC,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,CAAC,CAAA,GAAA,CACR,SAAA,CAAU,CAAC,KAAK,CAAA,IAAK,IAAA,CAAK,YAAA,GAAe,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,QAAA,CAAS,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,MACvF;AAAA,IACF;AAEA,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,IAAA,GAAO,KAAK,CAAA,GAAI,IAAA,CAAK,IAAI,EAAE,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA,CAAE,CAAA,CAAA;AACnD,MAAA,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,GAAI,IAAA,EAAM,IAAI,CAAC,CAAA;AAAA,IAC7B;AAEA,IAAA,OAAO,OAAO,KAAK,CAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,MAAA,CAAO,WAAA,CAAY,IAAA,CAAK,WAAA,CAAY,SAAS,CAAC,CAAC,CAAA,KAAM,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA,EAAG;AACrF,QAAA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,cAAc,IAAA,CAAK,YAAA;AAAA,MACnB,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,
iBAAiB,IAAA,CAAK;AAAA,KACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,mBAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACF;;;AC5iBO,IAAM,QAAN,MAAiC;AAAA;AAAA,EAE9B,OAAA;AAAA;AAAA,EAaA,KAAA;AAAA;AAAA,EAGA,UAAA,GAAa,CAAA;AAAA;AAAA,EAGb,YAAA;AAAA;AAAA,EAGA,MAAA;AAAA;AAAA,EAGA,MAAA,GAAS,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAejB,WAAA,CACE,OAAA,GAUI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,OAAA,GAAU,EAAE,GAAG,OAAA,EAAQ;AAC5B,IAAA,IAAI,IAAA,CAAK,OAAA,CAAQ,WAAA,KAAgB,MAAA,IAAa,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,OAAA,CAAQ,WAAW,CAAA,EAAG;AACxF,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,8CAAA,EAAiD,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,WAAW,CAAC,CAAA,CAAA;AAAA,QACjF,aAAA;AAAA,QACA,KAAK,OAAA,CAAQ;AAAA,OACf;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,SAAA,GAA0B;AAChC,IAAA,IAAI,IAAA,CAAK,OAAA,CAAQ,WAAA,KAAgB,MAAA,EAAW;AAC1C,MAAA,IAAI,IAAA,GAAO,KAAK,OAAA,CAAQ,WAAA;AACxB,MAAA,OAAO,MAAM;AACX,QAAA,IAAA,GAAA,CAAQ,IAAA,GAAO,OAAO,KAAA,IAAS,MAAA;AAC/B,QAAA,OAAO,IAAA,GAAO,MAAA;AAAA,MAChB,CAAA;AAAA,IACF;AACA,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6BA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAE9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AACtB,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAGd,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,KAAA,IAAS,CAAA;AACpC,IAAA,IAAI,EAAE,SAAS,CAAA,CAAA,EAAI;AACjB,MAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,6BAAA,EAAgC,KAAK,CAAA,CAAA,EAAI,SAAS,KAAK,CAAA;AAAA,IACzF;AAGA,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,IAAW,GAAA;AACxC,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,OAAA,CAAQ,GAAA,IAAO,IAAA;AAChC,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,IAAA;AAClD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,SAAA,IAAa,KAAA;AAC5C,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,QAAA,IAAY,KAAA;AAC1C,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,SAAA,IAAa,QAAA;AAC5C,IAAA,MAAM,GAAA,GAAM,KAAK,SAAA,EAAU;AAG3B,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAGxB,IAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAIpB,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,MAAM,QAAQ,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAEzC,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,MAC3C;AAGA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,KAAA,CAAM,CAAC,CAAA,GAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA;AAAA,QAC9D;AAAA,MACF;AAGA,MAAA,MAAMA,KAAAA,GAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA;AAC/B,MAAA,KAAA,IAASA,KAAAA;AACT,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,CAAM,CAAC,CAAA,GAAA,CAAK,KAAA,CAAM,CAAC,KAAK,CAAA,IAAKA,KAAAA;AAAA,MAC/B;AAAA,IACF;AAEA,IAAA,IAAI,MAAA;AACJ,IAAA,IAAI,SAAA,EAAW;AACb,MAAA,MAAA,GAAS,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACpC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,MAAM,QAAA,GAAW,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA,IAAK,YAAA,GAAgB,KAAA,CAAM,CAAC,KAAK,CAAA,GAAK,CAAA,CAAA;AACtF,UAAA,MAAA,CAAO,CAAC,CAAA,GAAA,CAAK,MAAA,CAAO,CAAC,CAAA,IAAK,K
AAK,QAAA,GAAW,QAAA;AAAA,QAC5C;AAAA,MACF;AACA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA,CAAK,KAAK,MAAA,CAAO,CAAC,KAAK,CAAC,CAAA;AAAA,MACtC;AAAA,IACF;AAEA,IAAA,MAAM,IAAA,GAAO,CAAC,WAAA,EAAqB,YAAA,KAAiC;AAClE,MAAA,MAAM,GAAA,GAAM,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,WAAA,GAAc,CAAA,GAAI,YAAY,CAAA,IAAK,CAAC,CAAA;AACzE,MAAA,MAAM,WAAW,GAAA,IAAO,YAAA,GAAgB,KAAA,CAAM,YAAY,KAAK,CAAA,GAAK,CAAA,CAAA;AACpE,MAAA,IAAI,aAAa,MAAA,EAAQ;AACvB,QAAA,MAAM,CAAA,GAAI,MAAA,CAAO,YAAY,CAAA,IAAK,CAAA;AAClC,QAAA,OAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,QAAA,GAAW,CAAA;AAAA,MAClC;AACA,MAAA,OAAO,QAAA;AAAA,IACT,CAAA;AAKA,IAAA,MAAM,WAAW,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAC5C,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,IAAI,CAAA,GAAI,CAAA;AACR,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,GAAA,GAAM,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACrB,QAAA,CAAA,IAAK,GAAA,GAAM,GAAA;AAAA,MACb;AAEA,MAAA,QAAA,CAAS,CAAC,CAAA,GAAI,CAAA,KAAM,CAAA,GAAI,IAAI,CAAA,GAAI,CAAA;AAAA,IAClC;AAKA,IAAA,MAAM,IAAI,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACrC,IAAA,IAAI,IAAA,CAAK,OAAA,CAAQ,SAAA,IAAa,IAAA,CAAK,KAAA,IAAS,IAAA,CAAK,KAAA,CAAM,IAAA,KAAS,CAAA,IAAK,IAAA,CAAK,KAAA,CAAM,IAAA,KAAS,CAAA,EAAG;AAE1F,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,CAAA,CAAE,CAAC,CAAA,GAAI,MAAA,CAAO,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,KAAA,CAAM,MAAA,GAAS,CAAC,CAAA,IAAK,CAAC,CAAA;AAAA,MAC3D;AAAA,IACF;AAKA,IAAA,MAAM,OAAO,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACxC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,IAAI,IAAA,GAAO,CAAA;AACX,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAA,IAAQ,KAAK,CAAA,EAAG,CAAC,CAAA,IAAK,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MAChC;AACA,MAAA,IAAA,CAAK,CAAC,CAAA,GAAI,IAAA;AAAA,IACZ;AAGA,IAAA,MAAM,IAAA,GAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA;AAI/B,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,OAAA,EAAS,IAAA,EAAA,EAAQ;AAEzC,MAAA,IAAI,SAAA,GAAY,CAAA;AAKhB,MAAA,IAAI,OAAA,GAA2B,IAAA;AAC/B,MAAA,IAAI,cAAc,QAAA,EAAU;AAC1B,QAAA,OAAA,GAAU,KAAA,CAAM,KAAK,EAAE,MAAA,EAAQ,GAAE,EAAG,CAAC,CAAA,EAAG,CAAA,KAAM,CAAC,CAAA;AAE/C,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAC9B,UAAA,MAAM,IAAI,IAAA,CAAK,KAAA,CAAM,GAAA,EAAI,IAAK,IAAI,CAAA,CAAE,CAAA;AACpC,UAAA,MAAM,GAAA,GAAM,QAAQ,CAAC,CAAA;AACrB,UAAA,OAAA,CAAQ,CAAC,CAAA,GAAI,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA;AAC3B,UAAA,OAAA,CAAQ,CAAC,IAAI,GAAA,IAAO,CAAA;AAAA,QACtB;AAAA,MACF;AAGA,MAAA,MAAM,SAAA,GAAY,OAAA,IAAW,KAAA,CAAM,IAAA,CAAK,EAAE,MAAA,EAAQ,CAAA,EAAE,EAAG,CAAC,CAAA,EAAG,CAAA,KAAM,CAAC,CAAA;AAClE,MAAA,KAAA,MAAW,KAAK,SAAA,EAAW;AACzB,QAAA,MAAM,KAAA,GAAQ,QAAA,CAAS,CAAC,CAAA,IAAK,CAAA;AAG7B,QAAA,IAAI,UAAU,CAAA,EAAG;AACf,UAAA,MAAM,KAAA,GAAQ,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA;AACtB,UAAA,IAAI,UAAU,CAAA,EAAG;AACf,YAAA,MAAMC,SAAQ,CAAC,KAAA;AACf,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,cAAA,IAAA,CAAK,CAAC,KAAK,IAAA,CAAK,CAAC,KAAK,CAAA,IAAKA,MAAAA,GAAQ,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AAAA,YAC9C;AACA,YAAA,SAAA,GAAY,KAAK,GAAA,CAAI,SAAA,EAAW,IAAA,CAAK,GAAA,CAAIA,MAAK,CAAC,CAAA;AAAA,UACjD;AACA,UAAA,CAAA,CAAE,CAAC,CAAA,GAAI,CAAA;AACP,UAAA;AAAA,QACF;AAKA,QAAA,IAAI,GAAA,GAAM,CAAA;AACV,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,MAAM,GAAA,GAAM,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACrB,UAAA,MAAM,EAAA,GAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAC,C
AAA,IAAK,YAAA,GAAe,KAAA,GAAQ,CAAA,CAAA;AAEvE,UAAA,MAAM,CAAA,GAAI,MAAM,IAAA,CAAK,CAAC,KAAK,CAAA,CAAA,GAAA,CAAM,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,IAAK,GAAA;AAC9C,UAAA,GAAA,IAAO,GAAA,GAAM,CAAA;AAAA,QACf;AACA,QAAA,GAAA,IAAO,IAAA;AAKP,QAAA,IAAI,IAAA,GAAO,IAAA,CAAK,aAAA,CAAc,GAAA,EAAK,KAAK,CAAA,GAAI,KAAA;AAG5C,QAAA,IAAI,QAAA,IAAY,OAAO,CAAA,EAAG;AACxB,UAAA,IAAA,GAAO,CAAA;AAAA,QACT;AAGA,QAAA,MAAM,KAAA,GAAQ,IAAA,IAAQ,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAK9B,QAAA,IAAI,UAAU,CAAA,EAAG;AACf,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,YAAA,IAAA,CAAK,CAAC,KAAK,IAAA,CAAK,CAAC,KAAK,CAAA,IAAK,KAAA,GAAQ,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AAAA,UAC9C;AAAA,QACF;AAGA,QAAA,CAAA,CAAE,CAAC,CAAA,GAAI,IAAA;AAGP,QAAA,SAAA,GAAY,KAAK,GAAA,CAAI,SAAA,EAAW,IAAA,CAAK,GAAA,CAAI,KAAK,CAAC,CAAA;AAAA,MACjD;AAGA,MAAA,IAAI,YAAY,GAAA,EAAK;AACnB,QAAA,IAAA,CAAK,SAAS,IAAA,GAAO,CAAA;AACrB,QAAA;AAAA,MACF;AAAA,IACF;AAGA,IAAA,IAAI,IAAA,CAAK,WAAW,MAAA,EAAW;AAC7B,MAAA,IAAA,CAAK,MAAA,GAAS,OAAA;AAAA,IAChB;AAGA,IAAA,IAAI,aAAa,MAAA,EAAQ;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,CAAA,GAAI,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA;AACvB,QAAA,CAAA,CAAE,CAAC,IAAI,CAAA,KAAM,CAAA,GAAI,KAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,CAAA;AAAA,MACrC;AAAA,IACF;AAGA,IAAA,IAAA,CAAK,KAAA,GAAQ,OAAO,CAAC,CAAA;AAKrB,IAAA,IAAI,YAAA,EAAc;AAChB,MAAA,IAAI,SAAA,GAAY,CAAA;AAChB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,SAAA,IAAA,CAAc,MAAM,CAAC,CAAA,IAAK,CAAA,KAAM,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MAC1C;AACA,MAAA,IAAA,CAAK,aAAa,KAAA,GAAQ,SAAA;AAAA,IAC5B,CAAA,MAAO;AACL,MAAA,IAAA,CAAK,UAAA,GAAa,CAAA;AAAA,IACpB;AAGA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBQ,aAAA,CAAc,GAAW,MAAA,EAAwB;AAEvD,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,CAAC,KAAK,CAAC,MAAA,CAAO,QAAA,CAAS,MAAM,CAAA,EAAG;AACnD,MAAA,MAAM,IAAI,oBAAoB,uDAAuD,CAAA;AAAA,IACvF;AACA,IAAA,IAAI,CAAA,GAAI,MAAA,EAAQ,OAAO,CAAA,GAAI,MAAA;AAC3B,IAAA,IAAI,CAAA,GAAI,CAAC,MAAA,EAAQ,OAAO,CAAA,GAAI,MAAA;AAC5B,IAAA,OAAO,CAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,IAAI,IAAA,GAAe;AACjB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,6CAA6C,CAAA;AAAA,IACxE;AACA,IAAA,OAAO,IAAA,CAAK,KAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,SAAA,GAAoB;AACtB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,0CAA0C,CAAA;AAAA,IACrE;AACA,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,KAAA,GAA4B;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,sCAAsC,CAAA;AAAA,IACjE;AACA,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,QAAQ,CAAA,EAAmB;AAEzB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,wCAAwC,CAAA;AAAA,IACnE;AAGA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,OAAO,CAAA;AAExD,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,IAAA,GAAO,KAAA,CAAM,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAG5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAE1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAA,CAAK,CAAC,KACJ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA,GAAI,CAAC,CAAA,IAAK,CAAC,CAAA,GACxC,MAAA,CAAO,KAAK,KAAA,CAAM,IAAA,CAAK,KAAK,KAAA,CAAM,MAAA,GAAS,CAAC,CAAA,IAAK,CAAC,CAAA;AAA
A,MACtD;AAEA,MAAA,IAAA,CAAK,CAAC,KAAK,IAAA,CAAK,UAAA;AAAA,IAClB;AAEA,IAAA,OAAO,OAAO,IAAI,CAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,qCAAqC,CAAA;AAAA,IAChE;AACA,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC3B,IAAA,IAAI,IAAA,CAAK,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACxB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,IAAA,CAAK,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OAC/E;AAAA,IACF;AACA,IAAA,IAAI,KAAA,GAAQ,GACV,KAAA,GAAQ,CAAA;AACV,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,IAC3C;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,IAAA,GAAO,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC7C,MAAA,MAAM,OAAA,GAAU,OAAO,IAAA,CAAK,IAAA,CAAK,KAAK,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AACtD,MAAA,KAAA,IAAA,CAAU,OAAO,OAAA,KAAY,CAAA;AAC7B,MAAA,KAAA,IAAA,CAAU,OAAO,KAAA,KAAU,CAAA;AAAA,IAC7B;AAEA,IAAA,IAAI,UAAU,CAAA,EAAG;AACf,MAAA,OAAO,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA;AAAA,IAC7B;AAEA,IAAA,OAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO,EAAE,GAAG,IAAA,CAAK,OAAA,EAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAU,MAAA,EAAuC;AAC/C,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,EAAG;AACjD,MAAA,QAAQ,GAAA;AAAK,QACX,KAAK,OAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA,CAAsB,+BAAA,EAAiC,OAAA,EAAS,KAAK,CAAA;AAAA,UACjF;AACA,UAAA,IAAA,CAAK,QAAQ,KAAA,GAAQ,KAAA;AACrB,UAAA;AAAA,QACF,KAAK,SAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA,CAAsB,iCAAA,EAAmC,SAAA,EAAW,KAAK,CAAA;AAAA,UACrF;AACA,UAAA,IAAA,CAAK,QAAQ,OAAA,GAAU,KAAA;AACvB,UAAA;AAAA,QACF,KAAK,KAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA,CAAsB,6BAAA,EAA+B,KAAA,EAAO,KAAK,CAAA;AAAA,UAC7E;AACA,UAAA,IAAA,CAAK,QAAQ,GAAA,GAAM,KAAA;AACnB,UAAA;AAAA,QACF,KAAK,cAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,gCAAA;AAAA,cACA,cAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,YAAA,GAAe,KAAA;AAC5B,UAAA;AAAA,QACF,KAAK,WAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA,CAAsB,6BAAA,EAA+B,WAAA,EAAa,KAAK,CAAA;AAAA,UACnF;AACA,UAAA,IAAA,CAAK,QAAQ,SAAA,GAAY,KAAA;AACzB,UAAA;AAAA,QACF,KAAK,WAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA,CAAsB,6BAAA,EAA+B,WAAA,EAAa,KAAK,CAAA;AAAA,UACnF;AACA,UAAA,IAAA,CAAK,QAAQ,SAAA,GAAY,KAAA;AACzB,UAAA;AAAA,QACF,KAAK,UAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA,CAAsB,4BAAA,EAA8B,UAAA,EAAY,KAAK,CAAA;AAAA,UACjF;AACA,UAAA,IAAA,CAAK,QAAQ,QAAA,GAAW,KAAA;AACxB,UAAA;AAAA,QACF,KAAK,WAAA;AACH,UAAA,IAAI,KAAA,KAAU,QAAA,IAAY,KAAA,KAAU,QAAA,EAAU;AAC5C,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,mBAAA,EAAsB,MAAA,CAAO,KAAK,CAAC
,CAAA,CAAA;AAAA,cACnC,WAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,SAAA,GAAY,KAAA;AACzB,UAAA;AAAA,QACF,KAAK,aAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,8CAAA,EAAiD,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cAC9D,aAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,WAAA,GAAc,KAAA;AAC3B,UAAA;AAAA,QACF;AACE,UAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,mBAAA,EAAsB,GAAG,CAAA,CAAA,EAAI,KAAK,KAAK,CAAA;AAAA;AAC3E,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACllBO,IAAM,mBAAN,MAA4C;AAAA;AAAA,EAEzC,KAAA;AAAA;AAAA,EAGA,UAAA;AAAA;AAAA,EAGA,YAAA;AAAA;AAAA,EAGA,MAAA,GAAS,KAAA;AAAA,EAET,OAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcR,WAAA,CACE,OAAA,GAII,EAAC,EACL;AACA,IAAA,IAAA,CAAK,OAAA,GAAU,EAAE,GAAG,OAAA,EAAQ;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAE9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAGtB,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAA,CAAK,YAAA,GAAe,SAAA;AAIpB,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,IAAA;AAClD,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,KAAA,IAAS,IAAA;AACpC,IAAA,MAAM,eAAe,KAAA,KAAU,KAAA,KAAU,EAAE,KAAA,KAAU,SAAA,IAAa,EAAE,KAAA,KAAU,SAAA,CAAA;AAE9E,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AAExB,MAAA,MAAM,MAAA,GAAS,KAAK,CAAC,CAAA;AAGrB,MAAA,IAAI,WAAA,GAAc,YAAA,GACd,IAAA,CAAK,iBAAA,CAAkB,CAAA,EAAG,MAAM,CAAA,GAChC,IAAA,CAAK,UAAA,CAAW,CAAA,EAAG,MAAM,CAAA;AAC7B,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,UAAA,CAAW,CAAA,EAAG,MAAM,CAAA;AAG5C,MAAA,IAAI,OAAA;AACJ,MAAA,IAAI,IAAA,CAAK,QAAQ,SAAA,EAAW;AAE1B,QAAA,OAAA,GAAU,IAAA,CAAK,cAAc,WAAW,CAAA;AAExC,QAAA,WAAA,GAAc,YAAA,GACV,KAAK,gBAAA,CAAiB,WAAA,EAAa,OAAO,CAAA,GAC1C,IAAA,CAAK,SAAA,CAAU,WAAA,EAAa,OAAO,CAAA;AAAA,MACzC;AAGA,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,WAAA,EAAa,UAAU,CAAA;AAC5C,MAAA,IAAI,IAAI,MAAA,CAAO,CAAA;AAGf,MAAA,IAAI,OAAA,EAAS;AACX,QAAA,CAAA,GAAI,IAAA,CAAK,YAAA,CAAa,CAAA,EAAG,OAAO,CAAA;AAAA,MAClC;AACA,MAAA,IAAA,CAAK,KAAA,GAAQ,CAAA;AAGb,MAAA,MAAM,eAAA,GAAkB,GAAA,CAAI,MAAA,EAAQ,IAAA,CAAK,KAAK,CAAA;AAC9C,MAAA,IAAA,CAAK,UAAA,GAAa,GAAA,CAAI,MAAA,EAAQ,eAAe,CAAA;AAAA,IAC/C,CAAA,MAAO;AAEL,MAAA,MAAM,MAAA,GAAS,KAAA,CAAM,CAAA,EAAG,CAAC,CAAA;AACzB,MAAA,IAAA,CAAK,QAAQ,MAAA,CAAO,CAAA;AAAA,IAEtB;AAGA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,UAAA,CAAW,MAAc,QAAA,EAA0B;AAEzD,IAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,MAAA,MAAM,IAAI,IAAA,CAAK,IAAA;AACf,MAAA,MAAM,UAAU,MAAA,CAAO,QAAA,CAAS,KAAK,QAAA,CAAS,MAAM,KAAK,CAAC,CAAA;AAC1D,MAAA,MAAM,WAAqB,EAAC;AAC5B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,QAAA,CAAS,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,SAAS,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,OAAO,CAAA;AAAA,MACjE;AACA,MAAA,OAAO,OAAO,QAAQ,CAAA;AAAA,IACxB;AAGA,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAClC,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEnC,IAAA,MAAM,SAAqB,EAAC;AAE5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,GAAA,GAAM,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAA,IAAK,CAAC,CAAA;AAClE,QAAA,MAAM,OAAA,GAAU,OAAO,QAAA,CAAS,IAAA,CAAK,SAAS,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC9D,QAAA,GAAA,CAAI,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,MACxB;AACA,MAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,IACjB;AAEA,IAAA,OAAO,OAAO,MAAM,CAAA;AAAA,EACtB;AAAA,EAEQ,iBA
AA,CAAkB,MAAc,QAAA,EAA0B;AAChE,IAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,MAAA,MAAM,IAAI,IAAA,CAAK,IAAA;AACf,MAAA,MAAM,UAAU,MAAA,CAAO,QAAA,CAAS,KAAK,QAAA,CAAS,MAAM,KAAK,CAAC,CAAA;AAC1D,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,GAAA,GAAM,KAAK,MAAA,GAAS,CAAA;AAC1B,QAAA,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA,GAAI,MAAA,CAAO,KAAK,IAAA,CAAK,GAAG,CAAA,IAAK,CAAC,CAAA,GAAI,OAAA;AAAA,MACjD;AACA,MAAA,OAAO,IAAA;AAAA,IACT;AAEA,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAClC,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,OAAA,GAAU,IAAA,CAAK,MAAA,GAAS,CAAA,GAAI,SAAA;AAClC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,MAAM,OAAA,GAAU,CAAA;AACtB,QAAA,MAAM,OAAA,GAAU,OAAO,QAAA,CAAS,IAAA,CAAK,SAAS,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC9D,QAAA,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA,GAAI,MAAA,CAAO,KAAK,IAAA,CAAK,GAAG,CAAA,IAAK,CAAC,CAAA,GAAI,OAAA;AAAA,MACjD;AAAA,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEQ,cAAc,CAAA,EAAmB;AACvC,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,MAAM,QAAkB,IAAI,KAAA,CAAM,SAAS,CAAA,CAAE,KAAK,CAAC,CAAA;AAEnD,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,MAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,GAAA,GAAM,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAA,IAAK,CAAC,CAAA;AAC5D,QAAA,KAAA,IAAS,GAAA,GAAM,GAAA;AAAA,MACjB;AACA,MAAA,KAAA,CAAM,CAAC,CAAA,GAAI,IAAA,CAAK,IAAA,CAAK,KAAK,CAAA;AAAA,IAC5B;AACA,IAAA,OAAO,OAAO,KAAK,CAAA;AAAA,EACrB;AAAA,EAEQ,SAAA,CAAU,GAAW,KAAA,EAAuB;AAClD,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,MAAM,SAAqB,EAAC;AAE5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,GAAA,GAAM,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAA,IAAK,CAAC,CAAA;AAC5D,QAAA,MAAM,CAAA,GAAI,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAClD,QAAA,GAAA,CAAI,IAAA,CAAK,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,MAAM,CAAC,CAAA;AAAA,MAChC;AACA,MAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,IACjB;AACA,IAAA,OAAO,OAAO,MAAM,CAAA;AAAA,EACtB;AAAA,EAEQ,gBAAA,CAAiB,GAAW,KAAA,EAAuB;AACzD,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,SAAA;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,MAAM,OAAA,GAAU,CAAA;AACtB,QAAA,MAAM,CAAA,GAAI,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAClD,QAAA,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA,GAAI,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,GAAG,CAAA,IAAK,CAAC,CAAA,GAAI,CAAA;AAAA,MACzD;AAAA,IACF;AACA,IAAA,OAAO,CAAA;AAAA,EACT;AAAA,EAEQ,YAAA,CAAa,MAAc,KAAA,EAAuB;AAGxD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,MAAM,QAAA,GAAW,KAAK,IAAA,GAAO,CAAA,GAAK,KAAK,KAAA,CAAM,CAAC,KAAK,CAAA,GAAK,CAAA;AAExD,IAAA,IAAI,IAAA,CAAK,SAAS,CAAA,EAAG;AACnB,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM
,CAAA,GAAI,OAAO,IAAA,CAAK,IAAA,CAAK,KAAK,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAChD,QAAA,MAAM,CAAA,GAAI,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAClD,QAAA,GAAA,CAAI,IAAA,CAAK,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,IAAI,CAAC,CAAA;AAAA,MAC9B;AACA,MAAA,OAAO,OAAO,GAAG,CAAA;AAAA,IACnB;AAEA,IAAA,MAAM,SAAqB,EAAC;AAC5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,MAAM,CAAA,GAAI,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAClD,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,CAAA,GAAI,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,SAAS,CAAA,GAAI,QAAA,GAAW,CAAC,CAAA,IAAK,CAAC,CAAA;AAC/D,QAAA,GAAA,CAAI,IAAA,CAAK,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,IAAI,CAAC,CAAA;AAAA,MAC9B;AACA,MAAA,MAAA,CAAO,KAAK,GAAG,CAAA;AAAA,IACjB;AACA,IAAA,OAAO,OAAO,MAAM,CAAA;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,mDAAmD,CAAA;AAAA,IAC9E;AAGA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,kBAAkB,CAAA;AAGnE,IAAA,MAAM,UAAA,GAAa,GAAA,CAAI,CAAA,EAAG,IAAA,CAAK,KAAK,CAAA;AAGpC,IAAA,IAAI,IAAA,CAAK,eAAe,MAAA,EAAW;AAEjC,MAAA,MAAM,YAAA,GAAe,OAAO,IAAA,CAAK,UAAA,CAAW,KAAK,IAAA,CAAK,UAAA,CAAW,MAAM,CAAA,IAAK,CAAC,CAAA;AAC7E,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,UAAA,CAAW,MAAM,CAAA,EAAA,EAAK;AACxC,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,UAAA,CAAW,IAAA,CAAK,UAAA,CAAW,SAAS,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,YAAY,CAAA;AAAA,MAChF;AACA,MAAA,OAAO,OAAO,MAAM,CAAA;AAAA,IACtB;AAEA,IAAA,OAAO,UAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoBA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,gDAAgD,CAAA;AAAA,IAC3E;AACA,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC7B,IAAA,IAAI,MAAA,CAAO,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC1B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,MAAA,CAAO,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACjF;AAAA,IACF;AAIA,IAAA,IAAI,MAAA,GAAS,CAAA;AACb,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,OAAO,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,OAAO,MAAA,CAAO,IAAA,CAAK,OAAO,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC3F,MAAA,MAAA,IAAU,IAAA,GAAO,IAAA;AAAA,IACnB;AAGA,IAAA,MAAM,aAAA,GAAgB,KAAK,CAAC,CAAA;AAC5B,IAAA,MAAM,aAAa,MAAA,CAAO,aAAA,CAAc,KAAK,aAAA,CAAc,MAAM,KAAK,CAAC,CAAA;AACvE,IAAA,IAAI,MAAA,GAAS,CAAA;AACb,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,IAAA,GAAO,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,UAAA;AACjD,MAAA,MAAA,IAAU,IAAA,GAAO,IAAA;AAAA,IACnB;AAIA,IAAA,IAAI,WAAW,CAAA,EAAG;AAChB,MAAA,OAAO,MAAA,KAAW,IAAI,CAAA,GAAM,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,IAAI,MAAA,GAAS,MAAA;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,IAAA,GAAe;AACjB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,wDAAwD,CAAA;AAAA,IACnF;AACA,IAAA,OAAO,IAAA,CAAK,KAAA;AAAA,
EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,SAAA,GAAgC;AAClC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,qDAAqD,CAAA;AAAA,IAChF;AACA,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,YAAA,EAAc,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,IAAA;AAAA,MAC3C,SAAA,EAAW,IAAA,CAAK,OAAA,CAAQ,SAAA,IAAa,KAAA;AAAA,MACrC,KAAA,EAAO,IAAA,CAAK,OAAA,CAAQ,KAAA,IAAS;AAAA,KAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA,EAAG;AAClD,MAAA,QAAQ,GAAA;AAAK,QACX,KAAK,cAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,yCAAA,EAA4C,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACzD,cAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,YAAA,GAAe,KAAA;AAC5B,UAAA;AAAA,QACF,KAAK,WAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,sCAAA,EAAyC,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACtD,WAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,SAAA,GAAY,KAAA;AACzB,UAAA;AAAA,QACF,KAAK,OAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,kCAAA,EAAqC,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cAClD,OAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,KAAA,GAAQ,KAAA;AACrB,UAAA;AAAA,QACF;AACE,UAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,mBAAA,EAAsB,GAAG,CAAA,CAAA,EAAI,KAAK,KAAK,CAAA;AAAA;AAC3E,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACnbO,IAAM,qBAAN,MAA+C;AAAA,EAC5C,OAAA;AAAA,EAUA,KAAA;AAAA;AAAA,EACA,UAAA;AAAA;AAAA,EACA,YAAA;AAAA,EACA,QAAA;AAAA,EACA,MAAA,GAAS,KAAA;AAAA,EACT,WAAA,GAAc,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EActB,WAAA,CACE,OAAA,GAQI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,OAAA,GAAU,EAAE,GAAG,OAAA,EAAQ;AAE5B,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,IAAW,IAAA;AACxC,IAAA,IAAI,OAAA,KAAY,IAAA,IAAQ,OAAA,KAAY,MAAA,EAAQ;AAC1C,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,mDAAA,EAAsD,MAAA,CAAO,OAAO,CAAC,CAAA,CAAA;AAAA,QACrE,SAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,QAAQ,OAAA,GAAU,OAAA;AAEvB,IAAA,MAAM,UAAA,GAAa,IAAA,CAAK,OAAA,CAAQ,UAAA,IAAc,MAAA;AAC9C,IAAA,IAAI,UAAA,KAAe,KAAA,IAAS,UAAA,KAAe,MAAA,EAAQ;AACjD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,6CAAA,EAAgD,MAAA,CAAO,UAAU,CAAC,CAAA,CAAA;AAAA,QAClE,YAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAA,CAAK,QAAQ,UAAA,GAAa,UAAA;AAE1B,IAAA,IAAI,KAAK,OAAA,CAAQ,CAAA,KAAM,UAAa,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAA,EAAG;AACvD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,wBAAA,EAA2B,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,CAAA;AAAA,QACzC,GAAA;AAAA,QACA,KAAK,OAAA,CAAQ;AAAA,OACf;AAAA,IACF;AACA,IAAA,IACE,IAAA,CAAK,OAAA,CAAQ,OAAA,KAAY,MAAA,KACxB,CAAC,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,OAAA,CAAQ,OAAO,CAAA,IAAK,IAAA,CAAK,OAAA,CAAQ,WAAW,CAAA,CAAA,EACnE;AACA,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,mDAAA,EAAsD,IAAA,CAAK,OAAA,CAAQ,OAAO,CAAA,CAAA;AAAA,QAC1E,SAAA;AAAA,QACA,KAAK,OAAA,CAAQ;AAAA,OACf;AAAA,IACF;AACA,IAAA,IACE,IAAA,CAAK,OAAA,CAAQ,GAAA,KAAQ,MAAA,KACpB,CAAC,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA,IAAK,IAAA,CAAK,OAAA,CAAQ,MAAM,CAAA,CAAA,EAC1D;AACA,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA,CAAA;AAAA,QAC9D,KAAA;AAAA,QACA,KAAK,OAAA,CAAQ;AAAA,OACf;AAAA,IACF;AACA,IAAA,IACE,IAAA,CAAK,OAAA,CAAQ,YAAA,KAAiB,MAAA,KAC7B,CAAC,MAAA,CAAO,QAAA,CAAS,IAAA,CAAK,OAAA,CAAQ,YAAY,CAAA,IAAK,IAAA,CAAK,OAAA,CAAQ,gBAAgB,CAAA,CAAA,EAC7E;AACA,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,wDAAA,EAA2D,IAAA,CAAK,OAAA,CAAQ,YAAY,CAAA,CAAA;AAAA,QACpF,cAAA;AAAA,QACA,KAAK,OAAA,CAAQ;AAAA,OACf;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaQ
,QAAQ,CAAA,EAAmB;AAEjC,IAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,CAAC,CAAA,EAAG;AACvB,MAAA,OAAO,CAAA,GAAI,IAAI,CAAA,GAAI,CAAA;AAAA,IACrB;AACA,IAAA,IAAI,KAAK,CAAA,EAAG;AACV,MAAA,MAAMC,GAAAA,GAAK,IAAA,CAAK,GAAA,CAAI,CAAC,CAAC,CAAA;AACtB,MAAA,OAAO,KAAK,CAAA,GAAIA,GAAAA,CAAAA;AAAA,IAClB;AACA,IAAA,MAAM,EAAA,GAAK,IAAA,CAAK,GAAA,CAAI,CAAC,CAAA;AACrB,IAAA,OAAO,MAAM,CAAA,GAAI,EAAA,CAAA;AAAA,EACnB;AAAA,EAEQ,YAAA,GAAqB;AAC3B,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,4DAA4D,CAAA;AAAA,IACvF;AAAA,EACF;AAAA,EAEQ,UAAA,CACN,CAAA,EACA,CAAA,EACA,CAAA,EACA,GACA,MAAA,EAC4B;AAC5B,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,IAAW,GAAA;AACxC,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,OAAA,CAAQ,GAAA,IAAO,IAAA;AAChC,IAAA,MAAM,EAAA,GAAK,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,GAAA;AACxC,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,IAAA;AAGlD,IAAA,MAAM,IAAI,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACrC,IAAA,IAAI,CAAA,GAAI,CAAA;AAGR,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,OAAA,EAAS,IAAA,EAAA,EAAQ;AACzC,MAAA,MAAM,QAAQ,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACzC,MAAA,IAAI,KAAA,GAAQ,CAAA;AAGZ,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,CAAA,GAAI,eAAe,CAAA,GAAI,CAAA;AAC3B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,KAAK,CAAC,CAAA,IAAK,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,QACnD;AAEA,QAAA,MAAM,EAAA,GAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC3C,QAAA,MAAM,EAAA,GAAK,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AACzB,QAAA,MAAM,QAAQ,EAAA,GAAK,EAAA;AAEnB,QAAA,KAAA,IAAS,KAAA;AACT,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,KAAA,CAAM,CAAC,CAAA,GAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,IAAK,KAAA,GAAQ,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,KAAK,CAAC,CAAA;AAAA,QACtE;AAAA,MACF;AAGA,MAAA,MAAM,IAAA,GAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA;AAC/B,MAAA,IAAI,SAAA,GAAY,CAAA;AAChB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,CAAA,GAAA,CAAK,MAAM,CAAC,CAAA,IAAK,KAAK,IAAA,GAAO,MAAA,IAAU,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AACrD,QAAA,MAAM,SAAS,EAAA,GAAK,CAAA;AACpB,QAAA,CAAA,CAAE,CAAC,CAAA,GAAA,CAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,MAAA;AACrB,QAAA,SAAA,GAAY,KAAK,GAAA,CAAI,SAAA,EAAW,IAAA,CAAK,GAAA,CAAI,MAAM,CAAC,CAAA;AAAA,MAClD;AACA,MAAA,IAAI,YAAA,EAAc;AAChB,QAAA,MAAM,KAAK,KAAA,GAAQ,IAAA;AACnB,QAAA,MAAM,UAAU,EAAA,GAAK,EAAA;AACrB,QAAA,CAAA,IAAK,OAAA;AACL,QAAA,SAAA,GAAY,KAAK,GAAA,CAAI,SAAA,EAAW,IAAA,CAAK,GAAA,CAAI,OAAO,CAAC,CAAA;AAAA,MACnD;AAGA,MAAA,IAAI,YAAY,GAAA,EAAK;AACnB,QAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,EAAE,CAAA,EAAG,CAAA,EAAG,YAAA,GAAe,IAAI,CAAA,EAAE;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAE9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAGpB,IAAA,MAAM,KAAA,GAAQ,IAAI,YAAA,CAAa,CAAC,CAAA;AAChC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,KAAA,CAAM,CAAC,IAAI,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IACxC;AACA,IAAA,MAAM,aAAA,GAAgB,CAAC,GAAG,IAAI,GAAA,CAAI,KAAK,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,IAAI,CAAC,CAAA;AAC9D,IAAA,IAAA,CAAK,QAAA,GAAW,OAAO,aAAa,CAAA;AAGpC,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,IAAW,I
AAA;AACxC,IAAA,MAAM,CAAA,GAAI,IAAA,CAAK,OAAA,CAAQ,CAAA,IAAK,CAAA;AAC5B,IAAA,IAAI,EAAE,IAAI,CAAA,CAAA,EAAI;AACZ,MAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,wBAAA,EAA2B,CAAC,CAAA,CAAA,EAAI,KAAK,CAAC,CAAA;AAAA,IACxE;AACA,IAAA,MAAM,MAAA,GAAS,OAAA,KAAY,IAAA,GAAO,CAAA,GAAI,CAAA,GAAI,CAAA;AAC1C,IAAA,MAAM,UAAA,GAAa,IAAA,CAAK,OAAA,CAAQ,UAAA,IAAc,MAAA;AAE9C,IAAA,IAAI,aAAA,CAAc,UAAU,CAAA,EAAG;AAE7B,MAAA,IAAA,CAAK,WAAA,GAAc,KAAA;AAGnB,MAAA,IAAI,OAAA,GAAU,CAAA;AACd,MAAA,IAAI,aAAA,CAAc,MAAA,KAAW,CAAA,KAAM,aAAA,CAAc,CAAC,MAAM,CAAA,IAAK,aAAA,CAAc,CAAC,CAAA,KAAM,CAAA,CAAA,EAAI;AAEpF,QAAA,MAAM,UAAA,GAAa,IAAI,YAAA,CAAa,CAAC,CAAA;AACrC,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,UAAA,CAAW,CAAC,IAAI,KAAA,CAAM,CAAC,MAAM,aAAA,CAAc,CAAC,IAAI,CAAA,GAAI,CAAA;AAAA,QACtD;AACA,QAAA,OAAA,GAAU,OAAO,UAAU,CAAA;AAAA,MAC7B,CAAA,MAAA,IAAW,aAAA,CAAc,MAAA,KAAW,CAAA,EAAG;AAErC,QAAA,MAAM,UAAA,GAAa,IAAI,YAAA,CAAa,CAAC,CAAA;AACrC,QAAA,MAAM,MAAA,GAAS,aAAA,CAAc,CAAC,CAAA,KAAM,IAAI,CAAA,GAAI,CAAA;AAC5C,QAAA,UAAA,CAAW,KAAK,MAAM,CAAA;AACtB,QAAA,OAAA,GAAU,OAAO,UAAU,CAAA;AAAA,MAC7B,CAAA,MAAO;AAEL,QAAA,KAAA,MAAW,OAAO,aAAA,EAAe;AAC/B,UAAA,IAAI,GAAA,KAAQ,CAAA,IAAK,GAAA,KAAQ,CAAA,EAAG;AAE1B,YAAA,MAAM,IAAI,oBAAoB,8CAA8C,CAAA;AAAA,UAC9E;AAAA,QACF;AAAA,MACF;AAEA,MAAA,MAAM,EAAE,CAAA,EAAG,CAAA,EAAE,GAAI,IAAA,CAAK,WAAW,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,CAAA,EAAG,MAAM,CAAA;AACzD,MAAA,IAAA,CAAK,KAAA,GAAQ,OAAO,CAAC,CAAA;AACrB,MAAA,IAAA,CAAK,UAAA,GAAa,CAAA;AAAA,IACpB,CAAA,MAAO;AAEL,MAAA,IAAI,UAAA,KAAe,KAAA,IAAS,UAAA,KAAe,MAAA,EAAQ;AACjD,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,CAAA,6CAAA,EAAgD,MAAA,CAAO,UAAU,CAAC,CAAA,CAAA;AAAA,UAClE,YAAA;AAAA,UACA;AAAA,SACF;AAAA,MACF;AACA,MAAA,IAAA,CAAK,WAAA,GAAc,IAAA;AACnB,MAAA,MAAM,WAAW,aAAA,CAAc,MAAA;AAC/B,MAAA,MAAM,WAAqB,EAAC;AAC5B,MAAA,MAAM,gBAA0B,EAAC;AAEjC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,WAAA,GAAc,cAAc,CAAC,CAAA;AAEnC,QAAA,MAAM,WAAA,GAAc,IAAI,YAAA,CAAa,CAAC,CAAA;AACtC,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,WAAA,CAAY,CAAC,CAAA,GAAI,KAAA,CAAM,CAAC,CAAA,KAAM,cAAc,CAAA,GAAI,CAAA;AAAA,QAClD;AACA,QAAA,MAAM,OAAA,GAAU,OAAO,WAAW,CAAA;AAElC,QAAA,MAAM,EAAE,CAAA,EAAG,CAAA,EAAE,GAAI,IAAA,CAAK,WAAW,CAAA,EAAG,OAAA,EAAS,CAAA,EAAG,CAAA,EAAG,MAAM,CAAA;AACzD,QAAA,QAAA,CAAS,IAAA,CAAK,GAAG,CAAC,CAAA;AAClB,QAAA,aAAA,CAAc,KAAK,CAAC,CAAA;AAAA,MACtB;AAEA,MAAA,MAAM,cAAA,GAAiB,OAAO,QAAQ,CAAA;AACtC,MAAA,IAAA,CAAK,QAAQ,OAAA,CAAQ,cAAA,EAAgB,CAAC,QAAA,EAAU,CAAC,CAAC,CAAA;AAClD,MAAA,IAAA,CAAK,UAAA,GAAa,aAAA;AAAA,IACpB;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,IAAI,OAAA,GAA8B;AAChC,IAAA,OAAO,IAAA,CAAK,QAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,IAAA,GAAe;AACjB,IAAA,IAAA,CAAK,YAAA,EAAa;AAClB,IAAA,MAAM,OAAO,IAAA,CAAK,KAAA;AAClB,IAAA,IAAI,CAAC,IAAA,EAAM;AACT,MAAA,MAAM,IAAI,aAAa,wDAAwD,CAAA;AAAA,IACjF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,SAAA,GAA+B;AACjC,IAAA,IAAA,CAAK,YAAA,EAAa;AAClB,IAAA,IAAI,IAAA,CAAK,eAAe,MAAA,EAAW;AACjC,MAAA,OAAO,CAAA;AAAA,IACT;AACA,IAAA,OAAO,IAAA,CAAK,UAAA;AAAA,EACd;AAAA,EAEA,QAAQ,CAAA,EAAmB;AACzB,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,YAAA,CAAa,CAAC,CAAA;AACjC,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,OAAO,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAExC,IAAA,IAAI,KAAK,WAAA,EAAa;AACpB,MAAA,MAAM,UAAU,IAAA,CAAK,QAAA;AACrB,MAAA,IAAI,CAAC,OAAA,EAAS;AACZ,QAAA,MAAM,IAAI,eAAe,qCAAqC,CAAA;AAAA,MAChE;AACA,MAAA,MAAM,WAAW,OAAA,CAAQ,IAAA;AACzB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,
QAAA,IAAI,OAAA,GAAU,EAAA;AACd,QAAA,IAAI,WAAA,GAAc,CAAA;AAClB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,MAAM,CAAA,GAAI,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAA,GAAI,QAAA,GAAW,CAAC,CAAC,CAAA;AAC5D,UAAA,IAAI,IAAI,OAAA,EAAS;AACf,YAAA,OAAA,GAAU,CAAA;AACV,YAAA,WAAA,GAAc,CAAA;AAAA,UAChB;AAAA,QACF;AAEA,QAAA,IAAA,CAAK,CAAC,IAAI,MAAA,CAAO,OAAA,CAAQ,KAAK,OAAA,CAAQ,MAAA,GAAS,WAAW,CAAC,CAAA;AAAA,MAC7D;AAAA,IACF,CAAA,MAAO;AACL,MAAA,MAAM,UAAU,IAAA,CAAK,QAAA;AACrB,MAAA,IAAI,CAAC,OAAA,EAAS;AACZ,QAAA,MAAM,IAAI,eAAe,qCAAqC,CAAA;AAAA,MAChE;AAKA,MAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACtB,QAAA,MAAM,MAAM,MAAA,CAAO,OAAA,CAAQ,IAAA,CAAK,OAAA,CAAQ,MAAM,CAAC,CAAA;AAC/C,QAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA,MACf,CAAA,MAAO;AACL,QAAA,MAAM,OAAO,MAAA,CAAO,OAAA,CAAQ,IAAA,CAAK,OAAA,CAAQ,MAAM,CAAC,CAAA;AAChD,QAAA,MAAM,OAAO,MAAA,CAAO,OAAA,CAAQ,KAAK,OAAA,CAAQ,MAAA,GAAS,CAAC,CAAC,CAAA;AAEpD,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,MAAM,EAAA,GAAK,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAA,GAAI,CAAA,GAAI,CAAC,CAAC,CAAA;AACtD,UAAA,IAAA,CAAK,CAAC,CAAA,GAAI,EAAA,IAAM,GAAA,GAAM,IAAA,GAAO,IAAA;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,OAAO,IAAI,CAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAA,CAAK,YAAA,EAAa;AAElB,IAAA,MAAM,OAAO,IAAA,CAAK,KAAA;AAClB,IAAA,IAAI,CAAC,IAAA,EAAM;AACT,MAAA,MAAM,IAAI,aAAa,uDAAuD,CAAA;AAAA,IAChF;AAGA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,oBAAoB,CAAA;AAErE,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAExB,IAAA,IAAI,KAAK,WAAA,EAAa;AACpB,MAAA,MAAM,QAAA,GAAW,IAAA,CAAK,QAAA,EAAU,IAAA,IAAQ,CAAA;AACxC,MAAA,MAAM,KAAA,GAAQ,IAAI,YAAA,CAAa,CAAA,GAAI,QAAQ,CAAA;AAC3C,MAAA,MAAM,iBAAiB,IAAA,CAAK,UAAA;AAC5B,MAAA,IAAI,CAAC,KAAA,CAAM,OAAA,CAAQ,cAAc,CAAA,EAAG;AAClC,QAAA,MAAM,IAAI,aAAa,4DAA4D,CAAA;AAAA,MACrF;AAEA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,IAAI,MAAA,GAAS,CAAA;AACb,QAAA,MAAM,SAAS,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,KAAK,CAAC,CAAA;AAGjD,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,IAAI,CAAA,GAAI,cAAA,CAAe,CAAC,CAAA,IAAK,CAAA;AAC7B,UAAA,MAAM,WAAA,GAAc,IAAA,CAAK,MAAA,GAAS,CAAA,GAAI,CAAA;AACtC,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,YAAA,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,MAAA,CAAO,IAAA,CAAK,IAAA,CAAK,WAAA,GAAc,CAAC,KAAK,CAAC,CAAA;AAAA,UAChF;AAGA,UAAA,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC1B,UAAA,MAAA,IAAU,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA;AAAA,QACzB;AAGA,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AAEjC,UAAA,KAAA,CAAM,CAAA,GAAI,QAAA,GAAW,CAAC,CAAA,GAAI,MAAA,GAAS,CAAA,GAAA,CAAK,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,GAAS,CAAA,GAAM,QAAA;AAAA,QAC3E;AAAA,MACF;AAEA,MAAA,OAAO,MAAA,CAAO,KAAA,EAAO,EAAE,KAAA,EAAO,SAAA,EAAW,CAAA,CAAE,OAAA,CAAQ,CAAC,CAAA,EAAG,QAAQ,CAAC,CAAA;AAAA,IAClE,CAAA,MAAO;AAEL,MAAA,MAAM,QAAQ,IAAI,KAAA,CAAc,IAAI,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAC7C,MAAA,MAAM,iBAAiB,IAAA,CAAK,UAAA;AAC5B,MAAA,IAAI,MAAM,OAAA,CAAQ,cAAc,CAAA,IAAK,OAAO,mBAAmB,QAAA,EAAU;AACvE,QAAA,MAAM,IAAI,aAAa,6DAA6D,CAAA;AAAA,MACtF;AAEA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,CAAA,GAAI,cAAA;AACR,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAA
A,EAAK;AAC1B,UAAA,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,MAAA,CAAO,KAAK,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,QAChF;AAEA,QAAA,MAAM,EAAA,GAAK,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AACzB,QAAA,KAAA,CAAM,CAAA,GAAI,CAAA,GAAI,CAAC,CAAA,GAAI,CAAA,GAAI,EAAA;AACvB,QAAA,KAAA,CAAM,CAAA,GAAI,CAAA,GAAI,CAAC,CAAA,GAAI,EAAA;AAAA,MACrB;AAEA,MAAA,OAAO,OAAO,KAAK,CAAA,CAAE,QAAQ,CAAC,CAAA,EAAG,CAAC,CAAC,CAAA;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC3B,IAAA,IAAI,IAAA,CAAK,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACxB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,IAAA,CAAK,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OAC/E;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AAEd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,OAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,CAAC,CAAA,IAAK,CAAC,CAAA,KAAM,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAA,IAAK,CAAC,CAAA,EAAG;AACjF,QAAA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO,EAAE,GAAG,IAAA,CAAK,OAAA,EAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAU,MAAA,EAAuC;AAC/C,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,EAAG;AACjD,MAAA,QAAQ,GAAA;AAAK,QACX,KAAK,SAAA;AACH,UAAA,IAAI,OAAO,UAAU,QAAA,IAAY,CAAC,OAAO,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,IAAS,CAAA,EAAG;AACtE,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,0CAAA;AAAA,cACA,SAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,OAAA,GAAU,KAAA;AACvB,UAAA;AAAA,QACF,KAAK,KAAA;AACH,UAAA,IAAI,OAAO,UAAU,QAAA,IAAY,CAAC,OAAO,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,GAAQ,CAAA,EAAG;AACrE,YAAA,MAAM,IAAI,qBAAA,CAAsB,kCAAA,EAAoC,KAAA,EAAO,KAAK,CAAA;AAAA,UAClF;AACA,UAAA,IAAA,CAAK,QAAQ,GAAA,GAAM,KAAA;AACnB,UAAA;AAAA,QACF,KAAK,GAAA;AACH,UAAA,IAAI,OAAO,UAAU,QAAA,IAAY,CAAC,OAAO,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,IAAS,CAAA,EAAG;AACtE,YAAA,MAAM,IAAI,qBAAA,CAAsB,oCAAA,EAAsC,GAAA,EAAK,KAAK,CAAA;AAAA,UAClF;AACA,UAAA,IAAA,CAAK,QAAQ,CAAA,GAAI,KAAA;AACjB,UAAA;AAAA,QACF,KAAK,cAAA;AACH,UAAA,IAAI,OAAO,UAAU,QAAA,IAAY,CAAC,OAAO,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,IAAS,CAAA,EAAG;AACtE,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,+CAAA;AAAA,cACA,cAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,YAAA,GAAe,KAAA;AAC5B,UAAA;AAAA,QACF,KAAK,cAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,gCAAA;AAAA,cACA,cAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,YAAA,GAAe,KAAA;AAC5B,UAAA;AAAA,QACF,KAAK,SAAA;AACH,UAAA,IAAI,KAAA,KAAU,MAAA,IAAU,KAAA,KAAU,IAAA,EAAM;AACtC,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,mDAAA,EAAsD,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACnE,SAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,OAAA,GAAU,KAAA;AACvB,UAAA;AAAA,QACF,KAAK,YAAA;AACH,UAAA,IAAI,KAAA,KAAU,KAAA,IAAS,KAAA,KAAU,MAAA,EAAQ;AACvC,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,6CAAA,EAAgD,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cAC7D,YAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,UAAA,GAAa,KAAA;AAC1B,UAAA;AAAA,QACF;AACE,UAAA,MAAM,IAA
I,qBAAA,CAAsB,CAAA,mBAAA,EAAsB,GAAG,CAAA,CAAA,EAAI,KAAK,KAAK,CAAA;AAAA;AAC3E,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;AC5lBO,IAAM,QAAN,MAAiC;AAAA;AAAA,EAE9B,OAAA;AAAA;AAAA,EAUA,KAAA;AAAA;AAAA,EAGA,UAAA;AAAA;AAAA,EAGA,YAAA;AAAA;AAAA,EAGA,MAAA;AAAA;AAAA,EAGA,MAAA,GAAS,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAajB,WAAA,CACE,OAAA,GAOI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,OAAA,GAAU,EAAE,GAAG,OAAA,EAAQ;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAE9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AACtB,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AAGd,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,KAAA,IAAS,CAAA;AACpC,IAAA,IAAI,EAAE,SAAS,CAAA,CAAA,EAAI;AACjB,MAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,6BAAA,EAAgC,KAAK,CAAA,CAAA,EAAI,SAAS,KAAK,CAAA;AAAA,IACzF;AAGA,IAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,YAAA,IAAgB,IAAA;AAGlD,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAGxB,IAAA,IAAA,CAAK,YAAA,GAAe,CAAA;AAOpB,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,MAAM,QAAQ,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAEzC,IAAA,IAAI,YAAA,EAAc;AAEhB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,MAC3C;AAGA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,KAAA,CAAM,CAAC,CAAA,GAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA;AAAA,QAC9D;AAAA,MACF;AAGA,MAAA,MAAM,IAAA,GAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA;AAC/B,MAAA,KAAA,IAAS,IAAA;AACT,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,CAAM,CAAC,CAAA,GAAA,CAAK,KAAA,CAAM,CAAC,KAAK,CAAA,IAAK,IAAA;AAAA,MAC/B;AAAA,IACF;AAEA,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,SAAA,IAAa,KAAA;AAC5C,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,OAAA,IAAW,GAAA;AACxC,IAAA,MAAM,GAAA,GAAM,IAAA,CAAK,OAAA,CAAQ,GAAA,IAAO,IAAA;AAEhC,IAAA,IAAI,MAAA;AACJ,IAAA,IAAI,SAAA,EAAW;AACb,MAAA,MAAA,GAAS,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACpC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,OAAA,GAAU,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,CAAA;AAC/B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,MAAM,QAAA,GAAW,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA,IAAK,CAAC,CAAA,IAAK,YAAA,GAAgB,KAAA,CAAM,CAAC,KAAK,CAAA,GAAK,CAAA,CAAA;AACtF,UAAA,MAAA,CAAO,CAAC,CAAA,GAAA,CAAK,MAAA,CAAO,CAAC,CAAA,IAAK,KAAK,QAAA,GAAW,QAAA;AAAA,QAC5C;AAAA,MACF;AACA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAA,CAAO,CAAC,CAAA,GAAI,IAAA,CAAK,KAAK,MAAA,CAAO,CAAC,KAAK,CAAC,CAAA;AAAA,MACtC;AAAA,IACF;AAEA,IAAA,MAAM,IAAA,GAAO,CAAC,WAAA,EAAqB,YAAA,KAAiC;AAClE,MAAA,MAAM,GAAA,GAAM,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,WAAA,GAAc,CAAA,GAAI,YAAY,CAAA,IAAK,CAAC,CAAA;AACzE,MAAA,MAAM,WAAW,GAAA,IAAO,YAAA,GAAgB,KAAA,CAAM,YAAY,KAAK,CAAA,GAAK,CAAA,CAAA;AACpE,MAAA,IAAI,aAAa,MAAA,EAAQ;AACvB,QAAA,MAAM,CAAA,GAAI,MAAA,CAAO,YAAY,CAAA,IAAK,CAAA;AAClC,QAAA,OAAO,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,QAAA,GAAW,CAAA;AAAA,MAClC;AACA,MAAA,OAAO,QAAA;AAAA,IACT,CAAA;AAEA,IAAA,MAAM,IAAA,GAAO,CAAC,WAAA,KAAgC;AAC5C,MAAA,MAAM,GAAA,GAAM,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,WAAW,KAAK,CAAC,CAAA;AACtD,MAAA,OAAO,YAAA,GAAe,MAAM,KAAA,GAAQ,GA
AA;AAAA,IACtC,CAAA;AAIA,IAAA,IAAI,UAAA;AACJ,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,OAAA,CAAQ,MAAA,IAAU,MAAA;AAEtC,IAAA,IAAI,WAAW,KAAA,EAAO;AACpB,MAAA,MAAM,GAAA,GAAM,KAAK,QAAA,CAAS,IAAA,EAAM,MAAM,CAAA,EAAG,CAAA,EAAG,KAAA,EAAO,OAAA,EAAS,GAAG,CAAA;AAC/D,MAAA,UAAA,GAAa,MAAA,CAAO,IAAI,CAAC,CAAA;AACzB,MAAA,IAAA,CAAK,SAAS,GAAA,CAAI,KAAA;AAAA,IACpB,CAAA,MAAO;AAIL,MAAA,MAAM,GAAA,GAAM,KAAA,CAAM,CAAC,CAAA,CAChB,KAAK,CAAC,CAAA,CACN,GAAA,CAAI,MAAM,KAAA,CAAM,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAC,CAAA;AAE7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,IAAI,GAAA,GAAM,CAAA;AAGV,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,YAAA,MAAM,EAAA,GAAK,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACpB,YAAA,MAAM,EAAA,GAAK,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACpB,YAAA,GAAA,IAAO,EAAA,GAAK,EAAA;AAAA,UACd;AAIA,UAAA,MAAM,MAAA,GAAS,IAAI,CAAC,CAAA;AACpB,UAAA,IAAI,QAAQ,MAAA,CAAO,CAAC,IAAI,GAAA,IAAO,CAAA,KAAM,IAAI,KAAA,GAAQ,CAAA,CAAA;AAAA,QACnD;AAAA,MACF;AAIA,MAAA,MAAM,MAAM,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAEvC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,GAAA,GAAM,CAAA;AAGV,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,MAAM,IAAA,GAAO,KAAK,CAAC,CAAA;AACnB,UAAA,MAAM,IAAA,GAAO,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACtB,UAAA,GAAA,IAAO,IAAA,GAAO,IAAA;AAAA,QAChB;AACA,QAAA,GAAA,CAAI,CAAC,CAAA,GAAI,GAAA;AAAA,MACX;AAEA,MAAA,IAAI,WAAW,MAAA,EAAQ;AACrB,QAAA,MAAM,MAAM,IAAA,CAAK,sBAAA,CAAuB,GAAA,EAAK,GAAA,EAAK,SAAS,GAAG,CAAA;AAC9D,QAAA,UAAA,GAAa,MAAA,CAAO,IAAI,CAAC,CAAA;AACzB,QAAA,IAAA,CAAK,SAAS,GAAA,CAAI,KAAA;AAAA,MACpB,CAAA,MAAA,IAAW,MAAA,KAAW,UAAA,IAAc,MAAA,KAAW,MAAA,EAAQ;AACrD,QAAA,IAAI;AACF,UAAA,MAAM,SAAA,GAAY,OAAO,GAAG,CAAA;AAC5B,UAAA,MAAM,SAAA,GAAY,OAAO,GAAG,CAAA;AAC5B,UAAA,MAAM,CAAA,GAAI,SAAS,SAAS,CAAA;AAC5B,UAAA,MAAM,EAAA,GAAK,eAAA,CAAgB,CAAA,EAAG,SAAA,EAAW,IAAI,CAAA;AAC7C,UAAA,UAAA,GAAa,eAAA,CAAgB,SAAA,CAAU,CAAC,CAAA,EAAG,IAAI,KAAK,CAAA;AAAA,QACtD,SAAS,CAAA,EAAG;AACV,UAAA,IAAI,WAAW,MAAA,EAAQ;AAErB,YAAA,MAAM,GAAA,GAAM,IAAA,CAAK,iBAAA,CAAkB,GAAA,EAAK,GAAG,CAAA;AAC3C,YAAA,UAAA,GAAa,OAAO,GAAG,CAAA;AAAA,UACzB,CAAA,MAAO;AACL,YAAA,MAAM,CAAA;AAAA,UACR;AAAA,QACF;AAAA,MACF,CAAA,MAAA,IAAW,WAAW,KAAA,EAAO;AAC3B,QAAA,MAAM,SAAA,GAAY,OAAO,GAAG,CAAA;AAC5B,QAAA,MAAM,SAAA,GAAY,OAAO,GAAG,CAAA;AAC5B,QAAA,MAAM,CAAC,CAAA,EAAG,CAAA,EAAG,EAAE,CAAA,GAAI,IAAI,SAAS,CAAA;AAGhC,QAAA,MAAM,EAAA,GAAK,UAAU,CAAC,CAAA;AACtB,QAAA,MAAM,GAAA,GAAM,GAAA,CAAI,EAAA,EAAI,SAAS,CAAA;AAE7B,QAAA,MAAM,QAAQ,CAAA,CAAE,IAAA;AAChB,QAAA,IAAI,EAAE,iBAAiB,YAAA,CAAA,EAAe;AACpC,UAAA,MAAM,IAAI,oBAAoB,0CAA0C,CAAA;AAAA,QAC1E;AACA,QAAA,MAAM,UAAA,GAAa,IAAI,YAAA,CAAa,GAAA,CAAI,IAAI,CAAA;AAC5C,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAA,CAAI,MAAM,CAAA,EAAA,EAAK;AACjC,UAAA,MAAM,MAAM,MAAA,CAAO,GAAA,CAAI,KAAK,GAAA,CAAI,MAAA,GAAS,CAAC,CAAC,CAAA;AAC3C,UAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC1B,UAAA,UAAA,CAAW,CAAC,IAAI,IAAA,CAAK,GAAA,CAAI,KAAK,CAAA,GAAI,KAAA,GAAQ,MAAM,KAAA,GAAQ,CAAA;AAAA,QAC1D;AACA,QAAA,MAAM,MAAA,GAAS,OAAO,UAAU,CAAA;AAEhC,QAAA,MAAM,CAAA,GAAI,UAAU,EAAE,CAAA;AACtB,QAAA,UAAA,GAAa,GAAA,CAAI,GAAG,MAAM,CAAA;AAAA,MAC5B,CAAA,MAAO;AACL,QAAA,MAAM,GAAA,GAAM,IAAA,CAAK,iBAAA,CAAkB,GAAA,EAAK,GAAG,CAAA;AAC3C,QAAA,UAAA,GAAa,OAAO,GAAG,CAAA;AAAA,MACzB;AAAA,IACF;AAEA,IAAA,IAAI,aAAa,MAAA,EAAQ;AACvB,MAAA,UAAA,GAAa,IAAA,CAAK,YAAA,CAAa,UAAA,EAAY,MAAM,CAAA;AAAA,IACnD;AAEA,IAAA,IAAA,CAAK,KAAA,GAAQ,UAAA;AAKb,IAAA,IAAI,YAAA,EAAc;AAChB,MAAA,IAAI,SAAA,GAAY,CAAA;AAChB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,C
AAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,EAAA,GAAK,OAAO,UAAA,CAAW,IAAA,CAAK,WAAW,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC7D,QAAA,SAAA,IAAA,CAAc,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA,IAAK,EAAA;AAAA,MACjC;AACA,MAAA,IAAA,CAAK,aAAa,KAAA,GAAQ,SAAA;AAAA,IAC5B,CAAA,MAAO;AACL,MAAA,IAAA,CAAK,UAAA,GAAa,CAAA;AAAA,IACpB;AAGA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBQ,iBAAA,CAAkB,GAAe,CAAA,EAAuB;AAC9D,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AAIZ,IAAA,MAAM,GAAA,GAAM,CAAA,CAAE,GAAA,CAAI,CAAC,GAAA,EAAK,CAAA,KAAM,CAAC,GAAG,GAAA,EAAK,CAAA,CAAE,CAAC,CAAA,IAAK,CAAC,CAAC,CAAA;AACjD,IAAA,IAAI,MAAA,GAAS,CAAA;AACb,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,GAAA,GAAM,IAAI,CAAC,CAAA;AACjB,MAAA,IAAI,CAAC,GAAA,EAAK;AACV,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,IAAI,IAAA,CAAK,GAAA,CAAI,GAAA,CAAI,CAAC,KAAK,CAAC,CAAA;AAC9B,QAAA,IAAI,CAAA,GAAI,QAAQ,MAAA,GAAS,CAAA;AAAA,MAC3B;AAAA,IACF;AACA,IAAA,IAAI,WAAW,CAAA,IAAK,CAAC,MAAA,CAAO,QAAA,CAAS,MAAM,CAAA,EAAG;AAC5C,MAAA,MAAM,IAAI,oBAAoB,uCAAuC,CAAA;AAAA,IACvE;AACA,IAAA,MAAM,GAAA,GAAM,MAAA,CAAO,OAAA,GAAU,CAAA,GAAI,MAAA;AAGjC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAG1B,MAAA,IAAI,MAAA,GAAS,CAAA;AACb,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAC9B,QAAA,IAAI,KAAK,GAAA,CAAI,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAC,CAAA,GAAI,IAAA,CAAK,IAAI,GAAA,CAAI,MAAM,IAAI,CAAC,CAAA,IAAK,CAAC,CAAA,EAAG;AAChE,UAAA,MAAA,GAAS,CAAA;AAAA,QACX;AAAA,MACF;AAGA,MAAA,MAAM,IAAA,GAAO,GAAA,CAAI,CAAC,CAAA,IAAK,EAAC;AACxB,MAAA,MAAM,MAAA,GAAS,GAAA,CAAI,MAAM,CAAA,IAAK,EAAC;AAC/B,MAAA,GAAA,CAAI,CAAC,CAAA,GAAI,MAAA;AACT,MAAA,GAAA,CAAI,MAAM,CAAA,GAAI,IAAA;AAEd,MAAA,MAAM,KAAA,GAAQ,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AAC7B,MAAA,IAAI,CAAC,OAAO,QAAA,CAAS,KAAK,KAAK,IAAA,CAAK,GAAA,CAAI,KAAK,CAAA,IAAK,GAAA,EAAK;AACrD,QAAA,MAAM,IAAI,oBAAoB,uCAAuC,CAAA;AAAA,MACvE;AAGA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAE9B,QAAA,MAAM,KAAK,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,KAAK,CAAA,IAAK,KAAA;AAC/B,QAAA,MAAM,IAAA,GAAO,IAAI,CAAC,CAAA;AAElB,QAAA,IAAI,IAAA,EAAM;AAER,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,IAAK,CAAA,EAAG,CAAA,EAAA,EAAK;AAC3B,YAAA,IAAA,CAAK,CAAC,CAAA,GAAA,CAAK,IAAA,CAAK,CAAC,CAAA,IAAK,CAAA,IAAK,CAAA,IAAK,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UACjD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,IAAA,MAAM,CAAA,GAAI,KAAA,CAAM,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AAE/B,MAAA,CAAA,CAAE,CAAC,CAAA,GAAI,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AAGtB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,GAAG,CAAA,EAAA,EAAK;AAC9B,QAAA,CAAA,CAAE,CAAC,CAAA,GAAA,CAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAA,CAAM,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA,KAAM,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MACrD;AAGA,MAAA,MAAM,IAAA,GAAO,GAAA,CAAI,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AAC5B,MAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAI,KAAK,IAAA,CAAK,GAAA,CAAI,IAAI,CAAA,IAAK,GAAA,EAAK;AACnD,QAAA,MAAM,IAAI,oBAAoB,uCAAuC,CAAA;AAAA,MACvE;AACA,MAAA,CAAA,CAAE,CAAC,CAAA,GAAA,CAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,IAAA;AAAA,IACvB;AAEA,IAAA,OAAO,CAAA;AAAA,EACT;AAAA,EAEQ,sBAAA,CACN,CAAA,EACA,CAAA,EACA,OAAA,EACA,GAAA,EACgC;AAChC,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,IAAI,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACrC,IAAA,MAAM,IAAI,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAErC,IAAA,IAA
I,KAAA,GAAQ,CAAA;AACZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA;AACnB,MAAA,CAAA,CAAE,CAAC,CAAA,GAAI,EAAA;AACP,MAAA,KAAA,IAAS,EAAA,GAAK,EAAA;AAAA,IAChB;AAEA,IAAA,IAAI,UAAU,CAAA,EAAG;AACf,MAAA,OAAO,EAAE,CAAA,EAAG,KAAA,EAAO,CAAA,EAAE;AAAA,IACvB;AAEA,IAAA,MAAM,CAAA,GAAI,EAAE,KAAA,EAAM;AAClB,IAAA,MAAM,QAAQ,GAAA,GAAM,GAAA;AACpB,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,OAAA,EAAS,IAAA,EAAA,EAAQ;AACzC,MAAA,MAAM,KAAK,IAAI,KAAA,CAAc,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AACtC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,GAAA,GAAM,CAAA;AACV,QAAA,MAAM,GAAA,GAAM,EAAE,CAAC,CAAA;AACf,QAAA,IAAI,CAAC,GAAA,EAAK;AACV,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,GAAA,IAAA,CAAQ,IAAI,CAAC,CAAA,IAAK,CAAA,KAAM,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,QAClC;AACA,QAAA,EAAA,CAAG,CAAC,CAAA,GAAI,GAAA;AAAA,MACV;AAEA,MAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,KAAA,IAAA,CAAU,EAAE,CAAC,CAAA,IAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MACnC;AACA,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,IAAK,UAAU,CAAA,EAAG;AAC1C,QAAA,MAAM,IAAI,mBAAA;AAAA,UACR;AAAA,SACF;AAAA,MACF;AAEA,MAAA,MAAM,QAAQ,KAAA,GAAQ,KAAA;AACtB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,CAAA,CAAE,CAAC,KAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,KAAA,IAAS,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AACtC,QAAA,CAAA,CAAE,CAAC,KAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,KAAA,IAAS,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MACzC;AAEA,MAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA;AACnB,QAAA,KAAA,IAAS,EAAA,GAAK,EAAA;AAAA,MAChB;AACA,MAAA,KAAA,GAAQ,IAAA,GAAO,CAAA;AACf,MAAA,IAAI,QAAQ,KAAA,EAAO;AACjB,QAAA;AAAA,MACF;AAEA,MAAA,MAAM,OAAO,KAAA,GAAQ,KAAA;AACrB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,CAAA,CAAE,CAAC,KAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,IAAA,IAAQ,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,MACvC;AACA,MAAA,KAAA,GAAQ,KAAA;AAAA,IACV;AAEA,IAAA,OAAO,EAAE,GAAG,KAAA,EAAM;AAAA,EACpB;AAAA,EAEQ,SACN,IAAA,EACA,IAAA,EACA,UACA,SAAA,EACA,KAAA,EACA,SACA,GAAA,EACgC;AAChC,IAAA,MAAM,IAAI,IAAI,KAAA,CAAc,SAAS,CAAA,CAAE,KAAK,CAAC,CAAA;AAC7C,IAAA,MAAM,UAAU,IAAI,KAAA,CAAc,SAAS,CAAA,CAAE,KAAK,CAAC,CAAA;AACnD,IAAA,MAAM,YAAY,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,KAAK,CAAC,CAAA;AAEpD,IAAA,IAAI,SAAA,GAAY,CAAA;AAChB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,IAAI,MAAA,GAAS,CAAA;AACb,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,GAAA,GAAM,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AACrB,QAAA,MAAA,IAAU,GAAA,GAAM,GAAA;AAAA,MAClB;AACA,MAAA,IAAI,SAAS,SAAA,EAAW;AACtB,QAAA,SAAA,GAAY,MAAA;AAAA,MACd;AAAA,IACF;AAEA,IAAA,MAAM,KAAA,GAAQ,QAAA,KAAa,CAAA,GAAI,CAAA,GAAI,QAAA;AACnC,IAAA,MAAM,CAAA,GAAI,YAAY,KAAA,GAAQ,KAAA;AAC9B,IAAA,MAAM,IAAA,GAAO,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA,GAAI,CAAA;AAE7B,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,OAAA,EAAS,IAAA,EAAA,EAAQ;AACzC,MAAA,IAAI,SAAA,GAAY,CAAA;AAEhB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,IAAI,OAAA,GAAU,CAAA;AACd,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,UAAA,OAAA,IAAA,CAAY,EAAE,CAAC,CAAA,IAAK,CAAA,IAAK,IAAA,CAAK,GAAG,CAAC,CAAA;AAAA,QACpC;AAEA,QAAA,MAAM,EAAA,GAAK,KAAK,CAAC,CAAA;AACjB,QAAA,M
AAM,cAAc,OAAA,GAAU,EAAA;AAC9B,QAAA,MAAM,KAAA,GAAQ,WAAA,IAAe,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA,CAAA;AAC7C,QAAA,SAAA,CAAU,CAAC,CAAA,GAAI,WAAA;AAEf,QAAA,IAAI,UAAU,CAAA,EAAG;AACf,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,OAAA,CAAQ,CAAC,KAAK,OAAA,CAAQ,CAAC,KAAK,CAAA,IAAK,KAAA,GAAQ,IAAA,CAAK,CAAA,EAAG,CAAC,CAAA;AAAA,UACpD;AAAA,QACF;AAEA,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,UAAA,MAAM,IAAA,GAAA,CAAQ,QAAQ,CAAC,CAAA,IAAK,KAAK,KAAA,IAAS,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AAClD,UAAA,MAAM,SAAS,IAAA,GAAO,IAAA;AACtB,UAAA,CAAA,CAAE,CAAC,CAAA,GAAA,CAAK,CAAA,CAAE,CAAC,KAAK,CAAA,IAAK,MAAA;AACrB,UAAA,IAAI,IAAA,CAAK,GAAA,CAAI,MAAM,CAAA,GAAI,SAAA,EAAW;AAChC,YAAA,SAAA,GAAY,IAAA,CAAK,IAAI,MAAM,CAAA;AAAA,UAC7B;AAAA,QACF;AAAA,MACF;AAEA,MAAA,KAAA,GAAQ,IAAA,GAAO,CAAA;AACf,MAAA,IAAI,YAAY,GAAA,EAAK;AACnB,QAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,EAAE,CAAA,EAAG,CAAA,EAAG,KAAA,EAAM;AAAA,EACvB;AAAA,EAEQ,YAAA,CAAa,MAAc,KAAA,EAAyB;AAC1D,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,MAAA,MAAM,CAAA,GAAI,OAAO,IAAA,CAAK,IAAA,CAAK,KAAK,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAChD,MAAA,MAAM,CAAA,GAAI,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACtB,MAAA,MAAA,CAAO,IAAA,CAAK,CAAA,KAAM,CAAA,GAAI,CAAA,GAAI,IAAI,CAAC,CAAA;AAAA,IACjC;AACA,IAAA,OAAO,OAAO,MAAM,CAAA;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,QAAQ,CAAA,EAAmB;AAEzB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,wCAAwC,CAAA;AAAA,IACnE;AAGA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,OAAO,CAAA;AAExD,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,CAAA,GAAI,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACxB,IAAA,MAAM,IAAA,GAAO,KAAA,CAAM,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAG5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,IAAI,GAAA,GAAM,KAAK,UAAA,IAAc,CAAA;AAG7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,GAAA,IACE,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,CAAA,GAAI,CAAC,CAAA,IAAK,CAAC,IACxC,MAAA,CAAO,IAAA,CAAK,MAAM,IAAA,CAAK,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,MACtD;AACA,MAAA,IAAA,CAAK,CAAC,CAAA,GAAI,GAAA;AAAA,IACZ;AAEA,IAAA,OAAO,OAAO,IAAI,CAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAElC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,qCAAqC,CAAA;AAAA,IAChE;AAGA,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AAGA,IAAA,MAAM,IAAA,GAAO,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC3B,IAAA,IAAI,IAAA,CAAK,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACxB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,IAAA,CAAK,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OAC/E;AAAA,IACF;AAEA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAGZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAAA,I
AC3C;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAGX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,IAAA,GAAO,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAC7C,MAAA,MAAM,OAAA,GAAU,OAAO,IAAA,CAAK,IAAA,CAAK,KAAK,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAGtD,MAAA,KAAA,IAAA,CAAU,OAAO,OAAA,KAAY,CAAA;AAG7B,MAAA,KAAA,IAAA,CAAU,OAAO,KAAA,KAAU,CAAA;AAAA,IAC7B;AAKA,IAAA,IAAI,UAAU,CAAA,EAAG;AACf,MAAA,OAAO,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA;AAAA,IAC7B;AAGA,IAAA,OAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,SAAA,GAAqC;AACnC,IAAA,OAAO,EAAE,GAAG,IAAA,CAAK,OAAA,EAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,UAAU,MAAA,EAAuC;AAC/C,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,MAAM,CAAA,EAAG;AACjD,MAAA,QAAQ,GAAA;AAAK,QACX,KAAK,OAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,wCAAA,EAA2C,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACxD,OAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,KAAA,GAAQ,KAAA;AACrB,UAAA;AAAA,QAEF,KAAK,SAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,0CAAA,EAA6C,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cAC1D,SAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,OAAA,GAAU,KAAA;AACvB,UAAA;AAAA,QAEF,KAAK,KAAA;AACH,UAAA,IAAI,OAAO,KAAA,KAAU,QAAA,IAAY,CAAC,MAAA,CAAO,QAAA,CAAS,KAAK,CAAA,EAAG;AACxD,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,sCAAA,EAAyC,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACtD,KAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,GAAA,GAAM,KAAA;AACnB,UAAA;AAAA,QAEF,KAAK,cAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,yCAAA,EAA4C,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACzD,cAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,YAAA,GAAe,KAAA;AAC5B,UAAA;AAAA,QAEF,KAAK,WAAA;AACH,UAAA,IAAI,OAAO,UAAU,SAAA,EAAW;AAC9B,YAAA,MAAM,IAAI,qBAAA;AAAA,cACR,CAAA,sCAAA,EAAyC,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA;AAAA,cACtD,WAAA;AAAA,cACA;AAAA,aACF;AAAA,UACF;AACA,UAAA,IAAA,CAAK,QAAQ,SAAA,GAAY,KAAA;AACzB,UAAA;AAAA,QAEF,KAAK,QAAA;AACH,UAAA,IACE,KAAA,KAAU,UACV,KAAA,KAAU,KAAA,IACV,UAAU,UAAA,IACV,KAAA,KAAU,MAAA,IACV,KAAA,KAAU,KAAA,EACV;AACA,YAAA,MAAM,IAAI,sBAAsB,CAAA,gBAAA,EAAmB,MAAA,CAAO,KAAK,CAAC,CAAA,CAAA,EAAI,UAAU,KAAK,CAAA;AAAA,UACrF;AACA,UAAA,IAAA,CAAK,QAAQ,MAAA,GAAS,KAAA;AACtB,UAAA;AAAA,QAEF;AACE,UAAA,MAAM,IAAI,qBAAA,CAAsB,CAAA,mBAAA,EAAsB,GAAG,CAAA,CAAA,EAAI,KAAK,KAAK,CAAA;AAAA;AAC3E,IACF;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,IAAA,GAAe;AACjB,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,KAAA,EAAO;AAC/B,MAAA,MAAM,IAAI,eAAe,6CAA6C,CAAA;AAAA,IACxE;AACA,IAAA,OAAO,IAAA,CAAK,KAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,SAAA,GAAoB;AACtB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,0CAA0C,CAAA;AAAA,IACrE;AACA,IAAA,OAAO,KAAK,UAAA,IAAc,CAAA;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,KAAA,GAA4B;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,sCAAsC,CAAA;AAAA,IACjE;AACA,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACd;AACF;;;ACjxBO,IAAM,OAAN,MAAW;AAAA;AAAA,EAEC,WAAA;AAAA;AAAA,EAGA,UAAA;AAAA;AAAA,EAGA,YAAA;AAAA;AAAA,EAGA,KAAA;AAAA;AAAA,EAGA,iBAAA;AAAA;AAAA,EAGA,qBAAA;AAAA;AAAA,EAGA,WAAA;AAAA;AAAA,EAGA,WAAA;AAAA;AAAA,EAGA,MAAA;AAAA;AAAA,EAGA,eAAA;AAAA;AAAA,EAGA,oBAAA;AAAA;AAAA,EAGA,eAAA;AAAA;AAAA,EAGT,YAAwB,EAAC;AAAA;AAAA,EAGzB,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAiBI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,CAAA;AA
C1C,IAAA,IAAA,CAAK,UAAA,GAAa,QAAQ,UAAA,IAAc,EAAA;AACxC,IAAA,IAAA,CAAK,YAAA,GAAe,QAAQ,YAAA,IAAgB,GAAA;AAC5C,IAAA,IAAA,CAAK,KAAA,GAAQ,QAAQ,KAAA,IAAS,GAAA;AAC9B,IAAA,IAAA,CAAK,iBAAA,GAAoB,QAAQ,iBAAA,IAAqB,EAAA;AACtD,IAAA,MAAM,qBAAA,GAAwB,QAAQ,qBAAA,IAAyB,GAAA;AAC/D,IAAA,IAAA,CAAK,qBAAA,GAAwB,IAAA,CAAK,GAAA,CAAI,qBAAA,EAAuB,KAAK,KAAK,CAAA;AACvE,IAAA,IAAA,CAAK,cAAc,OAAA,CAAQ,WAAA;AAC3B,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,IAAA;AAC1C,IAAA,IAAA,CAAK,MAAA,GAAS,QAAQ,MAAA,IAAU,OAAA;AAChC,IAAA,IAAA,CAAK,eAAA,GAAkB,QAAQ,eAAA,IAAmB,GAAA;AAClD,IAAA,IAAA,CAAK,oBAAA,GACH,OAAA,CAAQ,oBAAA,IAAwB,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,UAAA,GAAa,CAAC,CAAC,CAAA;AAC7E,IAAA,IAAA,CAAK,eAAA,GAAkB,OAAA,CAAQ,eAAA,IAAmB,IAAA,CAAK,GAAA,CAAI,EAAA,EAAI,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,UAAA,GAAa,CAAC,CAAC,CAAA;AAE9F,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,eAAe,CAAA,EAAG;AAChE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,8BAAA;AAAA,QACA,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,UAAU,CAAA,IAAK,IAAA,CAAK,cAAc,CAAA,EAAG;AAC7D,MAAA,MAAM,IAAI,qBAAA,CAAsB,6BAAA,EAA+B,YAAA,EAAc,KAAK,UAAU,CAAA;AAAA,IAC9F;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,YAAY,CAAA,IAAK,IAAA,CAAK,gBAAgB,CAAA,EAAG;AACjE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,+BAAA;AAAA,QACA,cAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,KAAK,CAAA,IAAK,IAAA,CAAK,SAAS,CAAA,EAAG;AACpD,MAAA,MAAM,IAAI,qBAAA,CAAsB,kCAAA,EAAoC,OAAA,EAAS,KAAK,KAAK,CAAA;AAAA,IACzF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,iBAAiB,CAAA,IAAK,IAAA,CAAK,qBAAqB,CAAA,EAAG;AAC3E,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,oCAAA;AAAA,QACA,mBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,MAAA,CAAO,SAAA,CAAU,qBAAqB,CAAA,IAAK,wBAAwB,CAAA,EAAG;AACzE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,+CAAA;AAAA,QACA,uBAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,eAAe,CAAA,EAAG;AAC/D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,8BAAA;AAAA,QACA,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,IAAA,CAAK,MAAA,KAAW,OAAA,IAAW,IAAA,CAAK,WAAW,aAAA,EAAe;AAC5D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,yCAAA;AAAA,QACA,QAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,mBAAmB,CAAA,EAAG;AACxE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,4CAAA;AAAA,QACA,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,oBAAoB,CAAA,IAAK,IAAA,CAAK,wBAAwB,CAAA,EAAG;AAClF,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,iDAAA;AAAA,QACA,sBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,mBAAmB,CAAA,EAAG;AACxE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,4CAAA;AAAA,QACA,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,OAAA,CAAQ,gBAAgB,MAAA,IAAa,CAAC,OAAO,QAAA,CAAS,OAAA,CAAQ,WAAW,CAAA,EAAG;AAC9E,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,qCAAA;AAAA,QACA,aAAA;AAAA,QACA,OAAA,CAAQ;AAAA,OACV;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,CAAA,EAA2B;AAClD,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,YAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,MAAM,CAAA,EAAG;AACX,UAAA,GAAA,CAAI,KAAK,CAAC,CAAA;AAAA,QACZ,CAAA,MAAO;AACL,UAAA,IAAI,IAAA,GAAO,CAAA;AACX,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,IAAI,MAAM,EAAA,EAAI;AACZ,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,CAAG,QAAQ,CAAA,EAAA,EA
AK;AAClC,cAAA,MAAM,QAAQ,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AACtC,cAAA,IAAA,IAAQ,IAAA,GAAO,IAAA;AAAA,YACjB;AAAA,UACF;AACA,UAAA,GAAA,CAAI,KAAK,IAAI,CAAA;AAAA,QACf;AAAA,MACF;AACA,MAAA,SAAA,CAAU,KAAK,GAAG,CAAA;AAAA,IACpB;AAEA,IAAA,OAAO,SAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAA,CAAuB,GAAa,CAAA,EAAqB;AAC/D,IAAA,IAAI,IAAA,GAAO,CAAA;AACX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,QAAQ,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,QAAQ,CAAA,CAAE,CAAC,KAAK,CAAA,KAAM,CAAA,CAAE,CAAC,CAAA,IAAK,CAAA,CAAA;AACpC,MAAA,IAAA,IAAQ,IAAA,GAAO,IAAA;AAAA,IACjB;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAA,CAAc,CAAA,EAAW,OAAA,EAAiB,CAAA,EAAW,GAAA,EAA6B;AACxF,IAAA,MAAM,MAAA,uBAAa,GAAA,EAAY;AAC/B,IAAA,MAAM,WAAA,GAAc,IAAI,EAAA,GAAK,GAAA;AAC7B,IAAA,IAAI,QAAA,GAAW,CAAA;AAEf,IAAA,OAAO,MAAA,CAAO,IAAA,GAAO,CAAA,IAAK,QAAA,GAAW,WAAA,EAAa;AAChD,MAAA,MAAM,MAAM,IAAA,CAAK,KAAA,CAAM,GAAA,EAAI,IAAK,IAAI,CAAA,CAAE,CAAA;AACtC,MAAA,MAAM,GAAA,GAAM,GAAA,IAAO,OAAA,GAAU,GAAA,GAAM,CAAA,GAAI,GAAA;AACvC,MAAA,MAAA,CAAO,IAAI,GAAG,CAAA;AACd,MAAA,QAAA,IAAY,CAAA;AAAA,IACd;AAEA,IAAA,IAAI,MAAA,CAAO,OAAO,CAAA,EAAG;AACnB,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAK,MAAA,CAAO,IAAA,GAAO,GAAG,CAAA,EAAA,EAAK;AAC7C,QAAA,IAAI,MAAM,OAAA,EAAS;AACjB,UAAA,MAAA,CAAO,IAAI,CAAC,CAAA;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,KAAA,CAAM,KAAK,MAAM,CAAA;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKQ,2BAAA,CACN,CAAA,EACA,UAAA,EACA,CAAA,EACA,GAAA,EACU;AACV,IAAA,MAAM,MAAA,uBAAa,GAAA,EAAY;AAC/B,IAAA,MAAM,WAAA,GAAc,IAAI,EAAA,GAAK,GAAA;AAC7B,IAAA,IAAI,QAAA,GAAW,CAAA;AAEf,IAAA,OAAO,MAAA,CAAO,IAAA,GAAO,CAAA,IAAK,QAAA,GAAW,WAAA,EAAa;AAChD,MAAA,MAAM,GAAA,GAAM,IAAA,CAAK,KAAA,CAAM,GAAA,KAAQ,CAAC,CAAA;AAChC,MAAA,IAAI,CAAC,UAAA,CAAW,GAAA,CAAI,GAAG,CAAA,EAAG;AACxB,QAAA,MAAA,CAAO,IAAI,GAAG,CAAA;AAAA,MAChB;AACA,MAAA,QAAA,IAAY,CAAA;AAAA,IACd;AAEA,IAAA,IAAI,MAAA,CAAO,OAAO,CAAA,EAAG;AACnB,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAK,MAAA,CAAO,IAAA,GAAO,GAAG,CAAA,EAAA,EAAK;AAC7C,QAAA,IAAI,CAAC,UAAA,CAAW,GAAA,CAAI,CAAC,CAAA,EAAG;AACtB,UAAA,MAAA,CAAO,IAAI,CAAC,CAAA;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,KAAA,CAAM,KAAK,MAAM,CAAA;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,SAAA,EAAmC;AAC9D,IAAA,MAAM,IAAI,SAAA,CAAU,MAAA;AACpB,IAAA,MAAM,aAAA,GAAgB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,UAAU,CAAA;AAC9C,IAAA,MAAM,IAAgB,EAAC;AAEvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,MAAgB,IAAI,KAAA,CAAM,CAAC,CAAA,CAAE,KAAK,CAAC,CAAA;AAGzC,MAAA,IAAI,QAAA,GAAW,KAAA;AACf,MAAA,IAAI,QAAA,GAAW,IAAA;AACf,MAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,MAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,EAAA,EAAI,IAAA,EAAA,EAAQ;AAEpC,QAAA,IAAIC,OAAAA,GAAS,CAAA;AACb,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,IAAI,MAAM,CAAA,EAAG;AACX,YAAA,MAAM,OAAA,GAAU,UAAU,CAAC,CAAA;AAC3B,YAAA,MAAM,IAAA,GAAO,OAAA,GAAW,OAAA,CAAQ,CAAC,KAAK,CAAA,GAAK,CAAA;AAC3C,YAAAA,WAAU,IAAA,CAAK,GAAA,CAAI,CAAC,IAAA,IAAQ,CAAA,GAAI,QAAQ,KAAA,CAAM,CAAA;AAAA,UAChD;AAAA,QACF;AAGA,QAAA,IAAI,OAAA,GAAU,CAAA;AACd,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,UAAA,IAAI,MAAM,CAAA,EAAG;AACX,YAAA,MAAM,OAAA,GAAU,UAAU,CAAC,CAAA;AAC3B,YAAA,MAAM,IAAA,GAAO,OAAA,GAAW,OAAA,CAAQ,CAAC,KAAK,CAAA,GAAK,CAAA;AAC3C,YAAA,MAAM,GAAA,GAAM,KAAK,GAAA,CAAI,CAAC,QAAQ,CAAA,GAAI,KAAA,GAAQ,KAAA,CAAM,CAAA,IAAKA,OAAAA,GAAS,KAAA,CAAA;AAC9D,YAAA,IAAI,MAAM,KAAA,EAAO;AACf,cAAA,OAAA,IAAW,GAAA,GAAM,IAAA,CAAK,GAAA,CAAI,GAAG,CAAA;AAAA,YAC/B;AAAA,UACF;AAAA,QACF;AAGA,QAAA,IAAI,IAAA,CAAK,GAAA,CAAI,OAAA,GAAU,aAAa,IAAI,IAAA,EAAM;AAC5C,UAAA;AAAA,QACF;AAEA,QAAA,IAAI,UAAU,aAAA,EAAe;AAC
3B,UAAA,QAAA,GAAW,KAAA;AACX,UAAA,KAAA,GAAA,CAAS,QAAQ,QAAA,IAAY,CAAA;AAAA,QAC/B,CAAA,MAAO;AACL,UAAA,QAAA,GAAW,KAAA;AACX,UAAA,KAAA,GAAA,CAAS,QAAQ,QAAA,IAAY,CAAA;AAAA,QAC/B;AAAA,MACF;AAGA,MAAA,IAAI,MAAA,GAAS,CAAA;AACb,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,MAAM,CAAA,EAAG;AACX,UAAA,MAAM,OAAA,GAAU,UAAU,CAAC,CAAA;AAC3B,UAAA,MAAM,IAAA,GAAO,OAAA,GAAW,OAAA,CAAQ,CAAC,KAAK,CAAA,GAAK,CAAA;AAC3C,UAAA,MAAA,IAAU,KAAK,GAAA,CAAI,CAAC,IAAA,IAAQ,CAAA,GAAI,QAAQ,KAAA,CAAM,CAAA;AAAA,QAChD;AAAA,MACF;AAEA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,MAAM,CAAA,EAAG;AACX,UAAA,MAAM,OAAA,GAAU,UAAU,CAAC,CAAA;AAC3B,UAAA,MAAM,IAAA,GAAO,OAAA,GAAW,OAAA,CAAQ,CAAC,KAAK,CAAA,GAAK,CAAA;AAC3C,UAAA,GAAA,CAAI,CAAC,CAAA,GAAI,IAAA,CAAK,GAAA,CAAI,CAAC,QAAQ,CAAA,GAAI,KAAA,GAAQ,KAAA,CAAM,CAAA,IAAK,MAAA,GAAS,KAAA,CAAA;AAAA,QAC7D;AAAA,MACF;AAEA,MAAA,CAAA,CAAE,KAAK,GAAG,CAAA;AAAA,IACZ;AAGA,IAAA,MAAM,OAAmB,EAAC;AAC1B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,GAAA,GAAM,CAAA,CAAE,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AACzB,QAAA,MAAM,GAAA,GAAM,CAAA,CAAE,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AACzB,QAAA,GAAA,CAAI,IAAA,CAAA,CAAM,GAAA,GAAM,GAAA,KAAQ,CAAA,GAAI,CAAA,CAAE,CAAA;AAAA,MAChC;AACA,MAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA,IACf;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,0BAAA,CACN,CAAA,EACA,SAAA,EACA,GAAA,EACc;AACd,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,aAAA,GAAgB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,UAAU,CAAA;AAC9C,IAAA,MAAM,OAAqB,EAAC;AAE5B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,kBAAkB,IAAA,CAAK,aAAA,CAAc,CAAA,EAAG,CAAA,EAAG,WAAW,GAAG,CAAA;AAC/D,MAAA,MAAM,YAAY,eAAA,CAAgB,GAAA;AAAA,QAAI,CAAC,CAAA,KACrC,IAAA,CAAK,sBAAA,CAAuB,CAAA,CAAE,CAAC,CAAA,IAAK,EAAC,EAAG,CAAA,CAAE,CAAC,CAAA,IAAK,EAAE;AAAA,OACpD;AAEA,MAAA,IAAI,QAAA,GAAW,KAAA;AACf,MAAA,IAAI,QAAA,GAAW,IAAA;AACf,MAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,MAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,EAAA,EAAI,IAAA,EAAA,EAAQ;AACpC,QAAA,IAAIA,OAAAA,GAAS,CAAA;AACb,QAAA,KAAA,MAAW,QAAQ,SAAA,EAAW;AAC5B,UAAAA,WAAU,IAAA,CAAK,GAAA,CAAI,CAAC,IAAA,IAAQ,CAAA,GAAI,QAAQ,KAAA,CAAM,CAAA;AAAA,QAChD;AAEA,QAAA,IAAI,OAAA,GAAU,CAAA;AACd,QAAA,KAAA,MAAW,QAAQ,SAAA,EAAW;AAC5B,UAAA,MAAM,GAAA,GAAM,KAAK,GAAA,CAAI,CAAC,QAAQ,CAAA,GAAI,KAAA,GAAQ,KAAA,CAAM,CAAA,IAAKA,OAAAA,GAAS,KAAA,CAAA;AAC9D,UAAA,IAAI,MAAM,KAAA,EAAO;AACf,YAAA,OAAA,IAAW,GAAA,GAAM,IAAA,CAAK,GAAA,CAAI,GAAG,CAAA;AAAA,UAC/B;AAAA,QACF;AAEA,QAAA,IAAI,IAAA,CAAK,GAAA,CAAI,OAAA,GAAU,aAAa,IAAI,IAAA,EAAM;AAC5C,UAAA;AAAA,QACF;AAEA,QAAA,IAAI,UAAU,aAAA,EAAe;AAC3B,UAAA,QAAA,GAAW,KAAA;AACX,UAAA,KAAA,GAAA,CAAS,QAAQ,QAAA,IAAY,CAAA;AAAA,QAC/B,CAAA,MAAO;AACL,UAAA,QAAA,GAAW,KAAA;AACX,UAAA,KAAA,GAAA,CAAS,QAAQ,QAAA,IAAY,CAAA;AAAA,QAC/B;AAAA,MACF;AAEA,MAAA,IAAI,MAAA,GAAS,CAAA;AACb,MAAA,KAAA,MAAW,QAAQ,SAAA,EAAW;AAC5B,QAAA,MAAA,IAAU,KAAK,GAAA,CAAI,CAAC,IAAA,IAAQ,CAAA,GAAI,QAAQ,KAAA,CAAM,CAAA;AAAA,MAChD;AAEA,MAAA,MAAM,SAAmB,SAAA,CAAU,GAAA;AAAA,QACjC,CAAC,IAAA,KAAS,IAAA,CAAK,GAAA,CAAI,CAAC,QAAQ,CAAA,GAAI,KAAA,GAAQ,KAAA,CAAM,CAAA,IAAK,MAAA,GAAS,KAAA;AAAA,OAC9D;AAEA,MAAA,IAAA,CAAK,IAAA,CAAK,EAAE,OAAA,EAAS,eAAA,EAAiB,QAAQ,CAAA;AAAA,IAChD;AAEA,IAAA,OAAO,IAAA,CAAK,gBAAA,CAAiB,IAAA,EAAM,CAAC,CAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAA,CAAiB,MAAoB,CAAA,EAAyB;AACpE,IAAA,MAAM,IAAA,GAAmC,IAAA,CAAK,GAAA,CAAI,CAAC,GAAA,KAAQ;AACzD,MAAA,MAAM,GAAA,uBAAU,GAAA,EAAoB;AACpC,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,GAAA,CAAI,OAAA,CAAQ
,QAAQ,CAAA,EAAA,EAAK;AAC3C,QAAA,MAAM,CAAA,GAAI,GAAA,CAAI,OAAA,CAAQ,CAAC,CAAA;AACvB,QAAA,IAAI,MAAM,MAAA,EAAW;AACrB,QAAA,MAAM,GAAA,GAAM,GAAA,CAAI,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA;AAC7B,QAAA,GAAA,CAAI,GAAA,CAAI,GAAG,GAAG,CAAA;AAAA,MAChB;AACA,MAAA,OAAO,GAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,MAAM,MAAoB,EAAC;AAC3B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,GAAA,GAAM,KAAK,CAAC,CAAA;AAClB,MAAA,IAAI,CAAC,GAAA,EAAK;AACR,QAAA,GAAA,CAAI,IAAA,CAAK,EAAE,OAAA,EAAS,IAAI,MAAA,EAAQ,IAAI,CAAA;AACpC,QAAA;AAAA,MACF;AACA,MAAA,MAAM,OAAA,GAAU,GAAA,CAAI,OAAA,CAAQ,KAAA,EAAM;AAClC,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,OAAA,CAAQ,QAAQ,CAAA,EAAA,EAAK;AACvC,QAAA,MAAM,CAAA,GAAI,QAAQ,CAAC,CAAA;AACnB,QAAA,IAAI,MAAM,MAAA,EAAW;AACrB,QAAA,MAAM,MAAM,IAAA,CAAK,CAAC,CAAA,EAAG,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA;AAC/B,QAAA,MAAM,MAAM,IAAA,CAAK,CAAC,CAAA,EAAG,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA;AAC/B,QAAA,MAAA,CAAO,IAAA,CAAA,CAAM,GAAA,GAAM,GAAA,KAAQ,CAAA,GAAI,CAAA,CAAE,CAAA;AAAA,MACnC;AACA,MAAA,GAAA,CAAI,IAAA,CAAK,EAAE,OAAA,EAAS,MAAA,EAAQ,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,GAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,CAAA,EAAuB;AACjD,IAAA,MAAM,YAAwB,EAAC;AAG/B,IAAA,IAAI,IAAA,GAAO,IAAA,CAAK,WAAA,IAAe,IAAA,CAAK,GAAA,EAAI;AACxC,IAAA,MAAM,SAAS,MAAc;AAC3B,MAAA,IAAA,GAAQ,IAAA,GAAO,aAAa,KAAA,GAAS,UAAA;AACrC,MAAA,OAAO,IAAA,GAAO,UAAA;AAAA,IAChB,CAAA;AAEA,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AAEzC,QAAA,MAAM,EAAA,GAAK,QAAO,IAAK,IAAA;AACvB,QAAA,MAAM,KAAK,MAAA,EAAO;AAClB,QAAA,MAAM,CAAA,GAAI,IAAA,CAAK,IAAA,CAAK,EAAA,GAAK,KAAK,GAAA,CAAI,EAAE,CAAC,CAAA,GAAI,IAAA,CAAK,GAAA,CAAI,CAAA,GAAI,IAAA,CAAK,KAAK,EAAE,CAAA;AAClE,QAAA,GAAA,CAAI,IAAA,CAAK,IAAI,IAAM,CAAA;AAAA,MACrB;AACA,MAAA,SAAA,CAAU,KAAK,GAAG,CAAA;AAAA,IACpB;AAEA,IAAA,OAAO,SAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,CAAA,EAAgD;AAC/D,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,IAAgB,EAAC;AACvB,IAAA,IAAI,IAAA,GAAO,CAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,MAAM,CAAA,EAAG;AACX,UAAA,GAAA,CAAI,KAAK,CAAC,CAAA;AAAA,QACZ,CAAA,MAAO;AACL,UAAA,IAAI,IAAA,GAAO,CAAA;AACX,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,IAAI,MAAM,EAAA,EAAI;AACZ,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,cAAA,MAAM,QAAQ,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AACtC,cAAA,IAAA,IAAQ,IAAA,GAAO,IAAA;AAAA,YACjB;AAAA,UACF;AAEA,UAAA,MAAM,GAAA,GAAM,KAAK,CAAA,GAAI,IAAA,CAAA;AACrB,UAAA,GAAA,CAAI,KAAK,GAAG,CAAA;AACZ,UAAA,IAAA,IAAQ,GAAA;AAAA,QACV;AAAA,MACF;AACA,MAAA,CAAA,CAAE,KAAK,GAAG,CAAA;AAAA,IACZ;AAEA,IAAA,OAAO,EAAE,GAAG,IAAA,EAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAA,CACN,CAAA,EACA,SAAA,EACA,eAAA,EACA,GAAA,EACqC;AACrC,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,OAAoB,EAAC;AAC3B,IAAA,IAAI,IAAA,GAAO,CAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,eAAA,GAAkB,SAAA,CAAU,CAAC,CAAA,EAAG,WAAW,EAAC;AAClD,MAAA,MAAM,SAAA,GAAY,IAAI,GAAA,CAAY,eAAe,CAAA;AACjD,MAAA,SAAA,CAAU,IAAI,CAAC,CAAA;AAEf,MAAA,MAAM,YAAY,IAAA,CAAK,2BAAA,CAA4B,CAAA,EAAG,SAAA,EAAW,iBAAiB,GAAG,CAAA;AACrF,MAAA,MAAM,OAAA,GAAU,eAAA,CAAgB,MAAA,CAAO,SAAS,CAAA;AAChD,MAAA,MAAM,UAAoB,EAAC;AAE3B,MAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,EAAC;AACpB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI
,OAAA,CAAQ,QAAQ,CAAA,EAAA,EAAK;AACvC,QAAA,MAAM,CAAA,GAAI,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA;AACxB,QAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,EAAC;AACpB,QAAA,MAAM,IAAA,GAAO,IAAA,CAAK,sBAAA,CAAuB,EAAA,EAAI,EAAE,CAAA;AAC/C,QAAA,MAAM,GAAA,GAAM,KAAK,CAAA,GAAI,IAAA,CAAA;AACrB,QAAA,OAAA,CAAQ,KAAK,GAAG,CAAA;AAChB,QAAA,IAAA,IAAQ,GAAA;AAAA,MACV;AAEA,MAAA,IAAA,CAAK,IAAA,CAAK,EAAE,OAAA,EAAS,OAAA,EAAS,CAAA;AAAA,IAChC;AAEA,IAAA,OAAO,EAAE,MAAM,IAAA,EAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAA,CAAiB,CAAA,EAAe,CAAA,EAAe,IAAA,EAAc,CAAA,EAA2B;AAC9F,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,YAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,OAAiB,IAAI,KAAA,CAAM,KAAK,WAAW,CAAA,CAAE,KAAK,CAAC,CAAA;AAEzD,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,IAAI,MAAM,CAAA,EAAG;AACX,UAAA,MAAM,GAAA,GAAM,CAAA,CAAE,CAAC,CAAA,GAAI,CAAC,CAAA,IAAK,CAAA;AACzB,UAAA,MAAM,OAAO,CAAA,CAAE,CAAC,IAAI,CAAC,CAAA,IAAK,MAAM,IAAA,GAAO,KAAA,CAAA;AAEvC,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,UAAA,IAAI,MAAM,EAAA,EAAI;AAEZ,YAAA,IAAI,IAAA,GAAO,CAAA;AACX,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,cAAA,MAAM,QAAQ,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AACtC,cAAA,IAAA,IAAQ,IAAA,GAAO,IAAA;AAAA,YACjB;AACA,YAAA,MAAM,IAAA,GAAA,CAAQ,GAAA,GAAM,GAAA,KAAQ,CAAA,IAAK,CAAA,GAAI,IAAA,CAAA,CAAA;AAErC,YAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,cAAA,IAAA,CAAK,CAAC,CAAA,GAAA,CAAK,IAAA,CAAK,CAAC,KAAK,CAAA,IAAK,CAAA,GAAI,IAAA,IAAA,CAAS,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA,CAAA;AAAA,YAClE;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,MAAA,SAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IACrB;AAEA,IAAA,OAAO,SAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAA,CACN,KAAA,EACA,KAAA,EACA,IAAA,EACA,GACA,YAAA,EACY;AACZ,IAAA,MAAM,IAAI,CAAA,CAAE,MAAA;AACZ,IAAA,MAAM,YAAwB,EAAC;AAC/B,IAAA,MAAM,QAAQ,IAAA,GAAO,KAAA;AAErB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,MAAA,MAAM,OAAiB,IAAI,KAAA,CAAM,KAAK,WAAW,CAAA,CAAE,KAAK,CAAC,CAAA;AACzD,MAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,EAAC;AACpB,MAAA,MAAM,IAAA,GAAO,MAAM,CAAC,CAAA;AACpB,MAAA,MAAM,IAAA,GAAO,MAAM,CAAC,CAAA;AAEpB,MAAA,IAAI,IAAA,EAAM;AACR,QAAA,KAAA,IAAS,MAAM,CAAA,EAAG,GAAA,GAAM,IAAA,CAAK,OAAA,CAAQ,QAAQ,GAAA,EAAA,EAAO;AAClD,UAAA,MAAM,CAAA,GAAI,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA,IAAK,CAAA;AAC/B,UAAA,IAAI,MAAM,CAAA,EAAG;AACb,UAAA,MAAM,EAAA,GAAK,CAAA,CAAE,CAAC,CAAA,IAAK,EAAC;AAEpB,UAAA,MAAM,OAAA,GAAU,IAAA,EAAM,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA;AAChC,UAAA,MAAM,MAAM,OAAA,GAAU,YAAA;AACtB,UAAA,MAAM,OAAA,GAAU,IAAA,CAAK,OAAA,CAAQ,GAAG,CAAA,IAAK,CAAA;AACrC,UAAA,MAAM,MAAM,OAAA,GAAU,KAAA;AAEtB,UAAA,MAAM,IAAA,GAAA,CAAQ,MAAM,GAAA,IAAO,OAAA;AAC3B,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,YAAA,IAAA,CAAK,CAAC,CAAA,GAAA,CAAK,IAAA,CAAK,CAAC,KAAK,CAAA,IAAK,CAAA,GAAI,IAAA,IAAA,CAAS,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA,CAAA;AAAA,UAClE;AAAA,QACF;AAAA,MACF;AAEA,MAAA,SAAA,CAAU,KAAK,IAAI,CAAA;AAAA,IACrB;AAEA,IAAA,OAAO,SAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,CAAA,EAAmB;AAC9B,IAAA,6BAAA,CAA8B,CAAC,CAAA;AAE/B,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,IAAI,WAAW,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,qBAAA,CAAsB,mCAAA,EAAqC,UAAA,EAAY,QAAQ,CAAA;AAAA,IAC3F;AACA,IAAA,IAAI,IAAA,CAAK,cAAc,QAAA,EAAU;AAC/B,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA
,4DAAA,EAA+D,IAAA,CAAK,UAAU,CAAA,YAAA,EAAe,QAAQ,CAAA,CAAA;AAAA,QACrG,YAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,IAAA,CAAK,MAAA,KAAW,OAAA,IAAW,QAAA,GAAW,KAAK,eAAA,EAAiB;AAC9D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,kDAAA,EAAqD,IAAA,CAAK,eAAe,CAAA,qBAAA,EAAwB,QAAQ,CAAA,uDAAA,CAAA;AAAA,QACzG,UAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAGA,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACvD;AACA,MAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AAAA,IAChB;AAEA,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,WAAA,IAAe,IAAA,CAAK,GAAA,EAAI;AAC9C,IAAA,MAAM,OAAO,MAAM;AACjB,MAAA,IAAI,QAAQ,QAAA,KAAa,CAAA;AACzB,MAAA,OAAO,MAAc;AACnB,QAAA,KAAA,GAAS,KAAA,GAAQ,UAAU,UAAA,KAAgB,CAAA;AAC3C,QAAA,OAAO,QAAQ,CAAA,IAAK,EAAA;AAAA,MACtB,CAAA;AAAA,IACF,CAAA,GAAG;AAEH,IAAA,MAAM,cAAA,GAAiB,KAAK,MAAA,KAAW,aAAA;AACvC,IAAA,MAAM,gBAAgB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,oBAAA,EAAsB,WAAW,CAAC,CAAA;AACtE,IAAA,MAAM,qBAAqB,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,QAAA,GAAW,IAAI,aAAa,CAAA;AACnE,IAAA,MAAM,aAAA,GAAgB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,iBAAiB,kBAAkB,CAAA;AACvE,IAAA,IAAI,cAAA,IAAkB,gBAAgB,CAAA,EAAG;AACvC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,8DAAA;AAAA,QACA,sBAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AACA,IAAA,IAAI,cAAA,IAAkB,IAAA,CAAK,UAAA,IAAc,aAAA,EAAe;AACtD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,uEAAA,EAA0E,IAAA,CAAK,UAAU,CAAA,uBAAA,EAA0B,aAAa,CAAA,CAAA;AAAA,QAChI,YAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAGA,IAAA,MAAM,MAAA,GAAS,iBAAiB,IAAA,GAAO,IAAA,CAAK,qBAAqB,IAAA,CAAK,gBAAA,CAAiB,KAAK,CAAC,CAAA;AAC7F,IAAA,MAAM,UAAU,cAAA,GACZ,IAAA,CAAK,2BAA2B,KAAA,EAAO,aAAA,EAAe,GAAG,CAAA,GACzD,IAAA;AAGJ,IAAA,MAAM,CAAA,GAAI,IAAA,CAAK,mBAAA,CAAoB,QAAQ,CAAA;AAG3C,IAAA,MAAM,aAAyB,EAAC;AAChC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,UAAA,CAAW,IAAA,CAAK,IAAI,KAAA,CAAM,IAAA,CAAK,WAAW,CAAA,CAAE,IAAA,CAAK,CAAC,CAAC,CAAA;AAAA,IACrD;AAEA,IAAA,MAAM,QAAA,GAAW,GAAA;AACjB,IAAA,MAAM,aAAA,GAAgB,GAAA;AAEtB,IAAA,MAAM,QACJ,cAAA,IAAkB,OAAA,GACd,OAAA,CAAQ,GAAA,CAAI,CAAC,GAAA,KAAQ;AACnB,MAAA,MAAM,GAAA,uBAAU,GAAA,EAAoB;AACpC,MAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,GAAA,CAAI,OAAA,CAAQ,QAAQ,CAAA,EAAA,EAAK;AAC3C,QAAA,MAAM,CAAA,GAAI,GAAA,CAAI,OAAA,CAAQ,CAAC,CAAA;AACvB,QAAA,IAAI,MAAM,MAAA,EAAW;AACrB,QAAA,MAAM,GAAA,GAAM,GAAA,CAAI,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA;AAC7B,QAAA,GAAA,CAAI,GAAA,CAAI,GAAG,GAAG,CAAA;AAAA,MAChB;AACA,MAAA,OAAO,GAAA;AAAA,IACT,CAAC,IACD,EAAC;AAGP,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,IAAA,CAAK,OAAO,IAAA,EAAA,EAAQ;AAE5C,MAAA,MAAM,YAAA,GAAe,IAAA,GAAO,IAAA,CAAK,qBAAA,GAAwB,KAAK,iBAAA,GAAoB,CAAA;AAElF,MAAA,IAAI,SAAA;AACJ,MAAA,IAAI,kBAAkB,OAAA,EAAS;AAC7B,QAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAO,IAAA,EAAK,GAAI,KAAK,cAAA,CAAe,CAAA,EAAG,OAAA,EAAS,aAAA,EAAe,GAAG,CAAA;AAChF,QAAA,SAAA,GAAY,KAAK,sBAAA,CAAuB,KAAA,EAAO,KAAA,EAAO,IAAA,EAAM,GAAG,YAAY,CAAA;AAAA,MAC7E,CAAA,MAAO;AAEL,QAAA,MAAM,QAAoB,EAAC;AAC3B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,MAAM,MAAgB,EAAC;AACvB,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,YAAA,GAAA,CAAI,MAAM,MAAA,GAAS,CAAC,IAAI,CAAC,CAAA,IAAK,KAAK,YAAY,CAAA;AAAA,UACjD;AACA,UAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AAAA,QAChB;AAGA,QAAA,MAAM,EAAE,CAAA,EAAG,IAAA,EAAK,GAAI,IAAA,CAAK,SAAS,CAAC,CAAA;AAGnC,QAAA,SAAA,GAAY,IAAA,CAAK,gBAAA,CAAiB,KAAA,EAAO,CAAA,EAAG,MAAM,CAAC,CAAA;AAAA,MACrD;AAGA,MAAA,IAAI,QAAA,GAAW,CAAA;AACf,MAAA,KAAA,I
AAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,IAAA,GAAO,UAAU,CAAC,CAAA;AACxB,QAAA,IAAI,IAAA,EAAM;AACR,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,YAAA,QAAA,IAAA,CAAa,IAAA,CAAK,CAAC,CAAA,IAAK,CAAA,KAAM,CAAA;AAAA,UAChC;AAAA,QACF;AAAA,MACF;AACA,MAAA,QAAA,GAAW,IAAA,CAAK,KAAK,QAAQ,CAAA;AAE7B,MAAA,IAAI,QAAA,GAAW,KAAK,WAAA,EAAa;AAC/B,QAAA;AAAA,MACF;AAGA,MAAA,MAAM,eAAA,GAAkB,IAAA,GAAO,IAAA,CAAK,qBAAA,GAAwB,QAAA,GAAW,aAAA;AAGvE,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,QAAA,MAAM,IAAA,GAAO,UAAU,CAAC,CAAA;AACxB,QAAA,MAAM,GAAA,GAAM,WAAW,CAAC,CAAA;AAExB,QAAA,IAAI,EAAA,IAAM,QAAQ,GAAA,EAAK;AACrB,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,YAAA,GAAA,CAAI,CAAC,CAAA,GAAI,eAAA,IAAmB,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA,CAAA,GAAK,IAAA,CAAK,YAAA,IAAgB,IAAA,CAAK,CAAC,CAAA,IAAK,CAAA,CAAA;AAC3E,YAAA,EAAA,CAAG,CAAC,KAAK,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,GAAA,CAAI,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAGA,MAAA,MAAM,SAAmB,IAAI,KAAA,CAAM,KAAK,WAAW,CAAA,CAAE,KAAK,CAAC,CAAA;AAC3D,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,QAAA,IAAI,EAAA,EAAI;AACN,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,YAAA,MAAA,CAAO,CAAC,KAAK,MAAA,CAAO,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AACA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,QAAA,MAAA,CAAO,CAAC,CAAA,GAAA,CAAK,MAAA,CAAO,CAAC,KAAK,CAAA,IAAK,QAAA;AAAA,MACjC;AACA,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,EAAA,GAAK,EAAE,CAAC,CAAA;AACd,QAAA,IAAI,EAAA,EAAI;AACN,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,YAAA,EAAA,CAAG,CAAC,KAAK,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UACvC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,SAAA,GAAY,CAAA;AACjB,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AAEd,IAAA,OAAO,OAAO,CAAC,CAAA;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,CAAA,EAAiB;AACnB,IAAA,IAAA,CAAK,aAAa,CAAC,CAAA;AACnB,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,eAAA,GAA0B;AAC5B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,gDAAgD,CAAA;AAAA,IAC3E;AACA,IAAA,OAAO,MAAA,CAAO,KAAK,SAAS,CAAA;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,YAAY,IAAA,CAAK,UAAA;AAAA,MACjB,cAAc,IAAA,CAAK,YAAA;AAAA,MACnB,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,mBAAmB,IAAA,CAAK,iBAAA;AAAA,MACxB,uBAAuB,IAAA,CAAK,qBAAA;AAAA,MAC5B,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,QAAQ,IAAA,CAAK,MAAA;AAAA,MACb,iBAAiB,IAAA,CAAK,eAAA;AAAA,MACtB,sBAAsB,IAAA,CAAK,oBAAA;AAAA,MAC3B,iBAAiB,IAAA,CAAK;AAAA,KACxB;AAAA,EACF;AACF;;;ACr1BO,IAAM,aAAN,MAAuC;AAAA,EAC3B,YAAA;AAAA,EAET,QAAA;AAAA,EACA,WAAA;AAAA,EACA,MAAA;AAAA;AAAA,EACA,IAAA;AAAA;AAAA,EACA,YAAA;AAAA,EACA,MAAA,GAAS,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQjB,WAAA,CACE,OAAA,GAEI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,YAAA,GAAe,QAAQ,YAAA,IAAgB,IAAA;AAC5C,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,YAAY,CAAA,IAAK,IAAA,CAAK,eAAe,CAAA,EAAG;AAChE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,2CAAA;AAAA,QACA,cAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC
/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAA,CAAK,YAAA,GAAe,SAAA;AAGpB,IAAA,MAAM,QAAA,uBAAe,GAAA,EAAY;AACjC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,QAAA,CAAS,GAAA,CAAI,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IAC3C;AACA,IAAA,IAAA,CAAK,QAAA,GAAW,KAAA,CAAM,IAAA,CAAK,QAAQ,CAAA,CAAE,KAAK,CAAC,CAAA,EAAG,CAAA,KAAM,CAAA,GAAI,CAAC,CAAA;AAGzD,IAAA,IAAA,CAAK,WAAA,uBAAkB,GAAA,EAAI;AAC3B,IAAA,KAAA,MAAW,GAAA,IAAO,KAAK,QAAA,EAAU;AAC/B,MAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,IAAI,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAC,MAAM,GAAA,EAAK;AACxC,UAAA,KAAA,EAAA;AAAA,QACF;AAAA,MACF;AACA,MAAA,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,GAAA,EAAK,KAAA,GAAQ,QAAQ,CAAA;AAAA,IAC5C;AAGA,IAAA,IAAA,CAAK,MAAA,uBAAa,GAAA,EAAI;AACtB,IAAA,IAAA,CAAK,IAAA,uBAAW,GAAA,EAAI;AAEpB,IAAA,KAAA,MAAW,GAAA,IAAO,KAAK,QAAA,EAAU;AAC/B,MAAA,MAAM,eAA2B,EAAC;AAElC,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,IAAI,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAC,MAAM,GAAA,EAAK;AACxC,UAAA,MAAM,SAAmB,EAAC;AAC1B,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,UAC1D;AACA,UAAA,YAAA,CAAa,KAAK,MAAM,CAAA;AAAA,QAC1B;AAAA,MACF;AAEA,MAAA,MAAM,QAAkB,EAAC;AACzB,MAAA,MAAM,YAAsB,EAAC;AAE7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAElC,QAAA,IAAI,GAAA,GAAM,CAAA;AACV,QAAA,KAAA,MAAW,UAAU,YAAA,EAAc;AACjC,UAAA,GAAA,IAAO,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA;AAAA,QACtB;AACA,QAAA,MAAMC,KAAAA,GAAO,MAAM,YAAA,CAAa,MAAA;AAChC,QAAA,KAAA,CAAM,KAAKA,KAAI,CAAA;AAGf,QAAA,IAAI,MAAA,GAAS,CAAA;AACb,QAAA,KAAA,MAAW,UAAU,YAAA,EAAc;AACjC,UAAA,MAAM,IAAA,GAAA,CAAQ,MAAA,CAAO,CAAC,CAAA,IAAK,CAAA,IAAKA,KAAAA;AAChC,UAAA,MAAA,IAAU,IAAA,GAAO,IAAA;AAAA,QACnB;AACA,QAAA,MAAM,QAAA,GAAW,SAAS,YAAA,CAAa,MAAA;AACvC,QAAA,IAAI,QAAA,KAAa,CAAA,IAAK,IAAA,CAAK,YAAA,KAAiB,CAAA,EAAG;AAC7C,UAAA,MAAM,IAAI,mBAAA;AAAA,YACR;AAAA,WACF;AAAA,QACF;AACA,QAAA,SAAA,CAAU,KAAK,QAAQ,CAAA;AAAA,MACzB;AAEA,MAAA,IAAA,CAAK,MAAA,CAAO,GAAA,CAAI,GAAA,EAAK,KAAK,CAAA;AAC1B,MAAA,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,GAAA,EAAK,SAAS,CAAA;AAAA,IAC9B;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,6CAA6C,CAAA;AAAA,IACxE;AAEA,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,YAAA,CAAa,CAAC,CAAA;AACjC,IAAA,MAAM,QAAA,GAAW,KAAA,CAAM,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,MAAM,QAAA,GAAW,KAAA,CAAM,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACnC,IAAA,MAAM,cAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,IAAI,OAAA,GAAU,EAAA;AACd,MAAA,IAAI,QAAA,GAAW,CAAA;AAEf,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,IAAA,GAAO,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAA,GAAI,QAAA,GAAW,CAAC,CAAC,CAAA;AAC/D,QAAA,IAAI,OAAO,OAAA,EAAS;AAClB,UAAA,OAAA,GAAU,IAAA;AACV,UAAA,QAAA,GAAW,IAAA,CAAK,QAAA,GAAW,CAAC,CAAA,IAAK,CAAA;AAAA,QACnC;AAAA,MACF;AAEA,MAAA,WAAA,CAAY,KAAK,QAAQ,CAAA;AAAA,IAC3B;AAEA,IAAA,OAAO,MAAA,CAAO,WAAA,EAAa,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,QAAA,IAAY,CAAC,IAAA,CAAK,WAAA,IAAe,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,IAAA,EAAM;AACrF,MAAA,MAAM,IAAI,eAAe,6CAA6C,CAA
A;AAAA,IACxE;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,YAAY,CAAA;AAE7D,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,MAAM,gBAA4B,EAAC;AAEnC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,WAAqB,EAAC;AAE5B,MAAA,KAAA,MAAW,GAAA,IAAO,KAAK,QAAA,EAAU;AAC/B,QAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,WAAA,CAAY,GAAA,CAAI,GAAG,CAAA,IAAK,CAAA;AAC3C,QAAA,MAAM,QAAQ,IAAA,CAAK,MAAA,CAAO,GAAA,CAAI,GAAG,KAAK,EAAC;AACvC,QAAA,MAAM,YAAY,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,GAAG,KAAK,EAAC;AAEzC,QAAA,IAAI,OAAA,GAAU,IAAA,CAAK,GAAA,CAAI,KAAK,CAAA;AAE5B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,UAAA,MAAM,CAAA,GAAI,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAA;AACrD,UAAA,MAAMA,KAAAA,GAAO,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AACzB,UAAA,MAAM,QAAA,GAAA,CAAY,SAAA,CAAU,CAAC,CAAA,IAAK,KAAK,IAAA,CAAK,YAAA;AAG5C,UAAA,OAAA,IAAW,MAAM,IAAA,CAAK,GAAA,CAAI,CAAA,GAAI,IAAA,CAAK,KAAK,QAAQ,CAAA;AAChD,UAAA,OAAA,IAAA,CAAY,CAAA,GAAIA,KAAAA,KAAS,CAAA,IAAK,CAAA,GAAI,QAAA,CAAA;AAAA,QACpC;AAEA,QAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,MACvB;AAGA,MAAA,MAAM,UAAA,GAAa,IAAA,CAAK,GAAA,CAAI,GAAG,QAAQ,CAAA;AACvC,MAAA,MAAM,QAAA,GAAW,SAAS,GAAA,CAAI,CAAC,OAAO,IAAA,CAAK,GAAA,CAAI,EAAA,GAAK,UAAU,CAAC,CAAA;AAC/D,MAAA,MAAM,WAAA,GAAc,SAAS,MAAA,CAAO,CAAC,GAAG,CAAA,KAAM,CAAA,GAAI,GAAG,CAAC,CAAA;AACtD,MAAA,MAAM,QAAQ,QAAA,CAAS,GAAA,CAAI,CAAC,EAAA,KAAO,KAAK,WAAW,CAAA;AAEnD,MAAA,aAAA,CAAc,KAAK,KAAK,CAAA;AAAA,IAC1B;AAEA,IAAA,OAAO,OAAO,aAAa,CAAA;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC5B,IAAA,IAAI,KAAA,CAAM,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACzB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,KAAA,CAAM,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OAChF;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AAEd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAC,CAAA,KAAM,MAAA,CAAO,MAAM,IAAA,CAAK,KAAA,CAAM,MAAA,GAAS,CAAC,CAAC,CAAA,EAAG;AACzE,QAAA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,OAAA,GAA8B;AAChC,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,QAAA,EAAU;AAClC,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,OAAO,OAAO,IAAA,CAAK,QAAA,EAAU,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,cAAc,IAAA,CAAK;AAAA,KACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,oBAAoB,0DAA0D,CAAA;AAAA,EAC1F;AACF;;;AC7TA,IAAe,iBAAf,MAA8B;AAAA,EACT,UAAA;AAAA,EACA,OAAA;AAAA,EACA,MAAA;AAAA,EAET,OAAA;AAAA,EACA,OAAA;AAAA,EACA,YAAA;AAAA,EACA,MAAA,GAAS,KAAA;AAAA,EAEnB,WAAA,CACE,OAAA,GAII,EAAC,EACL;AACA,IAAA,IAAA,CAAK,UAAA,GAAa,QAAQ,UAAA,IAAc,CAAA;AACxC,IAAA,IAAA,CAAK,OAAA,GAAU,QAAQ,OAAA,IAAW,SAAA;AAClC,IAAA,IAAA,CAAK,MAAA,GAAS,QAAQ,MAAA,IAAU,WAAA;AAEhC,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,UAAU,CAAA,IAAK,IAAA,CAAK,aAAa,CAAA,EAAG;AAC7D
,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,oCAAA;AAAA,QACA,YAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,IAAA,CAAK,OAAA,KAAY,SAAA,IAAa,IAAA,CAAK,YAAY,UAAA,EAAY;AAC7D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,kDAAA,EAAqD,MAAA,CAAO,IAAA,CAAK,OAAO,CAAC,CAAA,CAAA;AAAA,QACzE,SAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,IAAA,CAAK,MAAA,KAAW,WAAA,IAAe,IAAA,CAAK,WAAW,WAAA,EAAa;AAC9D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,MAAA,CAAO,IAAA,CAAK,MAAM,CAAC,CAAA,CAAA;AAAA,QAC1E,QAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAAA,EACF;AAAA,EAEU,iBAAA,CAAkB,IAAc,EAAA,EAAsB;AAC9D,IAAA,IAAI,IAAA,GAAO,CAAA;AACX,IAAA,IAAI,IAAA,CAAK,WAAW,WAAA,EAAa;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,CAAG,QAAQ,CAAA,EAAA,EAAK;AAClC,QAAA,MAAM,QAAQ,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AACtC,QAAA,IAAA,IAAQ,IAAA,GAAO,IAAA;AAAA,MACjB;AACA,MAAA,OAAO,IAAA,CAAK,KAAK,IAAI,CAAA;AAAA,IACvB,CAAA,MAAO;AAEL,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,CAAG,QAAQ,CAAA,EAAA,EAAK;AAClC,QAAA,IAAA,IAAQ,IAAA,CAAK,KAAK,EAAA,CAAG,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAE,CAAA;AAAA,MAC9C;AACA,MAAA,OAAO,IAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEU,aAAa,MAAA,EAA8D;AACnF,IAAA,IAAI,CAAC,KAAK,OAAA,EAAS;AACjB,MAAA,MAAM,IAAI,eAAe,+CAA+C,CAAA;AAAA,IAC1E;AAEA,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,OAAA,CAAQ,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC1C,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAE3C,IAAA,MAAM,YAAwD,EAAC;AAE/D,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,cAAwB,EAAC;AAC/B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,WAAA,CAAY,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,IAAA,CAAK,OAAA,CAAQ,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACrF;AACA,MAAA,MAAM,IAAA,GAAO,IAAA,CAAK,iBAAA,CAAkB,MAAA,EAAQ,WAAW,CAAA;AACvD,MAAA,SAAA,CAAU,KAAK,EAAE,KAAA,EAAO,CAAA,EAAG,QAAA,EAAU,MAAM,CAAA;AAAA,IAC7C;AAEA,IAAA,SAAA,CAAU,KAAK,CAAC,CAAA,EAAG,MAAM,CAAA,CAAE,QAAA,GAAW,EAAE,QAAQ,CAAA;AAChD,IAAA,OAAO,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,IAAA,CAAK,UAAU,CAAA;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,YAAY,IAAA,CAAK,UAAA;AAAA,MACjB,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,QAAQ,IAAA,CAAK;AAAA,KACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,oBAAoB,0DAA0D,CAAA;AAAA,EAC1F;AACF,CAAA;AAkCO,IAAM,oBAAA,GAAN,cAAmC,cAAA,CAAqC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAY7E,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AACtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAI,IAAA,CAAK,aAAa,QAAA,EAAU;AAC9B,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,0CAAA,EAA6C,IAAA,CAAK,UAAU,CAAA,GAAA,EAAM,QAAQ,CAAA,CAAA;AAAA,QAC1E,YAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,OAAA,GAAU,CAAA;AACf,IAAA,IAAA,CAAK,OAAA,GAAU,CAAA;AACf,IAAA,IAAA,CAAK,YAAA,GAAe,SAAA;AACpB,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AAEd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,IAAU,CAAC,KAAK,OAAA,IAAW,CAAC,KAAK,OAAA,EAAS;AAClD,MAAA,MAAM,IAAI,eAAe,uDAAuD,CAAA;AAAA,IAClF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,sBAAsB,CAAA;AAEvE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,MAAM,cAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,
MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MAC1D;AAEA,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,MAAM,CAAA;AAG1C,MAAA,MAAM,KAAA,uBAAY,GAAA,EAAoB;AAEtC,MAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,QAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,KAAK,OAAA,CAAQ,MAAA,GAAS,QAAA,CAAS,KAAK,CAAC,CAAA;AAC5E,QAAA,MAAM,SAAS,IAAA,CAAK,OAAA,KAAY,YAAY,CAAA,GAAI,CAAA,IAAK,SAAS,QAAA,GAAW,KAAA,CAAA;AACzE,QAAA,KAAA,CAAM,IAAI,KAAA,EAAA,CAAQ,KAAA,CAAM,IAAI,KAAK,CAAA,IAAK,KAAK,MAAM,CAAA;AAAA,MACnD;AAGA,MAAA,IAAI,QAAA,GAAW,EAAA;AACf,MAAA,IAAI,cAAA,GAAiB,CAAA;AACrB,MAAA,KAAA,MAAW,CAAC,KAAA,EAAO,SAAS,CAAA,IAAK,KAAA,CAAM,SAAQ,EAAG;AAChD,QAAA,IAAI,YAAY,QAAA,EAAU;AACxB,UAAA,QAAA,GAAW,SAAA;AACX,UAAA,cAAA,GAAiB,KAAA;AAAA,QACnB;AAAA,MACF;AAEA,MAAA,WAAA,CAAY,KAAK,cAAc,CAAA;AAAA,IACjC;AAEA,IAAA,OAAO,MAAA,CAAO,WAAA,EAAa,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,IAAU,CAAC,KAAK,OAAA,IAAW,CAAC,KAAK,OAAA,EAAS;AAClD,MAAA,MAAM,IAAI,eAAe,uDAAuD,CAAA;AAAA,IAClF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,sBAAsB,CAAA;AAEvE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAGhC,IAAA,MAAM,QAAA,uBAAe,GAAA,EAAY;AACjC,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,OAAA,CAAQ,MAAM,CAAA,EAAA,EAAK;AAC1C,MAAA,QAAA,CAAS,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,KAAK,OAAA,CAAQ,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACjE;AACA,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,IAAA,CAAK,QAAQ,CAAA,CAAE,KAAK,CAAC,CAAA,EAAG,CAAA,KAAM,CAAA,GAAI,CAAC,CAAA;AAEzD,IAAA,MAAM,gBAA4B,EAAC;AAEnC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MAC1D;AAEA,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,MAAM,CAAA;AAG1C,MAAA,MAAM,KAAA,uBAAY,GAAA,EAAoB;AACtC,MAAA,IAAI,WAAA,GAAc,CAAA;AAElB,MAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,QAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,KAAK,OAAA,CAAQ,MAAA,GAAS,QAAA,CAAS,KAAK,CAAC,CAAA;AAC5E,QAAA,MAAM,SAAS,IAAA,CAAK,OAAA,KAAY,YAAY,CAAA,GAAI,CAAA,IAAK,SAAS,QAAA,GAAW,KAAA,CAAA;AACzE,QAAA,KAAA,CAAM,IAAI,KAAA,EAAA,CAAQ,KAAA,CAAM,IAAI,KAAK,CAAA,IAAK,KAAK,MAAM,CAAA;AACjD,QAAA,WAAA,IAAe,MAAA;AAAA,MACjB;AAGA,MAAA,MAAM,QAAkB,EAAC;AACzB,MAAA,KAAA,MAAW,OAAO,OAAA,EAAS;AACzB,QAAA,KAAA,CAAM,MAAM,KAAA,CAAM,GAAA,CAAI,GAAG,CAAA,IAAK,KAAK,WAAW,CAAA;AAAA,MAChD;AACA,MAAA,aAAA,CAAc,KAAK,KAAK,CAAA;AAAA,IAC1B;AAEA,IAAA,OAAO,OAAO,aAAa,CAAA;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC5B,IAAA,IAAI,KAAA,CAAM,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACzB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,KAAA,CAAM,IAAI,CAAA,IAAA,E
AAO,EAAE,IAAI,CAAA;AAAA,OAChF;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AAEd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAC,CAAC,CAAA,KAAM,MAAA,CAAO,MAAM,IAAA,CAAK,KAAA,CAAM,MAAA,GAAS,CAAC,CAAC,CAAA,EAAG;AACzE,QAAA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AACF;AAwBO,IAAM,mBAAA,GAAN,cAAkC,cAAA,CAAoC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAY3E,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AACtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAI,IAAA,CAAK,aAAa,QAAA,EAAU;AAC9B,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,0CAAA,EAA6C,IAAA,CAAK,UAAU,CAAA,GAAA,EAAM,QAAQ,CAAA,CAAA;AAAA,QAC1E,YAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,OAAA,GAAU,CAAA;AACf,IAAA,IAAA,CAAK,OAAA,GAAU,CAAA;AACf,IAAA,IAAA,CAAK,YAAA,GAAe,SAAA;AACpB,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AAEd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,IAAU,CAAC,KAAK,OAAA,IAAW,CAAC,KAAK,OAAA,EAAS;AAClD,MAAA,MAAM,IAAI,eAAe,sDAAsD,CAAA;AAAA,IACjF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,YAAA,IAAgB,CAAA,EAAG,qBAAqB,CAAA;AAEtE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,MAAM,cAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,SAAmB,EAAC;AAC1B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,MAAA,CAAO,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MAC1D;AAEA,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,MAAM,CAAA;AAG1C,MAAA,IAAI,SAAA,GAAY,CAAA;AAChB,MAAA,IAAI,UAAA,GAAa,CAAA;AAEjB,MAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,QAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,IAAA,CAAK,OAAA,CAAQ,IAAA,CAAK,KAAK,OAAA,CAAQ,MAAA,GAAS,QAAA,CAAS,KAAK,CAAC,CAAA;AAC5E,QAAA,MAAM,SAAS,IAAA,CAAK,OAAA,KAAY,YAAY,CAAA,GAAI,CAAA,IAAK,SAAS,QAAA,GAAW,KAAA,CAAA;AACzE,QAAA,SAAA,IAAa,KAAA,GAAQ,MAAA;AACrB,QAAA,UAAA,IAAc,MAAA;AAAA,MAChB;AAEA,MAAA,WAAA,CAAY,IAAA,CAAK,YAAY,UAAU,CAAA;AAAA,IACzC;AAEA,IAAA,OAAO,OAAO,WAAW,CAAA;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC5B,IAAA,IAAI,KAAA,CAAM,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AACzB,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,KAAA,CAAM,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OAChF;AAAA,IACF;AAEA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IACtC;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,QAAQ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AACzC,MAAA,MA
AM,WAAW,MAAA,CAAO,KAAA,CAAM,KAAK,KAAA,CAAM,MAAA,GAAS,CAAC,CAAC,CAAA;AACpD,MAAA,KAAA,IAAA,CAAU,QAAQ,QAAA,KAAa,CAAA;AAC/B,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAAA,IAC9B;AAEA,IAAA,IAAI,UAAU,CAAA,EAAG;AACf,MAAA,OAAO,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA;AAAA,IAC7B;AAEA,IAAA,OAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EACrB;AACF;;;AC/bO,IAAM,YAAN,MAAsC;AAAA;AAAA,EAE1B,CAAA;AAAA;AAAA,EAGA,OAAA;AAAA;AAAA,EAGA,GAAA;AAAA;AAAA,EAGT,UAAoB,EAAC;AAAA;AAAA,EAGrB,IAAA,GAAO,CAAA;AAAA;AAAA,EAGP,SAAA,GAAY,CAAA;AAAA;AAAA,EAGZ,cAAwB,EAAC;AAAA;AAAA,EAGzB,MAAA,GAAS,KAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUjB,WAAA,CACE,OAAA,GAII,EAAC,EACL;AACA,IAAA,IAAA,CAAK,CAAA,GAAI,QAAQ,CAAA,IAAK,CAAA;AACtB,IAAA,IAAA,CAAK,OAAA,GAAU,QAAQ,OAAA,IAAW,GAAA;AAClC,IAAA,IAAA,CAAK,GAAA,GAAM,QAAQ,GAAA,IAAO,IAAA;AAG1B,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,CAAC,CAAA,IAAK,IAAA,CAAK,KAAK,CAAA,EAAG;AAC3C,MAAA,MAAM,IAAI,qBAAA,CAAsB,oBAAA,EAAsB,GAAA,EAAK,KAAK,CAAC,CAAA;AAAA,IACnE;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,OAAO,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACxD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,oCAAA;AAAA,QACA,SAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,GAAG,CAAA,IAAK,IAAA,CAAK,MAAM,CAAA,EAAG;AAC9C,MAAA,MAAM,IAAI,qBAAA,CAAsB,kBAAA,EAAoB,KAAA,EAAO,KAAK,GAAG,CAAA;AAAA,IACrE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAGjB,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,MAAM,QAAkB,EAAC;AAEzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACvD;AACA,MAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AACd,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAGA,IAAA,IAAA,CAAK,WAAA,GAAc,CAAC,GAAG,IAAI,GAAA,CAAI,KAAK,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,IAAI,CAAC,CAAA;AAC3D,IAAA,IAAI,IAAA,CAAK,WAAA,CAAY,MAAA,KAAW,CAAA,EAAG;AACjC,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,gEAAA;AAAA,QACA,GAAA;AAAA,QACA,KAAK,WAAA,CAAY;AAAA,OACnB;AAAA,IACF;AAGA,IAAA,MAAM,OAAA,GAAU,KAAA,CAAM,GAAA,CAAI,CAAC,KAAA,KAAW,KAAA,KAAU,IAAA,CAAK,WAAA,CAAY,CAAC,CAAA,GAAI,EAAA,GAAK,CAAE,CAAA;AAG7E,IAAA,IAAA,CAAK,UAAU,IAAI,KAAA,CAAM,SAAS,CAAA,CAAE,KAAK,CAAC,CAAA;AAC1C,IAAA,IAAA,CAAK,IAAA,GAAO,CAAA;AAWZ,IAAA,MAAM,YAAA,GAAe,IAAA;AAErB,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,IAAA,CAAK,SAAS,IAAA,EAAA,EAAQ;AAC9C,MAAA,IAAI,YAAA,GAAe,CAAA;AAEnB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,EAAA,GAAK,MAAM,CAAC,CAAA;AAClB,QAAA,MAAM,EAAA,GAAK,QAAQ,CAAC,CAAA;AAEpB,QAAA,IAAI,EAAA,KAAO,MAAA,IAAa,EAAA,KAAO,MAAA,EAAW;AAG1C,QAAA,IAAI,WAAW,IAAA,CAAK,IAAA;AACpB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,UAAA,QAAA,IAAA,CAAa,KAAK,OAAA,CAAQ,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,QACjD;AAGA,QAAA,MAAM,SAAS,EAAA,GAAK,QAAA;AAGpB,QAAA,IAAI,SAAS,CAAA,EAAG;AACd,UAAA,YAAA,GAAe,IAAA,CAAK,GAAA,CAAI,YAAA,EAAc,CAAA,GAAI,MAAM,CAAA;AAAA,QAClD;AAYA,QAAA,MAAM,cAAc,IAAA,CAAK,GAAA,CAAI,cAAc,CAAA,IAAO,IAAA,CAAK,IAAI,EAAA,CAAG,CAAA;AAE9D,QAAA,IAAI,SAAS,CAAA,EAAG;AAEd,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,IAAA,CAAK
,QAAQ,CAAC,CAAA,GAAA,CACX,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,KAAM,CAAA,GAAI,WAAA,CAAA,GAAe,cAAc,IAAA,CAAK,CAAA,GAAI,EAAA,IAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UACvF;AACA,UAAA,IAAA,CAAK,IAAA,IAAQ,WAAA,GAAc,IAAA,CAAK,CAAA,GAAI,EAAA;AAAA,QACtC,CAAA,MAAO;AAEL,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,GAAA,CAAK,IAAA,CAAK,QAAQ,CAAC,CAAA,IAAK,MAAM,CAAA,GAAI,WAAA,CAAA;AAAA,UAClD;AAAA,QACF;AAAA,MACF;AAGA,MAAA,IAAI,YAAA,GAAe,KAAK,GAAA,EAAK;AAC3B,QAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,sCAAsC,CAAA;AAAA,IACjE;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,WAAW,CAAA;AAEzD,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,MAAM,cAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AAEjC,MAAA,IAAI,WAAW,IAAA,CAAK,IAAA;AACpB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,QAAA,IAAA,CAAa,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAA;AAAA,MAClF;AAGA,MAAA,MAAM,cAAA,GAAiB,YAAY,CAAA,GAAI,IAAA,CAAK,YAAY,CAAC,CAAA,GAAI,IAAA,CAAK,WAAA,CAAY,CAAC,CAAA;AAC/E,MAAA,WAAA,CAAY,IAAA,CAAK,kBAAkB,CAAC,CAAA;AAAA,IACtC;AAEA,IAAA,OAAO,MAAA,CAAO,WAAA,EAAa,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,4CAA4C,CAAA;AAAA,IACvE;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,WAAW,CAAA;AAEzD,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,MAAM,QAAoB,EAAC;AAE3B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AAEjC,MAAA,IAAI,WAAW,IAAA,CAAK,IAAA;AACpB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,QAAA,IAAA,CAAa,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAA;AAAA,MAClF;AAGA,MAAA,MAAM,KAAK,CAAA,IAAK,CAAA,GAAI,IAAA,CAAK,GAAA,CAAI,CAAC,QAAQ,CAAA,CAAA;AACtC,MAAA,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,GAAI,EAAA,EAAI,EAAE,CAAC,CAAA;AAAA,IACzB;AAEA,IAAA,OAAO,OAAO,KAAK,CAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,MAAA,CAAO,WAAA,CAAY,IAAA,CAAK,WAAA,CAAY,SAAS,CAAC,CAAC,CAAA,KAAM,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA,EAAG;AACrF,QA
AA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,IAAA,GAAe;AACjB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,iDAAiD,CAAA;AAAA,IAC5E;AACA,IAAA,OAAO,MAAA,CAAO,CAAC,IAAA,CAAK,OAAO,CAAC,CAAA;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,SAAA,GAAoB;AACtB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,8CAA8C,CAAA;AAAA,IACzE;AACA,IAAA,OAAO,IAAA,CAAK,IAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,GAAG,IAAA,CAAK,CAAA;AAAA,MACR,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,KAAK,IAAA,CAAK;AAAA,KACZ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,oBAAoB,yDAAyD,CAAA;AAAA,EACzF;AACF;AAsBO,IAAM,YAAN,MAAqC;AAAA;AAAA,EAEzB,CAAA;AAAA;AAAA,EAGA,OAAA;AAAA;AAAA,EAGA,OAAA;AAAA;AAAA,EAGA,GAAA;AAAA;AAAA,EAGT,UAAoB,EAAC;AAAA;AAAA,EAGrB,IAAA,GAAO,CAAA;AAAA;AAAA,EAGP,SAAA,GAAY,CAAA;AAAA;AAAA,EAGZ,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAKI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,CAAA,GAAI,QAAQ,CAAA,IAAK,CAAA;AACtB,IAAA,IAAA,CAAK,OAAA,GAAU,QAAQ,OAAA,IAAW,GAAA;AAClC,IAAA,IAAA,CAAK,OAAA,GAAU,QAAQ,OAAA,IAAW,GAAA;AAClC,IAAA,IAAA,CAAK,GAAA,GAAM,QAAQ,GAAA,IAAO,IAAA;AAE1B,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,CAAC,CAAA,IAAK,IAAA,CAAK,KAAK,CAAA,EAAG;AAC3C,MAAA,MAAM,IAAI,qBAAA,CAAsB,oBAAA,EAAsB,GAAA,EAAK,KAAK,CAAC,CAAA;AAAA,IACnE;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,OAAO,CAAA,IAAK,IAAA,CAAK,UAAU,CAAA,EAAG;AACtD,MAAA,MAAM,IAAI,qBAAA,CAAsB,sBAAA,EAAwB,SAAA,EAAW,KAAK,OAAO,CAAA;AAAA,IACjF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,OAAO,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACxD,MAAA,MAAM,IAAI,qBAAA,CAAsB,0BAAA,EAA4B,SAAA,EAAW,KAAK,OAAO,CAAA;AAAA,IACrF;AACA,IAAA,IAAI,CAAC,OAAO,QAAA,CAAS,IAAA,CAAK,GAAG,CAAA,IAAK,IAAA,CAAK,MAAM,CAAA,EAAG;AAC9C,MAAA,MAAM,IAAI,qBAAA,CAAsB,kBAAA,EAAoB,KAAA,EAAO,KAAK,GAAG,CAAA;AAAA,IACrE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAGjB,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,MAAM,QAAkB,EAAC;AAEzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACvD;AACA,MAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AACd,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAGA,IAAA,IAAA,CAAK,UAAU,IAAI,KAAA,CAAM,SAAS,CAAA,CAAE,KAAK,CAAC,CAAA;AAC1C,IAAA,IAAA,CAAK,IAAA,GAAO,CAAA;AAEZ,IAAA,MAAM,YAAA,GAAe,IAAA;AAErB,IAAA,KAAA,IAAS,IAAA,GAAO,CAAA,EAAG,IAAA,GAAO,IAAA,CAAK,SAAS,IAAA,EAAA,EAAQ;AAC9C,MAAA,IAAI,SAAA,GAAY,CAAA;AAEhB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,MAAM,EAAA,GAAK,MAAM,CAAC,CAAA;AAClB,QAAA,MAAM,EAAA,GAAK,MAAM,CAAC,CAAA;AAElB,QAAA,IAAI,EAAA,KAAO,MAAA,IAAa,EAAA,KAAO,MAAA,EAAW;AAG1C,QAAA,IAAI,OAAO,IAAA,CAAK,IAAA;AAChB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,UAAA,IAAA,IAAA,CAAS,KAAK,OAAA,CAAQ,CAAC,KAAK,CAAA,KAAM,EAAA,CAAG,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,QAC7C;AAEA,QAAA,MAAM,QAAQ,IAAA,GAAO,EAAA;AACrB,QAAA,MAAM,QAAA,GAAW,IAAA,CAAK,GAAA,CAAI,KAAK,CAAA;AAG/B,QAAA,IAAI,QAAA,GAAW,KAAK,OAAA,EAAS;AAC3B,UAAA,SAAA,IAAa,WAAW,IAAA,CAAK,OAAA;AAG7B,UAAA,MAAM,IAAA,GAAO,KAA
A,GAAQ,CAAA,GAAI,CAAA,GAAI,EAAA;AAE7B,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,IAAA,CAAK,QAAQ,CAAC,CAAA,GAAA,CACX,KAAK,OAAA,CAAQ,CAAC,KAAK,CAAA,IACpB,YAAA,IAAgB,KAAK,CAAA,GAAI,IAAA,IAAQ,GAAG,CAAC,CAAA,IAAK,MAAM,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,CAAA,CAAA;AAAA,UACvE;AACA,UAAA,IAAA,CAAK,IAAA,IAAQ,YAAA,GAAe,IAAA,CAAK,CAAA,GAAI,IAAA;AAAA,QACvC,CAAA,MAAO;AAEL,UAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,YAAA,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,GAAA,CAAK,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,IAAK,YAAA,IAAgB,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,CAAA;AAAA,UAChF;AAAA,QACF;AAAA,MACF;AAEA,MAAA,IAAI,SAAA,GAAY,QAAA,GAAW,IAAA,CAAK,GAAA,EAAK;AACnC,QAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,sCAAsC,CAAA;AAAA,IACjE;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,KAAK,CAAA;AAEnD,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAEhC,IAAA,MAAM,cAAwB,EAAC;AAE/B,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,IAAI,OAAO,IAAA,CAAK,IAAA;AAChB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,IAAA,IAAA,CAAS,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,IAAK,CAAA,IAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAA;AAAA,MAC9E;AACA,MAAA,WAAA,CAAY,KAAK,IAAI,CAAA;AAAA,IACvB;AAEA,IAAA,OAAO,OAAO,WAAW,CAAA;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AAEA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IACtC;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,QAAQ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AACzC,MAAA,MAAM,QAAQ,MAAA,CAAO,WAAA,CAAY,KAAK,WAAA,CAAY,MAAA,GAAS,CAAC,CAAC,CAAA;AAC7D,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAC5B,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,UAAU,CAAA,GAAK,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA,GAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,GAAG,IAAA,CAAK,CAAA;AAAA,MACR,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,KAAK,IAAA,CAAK;AAAA,KACZ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,oBAAoB,yDAAyD,CAAA;AAAA,EACzF;AACF;;;AC3lBO,IAAM,yBAAN,MAAmD;AAAA,EACvC,WAAA;AAAA,EACA,QAAA;AAAA,EACA,eAAA;AAAA,EACA,cAAA;AAAA,EACA,WAAA;AAAA,EAC
A,SAAA;AAAA,EACA,WAAA;AAAA,EAET,QAAkC,EAAC;AAAA,EACnC,WAAA;AAAA,EACA,SAAA;AAAA,EACA,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAQI,EAAC,EACL;AACA,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,GAAA;AAC1C,IAAA,IAAA,CAAK,QAAA,GAAW,QAAQ,QAAA,IAAY,EAAA;AACpC,IAAA,IAAA,CAAK,eAAA,GAAkB,QAAQ,eAAA,IAAmB,CAAA;AAClD,IAAA,IAAA,CAAK,cAAA,GAAiB,QAAQ,cAAA,IAAkB,CAAA;AAChD,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,MAAA;AAC1C,IAAA,IAAA,CAAK,SAAA,GAAY,QAAQ,SAAA,IAAa,IAAA;AACtC,IAAA,IAAI,OAAA,CAAQ,gBAAgB,MAAA,EAAW;AACrC,MAAA,IAAA,CAAK,cAAc,OAAA,CAAQ,WAAA;AAAA,IAC7B;AAEA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,cAAc,CAAA,EAAG;AAC/D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,8CAAA,EAAiD,KAAK,WAAW,CAAA,CAAA;AAAA,QACjE,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,QAAQ,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACzD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,KAAK,QAAQ,CAAA,CAAA;AAAA,QAC3D,UAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,kBAAkB,CAAA,EAAG;AACvE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,kDAAA,EAAqD,KAAK,eAAe,CAAA,CAAA;AAAA,QACzE,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,cAAc,CAAA,IAAK,IAAA,CAAK,iBAAiB,CAAA,EAAG;AACrE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,iDAAA,EAAoD,KAAK,cAAc,CAAA,CAAA;AAAA,QACvE,gBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,OAAO,IAAA,CAAK,WAAA,KAAgB,QAAA,EAAU;AACxC,MAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,cAAc,CAAA,EAAG;AAC/D,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,CAAA,8CAAA,EAAiD,KAAK,WAAW,CAAA,CAAA;AAAA,UACjE,aAAA;AAAA,UACA,IAAA,CAAK;AAAA,SACP;AAAA,MACF;AAAA,IACF,WAAW,IAAA,CAAK,WAAA,KAAgB,MAAA,IAAU,IAAA,CAAK,gBAAgB,MAAA,EAAQ;AACrE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,oEAAA,EAAuE,MAAA,CAAO,IAAA,CAAK,WAAW,CAAC,CAAA,CAAA;AAAA,QAC/F,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,OAAA,CAAQ,gBAAgB,MAAA,IAAa,CAAC,OAAO,QAAA,CAAS,OAAA,CAAQ,WAAW,CAAA,EAAG;AAC9E,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,8CAAA,EAAiD,MAAA,CAAO,OAAA,CAAQ,WAAW,CAAC,CAAA,CAAA;AAAA,QAC5E,aAAA;AAAA,QACA,OAAA,CAAQ;AAAA,OACV;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,SAAA,GAA0B;AAChC,IAAA,IAAI,IAAA,CAAK,gBAAgB,MAAA,EAAW;AAClC,MAAA,IAAI,OAAO,IAAA,CAAK,WAAA;AAChB,MAAA,OAAO,MAAM;AACX,QAAA,IAAA,GAAA,CAAQ,IAAA,GAAO,OAAO,KAAA,IAAS,MAAA;AAC/B,QAAA,OAAO,IAAA,GAAO,MAAA;AAAA,MAChB,CAAA;AAAA,IACF;AACA,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AACtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAGjB,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACvD;AACA,MAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AACd,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAEA,IAAA,IAAA,CAAK,WAAA,GAAc,CAAC,GAAG,IAAI,GAAA,CAAI,KAAK,CAAC,CAAA,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,IAAI,CAAC,CAAA;AAG3D,IAAA,IAAI,eAAA;AACJ,IAAA,IAAI,OAAO,IAAA,CAAK,WAAA,KAAgB,QAAA,EAAU;AACxC,MAAA,eAAA,GAAkB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,WAAA,EAAa,SAAS,CAAA;AAAA,IACxD,CAAA,MAAA,IAAW,IAAA,CAAK,WAAA,KAAgB,MAAA,EAAQ;AACtC,
MAAA,eAAA,GAAkB,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,SAAS,CAAC,CAAA;AAAA,IACnD,CAAA,MAAO;AACL,MAAA,eAAA,GAAkB,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,SAAS,CAAC,CAAA;AAAA,IACnD;AACA,IAAA,eAAA,GAAkB,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,eAAe,CAAA;AAE7C,IAAA,MAAM,GAAA,GAAM,KAAK,SAAA,EAAU;AAE3B,IAAA,IAAA,CAAK,QAAQ,EAAC;AAEd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AAEzC,MAAA,MAAM,gBAA0B,EAAC;AACjC,MAAA,IAAI,KAAK,SAAA,EAAW;AAClB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,aAAA,CAAc,KAAK,IAAA,CAAK,KAAA,CAAM,GAAA,EAAI,GAAI,QAAQ,CAAC,CAAA;AAAA,QACjD;AAAA,MACF,CAAA,MAAO;AACL,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,aAAA,CAAc,KAAK,CAAC,CAAA;AAAA,QACtB;AAAA,MACF;AAGA,MAAA,MAAM,UAAsB,EAAC;AAC7B,MAAA,MAAM,UAAoB,EAAC;AAC3B,MAAA,KAAA,MAAW,aAAa,aAAA,EAAe;AAErC,QAAA,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,SAAS,CAAA,IAAK,EAAE,CAAA;AACnC,QAAA,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,SAAS,CAAA,IAAK,CAAC,CAAA;AAAA,MACpC;AAGA,MAAA,MAAM,WAAA,GAMF;AAAA,QACF,UAAU,IAAA,CAAK,QAAA;AAAA,QACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,QACtB,gBAAgB,IAAA,CAAK,cAAA;AAAA,QACrB,WAAA,EAAa;AAAA,OACf;AACA,MAAA,IAAI,IAAA,CAAK,gBAAgB,MAAA,EAAW;AAClC,QAAA,WAAA,CAAY,WAAA,GAAc,KAAK,WAAA,GAAc,CAAA;AAAA,MAC/C;AACA,MAAA,MAAM,IAAA,GAAO,IAAI,sBAAA,CAAuB,WAAW,CAAA;AACnD,MAAA,IAAA,CAAK,GAAA,CAAI,MAAA,CAAO,OAAO,CAAA,EAAG,MAAA,CAAO,SAAS,EAAE,KAAA,EAAO,OAAA,EAAS,CAAC,CAAA;AAC7D,MAAA,IAAA,CAAK,KAAA,CAAM,KAAK,IAAI,CAAA;AAAA,IACtB;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,yDAAyD,CAAA;AAAA,IACpF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,wBAAwB,CAAA;AAEtE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAG/B,IAAA,MAAM,iBAA6B,EAAC;AAEpC,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,KAAA,EAAO;AAC7B,MAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC5B,MAAA,MAAM,YAAsB,EAAC;AAC7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,IAAA,CAAK,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,MACrD;AACA,MAAA,cAAA,CAAe,KAAK,SAAS,CAAA;AAAA,IAC/B;AAGA,IAAA,MAAM,mBAA6B,EAAC;AACpC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,KAAA,uBAAY,GAAA,EAAoB;AACtC,MAAA,KAAA,MAAW,aAAa,cAAA,EAAgB;AACtC,QAAA,MAAM,IAAA,GAAO,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA;AAC7B,QAAA,KAAA,CAAM,IAAI,IAAA,EAAA,CAAO,KAAA,CAAM,IAAI,IAAI,CAAA,IAAK,KAAK,CAAC,CAAA;AAAA,MAC5C;AAEA,MAAA,IAAI,QAAA,GAAW,CAAA;AACf,MAAA,IAAI,UAAA,GAAa,CAAA;AACjB,MAAA,KAAA,MAAW,CAAC,KAAA,EAAO,KAAK,CAAA,IAAK,KAAA,EAAO;AAClC,QAAA,IAAI,QAAQ,QAAA,EAAU;AACpB,UAAA,QAAA,GAAW,KAAA;AACX,UAAA,UAAA,GAAa,KAAA;AAAA,QACf;AAAA,MACF;AACA,MAAA,gBAAA,CAAiB,KAAK,UAAU,CAAA;AAAA,IAClC;AAEA,IAAA,OAAO,MAAA,CAAO,gBAAA,EAAkB,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,aAAa,CAAA,EAAmB;AAC9B,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,yDAAyD,CAAA;AAAA,IACpF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,wBAAwB,CAAA;AACtE,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AAEvB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,WAAA,IAAe,EAAC;AACzC,IAAA,MAAM,WAAW,WAAA,CAAY,MAAA;AAE7B,IAAA,IAAI,aAAa,CAAA,EAAG;AAClB,MAAA,MAAM,IAAI,eAAe,yDAAyD,CAAA;AAAA,IACpF;AAEA,IAAA,MAAM,UAAA,uBAAiB,GAAA,EAAoB;AAC3C,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,CAAA,GAAI,YAAY,CAAC
,CAAA;AACvB,MAAA,IAAI,CAAA,KAAM,MAAA,EAAW,UAAA,CAAW,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,IAC1C;AAEA,IAAA,MAAM,QAAoB,KAAA,CAAM,IAAA;AAAA,MAAK,EAAE,QAAQ,QAAA,EAAS;AAAA,MAAG,MACzD,IAAI,KAAA,CAAc,QAAQ,CAAA,CAAE,KAAK,CAAC;AAAA,KACpC;AAEA,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,KAAA,EAAO;AAC7B,MAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,CAAC,CAAA;AACrC,MAAA,MAAM,cAAc,IAAA,CAAK,OAAA;AACzB,MAAA,IAAI,CAAC,WAAA,EAAa;AAClB,MAAA,gBAAA,CAAiB,aAAa,SAAS,CAAA;AAEvC,MAAA,MAAM,IAAI,WAAA,CAAY,IAAA;AACtB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,EAAG,CAAA,EAAA,EAAK;AAC1B,QAAA,MAAM,GAAA,GAAM,OAAO,WAAA,CAAY,IAAA,CAAK,YAAY,MAAA,GAAS,CAAC,KAAK,CAAC,CAAA;AAChE,QAAA,MAAM,OAAA,GAAU,UAAA,CAAW,GAAA,CAAI,GAAG,CAAA;AAClC,QAAA,IAAI,YAAY,MAAA,EAAW;AAE3B,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,MAAM,GAAA,GAAM,MAAM,CAAC,CAAA;AACnB,UAAA,IAAI,GAAA,EAAK;AACP,YAAA,GAAA,CAAI,OAAO,CAAA,GAAA,CACR,GAAA,CAAI,OAAO,KAAK,CAAA,IAAK,MAAA,CAAO,SAAA,CAAU,IAAA,CAAK,UAAU,MAAA,GAAS,CAAA,GAAI,CAAA,GAAI,CAAC,KAAK,CAAC,CAAA;AAAA,UAClF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,IAAA,MAAM,QAAA,GAAW,KAAK,KAAA,CAAM,MAAA,KAAW,IAAI,CAAA,GAAI,CAAA,GAAI,KAAK,KAAA,CAAM,MAAA;AAC9D,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,GAAA,GAAM,MAAM,CAAC,CAAA;AACnB,MAAA,IAAI,GAAA,EAAK;AACP,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,GAAA,CAAI,CAAC,CAAA,GAAA,CAAK,GAAA,CAAI,CAAC,KAAK,CAAA,IAAK,QAAA;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,OAAO,KAAK,CAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,sDAAsD,CAAA;AAAA,IACjF;AACA,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AACA,IAAA,IAAI,OAAA,GAAU,CAAA;AACd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,IAAI,MAAA,CAAO,WAAA,CAAY,IAAA,CAAK,WAAA,CAAY,SAAS,CAAC,CAAC,CAAA,KAAM,MAAA,CAAO,EAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA,EAAG;AACrF,QAAA,OAAA,EAAA;AAAA,MACF;AAAA,IACF;AACA,IAAA,OAAO,UAAU,CAAA,CAAE,IAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAI,OAAA,GAA8B;AAChC,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,IAAU,CAAC,KAAK,WAAA,EAAa;AACrC,MAAA,OAAO,MAAA;AAAA,IACT;AACA,IAAA,OAAO,OAAO,IAAA,CAAK,WAAA,EAAa,EAAE,KAAA,EAAO,SAAS,CAAA;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,MACtB,gBAAgB,IAAA,CAAK,cAAA;AAAA,MACrB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,WAAW,IAAA,CAAK,SAAA;AAAA,MAChB,aAAa,IAAA,CAAK;AAAA,KACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,mBAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACF;AAyBO,IAAM,wBAAN,MAAiD;AAAA,EACrC,WAAA;AAAA,EACA,QAAA;AAAA,EACA,eAAA;AAAA,EACA,cAAA;AAAA,EACA,WAAA;AAAA,EACA,SAAA;AAAA,EACA,WAAA;AAAA,EAET,QAAiC,EAAC;AAAA,EAClC,SAAA;AAAA,EACA,MAAA,GAAS,KAAA;AAAA,EAEjB,WAAA,CACE,OAAA,GAQ
I,EAAC,EACL;AACA,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,GAAA;AAC1C,IAAA,IAAA,CAAK,QAAA,GAAW,QAAQ,QAAA,IAAY,EAAA;AACpC,IAAA,IAAA,CAAK,eAAA,GAAkB,QAAQ,eAAA,IAAmB,CAAA;AAClD,IAAA,IAAA,CAAK,cAAA,GAAiB,QAAQ,cAAA,IAAkB,CAAA;AAChD,IAAA,IAAA,CAAK,WAAA,GAAc,QAAQ,WAAA,IAAe,CAAA;AAC1C,IAAA,IAAA,CAAK,SAAA,GAAY,QAAQ,SAAA,IAAa,IAAA;AACtC,IAAA,IAAI,OAAA,CAAQ,gBAAgB,MAAA,EAAW;AACrC,MAAA,IAAA,CAAK,cAAc,OAAA,CAAQ,WAAA;AAAA,IAC7B;AAEA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,cAAc,CAAA,EAAG;AAC/D,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,8CAAA,EAAiD,KAAK,WAAW,CAAA,CAAA;AAAA,QACjE,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,QAAQ,CAAA,IAAK,IAAA,CAAK,WAAW,CAAA,EAAG;AACzD,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,2CAAA,EAA8C,KAAK,QAAQ,CAAA,CAAA;AAAA,QAC3D,UAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,eAAe,CAAA,IAAK,IAAA,CAAK,kBAAkB,CAAA,EAAG;AACvE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,kDAAA,EAAqD,KAAK,eAAe,CAAA,CAAA;AAAA,QACzE,iBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,CAAC,OAAO,SAAA,CAAU,IAAA,CAAK,cAAc,CAAA,IAAK,IAAA,CAAK,iBAAiB,CAAA,EAAG;AACrE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,iDAAA,EAAoD,KAAK,cAAc,CAAA,CAAA;AAAA,QACvE,gBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,OAAO,IAAA,CAAK,WAAA,KAAgB,QAAA,EAAU;AACxC,MAAA,IACE,IAAA,CAAK,WAAA,KAAgB,CAAA,KACpB,CAAC,MAAA,CAAO,SAAA,CAAU,IAAA,CAAK,WAAW,CAAA,IAAK,IAAA,CAAK,WAAA,GAAc,CAAA,CAAA,EAC3D;AACA,QAAA,MAAM,IAAI,qBAAA;AAAA,UACR,CAAA,sEAAA,EAAyE,KAAK,WAAW,CAAA,CAAA;AAAA,UACzF,aAAA;AAAA,UACA,IAAA,CAAK;AAAA,SACP;AAAA,MACF;AAAA,IACF,WAAW,IAAA,CAAK,WAAA,KAAgB,MAAA,IAAU,IAAA,CAAK,gBAAgB,MAAA,EAAQ;AACrE,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,oEAAA,EAAuE,MAAA,CAAO,IAAA,CAAK,WAAW,CAAC,CAAA,CAAA;AAAA,QAC/F,aAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AACA,IAAA,IAAI,OAAA,CAAQ,gBAAgB,MAAA,IAAa,CAAC,OAAO,QAAA,CAAS,OAAA,CAAQ,WAAW,CAAA,EAAG;AAC9E,MAAA,MAAM,IAAI,qBAAA;AAAA,QACR,CAAA,8CAAA,EAAiD,MAAA,CAAO,OAAA,CAAQ,WAAW,CAAC,CAAA,CAAA;AAAA,QAC5E,aAAA;AAAA,QACA,OAAA,CAAQ;AAAA,OACV;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,SAAA,GAA0B;AAChC,IAAA,IAAI,IAAA,CAAK,gBAAgB,MAAA,EAAW;AAClC,MAAA,IAAI,OAAO,IAAA,CAAK,WAAA;AAChB,MAAA,OAAO,MAAM;AACX,QAAA,IAAA,GAAA,CAAQ,IAAA,GAAO,OAAO,KAAA,IAAS,MAAA;AAC/B,QAAA,OAAO,IAAA,GAAO,MAAA;AAAA,MAChB,CAAA;AAAA,IACF;AACA,IAAA,OAAO,IAAA,CAAK,MAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,GAAA,CAAI,GAAW,CAAA,EAAiB;AAC9B,IAAA,iBAAA,CAAkB,GAAG,CAAC,CAAA;AAEtB,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAC/B,IAAA,MAAM,SAAA,GAAY,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAChC,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AAEjB,IAAA,MAAM,QAAoB,EAAC;AAC3B,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,MAAM,MAAgB,EAAC;AACvB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,SAAA,EAAW,CAAA,EAAA,EAAK;AAClC,QAAA,GAAA,CAAI,IAAA,CAAK,MAAA,CAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,SAAS,CAAA,GAAI,SAAA,GAAY,CAAC,CAAC,CAAC,CAAA;AAAA,MACvD;AACA,MAAA,KAAA,CAAM,KAAK,GAAG,CAAA;AACd,MAAA,KAAA,CAAM,IAAA,CAAK,OAAO,CAAA,CAAE,IAAA,CAAK,EAAE,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,IACzC;AAEA,IAAA,IAAI,eAAA;AACJ,IAAA,IAAI,OAAO,IAAA,CAAK,WAAA,KAAgB,QAAA,EAAU;AACxC,MAAA,IAAI,IAAA,CAAK,gBAAgB,CAAA,EAAK;AAC5B,QAAA,eAAA,GAAkB,SAAA;AAAA,MACpB,CAAA,MAAO;AACL,QAAA,eAAA,GAAkB,IAAA,CAAK,GAAA,CAAI,IAAA,CAAK,WAAA,EAAa,SAAS,CAAA;AAAA,MACxD;AAAA,IACF,CAAA,MAAA,IAAW,IAAA,CAAK,WAAA,KAAgB,MAAA,EAAQ;AACtC,MAAA,eAAA,GAAkB,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,SAAS,CAAC,CAAA;AAAA,IACnD,CAAA,MAAO;AACL,MAAA,eAAA,
GAAkB,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,SAAS,CAAC,CAAA;AAAA,IACnD;AACA,IAAA,eAAA,GAAkB,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,eAAe,CAAA;AAE7C,IAAA,MAAM,GAAA,GAAM,KAAK,SAAA,EAAU;AAE3B,IAAA,IAAA,CAAK,QAAQ,EAAC;AAEd,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,IAAA,CAAK,aAAa,CAAA,EAAA,EAAK;AACzC,MAAA,MAAM,gBAA0B,EAAC;AACjC,MAAA,IAAI,KAAK,SAAA,EAAW;AAClB,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,aAAA,CAAc,KAAK,IAAA,CAAK,KAAA,CAAM,GAAA,EAAI,GAAI,QAAQ,CAAC,CAAA;AAAA,QACjD;AAAA,MACF,CAAA,MAAO;AACL,QAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,UAAA,aAAA,CAAc,KAAK,CAAC,CAAA;AAAA,QACtB;AAAA,MACF;AAGA,MAAA,MAAM,UAAsB,EAAC;AAC7B,MAAA,MAAM,UAAoB,EAAC;AAC3B,MAAA,KAAA,MAAW,aAAa,aAAA,EAAe;AACrC,QAAA,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,SAAS,CAAA,IAAK,EAAE,CAAA;AACnC,QAAA,OAAA,CAAQ,IAAA,CAAK,KAAA,CAAM,SAAS,CAAA,IAAK,CAAC,CAAA;AAAA,MACpC;AAEA,MAAA,MAAM,WAAA,GAMF;AAAA,QACF,UAAU,IAAA,CAAK,QAAA;AAAA,QACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,QACtB,gBAAgB,IAAA,CAAK,cAAA;AAAA,QACrB,WAAA,EAAa;AAAA,OACf;AACA,MAAA,IAAI,IAAA,CAAK,gBAAgB,MAAA,EAAW;AAClC,QAAA,WAAA,CAAY,WAAA,GAAc,KAAK,WAAA,GAAc,CAAA;AAAA,MAC/C;AACA,MAAA,MAAM,IAAA,GAAO,IAAI,qBAAA,CAAsB,WAAW,CAAA;AAClD,MAAA,IAAA,CAAK,IAAI,MAAA,CAAO,OAAO,CAAA,EAAG,MAAA,CAAO,OAAO,CAAC,CAAA;AACzC,MAAA,IAAA,CAAK,KAAA,CAAM,KAAK,IAAI,CAAA;AAAA,IACtB;AAEA,IAAA,IAAA,CAAK,MAAA,GAAS,IAAA;AACd,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,QAAQ,CAAA,EAAmB;AACzB,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,wDAAwD,CAAA;AAAA,IACnF;AAEA,IAAA,qBAAA,CAAsB,CAAA,EAAG,IAAA,CAAK,SAAA,IAAa,CAAA,EAAG,uBAAuB,CAAA;AAErE,IAAA,MAAM,QAAA,GAAW,CAAA,CAAE,KAAA,CAAM,CAAC,CAAA,IAAK,CAAA;AAE/B,IAAA,MAAM,iBAA6B,EAAC;AAEpC,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,KAAA,EAAO;AAC7B,MAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAC5B,MAAA,MAAM,YAAsB,EAAC;AAC7B,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,QAAA,SAAA,CAAU,IAAA,CAAK,OAAO,KAAA,CAAM,IAAA,CAAK,MAAM,MAAA,GAAS,CAAC,CAAC,CAAC,CAAA;AAAA,MACrD;AACA,MAAA,cAAA,CAAe,KAAK,SAAS,CAAA;AAAA,IAC/B;AAGA,IAAA,MAAM,mBAA6B,EAAC;AACpC,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,QAAA,EAAU,CAAA,EAAA,EAAK;AACjC,MAAA,IAAI,GAAA,GAAM,CAAA;AACV,MAAA,KAAA,MAAW,aAAa,cAAA,EAAgB;AACtC,QAAA,GAAA,IAAO,SAAA,CAAU,CAAC,CAAA,IAAK,CAAA;AAAA,MACzB;AACA,MAAA,gBAAA,CAAiB,IAAA,CAAK,GAAA,GAAM,IAAA,CAAK,KAAA,CAAM,MAAM,CAAA;AAAA,IAC/C;AAEA,IAAA,OAAO,OAAO,gBAAgB,CAAA;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,KAAA,CAAM,GAAW,CAAA,EAAmB;AAClC,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,IAAI,eAAe,qDAAqD,CAAA;AAAA,IAChF;AACA,IAAA,IAAI,CAAA,CAAE,SAAS,CAAA,EAAG;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAA,kCAAA,EAAqC,CAAA,CAAE,IAAI,CAAA,CAAE,CAAA;AAAA,IACpE;AACA,IAAA,gBAAA,CAAiB,GAAG,GAAG,CAAA;AACvB,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,MAAM,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA,IAAK,CAAA;AACpC,MAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,EAAG;AACzB,QAAA,MAAM,IAAI,oBAAoB,2CAA2C,CAAA;AAAA,MAC3E;AAAA,IACF;AACA,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA;AAClC,IAAA,IAAI,WAAA,CAAY,IAAA,KAAS,CAAA,CAAE,IAAA,EAAM;AAC/B,MAAA,MAAM,IAAI,UAAA;AAAA,QACR,CAAA,oDAAA,EAAuD,WAAA,CAAY,IAAI,CAAA,IAAA,EAAO,EAAE,IAAI,CAAA;AAAA,OACtF;AAAA,IACF;AAEA,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,IAAA,IAAI,KAAA,GAAQ,CAAA;AAEZ,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,KAAA,IAAS,OAAO,CAAA,CAAE,IAAA,CAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AAAA,IA
CtC;AACA,IAAA,KAAA,IAAS,CAAA,CAAE,IAAA;AAEX,IAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,CAAA,CAAE,MAAM,CAAA,EAAA,EAAK;AAC/B,MAAA,MAAM,QAAQ,MAAA,CAAO,CAAA,CAAE,KAAK,CAAA,CAAE,MAAA,GAAS,CAAC,CAAC,CAAA;AACzC,MAAA,MAAM,QAAQ,MAAA,CAAO,WAAA,CAAY,KAAK,WAAA,CAAY,MAAA,GAAS,CAAC,CAAC,CAAA;AAC7D,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAC5B,MAAA,KAAA,IAAA,CAAU,QAAQ,KAAA,KAAU,CAAA;AAAA,IAC9B;AAEA,IAAA,OAAO,UAAU,CAAA,GAAK,KAAA,KAAU,IAAI,CAAA,GAAM,CAAA,GAAO,IAAI,KAAA,GAAQ,KAAA;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAA,GAAqC;AACnC,IAAA,OAAO;AAAA,MACL,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,UAAU,IAAA,CAAK,QAAA;AAAA,MACf,iBAAiB,IAAA,CAAK,eAAA;AAAA,MACtB,gBAAgB,IAAA,CAAK,cAAA;AAAA,MACrB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,WAAW,IAAA,CAAK,SAAA;AAAA,MAChB,aAAa,IAAA,CAAK;AAAA,KACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,UAAU,OAAA,EAAwC;AAChD,IAAA,MAAM,IAAI,mBAAA;AAAA,MACR;AAAA,KACF;AAAA,EACF;AACF","file":"chunk-MLBMYKCG.js","sourcesContent":["// Base types for ML estimators\nexport type {\n Classifier,\n Clusterer,\n Estimator,\n OutlierDetector,\n Regressor,\n Transformer,\n} from \"./base\";\n// Clustering models\nexport { DBSCAN } from \"./clustering/DBSCAN\";\nexport { KMeans } from \"./clustering/KMeans\";\n// Dimensionality reduction\nexport { PCA } from \"./decomposition\";\n// Ensemble methods\nexport {\n GradientBoostingClassifier,\n GradientBoostingRegressor,\n} from \"./ensemble\";\n// Linear models - regression and classification\nexport { Lasso } from \"./linear/Lasso\";\nexport { LinearRegression } from \"./linear/LinearRegression\";\nexport { LogisticRegression } from \"./linear/LogisticRegression\";\nexport { Ridge } from \"./linear/Ridge\";\n// Manifold learning\nexport { TSNE } from \"./manifold\";\n// Naive Bayes\nexport { GaussianNB } from \"./naive_bayes\";\n// Neighbors\nexport { KNeighborsClassifier, KNeighborsRegressor } from \"./neighbors\";\n// Support Vector Machines\nexport { LinearSVC, LinearSVR } from \"./svm\";\n// Tree-based models\nexport {\n DecisionTreeClassifier,\n DecisionTreeRegressor,\n RandomForestClassifier,\n RandomForestRegressor,\n} from \"./tree\";\n","/**\n * Internal validation utilities for ML models.\n * This file is not exported from the public API.\n *\n * @internal\n */\n\nimport { DataValidationError, ShapeError } from \"../core\";\nimport type { Tensor } from \"../ndarray\";\nimport { isContiguous } from \"../ndarray/tensor/strides\";\n\nexport function assertContiguous(t: Tensor, name: string): void {\n if (!isContiguous(t.shape, t.strides)) {\n throw new DataValidationError(\n `${name} must be contiguous in row-major order; materialize a contiguous tensor before passing to ML routines`\n );\n }\n}\n\n/**\n * Validate inputs for supervised learning fit methods.\n *\n * Checks:\n * - X is 2D, y is 1D\n * - X and y have matching number of samples\n * - No empty data (at least 1 sample and 1 feature)\n * - No NaN or Inf values\n *\n * @param X - Feature matrix of shape (n_samples, n_features)\n * @param y - Target vector of shape (n_samples,)\n * @throws {ShapeError} If dimensions are invalid\n * @throws {DataValidationError} If data contains invalid values\n *\n * @internal\n */\nexport function validateFitInputs(X: Tensor, y: Tensor): void {\n // Check dimensions\n if (X.ndim !== 2) {\n throw new ShapeError(`X must be 2-dimensional; got ndim=${X.ndim}`);\n }\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(X, \"X\");\n assertContiguous(y, \"y\");\n\n // Check for empty data\n const 
nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n if (nSamples === 0) {\n throw new DataValidationError(\"X must have at least one sample\");\n }\n if (nFeatures === 0) {\n throw new DataValidationError(\"X must have at least one feature\");\n }\n\n // Check shape match\n if (nSamples !== y.shape[0]) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X.shape[0]=${nSamples}, y.shape[0]=${y.shape[0]}`\n );\n }\n\n // Check for NaN/Inf in X\n for (let i = 0; i < X.size; i++) {\n const val = X.data[X.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"X contains non-finite values (NaN or Inf)\");\n }\n }\n\n // Check for NaN/Inf in y\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n}\n\n/**\n * Validate inputs for unsupervised learning fit methods.\n *\n * Checks:\n * - X is 2D\n * - No empty data (at least 1 sample and 1 feature)\n * - No NaN or Inf values\n *\n * @param X - Feature matrix of shape (n_samples, n_features)\n * @throws {ShapeError} If dimensions are invalid\n * @throws {DataValidationError} If data contains invalid values\n *\n * @internal\n */\nexport function validateUnsupervisedFitInputs(X: Tensor): void {\n // Check dimensions\n if (X.ndim !== 2) {\n throw new ShapeError(`X must be 2-dimensional; got ndim=${X.ndim}`);\n }\n assertContiguous(X, \"X\");\n\n // Check for empty data\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n if (nSamples === 0) {\n throw new DataValidationError(\"X must have at least one sample\");\n }\n if (nFeatures === 0) {\n throw new DataValidationError(\"X must have at least one feature\");\n }\n\n // Check for NaN/Inf in X\n for (let i = 0; i < X.size; i++) {\n const val = X.data[X.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"X contains non-finite values (NaN or Inf)\");\n }\n }\n}\n\n/**\n * Validate inputs for prediction methods.\n *\n * Checks:\n * - X is 2D\n * - X has correct number of features\n * - No NaN or Inf values\n *\n * @param X - Feature matrix of shape (n_samples, n_features)\n * @param nFeaturesExpected - Expected number of features from training\n * @param modelName - Name of the model (for error messages)\n * @throws {ShapeError} If dimensions are invalid\n * @throws {DataValidationError} If data contains invalid values\n *\n * @internal\n */\nexport function validatePredictInputs(\n X: Tensor,\n nFeaturesExpected: number,\n modelName: string\n): void {\n // Check dimensions\n if (X.ndim !== 2) {\n throw new ShapeError(`X must be 2-dimensional; got ndim=${X.ndim}`);\n }\n assertContiguous(X, \"X\");\n\n // Check feature count\n const nFeatures = X.shape[1] ?? 0;\n if (nFeatures !== nFeaturesExpected) {\n throw new ShapeError(\n `X has ${nFeatures} features but ${modelName} was fitted with ${nFeaturesExpected} features`\n );\n }\n\n // Check for NaN/Inf\n for (let i = 0; i < X.size; i++) {\n const val = X.data[X.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"X contains non-finite values (NaN or Inf)\");\n }\n }\n}\n","import { InvalidParameterError, NotFittedError, NotImplementedError } from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { validateUnsupervisedFitInputs } from \"../_validation\";\nimport type { Clusterer } from \"../base\";\n\n/**\n * DBSCAN (Density-Based Spatial Clustering of Applications with Noise).\n *\n * Clusters points based on density. Points in high-density regions are\n * grouped together, while points in low-density regions are marked as noise.\n *\n * **Algorithm**:\n * 1. For each point, find all neighbors within eps distance\n * 2. If a point has at least minSamples neighbors, it's a core point\n * 3. Core points and their neighbors form clusters\n * 4. Points not reachable from any core point are noise (label = -1)\n *\n * **Advantages**:\n * - No need to specify number of clusters\n * - Can find arbitrarily shaped clusters\n * - Robust to outliers\n *\n * @example\n * ```ts\n * import { DBSCAN } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [2, 2], [2, 3], [8, 7], [8, 8], [25, 80]]);\n * const dbscan = new DBSCAN({ eps: 3, minSamples: 2 });\n * const labels = dbscan.fitPredict(X);\n * // labels: [0, 0, 0, 1, 1, -1] (-1 = noise)\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.cluster.DBSCAN.html | scikit-learn DBSCAN}\n */\nexport class DBSCAN implements Clusterer {\n private eps: number;\n private minSamples: number;\n private metric: \"euclidean\" | \"manhattan\";\n\n private labels_?: Tensor;\n private coreIndices_?: number[];\n private fitted = false;\n\n constructor(\n options: {\n readonly eps?: number;\n readonly minSamples?: number;\n readonly metric?: \"euclidean\" | \"manhattan\";\n } = {}\n ) {\n this.eps = options.eps ?? 0.5;\n this.minSamples = options.minSamples ?? 5;\n this.metric = options.metric ?? \"euclidean\";\n\n if (!Number.isFinite(this.eps) || this.eps <= 0) {\n throw new InvalidParameterError(\"eps must be a finite number > 0\", \"eps\", this.eps);\n }\n if (!Number.isInteger(this.minSamples) || this.minSamples < 1) {\n throw new InvalidParameterError(\n \"minSamples must be an integer >= 1\",\n \"minSamples\",\n this.minSamples\n );\n }\n if (this.metric !== \"euclidean\" && this.metric !== \"manhattan\") {\n throw new InvalidParameterError(\n `metric must be \"euclidean\" or \"manhattan\"`,\n \"metric\",\n this.metric\n );\n }\n }\n\n /**\n * Check if two points are neighbors (distance <= eps).\n */\n private isNeighbor(a: number[], b: number[]): boolean {\n if (this.metric === \"manhattan\") {\n let sum = 0;\n for (let i = 0; i < a.length; i++) {\n sum += Math.abs((a[i] ?? 0) - (b[i] ?? 0));\n if (sum > this.eps) return false;\n }\n return sum <= this.eps;\n }\n\n // Euclidean distance\n let sumSq = 0;\n const epsSq = this.eps * this.eps;\n for (let i = 0; i < a.length; i++) {\n const diff = (a[i] ?? 0) - (b[i] ?? 
0);\n sumSq += diff * diff;\n if (sumSq > epsSq) return false;\n }\n return sumSq <= epsSq;\n }\n\n /**\n * Find all neighbors within eps distance.\n */\n private getNeighbors(data: number[][], pointIdx: number): number[] {\n const neighbors: number[] = [];\n const point = data[pointIdx];\n if (!point) return neighbors;\n\n for (let i = 0; i < data.length; i++) {\n const other = data[i];\n if (other && this.isNeighbor(point, other)) {\n neighbors.push(i);\n }\n }\n\n return neighbors;\n }\n\n /**\n * Perform DBSCAN clustering on data X.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param _y - Ignored (exists for API compatibility)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n fit(X: Tensor, _y?: Tensor): this {\n validateUnsupervisedFitInputs(X);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n // Extract data\n const data: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n data.push(row);\n }\n\n // Initialize labels to undefined (-2 means unvisited)\n const labels: number[] = new Array(nSamples).fill(-2);\n const coreIndices: number[] = [];\n\n let clusterId = 0;\n\n for (let i = 0; i < nSamples; i++) {\n // Skip if already processed\n if (labels[i] !== -2) continue;\n\n const neighbors = this.getNeighbors(data, i);\n\n if (neighbors.length < this.minSamples) {\n // Mark as noise (for now, might be claimed by another cluster later)\n labels[i] = -1;\n continue;\n }\n\n // Start a new cluster\n coreIndices.push(i);\n labels[i] = clusterId;\n\n // Process neighbors\n const seedSet = new Set(neighbors);\n\n for (const q of seedSet) {\n // If noise, claim it for this cluster\n if (labels[q] === -1) {\n labels[q] = clusterId;\n }\n\n // If unvisited\n if (labels[q] === -2) {\n labels[q] = clusterId;\n\n const qNeighbors = this.getNeighbors(data, q);\n\n if (qNeighbors.length >= this.minSamples) {\n coreIndices.push(q);\n // Add new neighbors to seed set\n for (const n of qNeighbors) {\n if (labels[n] === -2 || labels[n] === -1) {\n seedSet.add(n);\n }\n }\n }\n }\n }\n\n clusterId++;\n }\n\n this.labels_ = tensor(labels, { dtype: \"int32\" });\n this.coreIndices_ = coreIndices;\n this.fitted = true;\n\n return this;\n }\n\n /**\n * Predict cluster labels for samples in X.\n *\n * @param _X - Samples (unused)\n * @throws {NotImplementedError} Always — DBSCAN is transductive and does not support prediction on new data\n */\n predict(_X: Tensor): Tensor {\n throw new NotImplementedError(\n \"DBSCAN is a transductive clustering algorithm and does not support prediction on new data. Use fitPredict() instead.\"\n );\n }\n\n /**\n * Fit DBSCAN and return cluster labels.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param _y - Ignored (exists for API compatibility)\n * @returns Cluster labels of shape (n_samples,). 
Noise points are labeled -1.\n * @throws {ShapeError} If X is not 2D\n * @throws {DataValidationError} If X contains NaN/Inf values\n * @throws {NotFittedError} If fit did not produce labels (internal error)\n */\n fitPredict(X: Tensor, _y?: Tensor): Tensor {\n this.fit(X);\n if (!this.labels_) {\n throw new NotFittedError(\"DBSCAN fit did not produce labels\");\n }\n return this.labels_;\n }\n\n /**\n * Get cluster labels assigned during fitting.\n *\n * @returns Tensor of cluster labels. Noise points are labeled -1.\n * @throws {NotFittedError} If the model has not been fitted\n */\n get labels(): Tensor {\n if (!this.fitted || !this.labels_) {\n throw new NotFittedError(\"DBSCAN must be fitted to access labels\");\n }\n return this.labels_;\n }\n\n /**\n * Get indices of core samples discovered during fitting.\n *\n * Core samples are points with at least `minSamples` neighbors within `eps`.\n *\n * @returns Array of core sample indices\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coreIndices(): number[] {\n if (!this.fitted || !this.coreIndices_) {\n throw new NotFittedError(\"DBSCAN must be fitted to access core indices\");\n }\n return [...this.coreIndices_];\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n eps: this.eps,\n minSamples: this.minSamples,\n metric: this.metric,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param params - Parameters to set (eps, minSamples, metric)\n * @returns this\n * @throws {InvalidParameterError} If any parameter value is invalid\n */\n setParams(params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(params)) {\n switch (key) {\n case \"eps\":\n if (typeof value !== \"number\" || value <= 0) {\n throw new InvalidParameterError(\"eps must be > 0\", \"eps\", value);\n }\n this.eps = value;\n break;\n case \"minSamples\":\n if (typeof value !== \"number\" || !Number.isInteger(value) || value < 1) {\n throw new InvalidParameterError(\n \"minSamples must be an integer >= 1\",\n \"minSamples\",\n value\n );\n }\n this.minSamples = value;\n break;\n case \"metric\":\n if (value !== \"euclidean\" && value !== \"manhattan\") {\n throw new InvalidParameterError(\n `metric must be \"euclidean\" or \"manhattan\"`,\n \"metric\",\n value\n );\n }\n this.metric = value;\n break;\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n}\n","import { InvalidParameterError, NotFittedError } from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { validatePredictInputs, validateUnsupervisedFitInputs } from \"../_validation\";\nimport type { Clusterer } from \"../base\";\n\n/**\n * K-Means clustering algorithm.\n *\n * Partitions n samples into k clusters by minimizing the within-cluster\n * sum of squared distances to cluster centroids.\n *\n * **Algorithm**: Lloyd's algorithm (iterative refinement)\n * 1. Initialize k centroids (random or k-means++)\n * 2. Assign each point to nearest centroid\n * 3. Update centroids as mean of assigned points\n * 4. 
Repeat until convergence or max iterations\n *\n * **Time Complexity**: O(n * k * i * d) where:\n * - n = number of samples\n * - k = number of clusters\n * - i = number of iterations\n * - d = number of features\n *\n * @example\n * ```ts\n * import { KMeans } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [1.5, 1.8], [5, 8], [8, 8], [1, 0.6], [9, 11]]);\n * const kmeans = new KMeans({ nClusters: 2, randomState: 42 });\n * kmeans.fit(X);\n *\n * const labels = kmeans.predict(X);\n * console.log('Cluster labels:', labels);\n * console.log('Centroids:', kmeans.clusterCenters);\n * ```\n *\n * @see {@link https://en.wikipedia.org/wiki/K-means_clustering | Wikipedia: K-means}\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html | scikit-learn KMeans}\n */\nexport class KMeans implements Clusterer {\n private nClusters: number;\n private maxIter: number;\n private tol: number;\n private init: \"random\" | \"kmeans++\";\n private randomState: number | undefined;\n\n private clusterCenters_?: Tensor;\n private labels_?: Tensor;\n private inertia_?: number;\n private nIter_?: number;\n private nFeaturesIn_?: number;\n private fitted = false;\n\n /**\n * Create a new K-Means clustering model.\n *\n * @param options - Configuration options\n * @param options.nClusters - Number of clusters (default: 8)\n * @param options.maxIter - Maximum number of iterations (default: 300)\n * @param options.tol - Tolerance for convergence (default: 1e-4)\n * @param options.init - Initialization method: 'random' or 'kmeans++' (default: 'kmeans++')\n * @param options.randomState - Random seed for reproducibility\n */\n constructor(\n options: {\n readonly nClusters?: number;\n readonly maxIter?: number;\n readonly tol?: number;\n readonly init?: \"random\" | \"kmeans++\";\n readonly randomState?: number;\n } = {}\n ) {\n this.nClusters = options.nClusters ?? 8;\n this.maxIter = options.maxIter ?? 300;\n this.tol = options.tol ?? 1e-4;\n this.init = options.init ?? \"kmeans++\";\n if (options.randomState !== undefined) {\n this.randomState = options.randomState;\n }\n\n if (!Number.isInteger(this.nClusters) || this.nClusters < 1) {\n throw new InvalidParameterError(\n \"nClusters must be an integer >= 1\",\n \"nClusters\",\n this.nClusters\n );\n }\n if (!Number.isInteger(this.maxIter) || this.maxIter < 1) {\n throw new InvalidParameterError(\"maxIter must be an integer >= 1\", \"maxIter\", this.maxIter);\n }\n if (!Number.isFinite(this.tol) || this.tol < 0) {\n throw new InvalidParameterError(\"tol must be a finite number >= 0\", \"tol\", this.tol);\n }\n if (this.init !== \"random\" && this.init !== \"kmeans++\") {\n throw new InvalidParameterError(\n `init must be \"random\" or \"kmeans++\"; received ${String(this.init)}`,\n \"init\",\n this.init\n );\n }\n if (options.randomState !== undefined && !Number.isFinite(options.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(options.randomState)}`,\n \"randomState\",\n options.randomState\n );\n }\n }\n\n /**\n * Fit K-Means clustering on training data.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Ignored (exists for compatibility)\n * @returns this - The fitted estimator\n */\n fit(X: Tensor, _y?: Tensor): this {\n validateUnsupervisedFitInputs(X);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n this.nFeaturesIn_ = nFeatures;\n\n if (nSamples < this.nClusters) {\n throw new InvalidParameterError(\n `n_samples=${nSamples} should be >= n_clusters=${this.nClusters}`,\n \"nClusters\",\n this.nClusters\n );\n }\n\n // Initialize centroids\n let centroids = this.initializeCentroids(X);\n\n let prevInertia = Number.POSITIVE_INFINITY;\n\n for (let iter = 0; iter < this.maxIter; iter++) {\n // Assign points to nearest centroid\n const labels = this.assignClusters(X, centroids);\n\n // Update centroids\n const newCentroids: number[][] = [];\n for (let k = 0; k < this.nClusters; k++) {\n const clusterPoints: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n if (Number(labels.data[labels.offset + i]) === k) {\n const point: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n point.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n clusterPoints.push(point);\n }\n }\n\n if (clusterPoints.length > 0) {\n const centroid: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n let sum = 0;\n for (const point of clusterPoints) {\n sum += point[j] ?? 0;\n }\n centroid.push(sum / clusterPoints.length);\n }\n newCentroids.push(centroid);\n } else {\n // Keep old centroid if no points assigned\n const oldCentroid: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n oldCentroid.push(Number(centroids.data[centroids.offset + k * nFeatures + j]));\n }\n newCentroids.push(oldCentroid);\n }\n }\n\n centroids = tensor(newCentroids);\n\n // Calculate inertia (sum of squared distances to centroids)\n const inertia = this.calculateInertia(X, centroids, labels);\n\n // Check convergence\n if (Math.abs(prevInertia - inertia) < this.tol) {\n this.nIter_ = iter + 1;\n break;\n }\n\n prevInertia = inertia;\n this.nIter_ = iter + 1;\n }\n\n this.clusterCenters_ = centroids;\n this.labels_ = this.assignClusters(X, centroids);\n this.inertia_ = this.calculateInertia(X, centroids, this.labels_);\n this.fitted = true;\n\n return this;\n }\n\n /**\n * Predict cluster labels for samples.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Cluster labels of shape (n_samples,)\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.clusterCenters_) {\n throw new NotFittedError(\"KMeans must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"KMeans\");\n\n return this.assignClusters(X, this.clusterCenters_);\n }\n\n /**\n * Fit and predict in one step.\n *\n * @param X - Training data\n * @param y - Ignored (exists for compatibility)\n * @returns Cluster labels\n */\n fitPredict(X: Tensor, _y?: Tensor): Tensor {\n this.fit(X);\n if (!this.labels_) {\n throw new NotFittedError(\"KMeans fit did not produce labels\");\n }\n return this.labels_;\n }\n\n /**\n * Initialize centroids using specified method.\n */\n private initializeCentroids(X: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n\n if (this.init === \"random\") {\n // Random initialization\n const indices = new Set<number>();\n const rng = this.createRNG();\n\n while (indices.size < this.nClusters) {\n indices.add(Math.floor(rng() * nSamples));\n }\n\n const centroids: number[][] = [];\n for (const idx of indices) {\n const centroid: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n centroid.push(Number(X.data[X.offset + idx * nFeatures + j]));\n }\n centroids.push(centroid);\n }\n\n return tensor(centroids);\n } else {\n // K-means++ initialization\n const rng = this.createRNG();\n const centroids: number[][] = [];\n const minDistSq = new Float64Array(nSamples).fill(Infinity);\n\n // Choose first centroid randomly\n const firstIdx = Math.floor(rng() * nSamples);\n const firstCentroid: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n firstCentroid.push(Number(X.data[X.offset + firstIdx * nFeatures + j]));\n }\n centroids.push(firstCentroid);\n\n // Update distances for first centroid\n for (let i = 0; i < nSamples; i++) {\n let dist = 0;\n for (let j = 0; j < nFeatures; j++) {\n const diff = Number(X.data[X.offset + i * nFeatures + j]) - (firstCentroid[j] ?? 0);\n dist += diff * diff;\n }\n minDistSq[i] = dist;\n }\n\n // Choose remaining centroids\n for (let k = 1; k < this.nClusters; k++) {\n // Choose next centroid with probability proportional to distance squared\n const totalDist = minDistSq.reduce((a, b) => a + b, 0);\n let r = rng() * totalDist;\n let nextIdx = 0;\n\n for (let i = 0; i < nSamples; i++) {\n r -= minDistSq[i] ?? 0;\n if (r <= 0) {\n nextIdx = i;\n break;\n }\n }\n\n const nextCentroid: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n nextCentroid.push(Number(X.data[X.offset + nextIdx * nFeatures + j]));\n }\n centroids.push(nextCentroid);\n\n // Update minimum squared distances\n for (let i = 0; i < nSamples; i++) {\n let dist = 0;\n for (let j = 0; j < nFeatures; j++) {\n const diff = Number(X.data[X.offset + i * nFeatures + j]) - (nextCentroid[j] ?? 0);\n dist += diff * diff;\n }\n if (dist < (minDistSq[i] ?? Infinity)) {\n minDistSq[i] = dist;\n }\n }\n }\n\n return tensor(centroids);\n }\n }\n\n /**\n * Assign each sample to nearest centroid.\n */\n private assignClusters(X: Tensor, centroids: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const labels: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n let minDist = Number.POSITIVE_INFINITY;\n let minLabel = 0;\n\n for (let k = 0; k < this.nClusters; k++) {\n let dist = 0;\n for (let j = 0; j < nFeatures; j++) {\n const diff =\n Number(X.data[X.offset + i * nFeatures + j]) -\n Number(centroids.data[centroids.offset + k * nFeatures + j]);\n dist += diff * diff;\n }\n\n if (dist < minDist) {\n minDist = dist;\n minLabel = k;\n }\n }\n\n labels.push(minLabel);\n }\n\n return tensor(labels, { dtype: \"int32\" });\n }\n\n /**\n * Calculate inertia (sum of squared distances to centroids).\n */\n private calculateInertia(X: Tensor, centroids: Tensor, labels: Tensor): number {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
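The k-means++ branch above chooses each new centroid with probability proportional to the squared distance to the nearest centroid picked so far. The following standalone sketch shows that sampling step in isolation over plain arrays; the helper names are mine and it does not use any deepbox types.

```ts
// Squared Euclidean distance between two plain number[] points.
function squaredDistance(a: number[], b: number[]): number {
  let d = 0;
  for (let j = 0; j < a.length; j++) {
    const diff = (a[j] ?? 0) - (b[j] ?? 0);
    d += diff * diff;
  }
  return d;
}

// D² sampling: assumes at least one centroid has already been chosen.
function pickNextCentroid(points: number[][], chosen: number[][], rng: () => number): number[] {
  // Distance of every point to its nearest already-chosen centroid.
  const minDistSq = points.map((p) => Math.min(...chosen.map((c) => squaredDistance(p, c))));
  const total = minDistSq.reduce((a, b) => a + b, 0);

  // Walk the cumulative distribution: far-away points get proportionally more mass.
  let r = rng() * total;
  for (let i = 0; i < points.length; i++) {
    r -= minDistSq[i] ?? 0;
    if (r <= 0) return points[i] ?? [];
  }
  return points[points.length - 1] ?? [];
}
```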
0;\n let inertia = 0;\n\n for (let i = 0; i < nSamples; i++) {\n const label = Number(labels.data[labels.offset + i]);\n for (let j = 0; j < nFeatures; j++) {\n const diff =\n Number(X.data[X.offset + i * nFeatures + j]) -\n Number(centroids.data[centroids.offset + label * nFeatures + j]);\n inertia += diff * diff;\n }\n }\n\n return inertia;\n }\n\n /**\n * Create a simple RNG for reproducibility.\n */\n private createRNG(): () => number {\n if (this.randomState !== undefined) {\n let seed = this.randomState;\n return () => {\n seed = (seed * 9301 + 49297) % 233280;\n return seed / 233280;\n };\n }\n return Math.random;\n }\n\n /**\n * Get cluster centers.\n */\n get clusterCenters(): Tensor {\n if (!this.fitted || !this.clusterCenters_) {\n throw new NotFittedError(\"KMeans must be fitted to access cluster centers\");\n }\n return this.clusterCenters_;\n }\n\n /**\n * Get training labels.\n */\n get labels(): Tensor {\n if (!this.fitted || !this.labels_) {\n throw new NotFittedError(\"KMeans must be fitted to access labels\");\n }\n return this.labels_;\n }\n\n /**\n * Get inertia (sum of squared distances to centroids).\n */\n get inertia(): number {\n if (!this.fitted || this.inertia_ === undefined) {\n throw new NotFittedError(\"KMeans must be fitted to access inertia\");\n }\n return this.inertia_;\n }\n\n /**\n * Get number of iterations run.\n */\n get nIter(): number {\n if (!this.fitted || this.nIter_ === undefined) {\n throw new NotFittedError(\"KMeans must be fitted to access n_iter\");\n }\n return this.nIter_;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nClusters: this.nClusters,\n maxIter: this.maxIter,\n tol: this.tol,\n init: this.init,\n randomState: this.randomState,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param params - Parameters to set (nClusters, maxIter, tol, init, randomState)\n * @returns this\n * @throws {InvalidParameterError} If any parameter value is invalid\n */\n setParams(params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(params)) {\n switch (key) {\n case \"nClusters\":\n if (typeof value !== \"number\" || !Number.isInteger(value) || value < 1) {\n throw new InvalidParameterError(\n \"nClusters must be an integer >= 1\",\n \"nClusters\",\n value\n );\n }\n this.nClusters = value;\n break;\n case \"maxIter\":\n if (typeof value !== \"number\" || !Number.isInteger(value) || value < 1) {\n throw new InvalidParameterError(\"maxIter must be an integer >= 1\", \"maxIter\", value);\n }\n this.maxIter = value;\n break;\n case \"tol\":\n if (typeof value !== \"number\" || value < 0) {\n throw new InvalidParameterError(\"tol must be >= 0\", \"tol\", value);\n }\n this.tol = value;\n break;\n case \"init\":\n if (value !== \"random\" && value !== \"kmeans++\") {\n throw new InvalidParameterError(`init must be \"random\" or \"kmeans++\"`, \"init\", value);\n }\n this.init = value;\n break;\n case \"randomState\":\n if (value !== undefined && (typeof value !== \"number\" || !Number.isFinite(value))) {\n throw new InvalidParameterError(\n \"randomState must be a finite number\",\n \"randomState\",\n value\n );\n }\n this.randomState = value === undefined ? 
undefined : value;\n break;\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { svd } from \"../../linalg\";\nimport { mean, type Tensor, tensor } from \"../../ndarray\";\nimport {\n assertContiguous,\n validatePredictInputs,\n validateUnsupervisedFitInputs,\n} from \"../_validation\";\nimport type { Transformer } from \"../base\";\n\n/**\n * Principal Component Analysis (PCA).\n *\n * Linear dimensionality reduction using Singular Value Decomposition (SVD)\n * to project data to a lower dimensional space.\n *\n * **Algorithm**:\n * 1. Center the data by subtracting the mean\n * 2. Compute SVD: X = U * Σ * V^T\n * 3. Principal components are columns of V\n * 4. Transform data by projecting onto principal components\n *\n * **Time Complexity**: O(min(n*d^2, d*n^2)) where n=samples, d=features\n *\n * @example\n * ```ts\n * import { PCA } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[2.5, 2.4], [0.5, 0.7], [2.2, 2.9], [1.9, 2.2], [3.1, 3.0]]);\n * const pca = new PCA({ nComponents: 1 });\n * pca.fit(X);\n *\n * const XTransformed = pca.transform(X);\n * console.log('Explained variance ratio:', pca.explainedVarianceRatio);\n * ```\n *\n * @see {@link https://en.wikipedia.org/wiki/Principal_component_analysis | Wikipedia: PCA}\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.decomposition.PCA.html | scikit-learn PCA}\n */\nexport class PCA implements Transformer {\n private readonly nComponents?: number;\n private readonly whiten: boolean;\n\n private components_?: Tensor;\n private explainedVariance_?: Tensor;\n private explainedVarianceRatio_?: Tensor;\n private mean_?: Tensor;\n private nComponentsActual_?: number;\n private nFeaturesIn_?: number;\n private fitted = false;\n\n /**\n * Create a new PCA model.\n *\n * @param options - Configuration options\n * @param options.nComponents - Number of components to keep (default: min(n_samples, n_features))\n * @param options.whiten - Whether to whiten the data (default: false)\n */\n constructor(\n options: {\n readonly nComponents?: number;\n readonly whiten?: boolean;\n } = {}\n ) {\n if (options.nComponents !== undefined) {\n this.nComponents = options.nComponents;\n }\n this.whiten = options.whiten ?? false;\n\n if (this.nComponents !== undefined) {\n if (!Number.isInteger(this.nComponents) || this.nComponents < 1) {\n throw new InvalidParameterError(\n \"nComponents must be an integer >= 1\",\n \"nComponents\",\n this.nComponents\n );\n }\n }\n }\n\n /**\n * Fit PCA on training data.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Ignored (exists for compatibility)\n * @returns this\n */\n fit(X: Tensor, _y?: Tensor): this {\n validateUnsupervisedFitInputs(X);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n this.nFeaturesIn_ = nFeatures;\n\n if (nSamples < 2) {\n throw new DataValidationError(\"X must have at least 2 samples for PCA\");\n }\n\n // Determine number of components\n const nComponentsActual = this.nComponents ?? 
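A short sketch of the fitted attributes and parameter handling exposed by the KMeans class above (`labels`, `inertia`, `nIter`, `getParams`/`setParams`), complementing the fit/predict example in its JSDoc. The data values are illustrative only.

```ts
import { KMeans } from 'deepbox/ml';
import { tensor } from 'deepbox/ndarray';

// Illustrative data: two well-separated blobs.
const X = tensor([[0, 0], [0.2, 0.1], [0.1, 0.3], [10, 10], [10.2, 9.9], [9.8, 10.1]]);

const km = new KMeans({ nClusters: 2, init: 'kmeans++', randomState: 7 });
km.fit(X);

// Fitted attributes exposed by the getters above.
console.log('labels:', km.labels);
console.log('inertia:', km.inertia);     // sum of squared distances to assigned centroids
console.log('iterations:', km.nIter);

// Hyperparameters can be read back and updated before a refit.
console.log(km.getParams());
km.setParams({ maxIter: 50, tol: 1e-6 }).fit(X);
```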
Math.min(nSamples, nFeatures);\n if (nComponentsActual > Math.min(nSamples, nFeatures)) {\n throw new InvalidParameterError(\n `nComponents=${nComponentsActual} must be <= min(n_samples, n_features)=${Math.min(nSamples, nFeatures)}`,\n \"nComponents\",\n nComponentsActual\n );\n }\n\n // Center the data\n const meanVec = mean(X, 0);\n this.mean_ = meanVec;\n\n const XCentered = this.centerData(X, meanVec);\n\n // Compute SVD\n const [_U, s, Vt] = svd(XCentered, false);\n\n // Extract components (rows of Vt are principal components)\n const components: number[][] = [];\n for (let i = 0; i < nComponentsActual; i++) {\n const component: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n component.push(Number(Vt.data[Vt.offset + i * nFeatures + j]));\n }\n components.push(component);\n }\n this.components_ = tensor(components);\n\n // Compute explained variance\n const explainedVariance: number[] = [];\n for (let i = 0; i < nComponentsActual; i++) {\n const sv = Number(s.data[s.offset + i]);\n explainedVariance.push((sv * sv) / (nSamples - 1));\n }\n this.explainedVariance_ = tensor(explainedVariance);\n\n // Compute explained variance ratio\n let totalVariance = 0;\n for (let i = 0; i < s.size; i++) {\n const sv = Number(s.data[s.offset + i]);\n totalVariance += (sv * sv) / (nSamples - 1);\n }\n const explainedVarianceRatio =\n totalVariance === 0\n ? explainedVariance.map(() => 0)\n : explainedVariance.map((v) => v / totalVariance);\n this.explainedVarianceRatio_ = tensor(explainedVarianceRatio);\n\n this.nComponentsActual_ = nComponentsActual;\n this.fitted = true;\n\n return this;\n }\n\n /**\n * Transform data to principal component space.\n *\n * @param X - Data of shape (n_samples, n_features)\n * @returns Transformed data of shape (n_samples, n_components)\n */\n transform(X: Tensor): Tensor {\n if (!this.fitted || !this.components_ || !this.mean_) {\n throw new NotFittedError(\"PCA must be fitted before transform\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"PCA\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const nComponents = this.nComponentsActual_ ?? 0;\n\n // Center the data\n const XCentered = this.centerData(X, this.mean_);\n\n // Project onto principal components: X_transformed = X_centered @ components.T\n const transformed: number[][] = [];\n const varianceEps = 1e-12;\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let k = 0; k < nComponents; k++) {\n let sum = 0;\n for (let j = 0; j < nFeatures; j++) {\n sum +=\n Number(XCentered.data[XCentered.offset + i * nFeatures + j]) *\n Number(this.components_.data[this.components_.offset + k * nFeatures + j]);\n }\n // If whitening is enabled, scale each component to unit variance.\n if (this.whiten) {\n const variance = Number(\n this.explainedVariance_?.data[this.explainedVariance_.offset + k] ?? 
0\n );\n row.push(sum / Math.sqrt(variance + varianceEps));\n } else {\n row.push(sum);\n }\n }\n transformed.push(row);\n }\n\n return tensor(transformed);\n }\n\n /**\n * Fit and transform in one step.\n *\n * @param X - Training data\n * @param y - Ignored (exists for compatibility)\n * @returns Transformed data\n */\n fitTransform(X: Tensor, _y?: Tensor): Tensor {\n this.fit(X);\n return this.transform(X);\n }\n\n /**\n * Transform data back to original space.\n *\n * @param X - Transformed data of shape (n_samples, n_components)\n * @returns Reconstructed data of shape (n_samples, n_features)\n */\n inverseTransform(X: Tensor): Tensor {\n if (!this.fitted || !this.components_ || !this.mean_) {\n throw new NotFittedError(\"PCA must be fitted before inverse transform\");\n }\n\n if (X.ndim !== 2) {\n throw new ShapeError(`X must be 2-dimensional; got ndim=${X.ndim}`);\n }\n assertContiguous(X, \"X\");\n\n const nSamples = X.shape[0] ?? 0;\n const nComponents = this.nComponentsActual_ ?? 0;\n const nFeatures = this.components_.shape[1] ?? 0;\n if ((X.shape[1] ?? 0) !== nComponents) {\n throw new ShapeError(\n `X must have ${nComponents} components; got ${(X.shape[1] ?? 0).toString()}`\n );\n }\n\n for (let i = 0; i < X.size; i++) {\n const val = X.data[X.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"X contains non-finite values (NaN or Inf)\");\n }\n }\n\n // Reconstruct: X_reconstructed = X_transformed @ components\n const reconstructed: number[][] = [];\n const varianceEps = 1e-12;\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n let sum = 0;\n for (let k = 0; k < nComponents; k++) {\n const xVal = Number(X.data[X.offset + i * nComponents + k]);\n const variance = Number(\n this.explainedVariance_?.data[this.explainedVariance_.offset + k] ?? 0\n );\n // Undo whitening by restoring the original component scale.\n const scaled = this.whiten ? xVal * Math.sqrt(variance + varianceEps) : xVal;\n sum +=\n scaled * Number(this.components_.data[this.components_.offset + k * nFeatures + j]);\n }\n // Add back the mean\n sum += Number(this.mean_.data[this.mean_.offset + j]);\n row.push(sum);\n }\n reconstructed.push(row);\n }\n\n return tensor(reconstructed);\n }\n\n /**\n * Center data by subtracting mean.\n */\n private centerData(X: Tensor, meanVec: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n const centered: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n const val = Number(X.data[X.offset + i * nFeatures + j]);\n const meanVal = Number(meanVec.data[meanVec.offset + j]);\n row.push(val - meanVal);\n }\n centered.push(row);\n }\n\n return tensor(centered);\n }\n\n /**\n * Get principal components.\n */\n get components(): Tensor {\n if (!this.fitted || !this.components_) {\n throw new NotFittedError(\"PCA must be fitted to access components\");\n }\n return this.components_;\n }\n\n /**\n * Get explained variance.\n */\n get explainedVariance(): Tensor {\n if (!this.fitted || !this.explainedVariance_) {\n throw new NotFittedError(\"PCA must be fitted to access explained variance\");\n }\n return this.explainedVariance_;\n }\n\n /**\n * Get explained variance ratio.\n */\n get explainedVarianceRatio(): Tensor {\n if (!this.fitted || !this.explainedVarianceRatio_) {\n throw new NotFittedError(\"PCA must be fitted to access explained variance ratio\");\n }\n return this.explainedVarianceRatio_;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nComponents: this.nComponents,\n whiten: this.whiten,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\"PCA does not support setParams after construction\");\n }\n}\n","import {\n DataValidationError,\n DeepboxError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier, Regressor } from \"../base\";\n\ntype TreeNode = {\n readonly isLeaf: boolean;\n readonly prediction?: number | undefined;\n readonly classProbabilities?: number[] | undefined;\n readonly featureIndex?: number;\n readonly threshold?: number;\n readonly left?: TreeNode;\n readonly right?: TreeNode;\n};\n\n/**\n * Decision Tree Classifier.\n *\n * A non-parametric supervised learning method that learns simple decision rules\n * inferred from the data features.\n *\n * **Algorithm**: CART (Classification and Regression Trees)\n * - Uses Gini impurity for classification\n * - Recursively splits data based on feature thresholds\n * - Supports max_depth and min_samples_split for regularization\n *\n * @example\n * ```ts\n * import { DecisionTreeClassifier } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [3, 4], [5, 6], [7, 8]]);\n * const y = tensor([0, 0, 1, 1]);\n *\n * const clf = new DecisionTreeClassifier({ maxDepth: 3 });\n * clf.fit(X, y);\n * const predictions = clf.predict(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html | scikit-learn DecisionTreeClassifier}\n */\nexport class DecisionTreeClassifier implements Classifier {\n private maxDepth: number;\n private minSamplesSplit: number;\n private minSamplesLeaf: number;\n private maxFeatures: number | undefined;\n private randomState: number | undefined;\n\n private tree?: TreeNode;\n private nFeatures?: number;\n private classLabels?: 
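A sketch of the whiten and inverse-transform behaviour implemented above: whitening rescales the projected scores to unit variance, and `inverseTransform` undoes that scaling and re-adds the mean. The data values are illustrative only.

```ts
import { PCA } from 'deepbox/ml';
import { tensor } from 'deepbox/ndarray';

// Illustrative 2-D data that mostly varies along one direction.
const X = tensor([[2.5, 2.4], [0.5, 0.7], [2.2, 2.9], [1.9, 2.2], [3.1, 3.0], [2.3, 2.7]]);

// Keep a single component; whiten rescales the projected scores to unit variance.
const pca = new PCA({ nComponents: 1, whiten: true });
const Z = pca.fitTransform(X);

// inverseTransform restores the component scale and re-adds the mean, giving a
// rank-1 reconstruction of the original data (lossy, since one component was dropped).
const XRec = pca.inverseTransform(Z);
console.log('explained variance ratio:', pca.explainedVarianceRatio);
console.log('reconstruction:', XRec);
```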
number[];\n private fitted = false;\n\n constructor(\n options: {\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n readonly minSamplesLeaf?: number;\n readonly maxFeatures?: number;\n readonly randomState?: number;\n } = {}\n ) {\n this.maxDepth = options.maxDepth ?? 10;\n this.minSamplesSplit = options.minSamplesSplit ?? 2;\n this.minSamplesLeaf = options.minSamplesLeaf ?? 1;\n if (options.maxFeatures !== undefined) {\n this.maxFeatures = options.maxFeatures;\n }\n if (options.randomState !== undefined) {\n this.randomState = options.randomState;\n }\n\n if (this.randomState !== undefined && !Number.isFinite(this.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(this.randomState)}`,\n \"randomState\",\n this.randomState\n );\n }\n\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n `maxDepth must be an integer >= 1; received ${this.maxDepth}`,\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n `minSamplesSplit must be an integer >= 2; received ${this.minSamplesSplit}`,\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n if (!Number.isInteger(this.minSamplesLeaf) || this.minSamplesLeaf < 1) {\n throw new InvalidParameterError(\n `minSamplesLeaf must be an integer >= 1; received ${this.minSamplesLeaf}`,\n \"minSamplesLeaf\",\n this.minSamplesLeaf\n );\n }\n if (\n this.maxFeatures !== undefined &&\n (!Number.isInteger(this.maxFeatures) || this.maxFeatures < 1)\n ) {\n throw new InvalidParameterError(\n `maxFeatures must be an integer >= 1; received ${this.maxFeatures}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n }\n\n private getRng(): () => number {\n if (this.randomState === undefined) {\n return Math.random;\n }\n let seed = this.randomState;\n return () => {\n seed = (seed * 1664525 + 1013904223) >>> 0;\n return seed / 4294967296;\n };\n }\n\n /**\n * Build a decision tree classifier from the training set (X, y).\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target class labels of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n\n this.nFeatures = nFeatures;\n\n // Extract data as arrays\n const XData: number[][] = [];\n const yData: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n // Get unique classes\n this.classLabels = [...new Set(yData)].sort((a, b) => a - b);\n\n // Build tree\n const indices = Array.from({ length: nSamples }, (_, i) => i);\n this.tree = this.buildTree(XData, yData, indices, 0);\n this.fitted = true;\n\n return this;\n }\n\n private buildTree(\n XData: number[][],\n yData: number[],\n indices: number[],\n depth: number\n ): TreeNode {\n const n = indices.length;\n\n // Check stopping conditions\n if (depth >= this.maxDepth || n < this.minSamplesSplit || n < this.minSamplesLeaf) {\n return {\n isLeaf: true,\n prediction: this.getMajorityClass(yData, indices),\n classProbabilities: this.getClassProbabilities(yData, indices),\n };\n }\n\n // Check if all samples have same class\n const classes = new Set(indices.map((i) => yData[i]));\n if (classes.size === 1) {\n const firstIdx = indices[0] ?? 0;\n return {\n isLeaf: true,\n prediction: yData[firstIdx] ?? 0,\n classProbabilities: this.getClassProbabilities(yData, indices),\n };\n }\n\n // Find best split\n const { featureIndex, threshold, leftIndices, rightIndices } = this.findBestSplit(\n XData,\n yData,\n indices\n );\n\n if (leftIndices.length === 0 || rightIndices.length === 0) {\n return {\n isLeaf: true,\n prediction: this.getMajorityClass(yData, indices),\n classProbabilities: this.getClassProbabilities(yData, indices),\n };\n }\n\n // Recursively build subtrees\n const left = this.buildTree(XData, yData, leftIndices, depth + 1);\n const right = this.buildTree(XData, yData, rightIndices, depth + 1);\n\n return {\n isLeaf: false,\n featureIndex,\n threshold,\n left,\n right,\n };\n }\n\n private getMajorityClass(yData: number[], indices: number[]): number {\n const counts = new Map<number, number>();\n for (const i of indices) {\n const label = yData[i] ?? 0;\n counts.set(label, (counts.get(label) ?? 0) + 1);\n }\n\n let maxCount = 0;\n let maxLabel = 0;\n for (const [label, count] of counts) {\n if (count > maxCount) {\n maxCount = count;\n maxLabel = label;\n }\n }\n return maxLabel;\n }\n\n private getClassProbabilities(yData: number[], indices: number[]): number[] {\n const labels = this.classLabels ?? [];\n if (labels.length === 0 || indices.length === 0) {\n return [];\n }\n\n const labelIndex = new Map<number, number>();\n for (let i = 0; i < labels.length; i++) {\n const v = labels[i];\n if (v !== undefined) labelIndex.set(v, i);\n }\n\n const counts = new Array<number>(labels.length).fill(0);\n for (const index of indices) {\n const label = yData[index] ?? 0;\n const idx = labelIndex.get(label);\n if (idx !== undefined) counts[idx] = (counts[idx] ?? 0) + 1;\n }\n\n const invN = 1 / indices.length;\n return counts.map((c) => c * invN);\n }\n\n private findBestSplit(\n XData: number[][],\n yData: number[],\n indices: number[]\n ): {\n featureIndex: number;\n threshold: number;\n leftIndices: number[];\n rightIndices: number[];\n } {\n let bestGini = Infinity;\n let bestFeature = 0;\n let bestThreshold = 0;\n let bestLeft: number[] = [];\n let bestRight: number[] = [];\n\n const nFeatures = XData[0]?.length ?? 
0;\n let featureIndices = Array.from({ length: nFeatures }, (_, i) => i);\n\n if (this.maxFeatures !== undefined && this.maxFeatures < nFeatures) {\n const rng = this.getRng();\n // Fisher-Yates shuffle partial\n for (let i = 0; i < this.maxFeatures; i++) {\n const j = i + Math.floor(rng() * (nFeatures - i));\n const temp = featureIndices[i];\n if (temp !== undefined) {\n const swapVal = featureIndices[j];\n if (swapVal !== undefined) {\n featureIndices[i] = swapVal;\n featureIndices[j] = temp;\n }\n }\n }\n featureIndices = featureIndices.slice(0, this.maxFeatures);\n }\n\n const n = indices.length;\n // Pre-calculate total class counts\n const totalCounts = new Map<number, number>();\n for (const i of indices) {\n const label = yData[i] ?? 0;\n totalCounts.set(label, (totalCounts.get(label) ?? 0) + 1);\n }\n\n for (const f of featureIndices) {\n // Sort indices by feature value\n // Create a copy to sort\n const sortedIndices = [...indices].sort(\n (a, b) => (XData[a]?.[f] ?? 0) - (XData[b]?.[f] ?? 0)\n );\n\n const leftCounts = new Map<number, number>();\n const rightCounts = new Map<number, number>(totalCounts);\n let leftSize = 0;\n let rightSize = n;\n\n for (let i = 0; i < n - 1; i++) {\n const idx = sortedIndices[i];\n if (idx === undefined) continue;\n const label = yData[idx] ?? 0;\n const val = XData[idx]?.[f] ?? 0;\n const nextIdx = sortedIndices[i + 1];\n if (nextIdx === undefined) continue;\n const nextVal = XData[nextIdx]?.[f] ?? 0;\n\n // Move from Right to Left\n const currentRight = rightCounts.get(label) ?? 0;\n if (currentRight <= 1) rightCounts.delete(label);\n else rightCounts.set(label, currentRight - 1);\n rightSize--;\n\n leftCounts.set(label, (leftCounts.get(label) ?? 0) + 1);\n leftSize++;\n\n if (val === nextVal) continue; // Cannot split between same values\n\n if (leftSize < this.minSamplesLeaf || rightSize < this.minSamplesLeaf) continue;\n\n // Calculate weighted Gini\n const leftGini = this.giniFromCounts(leftCounts, leftSize);\n const rightGini = this.giniFromCounts(rightCounts, rightSize);\n const weightedGini = (leftSize * leftGini + rightSize * rightGini) / n;\n\n if (weightedGini < bestGini) {\n bestGini = weightedGini;\n bestFeature = f;\n bestThreshold = (val + nextVal) / 2;\n bestLeft = sortedIndices.slice(0, i + 1);\n bestRight = sortedIndices.slice(i + 1);\n }\n }\n }\n\n return {\n featureIndex: bestFeature,\n threshold: bestThreshold,\n leftIndices: bestLeft,\n rightIndices: bestRight,\n };\n }\n\n private giniFromCounts(counts: Map<number, number>, n: number): number {\n let impurity = 1.0;\n for (const count of counts.values()) {\n const p = count / n;\n impurity -= p * p;\n }\n return impurity;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted class labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.tree) {\n throw new NotFittedError(\"DecisionTreeClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"DecisionTreeClassifier\");\n\n const predictions: number[] = [];\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
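`findBestSplit` above keeps running class counts on each side of a candidate threshold and scores the split by size-weighted Gini impurity. A standalone sketch of that scoring, with helper names of my own choosing:

```ts
// Gini impurity of a node given class counts: 1 - Σ p_c².
// A pure node (one class) scores 0; a 50/50 binary node scores 0.5.
function gini(counts: Map<number, number>, n: number): number {
  let impurity = 1;
  for (const c of counts.values()) {
    const p = c / n;
    impurity -= p * p;
  }
  return impurity;
}

// Size-weighted impurity of a candidate split: the quantity minimized above.
function weightedGini(left: Map<number, number>, nLeft: number,
                      right: Map<number, number>, nRight: number): number {
  const n = nLeft + nRight;
  return (nLeft * gini(left, nLeft) + nRight * gini(right, nRight)) / n;
}

// Example: a perfect split of 4 samples (two per class) has weighted Gini 0.
const left = new Map([[0, 2]]);
const right = new Map([[1, 2]]);
console.log(weightedGini(left, 2, right, 2)); // 0
```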
0;\n\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n predictions.push(this.predictSample(sample, this.tree));\n }\n\n return tensor(predictions, { dtype: \"int32\" });\n }\n\n private predictSample(sample: number[], node: TreeNode): number {\n let current = node;\n while (!current.isLeaf) {\n const featureValue = sample[current.featureIndex ?? 0] ?? 0;\n if (featureValue <= (current.threshold ?? 0)) {\n if (!current.left)\n throw new DeepboxError(\"Corrupted tree: Internal node missing left child\");\n current = current.left;\n } else {\n if (!current.right)\n throw new DeepboxError(\"Corrupted tree: Internal node missing right child\");\n current = current.right;\n }\n }\n return current.prediction ?? 0;\n }\n\n /**\n * Predict class probabilities for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Class probability matrix of shape (n_samples, n_classes)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted || !this.tree || !this.classLabels) {\n throw new NotFittedError(\"DecisionTreeClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"DecisionTreeClassifier\");\n assertContiguous(X, \"X\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const nClasses = this.classLabels.length;\n\n const proba: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n\n const leaf = this.predictLeaf(sample, this.tree);\n const row = leaf.classProbabilities\n ? [...leaf.classProbabilities]\n : new Array(nClasses).fill(0);\n proba.push(row);\n }\n\n return tensor(proba);\n }\n\n private predictLeaf(sample: number[], node: TreeNode): TreeNode {\n if (node.isLeaf) {\n return node;\n }\n\n const featureValue = sample[node.featureIndex ?? 0] ?? 0;\n if (featureValue <= (node.threshold ?? 0)) {\n if (!node.left) {\n return node;\n }\n return this.predictLeaf(sample, node.left);\n }\n if (!node.right) {\n return node;\n }\n return this.predictLeaf(sample, node.right);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n let correct = 0;\n for (let i = 0; i < y.size; i++) {\n if (Number(predictions.data[predictions.offset + i]) === Number(y.data[y.offset + i])) {\n correct++;\n }\n }\n return correct / y.size;\n }\n\n /**\n * Get the unique class labels discovered during fitting.\n *\n * @returns Tensor of class labels or undefined if not fitted\n */\n get classes(): Tensor | undefined {\n if (!this.fitted || !this.classLabels) {\n return undefined;\n }\n return tensor(this.classLabels, { dtype: \"int32\" });\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: this.maxFeatures,\n randomState: this.randomState,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"DecisionTreeClassifier does not support setParams after construction\"\n );\n }\n}\n\n/**\n * Decision Tree Regressor.\n *\n * Uses MSE reduction to find optimal splits for regression tasks.\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeRegressor.html | scikit-learn DecisionTreeRegressor}\n */\nexport class DecisionTreeRegressor implements Regressor {\n private maxDepth: number;\n private minSamplesSplit: number;\n private minSamplesLeaf: number;\n private maxFeatures: number | undefined;\n private randomState: number | undefined;\n\n private tree?: TreeNode;\n private nFeatures?: number;\n private fitted = false;\n\n constructor(\n options: {\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n readonly minSamplesLeaf?: number;\n readonly maxFeatures?: number;\n readonly randomState?: number;\n } = {}\n ) {\n this.maxDepth = options.maxDepth ?? 10;\n this.minSamplesSplit = options.minSamplesSplit ?? 2;\n this.minSamplesLeaf = options.minSamplesLeaf ?? 
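A brief sketch of the probability and scoring API of the classifier above (`predictProba` returns leaf class frequencies, `classes` the sorted labels, `score` mean accuracy), complementing the fit/predict example in its JSDoc. The data is illustrative only.

```ts
import { DecisionTreeClassifier } from 'deepbox/ml';
import { tensor } from 'deepbox/ndarray';

// Illustrative binary problem.
const X = tensor([[1, 2], [2, 1], [3, 4], [6, 7], [7, 6], [8, 8]]);
const y = tensor([0, 0, 0, 1, 1, 1]);

const clf = new DecisionTreeClassifier({ maxDepth: 3, minSamplesLeaf: 1 });
clf.fit(X, y);

// Per-class probabilities come from the class frequencies stored in each leaf.
console.log('proba:', clf.predictProba(X));
// classes reports the sorted unique labels seen during fit.
console.log('classes:', clf.classes);
// score returns mean accuracy on (X, y).
console.log('accuracy:', clf.score(X, y));
```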
1;\n if (options.maxFeatures !== undefined) {\n this.maxFeatures = options.maxFeatures;\n }\n if (options.randomState !== undefined) {\n this.randomState = options.randomState;\n }\n\n if (this.randomState !== undefined && !Number.isFinite(this.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(this.randomState)}`,\n \"randomState\",\n this.randomState\n );\n }\n\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n `maxDepth must be an integer >= 1; received ${this.maxDepth}`,\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n `minSamplesSplit must be an integer >= 2; received ${this.minSamplesSplit}`,\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n if (!Number.isInteger(this.minSamplesLeaf) || this.minSamplesLeaf < 1) {\n throw new InvalidParameterError(\n `minSamplesLeaf must be an integer >= 1; received ${this.minSamplesLeaf}`,\n \"minSamplesLeaf\",\n this.minSamplesLeaf\n );\n }\n if (\n this.maxFeatures !== undefined &&\n (!Number.isInteger(this.maxFeatures) || this.maxFeatures < 1)\n ) {\n throw new InvalidParameterError(\n `maxFeatures must be an integer >= 1; received ${this.maxFeatures}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n }\n\n private getRng(): () => number {\n if (this.randomState === undefined) {\n return Math.random;\n }\n let seed = this.randomState;\n return () => {\n seed = (seed * 1664525 + 1013904223) >>> 0;\n return seed / 4294967296;\n };\n }\n\n /**\n * Build a decision tree regressor from the training set (X, y).\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n assertContiguous(X, \"X\");\n assertContiguous(y, \"y\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n\n this.nFeatures = nFeatures;\n\n const XData: number[][] = [];\n const yData: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n const indices = Array.from({ length: nSamples }, (_, i) => i);\n this.tree = this.buildTree(XData, yData, indices, 0);\n this.fitted = true;\n\n return this;\n }\n\n private buildTree(\n XData: number[][],\n yData: number[],\n indices: number[],\n depth: number\n ): TreeNode {\n const n = indices.length;\n if (n === 0) {\n throw new DataValidationError(\"Cannot build a decision tree from an empty dataset\");\n }\n\n if (depth >= this.maxDepth || n < this.minSamplesSplit || n < this.minSamplesLeaf) {\n return { isLeaf: true, prediction: this.getMean(yData, indices) };\n }\n\n const { featureIndex, threshold, leftIndices, rightIndices } = this.findBestSplit(\n XData,\n yData,\n indices\n );\n\n if (leftIndices.length === 0 || rightIndices.length === 0) {\n return { isLeaf: true, prediction: this.getMean(yData, indices) };\n }\n\n const left = this.buildTree(XData, yData, leftIndices, depth + 1);\n const right = this.buildTree(XData, yData, rightIndices, depth + 1);\n\n return {\n isLeaf: false,\n featureIndex,\n threshold,\n left,\n right,\n };\n }\n\n private getMean(yData: number[], indices: number[]): number {\n let sum = 0;\n for (const i of indices) {\n sum += yData[i] ?? 0;\n }\n return sum / indices.length;\n }\n\n private findBestSplit(\n XData: number[][],\n yData: number[],\n indices: number[]\n ): {\n featureIndex: number;\n threshold: number;\n leftIndices: number[];\n rightIndices: number[];\n } {\n let bestScore = -Infinity; // We maximize the proxy score\n let bestFeature = 0;\n let bestThreshold = 0;\n let bestLeft: number[] = [];\n let bestRight: number[] = [];\n\n const nFeatures = XData[0]?.length ?? 0;\n let featureIndices = Array.from({ length: nFeatures }, (_, i) => i);\n\n if (this.maxFeatures !== undefined && this.maxFeatures < nFeatures) {\n const rng = this.getRng();\n for (let i = 0; i < this.maxFeatures; i++) {\n const j = i + Math.floor(rng() * (nFeatures - i));\n const a = featureIndices[i];\n const b = featureIndices[j];\n if (a === undefined || b === undefined) {\n throw new DeepboxError(`Internal error: featureIndices out of bounds: i=${i}, j=${j}`);\n }\n featureIndices[i] = b;\n featureIndices[j] = a;\n }\n featureIndices = featureIndices.slice(0, this.maxFeatures);\n }\n\n const n = indices.length;\n let totalSum = 0;\n\n for (const i of indices) {\n const yVal = yData[i] ?? 0;\n totalSum += yVal;\n }\n\n for (const f of featureIndices) {\n // Sort indices by feature value\n const sortedIndices = [...indices].sort(\n (a, b) => (XData[a]?.[f] ?? 0) - (XData[b]?.[f] ?? 0)\n );\n\n let leftSum = 0;\n let leftCnt = 0;\n let rightSum = totalSum;\n let rightCnt = n;\n\n for (let i = 0; i < n - 1; i++) {\n const idx = sortedIndices[i];\n if (idx === undefined) continue;\n const val = XData[idx]?.[f] ?? 0;\n const nextIdx = sortedIndices[i + 1];\n if (nextIdx === undefined) continue;\n const nextVal = XData[nextIdx]?.[f] ?? 0;\n const yVal = yData[idx] ?? 
0;\n\n // Move from Right to Left\n leftSum += yVal;\n leftCnt++;\n rightSum -= yVal;\n rightCnt--;\n\n if (val === nextVal) continue; // Cannot split between same values\n\n if (leftCnt < this.minSamplesLeaf || rightCnt < this.minSamplesLeaf) continue;\n\n // Proxy score to maximize: (SumL^2 / nL) + (SumR^2 / nR)\n // This is equivalent to minimizing weighted MSE\n const score = (leftSum * leftSum) / leftCnt + (rightSum * rightSum) / rightCnt;\n\n if (score > bestScore) {\n bestScore = score;\n bestFeature = f;\n bestThreshold = (val + nextVal) / 2;\n bestLeft = sortedIndices.slice(0, i + 1);\n bestRight = sortedIndices.slice(i + 1);\n }\n }\n }\n\n // If no split found (e.g. pure node or all features constant), return empty\n if (bestScore === -Infinity) {\n return {\n featureIndex: 0,\n threshold: 0,\n leftIndices: [],\n rightIndices: [],\n };\n }\n\n return {\n featureIndex: bestFeature,\n threshold: bestThreshold,\n leftIndices: bestLeft,\n rightIndices: bestRight,\n };\n }\n\n /**\n * Predict target values for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.tree) {\n throw new NotFittedError(\"DecisionTreeRegressor must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"DecisionTreeRegressor\");\n assertContiguous(X, \"X\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n predictions.push(this.predictSample(sample, this.tree));\n }\n\n return tensor(predictions);\n }\n\n private predictSample(sample: number[], node: TreeNode): number {\n if (node.isLeaf) {\n return node.prediction ?? 0;\n }\n\n const featureValue = sample[node.featureIndex ?? 0] ?? 0;\n if (featureValue <= (node.threshold ?? 0)) {\n if (!node.left) throw new DeepboxError(\"Corrupted tree: Internal node missing left child\");\n return this.predictSample(sample, node.left);\n } else {\n if (!node.right) throw new DeepboxError(\"Corrupted tree: Internal node missing right child\");\n return this.predictSample(sample, node.right);\n }\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * R² = 1 - SS_res / SS_tot.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
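The regressor's split search above maximizes the proxy score SumL²/nL + SumR²/nR instead of recomputing per-split MSE. A standalone numerical check of the equivalence, with names of my own choosing:

```ts
// For each child node, SSE = Σ y² − (Σ y)² / n, and Σ y² over the parent is the
// same for every candidate split, so minimizing total SSE is the same as
// maximizing Σ_child (Σ y)² / n_child.
const sse = (ys: number[]): number => {
  const mean = ys.reduce((a, b) => a + b, 0) / ys.length;
  return ys.reduce((a, v) => a + (v - mean) ** 2, 0);
};
const proxy = (ys: number[]): number => {
  const sum = ys.reduce((a, b) => a + b, 0);
  return (sum * sum) / ys.length;
};

const y = [1, 2, 10, 11];
// Candidate splits after sorting by the feature: {1}|{2,10,11} and {1,2}|{10,11}.
for (const cut of [1, 2]) {
  const left = y.slice(0, cut);
  const right = y.slice(cut);
  console.log(cut, 'SSE:', sse(left) + sse(right), 'proxy:', proxy(left) + proxy(right));
}
// The split with the lowest total SSE ({1,2}|{10,11}) also has the highest proxy score.
```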
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n\n // Calculate R² score\n let ssRes = 0;\n let ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i]);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yTrue = Number(y.data[y.offset + i]);\n const yPred = Number(predictions.data[predictions.offset + i]);\n ssRes += (yTrue - yPred) ** 2;\n ssTot += (yTrue - yMean) ** 2;\n }\n\n return ssTot === 0 ? (ssRes === 0 ? 1.0 : 0.0) : 1 - ssRes / ssTot;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: this.maxFeatures,\n randomState: this.randomState,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"DecisionTreeRegressor does not support setParams after construction\"\n );\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier, Regressor } from \"../base\";\nimport { DecisionTreeRegressor } from \"../tree/DecisionTree\";\n\n/**\n * Gradient Boosting Regressor.\n *\n * Builds an additive model in a forward stage-wise fashion using\n * regression trees as weak learners. 
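A minimal usage sketch for the DecisionTreeRegressor above, showing fit, prediction on new samples, and the R² score. The data is illustrative only; `randomState` only seeds the feature-subsampling RNG used when `maxFeatures` is set.

```ts
import { DecisionTreeRegressor } from 'deepbox/ml';
import { tensor } from 'deepbox/ndarray';

// Illustrative 1-D regression problem (a noisy step function).
const X = tensor([[1], [2], [3], [4], [5], [6]]);
const y = tensor([1.0, 1.1, 0.9, 5.0, 5.2, 4.9]);

const reg = new DecisionTreeRegressor({ maxDepth: 2, minSamplesLeaf: 1, randomState: 0 });
reg.fit(X, y);

console.log('predictions:', reg.predict(tensor([[2.5], [4.5]])));
console.log('R²:', reg.score(X, y));
```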
Optimizes squared error loss.\n *\n * **Algorithm**: Gradient Boosting with regression trees\n * - Stage-wise additive modeling\n * - Uses gradient of squared loss (residuals)\n *\n * @example\n * ```ts\n * import { GradientBoostingRegressor } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1], [2], [3], [4], [5]]);\n * const y = tensor([1.2, 2.1, 2.9, 4.0, 5.1]);\n *\n * const gbr = new GradientBoostingRegressor({ nEstimators: 100 });\n * gbr.fit(X, y);\n * const predictions = gbr.predict(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingRegressor.html | scikit-learn GradientBoostingRegressor}\n */\nexport class GradientBoostingRegressor implements Regressor {\n /** Number of boosting stages (trees) */\n private nEstimators: number;\n\n /** Learning rate shrinks the contribution of each tree */\n private learningRate: number;\n\n /** Maximum depth of individual regression trees */\n private maxDepth: number;\n\n /** Minimum samples required to split */\n private minSamplesSplit: number;\n\n /** Array of weak learners (regression trees) */\n private estimators: DecisionTreeRegressor[] = [];\n\n /** Initial prediction (mean of targets) */\n private initPrediction = 0;\n\n /** Number of features */\n private nFeatures = 0;\n\n /** Whether the model has been fitted */\n private fitted = false;\n\n constructor(\n options: {\n readonly nEstimators?: number;\n readonly learningRate?: number;\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n } = {}\n ) {\n this.nEstimators = options.nEstimators ?? 100;\n this.learningRate = options.learningRate ?? 0.1;\n this.maxDepth = options.maxDepth ?? 3;\n this.minSamplesSplit = options.minSamplesSplit ?? 2;\n\n if (!Number.isInteger(this.nEstimators) || this.nEstimators <= 0) {\n throw new InvalidParameterError(\n \"nEstimators must be a positive integer\",\n \"nEstimators\",\n this.nEstimators\n );\n }\n if (!Number.isFinite(this.learningRate) || this.learningRate <= 0) {\n throw new InvalidParameterError(\n \"learningRate must be positive\",\n \"learningRate\",\n this.learningRate\n );\n }\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n \"maxDepth must be an integer >= 1\",\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n \"minSamplesSplit must be an integer >= 2\",\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n }\n\n /**\n * Fit the gradient boosting regressor on training data.\n *\n * Builds an additive model by sequentially fitting regression trees\n * to the negative gradient (residuals) of the loss function.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n\n this.nFeatures = nFeatures;\n\n const yData: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n yData.push(Number(y.data[y.offset + i]));\n }\n\n // Initialize with mean prediction (F0)\n this.initPrediction = yData.reduce((sum, val) => sum + val, 0) / nSamples;\n\n // Current predictions\n const predictions = new Array<number>(nSamples).fill(this.initPrediction);\n\n // Build ensemble\n this.estimators = [];\n\n for (let m = 0; m < this.nEstimators; m++) {\n // Compute residuals (negative gradient of squared loss)\n const residuals: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n residuals.push((yData[i] ?? 0) - (predictions[i] ?? 0));\n }\n\n // Fit a regression tree to residuals\n const tree = new DecisionTreeRegressor({\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: 1,\n });\n tree.fit(X, tensor(residuals));\n this.estimators.push(tree);\n\n // Update predictions\n const treePred = tree.predict(X);\n for (let i = 0; i < nSamples; i++) {\n predictions[i] =\n (predictions[i] ?? 0) + this.learningRate * Number(treePred.data[treePred.offset + i]);\n }\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict target values for samples in X.\n *\n * Aggregates the initial prediction and the scaled contributions of all trees.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"GradientBoostingRegressor must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"GradientBoostingRegressor\");\n\n const nSamples = X.shape[0] ?? 0;\n const predictions = new Array<number>(nSamples).fill(this.initPrediction);\n\n for (const tree of this.estimators) {\n const treePred = tree.predict(X);\n for (let i = 0; i < nSamples; i++) {\n predictions[i] =\n (predictions[i] ?? 0) + this.learningRate * Number(treePred.data[treePred.offset + i]);\n }\n }\n\n return tensor(predictions);\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n\n let ssRes = 0;\n let ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i]);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yTrue = Number(y.data[y.offset + i]);\n const yPred = Number(predictions.data[predictions.offset + i]);\n ssRes += (yTrue - yPred) ** 2;\n ssTot += (yTrue - yMean) ** 2;\n }\n\n return ssTot === 0 ? (ssRes === 0 ? 1.0 : 0.0) : 1 - ssRes / ssTot;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nEstimators: this.nEstimators,\n learningRate: this.learningRate,\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"GradientBoostingRegressor does not support setParams after construction\"\n );\n }\n}\n\n/**\n * Gradient Boosting Classifier.\n *\n * Uses gradient boosting with shallow regression trees for binary classification.\n * Optimizes log loss (cross-entropy) using sigmoid function.\n *\n * @example\n * ```ts\n * import { GradientBoostingClassifier } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [2, 3], [3, 1], [4, 2]]);\n * const y = tensor([0, 0, 1, 1]);\n *\n * const gbc = new GradientBoostingClassifier({ nEstimators: 100 });\n * gbc.fit(X, y);\n * const predictions = gbc.predict(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingClassifier.html | scikit-learn GradientBoostingClassifier}\n */\nexport class GradientBoostingClassifier implements Classifier {\n /** Number of boosting stages */\n private nEstimators: number;\n\n /** Learning rate */\n private learningRate: number;\n\n /** Maximum depth */\n private maxDepth: number;\n\n /** Minimum samples to split */\n private minSamplesSplit: number;\n\n /** Array of weak learners */\n private estimators: DecisionTreeRegressor[] = [];\n\n /** Initial log-odds prediction */\n private initPrediction = 0;\n\n /** Number of features */\n private nFeatures = 0;\n\n /** Unique class labels */\n private classLabels: number[] = [];\n\n /** Whether fitted */\n private fitted = false;\n\n constructor(\n options: {\n readonly nEstimators?: number;\n readonly learningRate?: number;\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n } = {}\n ) {\n this.nEstimators = options.nEstimators ?? 100;\n this.learningRate = options.learningRate ?? 0.1;\n this.maxDepth = options.maxDepth ?? 3;\n this.minSamplesSplit = options.minSamplesSplit ?? 
2;\n\n if (!Number.isInteger(this.nEstimators) || this.nEstimators <= 0) {\n throw new InvalidParameterError(\n \"nEstimators must be a positive integer\",\n \"nEstimators\",\n this.nEstimators\n );\n }\n if (!Number.isFinite(this.learningRate) || this.learningRate <= 0) {\n throw new InvalidParameterError(\n \"learningRate must be positive\",\n \"learningRate\",\n this.learningRate\n );\n }\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n \"maxDepth must be an integer >= 1\",\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n \"minSamplesSplit must be an integer >= 2\",\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n }\n\n /**\n * Fit the gradient boosting classifier on training data.\n *\n * Builds an additive model by sequentially fitting regression trees\n * to the pseudo-residuals (gradient of log loss).\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target class labels of shape (n_samples,). Must contain exactly 2 classes.\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {InvalidParameterError} If y does not contain exactly 2 classes\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n this.nFeatures = nFeatures;\n\n const yData: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n yData.push(Number(y.data[y.offset + i]));\n }\n\n // Get unique classes\n this.classLabels = [...new Set(yData)].sort((a, b) => a - b);\n if (this.classLabels.length !== 2) {\n throw new InvalidParameterError(\n \"GradientBoostingClassifier requires exactly 2 classes\",\n \"y\",\n this.classLabels.length\n );\n }\n\n // Map to {0, 1}\n const yBinary = yData.map((label) => (label === this.classLabels[0] ? 0 : 1));\n\n // Initialize with log-odds\n const posCount = yBinary.filter((v) => v === 1).length;\n const negCount = nSamples - posCount;\n this.initPrediction = Math.log((posCount + 1) / (negCount + 1)); // Add smoothing\n\n // Current raw scores (log-odds)\n const rawScores = new Array<number>(nSamples).fill(this.initPrediction);\n\n // Build ensemble\n this.estimators = [];\n\n for (let m = 0; m < this.nEstimators; m++) {\n // Compute pseudo-residuals (gradient of log loss)\n const residuals: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n const prob = 1 / (1 + Math.exp(-(rawScores[i] ?? 0))); // Sigmoid\n const y_i = yBinary[i] ?? 0;\n residuals.push(y_i - prob);\n }\n\n // Fit regression tree to residuals\n const tree = new DecisionTreeRegressor({\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: 1,\n });\n tree.fit(X, tensor(residuals));\n this.estimators.push(tree);\n\n // Update raw scores\n const treePred = tree.predict(X);\n for (let i = 0; i < nSamples; i++) {\n rawScores[i] =\n (rawScores[i] ?? 
0) + this.learningRate * Number(treePred.data[treePred.offset + i]);\n }\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted class labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"GradientBoostingClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"GradientBoostingClassifier\");\n\n const nSamples = X.shape[0] ?? 0;\n\n const rawScores = new Array<number>(nSamples).fill(this.initPrediction);\n for (const tree of this.estimators) {\n const treePred = tree.predict(X);\n for (let i = 0; i < nSamples; i++) {\n rawScores[i] =\n (rawScores[i] ?? 0) + this.learningRate * Number(treePred.data[treePred.offset + i]);\n }\n }\n\n const predictions: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n const prob = 1 / (1 + Math.exp(-(rawScores[i] ?? 0)));\n const predictedClass = prob >= 0.5 ? this.classLabels[1] : this.classLabels[0];\n predictions.push(predictedClass ?? 0);\n }\n\n return tensor(predictions, { dtype: \"int32\" });\n }\n\n /**\n * Predict class probabilities for samples in X.\n *\n * Returns a matrix of shape (n_samples, 2) where columns are\n * [P(class_0), P(class_1)].\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Class probability matrix of shape (n_samples, 2)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"GradientBoostingClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"GradientBoostingClassifier\");\n\n const nSamples = X.shape[0] ?? 0;\n const rawScores = new Array<number>(nSamples).fill(this.initPrediction);\n for (const tree of this.estimators) {\n const treePred = tree.predict(X);\n for (let i = 0; i < nSamples; i++) {\n rawScores[i] =\n (rawScores[i] ?? 0) + this.learningRate * Number(treePred.data[treePred.offset + i]);\n }\n }\n\n const proba: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n const prob = 1 / (1 + Math.exp(-(rawScores[i] ?? 0)));\n proba.push([1 - prob, prob]);\n }\n\n return tensor(proba);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n let correct = 0;\n for (let i = 0; i < y.size; i++) {\n if (Number(predictions.data[predictions.offset + i]) === Number(y.data[y.offset + i])) {\n correct++;\n }\n }\n return correct / y.size;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nEstimators: this.nEstimators,\n learningRate: this.learningRate,\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"GradientBoostingClassifier does not support setParams after construction\"\n );\n }\n}\n","import { DataValidationError, InvalidParameterError, NotFittedError, ShapeError } from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Regressor } from \"../base\";\n\n/**\n * Lasso Regression (L1 Regularized Linear Regression).\n *\n * Lasso performs both regularization and feature selection by adding\n * an L1 penalty that can drive coefficients exactly to zero.\n *\n * @example\n * ```ts\n * import { Lasso } from 'deepbox/ml';\n *\n * const model = new Lasso({ alpha: 0.1, maxIter: 1000 });\n * model.fit(X_train, y_train);\n *\n * // Many coefficients will be exactly 0\n * console.log(model.coef);\n *\n * const predictions = model.predict(X_test);\n * ```\n *\n * @category Linear Models\n * @implements {Regressor}\n */\nexport class Lasso implements Regressor {\n /** Configuration options for the Lasso regression model */\n private options: {\n alpha?: number;\n fitIntercept?: boolean;\n normalize?: boolean;\n maxIter?: number;\n tol?: number;\n warmStart?: boolean;\n positive?: boolean;\n selection?: \"cyclic\" | \"random\";\n randomState?: number;\n };\n\n /** Model coefficients (weights) after fitting - shape (n_features,) */\n private coef_?: Tensor;\n\n /** Intercept (bias) term after fitting */\n private intercept_ = 0;\n\n /** Number of features seen during fit - used for validation */\n private nFeaturesIn_?: number;\n\n /** Number of iterations run by coordinate descent */\n private nIter_: number | undefined;\n\n /** Whether the model has been fitted to data */\n private fitted = false;\n\n /**\n * Create a new Lasso Regression model.\n *\n * @param options - Configuration options\n * @param options.alpha - Regularization strength (default: 1.0). Must be >= 0. Controls sparsity of solution.\n * @param options.fitIntercept - Whether to calculate the intercept (default: true)\n * @param options.normalize - Whether to normalize features before regression (default: false)\n * @param options.maxIter - Maximum iterations for coordinate descent (default: 1000)\n * @param options.tol - Tolerance for convergence (default: 1e-4). 
Smaller = more precise but slower.\n * @param options.warmStart - Whether to reuse previous solution as initialization (default: false)\n * @param options.positive - Whether to force coefficients to be positive (default: false)\n * @param options.selection - Coordinate selection: 'cyclic' (default) or 'random'\n */\n constructor(\n options: {\n readonly alpha?: number;\n readonly fitIntercept?: boolean;\n readonly normalize?: boolean;\n readonly maxIter?: number;\n readonly tol?: number;\n readonly warmStart?: boolean;\n readonly positive?: boolean;\n readonly selection?: \"cyclic\" | \"random\";\n readonly randomState?: number;\n } = {}\n ) {\n this.options = { ...options };\n if (this.options.randomState !== undefined && !Number.isFinite(this.options.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(this.options.randomState)}`,\n \"randomState\",\n this.options.randomState\n );\n }\n }\n\n private createRNG(): () => number {\n if (this.options.randomState !== undefined) {\n let seed = this.options.randomState;\n return () => {\n seed = (seed * 9301 + 49297) % 233280;\n return seed / 233280;\n };\n }\n return Math.random;\n }\n\n /**\n * Fit Lasso regression model using Coordinate Descent.\n *\n * Solves the L1-regularized least squares problem:\n * minimize (1/(2*n)) ||y - Xw||² + α||w||₁\n *\n * **Algorithm**: Coordinate Descent with Soft Thresholding\n * 1. Initialize coefficients (warm start if enabled)\n * 2. For each iteration:\n * - For each feature (cyclic or random order):\n * - Compute residual correlation\n * - Apply soft thresholding operator\n * - Update predictions incrementally\n * 3. Check convergence based on coefficient changes\n *\n * **Time Complexity**: O(k * n * p) where k = iterations, n = samples, p = features\n * **Space Complexity**: O(n + p)\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator for method chaining\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {DataValidationError} If X or y are empty\n * @throws {InvalidParameterError} If alpha < 0\n */\n fit(X: Tensor, y: Tensor): this {\n // Validate inputs (dimensions, empty data, NaN/Inf)\n validateFitInputs(X, y);\n this.nIter_ = undefined;\n\n // Extract and validate regularization parameter\n const alpha = this.options.alpha ?? 1.0;\n if (!(alpha >= 0)) {\n throw new InvalidParameterError(`alpha must be >= 0; received ${alpha}`, \"alpha\", alpha);\n }\n\n // Extract optimization parameters\n const maxIter = this.options.maxIter ?? 1000;\n const tol = this.options.tol ?? 1e-4;\n const fitIntercept = this.options.fitIntercept ?? true;\n const normalize = this.options.normalize ?? false;\n const positive = this.options.positive ?? false;\n const selection = this.options.selection ?? \"cyclic\";\n const rng = this.createRNG();\n\n // Extract dimensions: m = number of samples, n = number of features\n const m = X.shape[0] ?? 0;\n const n = X.shape[1] ?? 
0;\n\n // Store number of features for prediction validation\n this.nFeaturesIn_ = n;\n\n // Compute means for centering (if fitIntercept is true)\n // Centering is crucial for proper intercept calculation and numerical stability\n let yMean = 0;\n const xMean = new Array<number>(n).fill(0);\n\n if (fitIntercept) {\n // Compute sum of y values\n for (let i = 0; i < m; i++) {\n yMean += Number(y.data[y.offset + i] ?? 0);\n }\n\n // Compute sum of each feature column\n for (let i = 0; i < m; i++) {\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n xMean[j] = (xMean[j] ?? 0) + Number(X.data[rowBase + j] ?? 0);\n }\n }\n\n // Convert sums to means\n const invM = m === 0 ? 0 : 1 / m;\n yMean *= invM;\n for (let j = 0; j < n; j++) {\n xMean[j] = (xMean[j] ?? 0) * invM;\n }\n }\n\n let xScale: number[] | undefined;\n if (normalize) {\n xScale = new Array<number>(n).fill(0);\n for (let i = 0; i < m; i++) {\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n const centered = Number(X.data[rowBase + j] ?? 0) - (fitIntercept ? (xMean[j] ?? 0) : 0);\n xScale[j] = (xScale[j] ?? 0) + centered * centered;\n }\n }\n for (let j = 0; j < n; j++) {\n xScale[j] = Math.sqrt(xScale[j] ?? 0);\n }\n }\n\n const getX = (sampleIndex: number, featureIndex: number): number => {\n const raw = Number(X.data[X.offset + sampleIndex * n + featureIndex] ?? 0);\n const centered = raw - (fitIntercept ? (xMean[featureIndex] ?? 0) : 0);\n if (normalize && xScale) {\n const s = xScale[featureIndex] ?? 0;\n return s === 0 ? 0 : centered / s;\n }\n return centered;\n };\n\n // Precompute column squared norms of centered X: (1/m) * Σᵢ xᵢⱼ²\n // This is used in the coordinate descent update formula\n // Caching these norms significantly improves performance (O(n) vs O(nm) per iteration)\n const colNorm2 = new Array<number>(n).fill(0);\n for (let j = 0; j < n; j++) {\n let s = 0;\n for (let i = 0; i < m; i++) {\n const xij = getX(i, j);\n s += xij * xij;\n }\n // Normalize by number of samples\n colNorm2[j] = m === 0 ? 0 : s / m;\n }\n\n // Initialize coefficients\n // If warm_start is enabled and we have previous coefficients, reuse them\n // Otherwise initialize to zeros\n const w = new Array<number>(n).fill(0);\n if (this.options.warmStart && this.coef_ && this.coef_.ndim === 1 && this.coef_.size === n) {\n // Copy previous coefficients for warm start\n for (let j = 0; j < n; j++) {\n w[j] = Number(this.coef_.data[this.coef_.offset + j] ?? 0);\n }\n }\n\n // Maintain current predictions in centered space: ŷ = X_centered @ w\n // We update this incrementally during coordinate descent for efficiency\n // This avoids recomputing all predictions from scratch each iteration\n const yHat = new Array<number>(m).fill(0);\n for (let i = 0; i < m; i++) {\n let pred = 0;\n for (let j = 0; j < n; j++) {\n pred += getX(i, j) * (w[j] ?? 0);\n }\n yHat[i] = pred;\n }\n\n // Precompute 1/m for efficiency\n const invM = m === 0 ? 
0 : 1 / m;\n\n // Coordinate descent main loop\n // Each iteration updates all coefficients once\n for (let iter = 0; iter < maxIter; iter++) {\n // Track maximum coefficient change for convergence check\n let maxChange = 0;\n\n // Determine order of coordinate updates\n // Cyclic: iterate through features in order (more cache-friendly)\n // Random: shuffle features each iteration (can help convergence)\n let indices: number[] | null = null;\n if (selection === \"random\") {\n indices = Array.from({ length: n }, (_, j) => j);\n // Fisher-Yates shuffle for random selection\n for (let k = n - 1; k > 0; k--) {\n const r = Math.floor(rng() * (k + 1));\n const tmp = indices[k];\n indices[k] = indices[r] ?? 0;\n indices[r] = tmp ?? 0;\n }\n }\n\n // Update each coefficient using coordinate descent\n const iterOrder = indices ?? Array.from({ length: n }, (_, j) => j);\n for (const j of iterOrder) {\n const denom = colNorm2[j] ?? 0;\n\n // Skip features with zero variance (constant columns)\n if (denom === 0) {\n const prevW = w[j] ?? 0;\n if (prevW !== 0) {\n const delta = -prevW;\n for (let i = 0; i < m; i++) {\n yHat[i] = (yHat[i] ?? 0) + delta * getX(i, j);\n }\n maxChange = Math.max(maxChange, Math.abs(delta));\n }\n w[j] = 0;\n continue;\n }\n\n // Compute correlation between feature j and current residual\n // rho = (1/m) * Σᵢ xᵢⱼ * (yᵢ - ŷᵢ + wⱼ * xᵢⱼ)\n // The term (wⱼ * xᵢⱼ) adds back the contribution of feature j to the residual\n let rho = 0;\n for (let i = 0; i < m; i++) {\n const xij = getX(i, j);\n const yi = Number(y.data[y.offset + i] ?? 0) - (fitIntercept ? yMean : 0);\n // Current residual plus contribution from feature j\n const r = yi - (yHat[i] ?? 0) + (w[j] ?? 0) * xij;\n rho += xij * r;\n }\n rho *= invM;\n\n // Apply soft thresholding operator (proximal operator for L1 norm)\n // This is the key step that induces sparsity in Lasso\n // newW = soft_threshold(rho, α) / ||xⱼ||²\n let newW = this.softThreshold(rho, alpha) / denom;\n\n // Enforce non-negativity constraint if requested\n if (positive && newW < 0) {\n newW = 0;\n }\n\n // Compute change in coefficient\n const delta = newW - (w[j] ?? 0);\n\n // Update predictions incrementally if coefficient changed\n // This is much more efficient than recomputing all predictions\n // ŷ_new = ŷ_old + Δwⱼ * xⱼ\n if (delta !== 0) {\n for (let i = 0; i < m; i++) {\n yHat[i] = (yHat[i] ?? 0) + delta * getX(i, j);\n }\n }\n\n // Update coefficient\n w[j] = newW;\n\n // Track maximum change for convergence check\n maxChange = Math.max(maxChange, Math.abs(delta));\n }\n\n // Check convergence: if no coefficient changed by more than tol, we're done\n if (maxChange < tol) {\n this.nIter_ = iter + 1;\n break;\n }\n }\n\n // Store final iteration count if we didn't converge early\n if (this.nIter_ === undefined) {\n this.nIter_ = maxIter;\n }\n\n // Rescale coefficients back to original feature space if normalized\n if (normalize && xScale) {\n for (let j = 0; j < n; j++) {\n const s = xScale[j] ?? 1;\n w[j] = s === 0 ? 0 : (w[j] ?? 0) / s;\n }\n }\n\n // Store final coefficients\n this.coef_ = tensor(w);\n\n // Compute intercept if needed\n // intercept = mean(y) - mean(X) @ coef\n // This accounts for the centering we did during optimization\n if (fitIntercept) {\n let xMeanDotW = 0;\n for (let j = 0; j < n; j++) {\n xMeanDotW += (xMean[j] ?? 0) * (w[j] ?? 
0);\n }\n this.intercept_ = yMean - xMeanDotW;\n } else {\n this.intercept_ = 0;\n }\n\n // Mark model as fitted\n this.fitted = true;\n return this;\n }\n\n /**\n * Soft thresholding operator (proximal operator for L1 norm).\n *\n * This is the key operation in Lasso that induces sparsity.\n *\n * Formula:\n * - If x > λ: return x - λ\n * - If x < -λ: return x + λ\n * - Otherwise: return 0\n *\n * Geometrically, this \"shrinks\" x towards zero by λ,\n * and sets it exactly to zero if |x| ≤ λ.\n *\n * **Time Complexity**: O(1)\n *\n * @param x - Input value\n * @param lambda - Threshold parameter (regularization strength)\n * @returns Soft-thresholded value\n */\n private softThreshold(x: number, lambda: number): number {\n // Guard against non-finite inputs\n if (!Number.isFinite(x) || !Number.isFinite(lambda)) {\n throw new DataValidationError(\"Non-finite value encountered during soft-thresholding\");\n }\n if (x > lambda) return x - lambda;\n if (x < -lambda) return x + lambda;\n return 0;\n }\n\n /**\n * Get the model coefficients (weights).\n *\n * Many coefficients will be exactly zero due to L1 regularization (sparsity).\n *\n * @returns Coefficient tensor of shape (n_features,)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coef(): Tensor {\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"Lasso must be fitted to access coefficients\");\n }\n return this.coef_;\n }\n\n /**\n * Get the intercept (bias term).\n *\n * @returns Intercept value\n * @throws {NotFittedError} If the model has not been fitted\n */\n get intercept(): number {\n if (!this.fitted) {\n throw new NotFittedError(\"Lasso must be fitted to access intercept\");\n }\n return this.intercept_;\n }\n\n /**\n * Get the number of iterations run by coordinate descent.\n *\n * @returns Number of iterations until convergence\n * @throws {NotFittedError} If the model has not been fitted\n */\n get nIter(): number | undefined {\n if (!this.fitted) {\n throw new NotFittedError(\"Lasso must be fitted to access nIter\");\n }\n return this.nIter_;\n }\n\n /**\n * Predict using the Lasso regression model.\n *\n * Computes predictions as: ŷ = X @ coef + intercept\n *\n * **Time Complexity**: O(nm) where n = samples, m = features\n * **Space Complexity**: O(n)\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n // Check if model has been fitted\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"Lasso must be fitted before prediction\");\n }\n\n // Validate input\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"Lasso\");\n\n const m = X.shape[0] ?? 0; // Number of samples to predict\n const n = X.shape[1] ?? 0; // Number of features\n const pred = Array(m).fill(0);\n\n // Compute predictions: ŷ[i] = Σⱼ X[i,j] * coef[j] + intercept\n for (let i = 0; i < m; i++) {\n // Compute weighted sum of features\n for (let j = 0; j < n; j++) {\n pred[i] +=\n Number(X.data[X.offset + i * n + j] ?? 0) *\n Number(this.coef_.data[this.coef_.offset + j] ?? 
0);\n }\n // Add intercept\n pred[i] += this.intercept_;\n }\n\n return tensor(pred);\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (!this.fitted) {\n throw new NotFittedError(\"Lasso must be fitted before scoring\");\n }\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const pred = this.predict(X);\n if (pred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${pred.size}, y=${y.size}`\n );\n }\n let ssRes = 0,\n ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i] ?? 0);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yVal = Number(y.data[y.offset + i] ?? 0);\n const predVal = Number(pred.data[pred.offset + i] ?? 0);\n ssRes += (yVal - predVal) ** 2;\n ssTot += (yVal - yMean) ** 2;\n }\n\n if (ssTot === 0) {\n return ssRes === 0 ? 1.0 : 0.0;\n }\n\n return 1 - ssRes / ssTot;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return { ...this.options };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param params - Parameters to set (alpha, maxIter, tol, fitIntercept, normalize)\n * @returns this\n * @throws {InvalidParameterError} If any parameter value is invalid\n */\n setParams(params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(params)) {\n switch (key) {\n case \"alpha\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\"alpha must be a finite number\", \"alpha\", value);\n }\n this.options.alpha = value;\n break;\n case \"maxIter\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\"maxIter must be a finite number\", \"maxIter\", value);\n }\n this.options.maxIter = value;\n break;\n case \"tol\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\"tol must be a finite number\", \"tol\", value);\n }\n this.options.tol = value;\n break;\n case \"fitIntercept\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n \"fitIntercept must be a boolean\",\n \"fitIntercept\",\n value\n );\n }\n this.options.fitIntercept = value;\n break;\n case \"normalize\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\"normalize must be a boolean\", \"normalize\", value);\n }\n this.options.normalize = value;\n break;\n case \"warmStart\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\"warmStart must be a boolean\", \"warmStart\", value);\n }\n this.options.warmStart = value;\n break;\n case \"positive\":\n if (typeof value !== \"boolean\") {\n throw new 
InvalidParameterError(\"positive must be a boolean\", \"positive\", value);\n }\n this.options.positive = value;\n break;\n case \"selection\":\n if (value !== \"cyclic\" && value !== \"random\") {\n throw new InvalidParameterError(\n `Invalid selection: ${String(value)}`,\n \"selection\",\n value\n );\n }\n this.options.selection = value;\n break;\n case \"randomState\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(value)}`,\n \"randomState\",\n value\n );\n }\n this.options.randomState = value;\n break;\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n}\n","import { DataValidationError, InvalidParameterError, NotFittedError, ShapeError } from \"../../core\";\nimport { lstsq } from \"../../linalg\";\nimport { dot, mean, sub, type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Regressor } from \"../base\";\n\n/**\n * Ordinary Least Squares Linear Regression.\n *\n * Fits a linear model with coefficients w = (w1, ..., wp) to minimize\n * the residual sum of squares between the observed targets and the\n * targets predicted by the linear approximation.\n *\n * @example\n * ```ts\n * import { LinearRegression } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * // Create training data\n * const X = tensor([[1, 1], [1, 2], [2, 2], [2, 3]]);\n * const y = tensor([1, 2, 2, 3]);\n *\n * // Fit model\n * const model = new LinearRegression({ fitIntercept: true });\n * model.fit(X, y);\n *\n * // Make predictions\n * const X_test = tensor([[3, 5]]);\n * const predictions = model.predict(X_test);\n *\n * // Get R^2 score\n * const score = model.score(X, y);\n * ```\n */\nexport class LinearRegression implements Regressor {\n /** Model coefficients (weights) of shape (n_features,) or (n_features, n_targets) */\n private coef_?: Tensor;\n\n /** Independent term (bias/intercept) in the linear model */\n private intercept_?: Tensor;\n\n /** Number of features seen during fit */\n private nFeaturesIn_?: number;\n\n /** Whether the model has been fitted */\n private fitted = false;\n\n private options: {\n fitIntercept?: boolean;\n normalize?: boolean;\n copyX?: boolean;\n };\n\n /**\n * Create a new Linear Regression model.\n *\n * @param options - Configuration options\n * @param options.fitIntercept - Whether to calculate the intercept (default: true)\n * @param options.normalize - Whether to normalize features before regression (default: false)\n * @param options.copyX - Whether to copy X or overwrite it (default: true)\n */\n constructor(\n options: {\n readonly fitIntercept?: boolean;\n readonly normalize?: boolean;\n readonly copyX?: boolean;\n } = {}\n ) {\n this.options = { ...options };\n }\n\n /**\n * Fit linear model using Ordinary Least Squares.\n *\n * Uses SVD-based least squares solver for numerical stability.\n * When fitIntercept is true, centers the data before fitting.\n *\n * **Algorithm Complexity**: O(n * p^2) where n = samples, p = features\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,). 
Multi-output regression is not currently supported.\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {DataValidationError} If X or y are empty\n */\n fit(X: Tensor, y: Tensor): this {\n // Validate inputs (dimensions, empty data, NaN/Inf)\n validateFitInputs(X, y);\n\n // Store number of features for validation during predict\n const nFeatures = X.shape[1] ?? 0;\n this.nFeaturesIn_ = nFeatures;\n\n // Handle intercept by centering the data\n // This is more numerically stable than adding a column of ones\n const fitIntercept = this.options.fitIntercept ?? true;\n const copyX = this.options.copyX ?? true;\n const allowInPlace = copyX === false && (X.dtype === \"float32\" || X.dtype === \"float64\");\n\n if (fitIntercept) {\n // Compute mean of X along axis 0 (column means)\n const X_mean = mean(X, 0);\n // Compute mean of y\n const y_mean = mean(y);\n\n // Center X and y by subtracting means\n let X_processed = allowInPlace\n ? this.centerDataInPlace(X, X_mean)\n : this.centerData(X, X_mean);\n const y_centered = this.centerData(y, y_mean);\n\n // Handle normalization if requested\n let X_scale: Tensor | undefined;\n if (this.options.normalize) {\n // Compute L2 norm of centered X\n X_scale = this.computeL2Norm(X_processed);\n // Divide by norm\n X_processed = allowInPlace\n ? this.scaleDataInPlace(X_processed, X_scale)\n : this.scaleData(X_processed, X_scale);\n }\n\n // Solve least squares on processed data: X_processed * w = y_centered\n const result = lstsq(X_processed, y_centered);\n let w = result.x;\n\n // If normalized, rescale coefficients: w_original = w_normalized / scale\n if (X_scale) {\n w = this.rescaleCoefs(w, X_scale);\n }\n this.coef_ = w;\n\n // Compute intercept: b = y_mean - X_mean @ coef\n const X_mean_dot_coef = dot(X_mean, this.coef_);\n this.intercept_ = sub(y_mean, X_mean_dot_coef);\n } else {\n // No intercept - solve directly using least squares\n const result = lstsq(X, y);\n this.coef_ = result.x;\n // intercept_ remains undefined when fitIntercept is false\n }\n\n // Mark model as fitted\n this.fitted = true;\n return this;\n }\n\n /**\n * Center data by subtracting the mean.\n *\n * @param data - Input tensor to center\n * @param dataMean - Mean tensor to subtract\n * @returns Centered tensor\n */\n private centerData(data: Tensor, dataMean: Tensor): Tensor {\n // For 1D data, manually subtract mean to avoid dtype mismatch\n if (data.ndim === 1) {\n const n = data.size;\n const meanVal = Number(dataMean.data[dataMean.offset] ?? 0);\n const centered: number[] = [];\n for (let i = 0; i < n; i++) {\n centered.push(Number(data.data[data.offset + i] ?? 0) - meanVal);\n }\n return tensor(centered);\n }\n\n // For 2D data, broadcast subtract row mean from each row\n const nSamples = data.shape[0] ?? 0;\n const nFeatures = data.shape[1] ?? 0;\n\n const result: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n const val = Number(data.data[data.offset + i * nFeatures + j] ?? 0);\n const meanVal = Number(dataMean.data[dataMean.offset + j] ?? 
0);\n row.push(val - meanVal);\n }\n result.push(row);\n }\n\n return tensor(result);\n }\n\n private centerDataInPlace(data: Tensor, dataMean: Tensor): Tensor {\n if (data.ndim === 1) {\n const n = data.size;\n const meanVal = Number(dataMean.data[dataMean.offset] ?? 0);\n for (let i = 0; i < n; i++) {\n const idx = data.offset + i;\n data.data[idx] = Number(data.data[idx] ?? 0) - meanVal;\n }\n return data;\n }\n\n const nSamples = data.shape[0] ?? 0;\n const nFeatures = data.shape[1] ?? 0;\n for (let i = 0; i < nSamples; i++) {\n const rowBase = data.offset + i * nFeatures;\n for (let j = 0; j < nFeatures; j++) {\n const idx = rowBase + j;\n const meanVal = Number(dataMean.data[dataMean.offset + j] ?? 0);\n data.data[idx] = Number(data.data[idx] ?? 0) - meanVal;\n }\n }\n return data;\n }\n\n private computeL2Norm(X: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const norms: number[] = new Array(nFeatures).fill(0);\n\n for (let j = 0; j < nFeatures; j++) {\n let sumSq = 0;\n for (let i = 0; i < nSamples; i++) {\n const val = Number(X.data[X.offset + i * nFeatures + j] ?? 0);\n sumSq += val * val;\n }\n norms[j] = Math.sqrt(sumSq);\n }\n return tensor(norms);\n }\n\n private scaleData(X: Tensor, scale: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const result: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n const val = Number(X.data[X.offset + i * nFeatures + j] ?? 0);\n const s = Number(scale.data[scale.offset + j] ?? 1);\n row.push(s === 0 ? 0 : val / s); // Handle zero norm\n }\n result.push(row);\n }\n return tensor(result);\n }\n\n private scaleDataInPlace(X: Tensor, scale: Tensor): Tensor {\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n for (let i = 0; i < nSamples; i++) {\n const rowBase = X.offset + i * nFeatures;\n for (let j = 0; j < nFeatures; j++) {\n const idx = rowBase + j;\n const s = Number(scale.data[scale.offset + j] ?? 1);\n X.data[idx] = s === 0 ? 0 : Number(X.data[idx] ?? 0) / s;\n }\n }\n return X;\n }\n\n private rescaleCoefs(coef: Tensor, scale: Tensor): Tensor {\n // coef is (n_features,) or (n_features, n_targets)\n // We need to divide each row j by scale[j]\n const nFeatures = coef.shape[0] ?? 0;\n const nTargets = coef.ndim > 1 ? (coef.shape[1] ?? 1) : 1;\n\n if (coef.ndim === 1) {\n const res: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n const c = Number(coef.data[coef.offset + j] ?? 0);\n const s = Number(scale.data[scale.offset + j] ?? 1);\n res.push(s === 0 ? 0 : c / s);\n }\n return tensor(res);\n }\n\n const result: number[][] = [];\n for (let j = 0; j < nFeatures; j++) {\n const row: number[] = [];\n const s = Number(scale.data[scale.offset + j] ?? 1);\n for (let k = 0; k < nTargets; k++) {\n const c = Number(coef.data[coef.offset + j * nTargets + k] ?? 0);\n row.push(s === 0 ? 
0 : c / s);\n }\n result.push(row);\n }\n return tensor(result);\n }\n\n /**\n * Predict using the linear model.\n *\n * Computes y_pred = X * coef_ + intercept_\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"LinearRegression must be fitted before prediction\");\n }\n\n // Validate input\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"LinearRegression\");\n\n // Compute predictions: y_pred = X * w + b\n const y_pred_raw = dot(X, this.coef_);\n\n // Add intercept if model was fitted with fitIntercept=true\n if (this.intercept_ !== undefined) {\n // Manually add intercept to avoid dtype mismatch\n const interceptVal = Number(this.intercept_.data[this.intercept_.offset] ?? 0);\n const result: number[] = [];\n for (let i = 0; i < y_pred_raw.size; i++) {\n result.push(Number(y_pred_raw.data[y_pred_raw.offset + i] ?? 0) + interceptVal);\n }\n return tensor(result);\n }\n\n return y_pred_raw;\n }\n\n /**\n * Return the coefficient of determination R^2 of the prediction.\n *\n * R^2 = 1 - (SS_res / SS_tot)\n *\n * Where:\n * - SS_res = Σ(y_true - y_pred)^2 (residual sum of squares)\n * - SS_tot = Σ(y_true - y_mean)^2 (total sum of squares)\n *\n * Best possible score is 1.0, and it can be negative (worse than random).\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (!this.fitted) {\n throw new NotFittedError(\"LinearRegression must be fitted before scoring\");\n }\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n\n // Get predictions\n const y_pred = this.predict(X);\n if (y_pred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${y_pred.size}, y=${y.size}`\n );\n }\n\n // Compute residual sum of squares: SS_res = Σ(y_true - y_pred)^2\n // Manually compute to avoid dtype mismatch\n let ss_res = 0;\n for (let i = 0; i < y.size; i++) {\n const diff = Number(y.data[y.offset + i] ?? 0) - Number(y_pred.data[y_pred.offset + i] ?? 0);\n ss_res += diff * diff;\n }\n\n // Compute total sum of squares: SS_tot = Σ(y_true - y_mean)^2\n const y_mean_tensor = mean(y);\n const y_mean_val = Number(y_mean_tensor.data[y_mean_tensor.offset] ?? 0);\n let ss_tot = 0;\n for (let i = 0; i < y.size; i++) {\n const diff = Number(y.data[y.offset + i] ?? 0) - y_mean_val;\n ss_tot += diff * diff;\n }\n\n // R^2 = 1 - (SS_res / SS_tot)\n // Handle edge case where ss_tot is 0 (constant y)\n if (ss_tot === 0) {\n return ss_res === 0 ? 
1.0 : 0.0;\n }\n\n return 1 - ss_res / ss_tot;\n }\n\n /**\n * Get the model coefficients (weights).\n *\n * @returns Coefficient tensor of shape (n_features,) or (n_features, n_targets)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coef(): Tensor {\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"LinearRegression must be fitted to access coefficients\");\n }\n return this.coef_;\n }\n\n /**\n * Get the intercept (bias term).\n *\n * @returns Intercept value or tensor\n * @throws {NotFittedError} If the model has not been fitted\n */\n get intercept(): Tensor | undefined {\n if (!this.fitted) {\n throw new NotFittedError(\"LinearRegression must be fitted to access intercept\");\n }\n return this.intercept_;\n }\n\n /**\n * Get parameters for this estimator.\n *\n * @returns Object containing all parameters\n */\n getParams(): Record<string, unknown> {\n return {\n fitIntercept: this.options.fitIntercept ?? true,\n normalize: this.options.normalize ?? false,\n copyX: this.options.copyX ?? true,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param params - Parameters to set\n * @returns this - The estimator\n */\n setParams(_params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(_params)) {\n switch (key) {\n case \"fitIntercept\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n `fitIntercept must be a boolean; received ${String(value)}`,\n \"fitIntercept\",\n value\n );\n }\n this.options.fitIntercept = value;\n break;\n case \"normalize\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n `normalize must be a boolean; received ${String(value)}`,\n \"normalize\",\n value\n );\n }\n this.options.normalize = value;\n break;\n case \"copyX\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n `copyX must be a boolean; received ${String(value)}`,\n \"copyX\",\n value\n );\n }\n this.options.copyX = value;\n break;\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n}\n","import {\n DataValidationError,\n DeepboxError,\n InvalidParameterError,\n NotFittedError,\n ShapeError,\n} from \"../../core\";\nimport { reshape, type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier } from \"../base\";\n\n/**\n * Logistic Regression (Binary and Multiclass Classification).\n *\n * Logistic regression uses the logistic (sigmoid) function to model\n * the probability of class membership.\n *\n * @example\n * ```ts\n * import { LogisticRegression } from 'deepbox/ml';\n *\n * // Binary classification\n * const model = new LogisticRegression({ C: 1.0, maxIter: 100 });\n * model.fit(X_train, y_train);\n *\n * const predictions = model.predict(X_test);\n * const probabilities = model.predictProba(X_test);\n *\n * // Multiclass classification\n * const multiModel = new LogisticRegression({\n * multiClass: 'multinomial',\n * solver: 'lbfgs'\n * });\n * multiModel.fit(X_train_multi, y_train_multi);\n * ```\n *\n * @category Linear Models\n * @implements {Classifier}\n */\nexport class LogisticRegression implements Classifier {\n private options: {\n penalty?: \"l2\" | \"none\";\n tol?: number;\n C?: number;\n fitIntercept?: boolean;\n maxIter?: number;\n learningRate?: number;\n multiClass?: \"ovr\" | \"auto\";\n };\n\n private coef_?: Tensor; // Shape (n_features,) for binary, 
(n_classes, n_features) for multiclass\n private intercept_?: number | number[]; // Scalar for binary, Array for multiclass\n private nFeaturesIn_?: number;\n private classes_?: Tensor;\n private fitted = false;\n private multiclass_ = false;\n\n /**\n * Create a new Logistic Regression classifier.\n *\n * @param options - Configuration options\n * @param options.penalty - Regularization type: 'l2' or 'none' (default: 'l2')\n * @param options.C - Inverse regularization strength (default: 1.0). Must be > 0.\n * @param options.tol - Tolerance for stopping criterion (default: 1e-4)\n * @param options.maxIter - Maximum number of iterations (default: 100)\n * @param options.fitIntercept - Whether to fit intercept (default: true)\n * @param options.learningRate - Learning rate for gradient descent (default: 0.1)\n * @param options.multiClass - Multiclass strategy: 'ovr' (One-vs-Rest) or 'auto' (default: 'auto')\n */\n constructor(\n options: {\n readonly penalty?: \"l2\" | \"none\";\n readonly tol?: number;\n readonly C?: number;\n readonly fitIntercept?: boolean;\n readonly maxIter?: number;\n readonly learningRate?: number;\n readonly multiClass?: \"ovr\" | \"auto\";\n } = {}\n ) {\n this.options = { ...options };\n\n const penalty = this.options.penalty ?? \"l2\";\n if (penalty !== \"l2\" && penalty !== \"none\") {\n throw new InvalidParameterError(\n `Only penalty='l2' or 'none' is supported; received ${String(penalty)}`,\n \"penalty\",\n penalty\n );\n }\n this.options.penalty = penalty;\n\n const multiClass = this.options.multiClass ?? \"auto\";\n if (multiClass !== \"ovr\" && multiClass !== \"auto\") {\n throw new InvalidParameterError(\n `multiClass must be 'ovr' or 'auto'; received ${String(multiClass)}`,\n \"multiClass\",\n multiClass\n );\n }\n this.options.multiClass = multiClass;\n\n if (this.options.C !== undefined && this.options.C <= 0) {\n throw new InvalidParameterError(\n `C must be > 0; received ${this.options.C}`,\n \"C\",\n this.options.C\n );\n }\n if (\n this.options.maxIter !== undefined &&\n (!Number.isFinite(this.options.maxIter) || this.options.maxIter <= 0)\n ) {\n throw new InvalidParameterError(\n `maxIter must be a positive finite number; received ${this.options.maxIter}`,\n \"maxIter\",\n this.options.maxIter\n );\n }\n if (\n this.options.tol !== undefined &&\n (!Number.isFinite(this.options.tol) || this.options.tol < 0)\n ) {\n throw new InvalidParameterError(\n `tol must be a finite number >= 0; received ${this.options.tol}`,\n \"tol\",\n this.options.tol\n );\n }\n if (\n this.options.learningRate !== undefined &&\n (!Number.isFinite(this.options.learningRate) || this.options.learningRate <= 0)\n ) {\n throw new InvalidParameterError(\n `learningRate must be a positive finite number; received ${this.options.learningRate}`,\n \"learningRate\",\n this.options.learningRate\n );\n }\n }\n\n /**\n * Numerically stable sigmoid function.\n *\n * Uses different formulations for positive and negative inputs\n * to avoid overflow:\n * - For z >= 0: σ(z) = 1 / (1 + exp(-z))\n * - For z < 0: σ(z) = exp(z) / (1 + exp(z))\n *\n * @param z - Input value\n * @returns Sigmoid output in [0, 1]\n */\n private sigmoid(z: number): number {\n // Guard against non-finite inputs\n if (!Number.isFinite(z)) {\n return z > 0 ? 
1 : 0;\n }\n if (z >= 0) {\n const ez = Math.exp(-z);\n return 1 / (1 + ez);\n }\n const ez = Math.exp(z);\n return ez / (1 + ez);\n }\n\n private ensureFitted(): void {\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"LogisticRegression must be fitted before using this method\");\n }\n }\n\n private _fitBinary(\n X: Tensor,\n y: Tensor,\n m: number,\n n: number,\n lambda: number\n ): { w: number[]; b: number } {\n const maxIter = this.options.maxIter ?? 100;\n const tol = this.options.tol ?? 1e-4;\n const lr = this.options.learningRate ?? 0.1;\n const fitIntercept = this.options.fitIntercept ?? true;\n\n // Initialize weights and bias\n const w = new Array<number>(n).fill(0);\n let b = 0;\n\n // Gradient descent training loop\n for (let iter = 0; iter < maxIter; iter++) {\n const gradW = new Array<number>(n).fill(0);\n let gradB = 0;\n\n // Compute gradients over all samples\n for (let i = 0; i < m; i++) {\n let z = fitIntercept ? b : 0;\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n z += Number(X.data[rowBase + j] ?? 0) * (w[j] ?? 0);\n }\n\n const yi = Number(y.data[y.offset + i] ?? 0);\n const pi = this.sigmoid(z);\n const error = pi - yi;\n\n gradB += error;\n for (let j = 0; j < n; j++) {\n gradW[j] = (gradW[j] ?? 0) + error * Number(X.data[rowBase + j] ?? 0);\n }\n }\n\n // Update weights with gradient descent + L2 regularization\n const invM = m === 0 ? 0 : 1 / m;\n let maxUpdate = 0;\n for (let j = 0; j < n; j++) {\n const g = (gradW[j] ?? 0) * invM + lambda * (w[j] ?? 0);\n const update = lr * g;\n w[j] = (w[j] ?? 0) - update;\n maxUpdate = Math.max(maxUpdate, Math.abs(update));\n }\n if (fitIntercept) {\n const gB = gradB * invM;\n const updateB = lr * gB;\n b -= updateB;\n maxUpdate = Math.max(maxUpdate, Math.abs(updateB));\n }\n\n // Check convergence\n if (maxUpdate < tol) {\n break;\n }\n }\n return { w, b: fitIntercept ? b : 0 };\n }\n\n /**\n * Fit logistic regression model.\n *\n * Uses gradient descent with L2 regularization.\n * Supports binary and multiclass (One-vs-Rest) classification.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target labels of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {DataValidationError} If X or y are empty\n * @throws {InvalidParameterError} If C <= 0 or penalty is invalid\n */\n fit(X: Tensor, y: Tensor): this {\n // Validate inputs (dimensions, empty data, NaN/Inf)\n validateFitInputs(X, y);\n\n const m = X.shape[0] ?? 0;\n const n = X.shape[1] ?? 0;\n this.nFeaturesIn_ = n;\n\n // Identify unique classes\n const yData = new Float64Array(m);\n for (let i = 0; i < m; i++) {\n yData[i] = Number(y.data[y.offset + i]);\n }\n const uniqueClasses = [...new Set(yData)].sort((a, b) => a - b);\n this.classes_ = tensor(uniqueClasses);\n\n // Extract and validate hyperparameters\n const penalty = this.options.penalty ?? \"l2\";\n const C = this.options.C ?? 1.0;\n if (!(C > 0)) {\n throw new InvalidParameterError(`C must be > 0; received ${C}`, \"C\", C);\n }\n const lambda = penalty === \"l2\" ? 1 / C : 0;\n const multiClass = this.options.multiClass ?? 
\"auto\";\n\n if (uniqueClasses.length <= 2) {\n // Binary classification\n this.multiclass_ = false;\n\n // Map labels to 0/1 if they are not already\n let yBinary = y;\n if (uniqueClasses.length === 2 && (uniqueClasses[0] !== 0 || uniqueClasses[1] !== 1)) {\n // Map uniqueClasses[0] -> 0, uniqueClasses[1] -> 1\n const mappedData = new Float64Array(m);\n for (let i = 0; i < m; i++) {\n mappedData[i] = yData[i] === uniqueClasses[1] ? 1 : 0;\n }\n yBinary = tensor(mappedData);\n } else if (uniqueClasses.length === 1) {\n // If only 1 class, we still need 0/1 for the math, though it's degenerate\n const mappedData = new Float64Array(m);\n const target = uniqueClasses[0] === 1 ? 1 : 0; // If only class is 1, treat as all 1s\n mappedData.fill(target);\n yBinary = tensor(mappedData);\n } else {\n // Check if they are 0 and 1\n for (const val of uniqueClasses) {\n if (val !== 0 && val !== 1) {\n // Should be caught above, but as fallback\n throw new DataValidationError(\"Binary classification expects labels 0 and 1\");\n }\n }\n }\n\n const { w, b } = this._fitBinary(X, yBinary, m, n, lambda);\n this.coef_ = tensor(w);\n this.intercept_ = b;\n } else {\n // Multiclass (One-vs-Rest)\n if (multiClass !== \"ovr\" && multiClass !== \"auto\") {\n throw new InvalidParameterError(\n `multiClass must be 'ovr' or 'auto'; received ${String(multiClass)}`,\n \"multiClass\",\n multiClass\n );\n }\n this.multiclass_ = true;\n const nClasses = uniqueClasses.length;\n const allCoefs: number[] = []; // Flattened (nClasses * nFeatures)\n const allIntercepts: number[] = [];\n\n for (let k = 0; k < nClasses; k++) {\n const targetClass = uniqueClasses[k];\n // Create binary target for class k\n const yBinaryData = new Float64Array(m);\n for (let i = 0; i < m; i++) {\n yBinaryData[i] = yData[i] === targetClass ? 1 : 0;\n }\n const yBinary = tensor(yBinaryData);\n\n const { w, b } = this._fitBinary(X, yBinary, m, n, lambda);\n allCoefs.push(...w);\n allIntercepts.push(b);\n }\n\n const allCoefsTensor = tensor(allCoefs);\n this.coef_ = reshape(allCoefsTensor, [nClasses, n]);\n this.intercept_ = allIntercepts;\n }\n\n this.fitted = true;\n return this;\n }\n\n get classes(): Tensor | undefined {\n return this.classes_;\n }\n\n /**\n * Get the model coefficients (weights).\n *\n * @returns Coefficient tensor of shape (n_features,)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coef(): Tensor {\n this.ensureFitted();\n const coef = this.coef_;\n if (!coef) {\n throw new DeepboxError(\"Internal error: coef_ is missing after ensureFitted() \");\n }\n return coef;\n }\n\n /**\n * Get the intercept (bias term).\n *\n * @returns Intercept value (scalar for binary, array for multiclass)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get intercept(): number | number[] {\n this.ensureFitted();\n if (this.intercept_ === undefined) {\n return 0;\n }\n return this.intercept_;\n }\n\n predict(X: Tensor): Tensor {\n const proba = this.predictProba(X);\n const m = X.shape[0] ?? 
0;\n const pred = new Array<number>(m).fill(0);\n\n if (this.multiclass_) {\n const classes = this.classes_;\n if (!classes) {\n throw new NotFittedError(\"Model not fitted (classes_ missing)\");\n }\n const nClasses = classes.size;\n for (let i = 0; i < m; i++) {\n let maxProb = -1;\n let maxClassIdx = 0;\n for (let k = 0; k < nClasses; k++) {\n const p = Number(proba.data[proba.offset + i * nClasses + k]);\n if (p > maxProb) {\n maxProb = p;\n maxClassIdx = k;\n }\n }\n // Map index back to class label\n pred[i] = Number(classes.data[classes.offset + maxClassIdx]);\n }\n } else {\n const classes = this.classes_;\n if (!classes) {\n throw new NotFittedError(\"Model not fitted (classes_ missing)\");\n }\n\n // Handle binary classification mapping\n // If we have 2 classes, map 0->classes[0], 1->classes[1]\n // If we have 1 class, we always predict that class (degenerate case)\n if (classes.size === 1) {\n const cls = Number(classes.data[classes.offset]);\n pred.fill(cls);\n } else {\n const cls0 = Number(classes.data[classes.offset]);\n const cls1 = Number(classes.data[classes.offset + 1]);\n\n for (let i = 0; i < m; i++) {\n const p1 = Number(proba.data[proba.offset + i * 2 + 1]);\n pred[i] = p1 >= 0.5 ? cls1 : cls0;\n }\n }\n }\n return tensor(pred);\n }\n\n /**\n * Predict class probabilities for samples.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Probabilities of shape (n_samples, n_classes)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n this.ensureFitted();\n\n const coef = this.coef_;\n if (!coef) {\n throw new DeepboxError(\"Internal error: coef_ is missing after ensureFitted()\");\n }\n\n // Validate input\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"LogisticRegression\");\n\n const m = X.shape[0] ?? 0;\n const n = X.shape[1] ?? 0;\n\n if (this.multiclass_) {\n const nClasses = this.classes_?.size ?? 0;\n const proba = new Float64Array(m * nClasses);\n const interceptValue = this.intercept_;\n if (!Array.isArray(interceptValue)) {\n throw new DeepboxError(\"Internal error: intercept_ must be an array for multiclass\");\n }\n\n for (let i = 0; i < m; i++) {\n const rowBase = X.offset + i * n;\n let sumExp = 0;\n const scores = new Array<number>(nClasses).fill(0);\n\n // Compute scores for each class\n for (let k = 0; k < nClasses; k++) {\n let z = interceptValue[k] ?? 0;\n const coefRowBase = coef.offset + k * n;\n for (let j = 0; j < n; j++) {\n z += Number(X.data[rowBase + j] ?? 0) * Number(coef.data[coefRowBase + j] ?? 0);\n }\n // For OvR, we apply sigmoid to get probability of class k vs rest\n // Then we normalize these probabilities to sum to 1 (scikit-learn approach)\n scores[k] = this.sigmoid(z);\n sumExp += scores[k] ?? 0;\n }\n\n // Normalize\n for (let k = 0; k < nClasses; k++) {\n // If sum is 0 (unlikely with sigmoid), avoid NaN\n proba[i * nClasses + k] = sumExp > 0 ? (scores[k] ?? 
0) / sumExp : 1.0 / nClasses;\n }\n }\n\n return tensor(proba, { dtype: \"float64\" }).reshape([m, nClasses]);\n } else {\n // Binary case\n const proba = new Array<number>(m * 2).fill(0);\n const interceptValue = this.intercept_;\n if (Array.isArray(interceptValue) || typeof interceptValue !== \"number\") {\n throw new DeepboxError(\"Internal error: intercept_ must be a number for binary case\");\n }\n\n for (let i = 0; i < m; i++) {\n let z = interceptValue;\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n z += Number(X.data[rowBase + j] ?? 0) * Number(coef.data[coef.offset + j] ?? 0);\n }\n\n const p1 = this.sigmoid(z);\n proba[i * 2 + 0] = 1 - p1;\n proba[i * 2 + 1] = p1;\n }\n\n return tensor(proba).reshape([m, 2]);\n }\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const pred = this.predict(X);\n if (pred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${pred.size}, y=${y.size}`\n );\n }\n let correct = 0;\n\n for (let i = 0; i < y.size; i++) {\n if (Number(pred.data[pred.offset + i] ?? 0) === Number(y.data[y.offset + i] ?? 
0)) {\n correct++;\n }\n }\n return correct / y.size;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return { ...this.options };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param params - Parameters to set (maxIter, tol, C, learningRate, penalty, fitIntercept)\n * @returns this\n * @throws {InvalidParameterError} If any parameter value is invalid\n */\n setParams(params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(params)) {\n switch (key) {\n case \"maxIter\":\n if (typeof value !== \"number\" || !Number.isFinite(value) || value <= 0) {\n throw new InvalidParameterError(\n \"maxIter must be a positive finite number\",\n \"maxIter\",\n value\n );\n }\n this.options.maxIter = value;\n break;\n case \"tol\":\n if (typeof value !== \"number\" || !Number.isFinite(value) || value < 0) {\n throw new InvalidParameterError(\"tol must be a finite number >= 0\", \"tol\", value);\n }\n this.options.tol = value;\n break;\n case \"C\":\n if (typeof value !== \"number\" || !Number.isFinite(value) || value <= 0) {\n throw new InvalidParameterError(\"C must be a positive finite number\", \"C\", value);\n }\n this.options.C = value;\n break;\n case \"learningRate\":\n if (typeof value !== \"number\" || !Number.isFinite(value) || value <= 0) {\n throw new InvalidParameterError(\n \"learningRate must be a positive finite number\",\n \"learningRate\",\n value\n );\n }\n this.options.learningRate = value;\n break;\n case \"fitIntercept\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n \"fitIntercept must be a boolean\",\n \"fitIntercept\",\n value\n );\n }\n this.options.fitIntercept = value;\n break;\n case \"penalty\":\n if (value !== \"none\" && value !== \"l2\") {\n throw new InvalidParameterError(\n `Only penalty='l2' or 'none' is supported; received ${String(value)}`,\n \"penalty\",\n value\n );\n }\n this.options.penalty = value;\n break;\n case \"multiClass\":\n if (value !== \"ovr\" && value !== \"auto\") {\n throw new InvalidParameterError(\n `multiClass must be 'ovr' or 'auto'; received ${String(value)}`,\n \"multiClass\",\n value\n );\n }\n this.options.multiClass = value;\n break;\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n}\n","import { DataValidationError, InvalidParameterError, NotFittedError, ShapeError } from \"../../core\";\nimport { cholesky } from \"../../linalg/decomposition/cholesky\";\nimport { svd } from \"../../linalg/decomposition/svd\";\nimport { solveTriangular } from \"../../linalg/solvers/solve\";\nimport { dot, type Tensor, tensor, transpose } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Regressor } from \"../base\";\n\n/**\n * Ridge Regression (L2 Regularized Linear Regression).\n *\n * Ridge regression addresses multicollinearity by adding a penalty term\n * (L2 regularization) to the loss function.\n *\n * @example\n * ```ts\n * import { Ridge } from 'deepbox/ml';\n *\n * const model = new Ridge({ alpha: 0.5 });\n * model.fit(X_train, y_train);\n * const predictions = model.predict(X_test);\n * ```\n *\n * @category Linear Models\n * @implements {Regressor}\n */\nexport class Ridge implements Regressor {\n /** Configuration options for the Ridge regression model */\n private options: {\n alpha?: number;\n fitIntercept?: 
boolean;\n normalize?: boolean;\n solver?: \"auto\" | \"svd\" | \"cholesky\" | \"lsqr\" | \"sag\";\n maxIter?: number;\n tol?: number;\n };\n\n /** Model coefficients (weights) after fitting - shape (n_features,) */\n private coef_?: Tensor;\n\n /** Intercept (bias) term after fitting */\n private intercept_?: number;\n\n /** Number of features seen during fit - used for validation */\n private nFeaturesIn_?: number;\n\n /** Number of iterations run by the solver (for iterative solvers) */\n private nIter_: number | undefined;\n\n /** Whether the model has been fitted to data */\n private fitted = false;\n\n /**\n * Create a new Ridge Regression model.\n *\n * @param options - Configuration options\n * @param options.alpha - Regularization strength (default: 1.0). Must be >= 0.\n * @param options.fitIntercept - Whether to calculate the intercept (default: true).\n * @param options.normalize - Whether to normalize features before regression (default: false).\n * @param options.solver - Solver to use (default: 'auto'). Options: 'auto', 'svd', 'cholesky', 'lsqr', 'sag'.\n * @param options.maxIter - Maximum number of iterations for iterative solvers (default: 1000)\n * @param options.tol - Tolerance for stopping criterion (default: 1e-4)\n */\n constructor(\n options: {\n readonly alpha?: number;\n readonly fitIntercept?: boolean;\n readonly normalize?: boolean;\n readonly solver?: \"auto\" | \"svd\" | \"cholesky\" | \"lsqr\" | \"sag\";\n readonly maxIter?: number;\n readonly tol?: number;\n } = {}\n ) {\n this.options = { ...options };\n }\n\n /**\n * Fit Ridge regression model.\n *\n * Solves the regularized least squares problem:\n * minimize ||y - Xw||² + α||w||²\n *\n * Uses the closed-form solution:\n * w = (X^T X + αI)^(-1) X^T y\n *\n * **Time Complexity**: O(n²p + p³) where n = samples, p = features\n * **Space Complexity**: O(p²)\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator for method chaining\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {DataValidationError} If X or y are empty\n * @throws {InvalidParameterError} If alpha < 0\n */\n fit(X: Tensor, y: Tensor): this {\n // Validate inputs (dimensions, empty data, NaN/Inf)\n validateFitInputs(X, y);\n this.nIter_ = undefined;\n\n // Extract and validate regularization parameter\n const alpha = this.options.alpha ?? 1.0;\n if (!(alpha >= 0)) {\n throw new InvalidParameterError(`alpha must be >= 0; received ${alpha}`, \"alpha\", alpha);\n }\n\n // Determine whether to fit intercept\n const fitIntercept = this.options.fitIntercept ?? true;\n\n // Extract dimensions: m = number of samples, n = number of features\n const m = X.shape[0] ?? 0;\n const n = X.shape[1] ?? 0;\n\n // Store number of features for prediction validation\n this.nFeaturesIn_ = n;\n\n // Compute means for centering (if fitIntercept is true)\n // Centering improves numerical stability and allows intercept calculation\n // Note: By centering X and y, we ensure the intercept is not regularized.\n // The regularization penalty α||w||² only applies to the coefficients,\n // not the intercept term. 
This is the standard Ridge regression behavior.\n let yMean = 0;\n const xMean = new Array<number>(n).fill(0);\n\n if (fitIntercept) {\n // Compute sum of y values\n for (let i = 0; i < m; i++) {\n yMean += Number(y.data[y.offset + i] ?? 0);\n }\n\n // Compute sum of each feature column\n for (let i = 0; i < m; i++) {\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n xMean[j] = (xMean[j] ?? 0) + Number(X.data[rowBase + j] ?? 0);\n }\n }\n\n // Convert sums to means by dividing by number of samples\n const invM = m === 0 ? 0 : 1 / m;\n yMean *= invM;\n for (let j = 0; j < n; j++) {\n xMean[j] = (xMean[j] ?? 0) * invM;\n }\n }\n\n const normalize = this.options.normalize ?? false;\n const maxIter = this.options.maxIter ?? 1000;\n const tol = this.options.tol ?? 1e-4;\n\n let xScale: number[] | undefined;\n if (normalize) {\n xScale = new Array<number>(n).fill(0);\n for (let i = 0; i < m; i++) {\n const rowBase = X.offset + i * n;\n for (let j = 0; j < n; j++) {\n const centered = Number(X.data[rowBase + j] ?? 0) - (fitIntercept ? (xMean[j] ?? 0) : 0);\n xScale[j] = (xScale[j] ?? 0) + centered * centered;\n }\n }\n for (let j = 0; j < n; j++) {\n xScale[j] = Math.sqrt(xScale[j] ?? 0);\n }\n }\n\n const getX = (sampleIndex: number, featureIndex: number): number => {\n const raw = Number(X.data[X.offset + sampleIndex * n + featureIndex] ?? 0);\n const centered = raw - (fitIntercept ? (xMean[featureIndex] ?? 0) : 0);\n if (normalize && xScale) {\n const s = xScale[featureIndex] ?? 0;\n return s === 0 ? 0 : centered / s;\n }\n return centered;\n };\n\n const getY = (sampleIndex: number): number => {\n const raw = Number(y.data[y.offset + sampleIndex] ?? 0);\n return fitIntercept ? raw - yMean : raw;\n };\n\n // Solve the linear system (X^T X + αI) w = X^T y\n // This gives us the optimal coefficients w\n let coefTensor: Tensor;\n const solver = this.options.solver ?? \"auto\";\n\n if (solver === \"sag\") {\n const res = this.solveSag(getX, getY, m, n, alpha, maxIter, tol);\n coefTensor = tensor(res.x);\n this.nIter_ = res.nIter;\n } else {\n // Compute X^T X + αI (Gram matrix with regularization)\n // This is the core of the Ridge regression solution\n // Time complexity: O(n²m) for computing X^T X\n const XTX = Array(n)\n .fill(0)\n .map(() => Array(n).fill(0));\n\n for (let i = 0; i < n; i++) {\n for (let j = 0; j < n; j++) {\n let sum = 0;\n\n // Compute (X^T X)[i,j] = Σ_k X[k,i] * X[k,j]\n for (let k = 0; k < m; k++) {\n const xi = getX(k, i);\n const xj = getX(k, j);\n sum += xi * xj;\n }\n\n // Add regularization term αI to diagonal\n // This ensures the matrix is positive definite and invertible\n const xtxRow = XTX[i];\n if (xtxRow) xtxRow[j] = sum + (i === j ? 
alpha : 0);\n }\n }\n\n // Compute X^T y (feature-target correlation vector)\n // Time complexity: O(nm)\n const XTy = new Array<number>(n).fill(0);\n\n for (let i = 0; i < n; i++) {\n let sum = 0;\n\n // Compute (X^T y)[i] = Σ_j X[j,i] * y[j]\n for (let j = 0; j < m; j++) {\n const yVal = getY(j);\n const xVal = getX(j, i);\n sum += xVal * yVal;\n }\n XTy[i] = sum;\n }\n\n if (solver === \"lsqr\") {\n const res = this.solveConjugateGradient(XTX, XTy, maxIter, tol);\n coefTensor = tensor(res.x);\n this.nIter_ = res.nIter;\n } else if (solver === \"cholesky\" || solver === \"auto\") {\n try {\n const xtxTensor = tensor(XTX);\n const xtyTensor = tensor(XTy);\n const L = cholesky(xtxTensor);\n const y_ = solveTriangular(L, xtyTensor, true);\n coefTensor = solveTriangular(transpose(L), y_, false);\n } catch (e) {\n if (solver === \"auto\") {\n // Fallback to Gaussian elimination\n const res = this.solveLinearSystem(XTX, XTy);\n coefTensor = tensor(res);\n } else {\n throw e;\n }\n }\n } else if (solver === \"svd\") {\n const xtxTensor = tensor(XTX);\n const xtyTensor = tensor(XTy);\n const [U, s, Vt] = svd(xtxTensor);\n\n // w = V * S^-1 * U^T * y\n const Ut = transpose(U);\n const Uty = dot(Ut, xtyTensor);\n\n const sData = s.data;\n if (!(sData instanceof Float64Array)) {\n throw new DataValidationError(\"svd returned non-float64 singular values\");\n }\n const scaledData = new Float64Array(Uty.size);\n for (let i = 0; i < Uty.size; i++) {\n const val = Number(Uty.data[Uty.offset + i]);\n const sigma = sData[i] ?? 0;\n scaledData[i] = Math.abs(sigma) > 1e-15 ? val / sigma : 0;\n }\n const scaled = tensor(scaledData);\n\n const V = transpose(Vt);\n coefTensor = dot(V, scaled);\n } else {\n const res = this.solveLinearSystem(XTX, XTy);\n coefTensor = tensor(res);\n }\n }\n\n if (normalize && xScale) {\n coefTensor = this.rescaleCoefs(coefTensor, xScale);\n }\n\n this.coef_ = coefTensor;\n\n // Compute intercept if needed\n // intercept = mean(y) - mean(X) @ coef\n // This accounts for the centering we did earlier\n if (fitIntercept) {\n let xMeanDotW = 0;\n for (let j = 0; j < n; j++) {\n const wj = Number(coefTensor.data[coefTensor.offset + j] ?? 0);\n xMeanDotW += (xMean[j] ?? 0) * wj;\n }\n this.intercept_ = yMean - xMeanDotW;\n } else {\n this.intercept_ = 0;\n }\n\n // Mark model as fitted\n this.fitted = true;\n return this;\n }\n\n /**\n * Solve linear system Ax = b using Gaussian elimination with partial pivoting.\n *\n * This is a numerically stable method for solving dense linear systems.\n * For Ridge regression, A = X^T X + αI is symmetric positive definite,\n * so Cholesky decomposition would be more efficient, but Gaussian elimination\n * is more general and still provides good numerical stability.\n *\n * **Algorithm**:\n * 1. Forward elimination: Convert A to upper triangular form\n * 2. Partial pivoting: Swap rows to avoid division by small numbers\n * 3. Back substitution: Solve for x from bottom to top\n *\n * **Time Complexity**: O(n³)\n * **Space Complexity**: O(n²)\n *\n * @param A - Coefficient matrix (n × n)\n * @param b - Right-hand side vector (n × 1)\n * @returns Solution vector x such that Ax = b\n */\n private solveLinearSystem(A: number[][], b: number[]): number[] {\n const n = A.length;\n\n // Create augmented matrix [A | b]\n // This allows us to perform row operations on both A and b simultaneously\n const aug = A.map((row, i) => [...row, b[i] ?? 
0]);\n let maxAbs = 0;\n for (let i = 0; i < n; i++) {\n const row = aug[i];\n if (!row) continue;\n for (let j = 0; j < n; j++) {\n const v = Math.abs(row[j] ?? 0);\n if (v > maxAbs) maxAbs = v;\n }\n }\n if (maxAbs === 0 || !Number.isFinite(maxAbs)) {\n throw new DataValidationError(\"Matrix is singular or ill-conditioned\");\n }\n const tol = Number.EPSILON * n * maxAbs;\n\n // Forward elimination with partial pivoting\n for (let i = 0; i < n; i++) {\n // Find pivot: row with largest absolute value in column i\n // This improves numerical stability by avoiding division by small numbers\n let maxRow = i;\n for (let k = i + 1; k < n; k++) {\n if (Math.abs(aug[k]?.[i] ?? 0) > Math.abs(aug[maxRow]?.[i] ?? 0)) {\n maxRow = k;\n }\n }\n\n // Swap rows i and maxRow\n const augI = aug[i] ?? [];\n const augMax = aug[maxRow] ?? [];\n aug[i] = augMax;\n aug[maxRow] = augI;\n\n const pivot = aug[i]?.[i] ?? 0;\n if (!Number.isFinite(pivot) || Math.abs(pivot) <= tol) {\n throw new DataValidationError(\"Matrix is singular or ill-conditioned\");\n }\n\n // Eliminate column i in rows below i\n for (let k = i + 1; k < n; k++) {\n // Compute multiplier: c = A[k,i] / A[i,i]\n const c = (aug[k]?.[i] ?? 0) / pivot;\n const augK = aug[k];\n\n if (augK) {\n // Subtract c * row_i from row_k\n for (let j = i; j <= n; j++) {\n augK[j] = (augK[j] ?? 0) - c * (aug[i]?.[j] ?? 0);\n }\n }\n }\n }\n\n // Back substitution: solve upper triangular system\n const x = Array(n).fill(0);\n for (let i = n - 1; i >= 0; i--) {\n // Start with b[i]\n x[i] = aug[i]?.[n] ?? 0;\n\n // Subtract contributions from already-solved variables\n for (let j = i + 1; j < n; j++) {\n x[i] = (x[i] ?? 0) - (aug[i]?.[j] ?? 0) * (x[j] ?? 0);\n }\n\n // Divide by diagonal element\n const diag = aug[i]?.[i] ?? 0;\n if (!Number.isFinite(diag) || Math.abs(diag) <= tol) {\n throw new DataValidationError(\"Matrix is singular or ill-conditioned\");\n }\n x[i] = (x[i] ?? 0) / diag;\n }\n\n return x;\n }\n\n private solveConjugateGradient(\n A: number[][],\n b: number[],\n maxIter: number,\n tol: number\n ): { x: number[]; nIter: number } {\n const n = A.length;\n const x = new Array<number>(n).fill(0);\n const r = new Array<number>(n).fill(0);\n\n let rsOld = 0;\n for (let i = 0; i < n; i++) {\n const bi = b[i] ?? 0;\n r[i] = bi;\n rsOld += bi * bi;\n }\n\n if (rsOld === 0) {\n return { x, nIter: 0 };\n }\n\n const p = r.slice();\n const tolSq = tol * tol;\n let nIter = 0;\n\n for (let iter = 0; iter < maxIter; iter++) {\n const Ap = new Array<number>(n).fill(0);\n for (let i = 0; i < n; i++) {\n let sum = 0;\n const row = A[i];\n if (!row) continue;\n for (let j = 0; j < n; j++) {\n sum += (row[j] ?? 0) * (p[j] ?? 0);\n }\n Ap[i] = sum;\n }\n\n let denom = 0;\n for (let i = 0; i < n; i++) {\n denom += (p[i] ?? 0) * (Ap[i] ?? 0);\n }\n if (!Number.isFinite(denom) || denom === 0) {\n throw new DataValidationError(\n \"Conjugate gradient failed: denominator is zero or non-finite\"\n );\n }\n\n const alpha = rsOld / denom;\n for (let i = 0; i < n; i++) {\n x[i] = (x[i] ?? 0) + alpha * (p[i] ?? 0);\n r[i] = (r[i] ?? 0) - alpha * (Ap[i] ?? 0);\n }\n\n let rsNew = 0;\n for (let i = 0; i < n; i++) {\n const ri = r[i] ?? 0;\n rsNew += ri * ri;\n }\n nIter = iter + 1;\n if (rsNew < tolSq) {\n break;\n }\n\n const beta = rsNew / rsOld;\n for (let i = 0; i < n; i++) {\n p[i] = (r[i] ?? 0) + beta * (p[i] ?? 
0);\n }\n rsOld = rsNew;\n }\n\n return { x, nIter };\n }\n\n private solveSag(\n getX: (sampleIndex: number, featureIndex: number) => number,\n getY: (sampleIndex: number) => number,\n nSamples: number,\n nFeatures: number,\n alpha: number,\n maxIter: number,\n tol: number\n ): { x: number[]; nIter: number } {\n const w = new Array<number>(nFeatures).fill(0);\n const avgGrad = new Array<number>(nFeatures).fill(0);\n const residuals = new Array<number>(nSamples).fill(0);\n\n let maxNormSq = 0;\n for (let i = 0; i < nSamples; i++) {\n let normSq = 0;\n for (let j = 0; j < nFeatures; j++) {\n const xij = getX(i, j);\n normSq += xij * xij;\n }\n if (normSq > maxNormSq) {\n maxNormSq = normSq;\n }\n }\n\n const scale = nSamples === 0 ? 1 : nSamples;\n const L = maxNormSq * scale + alpha;\n const step = L > 0 ? 1 / L : 1;\n\n let nIter = 0;\n for (let iter = 0; iter < maxIter; iter++) {\n let maxUpdate = 0;\n\n for (let i = 0; i < nSamples; i++) {\n let dotProd = 0;\n for (let j = 0; j < nFeatures; j++) {\n dotProd += (w[j] ?? 0) * getX(i, j);\n }\n\n const yi = getY(i);\n const newResidual = dotProd - yi;\n const delta = newResidual - (residuals[i] ?? 0);\n residuals[i] = newResidual;\n\n if (delta !== 0) {\n for (let j = 0; j < nFeatures; j++) {\n avgGrad[j] = (avgGrad[j] ?? 0) + delta * getX(i, j);\n }\n }\n\n for (let j = 0; j < nFeatures; j++) {\n const grad = (avgGrad[j] ?? 0) + alpha * (w[j] ?? 0);\n const update = step * grad;\n w[j] = (w[j] ?? 0) - update;\n if (Math.abs(update) > maxUpdate) {\n maxUpdate = Math.abs(update);\n }\n }\n }\n\n nIter = iter + 1;\n if (maxUpdate < tol) {\n break;\n }\n }\n\n return { x: w, nIter };\n }\n\n private rescaleCoefs(coef: Tensor, scale: number[]): Tensor {\n const nFeatures = coef.shape[0] ?? 0;\n const result: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n const c = Number(coef.data[coef.offset + j] ?? 0);\n const s = scale[j] ?? 1;\n result.push(s === 0 ? 0 : c / s);\n }\n return tensor(result);\n }\n\n /**\n * Predict using the Ridge regression model.\n *\n * Computes predictions as: ŷ = X @ coef + intercept\n *\n * **Time Complexity**: O(nm) where n = samples, m = features\n * **Space Complexity**: O(n)\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n // Check if model has been fitted\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"Ridge must be fitted before prediction\");\n }\n\n // Validate input\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"Ridge\");\n\n const m = X.shape[0] ?? 0; // Number of samples to predict\n const n = X.shape[1] ?? 0; // Number of features\n const pred = Array(m).fill(0);\n\n // Compute predictions: ŷ[i] = Σ_j X[i,j] * coef[j] + intercept\n for (let i = 0; i < m; i++) {\n let sum = this.intercept_ ?? 0; // Start with intercept\n\n // Add weighted sum of features\n for (let j = 0; j < n; j++) {\n sum +=\n Number(X.data[X.offset + i * n + j] ?? 0) *\n Number(this.coef_.data[this.coef_.offset + j] ?? 
0);\n }\n pred[i] = sum;\n }\n\n return tensor(pred);\n }\n\n /**\n * Return the coefficient of determination R² of the prediction.\n *\n * R² (R-squared) measures the proportion of variance in y explained by the model.\n * Formula: R² = 1 - (SS_res / SS_tot)\n *\n * Where:\n * - SS_res = Σ(y_true - y_pred)² (residual sum of squares)\n * - SS_tot = Σ(y_true - y_mean)² (total sum of squares)\n *\n * **Interpretation**:\n * - R² = 1: Perfect predictions\n * - R² = 0: Model performs as well as predicting the mean\n * - R² < 0: Model performs worse than predicting the mean\n *\n * **Time Complexity**: O(n) where n = number of samples\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True values of shape (n_samples,)\n * @returns R² score (best possible score is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional\n */\n score(X: Tensor, y: Tensor): number {\n // Check if model has been fitted\n if (!this.fitted) {\n throw new NotFittedError(\"Ridge must be fitted before scoring\");\n }\n\n // Validate y dimensions\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n\n // Get predictions\n const pred = this.predict(X);\n if (pred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${pred.size}, y=${y.size}`\n );\n }\n\n let ssRes = 0; // Residual sum of squares\n let ssTot = 0; // Total sum of squares\n let yMean = 0; // Mean of y\n\n // Compute mean of y\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i] ?? 0);\n }\n yMean /= y.size;\n\n // Compute SS_res and SS_tot\n for (let i = 0; i < y.size; i++) {\n const yVal = Number(y.data[y.offset + i] ?? 0);\n const predVal = Number(pred.data[pred.offset + i] ?? 0);\n\n // Residual sum of squares: measures prediction error\n ssRes += (yVal - predVal) ** 2;\n\n // Total sum of squares: measures total variance in y\n ssTot += (yVal - yMean) ** 2;\n }\n\n // Handle edge case: constant y (zero variance)\n // If y is constant and predictions match, R² = 1\n // If y is constant but predictions don't match, R² = 0\n if (ssTot === 0) {\n return ssRes === 0 ? 
1.0 : 0.0;\n }\n\n // Compute R² = 1 - (SS_res / SS_tot)\n return 1 - ssRes / ssTot;\n }\n\n /**\n * Get parameters for this estimator.\n *\n * Returns a copy of all hyperparameters set during construction or via setParams.\n *\n * @returns Object containing all parameters with their current values\n */\n getParams(): Record<string, unknown> {\n return { ...this.options };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * Allows modifying hyperparameters after construction.\n * Note: Changing parameters requires refitting the model.\n *\n * @param params - Dictionary of parameters to set\n * @returns this - The estimator for method chaining\n * @throws {TypeError} If parameter value has wrong type\n * @throws {Error} If parameter name is unknown or value is invalid\n */\n setParams(params: Record<string, unknown>): this {\n for (const [key, value] of Object.entries(params)) {\n switch (key) {\n case \"alpha\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\n `alpha must be a finite number; received ${String(value)}`,\n \"alpha\",\n value\n );\n }\n this.options.alpha = value;\n break;\n\n case \"maxIter\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\n `maxIter must be a finite number; received ${String(value)}`,\n \"maxIter\",\n value\n );\n }\n this.options.maxIter = value;\n break;\n\n case \"tol\":\n if (typeof value !== \"number\" || !Number.isFinite(value)) {\n throw new InvalidParameterError(\n `tol must be a finite number; received ${String(value)}`,\n \"tol\",\n value\n );\n }\n this.options.tol = value;\n break;\n\n case \"fitIntercept\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n `fitIntercept must be a boolean; received ${String(value)}`,\n \"fitIntercept\",\n value\n );\n }\n this.options.fitIntercept = value;\n break;\n\n case \"normalize\":\n if (typeof value !== \"boolean\") {\n throw new InvalidParameterError(\n `normalize must be a boolean; received ${String(value)}`,\n \"normalize\",\n value\n );\n }\n this.options.normalize = value;\n break;\n\n case \"solver\":\n if (\n value !== \"auto\" &&\n value !== \"svd\" &&\n value !== \"cholesky\" &&\n value !== \"lsqr\" &&\n value !== \"sag\"\n ) {\n throw new InvalidParameterError(`Invalid solver: ${String(value)}`, \"solver\", value);\n }\n this.options.solver = value;\n break;\n\n default:\n throw new InvalidParameterError(`Unknown parameter: ${key}`, key, value);\n }\n }\n return this;\n }\n\n /**\n * Get the model coefficients (weights).\n *\n * @returns Coefficient tensor of shape (n_features,)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coef(): Tensor {\n if (!this.fitted || !this.coef_) {\n throw new NotFittedError(\"Ridge must be fitted to access coefficients\");\n }\n return this.coef_;\n }\n\n /**\n * Get the intercept (bias term).\n *\n * @returns Intercept value\n * @throws {NotFittedError} If the model has not been fitted\n */\n get intercept(): number {\n if (!this.fitted) {\n throw new NotFittedError(\"Ridge must be fitted to access intercept\");\n }\n return this.intercept_ ?? 
0;\n }\n\n /**\n * Get the number of iterations run by the solver.\n *\n * @returns Number of iterations (undefined for direct solvers)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get nIter(): number | undefined {\n if (!this.fitted) {\n throw new NotFittedError(\"Ridge must be fitted to access nIter\");\n }\n return this.nIter_;\n }\n}\n","import { InvalidParameterError, NotFittedError } from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { validateUnsupervisedFitInputs } from \"../_validation\";\n\ntype SparseRow = { indices: number[]; values: number[] };\ntype SparseMatrix = SparseRow[];\ntype SampleRow = { indices: number[]; qValues: number[] };\n\n/**\n * t-Distributed Stochastic Neighbor Embedding (t-SNE).\n *\n * A nonlinear dimensionality reduction technique for embedding high-dimensional\n * data into a low-dimensional space (typically 2D or 3D) for visualization.\n *\n * **Algorithm**: Exact t-SNE with an optional sampling-based approximation\n * - Computes pairwise affinities in high-dimensional space using Gaussian kernel (exact)\n * - Computes pairwise affinities in low-dimensional space using Student-t distribution\n * - Minimizes KL divergence between the two distributions\n *\n * **Scalability Note**:\n * Exact t-SNE is O(n^2) in time and memory. For large datasets, use\n * `method: \"approximate\"` (sampled neighbors + negative sampling) or reduce samples.\n *\n * @example\n * ```ts\n * import { TSNE } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]);\n *\n * const tsne = new TSNE({ nComponents: 2, perplexity: 5 });\n * const embedding = tsne.fitTransform(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.manifold.TSNE.html | scikit-learn TSNE}\n * @see van der Maaten, L.J.P.; Hinton, G.E. (2008). 
\"Visualizing High-Dimensional Data Using t-SNE\"\n */\nexport class TSNE {\n /** Number of dimensions in the embedding */\n private readonly nComponents: number;\n\n /** Perplexity parameter (related to number of nearest neighbors) */\n private readonly perplexity: number;\n\n /** Learning rate for gradient descent */\n private readonly learningRate: number;\n\n /** Number of iterations */\n private readonly nIter: number;\n\n /** Early exaggeration factor */\n private readonly earlyExaggeration: number;\n\n /** Number of iterations with early exaggeration */\n private readonly earlyExaggerationIter: number;\n\n /** Random seed for reproducibility */\n private readonly randomState: number | undefined;\n\n /** Minimum gradient norm for convergence */\n private readonly minGradNorm: number;\n\n /** Method for computing affinities and gradients */\n private readonly method: \"exact\" | \"approximate\";\n\n /** Maximum samples allowed for exact mode */\n private readonly maxExactSamples: number;\n\n /** Neighbor count for approximate mode */\n private readonly approximateNeighbors: number;\n\n /** Negative samples per point for approximate mode */\n private readonly negativeSamples: number;\n\n /** The embedded points after fitting */\n private embedding: number[][] = [];\n\n /** Whether the model has been fitted */\n private fitted = false;\n\n constructor(\n options: {\n readonly nComponents?: number;\n readonly perplexity?: number;\n readonly learningRate?: number;\n readonly nIter?: number;\n readonly earlyExaggeration?: number;\n readonly earlyExaggerationIter?: number;\n readonly randomState?: number;\n readonly minGradNorm?: number;\n /** \"exact\" uses full pairwise interactions; \"approximate\" uses sampling for large datasets. */\n readonly method?: \"exact\" | \"approximate\";\n /** Maximum samples allowed in exact mode before requiring approximate. */\n readonly maxExactSamples?: number;\n /** Number of neighbors to sample per point in approximate mode. */\n readonly approximateNeighbors?: number;\n /** Number of negative samples per point in approximate mode. */\n readonly negativeSamples?: number;\n } = {}\n ) {\n this.nComponents = options.nComponents ?? 2;\n this.perplexity = options.perplexity ?? 30;\n this.learningRate = options.learningRate ?? 200;\n this.nIter = options.nIter ?? 1000;\n this.earlyExaggeration = options.earlyExaggeration ?? 12;\n const earlyExaggerationIter = options.earlyExaggerationIter ?? 250;\n this.earlyExaggerationIter = Math.min(earlyExaggerationIter, this.nIter);\n this.randomState = options.randomState;\n this.minGradNorm = options.minGradNorm ?? 1e-7;\n this.method = options.method ?? \"exact\";\n this.maxExactSamples = options.maxExactSamples ?? 2000;\n this.approximateNeighbors =\n options.approximateNeighbors ?? Math.max(5, Math.floor(this.perplexity * 3));\n this.negativeSamples = options.negativeSamples ?? 
Math.max(10, Math.floor(this.perplexity * 2));\n\n if (!Number.isInteger(this.nComponents) || this.nComponents <= 0) {\n throw new InvalidParameterError(\n \"nComponents must be positive\",\n \"nComponents\",\n this.nComponents\n );\n }\n if (!Number.isFinite(this.perplexity) || this.perplexity <= 0) {\n throw new InvalidParameterError(\"perplexity must be positive\", \"perplexity\", this.perplexity);\n }\n if (!Number.isFinite(this.learningRate) || this.learningRate <= 0) {\n throw new InvalidParameterError(\n \"learningRate must be positive\",\n \"learningRate\",\n this.learningRate\n );\n }\n if (!Number.isInteger(this.nIter) || this.nIter <= 0) {\n throw new InvalidParameterError(\"nIter must be a positive integer\", \"nIter\", this.nIter);\n }\n if (!Number.isFinite(this.earlyExaggeration) || this.earlyExaggeration <= 0) {\n throw new InvalidParameterError(\n \"earlyExaggeration must be positive\",\n \"earlyExaggeration\",\n this.earlyExaggeration\n );\n }\n if (!Number.isInteger(earlyExaggerationIter) || earlyExaggerationIter < 0) {\n throw new InvalidParameterError(\n \"earlyExaggerationIter must be an integer >= 0\",\n \"earlyExaggerationIter\",\n earlyExaggerationIter\n );\n }\n if (!Number.isFinite(this.minGradNorm) || this.minGradNorm <= 0) {\n throw new InvalidParameterError(\n \"minGradNorm must be positive\",\n \"minGradNorm\",\n this.minGradNorm\n );\n }\n if (this.method !== \"exact\" && this.method !== \"approximate\") {\n throw new InvalidParameterError(\n \"method must be 'exact' or 'approximate'\",\n \"method\",\n this.method\n );\n }\n if (!Number.isInteger(this.maxExactSamples) || this.maxExactSamples <= 0) {\n throw new InvalidParameterError(\n \"maxExactSamples must be a positive integer\",\n \"maxExactSamples\",\n this.maxExactSamples\n );\n }\n if (!Number.isInteger(this.approximateNeighbors) || this.approximateNeighbors <= 0) {\n throw new InvalidParameterError(\n \"approximateNeighbors must be a positive integer\",\n \"approximateNeighbors\",\n this.approximateNeighbors\n );\n }\n if (!Number.isInteger(this.negativeSamples) || this.negativeSamples <= 0) {\n throw new InvalidParameterError(\n \"negativeSamples must be a positive integer\",\n \"negativeSamples\",\n this.negativeSamples\n );\n }\n if (options.randomState !== undefined && !Number.isFinite(options.randomState)) {\n throw new InvalidParameterError(\n \"randomState must be a finite number\",\n \"randomState\",\n options.randomState\n );\n }\n }\n\n /**\n * Compute pairwise squared Euclidean distances.\n */\n private computeDistances(X: number[][]): number[][] {\n const n = X.length;\n const distances: number[][] = [];\n\n for (let i = 0; i < n; i++) {\n const row: number[] = [];\n for (let j = 0; j < n; j++) {\n if (i === j) {\n row.push(0);\n } else {\n let dist = 0;\n const xi = X[i];\n const xj = X[j];\n if (xi && xj) {\n for (let k = 0; k < xi.length; k++) {\n const diff = (xi[k] ?? 0) - (xj[k] ?? 0);\n dist += diff * diff;\n }\n }\n row.push(dist);\n }\n }\n distances.push(row);\n }\n\n return distances;\n }\n\n /**\n * Compute squared Euclidean distance between two vectors.\n */\n private computeSquaredDistance(a: number[], b: number[]): number {\n let dist = 0;\n for (let k = 0; k < a.length; k++) {\n const diff = (a[k] ?? 0) - (b[k] ?? 
0);\n dist += diff * diff;\n }\n return dist;\n }\n\n /**\n * Sample unique indices excluding a single index.\n */\n private sampleIndices(n: number, exclude: number, k: number, rng: () => number): number[] {\n const result = new Set<number>();\n const maxAttempts = k * 10 + 100;\n let attempts = 0;\n\n while (result.size < k && attempts < maxAttempts) {\n const raw = Math.floor(rng() * (n - 1));\n const idx = raw >= exclude ? raw + 1 : raw;\n result.add(idx);\n attempts += 1;\n }\n\n if (result.size < k) {\n for (let i = 0; i < n && result.size < k; i++) {\n if (i !== exclude) {\n result.add(i);\n }\n }\n }\n\n return Array.from(result);\n }\n\n /**\n * Sample unique indices excluding a set of indices.\n */\n private sampleIndicesWithExclusions(\n n: number,\n exclusions: ReadonlySet<number>,\n k: number,\n rng: () => number\n ): number[] {\n const result = new Set<number>();\n const maxAttempts = k * 12 + 200;\n let attempts = 0;\n\n while (result.size < k && attempts < maxAttempts) {\n const idx = Math.floor(rng() * n);\n if (!exclusions.has(idx)) {\n result.add(idx);\n }\n attempts += 1;\n }\n\n if (result.size < k) {\n for (let i = 0; i < n && result.size < k; i++) {\n if (!exclusions.has(i)) {\n result.add(i);\n }\n }\n }\n\n return Array.from(result);\n }\n\n /**\n * Compute conditional probabilities P(j|i) using binary search for sigma.\n */\n private computeProbabilities(distances: number[][]): number[][] {\n const n = distances.length;\n const targetEntropy = Math.log(this.perplexity);\n const P: number[][] = [];\n\n for (let i = 0; i < n; i++) {\n const row: number[] = new Array(n).fill(0);\n\n // Binary search for sigma\n let sigmaMin = 1e-10;\n let sigmaMax = 1e10;\n let sigma = 1.0;\n\n for (let iter = 0; iter < 50; iter++) {\n // Compute probabilities with current sigma\n let sumExp = 0;\n for (let j = 0; j < n; j++) {\n if (i !== j) {\n const distRow = distances[i];\n const dist = distRow ? (distRow[j] ?? 0) : 0;\n sumExp += Math.exp(-dist / (2 * sigma * sigma));\n }\n }\n\n // Compute entropy\n let entropy = 0;\n for (let j = 0; j < n; j++) {\n if (i !== j) {\n const distRow = distances[i];\n const dist = distRow ? (distRow[j] ?? 0) : 0;\n const pij = Math.exp(-dist / (2 * sigma * sigma)) / (sumExp + 1e-10);\n if (pij > 1e-10) {\n entropy -= pij * Math.log(pij);\n }\n }\n }\n\n // Binary search adjustment\n if (Math.abs(entropy - targetEntropy) < 1e-5) {\n break;\n }\n\n if (entropy > targetEntropy) {\n sigmaMax = sigma;\n sigma = (sigma + sigmaMin) / 2;\n } else {\n sigmaMin = sigma;\n sigma = (sigma + sigmaMax) / 2;\n }\n }\n\n // Compute final probabilities\n let sumExp = 0;\n for (let j = 0; j < n; j++) {\n if (i !== j) {\n const distRow = distances[i];\n const dist = distRow ? (distRow[j] ?? 0) : 0;\n sumExp += Math.exp(-dist / (2 * sigma * sigma));\n }\n }\n\n for (let j = 0; j < n; j++) {\n if (i !== j) {\n const distRow = distances[i];\n const dist = distRow ? (distRow[j] ?? 0) : 0;\n row[j] = Math.exp(-dist / (2 * sigma * sigma)) / (sumExp + 1e-10);\n }\n }\n\n P.push(row);\n }\n\n // Symmetrize: P = (P + P') / (2n)\n const Psym: number[][] = [];\n for (let i = 0; i < n; i++) {\n const row: number[] = [];\n for (let j = 0; j < n; j++) {\n const pij = P[i]?.[j] ?? 0;\n const pji = P[j]?.[i] ?? 
0;\n row.push((pij + pji) / (2 * n));\n }\n Psym.push(row);\n }\n\n return Psym;\n }\n\n /**\n * Compute sparse joint probabilities using sampled neighbors (approximate).\n */\n private computeProbabilitiesSparse(\n X: number[][],\n neighbors: number,\n rng: () => number\n ): SparseMatrix {\n const n = X.length;\n const targetEntropy = Math.log(this.perplexity);\n const rows: SparseMatrix = [];\n\n for (let i = 0; i < n; i++) {\n const neighborIndices = this.sampleIndices(n, i, neighbors, rng);\n const distances = neighborIndices.map((j) =>\n this.computeSquaredDistance(X[i] ?? [], X[j] ?? [])\n );\n\n let sigmaMin = 1e-10;\n let sigmaMax = 1e10;\n let sigma = 1.0;\n\n for (let iter = 0; iter < 50; iter++) {\n let sumExp = 0;\n for (const dist of distances) {\n sumExp += Math.exp(-dist / (2 * sigma * sigma));\n }\n\n let entropy = 0;\n for (const dist of distances) {\n const pij = Math.exp(-dist / (2 * sigma * sigma)) / (sumExp + 1e-10);\n if (pij > 1e-10) {\n entropy -= pij * Math.log(pij);\n }\n }\n\n if (Math.abs(entropy - targetEntropy) < 1e-5) {\n break;\n }\n\n if (entropy > targetEntropy) {\n sigmaMax = sigma;\n sigma = (sigma + sigmaMin) / 2;\n } else {\n sigmaMin = sigma;\n sigma = (sigma + sigmaMax) / 2;\n }\n }\n\n let sumExp = 0;\n for (const dist of distances) {\n sumExp += Math.exp(-dist / (2 * sigma * sigma));\n }\n\n const values: number[] = distances.map(\n (dist) => Math.exp(-dist / (2 * sigma * sigma)) / (sumExp + 1e-10)\n );\n\n rows.push({ indices: neighborIndices, values });\n }\n\n return this.symmetrizeSparse(rows, n);\n }\n\n /**\n * Symmetrize sparse probabilities: P = (P + P^T) / (2n).\n */\n private symmetrizeSparse(rows: SparseMatrix, n: number): SparseMatrix {\n const maps: Array<Map<number, number>> = rows.map((row) => {\n const map = new Map<number, number>();\n for (let k = 0; k < row.indices.length; k++) {\n const j = row.indices[k];\n if (j === undefined) continue;\n const val = row.values[k] ?? 0;\n map.set(j, val);\n }\n return map;\n });\n\n const sym: SparseMatrix = [];\n for (let i = 0; i < n; i++) {\n const row = rows[i];\n if (!row) {\n sym.push({ indices: [], values: [] });\n continue;\n }\n const indices = row.indices.slice();\n const values: number[] = [];\n for (let k = 0; k < indices.length; k++) {\n const j = indices[k];\n if (j === undefined) continue;\n const pij = maps[i]?.get(j) ?? 0;\n const pji = maps[j]?.get(i) ?? 0;\n values.push((pij + pji) / (2 * n));\n }\n sym.push({ indices, values });\n }\n\n return sym;\n }\n\n /**\n * Initialize embedding with small random values.\n */\n private initializeEmbedding(n: number): number[][] {\n const embedding: number[][] = [];\n\n // Simple LCG random number generator for reproducibility\n let seed = this.randomState ?? 
Date.now();\n const random = (): number => {\n seed = (seed * 1103515245 + 12345) & 0x7fffffff;\n return seed / 0x7fffffff;\n };\n\n for (let i = 0; i < n; i++) {\n const row: number[] = [];\n for (let j = 0; j < this.nComponents; j++) {\n // Initialize with small random values (normal-ish distribution)\n const u1 = random() || 0.0001;\n const u2 = random();\n const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);\n row.push(z * 0.0001);\n }\n embedding.push(row);\n }\n\n return embedding;\n }\n\n /**\n * Compute Q distribution (Student-t with 1 degree of freedom).\n */\n private computeQ(Y: number[][]): { Q: number[][]; sumQ: number } {\n const n = Y.length;\n const Q: number[][] = [];\n let sumQ = 0;\n\n for (let i = 0; i < n; i++) {\n const row: number[] = [];\n for (let j = 0; j < n; j++) {\n if (i === j) {\n row.push(0);\n } else {\n let dist = 0;\n const yi = Y[i];\n const yj = Y[j];\n if (yi && yj) {\n for (let k = 0; k < this.nComponents; k++) {\n const diff = (yi[k] ?? 0) - (yj[k] ?? 0);\n dist += diff * diff;\n }\n }\n // Student-t distribution with 1 degree of freedom\n const qij = 1 / (1 + dist);\n row.push(qij);\n sumQ += qij;\n }\n }\n Q.push(row);\n }\n\n return { Q, sumQ };\n }\n\n /**\n * Compute approximate Q using sampled pairs.\n */\n private computeQApprox(\n Y: number[][],\n neighbors: SparseMatrix,\n negativeSamples: number,\n rng: () => number\n ): { rows: SampleRow[]; sumQ: number } {\n const n = Y.length;\n const rows: SampleRow[] = [];\n let sumQ = 0;\n\n for (let i = 0; i < n; i++) {\n const neighborIndices = neighbors[i]?.indices ?? [];\n const exclusion = new Set<number>(neighborIndices);\n exclusion.add(i);\n\n const negatives = this.sampleIndicesWithExclusions(n, exclusion, negativeSamples, rng);\n const indices = neighborIndices.concat(negatives);\n const qValues: number[] = [];\n\n const yi = Y[i] ?? [];\n for (let k = 0; k < indices.length; k++) {\n const j = indices[k] ?? 0;\n const yj = Y[j] ?? [];\n const dist = this.computeSquaredDistance(yi, yj);\n const qij = 1 / (1 + dist);\n qValues.push(qij);\n sumQ += qij;\n }\n\n rows.push({ indices, qValues });\n }\n\n return { rows, sumQ };\n }\n\n /**\n * Compute gradients of KL divergence.\n */\n private computeGradients(P: number[][], Q: number[][], sumQ: number, Y: number[][]): number[][] {\n const n = Y.length;\n const gradients: number[][] = [];\n\n for (let i = 0; i < n; i++) {\n const grad: number[] = new Array(this.nComponents).fill(0);\n\n for (let j = 0; j < n; j++) {\n if (i !== j) {\n const pij = P[i]?.[j] ?? 0;\n const qij = (Q[i]?.[j] ?? 0) / (sumQ + 1e-10);\n\n const yi = Y[i];\n const yj = Y[j];\n if (yi && yj) {\n // Compute (1 + ||y_i - y_j||^2)^-1\n let dist = 0;\n for (let k = 0; k < this.nComponents; k++) {\n const diff = (yi[k] ?? 0) - (yj[k] ?? 0);\n dist += diff * diff;\n }\n const mult = (pij - qij) * (1 / (1 + dist));\n\n for (let k = 0; k < this.nComponents; k++) {\n grad[k] = (grad[k] ?? 0) + 4 * mult * ((yi[k] ?? 0) - (yj[k] ?? 0));\n }\n }\n }\n }\n\n gradients.push(grad);\n }\n\n return gradients;\n }\n\n /**\n * Compute approximate gradients of KL divergence using sampled pairs.\n */\n private computeGradientsApprox(\n PMaps: Array<Map<number, number>>,\n QRows: SampleRow[],\n sumQ: number,\n Y: number[][],\n exaggeration: number\n ): number[][] {\n const n = Y.length;\n const gradients: number[][] = [];\n const denom = sumQ + 1e-10;\n\n for (let i = 0; i < n; i++) {\n const grad: number[] = new Array(this.nComponents).fill(0);\n const yi = Y[i] ?? 
[];\n const pMap = PMaps[i];\n const qRow = QRows[i];\n\n if (qRow) {\n for (let idx = 0; idx < qRow.indices.length; idx++) {\n const j = qRow.indices[idx] ?? 0;\n if (i === j) continue;\n const yj = Y[j] ?? [];\n\n const pijBase = pMap?.get(j) ?? 0;\n const pij = pijBase * exaggeration;\n const qUnnorm = qRow.qValues[idx] ?? 0;\n const qij = qUnnorm / denom;\n\n const mult = (pij - qij) * qUnnorm;\n for (let k = 0; k < this.nComponents; k++) {\n grad[k] = (grad[k] ?? 0) + 4 * mult * ((yi[k] ?? 0) - (yj[k] ?? 0));\n }\n }\n }\n\n gradients.push(grad);\n }\n\n return gradients;\n }\n\n /**\n * Fit the t-SNE model and return the embedding.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @returns Low-dimensional embedding of shape (n_samples, n_components)\n */\n fitTransform(X: Tensor): Tensor {\n validateUnsupervisedFitInputs(X);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n if (nSamples < 4) {\n throw new InvalidParameterError(\"t-SNE requires at least 4 samples\", \"nSamples\", nSamples);\n }\n if (this.perplexity >= nSamples) {\n throw new InvalidParameterError(\n `perplexity must be less than n_samples; received perplexity=${this.perplexity}, n_samples=${nSamples}`,\n \"perplexity\",\n this.perplexity\n );\n }\n if (this.method === \"exact\" && nSamples > this.maxExactSamples) {\n throw new InvalidParameterError(\n `Exact t-SNE is O(n^2) and limited to n_samples <= ${this.maxExactSamples}; received n_samples=${nSamples}. Use method=\"approximate\" or increase maxExactSamples.`,\n \"nSamples\",\n nSamples\n );\n }\n\n // Extract data\n const XData: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n }\n\n const baseSeed = this.randomState ?? Date.now();\n const rng = (() => {\n let state = baseSeed >>> 0;\n return (): number => {\n state = (state * 1664525 + 1013904223) >>> 0;\n return state / 2 ** 32;\n };\n })();\n\n const useApproximate = this.method === \"approximate\";\n const neighborCount = Math.min(this.approximateNeighbors, nSamples - 1);\n const availableNegatives = Math.max(0, nSamples - 1 - neighborCount);\n const negativeCount = Math.min(this.negativeSamples, availableNegatives);\n if (useApproximate && neighborCount < 2) {\n throw new InvalidParameterError(\n \"approximateNeighbors must be at least 2 for approximate mode\",\n \"approximateNeighbors\",\n neighborCount\n );\n }\n if (useApproximate && this.perplexity >= neighborCount) {\n throw new InvalidParameterError(\n `perplexity must be less than approximateNeighbors; received perplexity=${this.perplexity}, approximateNeighbors=${neighborCount}`,\n \"perplexity\",\n this.perplexity\n );\n }\n\n // Compute joint probabilities P\n const PExact = useApproximate ? null : this.computeProbabilities(this.computeDistances(XData));\n const PSparse = useApproximate\n ? this.computeProbabilitiesSparse(XData, neighborCount, rng)\n : null;\n\n // Initialize embedding\n const Y = this.initializeEmbedding(nSamples);\n\n // Momentum terms\n const velocities: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n velocities.push(new Array(this.nComponents).fill(0));\n }\n\n const momentum = 0.5;\n const finalMomentum = 0.8;\n\n const PMaps =\n useApproximate && PSparse\n ? 
PSparse.map((row) => {\n const map = new Map<number, number>();\n for (let k = 0; k < row.indices.length; k++) {\n const j = row.indices[k];\n if (j === undefined) continue;\n const val = row.values[k] ?? 0;\n map.set(j, val);\n }\n return map;\n })\n : [];\n\n // Gradient descent\n for (let iter = 0; iter < this.nIter; iter++) {\n // Early exaggeration\n const exaggeration = iter < this.earlyExaggerationIter ? this.earlyExaggeration : 1;\n\n let gradients: number[][];\n if (useApproximate && PSparse) {\n const { rows: QRows, sumQ } = this.computeQApprox(Y, PSparse, negativeCount, rng);\n gradients = this.computeGradientsApprox(PMaps, QRows, sumQ, Y, exaggeration);\n } else {\n // Apply exaggeration to P\n const Pexag: number[][] = [];\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nSamples; j++) {\n row.push((PExact?.[i]?.[j] ?? 0) * exaggeration);\n }\n Pexag.push(row);\n }\n\n // Compute Q\n const { Q, sumQ } = this.computeQ(Y);\n\n // Compute gradients\n gradients = this.computeGradients(Pexag, Q, sumQ, Y);\n }\n\n // Check convergence\n let gradNorm = 0;\n for (let i = 0; i < nSamples; i++) {\n const grad = gradients[i];\n if (grad) {\n for (let k = 0; k < this.nComponents; k++) {\n gradNorm += (grad[k] ?? 0) ** 2;\n }\n }\n }\n gradNorm = Math.sqrt(gradNorm);\n\n if (gradNorm < this.minGradNorm) {\n break;\n }\n\n // Update momentum\n const currentMomentum = iter < this.earlyExaggerationIter ? momentum : finalMomentum;\n\n // Update embedding with momentum\n for (let i = 0; i < nSamples; i++) {\n const yi = Y[i];\n const grad = gradients[i];\n const vel = velocities[i];\n\n if (yi && grad && vel) {\n for (let k = 0; k < this.nComponents; k++) {\n vel[k] = currentMomentum * (vel[k] ?? 0) - this.learningRate * (grad[k] ?? 0);\n yi[k] = (yi[k] ?? 0) + (vel[k] ?? 0);\n }\n }\n }\n\n // Center the embedding\n const center: number[] = new Array(this.nComponents).fill(0);\n for (let i = 0; i < nSamples; i++) {\n const yi = Y[i];\n if (yi) {\n for (let k = 0; k < this.nComponents; k++) {\n center[k] = (center[k] ?? 0) + (yi[k] ?? 0);\n }\n }\n }\n for (let k = 0; k < this.nComponents; k++) {\n center[k] = (center[k] ?? 0) / nSamples;\n }\n for (let i = 0; i < nSamples; i++) {\n const yi = Y[i];\n if (yi) {\n for (let k = 0; k < this.nComponents; k++) {\n yi[k] = (yi[k] ?? 0) - (center[k] ?? 
0);\n }\n }\n }\n }\n\n this.embedding = Y;\n this.fitted = true;\n\n return tensor(Y);\n }\n\n /**\n * Fit the model (same as fitTransform for t-SNE).\n */\n fit(X: Tensor): this {\n this.fitTransform(X);\n return this;\n }\n\n /**\n * Get the embedding.\n */\n get embeddingResult(): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"TSNE must be fitted before accessing embedding\");\n }\n return tensor(this.embedding);\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nComponents: this.nComponents,\n perplexity: this.perplexity,\n learningRate: this.learningRate,\n nIter: this.nIter,\n earlyExaggeration: this.earlyExaggeration,\n earlyExaggerationIter: this.earlyExaggerationIter,\n randomState: this.randomState,\n minGradNorm: this.minGradNorm,\n method: this.method,\n maxExactSamples: this.maxExactSamples,\n approximateNeighbors: this.approximateNeighbors,\n negativeSamples: this.negativeSamples,\n };\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier } from \"../base\";\n\n/**\n * Gaussian Naive Bayes classifier.\n *\n * Implements the Gaussian Naive Bayes algorithm for classification.\n * Assumes features follow a Gaussian (normal) distribution.\n *\n * **Algorithm**:\n * 1. Calculate mean and variance for each feature per class\n * 2. For prediction, calculate likelihood using Gaussian PDF\n * 3. Apply Bayes' theorem to get posterior probabilities\n * 4. Predict class with highest posterior probability\n *\n * **Time Complexity**:\n * - Training: O(n * d) where n=samples, d=features\n * - Prediction: O(k * d) per sample where k=classes\n *\n * @example\n * ```ts\n * import { GaussianNB } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [2, 3], [3, 4], [4, 5]]);\n * const y = tensor([0, 0, 1, 1]);\n *\n * const nb = new GaussianNB();\n * nb.fit(X, y);\n *\n * const predictions = nb.predict(tensor([[2.5, 3.5]]));\n * ```\n *\n * @see {@link https://en.wikipedia.org/wiki/Naive_Bayes_classifier | Wikipedia: Naive Bayes}\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.GaussianNB.html | scikit-learn GaussianNB}\n */\nexport class GaussianNB implements Classifier {\n private readonly varSmoothing: number;\n\n private classes_?: number[];\n private classPrior_?: Map<number, number>;\n private theta_?: Map<number, number[]>; // Mean for each class and feature\n private var_?: Map<number, number[]>; // Variance for each class and feature\n private nFeaturesIn_?: number;\n private fitted = false;\n\n /**\n * Create a new Gaussian Naive Bayes classifier.\n *\n * @param options - Configuration options\n * @param options.varSmoothing - Portion of largest variance added to variances for stability (default: 1e-9)\n */\n constructor(\n options: {\n readonly varSmoothing?: number;\n } = {}\n ) {\n this.varSmoothing = options.varSmoothing ?? 
1e-9;\n if (!Number.isFinite(this.varSmoothing) || this.varSmoothing < 0) {\n throw new InvalidParameterError(\n \"varSmoothing must be a finite number >= 0\",\n \"varSmoothing\",\n this.varSmoothing\n );\n }\n }\n\n /**\n * Fit Gaussian Naive Bayes classifier from the training set.\n *\n * Computes per-class mean, variance, and prior probabilities.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target class labels of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {DataValidationError} If zero variance encountered with varSmoothing=0\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n this.nFeaturesIn_ = nFeatures;\n\n // Get unique classes\n const classSet = new Set<number>();\n for (let i = 0; i < y.size; i++) {\n classSet.add(Number(y.data[y.offset + i]));\n }\n this.classes_ = Array.from(classSet).sort((a, b) => a - b);\n\n // Calculate class priors\n this.classPrior_ = new Map();\n for (const cls of this.classes_) {\n let count = 0;\n for (let i = 0; i < nSamples; i++) {\n if (Number(y.data[y.offset + i]) === cls) {\n count++;\n }\n }\n this.classPrior_.set(cls, count / nSamples);\n }\n\n // Calculate mean and variance for each class and feature\n this.theta_ = new Map();\n this.var_ = new Map();\n\n for (const cls of this.classes_) {\n const classSamples: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n if (Number(y.data[y.offset + i]) === cls) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n classSamples.push(sample);\n }\n }\n\n const means: number[] = [];\n const variances: number[] = [];\n\n for (let j = 0; j < nFeatures; j++) {\n // Calculate mean\n let sum = 0;\n for (const sample of classSamples) {\n sum += sample[j] ?? 0;\n }\n const mean = sum / classSamples.length;\n means.push(mean);\n\n // Calculate variance\n let varSum = 0;\n for (const sample of classSamples) {\n const diff = (sample[j] ?? 0) - mean;\n varSum += diff * diff;\n }\n const variance = varSum / classSamples.length;\n if (variance === 0 && this.varSmoothing === 0) {\n throw new DataValidationError(\n \"Zero variance encountered with varSmoothing=0; set varSmoothing > 0 to avoid degenerate Gaussians\"\n );\n }\n variances.push(variance);\n }\n\n this.theta_.set(cls, means);\n this.var_.set(cls, variances);\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted class labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"GaussianNB must be fitted before prediction\");\n }\n\n const proba = this.predictProba(X);\n const nSamples = proba.shape[0] ?? 0;\n const nClasses = proba.shape[1] ?? 
0;\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n let maxProb = -1;\n let maxClass = 0;\n\n for (let j = 0; j < nClasses; j++) {\n const prob = Number(proba.data[proba.offset + i * nClasses + j]);\n if (prob > maxProb) {\n maxProb = prob;\n maxClass = this.classes_?.[j] ?? 0;\n }\n }\n\n predictions.push(maxClass);\n }\n\n return tensor(predictions, { dtype: \"int32\" });\n }\n\n /**\n * Predict class probabilities for samples in X.\n *\n * Uses Bayes' theorem with Gaussian class-conditional likelihoods.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Class probability matrix of shape (n_samples, n_classes)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted || !this.classes_ || !this.classPrior_ || !this.theta_ || !this.var_) {\n throw new NotFittedError(\"GaussianNB must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"GaussianNB\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n const probabilities: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const logProbs: number[] = [];\n\n for (const cls of this.classes_) {\n const prior = this.classPrior_.get(cls) ?? 0;\n const means = this.theta_.get(cls) ?? [];\n const variances = this.var_.get(cls) ?? [];\n\n let logProb = Math.log(prior);\n\n for (let j = 0; j < nFeatures; j++) {\n const x = Number(X.data[X.offset + i * nFeatures + j]);\n const mean = means[j] ?? 0;\n const variance = (variances[j] ?? 0) + this.varSmoothing;\n\n // Gaussian PDF in log space\n logProb -= 0.5 * Math.log(2 * Math.PI * variance);\n logProb -= (x - mean) ** 2 / (2 * variance);\n }\n\n logProbs.push(logProb);\n }\n\n // Convert log probabilities to probabilities using log-sum-exp trick\n const maxLogProb = Math.max(...logProbs);\n const expProbs = logProbs.map((lp) => Math.exp(lp - maxLogProb));\n const sumExpProbs = expProbs.reduce((a, b) => a + b, 0);\n const probs = expProbs.map((ep) => ep / sumExpProbs);\n\n probabilities.push(probs);\n }\n\n return tensor(probabilities);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const yPred = this.predict(X);\n if (yPred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${yPred.size}, y=${y.size}`\n );\n }\n let correct = 0;\n\n for (let i = 0; i < y.size; i++) {\n if (Number(y.data[y.offset + i]) === Number(yPred.data[yPred.offset + i])) {\n correct++;\n }\n }\n\n return correct / y.size;\n }\n\n /**\n * Get the unique class labels discovered during fitting.\n *\n * @returns Tensor of class labels or undefined if not fitted\n */\n get classes(): Tensor | undefined {\n if (!this.fitted || !this.classes_) {\n return undefined;\n }\n return tensor(this.classes_, { dtype: \"int32\" });\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n varSmoothing: this.varSmoothing,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\"GaussianNB does not support setParams after construction\");\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier, Regressor } from \"../base\";\n\n/**\n * K-Nearest Neighbors base class.\n */\nabstract class KNeighborsBase {\n protected readonly nNeighbors: number;\n protected readonly weights: \"uniform\" | \"distance\";\n protected readonly metric: \"euclidean\" | \"manhattan\";\n\n protected XTrain_?: Tensor;\n protected yTrain_?: Tensor;\n protected nFeaturesIn_?: number;\n protected fitted = false;\n\n constructor(\n options: {\n readonly nNeighbors?: number;\n readonly weights?: \"uniform\" | \"distance\";\n readonly metric?: \"euclidean\" | \"manhattan\";\n } = {}\n ) {\n this.nNeighbors = options.nNeighbors ?? 5;\n this.weights = options.weights ?? \"uniform\";\n this.metric = options.metric ?? \"euclidean\";\n\n if (!Number.isInteger(this.nNeighbors) || this.nNeighbors < 1) {\n throw new InvalidParameterError(\n \"nNeighbors must be an integer >= 1\",\n \"nNeighbors\",\n this.nNeighbors\n );\n }\n if (this.weights !== \"uniform\" && this.weights !== \"distance\") {\n throw new InvalidParameterError(\n `weights must be \"uniform\" or \"distance\"; received ${String(this.weights)}`,\n \"weights\",\n this.weights\n );\n }\n if (this.metric !== \"euclidean\" && this.metric !== \"manhattan\") {\n throw new InvalidParameterError(\n `metric must be \"euclidean\" or \"manhattan\"; received ${String(this.metric)}`,\n \"metric\",\n this.metric\n );\n }\n }\n\n protected calculateDistance(x1: number[], x2: number[]): number {\n let dist = 0;\n if (this.metric === \"euclidean\") {\n for (let i = 0; i < x1.length; i++) {\n const diff = (x1[i] ?? 0) - (x2[i] ?? 0);\n dist += diff * diff;\n }\n return Math.sqrt(dist);\n } else {\n // manhattan\n for (let i = 0; i < x1.length; i++) {\n dist += Math.abs((x1[i] ?? 0) - (x2[i] ?? 
0));\n }\n return dist;\n }\n }\n\n protected findKNearest(sample: number[]): Array<{ index: number; distance: number }> {\n if (!this.XTrain_) {\n throw new NotFittedError(\"Model must be fitted before finding neighbors\");\n }\n\n const nSamples = this.XTrain_.shape[0] ?? 0;\n const nFeatures = this.XTrain_.shape[1] ?? 0;\n\n const distances: Array<{ index: number; distance: number }> = [];\n\n for (let i = 0; i < nSamples; i++) {\n const trainSample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n trainSample.push(Number(this.XTrain_.data[this.XTrain_.offset + i * nFeatures + j]));\n }\n const dist = this.calculateDistance(sample, trainSample);\n distances.push({ index: i, distance: dist });\n }\n\n distances.sort((a, b) => a.distance - b.distance);\n return distances.slice(0, this.nNeighbors);\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nNeighbors: this.nNeighbors,\n weights: this.weights,\n metric: this.metric,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\"KNeighbors does not support setParams after construction\");\n }\n}\n\n/**\n * K-Nearest Neighbors Classifier.\n *\n * Classification based on k nearest neighbors. Predicts class by majority vote\n * of k nearest training samples.\n *\n * **Algorithm**: Instance-based learning\n * 1. Store all training data\n * 2. For each test sample, find k nearest training samples\n * 3. Predict class by majority vote (or weighted vote)\n *\n * **Time Complexity**:\n * - Training: O(1) (just stores data)\n * - Prediction: O(n * d) per sample where n=training samples, d=features\n *\n * @example\n * ```ts\n * import { KNeighborsClassifier } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[0, 0], [1, 1], [2, 2], [3, 3]]);\n * const y = tensor([0, 0, 1, 1]);\n *\n * const knn = new KNeighborsClassifier({ nNeighbors: 3 });\n * knn.fit(X, y);\n *\n * const predictions = knn.predict(tensor([[1.5, 1.5]]));\n * ```\n *\n * @see {@link https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm | Wikipedia: KNN}\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html | scikit-learn KNeighborsClassifier}\n */\nexport class KNeighborsClassifier extends KNeighborsBase implements Classifier {\n /**\n * Fit the k-nearest neighbors classifier from the training set.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target class labels of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {InvalidParameterError} If nNeighbors > n_samples\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n if (this.nNeighbors > nSamples) {\n throw new InvalidParameterError(\n `nNeighbors must be <= n_samples; received ${this.nNeighbors} > ${nSamples}`,\n \"nNeighbors\",\n this.nNeighbors\n );\n }\n\n this.XTrain_ = X;\n this.yTrain_ = y;\n this.nFeaturesIn_ = nFeatures;\n this.fitted = true;\n\n return this;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted class labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.XTrain_ || !this.yTrain_) {\n throw new NotFittedError(\"KNeighborsClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"KNeighborsClassifier\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n\n const neighbors = this.findKNearest(sample);\n\n // Count votes for each class\n const votes = new Map<number, number>();\n\n for (const neighbor of neighbors) {\n const label = Number(this.yTrain_.data[this.yTrain_.offset + neighbor.index]);\n const weight = this.weights === \"uniform\" ? 1 : 1 / (neighbor.distance + 1e-10);\n votes.set(label, (votes.get(label) ?? 0) + weight);\n }\n\n // Find class with most votes\n let maxVotes = -1;\n let predictedClass = 0;\n for (const [label, voteCount] of votes.entries()) {\n if (voteCount > maxVotes) {\n maxVotes = voteCount;\n predictedClass = label;\n }\n }\n\n predictions.push(predictedClass);\n }\n\n return tensor(predictions, { dtype: \"int32\" });\n }\n\n /**\n * Predict class probabilities for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Class probability matrix of shape (n_samples, n_classes)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted || !this.XTrain_ || !this.yTrain_) {\n throw new NotFittedError(\"KNeighborsClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"KNeighborsClassifier\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n // Get unique classes\n const classSet = new Set<number>();\n for (let i = 0; i < this.yTrain_.size; i++) {\n classSet.add(Number(this.yTrain_.data[this.yTrain_.offset + i]));\n }\n const classes = Array.from(classSet).sort((a, b) => a - b);\n\n const probabilities: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n\n const neighbors = this.findKNearest(sample);\n\n // Count votes for each class\n const votes = new Map<number, number>();\n let totalWeight = 0;\n\n for (const neighbor of neighbors) {\n const label = Number(this.yTrain_.data[this.yTrain_.offset + neighbor.index]);\n const weight = this.weights === \"uniform\" ? 1 : 1 / (neighbor.distance + 1e-10);\n votes.set(label, (votes.get(label) ?? 
0) + weight);\n totalWeight += weight;\n }\n\n // Convert to probabilities\n const probs: number[] = [];\n for (const cls of classes) {\n probs.push((votes.get(cls) ?? 0) / totalWeight);\n }\n probabilities.push(probs);\n }\n\n return tensor(probabilities);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const yPred = this.predict(X);\n if (yPred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${yPred.size}, y=${y.size}`\n );\n }\n let correct = 0;\n\n for (let i = 0; i < y.size; i++) {\n if (Number(y.data[y.offset + i]) === Number(yPred.data[yPred.offset + i])) {\n correct++;\n }\n }\n\n return correct / y.size;\n }\n}\n\n/**\n * K-Nearest Neighbors Regressor.\n *\n * Regression based on k nearest neighbors. Predicts value as mean (or weighted mean)\n * of k nearest training samples.\n *\n * @example\n * ```ts\n * import { KNeighborsRegressor } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[0], [1], [2], [3]]);\n * const y = tensor([0, 1, 4, 9]);\n *\n * const knn = new KNeighborsRegressor({ nNeighbors: 2 });\n * knn.fit(X, y);\n *\n * const predictions = knn.predict(tensor([[1.5]]));\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsRegressor.html | scikit-learn KNeighborsRegressor}\n */\nexport class KNeighborsRegressor extends KNeighborsBase implements Regressor {\n /**\n * Fit the k-nearest neighbors regressor from the training set.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {InvalidParameterError} If nNeighbors > n_samples\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 
0;\n if (this.nNeighbors > nSamples) {\n throw new InvalidParameterError(\n `nNeighbors must be <= n_samples; received ${this.nNeighbors} > ${nSamples}`,\n \"nNeighbors\",\n this.nNeighbors\n );\n }\n\n this.XTrain_ = X;\n this.yTrain_ = y;\n this.nFeaturesIn_ = nFeatures;\n this.fitted = true;\n\n return this;\n }\n\n /**\n * Predict target values for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted || !this.XTrain_ || !this.yTrain_) {\n throw new NotFittedError(\"KNeighborsRegressor must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeaturesIn_ ?? 0, \"KNeighborsRegressor\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const sample: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n sample.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n\n const neighbors = this.findKNearest(sample);\n\n // Calculate weighted mean\n let sumValues = 0;\n let sumWeights = 0;\n\n for (const neighbor of neighbors) {\n const value = Number(this.yTrain_.data[this.yTrain_.offset + neighbor.index]);\n const weight = this.weights === \"uniform\" ? 1 : 1 / (neighbor.distance + 1e-10);\n sumValues += value * weight;\n sumWeights += weight;\n }\n\n predictions.push(sumValues / sumWeights);\n }\n\n return tensor(predictions);\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const yPred = this.predict(X);\n if (yPred.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${yPred.size}, y=${y.size}`\n );\n }\n\n let ssRes = 0;\n let ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i]);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yTrue = Number(y.data[y.offset + i]);\n const yPredVal = Number(yPred.data[yPred.offset + i]);\n ssRes += (yTrue - yPredVal) ** 2;\n ssTot += (yTrue - yMean) ** 2;\n }\n\n if (ssTot === 0) {\n return ssRes === 0 ? 
1.0 : 0.0;\n }\n\n return 1 - ssRes / ssTot;\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier, Regressor } from \"../base\";\n\n/**\n * Support Vector Machine (SVM) Classifier.\n *\n * Implements a linear SVM using sub-gradient descent on the hinge loss\n * with L2 regularization (soft margin). Suitable for binary classification tasks.\n *\n * **Algorithm**: Sub-gradient descent on hinge loss (linear kernel)\n *\n * **Mathematical Formulation**:\n * - Decision function: f(x) = sign(w · x + b)\n * - Optimization: minimize (1/2)||w||² + C * Σmax(0, 1 - y_i(w · x_i + b))\n *\n * @example\n * ```ts\n * import { LinearSVC } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1, 2], [2, 3], [3, 1], [4, 2]]);\n * const y = tensor([0, 0, 1, 1]);\n *\n * const svm = new LinearSVC({ C: 1.0 });\n * svm.fit(X, y);\n * const predictions = svm.predict(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.svm.LinearSVC.html | scikit-learn LinearSVC}\n */\nexport class LinearSVC implements Classifier {\n /** Regularization parameter (inverse of regularization strength) */\n private readonly C: number;\n\n /** Maximum number of iterations for optimization */\n private readonly maxIter: number;\n\n /** Tolerance for stopping criterion */\n private readonly tol: number;\n\n /** Weight vector of shape (n_features,) */\n private weights: number[] = [];\n\n /** Bias term */\n private bias = 0;\n\n /** Number of features seen during fit */\n private nFeatures = 0;\n\n /** Unique class labels [0, 1] mapped from original labels */\n private classLabels: number[] = [];\n\n /** Whether the model has been fitted */\n private fitted = false;\n\n /**\n * Create a new SVM Classifier.\n *\n * @param options - Configuration options\n * @param options.C - Regularization parameter (default: 1.0). Larger C = stronger penalty on errors = harder margin.\n * @param options.maxIter - Maximum iterations (default: 1000)\n * @param options.tol - Convergence tolerance (default: 1e-4)\n */\n constructor(\n options: {\n readonly C?: number;\n readonly maxIter?: number;\n readonly tol?: number;\n } = {}\n ) {\n this.C = options.C ?? 1.0;\n this.maxIter = options.maxIter ?? 1000;\n this.tol = options.tol ?? 1e-4;\n\n // Validate parameters\n if (!Number.isFinite(this.C) || this.C <= 0) {\n throw new InvalidParameterError(\"C must be positive\", \"C\", this.C);\n }\n if (!Number.isInteger(this.maxIter) || this.maxIter <= 0) {\n throw new InvalidParameterError(\n \"maxIter must be a positive integer\",\n \"maxIter\",\n this.maxIter\n );\n }\n if (!Number.isFinite(this.tol) || this.tol < 0) {\n throw new InvalidParameterError(\"tol must be >= 0\", \"tol\", this.tol);\n }\n }\n\n /**\n * Fit the SVM classifier using sub-gradient descent.\n *\n * Uses a simplified hinge loss optimization with L2 regularization.\n * Objective: minimize (1/2)||w||² + C * Σmax(0, 1 - y_i(w · x_i + b))\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target labels of shape (n_samples,). 
Must contain exactly 2 classes.\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n * @throws {InvalidParameterError} If y does not contain exactly 2 classes\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n this.nFeatures = nFeatures;\n\n // Extract data\n const XData: number[][] = [];\n const yData: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n // Get unique classes and map to {-1, 1} for SVM\n this.classLabels = [...new Set(yData)].sort((a, b) => a - b);\n if (this.classLabels.length !== 2) {\n throw new InvalidParameterError(\n \"LinearSVC requires exactly 2 classes for binary classification\",\n \"y\",\n this.classLabels.length\n );\n }\n\n // Map labels to {-1, 1}\n const yMapped = yData.map((label) => (label === this.classLabels[0] ? -1 : 1));\n\n // Initialize weights and bias\n this.weights = new Array(nFeatures).fill(0);\n this.bias = 0;\n\n // Sub-gradient descent optimization\n // We use a constant learning rate schedule for simplicity, scaled by 1/(lambda*n)\n // Here lambda = 1/C. So eta = C/n.\n // However, to ensure convergence, usually eta decays as 1/t.\n // For fixed iterations, small constant rate is often sufficient.\n // We choose eta such that eta * lambda * n = 1 (approx) implies eta = 1/(lambda*n) = C/n?\n // Let's use eta = 1 / (nSamples * lambda) = C / nSamples.\n // But if C is large, eta is large, which might be unstable.\n // Let's use a safe learning rate.\n const learningRate = 0.01; // Fixed small learning rate for stability\n\n for (let iter = 0; iter < this.maxIter; iter++) {\n let maxViolation = 0;\n\n for (let i = 0; i < nSamples; i++) {\n const xi = XData[i];\n const yi = yMapped[i];\n\n if (xi === undefined || yi === undefined) continue;\n\n // Compute decision function: w · x + b\n let decision = this.bias;\n for (let j = 0; j < nFeatures; j++) {\n decision += (this.weights[j] ?? 0) * (xi[j] ?? 0);\n }\n\n // Hinge loss margin: y * (w · x + b)\n const margin = yi * decision;\n\n // Track constraint violation for convergence check\n if (margin < 1) {\n maxViolation = Math.max(maxViolation, 1 - margin);\n }\n\n // Sub-gradient update\n // Objective: 0.5*|w|^2 + C * sum(max(0, 1 - y(wx+b)))\n // Grad w: w - C*y*x (if margin < 1)\n // Update: w <- w - eta * (w - C*y*x) = w(1-eta) + eta*C*y*x\n\n // Regularization part (always applied)\n // Let's trust the user input C implies hard margin.\n // We will use: w = w - learningRate * (w - C * y * x)\n // To prevent explosion, learningRate must be < 1/C.\n // So we adapt LR.\n const effectiveLR = Math.min(learningRate, 1.0 / (this.C * 10));\n\n if (margin < 1) {\n // Misclassified or within margin\n for (let j = 0; j < nFeatures; j++) {\n this.weights[j] =\n (this.weights[j] ?? 0) * (1 - effectiveLR) + effectiveLR * this.C * yi * (xi[j] ?? 0);\n }\n this.bias += effectiveLR * this.C * yi;\n } else {\n // Correctly classified outside margin: only apply regularization\n for (let j = 0; j < nFeatures; j++) {\n this.weights[j] = (this.weights[j] ?? 
0) * (1 - effectiveLR);\n }\n }\n }\n\n // Check convergence\n if (maxViolation < this.tol) {\n break;\n }\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"SVC must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"LinearSVC\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n // Compute decision function\n let decision = this.bias;\n for (let j = 0; j < nFeatures; j++) {\n decision += (this.weights[j] ?? 0) * Number(X.data[X.offset + i * nFeatures + j]);\n }\n\n // Map back to original labels\n const predictedClass = decision >= 0 ? this.classLabels[1] : this.classLabels[0];\n predictions.push(predictedClass ?? 0);\n }\n\n return tensor(predictions, { dtype: \"int32\" });\n }\n\n /**\n * Predict class probabilities using Platt scaling approximation.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Probability estimates of shape (n_samples, 2)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"LinearSVC must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"LinearSVC\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n const proba: number[][] = [];\n\n for (let i = 0; i < nSamples; i++) {\n // Compute decision function\n let decision = this.bias;\n for (let j = 0; j < nFeatures; j++) {\n decision += (this.weights[j] ?? 0) * Number(X.data[X.offset + i * nFeatures + j]);\n }\n\n // Use sigmoid for probability approximation (Platt scaling)\n const p1 = 1 / (1 + Math.exp(-decision));\n proba.push([1 - p1, p1]);\n }\n\n return tensor(proba);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 
0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n let correct = 0;\n for (let i = 0; i < y.size; i++) {\n if (Number(predictions.data[predictions.offset + i]) === Number(y.data[y.offset + i])) {\n correct++;\n }\n }\n return correct / y.size;\n }\n\n /**\n * Get the weight vector.\n *\n * @returns Weight vector as tensor of shape (1, n_features)\n * @throws {NotFittedError} If the model has not been fitted\n */\n get coef(): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"LinearSVC must be fitted to access coefficients\");\n }\n return tensor([this.weights]);\n }\n\n /**\n * Get the bias term.\n *\n * @returns Bias value\n * @throws {NotFittedError} If the model has not been fitted\n */\n get intercept(): number {\n if (!this.fitted) {\n throw new NotFittedError(\"LinearSVC must be fitted to access intercept\");\n }\n return this.bias;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n C: this.C,\n maxIter: this.maxIter,\n tol: this.tol,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\"LinearSVC does not support setParams after construction\");\n }\n}\n\n/**\n * Support Vector Machine (SVM) Regressor.\n *\n * Implements epsilon-SVR (Support Vector Regression) using sub-gradient descent.\n *\n * @example\n * ```ts\n * import { LinearSVR } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const X = tensor([[1], [2], [3], [4]]);\n * const y = tensor([1.5, 2.5, 3.5, 4.5]);\n *\n * const svr = new LinearSVR({ C: 1.0, epsilon: 0.1 });\n * svr.fit(X, y);\n * const predictions = svr.predict(X);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.svm.LinearSVR.html | scikit-learn LinearSVR}\n */\nexport class LinearSVR implements Regressor {\n /** Regularization parameter */\n private readonly C: number;\n\n /** Epsilon in the epsilon-SVR model */\n private readonly epsilon: number;\n\n /** Maximum number of iterations */\n private readonly maxIter: number;\n\n /** Tolerance for stopping criterion */\n private readonly tol: number;\n\n /** Weight vector */\n private weights: number[] = [];\n\n /** Bias term */\n private bias = 0;\n\n /** Number of features */\n private nFeatures = 0;\n\n /** Whether the model has been fitted */\n private fitted = false;\n\n constructor(\n options: {\n readonly C?: number;\n readonly epsilon?: number;\n readonly maxIter?: number;\n readonly tol?: number;\n } = {}\n ) {\n this.C = options.C ?? 1.0;\n this.epsilon = options.epsilon ?? 0.1;\n this.maxIter = options.maxIter ?? 1000;\n this.tol = options.tol ?? 
1e-4;\n\n if (!Number.isFinite(this.C) || this.C <= 0) {\n throw new InvalidParameterError(\"C must be positive\", \"C\", this.C);\n }\n if (!Number.isFinite(this.epsilon) || this.epsilon < 0) {\n throw new InvalidParameterError(\"epsilon must be >= 0\", \"epsilon\", this.epsilon);\n }\n if (!Number.isInteger(this.maxIter) || this.maxIter <= 0) {\n throw new InvalidParameterError(\"maxIter must be positive\", \"maxIter\", this.maxIter);\n }\n if (!Number.isFinite(this.tol) || this.tol < 0) {\n throw new InvalidParameterError(\"tol must be >= 0\", \"tol\", this.tol);\n }\n }\n\n /**\n * Fit the SVR model using sub-gradient descent on epsilon-insensitive loss.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n this.nFeatures = nFeatures;\n\n // Extract data\n const XData: number[][] = [];\n const yData: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n // Initialize weights\n this.weights = new Array(nFeatures).fill(0);\n this.bias = 0;\n\n const learningRate = 0.01;\n\n for (let iter = 0; iter < this.maxIter; iter++) {\n let totalLoss = 0;\n\n for (let i = 0; i < nSamples; i++) {\n const xi = XData[i];\n const yi = yData[i];\n\n if (xi === undefined || yi === undefined) continue;\n\n // Compute prediction\n let pred = this.bias;\n for (let j = 0; j < nFeatures; j++) {\n pred += (this.weights[j] ?? 0) * (xi[j] ?? 0);\n }\n\n const error = pred - yi;\n const absError = Math.abs(error);\n\n // Epsilon-insensitive loss\n if (absError > this.epsilon) {\n totalLoss += absError - this.epsilon;\n\n // Sub-gradient\n const sign = error > 0 ? 1 : -1;\n\n for (let j = 0; j < nFeatures; j++) {\n this.weights[j] =\n (this.weights[j] ?? 0) -\n learningRate * (this.C * sign * (xi[j] ?? 0) + (this.weights[j] ?? 0));\n }\n this.bias -= learningRate * this.C * sign;\n } else {\n // Only regularization\n for (let j = 0; j < nFeatures; j++) {\n this.weights[j] = (this.weights[j] ?? 0) - learningRate * (this.weights[j] ?? 0);\n }\n }\n }\n\n if (totalLoss / nSamples < this.tol) {\n break;\n }\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict target values for samples in X.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"SVR must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"SVR\");\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n\n const predictions: number[] = [];\n\n for (let i = 0; i < nSamples; i++) {\n let pred = this.bias;\n for (let j = 0; j < nFeatures; j++) {\n pred += (this.weights[j] ?? 
0) * Number(X.data[X.offset + i * nFeatures + j]);\n }\n predictions.push(pred);\n }\n\n return tensor(predictions);\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n\n let ssRes = 0;\n let ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i]);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yTrue = Number(y.data[y.offset + i]);\n const yPred = Number(predictions.data[predictions.offset + i]);\n ssRes += (yTrue - yPred) ** 2;\n ssTot += (yTrue - yMean) ** 2;\n }\n\n return ssTot === 0 ? (ssRes === 0 ? 1.0 : 0.0) : 1 - ssRes / ssTot;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n C: this.C,\n epsilon: this.epsilon,\n maxIter: this.maxIter,\n tol: this.tol,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\"LinearSVR does not support setParams after construction\");\n }\n}\n","import {\n DataValidationError,\n InvalidParameterError,\n NotFittedError,\n NotImplementedError,\n ShapeError,\n} from \"../../core\";\nimport { type Tensor, tensor } from \"../../ndarray\";\nimport { assertContiguous, validateFitInputs, validatePredictInputs } from \"../_validation\";\nimport type { Classifier, Regressor } from \"../base\";\nimport { DecisionTreeClassifier, DecisionTreeRegressor } from \"./DecisionTree\";\n\n/**\n * Random Forest Classifier.\n *\n * An ensemble of decision trees trained on random subsets of data and features.\n * Predictions are made by majority voting.\n *\n * **Algorithm**:\n * 1. Create n_estimators bootstrap samples from training data\n * 2. Train a decision tree on each sample with random feature subsets\n * 3. 
Aggregate predictions via majority voting\n *\n * @example\n * ```ts\n * import { RandomForestClassifier } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const clf = new RandomForestClassifier({ nEstimators: 100 });\n * clf.fit(X_train, y_train);\n * const predictions = clf.predict(X_test);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html | scikit-learn RandomForestClassifier}\n */\nexport class RandomForestClassifier implements Classifier {\n private readonly nEstimators: number;\n private readonly maxDepth: number;\n private readonly minSamplesSplit: number;\n private readonly minSamplesLeaf: number;\n private readonly maxFeatures: \"sqrt\" | \"log2\" | number;\n private readonly bootstrap: boolean;\n private readonly randomState?: number;\n\n private trees: DecisionTreeClassifier[] = [];\n private classLabels?: number[];\n private nFeatures?: number;\n private fitted = false;\n\n constructor(\n options: {\n readonly nEstimators?: number;\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n readonly minSamplesLeaf?: number;\n readonly maxFeatures?: \"sqrt\" | \"log2\" | number;\n readonly bootstrap?: boolean;\n readonly randomState?: number;\n } = {}\n ) {\n this.nEstimators = options.nEstimators ?? 100;\n this.maxDepth = options.maxDepth ?? 10;\n this.minSamplesSplit = options.minSamplesSplit ?? 2;\n this.minSamplesLeaf = options.minSamplesLeaf ?? 1;\n this.maxFeatures = options.maxFeatures ?? \"sqrt\";\n this.bootstrap = options.bootstrap ?? true;\n if (options.randomState !== undefined) {\n this.randomState = options.randomState;\n }\n\n if (!Number.isInteger(this.nEstimators) || this.nEstimators < 1) {\n throw new InvalidParameterError(\n `nEstimators must be an integer >= 1; received ${this.nEstimators}`,\n \"nEstimators\",\n this.nEstimators\n );\n }\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n `maxDepth must be an integer >= 1; received ${this.maxDepth}`,\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n `minSamplesSplit must be an integer >= 2; received ${this.minSamplesSplit}`,\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n if (!Number.isInteger(this.minSamplesLeaf) || this.minSamplesLeaf < 1) {\n throw new InvalidParameterError(\n `minSamplesLeaf must be an integer >= 1; received ${this.minSamplesLeaf}`,\n \"minSamplesLeaf\",\n this.minSamplesLeaf\n );\n }\n if (typeof this.maxFeatures === \"number\") {\n if (!Number.isInteger(this.maxFeatures) || this.maxFeatures < 1) {\n throw new InvalidParameterError(\n `maxFeatures must be an integer >= 1; received ${this.maxFeatures}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n } else if (this.maxFeatures !== \"sqrt\" && this.maxFeatures !== \"log2\") {\n throw new InvalidParameterError(\n `maxFeatures must be \"sqrt\", \"log2\", or a positive integer; received ${String(this.maxFeatures)}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n if (options.randomState !== undefined && !Number.isFinite(options.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(options.randomState)}`,\n \"randomState\",\n options.randomState\n );\n }\n }\n\n private createRNG(): () => number {\n if (this.randomState !== undefined) {\n let seed = this.randomState;\n return () => {\n seed = (seed * 9301 + 49297) % 
233280;\n return seed / 233280;\n };\n }\n return Math.random;\n }\n\n /**\n * Fit the random forest classifier on training data.\n *\n * Builds an ensemble of decision trees, each trained on a bootstrapped\n * sample with random feature subsets.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target class labels of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n this.nFeatures = nFeatures;\n\n // Extract data\n const XData: number[][] = [];\n const yData: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n this.classLabels = [...new Set(yData)].sort((a, b) => a - b);\n\n // Determine number of features to select\n let nSelectFeatures: number;\n if (typeof this.maxFeatures === \"number\") {\n nSelectFeatures = Math.min(this.maxFeatures, nFeatures);\n } else if (this.maxFeatures === \"sqrt\") {\n nSelectFeatures = Math.floor(Math.sqrt(nFeatures));\n } else {\n nSelectFeatures = Math.floor(Math.log2(nFeatures));\n }\n nSelectFeatures = Math.max(1, nSelectFeatures);\n\n const rng = this.createRNG();\n\n this.trees = [];\n\n for (let t = 0; t < this.nEstimators; t++) {\n // Bootstrap sample\n const sampleIndices: number[] = [];\n if (this.bootstrap) {\n for (let i = 0; i < nSamples; i++) {\n sampleIndices.push(Math.floor(rng() * nSamples));\n }\n } else {\n for (let i = 0; i < nSamples; i++) {\n sampleIndices.push(i);\n }\n }\n\n // Create subset of data (all features, bootstrapped samples)\n const XSubset: number[][] = [];\n const ySubset: number[] = [];\n for (const sampleIdx of sampleIndices) {\n // We copy the whole row (all features)\n XSubset.push(XData[sampleIdx] ?? []);\n ySubset.push(yData[sampleIdx] ?? 0);\n }\n\n // Train tree with maxFeatures set for random feature selection at each split\n const treeOptions: {\n maxDepth: number;\n minSamplesSplit: number;\n minSamplesLeaf: number;\n maxFeatures: number;\n randomState?: number;\n } = {\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: nSelectFeatures,\n };\n if (this.randomState !== undefined) {\n treeOptions.randomState = this.randomState + t;\n }\n const tree = new DecisionTreeClassifier(treeOptions);\n tree.fit(tensor(XSubset), tensor(ySubset, { dtype: \"int32\" }));\n this.trees.push(tree);\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict class labels for samples in X.\n *\n * Aggregates predictions from all trees via majority voting.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted class labels of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"RandomForestClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 
0, \"RandomForestClassifier\");\n\n const nSamples = X.shape[0] ?? 0;\n\n // Get predictions from all trees\n const allPredictions: number[][] = [];\n\n for (const tree of this.trees) {\n const preds = tree.predict(X);\n const treePreds: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n treePreds.push(Number(preds.data[preds.offset + i]));\n }\n allPredictions.push(treePreds);\n }\n\n // Majority voting\n const finalPredictions: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n const votes = new Map<number, number>();\n for (const treePreds of allPredictions) {\n const pred = treePreds[i] ?? 0;\n votes.set(pred, (votes.get(pred) ?? 0) + 1);\n }\n\n let maxVotes = 0;\n let prediction = 0;\n for (const [label, count] of votes) {\n if (count > maxVotes) {\n maxVotes = count;\n prediction = label;\n }\n }\n finalPredictions.push(prediction);\n }\n\n return tensor(finalPredictions, { dtype: \"int32\" });\n }\n\n /**\n * Predict class probabilities for samples in X.\n *\n * Averages the predicted class probabilities from all trees in the ensemble.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Class probability matrix of shape (n_samples, n_classes)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predictProba(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"RandomForestClassifier must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"RandomForestClassifier\");\n assertContiguous(X, \"X\");\n\n const nSamples = X.shape[0] ?? 0;\n const classLabels = this.classLabels ?? [];\n const nClasses = classLabels.length;\n\n if (nClasses === 0) {\n throw new NotFittedError(\"RandomForestClassifier must be fitted before prediction\");\n }\n\n const classIndex = new Map<number, number>();\n for (let i = 0; i < nClasses; i++) {\n const v = classLabels[i];\n if (v !== undefined) classIndex.set(v, i);\n }\n\n const proba: number[][] = Array.from({ length: nSamples }, () =>\n new Array<number>(nClasses).fill(0)\n );\n\n for (const tree of this.trees) {\n const treeProba = tree.predictProba(X);\n const treeClasses = tree.classes;\n if (!treeClasses) continue;\n assertContiguous(treeClasses, \"classes\");\n\n const k = treeClasses.size;\n for (let j = 0; j < k; j++) {\n const lbl = Number(treeClasses.data[treeClasses.offset + j] ?? 0);\n const globalJ = classIndex.get(lbl);\n if (globalJ === undefined) continue;\n\n for (let i = 0; i < nSamples; i++) {\n const row = proba[i];\n if (row) {\n row[globalJ] =\n (row[globalJ] ?? 0) + Number(treeProba.data[treeProba.offset + i * k + j] ?? 0);\n }\n }\n }\n }\n\n const invTrees = this.trees.length === 0 ? 0 : 1 / this.trees.length;\n for (let i = 0; i < nSamples; i++) {\n const row = proba[i];\n if (row) {\n for (let j = 0; j < nClasses; j++) {\n row[j] = (row[j] ?? 
0) * invTrees;\n }\n }\n }\n\n return tensor(proba);\n }\n\n /**\n * Return the mean accuracy on the given test data and labels.\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True labels of shape (n_samples,)\n * @returns Accuracy score in range [0, 1]\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (!this.fitted) {\n throw new NotFittedError(\"RandomForestClassifier must be fitted before scoring\");\n }\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n let correct = 0;\n for (let i = 0; i < y.size; i++) {\n if (Number(predictions.data[predictions.offset + i]) === Number(y.data[y.offset + i])) {\n correct++;\n }\n }\n return correct / y.size;\n }\n\n /**\n * Get the unique class labels discovered during fitting.\n *\n * @returns Tensor of class labels or undefined if not fitted\n */\n get classes(): Tensor | undefined {\n if (!this.fitted || !this.classLabels) {\n return undefined;\n }\n return tensor(this.classLabels, { dtype: \"int32\" });\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nEstimators: this.nEstimators,\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: this.maxFeatures,\n bootstrap: this.bootstrap,\n randomState: this.randomState,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"RandomForestClassifier does not support setParams after construction\"\n );\n }\n}\n\n/**\n * Random Forest Regressor.\n *\n * An ensemble of decision tree regressors trained on random subsets of data\n * and features. Predictions are averaged across all trees.\n *\n * **Algorithm**:\n * 1. Create n_estimators bootstrap samples from training data\n * 2. Train a decision tree on each sample with random feature subsets\n * 3. 
Aggregate predictions via averaging\n *\n * @example\n * ```ts\n * import { RandomForestRegressor } from 'deepbox/ml';\n * import { tensor } from 'deepbox/ndarray';\n *\n * const reg = new RandomForestRegressor({ nEstimators: 100 });\n * reg.fit(X_train, y_train);\n * const predictions = reg.predict(X_test);\n * ```\n *\n * @see {@link https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestRegressor.html | scikit-learn RandomForestRegressor}\n */\nexport class RandomForestRegressor implements Regressor {\n private readonly nEstimators: number;\n private readonly maxDepth: number;\n private readonly minSamplesSplit: number;\n private readonly minSamplesLeaf: number;\n private readonly maxFeatures: \"sqrt\" | \"log2\" | number;\n private readonly bootstrap: boolean;\n private readonly randomState?: number;\n\n private trees: DecisionTreeRegressor[] = [];\n private nFeatures?: number;\n private fitted = false;\n\n constructor(\n options: {\n readonly nEstimators?: number;\n readonly maxDepth?: number;\n readonly minSamplesSplit?: number;\n readonly minSamplesLeaf?: number;\n readonly maxFeatures?: \"sqrt\" | \"log2\" | number;\n readonly bootstrap?: boolean;\n readonly randomState?: number;\n } = {}\n ) {\n this.nEstimators = options.nEstimators ?? 100;\n this.maxDepth = options.maxDepth ?? 10;\n this.minSamplesSplit = options.minSamplesSplit ?? 2;\n this.minSamplesLeaf = options.minSamplesLeaf ?? 1;\n this.maxFeatures = options.maxFeatures ?? 1.0;\n this.bootstrap = options.bootstrap ?? true;\n if (options.randomState !== undefined) {\n this.randomState = options.randomState;\n }\n\n if (!Number.isInteger(this.nEstimators) || this.nEstimators < 1) {\n throw new InvalidParameterError(\n `nEstimators must be an integer >= 1; received ${this.nEstimators}`,\n \"nEstimators\",\n this.nEstimators\n );\n }\n if (!Number.isInteger(this.maxDepth) || this.maxDepth < 1) {\n throw new InvalidParameterError(\n `maxDepth must be an integer >= 1; received ${this.maxDepth}`,\n \"maxDepth\",\n this.maxDepth\n );\n }\n if (!Number.isInteger(this.minSamplesSplit) || this.minSamplesSplit < 2) {\n throw new InvalidParameterError(\n `minSamplesSplit must be an integer >= 2; received ${this.minSamplesSplit}`,\n \"minSamplesSplit\",\n this.minSamplesSplit\n );\n }\n if (!Number.isInteger(this.minSamplesLeaf) || this.minSamplesLeaf < 1) {\n throw new InvalidParameterError(\n `minSamplesLeaf must be an integer >= 1; received ${this.minSamplesLeaf}`,\n \"minSamplesLeaf\",\n this.minSamplesLeaf\n );\n }\n if (typeof this.maxFeatures === \"number\") {\n if (\n this.maxFeatures !== 1.0 &&\n (!Number.isInteger(this.maxFeatures) || this.maxFeatures < 1)\n ) {\n throw new InvalidParameterError(\n `maxFeatures must be 1.0, an integer >= 1, \"sqrt\", or \"log2\"; received ${this.maxFeatures}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n } else if (this.maxFeatures !== \"sqrt\" && this.maxFeatures !== \"log2\") {\n throw new InvalidParameterError(\n `maxFeatures must be \"sqrt\", \"log2\", or a positive integer; received ${String(this.maxFeatures)}`,\n \"maxFeatures\",\n this.maxFeatures\n );\n }\n if (options.randomState !== undefined && !Number.isFinite(options.randomState)) {\n throw new InvalidParameterError(\n `randomState must be a finite number; received ${String(options.randomState)}`,\n \"randomState\",\n options.randomState\n );\n }\n }\n\n private createRNG(): () => number {\n if (this.randomState !== undefined) {\n let seed = this.randomState;\n return () => {\n seed = (seed * 9301 + 
49297) % 233280;\n return seed / 233280;\n };\n }\n return Math.random;\n }\n\n /**\n * Fit the random forest regressor on training data.\n *\n * Builds an ensemble of decision trees, each trained on a bootstrapped\n * sample with random feature subsets.\n *\n * @param X - Training data of shape (n_samples, n_features)\n * @param y - Target values of shape (n_samples,)\n * @returns this - The fitted estimator\n * @throws {ShapeError} If X is not 2D or y is not 1D\n * @throws {ShapeError} If X and y have different number of samples\n * @throws {DataValidationError} If X or y contain NaN/Inf values\n */\n fit(X: Tensor, y: Tensor): this {\n validateFitInputs(X, y);\n\n const nSamples = X.shape[0] ?? 0;\n const nFeatures = X.shape[1] ?? 0;\n this.nFeatures = nFeatures;\n\n const XData: number[][] = [];\n const yData: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n const row: number[] = [];\n for (let j = 0; j < nFeatures; j++) {\n row.push(Number(X.data[X.offset + i * nFeatures + j]));\n }\n XData.push(row);\n yData.push(Number(y.data[y.offset + i]));\n }\n\n let nSelectFeatures: number;\n if (typeof this.maxFeatures === \"number\") {\n if (this.maxFeatures === 1.0) {\n nSelectFeatures = nFeatures;\n } else {\n nSelectFeatures = Math.min(this.maxFeatures, nFeatures);\n }\n } else if (this.maxFeatures === \"sqrt\") {\n nSelectFeatures = Math.floor(Math.sqrt(nFeatures));\n } else {\n nSelectFeatures = Math.floor(Math.log2(nFeatures));\n }\n nSelectFeatures = Math.max(1, nSelectFeatures);\n\n const rng = this.createRNG();\n\n this.trees = [];\n\n for (let t = 0; t < this.nEstimators; t++) {\n const sampleIndices: number[] = [];\n if (this.bootstrap) {\n for (let i = 0; i < nSamples; i++) {\n sampleIndices.push(Math.floor(rng() * nSamples));\n }\n } else {\n for (let i = 0; i < nSamples; i++) {\n sampleIndices.push(i);\n }\n }\n\n // Create subset of data (all features, bootstrapped samples)\n const XSubset: number[][] = [];\n const ySubset: number[] = [];\n for (const sampleIdx of sampleIndices) {\n XSubset.push(XData[sampleIdx] ?? []);\n ySubset.push(yData[sampleIdx] ?? 0);\n }\n\n const treeOptions: {\n maxDepth: number;\n minSamplesSplit: number;\n minSamplesLeaf: number;\n maxFeatures: number;\n randomState?: number;\n } = {\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: nSelectFeatures,\n };\n if (this.randomState !== undefined) {\n treeOptions.randomState = this.randomState + t;\n }\n const tree = new DecisionTreeRegressor(treeOptions);\n tree.fit(tensor(XSubset), tensor(ySubset));\n this.trees.push(tree);\n }\n\n this.fitted = true;\n return this;\n }\n\n /**\n * Predict target values for samples in X.\n *\n * Averages predictions from all trees in the ensemble.\n *\n * @param X - Samples of shape (n_samples, n_features)\n * @returns Predicted values of shape (n_samples,)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If X has wrong dimensions or feature count\n * @throws {DataValidationError} If X contains NaN/Inf values\n */\n predict(X: Tensor): Tensor {\n if (!this.fitted) {\n throw new NotFittedError(\"RandomForestRegressor must be fitted before prediction\");\n }\n\n validatePredictInputs(X, this.nFeatures ?? 0, \"RandomForestRegressor\");\n\n const nSamples = X.shape[0] ?? 
0;\n\n const allPredictions: number[][] = [];\n\n for (const tree of this.trees) {\n const preds = tree.predict(X);\n const treePreds: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n treePreds.push(Number(preds.data[preds.offset + i]));\n }\n allPredictions.push(treePreds);\n }\n\n // Average predictions\n const finalPredictions: number[] = [];\n for (let i = 0; i < nSamples; i++) {\n let sum = 0;\n for (const treePreds of allPredictions) {\n sum += treePreds[i] ?? 0;\n }\n finalPredictions.push(sum / this.trees.length);\n }\n\n return tensor(finalPredictions);\n }\n\n /**\n * Return the R² score on the given test data and target values.\n *\n * R² = 1 - SS_res / SS_tot, where SS_res = Σ(y - ŷ)² and SS_tot = Σ(y - ȳ)².\n *\n * @param X - Test samples of shape (n_samples, n_features)\n * @param y - True target values of shape (n_samples,)\n * @returns R² score (best possible is 1.0, can be negative)\n * @throws {NotFittedError} If the model has not been fitted\n * @throws {ShapeError} If y is not 1-dimensional or sample counts mismatch\n * @throws {DataValidationError} If y contains NaN/Inf values\n */\n score(X: Tensor, y: Tensor): number {\n if (!this.fitted) {\n throw new NotFittedError(\"RandomForestRegressor must be fitted before scoring\");\n }\n if (y.ndim !== 1) {\n throw new ShapeError(`y must be 1-dimensional; got ndim=${y.ndim}`);\n }\n assertContiguous(y, \"y\");\n for (let i = 0; i < y.size; i++) {\n const val = y.data[y.offset + i] ?? 0;\n if (!Number.isFinite(val)) {\n throw new DataValidationError(\"y contains non-finite values (NaN or Inf)\");\n }\n }\n const predictions = this.predict(X);\n if (predictions.size !== y.size) {\n throw new ShapeError(\n `X and y must have the same number of samples; got X=${predictions.size}, y=${y.size}`\n );\n }\n\n let ssRes = 0;\n let ssTot = 0;\n let yMean = 0;\n\n for (let i = 0; i < y.size; i++) {\n yMean += Number(y.data[y.offset + i]);\n }\n yMean /= y.size;\n\n for (let i = 0; i < y.size; i++) {\n const yTrue = Number(y.data[y.offset + i]);\n const yPred = Number(predictions.data[predictions.offset + i]);\n ssRes += (yTrue - yPred) ** 2;\n ssTot += (yTrue - yMean) ** 2;\n }\n\n return ssTot === 0 ? (ssRes === 0 ? 1.0 : 0.0) : 1 - ssRes / ssTot;\n }\n\n /**\n * Get hyperparameters for this estimator.\n *\n * @returns Object containing all hyperparameters\n */\n getParams(): Record<string, unknown> {\n return {\n nEstimators: this.nEstimators,\n maxDepth: this.maxDepth,\n minSamplesSplit: this.minSamplesSplit,\n minSamplesLeaf: this.minSamplesLeaf,\n maxFeatures: this.maxFeatures,\n bootstrap: this.bootstrap,\n randomState: this.randomState,\n };\n }\n\n /**\n * Set the parameters of this estimator.\n *\n * @param _params - Parameters to set\n * @throws {NotImplementedError} Always — parameters cannot be changed after construction\n */\n setParams(_params: Record<string, unknown>): this {\n throw new NotImplementedError(\n \"RandomForestRegressor does not support setParams after construction\"\n );\n }\n}\n"]}