@astermind/cybernetic-chatbot-client 2.2.7 → 2.2.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"cybernetic-chatbot-client.esm.js","sources":["../src/ApiClient.ts","../node_modules/idb/build/wrap-idb-value.js","../node_modules/idb/build/index.js","../src/CyberneticCache.ts","../src/CyberneticLocalRAG.ts","../src/CyberneticOfflineStorage.ts","../src/license/base64url.ts","../src/license/verifier.ts","../src/license/licenseManager.ts","../src/CyberneticClient.ts","../src/config.ts","../src/agentic/CyberneticIntentClassifier.ts","../src/agentic/CyberneticAgent.ts","../src/agentic/register.ts","../node_modules/@astermind/astermind-community/dist/astermind.esm.js","../src/omega/OmegaOfflineRAG.ts","../src/index.ts"],"sourcesContent":[null,"const instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c);\n\nlet idbProxyableTypes;\nlet cursorAdvanceMethods;\n// This is a function to prevent it throwing up in node environments.\nfunction getIdbProxyableTypes() {\n return (idbProxyableTypes ||\n (idbProxyableTypes = [\n IDBDatabase,\n IDBObjectStore,\n IDBIndex,\n IDBCursor,\n IDBTransaction,\n ]));\n}\n// This is a function to prevent it throwing up in node environments.\nfunction getCursorAdvanceMethods() {\n return (cursorAdvanceMethods ||\n (cursorAdvanceMethods = [\n IDBCursor.prototype.advance,\n IDBCursor.prototype.continue,\n IDBCursor.prototype.continuePrimaryKey,\n ]));\n}\nconst cursorRequestMap = new WeakMap();\nconst transactionDoneMap = new WeakMap();\nconst transactionStoreNamesMap = new WeakMap();\nconst transformCache = new WeakMap();\nconst reverseTransformCache = new WeakMap();\nfunction promisifyRequest(request) {\n const promise = new Promise((resolve, reject) => {\n const unlisten = () => {\n request.removeEventListener('success', success);\n request.removeEventListener('error', error);\n };\n const success = () => {\n resolve(wrap(request.result));\n unlisten();\n };\n const error = () => {\n reject(request.error);\n unlisten();\n };\n request.addEventListener('success', success);\n request.addEventListener('error', error);\n });\n promise\n .then((value) => {\n // Since cursoring reuses the IDBRequest (*sigh*), we cache it for later retrieval\n // (see wrapFunction).\n if (value instanceof IDBCursor) {\n cursorRequestMap.set(value, request);\n }\n // Catching to avoid \"Uncaught Promise exceptions\"\n })\n .catch(() => { });\n // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. 
This\n // is because we create many promises from a single IDBRequest.\n reverseTransformCache.set(promise, request);\n return promise;\n}\nfunction cacheDonePromiseForTransaction(tx) {\n // Early bail if we've already created a done promise for this transaction.\n if (transactionDoneMap.has(tx))\n return;\n const done = new Promise((resolve, reject) => {\n const unlisten = () => {\n tx.removeEventListener('complete', complete);\n tx.removeEventListener('error', error);\n tx.removeEventListener('abort', error);\n };\n const complete = () => {\n resolve();\n unlisten();\n };\n const error = () => {\n reject(tx.error || new DOMException('AbortError', 'AbortError'));\n unlisten();\n };\n tx.addEventListener('complete', complete);\n tx.addEventListener('error', error);\n tx.addEventListener('abort', error);\n });\n // Cache it for later retrieval.\n transactionDoneMap.set(tx, done);\n}\nlet idbProxyTraps = {\n get(target, prop, receiver) {\n if (target instanceof IDBTransaction) {\n // Special handling for transaction.done.\n if (prop === 'done')\n return transactionDoneMap.get(target);\n // Polyfill for objectStoreNames because of Edge.\n if (prop === 'objectStoreNames') {\n return target.objectStoreNames || transactionStoreNamesMap.get(target);\n }\n // Make tx.store return the only store in the transaction, or undefined if there are many.\n if (prop === 'store') {\n return receiver.objectStoreNames[1]\n ? undefined\n : receiver.objectStore(receiver.objectStoreNames[0]);\n }\n }\n // Else transform whatever we get back.\n return wrap(target[prop]);\n },\n set(target, prop, value) {\n target[prop] = value;\n return true;\n },\n has(target, prop) {\n if (target instanceof IDBTransaction &&\n (prop === 'done' || prop === 'store')) {\n return true;\n }\n return prop in target;\n },\n};\nfunction replaceTraps(callback) {\n idbProxyTraps = callback(idbProxyTraps);\n}\nfunction wrapFunction(func) {\n // Due to expected object equality (which is enforced by the caching in `wrap`), we\n // only create one new func per func.\n // Edge doesn't support objectStoreNames (booo), so we polyfill it here.\n if (func === IDBDatabase.prototype.transaction &&\n !('objectStoreNames' in IDBTransaction.prototype)) {\n return function (storeNames, ...args) {\n const tx = func.call(unwrap(this), storeNames, ...args);\n transactionStoreNamesMap.set(tx, storeNames.sort ? storeNames.sort() : [storeNames]);\n return wrap(tx);\n };\n }\n // Cursor methods are special, as the behaviour is a little more different to standard IDB. In\n // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the\n // cursor. It's kinda like a promise that can resolve with many values. 
That doesn't make sense\n // with real promises, so each advance methods returns a new promise for the cursor object, or\n // undefined if the end of the cursor has been reached.\n if (getCursorAdvanceMethods().includes(func)) {\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n func.apply(unwrap(this), args);\n return wrap(cursorRequestMap.get(this));\n };\n }\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n return wrap(func.apply(unwrap(this), args));\n };\n}\nfunction transformCachableValue(value) {\n if (typeof value === 'function')\n return wrapFunction(value);\n // This doesn't return, it just creates a 'done' promise for the transaction,\n // which is later returned for transaction.done (see idbObjectHandler).\n if (value instanceof IDBTransaction)\n cacheDonePromiseForTransaction(value);\n if (instanceOfAny(value, getIdbProxyableTypes()))\n return new Proxy(value, idbProxyTraps);\n // Return the same value back if we're not going to transform it.\n return value;\n}\nfunction wrap(value) {\n // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because\n // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.\n if (value instanceof IDBRequest)\n return promisifyRequest(value);\n // If we've already transformed this value before, reuse the transformed value.\n // This is faster, but it also provides object equality.\n if (transformCache.has(value))\n return transformCache.get(value);\n const newValue = transformCachableValue(value);\n // Not all types are transformed.\n // These may be primitive types, so they can't be WeakMap keys.\n if (newValue !== value) {\n transformCache.set(value, newValue);\n reverseTransformCache.set(newValue, value);\n }\n return newValue;\n}\nconst unwrap = (value) => reverseTransformCache.get(value);\n\nexport { reverseTransformCache as a, instanceOfAny as i, replaceTraps as r, unwrap as u, wrap as w };\n","import { w as wrap, r as replaceTraps } from './wrap-idb-value.js';\nexport { u as unwrap, w as wrap } from './wrap-idb-value.js';\n\n/**\n * Open a database.\n *\n * @param name Name of the database.\n * @param version Schema version.\n * @param callbacks Additional callbacks.\n */\nfunction openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) {\n const request = indexedDB.open(name, version);\n const openPromise = wrap(request);\n if (upgrade) {\n request.addEventListener('upgradeneeded', (event) => {\n upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event);\n });\n }\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event.newVersion, event));\n }\n openPromise\n .then((db) => {\n if (terminated)\n db.addEventListener('close', () => terminated());\n if (blocking) {\n db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event));\n }\n })\n .catch(() => { });\n return openPromise;\n}\n/**\n * Delete a database.\n *\n * @param name Name of the database.\n */\nfunction deleteDB(name, { blocked } = {}) {\n const request = indexedDB.deleteDatabase(name);\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to 
https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event));\n }\n return wrap(request).then(() => undefined);\n}\n\nconst readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];\nconst writeMethods = ['put', 'add', 'delete', 'clear'];\nconst cachedMethods = new Map();\nfunction getMethod(target, prop) {\n if (!(target instanceof IDBDatabase &&\n !(prop in target) &&\n typeof prop === 'string')) {\n return;\n }\n if (cachedMethods.get(prop))\n return cachedMethods.get(prop);\n const targetFuncName = prop.replace(/FromIndex$/, '');\n const useIndex = prop !== targetFuncName;\n const isWrite = writeMethods.includes(targetFuncName);\n if (\n // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.\n !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) ||\n !(isWrite || readMethods.includes(targetFuncName))) {\n return;\n }\n const method = async function (storeName, ...args) {\n // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(\n const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly');\n let target = tx.store;\n if (useIndex)\n target = target.index(args.shift());\n // Must reject if op rejects.\n // If it's a write operation, must reject if tx.done rejects.\n // Must reject with op rejection first.\n // Must resolve with op value.\n // Must handle both promises (no unhandled rejections)\n return (await Promise.all([\n target[targetFuncName](...args),\n isWrite && tx.done,\n ]))[0];\n };\n cachedMethods.set(prop, method);\n return method;\n}\nreplaceTraps((oldTraps) => ({\n ...oldTraps,\n get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),\n has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop),\n}));\n\nexport { deleteDB, openDB };\n",null,null,null,null,null,null,null,null,null,null,null,"import * as fs from 'fs';\nimport * as path from 'path';\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Matrix.ts — tolerant, safe helpers with dimension checks and stable ops\nclass DimError extends Error {\n constructor(msg) {\n super(msg);\n this.name = 'DimError';\n }\n}\nconst EPS$5 = 1e-12;\n/* ===================== Array-like coercion helpers ===================== */\n// ✅ Narrow to ArrayLike<number> so numeric indexing is allowed\nfunction isArrayLikeRow(row) {\n return row != null && typeof row.length === 'number';\n}\n/**\n * Coerce any 2D array-like into a strict rectangular number[][]\n * - If width is not provided, infer from the first row's length\n * - Pads/truncates to width\n * - Non-finite values become 0\n */\nfunction ensureRectNumber2D(M, width, name = 'matrix') {\n if (!M || typeof M.length !== 'number') {\n throw new DimError(`${name} must be a non-empty 2D array`);\n }\n const rows = Array.from(M);\n if (rows.length === 0)\n throw new DimError(`${name} is empty`);\n const first = rows[0];\n if (!isArrayLikeRow(first))\n throw new DimError(`${name} row 0 missing/invalid`);\n const C = ((width !== null && width !== void 0 ? width : first.length) | 0);\n if (C <= 0)\n throw new DimError(`${name} has zero width`);\n const out = new Array(rows.length);\n for (let r = 0; r < rows.length; r++) {\n const src = rows[r];\n const rr = new Array(C);\n if (isArrayLikeRow(src)) {\n const sr = src; // ✅ typed\n for (let c = 0; c < C; c++) {\n const v = sr[c];\n rr[c] = Number.isFinite(v) ? 
Number(v) : 0;\n }\n }\n else {\n for (let c = 0; c < C; c++)\n rr[c] = 0;\n }\n out[r] = rr;\n }\n return out;\n}\n/**\n * Relaxed rectangularity check:\n * - Accepts any array-like rows (typed arrays included)\n * - Verifies consistent width and finite numbers\n */\nfunction assertRect(A, name = 'matrix') {\n if (!A || typeof A.length !== 'number') {\n throw new DimError(`${name} must be a non-empty 2D array`);\n }\n const rows = A.length | 0;\n if (rows <= 0)\n throw new DimError(`${name} must be a non-empty 2D array`);\n const first = A[0];\n if (!isArrayLikeRow(first))\n throw new DimError(`${name} row 0 missing/invalid`);\n const C = first.length | 0;\n if (C <= 0)\n throw new DimError(`${name} must have positive column count`);\n for (let r = 0; r < rows; r++) {\n const rowAny = A[r];\n if (!isArrayLikeRow(rowAny)) {\n throw new DimError(`${name} row ${r} invalid`);\n }\n const row = rowAny; // ✅ typed\n if ((row.length | 0) !== C) {\n throw new DimError(`${name} has ragged rows: row 0 = ${C} cols, row ${r} = ${row.length} cols`);\n }\n for (let c = 0; c < C; c++) {\n const v = row[c];\n if (!Number.isFinite(v)) {\n throw new DimError(`${name} row ${r}, col ${c} is not finite: ${v}`);\n }\n }\n }\n}\nfunction assertMulDims(A, B) {\n assertRect(A, 'A');\n assertRect(B, 'B');\n const nA = A[0].length;\n const mB = B.length;\n if (nA !== mB) {\n throw new DimError(`matmul dims mismatch: A(${A.length}x${nA}) * B(${mB}x${B[0].length})`);\n }\n}\nfunction isSquare(A) {\n return isArrayLikeRow(A === null || A === void 0 ? void 0 : A[0]) && (A.length === (A[0].length | 0));\n}\nfunction isSymmetric(A, tol = 1e-10) {\n if (!isSquare(A))\n return false;\n const n = A.length;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n if (Math.abs(A[i][j] - A[j][i]) > tol)\n return false;\n }\n }\n return true;\n}\n/* ============================== Matrix ============================== */\nclass Matrix {\n /* ========= constructors / basics ========= */\n static shape(A) {\n assertRect(A, 'A');\n return [A.length, A[0].length];\n }\n static clone(A) {\n assertRect(A, 'A');\n return ensureRectNumber2D(A, A[0].length, 'A(clone)');\n }\n static zeros(rows, cols) {\n const out = new Array(rows);\n for (let i = 0; i < rows; i++)\n out[i] = new Array(cols).fill(0);\n return out;\n }\n static identity(n) {\n const I = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++)\n I[i][i] = 1;\n return I;\n }\n static transpose(A) {\n assertRect(A, 'A');\n const m = A.length, n = A[0].length;\n const T = Matrix.zeros(n, m);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n for (let j = 0; j < n; j++)\n T[j][i] = Number(Ai[j]);\n }\n return T;\n }\n /* ========= algebra ========= */\n static add(A, B) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertRect(A, 'A');\n assertRect(B, 'B');\n if (A.length !== B.length || A[0].length !== B[0].length) {\n throw new DimError(`add dims mismatch: A(${A.length}x${A[0].length}) vs B(${B.length}x${B[0].length})`);\n }\n const m = A.length, n = A[0].length;\n const C = Matrix.zeros(m, n);\n for (let i = 0; i < m; i++) {\n const Ai = A[i], Bi = B[i], Ci = C[i];\n for (let j = 0; j < n; j++)\n Ci[j] = Ai[j] + Bi[j];\n }\n return C;\n }\n /** Adds lambda to the diagonal (ridge regularization) */\n static addRegularization(A, lambda = 1e-6) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A)) {\n throw new DimError(`addRegularization expects square matrix, got 
${A.length}x${A[0].length}`);\n }\n const C = Matrix.clone(A);\n for (let i = 0; i < C.length; i++)\n C[i][i] += lambda;\n return C;\n }\n static multiply(A, B) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertMulDims(A, B);\n const m = A.length, n = B.length, p = B[0].length;\n const C = Matrix.zeros(m, p);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n for (let k = 0; k < n; k++) {\n const aik = Number(Ai[k]);\n const Bk = B[k];\n for (let j = 0; j < p; j++)\n C[i][j] += aik * Number(Bk[j]);\n }\n }\n return C;\n }\n static multiplyVec(A, v) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!v || typeof v.length !== 'number') {\n throw new DimError(`matvec expects vector 'v' with length ${A[0].length}`);\n }\n if (A[0].length !== v.length) {\n throw new DimError(`matvec dims mismatch: A cols ${A[0].length} vs v len ${v.length}`);\n }\n const m = A.length, n = v.length;\n const out = new Array(m).fill(0);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n let s = 0;\n for (let j = 0; j < n; j++)\n s += Number(Ai[j]) * Number(v[j]);\n out[i] = s;\n }\n return out;\n }\n /* ========= decompositions / solve ========= */\n static cholesky(A, jitter = 0) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A))\n throw new DimError(`cholesky expects square matrix, got ${A.length}x${A[0].length}`);\n const n = A.length;\n const L = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n if (i === j) {\n const v = sum + jitter;\n L[i][j] = Math.sqrt(Math.max(v, EPS$5));\n }\n else {\n L[i][j] = sum / L[j][j];\n }\n }\n }\n return L;\n }\n static solveCholesky(A, B, jitter = 1e-10) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertRect(A, 'A');\n assertRect(B, 'B');\n if (!isSquare(A) || A.length !== B.length) {\n throw new DimError(`solveCholesky dims: A(${A.length}x${A[0].length}) vs B(${B.length}x${B[0].length})`);\n }\n const n = A.length, k = B[0].length;\n const L = Matrix.cholesky(A, jitter);\n // Solve L Z = B (forward)\n const Z = Matrix.zeros(n, k);\n for (let i = 0; i < n; i++) {\n for (let c = 0; c < k; c++) {\n let s = B[i][c];\n for (let p = 0; p < i; p++)\n s -= L[i][p] * Z[p][c];\n Z[i][c] = s / L[i][i];\n }\n }\n // Solve L^T X = Z (backward)\n const X = Matrix.zeros(n, k);\n for (let i = n - 1; i >= 0; i--) {\n for (let c = 0; c < k; c++) {\n let s = Z[i][c];\n for (let p = i + 1; p < n; p++)\n s -= L[p][i] * X[p][c];\n X[i][c] = s / L[i][i];\n }\n }\n return X;\n }\n static inverse(A) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A))\n throw new DimError(`inverse expects square matrix, got ${A.length}x${A[0].length}`);\n const n = A.length;\n const M = Matrix.clone(A);\n const I = Matrix.identity(n);\n // Augment [M | I]\n const aug = new Array(n);\n for (let i = 0; i < n; i++)\n aug[i] = M[i].concat(I[i]);\n const cols = 2 * n;\n for (let p = 0; p < n; p++) {\n // Pivot\n let maxRow = p, maxVal = Math.abs(aug[p][p]);\n for (let r = p + 1; r < n; r++) {\n const v = Math.abs(aug[r][p]);\n if (v > maxVal) {\n maxVal = v;\n maxRow = r;\n }\n }\n if (maxVal < EPS$5)\n throw new Error('Matrix is singular or ill-conditioned');\n if (maxRow !== p) {\n const tmp = aug[p];\n aug[p] = aug[maxRow];\n aug[maxRow] = tmp;\n }\n // Normalize pivot row\n const piv = aug[p][p];\n const 
invPiv = 1 / piv;\n for (let c = 0; c < cols; c++)\n aug[p][c] *= invPiv;\n // Eliminate other rows\n for (let r = 0; r < n; r++) {\n if (r === p)\n continue;\n const f = aug[r][p];\n if (Math.abs(f) < EPS$5)\n continue;\n for (let c = 0; c < cols; c++)\n aug[r][c] -= f * aug[p][c];\n }\n }\n // Extract right half as inverse\n const inv = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n for (let j = 0; j < n; j++)\n inv[i][j] = aug[i][n + j];\n }\n return inv;\n }\n /* ========= helpers ========= */\n static inverseSPDOrFallback(A) {\n if (isSymmetric(A)) {\n try {\n return Matrix.solveCholesky(A, Matrix.identity(A.length), 1e-10);\n }\n catch (_a) {\n // fall through\n }\n }\n return Matrix.inverse(A);\n }\n /* ========= Symmetric Eigen (Jacobi) & Inverse Square Root ========= */\n static assertSquare(A, ctx = 'Matrix') {\n assertRect(A, ctx);\n if (!isSquare(A)) {\n throw new DimError(`${ctx}: expected square matrix, got ${A.length}x${A[0].length}`);\n }\n }\n static eigSym(A, maxIter = 64, tol = 1e-12) {\n A = ensureRectNumber2D(A, undefined, 'eigSym/A');\n Matrix.assertSquare(A, 'eigSym');\n const n = A.length;\n const B = Matrix.clone(A);\n let V = Matrix.identity(n);\n const abs = Math.abs;\n const offdiagNorm = () => {\n let s = 0;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = B[i][j];\n s += v * v;\n }\n }\n return Math.sqrt(s);\n };\n for (let it = 0; it < maxIter; it++) {\n if (offdiagNorm() <= tol)\n break;\n let p = 0, q = 1, max = 0;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = abs(B[i][j]);\n if (v > max) {\n max = v;\n p = i;\n q = j;\n }\n }\n }\n if (max <= tol)\n break;\n const app = B[p][p], aqq = B[q][q], apq = B[p][q];\n const tau = (aqq - app) / (2 * apq);\n const t = Math.sign(tau) / (abs(tau) + Math.sqrt(1 + tau * tau));\n const c = 1 / Math.sqrt(1 + t * t);\n const s = t * c;\n const Bpp = c * c * app - 2 * s * c * apq + s * s * aqq;\n const Bqq = s * s * app + 2 * s * c * apq + c * c * aqq;\n B[p][p] = Bpp;\n B[q][q] = Bqq;\n B[p][q] = B[q][p] = 0;\n for (let k = 0; k < n; k++) {\n if (k === p || k === q)\n continue;\n const aip = B[k][p], aiq = B[k][q];\n const new_kp = c * aip - s * aiq;\n const new_kq = s * aip + c * aiq;\n B[k][p] = B[p][k] = new_kp;\n B[k][q] = B[q][k] = new_kq;\n }\n for (let k = 0; k < n; k++) {\n const vip = V[k][p], viq = V[k][q];\n V[k][p] = c * vip - s * viq;\n V[k][q] = s * vip + c * viq;\n }\n }\n const vals = new Array(n);\n for (let i = 0; i < n; i++)\n vals[i] = B[i][i];\n const order = vals.map((v, i) => [v, i]).sort((a, b) => a[0] - b[0]).map(([, i]) => i);\n const values = order.map(i => vals[i]);\n const vectors = Matrix.zeros(n, n);\n for (let r = 0; r < n; r++) {\n for (let c = 0; c < n; c++)\n vectors[r][c] = V[r][order[c]];\n }\n return { values, vectors };\n }\n static invSqrtSym(A, eps = 1e-10) {\n A = ensureRectNumber2D(A, undefined, 'invSqrtSym/A');\n Matrix.assertSquare(A, 'invSqrtSym');\n const { values, vectors: U } = Matrix.eigSym(A);\n const n = values.length;\n const Dm12 = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n const lam = Math.max(values[i], eps);\n Dm12[i][i] = 1 / Math.sqrt(lam);\n }\n const UD = Matrix.multiply(U, Dm12);\n return Matrix.multiply(UD, Matrix.transpose(U));\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// Activations.ts - Common activation functions (with derivatives)\nclass Activations {\n /* ========= Forward ========= */\n /** Rectified Linear Unit */\n static relu(x) {\n return x > 0 ? x : 0;\n }\n /** Leaky ReLU with configurable slope for x<0 (default 0.01) */\n static leakyRelu(x, alpha = 0.01) {\n return x >= 0 ? x : alpha * x;\n }\n /** Logistic sigmoid */\n static sigmoid(x) {\n return 1 / (1 + Math.exp(-x));\n }\n /** Hyperbolic tangent */\n static tanh(x) {\n return Math.tanh(x);\n }\n /** Linear / identity activation */\n static linear(x) {\n return x;\n }\n /**\n * GELU (Gaussian Error Linear Unit), tanh approximation.\n * 0.5 * x * (1 + tanh(√(2/π) * (x + 0.044715 x^3)))\n */\n static gelu(x) {\n const k = Math.sqrt(2 / Math.PI);\n const u = k * (x + 0.044715 * x * x * x);\n return 0.5 * x * (1 + Math.tanh(u));\n }\n /**\n * Softmax with numerical stability and optional temperature.\n * @param arr logits\n * @param temperature >0; higher = flatter distribution\n */\n static softmax(arr, temperature = 1) {\n const t = Math.max(temperature, 1e-12);\n let max = -Infinity;\n for (let i = 0; i < arr.length; i++) {\n const v = arr[i] / t;\n if (v > max)\n max = v;\n }\n const exps = new Array(arr.length);\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n const e = Math.exp(arr[i] / t - max);\n exps[i] = e;\n sum += e;\n }\n const denom = sum || 1e-12;\n for (let i = 0; i < exps.length; i++)\n exps[i] = exps[i] / denom;\n return exps;\n }\n /* ========= Derivatives (elementwise) ========= */\n /** d/dx ReLU */\n static dRelu(x) {\n // subgradient at 0 -> 0\n return x > 0 ? 1 : 0;\n }\n /** d/dx LeakyReLU */\n static dLeakyRelu(x, alpha = 0.01) {\n return x >= 0 ? 1 : alpha;\n }\n /** d/dx Sigmoid = s(x)*(1-s(x)) */\n static dSigmoid(x) {\n const s = Activations.sigmoid(x);\n return s * (1 - s);\n }\n /** d/dx tanh = 1 - tanh(x)^2 */\n static dTanh(x) {\n const t = Math.tanh(x);\n return 1 - t * t;\n }\n /** d/dx Linear = 1 */\n static dLinear(_) {\n return 1;\n }\n /**\n * d/dx GELU (tanh approximation)\n * 0.5*(1 + tanh(u)) + 0.5*x*(1 - tanh(u)^2) * du/dx\n * where u = k*(x + 0.044715 x^3), du/dx = k*(1 + 0.134145 x^2), k = sqrt(2/pi)\n */\n static dGelu(x) {\n const k = Math.sqrt(2 / Math.PI);\n const x2 = x * x;\n const u = k * (x + 0.044715 * x * x2);\n const t = Math.tanh(u);\n const sech2 = 1 - t * t;\n const du = k * (1 + 0.134145 * x2);\n return 0.5 * (1 + t) + 0.5 * x * sech2 * du;\n }\n /* ========= Apply helpers ========= */\n /** Apply an elementwise activation across a 2D matrix, returning a new matrix. */\n static apply(matrix, fn) {\n const out = new Array(matrix.length);\n for (let i = 0; i < matrix.length; i++) {\n const row = matrix[i];\n const r = new Array(row.length);\n for (let j = 0; j < row.length; j++)\n r[j] = fn(row[j]);\n out[i] = r;\n }\n return out;\n }\n /** Apply an elementwise derivative across a 2D matrix, returning a new matrix. */\n static applyDerivative(matrix, dfn) {\n const out = new Array(matrix.length);\n for (let i = 0; i < matrix.length; i++) {\n const row = matrix[i];\n const r = new Array(row.length);\n for (let j = 0; j < row.length; j++)\n r[j] = dfn(row[j]);\n out[i] = r;\n }\n return out;\n }\n /* ========= Getters ========= */\n /**\n * Get an activation function by name. 
Case-insensitive.\n * For leaky ReLU, you can pass { alpha } to override the negative slope.\n */\n static get(name, opts) {\n var _a;\n const key = name.toLowerCase();\n switch (key) {\n case 'relu': return this.relu;\n case 'leakyrelu':\n case 'leaky-relu': {\n const alpha = (_a = opts === null || opts === void 0 ? void 0 : opts.alpha) !== null && _a !== void 0 ? _a : 0.01;\n return (x) => this.leakyRelu(x, alpha);\n }\n case 'sigmoid': return this.sigmoid;\n case 'tanh': return this.tanh;\n case 'linear':\n case 'identity':\n case 'none': return this.linear;\n case 'gelu': return this.gelu;\n default:\n throw new Error(`Unknown activation: ${name}`);\n }\n }\n /** Get derivative function by name (mirrors get). */\n static getDerivative(name, opts) {\n var _a;\n const key = name.toLowerCase();\n switch (key) {\n case 'relu': return this.dRelu;\n case 'leakyrelu':\n case 'leaky-relu': {\n const alpha = (_a = opts === null || opts === void 0 ? void 0 : opts.alpha) !== null && _a !== void 0 ? _a : 0.01;\n return (x) => this.dLeakyRelu(x, alpha);\n }\n case 'sigmoid': return this.dSigmoid;\n case 'tanh': return this.dTanh;\n case 'linear':\n case 'identity':\n case 'none': return this.dLinear;\n case 'gelu': return this.dGelu;\n default:\n throw new Error(`Unknown activation derivative: ${name}`);\n }\n }\n /** Get both forward and derivative together. */\n static getPair(name, opts) {\n return { f: this.get(name, opts), df: this.getDerivative(name, opts) };\n }\n /* ========= Optional: Softmax Jacobian (for research/tools) ========= */\n /**\n * Given softmax probabilities p, returns the Jacobian J = diag(p) - p p^T\n * (Useful for analysis; not typically needed for ELM.)\n */\n static softmaxJacobian(p) {\n const n = p.length;\n const J = new Array(n);\n for (let i = 0; i < n; i++) {\n const row = new Array(n);\n for (let j = 0; j < n; j++) {\n row[j] = (i === j ? p[i] : 0) - p[i] * p[j];\n }\n J[i] = row;\n }\n return J;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMConfig.ts - Configuration interfaces, defaults, helpers for ELM-based models\n/* =========== Defaults =========== */\nconst defaultBase = {\n hiddenUnits: 50,\n activation: 'relu',\n ridgeLambda: 1e-2,\n weightInit: 'xavier',\n seed: 1337,\n dropout: 0,\n log: { verbose: true, toFile: false, modelName: 'Unnamed ELM Model', level: 'info' },\n};\nconst defaultNumericConfig = Object.assign(Object.assign({}, defaultBase), { useTokenizer: false });\nconst defaultTextConfig = Object.assign(Object.assign({}, defaultBase), { useTokenizer: true, maxLen: 30, charSet: 'abcdefghijklmnopqrstuvwxyz', tokenizerDelimiter: /\\s+/ });\n/* =========== Type guards =========== */\nfunction isTextConfig(cfg) {\n return cfg.useTokenizer === true;\n}\nfunction isNumericConfig(cfg) {\n return cfg.useTokenizer !== true;\n}\n/* =========== Helpers =========== */\n/**\n * Normalize a user config with sensible defaults depending on mode.\n * (Keeps the original structural type, only fills in missing optional fields.)\n */\nfunction normalizeConfig(cfg) {\n var _a, _b, _c, _d;\n if (isTextConfig(cfg)) {\n const merged = Object.assign(Object.assign(Object.assign({}, defaultTextConfig), cfg), { log: Object.assign(Object.assign({}, ((_a = defaultBase.log) !== null && _a !== void 0 ? _a : {})), ((_b = cfg.log) !== null && _b !== void 0 ? 
_b : {})) });\n return merged;\n }\n else {\n const merged = Object.assign(Object.assign(Object.assign({}, defaultNumericConfig), cfg), { log: Object.assign(Object.assign({}, ((_c = defaultBase.log) !== null && _c !== void 0 ? _c : {})), ((_d = cfg.log) !== null && _d !== void 0 ? _d : {})) });\n return merged;\n }\n}\n/**\n * Rehydrate text-specific fields from a JSON-safe config\n * (e.g., convert tokenizerDelimiter source string → RegExp).\n */\nfunction deserializeTextBits(config) {\n var _a, _b, _c, _d;\n // If useTokenizer not true, assume numeric config\n if (config.useTokenizer !== true) {\n const nc = Object.assign(Object.assign(Object.assign({}, defaultNumericConfig), config), { log: Object.assign(Object.assign({}, ((_a = defaultBase.log) !== null && _a !== void 0 ? _a : {})), ((_b = config.log) !== null && _b !== void 0 ? _b : {})) });\n return nc;\n }\n // Text config: coerce delimiter\n const tDelim = config.tokenizerDelimiter;\n let delimiter = undefined;\n if (tDelim instanceof RegExp) {\n delimiter = tDelim;\n }\n else if (typeof tDelim === 'string' && tDelim.length > 0) {\n delimiter = new RegExp(tDelim);\n }\n else {\n delimiter = defaultTextConfig.tokenizerDelimiter;\n }\n const tc = Object.assign(Object.assign(Object.assign({}, defaultTextConfig), config), { tokenizerDelimiter: delimiter, log: Object.assign(Object.assign({}, ((_c = defaultBase.log) !== null && _c !== void 0 ? _c : {})), ((_d = config.log) !== null && _d !== void 0 ? _d : {})), useTokenizer: true });\n return tc;\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass Tokenizer {\n constructor(customDelimiter) {\n this.delimiter = customDelimiter || /[\\s,.;!?()\\[\\]{}\"']+/;\n }\n tokenize(text) {\n if (typeof text !== 'string') {\n console.warn('[Tokenizer] Expected a string, got:', typeof text, text);\n try {\n text = String(text !== null && text !== void 0 ? text : '');\n }\n catch (_a) {\n return [];\n }\n }\n return text\n .trim()\n .toLowerCase()\n .split(this.delimiter)\n .filter(Boolean);\n }\n ngrams(tokens, n) {\n if (n <= 0 || tokens.length < n)\n return [];\n const result = [];\n for (let i = 0; i <= tokens.length - n; i++) {\n result.push(tokens.slice(i, i + n).join(' '));\n }\n return result;\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// TextEncoder.ts - Text preprocessing and one-hot encoding for ELM\nconst defaultTextEncoderConfig = {\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n maxLen: 15,\n useTokenizer: false\n};\nclass TextEncoder {\n constructor(config = {}) {\n const cfg = Object.assign(Object.assign({}, defaultTextEncoderConfig), config);\n this.charSet = cfg.charSet;\n this.charSize = cfg.charSet.length;\n this.maxLen = cfg.maxLen;\n this.useTokenizer = cfg.useTokenizer;\n if (this.useTokenizer) {\n this.tokenizer = new Tokenizer(config.tokenizerDelimiter);\n }\n }\n charToOneHot(c) {\n const index = this.charSet.indexOf(c.toLowerCase());\n const vec = Array(this.charSize).fill(0);\n if (index !== -1)\n vec[index] = 1;\n return vec;\n }\n textToVector(text) {\n let cleaned;\n if (this.useTokenizer && this.tokenizer) {\n const tokens = this.tokenizer.tokenize(text).join('');\n cleaned = tokens.slice(0, this.maxLen).padEnd(this.maxLen, ' ');\n }\n else {\n cleaned = text.toLowerCase().replace(new RegExp(`[^${this.charSet}]`, 'g'), '').padEnd(this.maxLen, ' ').slice(0, this.maxLen);\n }\n const vec = [];\n for (let i = 0; i < cleaned.length; i++) {\n vec.push(...this.charToOneHot(cleaned[i]));\n }\n return vec;\n }\n normalizeVector(v) {\n const norm = Math.sqrt(v.reduce((sum, x) => sum + x * x, 0));\n return norm > 0 ? v.map(x => x / norm) : v;\n }\n getVectorSize() {\n return this.charSize * this.maxLen;\n }\n getCharSet() {\n return this.charSet;\n }\n getMaxLen() {\n return this.maxLen;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// UniversalEncoder.ts - Automatically selects appropriate encoder (char or token based)\nconst defaultUniversalConfig = {\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n maxLen: 15,\n useTokenizer: false,\n mode: 'char'\n};\nclass UniversalEncoder {\n constructor(config = {}) {\n const merged = Object.assign(Object.assign({}, defaultUniversalConfig), config);\n const useTokenizer = merged.mode === 'token';\n this.encoder = new TextEncoder({\n charSet: merged.charSet,\n maxLen: merged.maxLen,\n useTokenizer,\n tokenizerDelimiter: config.tokenizerDelimiter\n });\n }\n encode(text) {\n return this.encoder.textToVector(text);\n }\n normalize(v) {\n return this.encoder.normalizeVector(v);\n }\n getVectorSize() {\n return this.encoder.getVectorSize();\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Augment.ts - Basic augmentation utilities for category training examples\nclass Augment {\n static addSuffix(text, suffixes) {\n return suffixes.map(suffix => `${text} ${suffix}`);\n }\n static addPrefix(text, prefixes) {\n return prefixes.map(prefix => `${prefix} ${text}`);\n }\n static addNoise(text, charSet, noiseRate = 0.1) {\n const chars = text.split('');\n for (let i = 0; i < chars.length; i++) {\n if (Math.random() < noiseRate) {\n const randomChar = charSet[Math.floor(Math.random() * charSet.length)];\n chars[i] = randomChar;\n }\n }\n return chars.join('');\n }\n static mix(text, mixins) {\n return mixins.map(m => `${text} ${m}`);\n }\n static generateVariants(text, charSet, options) {\n const variants = [text];\n if (options === null || options === void 0 ? void 0 : options.suffixes) {\n variants.push(...this.addSuffix(text, options.suffixes));\n }\n if (options === null || options === void 0 ? void 0 : options.prefixes) {\n variants.push(...this.addPrefix(text, options.prefixes));\n }\n if (options === null || options === void 0 ? 
void 0 : options.includeNoise) {\n variants.push(this.addNoise(text, charSet));\n }\n return variants;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELM.ts - Core ELM logic with TypeScript types (numeric & text modes)\n// Seeded PRNG (xorshift-ish) for deterministic init\nfunction makePRNG$2(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n return ((s >>> 0) / 0xffffffff);\n };\n}\nfunction clampInt(x, lo, hi) {\n const xi = x | 0;\n return xi < lo ? lo : (xi > hi ? hi : xi);\n}\nfunction isOneHot2D(Y) {\n return Array.isArray(Y) && Array.isArray(Y[0]) && Number.isFinite(Y[0][0]);\n}\nfunction maxLabel(y) {\n let m = -Infinity;\n for (let i = 0; i < y.length; i++) {\n const v = y[i] | 0;\n if (v > m)\n m = v;\n }\n return m === -Infinity ? 0 : m;\n}\n/** One-hot (clamped) */\nfunction toOneHotClamped(labels, k) {\n const K = k | 0;\n const Y = new Array(labels.length);\n for (let i = 0; i < labels.length; i++) {\n const j = clampInt(labels[i], 0, K - 1);\n const row = new Array(K).fill(0);\n row[j] = 1;\n Y[i] = row;\n }\n return Y;\n}\n/** (HᵀH + λI)B = HᵀY solved via Cholesky */\nfunction ridgeSolve$1(H, Y, lambda) {\n const Ht = Matrix.transpose(H);\n const A = Matrix.addRegularization(Matrix.multiply(Ht, H), lambda + 1e-10);\n const R = Matrix.multiply(Ht, Y);\n return Matrix.solveCholesky(A, R, 1e-10);\n}\n/* =========================\n * ELM class\n * ========================= */\nclass ELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;\n // Merge with mode-appropriate defaults\n const cfg = normalizeConfig(config);\n this.config = cfg;\n this.categories = cfg.categories;\n this.hiddenUnits = cfg.hiddenUnits;\n this.activation = (_a = cfg.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.useTokenizer = isTextConfig(cfg);\n this.maxLen = isTextConfig(cfg) ? cfg.maxLen : 0;\n this.charSet = isTextConfig(cfg) ? ((_b = cfg.charSet) !== null && _b !== void 0 ? _b : 'abcdefghijklmnopqrstuvwxyz') : 'abcdefghijklmnopqrstuvwxyz';\n this.tokenizerDelimiter = isTextConfig(cfg) ? cfg.tokenizerDelimiter : undefined;\n this.metrics = cfg.metrics;\n this.verbose = (_d = (_c = cfg.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : true;\n this.modelName = (_f = (_e = cfg.log) === null || _e === void 0 ? void 0 : _e.modelName) !== null && _f !== void 0 ? _f : 'Unnamed ELM Model';\n this.logToFile = (_h = (_g = cfg.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false;\n this.dropout = (_j = cfg.dropout) !== null && _j !== void 0 ? _j : 0;\n this.ridgeLambda = Math.max((_k = cfg.ridgeLambda) !== null && _k !== void 0 ? _k : 1e-2, 1e-8);\n // Seeded RNG\n const seed = (_l = cfg.seed) !== null && _l !== void 0 ? _l : 1337;\n this.rng = makePRNG$2(seed);\n // Create encoder only if tokenizer is enabled\n if (this.useTokenizer) {\n this.encoder = new UniversalEncoder({\n charSet: this.charSet,\n maxLen: this.maxLen,\n useTokenizer: this.useTokenizer,\n tokenizerDelimiter: this.tokenizerDelimiter,\n mode: this.useTokenizer ? 'token' : 'char'\n });\n }\n // Weights are allocated on first training call (inputDim known then)\n this.model = null;\n }\n /* ========= Encoder narrowing (Option A) ========= */\n assertEncoder() {\n if (!this.encoder) {\n throw new Error('Encoder is not initialized. 
Enable useTokenizer:true or construct an encoder.');\n }\n return this.encoder;\n }\n /* ========= initialization ========= */\n xavierLimit(fanIn, fanOut) {\n return Math.sqrt(6 / (fanIn + fanOut));\n }\n randomMatrix(rows, cols) {\n var _a;\n const weightInit = (_a = this.config.weightInit) !== null && _a !== void 0 ? _a : 'uniform';\n if (weightInit === 'xavier') {\n const limit = this.xavierLimit(cols, rows);\n if (this.verbose)\n console.log(`✨ Xavier init with limit sqrt(6/(${cols}+${rows})) ≈ ${limit.toFixed(4)}`);\n return Array.from({ length: rows }, () => Array.from({ length: cols }, () => (this.rng() * 2 - 1) * limit));\n }\n else {\n if (this.verbose)\n console.log(`✨ Uniform init [-1,1] (seeded)`);\n return Array.from({ length: rows }, () => Array.from({ length: cols }, () => (this.rng() * 2 - 1)));\n }\n }\n buildHidden(X, W, b) {\n const tempH = Matrix.multiply(X, Matrix.transpose(W)); // N x hidden\n const activationFn = Activations.get(this.activation);\n let H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), activationFn);\n if (this.dropout > 0) {\n const keepProb = 1 - this.dropout;\n for (let i = 0; i < H.length; i++) {\n for (let j = 0; j < H[0].length; j++) {\n if (this.rng() < this.dropout)\n H[i][j] = 0;\n else\n H[i][j] /= keepProb;\n }\n }\n }\n return H;\n }\n /* ========= public helpers ========= */\n oneHot(n, index) {\n return Array.from({ length: n }, (_, i) => (i === index ? 1 : 0));\n }\n setCategories(categories) {\n this.categories = categories;\n }\n loadModelFromJSON(json) {\n var _a, _b, _c, _d, _e;\n try {\n const parsed = JSON.parse(json);\n const cfg = deserializeTextBits(parsed.config);\n // Rebuild instance config\n this.config = cfg;\n this.categories = (_a = cfg.categories) !== null && _a !== void 0 ? _a : this.categories;\n this.hiddenUnits = (_b = cfg.hiddenUnits) !== null && _b !== void 0 ? _b : this.hiddenUnits;\n this.activation = (_c = cfg.activation) !== null && _c !== void 0 ? _c : this.activation;\n this.useTokenizer = cfg.useTokenizer === true;\n this.maxLen = (_d = cfg.maxLen) !== null && _d !== void 0 ? _d : this.maxLen;\n this.charSet = (_e = cfg.charSet) !== null && _e !== void 0 ? _e : this.charSet;\n this.tokenizerDelimiter = cfg.tokenizerDelimiter;\n if (this.useTokenizer) {\n this.encoder = new UniversalEncoder({\n charSet: this.charSet,\n maxLen: this.maxLen,\n useTokenizer: this.useTokenizer,\n tokenizerDelimiter: this.tokenizerDelimiter,\n mode: this.useTokenizer ? 'token' : 'char'\n });\n }\n else {\n this.encoder = undefined;\n }\n // Restore weights\n const { W, b, B } = parsed;\n this.model = { W, b, beta: B };\n this.savedModelJSON = json;\n if (this.verbose)\n console.log(`✅ ${this.modelName} Model loaded from JSON`);\n }\n catch (e) {\n console.error(`❌ Failed to load ${this.modelName} model from JSON:`, e);\n }\n }\n /* ========= Numeric training tolerance ========= */\n /** Decide output dimension from config/categories/labels/one-hot */\n resolveOutputDim(yOrY) {\n // Prefer explicit config\n const cfgOut = this.config.outputDim;\n if (Number.isFinite(cfgOut) && cfgOut > 0)\n return cfgOut | 0;\n // Then categories length if present\n if (Array.isArray(this.categories) && this.categories.length > 0)\n return this.categories.length | 0;\n // Infer from data\n if (isOneHot2D(yOrY))\n return (yOrY[0].length | 0) || 1;\n return (maxLabel(yOrY) + 1) | 0;\n }\n /** Coerce X, and turn labels→one-hot if needed. 
Always returns strict number[][] */\n coerceXY(X, yOrY) {\n const Xnum = ensureRectNumber2D(X, undefined, 'X');\n const outDim = this.resolveOutputDim(yOrY);\n let Ynum;\n if (isOneHot2D(yOrY)) {\n // Ensure rect with exact width outDim (pad/trunc to be safe)\n Ynum = ensureRectNumber2D(yOrY, outDim, 'Y(one-hot)');\n }\n else {\n // Labels → clamped one-hot\n Ynum = ensureRectNumber2D(toOneHotClamped(yOrY, outDim), outDim, 'Y(labels→one-hot)');\n }\n // If categories length mismatches inferred outDim, adjust categories (non-breaking)\n if (!this.categories || this.categories.length !== outDim) {\n this.categories = Array.from({ length: outDim }, (_, i) => { var _a, _b; return (_b = (_a = this.categories) === null || _a === void 0 ? void 0 : _a[i]) !== null && _b !== void 0 ? _b : String(i); });\n }\n return { Xnum, Ynum, outDim };\n }\n /* ========= Training on numeric vectors =========\n * y can be class indices OR one-hot.\n */\n trainFromData(X, y, options) {\n if (!(X === null || X === void 0 ? void 0 : X.length))\n throw new Error('trainFromData: X is empty');\n // Coerce & shape\n const { Xnum, Ynum, outDim } = this.coerceXY(X, y);\n const n = Xnum.length;\n const inputDim = Xnum[0].length;\n // init / reuse\n let W, b;\n const reuseWeights = (options === null || options === void 0 ? void 0 : options.reuseWeights) === true && this.model;\n if (reuseWeights && this.model) {\n W = this.model.W;\n b = this.model.b;\n if (this.verbose)\n console.log('🔄 Reusing existing weights/biases for training.');\n }\n else {\n W = this.randomMatrix(this.hiddenUnits, inputDim);\n b = this.randomMatrix(this.hiddenUnits, 1);\n if (this.verbose)\n console.log('✨ Initializing fresh weights/biases for training.');\n }\n // Hidden\n let H = this.buildHidden(Xnum, W, b);\n // Optional sample weights\n let Yw = Ynum;\n if (options === null || options === void 0 ? 
void 0 : options.weights) {\n const ww = options.weights;\n if (ww.length !== n) {\n throw new Error(`Weight array length ${ww.length} does not match sample count ${n}`);\n }\n H = H.map((row, i) => row.map(x => x * Math.sqrt(ww[i])));\n Yw = Ynum.map((row, i) => row.map(x => x * Math.sqrt(ww[i])));\n }\n // Solve ridge (stable)\n const beta = ridgeSolve$1(H, Yw, this.ridgeLambda);\n this.model = { W, b, beta };\n // Evaluate & maybe save\n const predictions = Matrix.multiply(H, beta);\n if (this.metrics) {\n const rmse = this.calculateRMSE(Ynum, predictions);\n const mae = this.calculateMAE(Ynum, predictions);\n const acc = this.calculateAccuracy(Ynum, predictions);\n const f1 = this.calculateF1Score(Ynum, predictions);\n const ce = this.calculateCrossEntropy(Ynum, predictions);\n const r2 = this.calculateR2Score(Ynum, predictions);\n const results = { rmse, mae, accuracy: acc, f1, crossEntropy: ce, r2 };\n let allPassed = true;\n if (this.metrics.rmse !== undefined && rmse > this.metrics.rmse)\n allPassed = false;\n if (this.metrics.mae !== undefined && mae > this.metrics.mae)\n allPassed = false;\n if (this.metrics.accuracy !== undefined && acc < this.metrics.accuracy)\n allPassed = false;\n if (this.metrics.f1 !== undefined && f1 < this.metrics.f1)\n allPassed = false;\n if (this.metrics.crossEntropy !== undefined && ce > this.metrics.crossEntropy)\n allPassed = false;\n if (this.metrics.r2 !== undefined && r2 < this.metrics.r2)\n allPassed = false;\n if (this.verbose)\n this.logMetrics(results);\n if (allPassed) {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model passed thresholds and was saved to JSON.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n else {\n if (this.verbose)\n console.log('❌ Model not saved: One or more thresholds not met.');\n }\n }\n else {\n // No metrics—always save\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model trained with no metrics—saved by default.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n return { epochs: 1, metrics: undefined };\n }\n /* ========= Training from category strings (text mode) ========= */\n train(augmentationOptions, weights) {\n if (!this.useTokenizer) {\n throw new Error('train(): text training requires useTokenizer:true');\n }\n const enc = this.assertEncoder();\n const X = [];\n let Y = [];\n this.categories.forEach((cat, i) => {\n const variants = Augment.generateVariants(cat, this.charSet, augmentationOptions);\n for (const variant of variants) {\n const vec = enc.normalize(enc.encode(variant));\n X.push(vec);\n Y.push(this.oneHot(this.categories.length, i));\n }\n });\n const inputDim = X[0].length;\n const W = this.randomMatrix(this.hiddenUnits, inputDim);\n const b = this.randomMatrix(this.hiddenUnits, 1);\n let H = this.buildHidden(X, W, b);\n if (weights) {\n if (weights.length !== H.length) {\n throw new Error(`Weight array length ${weights.length} does not match sample count ${H.length}`);\n }\n H = H.map((row, i) => row.map(x => x * Math.sqrt(weights[i])));\n Y = Y.map((row, i) => row.map(x => x * Math.sqrt(weights[i])));\n }\n const beta = ridgeSolve$1(H, Y, this.ridgeLambda);\n this.model = { W, b, beta };\n const predictions = Matrix.multiply(H, beta);\n if (this.metrics) {\n const rmse = this.calculateRMSE(Y, predictions);\n const mae = 
this.calculateMAE(Y, predictions);\n const acc = this.calculateAccuracy(Y, predictions);\n const f1 = this.calculateF1Score(Y, predictions);\n const ce = this.calculateCrossEntropy(Y, predictions);\n const r2 = this.calculateR2Score(Y, predictions);\n const results = { rmse, mae, accuracy: acc, f1, crossEntropy: ce, r2 };\n let allPassed = true;\n if (this.metrics.rmse !== undefined && rmse > this.metrics.rmse)\n allPassed = false;\n if (this.metrics.mae !== undefined && mae > this.metrics.mae)\n allPassed = false;\n if (this.metrics.accuracy !== undefined && acc < this.metrics.accuracy)\n allPassed = false;\n if (this.metrics.f1 !== undefined && f1 < this.metrics.f1)\n allPassed = false;\n if (this.metrics.crossEntropy !== undefined && ce > this.metrics.crossEntropy)\n allPassed = false;\n if (this.metrics.r2 !== undefined && r2 < this.metrics.r2)\n allPassed = false;\n if (this.verbose)\n this.logMetrics(results);\n if (allPassed) {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model passed thresholds and was saved to JSON.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n else {\n if (this.verbose)\n console.log('❌ Model not saved: One or more thresholds not met.');\n }\n }\n else {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model trained with no metrics—saved by default.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n return { epochs: 1, metrics: undefined };\n }\n /* ========= Prediction ========= */\n /** Text prediction (uses Option A narrowing) */\n predict(text, topK = 5) {\n if (!this.model)\n throw new Error('Model not trained.');\n if (!this.useTokenizer) {\n throw new Error('predict(text) requires useTokenizer:true');\n }\n const enc = this.assertEncoder();\n const vec = enc.normalize(enc.encode(text));\n const logits = this.predictLogitsFromVector(vec);\n const probs = Activations.softmax(logits);\n return probs\n .map((p, i) => ({ label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, topK);\n }\n /** Vector batch prediction (kept for back-compat) */\n predictFromVector(inputVecRows, topK = 5) {\n if (!this.model)\n throw new Error('Model not trained.');\n return inputVecRows.map(vec => {\n const logits = this.predictLogitsFromVector(vec);\n const probs = Activations.softmax(logits);\n return probs\n .map((p, i) => ({ label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, topK);\n });\n }\n /** Raw logits for a single numeric vector */\n predictLogitsFromVector(vec) {\n if (!this.model)\n throw new Error('Model not trained.');\n const { W, b, beta } = this.model;\n // Hidden\n const tempH = Matrix.multiply([vec], Matrix.transpose(W)); // 1 x hidden\n const activationFn = Activations.get(this.activation);\n const H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), activationFn); // 1 x hidden\n // Output logits\n return Matrix.multiply(H, beta)[0]; // 1 x outDim → vec\n }\n /** Raw logits for a batch of numeric vectors */\n predictLogitsFromVectors(X) {\n if (!this.model)\n throw new Error('Model not trained.');\n const { W, b, beta } = this.model;\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const activationFn = Activations.get(this.activation);\n const H = Activations.apply(tempH.map(row => 
row.map((val, j) => val + b[j][0])), activationFn);\n return Matrix.multiply(H, beta);\n }\n /** Probability vector (softmax) for a single numeric vector */\n predictProbaFromVector(vec) {\n return Activations.softmax(this.predictLogitsFromVector(vec));\n }\n /** Probability matrix (softmax per row) for a batch of numeric vectors */\n predictProbaFromVectors(X) {\n return this.predictLogitsFromVectors(X).map(Activations.softmax);\n }\n /** Top-K results for a single numeric vector */\n predictTopKFromVector(vec, k = 5) {\n const probs = this.predictProbaFromVector(vec);\n return probs\n .map((p, i) => ({ index: i, label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k);\n }\n /** Top-K results for a batch of numeric vectors */\n predictTopKFromVectors(X, k = 5) {\n return this.predictProbaFromVectors(X).map(row => row\n .map((p, i) => ({ index: i, label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k));\n }\n /* ========= Metrics ========= */\n calculateRMSE(Y, P) {\n const N = Y.length, C = Y[0].length;\n let sum = 0;\n for (let i = 0; i < N; i++)\n for (let j = 0; j < C; j++) {\n const d = Y[i][j] - P[i][j];\n sum += d * d;\n }\n return Math.sqrt(sum / (N * C));\n }\n calculateMAE(Y, P) {\n const N = Y.length, C = Y[0].length;\n let sum = 0;\n for (let i = 0; i < N; i++)\n for (let j = 0; j < C; j++) {\n sum += Math.abs(Y[i][j] - P[i][j]);\n }\n return sum / (N * C);\n }\n calculateAccuracy(Y, P) {\n let correct = 0;\n for (let i = 0; i < Y.length; i++) {\n const yMax = this.argmax(Y[i]);\n const pMax = this.argmax(P[i]);\n if (yMax === pMax)\n correct++;\n }\n return correct / Y.length;\n }\n calculateF1Score(Y, P) {\n let tp = 0, fp = 0, fn = 0;\n for (let i = 0; i < Y.length; i++) {\n const yIdx = this.argmax(Y[i]);\n const pIdx = this.argmax(P[i]);\n if (yIdx === pIdx)\n tp++;\n else {\n fp++;\n fn++;\n }\n }\n const precision = tp / (tp + fp || 1);\n const recall = tp / (tp + fn || 1);\n return 2 * (precision * recall) / (precision + recall || 1);\n }\n calculateCrossEntropy(Y, P) {\n let loss = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < Y[0].length; j++) {\n const pred = Math.min(Math.max(P[i][j], 1e-15), 1 - 1e-15);\n loss += -Y[i][j] * Math.log(pred);\n }\n }\n return loss / Y.length;\n }\n calculateR2Score(Y, P) {\n const C = Y[0].length;\n const mean = new Array(C).fill(0);\n for (let i = 0; i < Y.length; i++)\n for (let j = 0; j < C; j++)\n mean[j] += Y[i][j];\n for (let j = 0; j < C; j++)\n mean[j] /= Y.length;\n let ssRes = 0, ssTot = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < C; j++) {\n ssRes += Math.pow(Y[i][j] - P[i][j], 2);\n ssTot += Math.pow(Y[i][j] - mean[j], 2);\n }\n }\n return 1 - ssRes / ssTot;\n }\n /* ========= Hidden layer / embeddings ========= */\n computeHiddenLayer(X) {\n if (!this.model)\n throw new Error('Model not trained.');\n const WX = Matrix.multiply(X, Matrix.transpose(this.model.W));\n const WXb = WX.map(row => row.map((val, j) => val + this.model.b[j][0]));\n const activationFn = Activations.get(this.activation);\n return WXb.map(row => row.map(activationFn));\n }\n getEmbedding(X) {\n return this.computeHiddenLayer(X);\n }\n /* ========= Logging & export ========= */\n logMetrics(results) {\n var _a, _b, _c, _d, _e, _f;\n const logLines = [`📋 ${this.modelName} — Metrics Summary:`];\n const push = (label, value, threshold, cmp) => {\n if (threshold !== undefined)\n logLines.push(` ${label}: ${value.toFixed(4)} (threshold: ${cmp} 
${threshold})`);\n };\n push('RMSE', results.rmse, (_a = this.metrics) === null || _a === void 0 ? void 0 : _a.rmse, '<=');\n push('MAE', results.mae, (_b = this.metrics) === null || _b === void 0 ? void 0 : _b.mae, '<=');\n push('Accuracy', results.accuracy, (_c = this.metrics) === null || _c === void 0 ? void 0 : _c.accuracy, '>=');\n push('F1 Score', results.f1, (_d = this.metrics) === null || _d === void 0 ? void 0 : _d.f1, '>=');\n push('Cross-Entropy', results.crossEntropy, (_e = this.metrics) === null || _e === void 0 ? void 0 : _e.crossEntropy, '<=');\n push('R² Score', results.r2, (_f = this.metrics) === null || _f === void 0 ? void 0 : _f.r2, '>=');\n if (this.verbose)\n console.log('\\n' + logLines.join('\\n'));\n if (this.logToFile) {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const logFile = this.config.logFileName || `${this.modelName.toLowerCase().replace(/\\s+/g, '_')}_metrics_${timestamp}.txt`;\n const blob = new Blob([logLines.join('\\n')], { type: 'text/plain' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = logFile;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n }\n }\n saveModelAsJSONFile(filename) {\n if (!this.savedModelJSON) {\n if (this.verbose)\n console.warn('No model saved — did not meet metric thresholds.');\n return;\n }\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const fallback = `${this.modelName.toLowerCase().replace(/\\s+/g, '_')}_${timestamp}.json`;\n const finalName = filename || this.config.exportFileName || fallback;\n const blob = new Blob([this.savedModelJSON], { type: 'application/json' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = finalName;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n if (this.verbose)\n console.log(`📦 Model exported as ${finalName}`);\n }\n serializeConfig() {\n const cfg = Object.assign({}, this.config);\n // Remove non-serializable / volatile fields\n delete cfg.seed;\n delete cfg.log;\n delete cfg.encoder;\n // Serialize tokenizerDelimiter for JSON\n if (cfg.tokenizerDelimiter instanceof RegExp) {\n cfg.tokenizerDelimiter = cfg.tokenizerDelimiter.source;\n }\n return cfg;\n }\n argmax(arr) {\n let i = 0;\n for (let k = 1; k < arr.length; k++)\n if (arr[k] > arr[i])\n i = k;\n return i;\n }\n getEncoder() {\n return this.encoder;\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// KernelELM.ts — Kernel Extreme Learning Machine (Exact + Nyström + Whitening)\n// Dependencies: Matrix (multiply, transpose, addRegularization, solveCholesky, identity, zeros)\nclass KernelRegistry {\n static register(name, fn) {\n if (!name || typeof fn !== 'function')\n throw new Error('KernelRegistry.register: invalid args');\n this.map.set(name, fn);\n }\n static get(name) {\n const f = this.map.get(name);\n if (!f)\n throw new Error(`KernelRegistry: kernel \"${name}\" not found`);\n return f;\n }\n}\nKernelRegistry.map = new Map();\nfunction l2sq(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return s;\n}\nfunction l1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.abs(a[i] - b[i]);\n return s;\n}\nfunction dot$4(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction softmaxRow(v) {\n const m = Math.max(...v);\n const ex = v.map(x => Math.exp(x - m));\n const s = ex.reduce((a, b) => a + b, 0) || 1;\n return ex.map(e => e / s);\n}\nfunction makePRNG$1(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => { s ^= s << 13; s ^= s >>> 17; s ^= s << 5; return (s >>> 0) / 0xffffffff; };\n}\nfunction buildKernel(spec, dim) {\n var _a, _b, _c, _d, _e;\n switch (spec.type) {\n case 'custom':\n if (!spec.name)\n throw new Error('custom kernel requires \"name\"');\n return KernelRegistry.get(spec.name);\n case 'linear':\n return (x, z) => dot$4(x, z);\n case 'poly': {\n const gamma = (_a = spec.gamma) !== null && _a !== void 0 ? _a : 1 / Math.max(1, dim);\n const degree = (_b = spec.degree) !== null && _b !== void 0 ? _b : 2;\n const coef0 = (_c = spec.coef0) !== null && _c !== void 0 ? _c : 1;\n return (x, z) => Math.pow(gamma * dot$4(x, z) + coef0, degree);\n }\n case 'laplacian': {\n const gamma = (_d = spec.gamma) !== null && _d !== void 0 ? _d : 1 / Math.max(1, dim);\n return (x, z) => Math.exp(-gamma * l1(x, z));\n }\n case 'rbf':\n default: {\n const gamma = (_e = spec.gamma) !== null && _e !== void 0 ? 
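// --- Usage sketch (editor annotation) ---
// Registering a custom kernel and resolving it through the 'custom' branch of
// buildKernel above. KernelRegistry.register/get and l1 are module-internal
// helpers defined in this file; the kernel body itself is illustrative.
KernelRegistry.register('abs-exp', (x, z) => Math.exp(-l1(x, z)));
const customSpec = { type: 'custom', name: 'abs-exp' };
const kfn = buildKernel(customSpec, 3);  // dim is ignored for 'custom'
kfn([1, 2, 3], [1, 2, 2]);               // -> Math.exp(-1)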
_e : 1 / Math.max(1, dim);\n return (x, z) => Math.exp(-gamma * l2sq(x, z));\n }\n }\n}\n/* ============== Landmark selection (Nyström) ============== */\nfunction pickUniform(X, m, seed = 1337) {\n const prng = makePRNG$1(seed);\n const N = X.length;\n const idx = Array.from({ length: N }, (_, i) => i);\n // Fisher–Yates (only first m)\n for (let i = 0; i < m; i++) {\n const j = i + Math.floor(prng() * (N - i));\n const t = idx[i];\n idx[i] = idx[j];\n idx[j] = t;\n }\n return idx.slice(0, m);\n}\nfunction pickKMeansPP(X, m, seed = 1337) {\n const prng = makePRNG$1(seed);\n const N = X.length;\n if (m >= N)\n return Array.from({ length: N }, (_, i) => i);\n const centers = [];\n centers.push(Math.floor(prng() * N));\n const D2 = new Float64Array(N).fill(Infinity);\n while (centers.length < m) {\n const c = centers[centers.length - 1];\n for (let i = 0; i < N; i++) {\n const d2 = l2sq(X[i], X[c]);\n if (d2 < D2[i])\n D2[i] = d2;\n }\n let sum = 0;\n for (let i = 0; i < N; i++)\n sum += D2[i];\n let r = prng() * (sum || 1);\n let next = 0;\n for (let i = 0; i < N; i++) {\n r -= D2[i];\n if (r <= 0) {\n next = i;\n break;\n }\n }\n centers.push(next);\n }\n return centers;\n}\n/* ====================== KernelELM ====================== */\nclass KernelELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s;\n // exact mode params\n this.Xtrain = [];\n this.alpha = [];\n // nystrom params\n this.Z = []; // landmarks (m x D)\n this.W = []; // weights in feature space (m x K)\n this.R = []; // symmetric whitener K_mm^{-1/2} (m x m) when whitening\n const resolved = {\n outputDim: config.outputDim,\n kernel: config.kernel,\n ridgeLambda: (_a = config.ridgeLambda) !== null && _a !== void 0 ? _a : 1e-2,\n task: (_b = config.task) !== null && _b !== void 0 ? _b : 'classification',\n mode: (_c = config.mode) !== null && _c !== void 0 ? _c : 'exact',\n nystrom: {\n m: (_d = config.nystrom) === null || _d === void 0 ? void 0 : _d.m,\n strategy: (_f = (_e = config.nystrom) === null || _e === void 0 ? void 0 : _e.strategy) !== null && _f !== void 0 ? _f : 'uniform',\n seed: (_h = (_g = config.nystrom) === null || _g === void 0 ? void 0 : _g.seed) !== null && _h !== void 0 ? _h : 1337,\n preset: (_j = config.nystrom) === null || _j === void 0 ? void 0 : _j.preset,\n whiten: (_l = (_k = config.nystrom) === null || _k === void 0 ? void 0 : _k.whiten) !== null && _l !== void 0 ? _l : false,\n jitter: (_o = (_m = config.nystrom) === null || _m === void 0 ? void 0 : _m.jitter) !== null && _o !== void 0 ? _o : 1e-10,\n },\n log: {\n modelName: (_q = (_p = config.log) === null || _p === void 0 ? void 0 : _p.modelName) !== null && _q !== void 0 ? _q : 'KernelELM',\n verbose: (_s = (_r = config.log) === null || _r === void 0 ? void 0 : _r.verbose) !== null && _s !== void 0 ? _s : false,\n },\n };\n this.cfg = resolved;\n this.verbose = this.cfg.log.verbose;\n this.name = this.cfg.log.modelName;\n }\n /* ------------------- Train ------------------- */\n fit(X, Y) {\n var _a, _b, _c, _d, _e;\n if (!(X === null || X === void 0 ? void 0 : X.length) || !((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length))\n throw new Error('KernelELM.fit: empty X');\n if (!(Y === null || Y === void 0 ? void 0 : Y.length) || !((_b = Y[0]) === null || _b === void 0 ? 
void 0 : _b.length))\n throw new Error('KernelELM.fit: empty Y');\n if (X.length !== Y.length)\n throw new Error(`KernelELM.fit: X rows ${X.length} != Y rows ${Y.length}`);\n if (Y[0].length !== this.cfg.outputDim) {\n throw new Error(`KernelELM.fit: Y dims ${Y[0].length} != outputDim ${this.cfg.outputDim}`);\n }\n const N = X.length, D = X[0].length, K = Y[0].length;\n this.kernel = buildKernel(this.cfg.kernel, D);\n if (this.cfg.mode === 'exact') {\n // Gram K (N x N)\n if (this.verbose)\n console.log(`🔧 [${this.name}] exact Gram: N=${N}, D=${D}`);\n const Kmat = new Array(N);\n for (let i = 0; i < N; i++) {\n const row = new Array(N);\n Kmat[i] = row;\n row[i] = 1;\n for (let j = i + 1; j < N; j++)\n row[j] = this.kernel(X[i], X[j]);\n }\n for (let i = 1; i < N; i++)\n for (let j = 0; j < i; j++)\n Kmat[i][j] = Kmat[j][i];\n // (K + λI) α = Y\n const A = Matrix.addRegularization(Kmat, this.cfg.ridgeLambda + 1e-10);\n const Alpha = Matrix.solveCholesky(A, Y, 1e-12); // (N x K)\n this.Xtrain = X.map(r => r.slice());\n this.alpha = Alpha;\n this.Z = [];\n this.W = [];\n this.R = [];\n if (this.verbose)\n console.log(`✅ [${this.name}] exact fit complete: alpha(${N}x${K})`);\n return;\n }\n // ---------- Nyström ----------\n const ny = this.cfg.nystrom;\n let Z;\n if (ny.strategy === 'preset' && (((_c = ny.preset) === null || _c === void 0 ? void 0 : _c.points) || ((_d = ny.preset) === null || _d === void 0 ? void 0 : _d.indices))) {\n Z = ny.preset.points ? ny.preset.points.map(r => r.slice())\n : ny.preset.indices.map(i => X[i]);\n }\n else {\n const m = (_e = ny.m) !== null && _e !== void 0 ? _e : Math.max(10, Math.min(300, Math.floor(Math.sqrt(N))));\n const idx = (ny.strategy === 'kmeans++') ? pickKMeansPP(X, m, ny.seed) : pickUniform(X, m, ny.seed);\n Z = idx.map(i => X[i]);\n }\n const m = Z.length;\n if (this.verbose)\n console.log(`🔹 [${this.name}] Nyström: m=${m}, strategy=${ny.strategy}, whiten=${ny.whiten ? 
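// --- Configuration sketch (editor annotation) ---
// A Nyström-mode KernelELM, using only the config fields resolved by the
// constructor above (mode, nystrom.m/strategy/seed/whiten/jitter). With
// strategy 'kmeans++' fit() selects landmarks via pickKMeansPP, builds K_nm,
// optionally whitens with R = K_mm^{-1/2}, then solves ridge in the m-dim space.
const kelmNy = new KernelELM({
  outputDim: 3,
  kernel: { type: 'rbf', gamma: 0.5 },
  ridgeLambda: 1e-2,
  mode: 'nystrom',
  nystrom: { m: 64, strategy: 'kmeans++', seed: 42, whiten: true, jitter: 1e-10 },
  log: { modelName: 'KELM-Nystrom', verbose: false },
});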
'on' : 'off'}`);\n // K_nm (N x m)\n const Knm = new Array(N);\n for (let i = 0; i < N; i++) {\n const row = new Array(m), xi = X[i];\n for (let j = 0; j < m; j++)\n row[j] = this.kernel(xi, Z[j]);\n Knm[i] = row;\n }\n // Optional whitening with R = K_mm^{-1/2} (symmetric via eigen)\n let Phi = Knm;\n let R = [];\n if (ny.whiten) {\n // K_mm (m x m)\n const Kmm = new Array(m);\n for (let i = 0; i < m; i++) {\n const row = new Array(m);\n Kmm[i] = row;\n row[i] = 1;\n for (let j = i + 1; j < m; j++)\n row[j] = this.kernel(Z[i], Z[j]);\n }\n for (let i = 1; i < m; i++)\n for (let j = 0; j < i; j++)\n Kmm[i][j] = Kmm[j][i];\n // R = K_mm^{-1/2} with jitter\n const KmmJ = Matrix.addRegularization(Kmm, ny.jitter);\n R = Matrix.invSqrtSym(KmmJ, ny.jitter);\n Phi = Matrix.multiply(Knm, R); // (N x m)\n }\n // Ridge in feature space: W = (Φᵀ Φ + λ I)^-1 Φᵀ Y (m x K)\n const PhiT = Matrix.transpose(Phi);\n const G = Matrix.multiply(PhiT, Phi); // (m x m)\n const Greg = Matrix.addRegularization(G, this.cfg.ridgeLambda + 1e-10);\n const Rhs = Matrix.multiply(PhiT, Y); // (m x K)\n const W = Matrix.solveCholesky(Greg, Rhs, 1e-12); // (m x K)\n this.Z = Z;\n this.W = W;\n this.R = R; // empty when whiten=false\n this.Xtrain = [];\n this.alpha = [];\n if (this.verbose)\n console.log(`✅ [${this.name}] Nyström fit complete: Z(${m}x${D}), W(${m}x${K})`);\n }\n /* --------------- Features / Predict --------------- */\n featuresFor(X) {\n if (this.cfg.mode === 'exact') {\n const N = this.Xtrain.length, M = X.length;\n const Kqx = new Array(M);\n for (let i = 0; i < M; i++) {\n const row = new Array(N), xi = X[i];\n for (let j = 0; j < N; j++)\n row[j] = this.kernel(xi, this.Xtrain[j]);\n Kqx[i] = row;\n }\n return Kqx;\n }\n // Nyström\n if (!this.Z.length)\n throw new Error('featuresFor: Nyström model not fitted');\n const M = X.length, m = this.Z.length;\n const Kxm = new Array(M);\n for (let i = 0; i < M; i++) {\n const row = new Array(m), xi = X[i];\n for (let j = 0; j < m; j++)\n row[j] = this.kernel(xi, this.Z[j]);\n Kxm[i] = row;\n }\n return this.R.length ? Matrix.multiply(Kxm, this.R) : Kxm;\n }\n /** Raw logits for batch (M x K) */\n predictLogitsFromVectors(X) {\n const Phi = this.featuresFor(X);\n if (this.cfg.mode === 'exact') {\n if (!this.alpha.length)\n throw new Error('predict: exact model not fitted');\n return Matrix.multiply(Phi, this.alpha);\n }\n if (!this.W.length)\n throw new Error('predict: Nyström model not fitted');\n return Matrix.multiply(Phi, this.W);\n }\n /** Probabilities for classification; raw scores for regression */\n predictProbaFromVectors(X) {\n const logits = this.predictLogitsFromVectors(X);\n return this.cfg.task === 'classification' ? 
logits.map(softmaxRow) : logits;\n }\n /** Top-K for classification */\n predictTopKFromVectors(X, k = 5) {\n const P = this.predictProbaFromVectors(X);\n return P.map(row => row.map((p, i) => ({ index: i, prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k));\n }\n /** Embedding for chaining:\n * - exact: Φ = K(X, X_train) (M x N)\n * - nystrom: Φ = K(X, Z) (M x m) or K(X,Z)·R if whiten=true\n */\n getEmbedding(X) {\n return this.featuresFor(X);\n }\n /* -------------------- JSON I/O -------------------- */\n toJSON() {\n const base = { config: Object.assign(Object.assign({}, this.cfg), { __version: 'kelm-2.1.0' }) };\n if (this.cfg.mode === 'exact') {\n return Object.assign(Object.assign({}, base), { X: this.Xtrain, alpha: this.alpha });\n }\n return Object.assign(Object.assign({}, base), { Z: this.Z, W: this.W, R: this.R.length ? this.R : undefined });\n }\n fromJSON(payload) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n const obj = typeof payload === 'string' ? JSON.parse(payload) : payload;\n // Merge config (keep current defaults where missing)\n this.cfg.kernel = Object.assign({}, obj.config.kernel);\n this.cfg.ridgeLambda = (_a = obj.config.ridgeLambda) !== null && _a !== void 0 ? _a : this.cfg.ridgeLambda;\n this.cfg.task = ((_b = obj.config.task) !== null && _b !== void 0 ? _b : this.cfg.task);\n this.cfg.mode = ((_c = obj.config.mode) !== null && _c !== void 0 ? _c : this.cfg.mode);\n this.cfg.nystrom = Object.assign(Object.assign({}, this.cfg.nystrom), ((_d = obj.config.nystrom) !== null && _d !== void 0 ? _d : {}));\n // Restore params\n if (obj.X && obj.alpha) {\n this.Xtrain = obj.X.map(r => r.slice());\n this.alpha = obj.alpha.map(r => r.slice());\n this.Z = [];\n this.W = [];\n this.R = [];\n const D = (_f = (_e = this.Xtrain[0]) === null || _e === void 0 ? void 0 : _e.length) !== null && _f !== void 0 ? _f : 1;\n this.kernel = buildKernel(this.cfg.kernel, D);\n return;\n }\n if (obj.Z && obj.W) {\n this.Z = obj.Z.map(r => r.slice());\n this.W = obj.W.map(r => r.slice());\n this.R = obj.R ? obj.R.map(r => r.slice()) : [];\n this.Xtrain = [];\n this.alpha = [];\n const D = (_h = (_g = this.Z[0]) === null || _g === void 0 ? void 0 : _g.length) !== null && _h !== void 0 ? _h : 1;\n this.kernel = buildKernel(this.cfg.kernel, D);\n return;\n }\n throw new Error('KernelELM.fromJSON: invalid payload');\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// OnlineELM.ts — Online / OS-ELM with RLS updates\n/* ========== utils ========== */\nconst EPS$4 = 1e-10;\nfunction makePRNG(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n return ((s >>> 0) / 0xffffffff);\n };\n}\n/* ========== Online ELM (RLS) ========== */\nclass OnlineELM {\n constructor(cfg) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.inputDim = cfg.inputDim | 0;\n this.outputDim = cfg.outputDim | 0;\n this.hiddenUnits = cfg.hiddenUnits | 0;\n if (this.inputDim <= 0 || this.outputDim <= 0 || this.hiddenUnits <= 0) {\n throw new Error(`OnlineELM: invalid dims (inputDim=${this.inputDim}, outputDim=${this.outputDim}, hidden=${this.hiddenUnits})`);\n }\n this.activation = (_a = cfg.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.ridgeLambda = Math.max((_b = cfg.ridgeLambda) !== null && _b !== void 0 ? _b : 1e-2, EPS$4);\n this.weightInit = (_c = cfg.weightInit) !== null && _c !== void 0 ? 
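// --- Usage sketch (editor annotation) ---
// Exact-mode KernelELM end to end, using only methods defined above. Data is
// illustrative; outputDim must equal Y's column count or fit() throws.
const kelm = new KernelELM({ outputDim: 2, kernel: { type: 'rbf' }, mode: 'exact' });
const Xk = [[0, 0], [0, 1], [1, 0], [1, 1]];
const Yk = [[1, 0], [0, 1], [0, 1], [1, 0]];   // one-hot XOR-style labels
kelm.fit(Xk, Yk);                               // solves (K + λI) α = Y via Cholesky
const Pk = kelm.predictProbaFromVectors([[0.9, 0.1]]); // softmax per row (classification)
// JSON round trip: toJSON embeds config plus X/alpha (exact) or Z/W/R (Nyström).
const payload = kelm.toJSON();
const restored = new KernelELM({ outputDim: 2, kernel: { type: 'rbf' } });
restored.fromJSON(payload);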
_c : 'xavier';\n this.forgettingFactor = Math.max(Math.min((_d = cfg.forgettingFactor) !== null && _d !== void 0 ? _d : 1.0, 1.0), 1e-4);\n this.verbose = (_f = (_e = cfg.log) === null || _e === void 0 ? void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false;\n this.modelName = (_h = (_g = cfg.log) === null || _g === void 0 ? void 0 : _g.modelName) !== null && _h !== void 0 ? _h : 'Online ELM';\n const seed = (_j = cfg.seed) !== null && _j !== void 0 ? _j : 1337;\n this.rng = makePRNG(seed);\n this.actFn = Activations.get(this.activation);\n // Random features\n this.W = this.initW(this.hiddenUnits, this.inputDim);\n this.b = this.initB(this.hiddenUnits);\n // Not initialized yet — init() will set these\n this.beta = null;\n this.P = null;\n }\n /* ===== init helpers ===== */\n xavierLimit(fanIn, fanOut) { return Math.sqrt(6 / (fanIn + fanOut)); }\n heLimit(fanIn) { return Math.sqrt(6 / fanIn); }\n initW(rows, cols) {\n let limit = 1;\n if (this.weightInit === 'xavier') {\n limit = this.xavierLimit(cols, rows);\n if (this.verbose)\n console.log(`✨ [${this.modelName}] Xavier W ~ U(±${limit.toFixed(4)})`);\n }\n else if (this.weightInit === 'he') {\n limit = this.heLimit(cols);\n if (this.verbose)\n console.log(`✨ [${this.modelName}] He W ~ U(±${limit.toFixed(4)})`);\n }\n else if (this.verbose) {\n console.log(`✨ [${this.modelName}] Uniform W ~ U(±1)`);\n }\n const rnd = () => (this.rng() * 2 - 1) * limit;\n return Array.from({ length: rows }, () => Array.from({ length: cols }, rnd));\n }\n initB(rows) {\n const rnd = () => (this.rng() * 2 - 1) * 0.01;\n return Array.from({ length: rows }, () => [rnd()]);\n }\n hidden(X) {\n const tempH = Matrix.multiply(X, Matrix.transpose(this.W)); // (n x hidden)\n const f = this.actFn;\n return tempH.map(row => row.map((v, j) => f(v + this.b[j][0])));\n }\n /* ===== public API ===== */\n /** Initialize β and P from a batch (ridge): P0=(HᵀH+λI)^-1, β0=P0 HᵀY */\n init(X0, Y0) {\n if (!(X0 === null || X0 === void 0 ? void 0 : X0.length) || !(Y0 === null || Y0 === void 0 ? void 0 : Y0.length))\n throw new Error('init: empty X0 or Y0');\n if (X0.length !== Y0.length)\n throw new Error(`init: X0 rows ${X0.length} != Y0 rows ${Y0.length}`);\n if (X0[0].length !== this.inputDim)\n throw new Error(`init: X0 cols ${X0[0].length} != inputDim ${this.inputDim}`);\n if (Y0[0].length !== this.outputDim)\n throw new Error(`init: Y0 cols ${Y0[0].length} != outputDim ${this.outputDim}`);\n const H0 = this.hidden(X0); // (n x h)\n const Ht = Matrix.transpose(H0); // (h x n)\n const A = Matrix.addRegularization(Matrix.multiply(Ht, H0), this.ridgeLambda + 1e-10); // (h x h)\n const R = Matrix.multiply(Ht, Y0); // (h x k)\n const P0 = Matrix.solveCholesky(A, Matrix.identity(this.hiddenUnits), 1e-10); // A^-1\n const B0 = Matrix.multiply(P0, R); // (h x k)\n this.P = P0;\n this.beta = B0;\n if (this.verbose)\n console.log(`✅ [${this.modelName}] init: n=${X0.length}, hidden=${this.hiddenUnits}, out=${this.outputDim}`);\n }\n /** If not initialized, init(); otherwise RLS update. */\n fit(X, Y) {\n if (!(X === null || X === void 0 ? void 0 : X.length) || !(Y === null || Y === void 0 ? 
void 0 : Y.length))\n throw new Error('fit: empty X or Y');\n if (X.length !== Y.length)\n throw new Error(`fit: X rows ${X.length} != Y rows ${Y.length}`);\n if (!this.P || !this.beta)\n this.init(X, Y);\n else\n this.update(X, Y);\n }\n /**\n * RLS / OS-ELM update with forgetting ρ:\n * S = I + HPHᵀ\n * K = P Hᵀ S^-1\n * β ← β + K (Y - Hβ)\n * P ← (P - K H P) / ρ\n */\n update(X, Y) {\n if (!(X === null || X === void 0 ? void 0 : X.length) || !(Y === null || Y === void 0 ? void 0 : Y.length))\n throw new Error('update: empty X or Y');\n if (X.length !== Y.length)\n throw new Error(`update: X rows ${X.length} != Y rows ${Y.length}`);\n if (!this.P || !this.beta)\n throw new Error('update: model not initialized (call init() first)');\n const n = X.length;\n const H = this.hidden(X); // (n x h)\n const Ht = Matrix.transpose(H); // (h x n)\n const rho = this.forgettingFactor;\n let P = this.P;\n if (rho < 1.0) {\n // Equivalent to P <- P / ρ (more responsive to new data)\n P = P.map(row => row.map(v => v / rho));\n }\n // S = I + H P Hᵀ (n x n, SPD)\n const HP = Matrix.multiply(H, P); // (n x h)\n const HPHt = Matrix.multiply(HP, Ht); // (n x n)\n const S = Matrix.add(HPHt, Matrix.identity(n));\n const S_inv = Matrix.solveCholesky(S, Matrix.identity(n), 1e-10);\n // K = P Hᵀ S^-1 (h x n)\n const PHt = Matrix.multiply(P, Ht); // (h x n)\n const K = Matrix.multiply(PHt, S_inv); // (h x n)\n // Innovation: (Y - Hβ) (n x k)\n const Hbeta = Matrix.multiply(H, this.beta);\n const innov = Y.map((row, i) => row.map((yij, j) => yij - Hbeta[i][j]));\n // β ← β + K * innov\n const Delta = Matrix.multiply(K, innov); // (h x k)\n this.beta = this.beta.map((row, i) => row.map((bij, j) => bij + Delta[i][j]));\n // P ← P - K H P\n const KH = Matrix.multiply(K, H); // (h x h)\n const KHP = Matrix.multiply(KH, P); // (h x h)\n this.P = P.map((row, i) => row.map((pij, j) => pij - KHP[i][j]));\n if (this.verbose) {\n const diagAvg = this.P.reduce((s, r, i) => s + r[i], 0) / this.P.length;\n console.log(`🔁 [${this.modelName}] update: n=${n}, avg diag(P)≈${diagAvg.toFixed(6)}`);\n }\n }\n /* ===== Prediction ===== */\n logitsFromVectors(X) {\n if (!this.beta)\n throw new Error('predict: model not initialized');\n const H = this.hidden(X);\n return Matrix.multiply(H, this.beta);\n }\n predictLogitsFromVector(x) {\n return this.logitsFromVectors([x])[0];\n }\n predictLogitsFromVectors(X) {\n return this.logitsFromVectors(X);\n }\n predictProbaFromVector(x) {\n return Activations.softmax(this.predictLogitsFromVector(x));\n }\n predictProbaFromVectors(X) {\n return this.predictLogitsFromVectors(X).map(Activations.softmax);\n }\n predictTopKFromVector(x, k = 5) {\n const p = this.predictProbaFromVector(x);\n const kk = Math.max(1, Math.min(k, p.length));\n return p.map((prob, index) => ({ index, prob }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, kk);\n }\n predictTopKFromVectors(X, k = 5) {\n return this.predictProbaFromVectors(X).map(p => {\n const kk = Math.max(1, Math.min(k, p.length));\n return p.map((prob, index) => ({ index, prob }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, kk);\n });\n }\n /* ===== Serialization ===== */\n toJSON(includeP = false) {\n if (!this.beta || !this.P)\n throw new Error('toJSON: model not initialized');\n const cfg = {\n hiddenUnits: this.hiddenUnits,\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n activation: this.activation,\n ridgeLambda: this.ridgeLambda,\n weightInit: this.weightInit,\n forgettingFactor: this.forgettingFactor,\n __version: 
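// --- Usage sketch (editor annotation) ---
// OS-ELM streaming loop with the init/update API above. Dimensions and the
// forgetting factor are illustrative; fit() would also work, since it calls
// init() on first use and update() afterwards.
const oelm = new OnlineELM({
  inputDim: 4, outputDim: 2, hiddenUnits: 32,
  activation: 'relu', ridgeLambda: 1e-2, forgettingFactor: 0.99, seed: 7,
});
const X0 = [[0, 0, 1, 0], [1, 0, 0, 0], [0, 1, 0, 0]];
const Y0 = [[1, 0], [0, 1], [0, 1]];
oelm.init(X0, Y0);                       // batch ridge init: P0 = (HᵀH + λI)^-1, β0 = P0 HᵀY
oelm.update([[0, 0, 0, 1]], [[1, 0]]);   // RLS step: β ← β + K(Y − Hβ), P ← (P − KHP)/ρ
const topOnline = oelm.predictTopKFromVector([0, 0, 1, 0], 2);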
'online-elm-1.0.0',\n };\n const o = { W: this.W, b: this.b, B: this.beta, config: cfg };\n if (includeP)\n o.P = this.P;\n return o;\n }\n loadFromJSON(json) {\n var _a;\n const parsed = typeof json === 'string' ? JSON.parse(json) : json;\n const { W, b, B, P, config } = parsed;\n if (!W || !b || !B)\n throw new Error('loadFromJSON: missing W/b/B');\n if (W.length !== this.hiddenUnits || W[0].length !== this.inputDim) {\n throw new Error(`loadFromJSON: mismatched W shape (${W.length}x${W[0].length})`);\n }\n if (b.length !== this.hiddenUnits || b[0].length !== 1) {\n throw new Error(`loadFromJSON: mismatched b shape (${b.length}x${b[0].length})`);\n }\n if (B.length !== this.hiddenUnits || B[0].length !== this.outputDim) {\n throw new Error(`loadFromJSON: mismatched B shape (${B.length}x${B[0].length})`);\n }\n this.W = W;\n this.b = b;\n this.beta = B;\n this.P = P !== null && P !== void 0 ? P : null;\n if (config === null || config === void 0 ? void 0 : config.activation) {\n this.activation = config.activation;\n this.actFn = Activations.get(this.activation); // refresh cache\n }\n if (config === null || config === void 0 ? void 0 : config.ridgeLambda)\n this.ridgeLambda = config.ridgeLambda;\n if (this.verbose)\n console.log(`✅ [${this.modelName}] model loaded (v=${(_a = config === null || config === void 0 ? void 0 : config.__version) !== null && _a !== void 0 ? _a : 'n/a'})`);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMChain.ts — simple encoder pipeline with checks, normalization, and profiling\nfunction l2NormalizeRows$1(M) {\n return M.map(row => {\n let s = 0;\n for (let i = 0; i < row.length; i++)\n s += row[i] * row[i];\n const n = Math.sqrt(s) || 1;\n const inv = 1 / n;\n return row.map(v => v * inv);\n });\n}\nfunction asBatch(x) {\n return Array.isArray(x[0]) ? x : [x];\n}\nfunction fromBatch(y, originalWasVector) {\n var _a;\n return originalWasVector ? ((_a = y[0]) !== null && _a !== void 0 ? _a : []) : y;\n}\nclass ELMChain {\n constructor(encoders = [], opts) {\n var _a, _b, _c, _d, _e;\n this.lastDims = []; // input dim -> stage dims (for summary)\n this.encoders = [...encoders];\n this.opts = {\n normalizeEach: (_a = opts === null || opts === void 0 ? void 0 : opts.normalizeEach) !== null && _a !== void 0 ? _a : false,\n normalizeFinal: (_b = opts === null || opts === void 0 ? void 0 : opts.normalizeFinal) !== null && _b !== void 0 ? _b : false,\n validate: (_c = opts === null || opts === void 0 ? void 0 : opts.validate) !== null && _c !== void 0 ? _c : true,\n strict: (_d = opts === null || opts === void 0 ? void 0 : opts.strict) !== null && _d !== void 0 ? _d : true,\n name: (_e = opts === null || opts === void 0 ? void 0 : opts.name) !== null && _e !== void 0 ? 
_e : 'ELMChain',\n };\n }\n /** Add encoder at end */\n add(encoder) {\n this.encoders.push(encoder);\n }\n /** Insert encoder at position (0..length) */\n insertAt(index, encoder) {\n if (index < 0 || index > this.encoders.length)\n throw new Error('insertAt: index out of range');\n this.encoders.splice(index, 0, encoder);\n }\n /** Remove encoder at index; returns removed or undefined */\n removeAt(index) {\n if (index < 0 || index >= this.encoders.length)\n return undefined;\n return this.encoders.splice(index, 1)[0];\n }\n /** Remove all encoders */\n clear() {\n this.encoders.length = 0;\n this.lastDims.length = 0;\n }\n /** Number of stages */\n length() {\n return this.encoders.length;\n }\n /** Human-friendly overview (dims are filled after the first successful run) */\n summary() {\n const lines = [];\n lines.push(`📦 ${this.opts.name} — ${this.encoders.length} stage(s)`);\n this.encoders.forEach((enc, i) => {\n var _a, _b, _c;\n const nm = (_a = enc.name) !== null && _a !== void 0 ? _a : `Encoder#${i}`;\n const dimIn = (_b = this.lastDims[i]) !== null && _b !== void 0 ? _b : '?';\n const dimOut = (_c = this.lastDims[i + 1]) !== null && _c !== void 0 ? _c : '?';\n lines.push(` ${i}: ${nm} ${dimIn} → ${dimOut}`);\n });\n return lines.join('\\n');\n }\n getEmbedding(input) {\n var _a, _b;\n const wasVector = !Array.isArray(input[0]);\n const X0 = asBatch(input);\n if (this.opts.validate) {\n if (!X0.length || !((_a = X0[0]) === null || _a === void 0 ? void 0 : _a.length))\n throw new Error('ELMChain.getEmbedding: empty input');\n }\n let X = X0;\n this.lastDims = [X0[0].length];\n for (let i = 0; i < this.encoders.length; i++) {\n const enc = this.encoders[i];\n try {\n if (this.opts.validate) {\n // Ensure rows consistent\n const d = X[0].length;\n for (let r = 1; r < X.length; r++) {\n if (X[r].length !== d)\n throw new Error(`Stage ${i} input row ${r} has dim ${X[r].length} != ${d}`);\n }\n }\n let Y = enc.getEmbedding(X);\n if (this.opts.validate) {\n if (!Y.length || !((_b = Y[0]) === null || _b === void 0 ? void 0 : _b.length)) {\n throw new Error(`Stage ${i} produced empty output`);\n }\n }\n if (this.opts.normalizeEach) {\n Y = l2NormalizeRows$1(Y);\n }\n // Record dims for summary\n this.lastDims[i + 1] = Y[0].length;\n X = Y;\n }\n catch (err) {\n if (this.opts.strict)\n throw err;\n // Non-strict: return what we have so far\n return fromBatch(X, wasVector);\n }\n }\n if (this.opts.normalizeFinal && !this.opts.normalizeEach) {\n X = l2NormalizeRows$1(X);\n }\n return fromBatch(X, wasVector);\n }\n /**\n * Run once to collect per-stage timings (ms) and final dims.\n * Returns { timings, dims } where dims[i] is input dim to stage i,\n * dims[i+1] is that stage’s output dim.\n */\n profile(input) {\n !Array.isArray(input[0]);\n let X = asBatch(input);\n const timings = [];\n const dims = [X[0].length];\n for (let i = 0; i < this.encoders.length; i++) {\n const t0 = performance.now();\n X = this.encoders[i].getEmbedding(X);\n const t1 = performance.now();\n timings.push(t1 - t0);\n dims[i + 1] = X[0].length;\n }\n // Don’t mutate options; just return diagnostics\n return { timings, dims };\n }\n}\n\n// © 2026 AsterMind AI Co. 
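// --- Usage sketch (editor annotation) ---
// Composing encoder stages with ELMChain. Any object exposing getEmbedding(X)
// qualifies; the identity encoders here are illustrative stand-ins.
const encA = { name: 'IdentityA', getEmbedding: (X) => X };
const encB = { name: 'IdentityB', getEmbedding: (X) => X };
const chain = new ELMChain([encA], {
  normalizeEach: false,
  normalizeFinal: true,   // L2-normalize only the final stage's rows
  validate: true,
  strict: true,
  name: 'DemoChain',
});
chain.add(encB);
const Zc = chain.getEmbedding([[1, 2], [3, 4]]); // batch in, batch out; single vectors round-trip too
console.log(chain.summary());                     // stage dims are filled after the first run
const { timings, dims } = chain.profile([[1, 2]]); // per-stage ms and dims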
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMAdapter.ts — unify ELM / OnlineELM as EncoderLike for ELMChain\nfunction assertNonEmptyBatch(X, where) {\n if (!Array.isArray(X) || X.length === 0 || !Array.isArray(X[0]) || X[0].length === 0) {\n throw new Error(`${where}: expected non-empty (N x D) batch`);\n }\n}\nfunction matmulXWtAddB(X, // (N x D)\nW, // (H x D)\nb // (H x 1)\n) {\n var _a, _b, _c, _d, _e;\n const N = X.length, D = X[0].length, H = W.length;\n // quick shape sanity\n if (((_a = W[0]) === null || _a === void 0 ? void 0 : _a.length) !== D)\n throw new Error(`matmulXWtAddB: W is ${W.length}x${(_b = W[0]) === null || _b === void 0 ? void 0 : _b.length}, expected Hx${D}`);\n if (b.length !== H || ((_d = (_c = b[0]) === null || _c === void 0 ? void 0 : _c.length) !== null && _d !== void 0 ? _d : 0) !== 1)\n throw new Error(`matmulXWtAddB: b is ${b.length}x${(_e = b[0]) === null || _e === void 0 ? void 0 : _e.length}, expected Hx1`);\n const out = new Array(N);\n for (let n = 0; n < N; n++) {\n const xn = X[n];\n const row = new Array(H);\n for (let h = 0; h < H; h++) {\n const wh = W[h];\n let s = b[h][0] || 0;\n // unrolled dot\n for (let d = 0; d < D; d++)\n s += xn[d] * wh[d];\n row[h] = s;\n }\n out[n] = row;\n }\n return out;\n}\nclass ELMAdapter {\n constructor(target) {\n var _a, _b;\n this.target = target;\n this.mode = target.type === 'online' ? ((_a = target.mode) !== null && _a !== void 0 ? _a : 'hidden') : 'hidden';\n this.name = (_b = target.name) !== null && _b !== void 0 ? _b : (target.type === 'elm' ? 'ELM' : `OnlineELM(${this.mode})`);\n }\n /** Return embeddings for a batch (N x D) -> (N x H/L) */\n getEmbedding(X) {\n var _a, _b, _c, _d;\n assertNonEmptyBatch(X, `${this.name}.getEmbedding`);\n if (this.target.type === 'elm') {\n const m = this.target.model;\n // ELM already exposes getEmbedding()\n if (typeof m.getEmbedding !== 'function') {\n throw new Error(`${this.name}: underlying ELM lacks getEmbedding(X)`);\n }\n try {\n return m.getEmbedding(X);\n }\n catch (err) {\n // Helpful hint if model wasn’t trained\n if (m.model == null) {\n throw new Error(`${this.name}: model not trained/initialized (call train/trainFromData or load model).`);\n }\n throw err;\n }\n }\n // OnlineELM path\n const o = this.target.model;\n // Guard dims early\n const D = X[0].length;\n if (!Array.isArray(o.W) || ((_a = o.W[0]) === null || _a === void 0 ? void 0 : _a.length) !== D) {\n throw new Error(`${this.name}: input dim ${D} does not match model.W columns ${(_d = (_c = (_b = o.W) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.length) !== null && _d !== void 0 ? _d : 'n/a'}`);\n }\n if (this.mode === 'logits') {\n // Use public logits as an “embedding”\n try {\n return o.predictLogitsFromVectors(X);\n }\n catch (err) {\n if (o.beta == null) {\n throw new Error(`${this.name}: model not initialized (call init()/fit() before logits mode).`);\n }\n throw err;\n }\n }\n // mode === 'hidden' → compute hidden activations: act(X Wᵀ + b)\n const W = o.W;\n const BIAS = o.b;\n const actName = o.activation;\n const act = Activations.get((actName !== null && actName !== void 0 ? 
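// --- Usage sketch (editor annotation) ---
// Adapting trained models into chain stages. wrapELM and the ELMAdapter target
// shape ({ type, model, mode, name }) are defined in this file; this assumes
// elmDemo and oelm are the trained instances from the sketches above.
const stage1 = wrapELM(elmDemo, 'ELM-Stage');             // uses elmDemo.getEmbedding
const stage2 = new ELMAdapter({ type: 'online', model: oelm,
                                mode: 'hidden', name: 'OSELM-Hidden' }); // act(X Wᵀ + b)
const stage3 = new ELMAdapter({ type: 'online', model: oelm,
                                mode: 'logits' });        // β-projected logits as "embedding"
const chain2 = new ELMChain([stage1, stage2], { name: 'AdaptedChain' });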
actName : 'relu').toLowerCase());\n const Hpre = matmulXWtAddB(X, W, BIAS);\n // apply activation in-place\n for (let n = 0; n < Hpre.length; n++) {\n const row = Hpre[n];\n for (let j = 0; j < row.length; j++)\n row[j] = act(row[j]);\n }\n return Hpre;\n }\n}\n/* -------- convenience helpers -------- */\nfunction wrapELM(model, name) {\n return new ELMAdapter({ type: 'elm', model, name });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// DeepELM.ts — stacked ELM autoencoders + top ELM classifier\nclass DeepELM {\n constructor(cfg) {\n this.aeLayers = [];\n this.chain = null;\n this.clf = null;\n this.cfg = Object.assign({ clfHiddenUnits: 0, clfActivation: 'linear', clfWeightInit: 'xavier', normalizeEach: false, normalizeFinal: true }, cfg);\n }\n /** Layer-wise unsupervised training with Y=X (autoencoder). Returns transformed X_L. */\n fitAutoencoders(X) {\n var _a, _b, _c, _d;\n let cur = X;\n this.aeLayers = [];\n for (let i = 0; i < this.cfg.layers.length; i++) {\n const spec = this.cfg.layers[i];\n // Minimal ELM config for numeric mode—categories aren’t used by trainFromData:\n const elm = new ELM({\n categories: ['ae'], // placeholder (unused in trainFromData)\n hiddenUnits: spec.hiddenUnits,\n activation: (_a = spec.activation) !== null && _a !== void 0 ? _a : 'relu',\n weightInit: (_b = spec.weightInit) !== null && _b !== void 0 ? _b : 'xavier',\n dropout: (_c = spec.dropout) !== null && _c !== void 0 ? _c : 0,\n log: { modelName: (_d = spec.name) !== null && _d !== void 0 ? _d : `AE#${i + 1}`, verbose: false },\n });\n // Autoencoder: targets are the inputs\n elm.trainFromData(cur, cur);\n this.aeLayers.push(elm);\n // Forward to next layer using hidden activations\n cur = elm.getEmbedding(cur);\n if (this.cfg.normalizeEach) {\n cur = l2NormalizeRows(cur);\n }\n }\n // Build chain for fast forward passes\n this.chain = new ELMChain(this.aeLayers.map((m, i) => {\n const a = wrapELM(m, m['modelName'] || `AE#${i + 1}`);\n return a;\n }), {\n normalizeEach: !!this.cfg.normalizeEach,\n normalizeFinal: !!this.cfg.normalizeFinal,\n name: 'DeepELM-Chain',\n });\n return this.transform(X);\n }\n /** Supervised training of a top classifier ELM on last-layer features. */\n fitClassifier(X, yOneHot) {\n var _a, _b;\n if (!this.chain)\n throw new Error('fitClassifier: call fitAutoencoders() first');\n const Z = this.chain.getEmbedding(X);\n // If clfHiddenUnits === 0, we mimic a “linear readout” by using a very small hidden layer with linear activation.\n const hidden = Math.max(1, this.cfg.clfHiddenUnits || 1);\n this.clf = new ELM({\n categories: Array.from({ length: this.cfg.numClasses }, (_, i) => String(i)),\n hiddenUnits: hidden,\n activation: (_a = this.cfg.clfActivation) !== null && _a !== void 0 ? _a : 'linear',\n weightInit: (_b = this.cfg.clfWeightInit) !== null && _b !== void 0 ? _b : 'xavier',\n log: { modelName: 'DeepELM-Classifier', verbose: false },\n });\n this.clf.trainFromData(Z, yOneHot);\n }\n /** One-shot convenience: train AEs then classifier. */\n fit(X, yOneHot) {\n this.fitAutoencoders(X);\n this.fitClassifier(X, yOneHot);\n }\n /** Forward through stacked AEs (no classifier). */\n transform(X) {\n if (!this.chain)\n throw new Error('transform: model not fitted');\n const Z = this.chain.getEmbedding(X);\n return Z;\n }\n /** Classifier probabilities (softmax) for a batch. 
*/\n predictProba(X) {\n if (!this.clf)\n throw new Error('predictProba: classifier not fitted');\n // Reuse existing ELM method on batch:\n const Z = this.transform(X);\n const res = this.clf.predictFromVector(Z, this.cfg.numClasses);\n // predictFromVector returns topK lists; convert back into dense probs when possible\n // If you’d rather have dense probs, expose a new method on ELM to return raw softmax scores for a batch.\n return topKListToDense(res, this.cfg.numClasses);\n }\n /** Utility: export all models for persistence. */\n toJSON() {\n var _a;\n return {\n cfg: this.cfg,\n layers: this.aeLayers.map(m => { var _a; return (_a = m.savedModelJSON) !== null && _a !== void 0 ? _a : JSON.stringify(m.model); }),\n clf: this.clf ? ((_a = this.clf.savedModelJSON) !== null && _a !== void 0 ? _a : JSON.stringify(this.clf.model)) : null,\n __version: 'deep-elm-1.0.0',\n };\n }\n /** Utility: load from exported payload. */\n fromJSON(payload) {\n const { cfg, layers, clf } = payload !== null && payload !== void 0 ? payload : {};\n if (!Array.isArray(layers))\n throw new Error('fromJSON: invalid payload');\n this.cfg = Object.assign(Object.assign({}, this.cfg), cfg);\n this.aeLayers = layers.map((j, i) => {\n const m = new ELM({ categories: ['ae'], hiddenUnits: 1 });\n m.loadModelFromJSON(j);\n return m;\n });\n this.chain = new ELMChain(this.aeLayers.map((m, i) => wrapELM(m, `AE#${i + 1}`)), {\n normalizeEach: !!this.cfg.normalizeEach,\n normalizeFinal: !!this.cfg.normalizeFinal,\n name: 'DeepELM-Chain',\n });\n if (clf) {\n const c = new ELM({ categories: Array.from({ length: this.cfg.numClasses }, (_, i) => String(i)), hiddenUnits: 1 });\n c.loadModelFromJSON(clf);\n this.clf = c;\n }\n }\n}\n/* ---------- helpers ---------- */\nfunction l2NormalizeRows(M) {\n return M.map(r => {\n let s = 0;\n for (let i = 0; i < r.length; i++)\n s += r[i] * r[i];\n const inv = 1 / (Math.sqrt(s) || 1);\n return r.map(v => v * inv);\n });\n}\nfunction topKListToDense(list, K) {\n // Convert the ELM.predictFromVector top-K output back to dense [N x K] probs if needed.\n // (If your ELM exposes a dense “predictProbaFromVectors” for the batch, prefer that.)\n return list.map(row => {\n const out = new Array(K).fill(0);\n for (const { label, prob } of row) {\n const idx = Number(label);\n if (Number.isFinite(idx) && idx >= 0 && idx < K)\n out[idx] = prob;\n }\n return out;\n });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// EmbeddingStore.ts — Powerful in-memory vector store with fast KNN, thresholds, and JSON I/O\nconst EPS$3 = 1e-12;\n/* ================= math utils ================= */\nfunction l2Norm$1(v) {\n let s = 0;\n for (let i = 0; i < v.length; i++)\n s += v[i] * v[i];\n return Math.sqrt(s);\n}\nfunction l1Dist(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.abs(a[i] - b[i]);\n return s;\n}\nfunction dot$3(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction normalizeToUnit(v) {\n const out = new Float32Array(v.length);\n const n = l2Norm$1(v);\n if (n < EPS$3)\n return out; // zero vector → stay zero; cosine with zero returns 0\n const inv = 1 / n;\n for (let i = 0; i < v.length; i++)\n out[i] = v[i] * inv;\n return out;\n}\n/** Quickselect (nth_element) on-place for top-k largest by score. Returns cutoff value index. 
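// --- Usage sketch (editor annotation) ---
// DeepELM end to end: layer-wise autoencoder pretraining, then a top classifier.
// Config fields mirror the constructor defaults above; data is illustrative.
const Xd = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 1, 0]];
const Yd = [[1, 0], [1, 0], [0, 1], [0, 1]];             // one-hot, numClasses = 2
const deep = new DeepELM({
  layers: [{ hiddenUnits: 8 }, { hiddenUnits: 4, activation: 'tanh' }],
  numClasses: 2,
  clfHiddenUnits: 16,
  normalizeFinal: true,
});
deep.fit(Xd, Yd);                 // fitAutoencoders(Xd) then fitClassifier(Xd, Yd)
const feats = deep.transform(Xd); // (N x 4) stacked-AE features
const probsD = deep.predictProba(Xd); // dense (N x 2) via topKListToDense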
*/\nfunction quickselectTopK(arr, k, scoreOf) {\n if (k <= 0 || k >= arr.length)\n return arr.length - 1;\n let left = 0, right = arr.length - 1;\n const target = k - 1; // 0-based index of kth largest after partition\n function swap(i, j) {\n const t = arr[i];\n arr[i] = arr[j];\n arr[j] = t;\n }\n function partition(l, r, pivotIdx) {\n const pivotScore = scoreOf(arr[pivotIdx]);\n swap(pivotIdx, r);\n let store = l;\n for (let i = l; i < r; i++) {\n if (scoreOf(arr[i]) > pivotScore) { // \">\" for largest-first\n swap(store, i);\n store++;\n }\n }\n swap(store, r);\n return store;\n }\n while (true) {\n const pivotIdx = Math.floor((left + right) / 2);\n const idx = partition(left, right, pivotIdx);\n if (idx === target)\n return idx;\n if (target < idx)\n right = idx - 1;\n else\n left = idx + 1;\n }\n}\n/* ================= store ================= */\nclass EmbeddingStore {\n constructor(dim, opts) {\n var _a, _b;\n // Data\n this.ids = [];\n this.metas = [];\n this.vecs = []; // if storeUnit=true -> unit vectors; else raw vectors\n // Index\n this.idToIdx = new Map();\n if (!Number.isFinite(dim) || dim <= 0)\n throw new Error(`EmbeddingStore: invalid dim=${dim}`);\n this.dim = dim | 0;\n this.storeUnit = (_a = opts === null || opts === void 0 ? void 0 : opts.storeUnit) !== null && _a !== void 0 ? _a : true;\n this.alsoStoreRaw = (_b = opts === null || opts === void 0 ? void 0 : opts.alsoStoreRaw) !== null && _b !== void 0 ? _b : this.storeUnit; // default: if normalizing, also keep raw so Euclidean is valid\n if ((opts === null || opts === void 0 ? void 0 : opts.capacity) !== undefined) {\n if (!Number.isFinite(opts.capacity) || opts.capacity <= 0)\n throw new Error(`capacity must be > 0`);\n this.capacity = Math.floor(opts.capacity);\n }\n if (this.alsoStoreRaw) {\n this.rawVecs = [];\n this.rawNorms = new Float32Array(0);\n }\n if (!this.storeUnit) {\n // storing raw in vecs → maintain norms for fast cosine\n this.norms = new Float32Array(0);\n }\n }\n /* ========== basic ops ========== */\n size() { return this.ids.length; }\n dimension() { return this.dim; }\n isUnitStored() { return this.storeUnit; }\n keepsRaw() { return !!this.rawVecs; }\n getCapacity() { return this.capacity; }\n setCapacity(capacity) {\n if (capacity === undefined) {\n this.capacity = undefined;\n return;\n }\n if (!Number.isFinite(capacity) || capacity <= 0)\n throw new Error(`capacity must be > 0`);\n this.capacity = Math.floor(capacity);\n this.enforceCapacity();\n }\n clear() {\n this.ids = [];\n this.vecs = [];\n this.metas = [];\n this.idToIdx.clear();\n if (this.rawVecs)\n this.rawVecs = [];\n if (this.norms)\n this.norms = new Float32Array(0);\n if (this.rawNorms)\n this.rawNorms = new Float32Array(0);\n }\n has(id) { return this.idToIdx.has(id); }\n get(id) {\n const idx = this.idToIdx.get(id);\n if (idx === undefined)\n return undefined;\n return {\n id,\n vec: this.vecs[idx],\n raw: this.rawVecs ? this.rawVecs[idx] : undefined,\n meta: this.metas[idx],\n };\n }\n /** Remove by id. Returns true if removed. 
*/\n remove(id) {\n const idx = this.idToIdx.get(id);\n if (idx === undefined)\n return false;\n // capture id, splice arrays\n this.ids.splice(idx, 1);\n this.vecs.splice(idx, 1);\n this.metas.splice(idx, 1);\n if (this.rawVecs)\n this.rawVecs.splice(idx, 1);\n if (this.norms)\n this.norms = this.removeFromNorms(this.norms, idx);\n if (this.rawNorms)\n this.rawNorms = this.removeFromNorms(this.rawNorms, idx);\n this.idToIdx.delete(id);\n this.rebuildIndex(idx);\n return true;\n }\n /** Add or replace an item by id. Returns true if added, false if replaced. */\n upsert(item) {\n var _a;\n const { id, vec, meta } = item;\n if (!id)\n throw new Error('upsert: id is required');\n if (!vec || vec.length !== this.dim) {\n throw new Error(`upsert: vector dim ${(_a = vec === null || vec === void 0 ? void 0 : vec.length) !== null && _a !== void 0 ? _a : 'n/a'} != store dim ${this.dim}`);\n }\n const raw = new Float32Array(vec);\n const unit = this.storeUnit ? normalizeToUnit(raw) : raw;\n const idx = this.idToIdx.get(id);\n if (idx !== undefined) {\n // replace in place\n this.vecs[idx] = unit;\n this.metas[idx] = meta;\n if (this.rawVecs)\n this.rawVecs[idx] = raw;\n if (this.norms && !this.storeUnit)\n this.norms[idx] = l2Norm$1(raw);\n if (this.rawNorms && this.rawVecs)\n this.rawNorms[idx] = l2Norm$1(raw);\n return false;\n }\n else {\n this.ids.push(id);\n this.vecs.push(unit);\n this.metas.push(meta);\n if (this.rawVecs)\n this.rawVecs.push(raw);\n if (this.norms && !this.storeUnit) {\n // append norm\n const n = l2Norm$1(raw);\n const newNorms = new Float32Array(this.ids.length);\n newNorms.set(this.norms, 0);\n newNorms[this.ids.length - 1] = n;\n this.norms = newNorms;\n }\n if (this.rawNorms && this.rawVecs) {\n const n = l2Norm$1(raw);\n const newNorms = new Float32Array(this.ids.length);\n newNorms.set(this.rawNorms, 0);\n newNorms[this.ids.length - 1] = n;\n this.rawNorms = newNorms;\n }\n this.idToIdx.set(id, this.ids.length - 1);\n this.enforceCapacity();\n return true;\n }\n }\n add(item) {\n const added = this.upsert(item);\n if (!added)\n throw new Error(`add: id \"${item.id}\" already exists (use upsert instead)`);\n }\n addAll(items, allowUpsert = true) {\n for (const it of items) {\n if (allowUpsert)\n this.upsert(it);\n else\n this.add(it);\n }\n }\n /** Merge another store (same dim & normalization strategy) into this one. */\n merge(other, allowOverwrite = true) {\n var _a;\n if (other.dimension() !== this.dim)\n throw new Error('merge: dimension mismatch');\n if (other.isUnitStored() !== this.storeUnit)\n throw new Error('merge: normalized flag mismatch');\n if (other.keepsRaw() !== this.keepsRaw())\n throw new Error('merge: raw retention mismatch');\n for (let i = 0; i < other.ids.length; i++) {\n const id = other.ids[i];\n const vec = other.vecs[i];\n const raw = (_a = other.rawVecs) === null || _a === void 0 ? void 0 : _a[i];\n const meta = other.metas[i];\n if (!allowOverwrite && this.has(id))\n continue;\n // Use upsert path, but avoid double-normalizing when both stores have unit vectors:\n this.upsert({ id, vec, meta });\n if (this.rawVecs && raw)\n this.rawVecs[this.idToIdx.get(id)] = new Float32Array(raw);\n }\n }\n /* ========== querying ========== */\n /** Top-K KNN query. For L2/L1 we return NEGATIVE distance so higher is better. 
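// --- Usage sketch (editor annotation) ---
// Building a store with the constructor options and upsert/get/remove defined
// above. Dimension and payloads are illustrative.
const store = new EmbeddingStore(4, {
  storeUnit: true,     // keep unit vectors for fast cosine
  alsoStoreRaw: true,  // retain raw copies so euclidean/manhattan stay valid
  capacity: 10000,     // oldest items are evicted beyond this
});
store.upsert({ id: 'doc-1', vec: [0.1, 0.9, 0, 0], meta: { lang: 'en' } });
store.upsert({ id: 'doc-2', vec: [0.8, 0.1, 0.1, 0], meta: { lang: 'de' } });
store.has('doc-1');    // true
store.remove('doc-2'); // true; the id→index map is rebuilt for the shifted tail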
*/\n query(queryVec, k = 10, opts) {\n var _a, _b, _c, _d, _e, _f;\n if (queryVec.length !== this.dim) {\n throw new Error(`query: vector dim ${queryVec.length} != store dim ${this.dim}`);\n }\n const metric = (_a = opts === null || opts === void 0 ? void 0 : opts.metric) !== null && _a !== void 0 ? _a : 'cosine';\n const filter = opts === null || opts === void 0 ? void 0 : opts.filter;\n const returnVectors = (_b = opts === null || opts === void 0 ? void 0 : opts.returnVectors) !== null && _b !== void 0 ? _b : false;\n const minScore = opts === null || opts === void 0 ? void 0 : opts.minScore;\n const maxDistance = opts === null || opts === void 0 ? void 0 : opts.maxDistance;\n const restrictSet = (opts === null || opts === void 0 ? void 0 : opts.restrictToIds) ? new Set(opts.restrictToIds) : undefined;\n let q;\n let qNorm = 0;\n if (metric === 'cosine') {\n // cosine → normalize query; stored data either unit (fast) or raw (use cached norms)\n q = normalizeToUnit(queryVec);\n }\n else if (metric === 'dot') {\n q = new Float32Array(queryVec);\n qNorm = l2Norm$1(q); // only used for potential future scoring transforms\n }\n else {\n // L2/L1 use RAW query\n q = new Float32Array(queryVec);\n qNorm = l2Norm$1(q);\n }\n const hits = [];\n const N = this.vecs.length;\n // helpers\n const pushHit = (i, score) => {\n if (restrictSet && !restrictSet.has(this.ids[i]))\n return;\n if (filter && !filter(this.metas[i], this.ids[i]))\n return;\n // Apply thresholds\n if (metric === 'euclidean' || metric === 'manhattan') {\n const dist = -score; // score is negative distance\n if (maxDistance !== undefined && dist > maxDistance)\n return;\n }\n else {\n if (minScore !== undefined && score < minScore)\n return;\n }\n hits.push(returnVectors\n ? { id: this.ids[i], score, index: i, meta: this.metas[i], vec: this.vecs[i] }\n : { id: this.ids[i], score, index: i, meta: this.metas[i] });\n };\n if (metric === 'cosine') {\n if (this.storeUnit) {\n // both unit → score = dot\n for (let i = 0; i < N; i++) {\n const s = dot$3(q, this.vecs[i]);\n pushHit(i, s);\n }\n }\n else {\n // stored raw in vecs → use cached norms (if available) for cos = dot / (||q||*||v||)\n if (!this.norms || this.norms.length !== N) {\n // build norms on-demand once\n this.norms = new Float32Array(N);\n for (let i = 0; i < N; i++)\n this.norms[i] = l2Norm$1(this.vecs[i]);\n }\n const qn = l2Norm$1(q) || 1; // guard\n for (let i = 0; i < N; i++) {\n const dn = this.norms[i] || 1;\n const s = dn < EPS$3 ? 0 : dot$3(q, this.vecs[i]) / (qn * dn);\n pushHit(i, s);\n }\n }\n }\n else if (metric === 'dot') {\n for (let i = 0; i < N; i++) {\n const s = dot$3(q, this.storeUnit ? this.vecs[i] : this.vecs[i]); // same storage\n pushHit(i, s);\n }\n }\n else if (metric === 'euclidean') {\n // Need RAW vectors\n const base = (_c = this.rawVecs) !== null && _c !== void 0 ? _c : (!this.storeUnit ? this.vecs : null);\n if (!base)\n throw new Error('euclidean query requires raw vectors; create store with alsoStoreRaw=true or storeUnit=false');\n // Use fast formula: ||q - v|| = sqrt(||q||^2 + ||v||^2 - 2 q·v)\n const vNorms = this.rawVecs ? ((_d = this.rawNorms) !== null && _d !== void 0 ? _d : this.buildRawNorms()) :\n (_e = this.norms) !== null && _e !== void 0 ? 
_e : this.buildNorms();\n const q2 = qNorm * qNorm;\n for (let i = 0; i < N; i++) {\n const d2 = Math.max(q2 + vNorms[i] * vNorms[i] - 2 * dot$3(q, base[i]), 0);\n const dist = Math.sqrt(d2);\n pushHit(i, -dist); // NEGATIVE distance so higher is better\n }\n }\n else { // 'manhattan'\n const base = (_f = this.rawVecs) !== null && _f !== void 0 ? _f : (!this.storeUnit ? this.vecs : null);\n if (!base)\n throw new Error('manhattan query requires raw vectors; create store with alsoStoreRaw=true or storeUnit=false');\n for (let i = 0; i < N; i++) {\n const dist = l1Dist(q, base[i]);\n pushHit(i, -dist); // NEGATIVE distance\n }\n }\n if (hits.length === 0)\n return [];\n const kk = Math.max(1, Math.min(k, hits.length));\n // Use quickselect to avoid full O(n log n) sort\n quickselectTopK(hits, kk, (h) => h.score);\n // Now sort just the top-K region for stable ordering\n hits\n .slice(0, kk)\n .sort((a, b) => b.score - a.score)\n .forEach((h, i) => (hits[i] = h));\n return hits.slice(0, kk);\n }\n /** Batch query helper. Returns array of results aligned to input queries. */\n queryBatch(queries, k = 10, opts) {\n return queries.map(q => this.query(q, k, opts));\n }\n /** Convenience: query by id */\n queryById(id, k = 10, opts) {\n var _a;\n const rec = this.get(id);\n if (!rec)\n return [];\n const use = ((opts === null || opts === void 0 ? void 0 : opts.metric) === 'euclidean' || (opts === null || opts === void 0 ? void 0 : opts.metric) === 'manhattan')\n ? ((_a = rec.raw) !== null && _a !== void 0 ? _a : rec.vec) // prefer raw for distance\n : rec.vec;\n return this.query(use, k, opts);\n }\n /* ========== export / import ========== */\n toJSON() {\n const includeRaw = !!this.rawVecs;\n return {\n dim: this.dim,\n normalized: this.storeUnit,\n alsoStoredRaw: includeRaw,\n capacity: this.capacity,\n items: this.ids.map((id, i) => ({\n id,\n vec: Array.from(this.vecs[i]),\n raw: includeRaw ? Array.from(this.rawVecs[i]) : undefined,\n meta: this.metas[i],\n })),\n __version: 'embedding-store-2.0.0',\n };\n }\n static fromJSON(obj) {\n var _a, _b;\n const parsed = typeof obj === 'string' ? JSON.parse(obj) : obj;\n if (!parsed || !parsed.dim || !Array.isArray(parsed.items)) {\n throw new Error('EmbeddingStore.fromJSON: invalid payload');\n }\n const store = new EmbeddingStore(parsed.dim, {\n storeUnit: parsed.normalized,\n capacity: parsed.capacity,\n alsoStoreRaw: (_a = parsed.alsoStoredRaw) !== null && _a !== void 0 ? _a : false,\n });\n for (const it of parsed.items) {\n if (!it || typeof it.id !== 'string' || !Array.isArray(it.vec))\n continue;\n if (it.vec.length !== parsed.dim) {\n throw new Error(`fromJSON: vector dim ${it.vec.length} != dim ${parsed.dim} for id ${it.id}`);\n }\n // Use public API to keep norms consistent\n store.upsert({ id: it.id, vec: (_b = it.raw) !== null && _b !== void 0 ? _b : it.vec, meta: it.meta });\n // If payload includes both vec and raw, ensure both sides are *exactly* respected\n if (store.storeUnit && store.rawVecs && it.raw) {\n const idx = store.idToIdx.get(it.id);\n store.rawVecs[idx] = new Float32Array(it.raw);\n if (store.rawNorms) {\n const newNorms = new Float32Array(store.size());\n newNorms.set(store.rawNorms, 0);\n newNorms[idx] = l2Norm$1(store.rawVecs[idx]);\n store.rawNorms = newNorms;\n }\n }\n else if (!store.storeUnit && it.vec) ;\n }\n return store;\n }\n /* ========== diagnostics / utils ========== */\n /** Estimate memory footprint in bytes (arrays only; metadata excluded). 
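// --- Usage sketch (editor annotation) ---
// Top-K queries with the options handled above; continues the `store` sketch.
// Note that for 'euclidean'/'manhattan' the returned score is the NEGATED distance,
// while maxDistance is checked against the positive distance.
const hits = store.query([0.2, 0.8, 0, 0], 5, {
  metric: 'cosine',
  minScore: 0.3,                                   // drop weak matches
  filter: (meta) => !!meta && meta.lang === 'en',  // (meta, id) predicate
  returnVectors: false,
});
const near = store.query([0.2, 0.8, 0, 0], 5, {
  metric: 'euclidean',
  maxDistance: 1.5,
});
const byId = store.queryById('doc-1', 3, { metric: 'cosine' });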
*/\n memoryUsageBytes() {\n const f32 = 4;\n let bytes = 0;\n for (const v of this.vecs)\n bytes += v.length * f32;\n if (this.rawVecs)\n for (const v of this.rawVecs)\n bytes += v.length * f32;\n if (this.norms)\n bytes += this.norms.length * f32;\n if (this.rawNorms)\n bytes += this.rawNorms.length * f32;\n // ids + metas are JS objects; not included\n return bytes;\n }\n /** Re-normalize all vectors in-place (useful if you bulk-updated raw storage). */\n reNormalizeAll() {\n if (!this.storeUnit)\n return; // nothing to do\n for (let i = 0; i < this.vecs.length; i++) {\n const raw = this.rawVecs ? this.rawVecs[i] : this.vecs[i];\n this.vecs[i] = normalizeToUnit(raw);\n }\n }\n /** Iterate over all items */\n *entries() {\n var _a;\n for (let i = 0; i < this.ids.length; i++) {\n yield { id: this.ids[i], vec: this.vecs[i], raw: (_a = this.rawVecs) === null || _a === void 0 ? void 0 : _a[i], meta: this.metas[i] };\n }\n }\n /* ========== internals ========== */\n removeFromNorms(src, removeIdx) {\n const out = new Float32Array(src.length - 1);\n if (removeIdx > 0)\n out.set(src.subarray(0, removeIdx), 0);\n if (removeIdx < src.length - 1)\n out.set(src.subarray(removeIdx + 1), removeIdx);\n return out;\n }\n /** After a splice at 'start', rebuild id→index for shifted tail */\n rebuildIndex(start = 0) {\n if (start <= 0) {\n this.idToIdx.clear();\n for (let i = 0; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n return;\n }\n for (let i = start; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n }\n /** Enforce capacity by evicting oldest items (front of arrays) */\n enforceCapacity() {\n if (this.capacity === undefined)\n return;\n while (this.ids.length > this.capacity) {\n const removedId = this.ids[0];\n // shift( ) is O(n); for very large stores consider a circular buffer\n this.ids.shift();\n this.vecs.shift();\n this.metas.shift();\n if (this.rawVecs)\n this.rawVecs.shift();\n if (this.norms)\n this.norms = this.removeFromNorms(this.norms, 0);\n if (this.rawNorms)\n this.rawNorms = this.removeFromNorms(this.rawNorms, 0);\n this.idToIdx.delete(removedId);\n // rebuild full index (ids shifted)\n this.idToIdx.clear();\n for (let i = 0; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n }\n }\n buildNorms() {\n const out = new Float32Array(this.vecs.length);\n for (let i = 0; i < this.vecs.length; i++)\n out[i] = l2Norm$1(this.vecs[i]);\n this.norms = out;\n return out;\n }\n buildRawNorms() {\n if (!this.rawVecs)\n throw new Error('no raw vectors to build norms for');\n const out = new Float32Array(this.rawVecs.length);\n for (let i = 0; i < this.rawVecs.length; i++)\n out[i] = l2Norm$1(this.rawVecs[i]);\n this.rawNorms = out;\n return out;\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// Evaluation.ts — Classification & Regression metrics (no deps)\nconst EPS$2 = 1e-12;\n/* =========================\n * Helpers\n * ========================= */\nfunction isOneHot(Y) {\n return Array.isArray(Y[0]);\n}\nfunction argmax$1(a) {\n let i = 0;\n for (let k = 1; k < a.length; k++)\n if (a[k] > a[i])\n i = k;\n return i;\n}\nfunction toIndexLabels(yTrue, yPred, numClasses) {\n let yTrueIdx;\n let yPredIdx;\n if (isOneHot(yTrue))\n yTrueIdx = yTrue.map(argmax$1);\n else\n yTrueIdx = yTrue;\n if (isOneHot(yPred))\n yPredIdx = yPred.map(argmax$1);\n else\n yPredIdx = yPred;\n const C = 1 + Math.max(Math.max(...yTrueIdx), Math.max(...yPredIdx));\n return { yTrueIdx, yPredIdx, C };\n}\n/* =========================\n * Confusion matrix\n * ========================= */\nfunction confusionMatrixFromIndices(yTrueIdx, yPredIdx, C) {\n if (yTrueIdx.length !== yPredIdx.length) {\n throw new Error(`confusionMatrix: length mismatch (${yTrueIdx.length} vs ${yPredIdx.length})`);\n }\n const classes = C !== null && C !== void 0 ? C : 1 + Math.max(Math.max(...yTrueIdx), Math.max(...yPredIdx));\n const M = Array.from({ length: classes }, () => new Array(classes).fill(0));\n for (let i = 0; i < yTrueIdx.length; i++) {\n const r = yTrueIdx[i] | 0;\n const c = yPredIdx[i] | 0;\n if (r >= 0 && r < classes && c >= 0 && c < classes)\n M[r][c]++;\n }\n return M;\n}\n/* =========================\n * Per-class metrics\n * ========================= */\nfunction perClassFromCM(M, labels) {\n var _a;\n const C = M.length;\n const totals = new Array(C).fill(0);\n const colTotals = new Array(C).fill(0);\n let N = 0;\n for (let i = 0; i < C; i++) {\n let rsum = 0;\n for (let j = 0; j < C; j++) {\n rsum += M[i][j];\n colTotals[j] += M[i][j];\n N += M[i][j];\n }\n totals[i] = rsum;\n }\n const perClass = [];\n for (let k = 0; k < C; k++) {\n const tp = M[k][k];\n const fp = colTotals[k] - tp;\n const fn = totals[k] - tp;\n const tn = N - tp - fp - fn;\n const precision = tp / (tp + fp + EPS$2);\n const recall = tp / (tp + fn + EPS$2);\n const f1 = (2 * precision * recall) / (precision + recall + EPS$2);\n perClass.push({\n label: (_a = labels === null || labels === void 0 ? void 0 : labels[k]) !== null && _a !== void 0 ? 
_a : k,\n support: totals[k],\n tp, fp, fn, tn,\n precision, recall, f1\n });\n }\n return perClass;\n}\n/* =========================\n * Averages\n * ========================= */\nfunction averagesFromPerClass(per, accuracy) {\n const C = per.length;\n let sumP = 0, sumR = 0, sumF = 0;\n let sumWP = 0, sumWR = 0, sumWF = 0, total = 0;\n let tp = 0, fp = 0, fn = 0; // for micro\n for (const c of per) {\n sumP += c.precision;\n sumR += c.recall;\n sumF += c.f1;\n sumWP += c.precision * c.support;\n sumWR += c.recall * c.support;\n sumWF += c.f1 * c.support;\n total += c.support;\n tp += c.tp;\n fp += c.fp;\n fn += c.fn;\n }\n const microP = tp / (tp + fp + EPS$2);\n const microR = tp / (tp + fn + EPS$2);\n const microF = (2 * microP * microR) / (microP + microR + EPS$2);\n return {\n accuracy,\n macroPrecision: sumP / C,\n macroRecall: sumR / C,\n macroF1: sumF / C,\n microPrecision: microP,\n microRecall: microR,\n microF1: microF,\n weightedPrecision: sumWP / (total + EPS$2),\n weightedRecall: sumWR / (total + EPS$2),\n weightedF1: sumWF / (total + EPS$2)\n };\n}\n/* =========================\n * Log loss / cross-entropy\n * ========================= */\nfunction logLoss(yTrue, yPredProba) {\n if (!isOneHot(yTrue) || !isOneHot(yPredProba)) {\n throw new Error('logLoss expects one-hot ground truth and probability matrix (N x C).');\n }\n const Y = yTrue;\n const P = yPredProba;\n if (Y.length !== P.length)\n throw new Error('logLoss: length mismatch');\n const N = Y.length;\n let sum = 0;\n for (let i = 0; i < N; i++) {\n const yi = Y[i];\n const pi = P[i];\n if (yi.length !== pi.length)\n throw new Error('logLoss: class count mismatch');\n for (let j = 0; j < yi.length; j++) {\n if (yi[j] > 0) {\n const p = Math.min(Math.max(pi[j], EPS$2), 1 - EPS$2);\n sum += -Math.log(p);\n }\n }\n }\n return sum / N;\n}\n/* =========================\n * Top-K accuracy\n * ========================= */\nfunction topKAccuracy(yTrueIdx, yPredProba, k = 5) {\n const N = yTrueIdx.length;\n let correct = 0;\n for (let i = 0; i < N; i++) {\n const probs = yPredProba[i];\n const idx = probs.map((p, j) => j).sort((a, b) => probs[b] - probs[a]).slice(0, Math.max(1, Math.min(k, probs.length)));\n if (idx.includes(yTrueIdx[i]))\n correct++;\n }\n return correct / N;\n}\nfunction pairSortByScore(yTrue01, yScore) {\n const pairs = yScore.map((s, i) => [s, yTrue01[i]]);\n pairs.sort((a, b) => b[0] - a[0]);\n return pairs;\n}\nfunction binaryROC(yTrue01, yScore) {\n if (yTrue01.length !== yScore.length)\n throw new Error('binaryROC: length mismatch');\n const pairs = pairSortByScore(yTrue01, yScore);\n const P = yTrue01.reduce((s, v) => s + (v ? 1 : 0), 0);\n const N = yTrue01.length - P;\n let tp = 0, fp = 0;\n const tpr = [0], fpr = [0], thr = [Infinity];\n for (let i = 0; i < pairs.length; i++) {\n const [score, y] = pairs[i];\n if (y === 1)\n tp++;\n else\n fp++;\n tpr.push(tp / (P + EPS$2));\n fpr.push(fp / (N + EPS$2));\n thr.push(score);\n }\n tpr.push(1);\n fpr.push(1);\n thr.push(-Infinity);\n // Trapezoidal AUC\n let auc = 0;\n for (let i = 1; i < tpr.length; i++) {\n const dx = fpr[i] - fpr[i - 1];\n const yAvg = (tpr[i] + tpr[i - 1]) / 2;\n auc += dx * yAvg;\n }\n return { thresholds: thr, tpr, fpr, auc };\n}\nfunction binaryPR(yTrue01, yScore) {\n if (yTrue01.length !== yScore.length)\n throw new Error('binaryPR: length mismatch');\n const pairs = pairSortByScore(yTrue01, yScore);\n const P = yTrue01.reduce((s, v) => s + (v ? 
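// --- Usage sketch (editor annotation) ---
// Binary ROC / PR curves from scores, as implemented above. auc is the
// trapezoidal area under each curve; labels and scores are illustrative.
const yTrue01 = [1, 0, 1, 1, 0];
const yScore  = [0.9, 0.8, 0.7, 0.3, 0.1];
const roc = binaryROC(yTrue01, yScore); // { thresholds, tpr, fpr, auc }
const pr  = binaryPR(yTrue01, yScore);  // { thresholds, precision, recall, auc }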
1 : 0), 0);\n let tp = 0, fp = 0;\n const precision = [], recall = [], thr = [];\n // Add starting point\n precision.push(P > 0 ? P / (P + 0) : 1);\n recall.push(0);\n thr.push(Infinity);\n for (let i = 0; i < pairs.length; i++) {\n const [score, y] = pairs[i];\n if (y === 1)\n tp++;\n else\n fp++;\n const prec = tp / (tp + fp + EPS$2);\n const rec = tp / (P + EPS$2);\n precision.push(prec);\n recall.push(rec);\n thr.push(score);\n }\n // AUPRC via trapezoid over recall axis\n let auc = 0;\n for (let i = 1; i < precision.length; i++) {\n const dx = recall[i] - recall[i - 1];\n const yAvg = (precision[i] + precision[i - 1]) / 2;\n auc += dx * yAvg;\n }\n return { thresholds: thr, precision, recall, auc };\n}\n/* =========================\n * Main: evaluate classification\n * ========================= */\n/**\n * Evaluate multi-class classification.\n * - yTrue can be indices (N) or one-hot (N x C)\n * - yPred can be indices (N) or probabilities (N x C)\n * - If yPred are probabilities, we also compute logLoss and optional topK.\n */\nfunction evaluateClassification(yTrue, yPred, opts) {\n const labels = opts === null || opts === void 0 ? void 0 : opts.labels;\n const { yTrueIdx, yPredIdx, C } = toIndexLabels(yTrue, yPred);\n const M = confusionMatrixFromIndices(yTrueIdx, yPredIdx, C);\n const per = perClassFromCM(M, labels);\n const correct = yTrueIdx.reduce((s, yt, i) => s + (yt === yPredIdx[i] ? 1 : 0), 0);\n const accuracy = correct / yTrueIdx.length;\n const averages = averagesFromPerClass(per, accuracy);\n // Optional extras if we have probabilities\n if (isOneHot(yTrue) && isOneHot(yPred)) {\n try {\n averages.logLoss = logLoss(yTrue, yPred);\n if ((opts === null || opts === void 0 ? void 0 : opts.topK) && opts.topK > 1) {\n averages.topKAccuracy = topKAccuracy(yTrueIdx, yPred, opts.topK);\n }\n }\n catch ( /* ignore extras if shapes disagree */_a) { /* ignore extras if shapes disagree */ }\n }\n return { confusionMatrix: M, perClass: per, averages };\n}\n/* =========================\n * Regression\n * ========================= */\nfunction evaluateRegression(yTrue, yPred) {\n const Y = Array.isArray(yTrue[0]) ? yTrue : yTrue.map(v => [v]);\n const P = Array.isArray(yPred[0]) ? 
yPred : yPred.map(v => [v]);\n if (Y.length !== P.length)\n throw new Error('evaluateRegression: length mismatch');\n const N = Y.length;\n const D = Y[0].length;\n const perOutput = [];\n let sumMSE = 0, sumMAE = 0, sumR2 = 0;\n for (let d = 0; d < D; d++) {\n let mse = 0, mae = 0;\n // mean of Y[:,d]\n let mean = 0;\n for (let i = 0; i < N; i++)\n mean += Y[i][d];\n mean /= N;\n let ssTot = 0, ssRes = 0;\n for (let i = 0; i < N; i++) {\n const y = Y[i][d], p = P[i][d];\n const e = y - p;\n mse += e * e;\n mae += Math.abs(e);\n ssRes += e * e;\n const dy = y - mean;\n ssTot += dy * dy;\n }\n mse /= N;\n const rmse = Math.sqrt(mse);\n mae /= N;\n const r2 = 1 - (ssRes / (ssTot + EPS$2));\n perOutput.push({ index: d, mse, rmse, mae, r2 });\n sumMSE += mse;\n sumMAE += mae;\n sumR2 += r2;\n }\n const mse = sumMSE / D;\n const rmse = Math.sqrt(mse);\n const mae = sumMAE / D;\n const r2 = sumR2 / D;\n return { perOutput, mse, rmse, mae, r2 };\n}\n/* =========================\n * Pretty report (optional)\n * ========================= */\nfunction formatClassificationReport(rep) {\n const lines = [];\n lines.push('Class\\tSupport\\tPrecision\\tRecall\\tF1');\n for (const c of rep.perClass) {\n lines.push(`${c.label}\\t${c.support}\\t${c.precision.toFixed(4)}\\t${c.recall.toFixed(4)}\\t${c.f1.toFixed(4)}`);\n }\n const a = rep.averages;\n lines.push('');\n lines.push(`Accuracy:\\t${a.accuracy.toFixed(4)}`);\n lines.push(`Macro P/R/F1:\\t${a.macroPrecision.toFixed(4)}\\t${a.macroRecall.toFixed(4)}\\t${a.macroF1.toFixed(4)}`);\n lines.push(`Micro P/R/F1:\\t${a.microPrecision.toFixed(4)}\\t${a.microRecall.toFixed(4)}\\t${a.microF1.toFixed(4)}`);\n lines.push(`Weighted P/R/F1:\\t${a.weightedPrecision.toFixed(4)}\\t${a.weightedRecall.toFixed(4)}\\t${a.weightedF1.toFixed(4)}`);\n if (a.logLoss !== undefined)\n lines.push(`LogLoss:\\t${a.logLoss.toFixed(6)}`);\n if (a.topKAccuracy !== undefined)\n lines.push(`TopK Acc:\\t${a.topKAccuracy.toFixed(4)}`);\n return lines.join('\\n');\n}\n\nconst EPS$1 = 1e-12;\n/* ---------- math helpers ---------- */\nfunction l2Norm(v) {\n let s = 0;\n for (let i = 0; i < v.length; i++)\n s += v[i] * v[i];\n return Math.sqrt(s);\n}\nfunction normalize(v) {\n const out = new Float32Array(v.length);\n const n = l2Norm(v);\n if (n < EPS$1)\n return out; // keep zeros; cosine with zero gives 0\n const inv = 1 / n;\n for (let i = 0; i < v.length; i++)\n out[i] = v[i] * inv;\n return out;\n}\nfunction dot$2(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/* ---------- main evaluation ---------- */\nfunction evaluateEnsembleRetrieval(queries, reference, chains, k, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n const metric = (_a = options === null || options === void 0 ? void 0 : options.metric) !== null && _a !== void 0 ? _a : \"cosine\";\n const aggregate = (_b = options === null || options === void 0 ? void 0 : options.aggregate) !== null && _b !== void 0 ? _b : \"mean\";\n const weights = options === null || options === void 0 ? void 0 : options.weights;\n const topK = Math.max(1, (_d = (_c = options === null || options === void 0 ? void 0 : options.k) !== null && _c !== void 0 ? _c : k) !== null && _d !== void 0 ? _d : 5);\n const ignoreUnlabeled = (_e = options === null || options === void 0 ? void 0 : options.ignoreUnlabeledQueries) !== null && _e !== void 0 ? _e : true;\n const reportPerLabel = (_f = options === null || options === void 0 ? 
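/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Ties the evaluation helpers above together. 'yTrueOneHot', 'probs', and 'labels'\n * are hypothetical placeholders: an N x C ground-truth matrix, an N x C probability\n * matrix, and a label list.\n * ========================= */\n// const report = evaluateClassification(yTrueOneHot, probs, { labels, topK: 3 });\n// console.log(formatClassificationReport(report)); // per-class P/R/F1, macro/micro/weighted averages, logLoss, top-K\n// const reg = evaluateRegression([1.0, 2.0, 3.0], [1.1, 1.9, 3.2]);\n// console.log(reg.rmse, reg.r2); // rmse ≈ 0.141, r2 ≈ 0.97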
void 0 : options.reportPerLabel) !== null && _f !== void 0 ? _f : false;\n const returnRankings = (_g = options === null || options === void 0 ? void 0 : options.returnRankings) !== null && _g !== void 0 ? _g : false;\n const logEvery = Math.max(1, (_h = options === null || options === void 0 ? void 0 : options.logEvery) !== null && _h !== void 0 ? _h : 10);\n if (chains.length === 0) {\n throw new Error(\"evaluateEnsembleRetrieval: 'chains' must be non-empty.\");\n }\n if (aggregate === \"weighted\") {\n if (!weights || weights.length !== chains.length) {\n throw new Error(`aggregate='weighted' requires weights.length === chains.length`);\n }\n // normalize weights to sum=1 for interpretability\n const sumW = weights.reduce((s, w) => s + w, 0) || 1;\n for (let i = 0; i < weights.length; i++)\n weights[i] = weights[i] / sumW;\n }\n console.log(\"🔹 Precomputing embeddings...\");\n // Pull raw embeddings from each chain\n const chainQueryEmb = [];\n const chainRefEmb = [];\n for (let c = 0; c < chains.length; c++) {\n const qMat = chains[c].getEmbedding(queries.map(q => {\n const v = q.embedding;\n if (!v || v.length === 0)\n throw new Error(`Query ${c} has empty embedding`);\n return Array.from(v);\n }));\n const rMat = chains[c].getEmbedding(reference.map(r => {\n const v = r.embedding;\n if (!v || v.length === 0)\n throw new Error(`Reference has empty embedding`);\n return Array.from(v);\n }));\n // Validate dims & normalize if cosine\n const qArr = qMat.map(row => Float32Array.from(row));\n const rArr = rMat.map(row => Float32Array.from(row));\n if (metric === \"cosine\") {\n chainQueryEmb.push(qArr.map(normalize));\n chainRefEmb.push(rArr.map(normalize));\n }\n else {\n chainQueryEmb.push(qArr);\n chainRefEmb.push(rArr);\n }\n // Basic safety: check dimensions match across Q/R for this chain\n const dimQ = (_k = (_j = qArr[0]) === null || _j === void 0 ? void 0 : _j.length) !== null && _k !== void 0 ? _k : 0;\n const dimR = (_m = (_l = rArr[0]) === null || _l === void 0 ? void 0 : _l.length) !== null && _m !== void 0 ? _m : 0;\n if (dimQ === 0 || dimR === 0 || dimQ !== dimR) {\n throw new Error(`Chain ${c}: query/ref embedding dims mismatch (${dimQ} vs ${dimR})`);\n }\n }\n console.log(\"✅ Precomputation complete. Starting retrieval evaluation...\");\n let hitsAt1 = 0, hitsAtK = 0, reciprocalRanks = 0;\n let used = 0;\n const perLabelRaw = {};\n const rankings = [];\n for (let i = 0; i < queries.length; i++) {\n if (i % logEvery === 0)\n console.log(`🔍 Query ${i + 1}/${queries.length}`);\n const correctLabel = ((_o = queries[i].metadata.label) !== null && _o !== void 0 ? _o : \"\").toString();\n if (!correctLabel && ignoreUnlabeled) {\n continue; // skip this query entirely\n }\n // Accumulate ensemble scores per reference\n // We keep (label, score) per reference j\n const scores = new Array(reference.length);\n for (let j = 0; j < reference.length; j++) {\n let sAgg;\n if (aggregate === \"max\") {\n // Take max across chains\n let sMax = -Infinity;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n const s = metric === \"cosine\" || metric === \"dot\" ? dot$2(q, r) : dot$2(q, r); // only cosine/dot supported\n if (s > sMax)\n sMax = s;\n }\n sAgg = sMax;\n }\n else if (aggregate === \"sum\") {\n let sSum = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sSum += (metric === \"cosine\" || metric === \"dot\") ? 
dot$2(q, r) : dot$2(q, r);\n }\n sAgg = sSum;\n }\n else if (aggregate === \"weighted\") {\n let sW = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sW += ((metric === \"cosine\" || metric === \"dot\") ? dot$2(q, r) : dot$2(q, r)) * weights[c];\n }\n sAgg = sW;\n }\n else { // \"mean\"\n let sSum = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sSum += (metric === \"cosine\" || metric === \"dot\") ? dot$2(q, r) : dot$2(q, r);\n }\n sAgg = sSum / chains.length;\n }\n scores[j] = {\n label: ((_p = reference[j].metadata.label) !== null && _p !== void 0 ? _p : \"\").toString(),\n score: sAgg\n };\n }\n // Sort by score desc\n scores.sort((a, b) => b.score - a.score);\n const rankedLabels = scores.map(s => s.label);\n // Update metrics\n const r1 = rankedLabels[0] === correctLabel ? 1 : 0;\n const rK = rankedLabels.slice(0, topK).includes(correctLabel) ? 1 : 0;\n const rank = rankedLabels.indexOf(correctLabel);\n const rr = rank === -1 ? 0 : 1 / (rank + 1);\n hitsAt1 += r1;\n hitsAtK += rK;\n reciprocalRanks += rr;\n used++;\n if (reportPerLabel) {\n const bucket = (_q = perLabelRaw[correctLabel]) !== null && _q !== void 0 ? _q : (perLabelRaw[correctLabel] = { count: 0, hitsAt1: 0, hitsAtK: 0, mrrSum: 0 });\n bucket.count++;\n bucket.hitsAt1 += r1;\n bucket.hitsAtK += rK;\n bucket.mrrSum += rr;\n }\n if (returnRankings) {\n rankings.push({\n queryIndex: i,\n queryId: queries[i].id,\n label: correctLabel,\n topK: scores.slice(0, topK),\n correctRank: rank\n });\n }\n }\n const denom = used || 1;\n const result = {\n usedQueries: used,\n recallAt1: hitsAt1 / denom,\n recallAtK: hitsAtK / denom,\n mrr: reciprocalRanks / denom\n };\n if (reportPerLabel) {\n const out = {};\n for (const [label, s] of Object.entries(perLabelRaw)) {\n out[label] = {\n support: s.count,\n recallAt1: s.hitsAt1 / (s.count || 1),\n recallAtK: s.hitsAtK / (s.count || 1),\n mrr: s.mrrSum / (s.count || 1)\n };\n }\n result.perLabel = out;\n }\n if (returnRankings)\n result.rankings = rankings;\n return result;\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// src/workers/ELMWorkerClient.ts\nclass ELMWorkerClient {\n constructor(worker) {\n this.pending = new Map();\n this.worker = worker;\n this.worker.onmessage = (ev) => {\n var _a;\n const msg = ev.data;\n // Progress event\n if ((msg === null || msg === void 0 ? void 0 : msg.type) === 'progress' && (msg === null || msg === void 0 ? void 0 : msg.id)) {\n const pend = this.pending.get(msg.id);\n (_a = pend === null || pend === void 0 ? void 0 : pend.onProgress) === null || _a === void 0 ? void 0 : _a.call(pend, msg);\n return;\n }\n // RPC response\n const id = msg === null || msg === void 0 ? 
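/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * 'queries' and 'reference' are hypothetical arrays of { id, embedding, metadata: { label } },\n * and 'chains' is a hypothetical pair of embedding models exposing getEmbedding().\n * ========================= */\n// const res = evaluateEnsembleRetrieval(queries, reference, chains, 5, {\n//     metric: 'cosine', aggregate: 'weighted', weights: [0.7, 0.3], reportPerLabel: true\n// });\n// console.log(res.recallAt1, res.recallAtK, res.mrr, res.perLabel);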
void 0 : msg.id;\n if (!id)\n return;\n const pend = this.pending.get(id);\n if (!pend)\n return;\n this.pending.delete(id);\n if (msg.ok)\n pend.resolve(msg.result);\n else\n pend.reject(new Error(msg.error));\n };\n }\n call(action, payload, onProgress) {\n const id = Math.random().toString(36).slice(2);\n return new Promise((resolve, reject) => {\n this.pending.set(id, { resolve, reject, onProgress });\n this.worker.postMessage({ id, action, payload });\n });\n }\n // -------- lifecycle --------\n getKind() { return this.call('getKind'); }\n dispose() { return this.call('dispose'); }\n setVerbose(verbose) { return this.call('setVerbose', { verbose }); }\n // -------- ELM --------\n initELM(config) { return this.call('initELM', config); }\n elmTrain(opts, onProgress) {\n return this.call('elm.train', opts, onProgress);\n }\n elmTrainFromData(X, Y, options, onProgress) {\n return this.call('elm.trainFromData', { X, Y, options }, onProgress);\n }\n elmPredict(text, topK = 5) { return this.call('elm.predict', { text, topK }); }\n elmPredictFromVector(X, topK = 5) { return this.call('elm.predictFromVector', { X, topK }); }\n elmGetEmbedding(X) { return this.call('elm.getEmbedding', { X }); }\n elmToJSON() { return this.call('elm.toJSON'); }\n elmLoadJSON(json) { return this.call('elm.loadJSON', { json }); }\n // -------- OnlineELM --------\n initOnlineELM(config) { return this.call('initOnlineELM', config); }\n oelmInit(X0, Y0) { return this.call('oelm.init', { X0, Y0 }); }\n oelmFit(X, Y) { return this.call('oelm.fit', { X, Y }); }\n oelmUpdate(X, Y) { return this.call('oelm.update', { X, Y }); }\n oelmLogits(X) { return this.call('oelm.logits', { X }); }\n oelmToJSON() { return this.call('oelm.toJSON'); }\n oelmLoadJSON(json) { return this.call('oelm.loadJSON', { json }); }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass TFIDF {\n constructor(corpusDocs) {\n this.termFrequency = {};\n this.inverseDocFreq = {};\n this.wordsInDoc = [];\n this.processedWords = [];\n this.scores = {};\n this.corpus = \"\";\n this.corpus = corpusDocs.join(\" \");\n const wordsFinal = [];\n const re = /[^a-zA-Z0-9]+/g;\n corpusDocs.forEach(doc => {\n const tokens = doc.split(/\\s+/);\n tokens.forEach(word => {\n const cleaned = word.replace(re, \" \");\n wordsFinal.push(...cleaned.split(/\\s+/).filter(Boolean));\n });\n });\n this.wordsInDoc = wordsFinal;\n this.processedWords = TFIDF.processWords(wordsFinal);\n // Compute term frequency\n this.processedWords.forEach(token => {\n this.termFrequency[token] = (this.termFrequency[token] || 0) + 1;\n });\n // Compute inverse document frequency\n for (const term in this.termFrequency) {\n const count = TFIDF.countDocsContainingTerm(corpusDocs, term);\n this.inverseDocFreq[term] = Math.log(corpusDocs.length / (1 + count));\n }\n }\n static countDocsContainingTerm(corpusDocs, term) {\n return corpusDocs.reduce((acc, doc) => (doc.includes(term) ? 
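/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Wraps a Web Worker in the RPC client above; the worker script path and the\n * X/Y training matrices are hypothetical.\n * ========================= */\n// const client = new ELMWorkerClient(new Worker(new URL('./elm.worker.js', import.meta.url)));\n// await client.initELM({ categories: ['greet', 'farewell'], hiddenUnits: 64, activation: 'relu' });\n// await client.elmTrainFromData(X, Y, undefined, (p) => console.log('progress', p));\n// const top3 = await client.elmPredict('hello there', 3);\n// await client.dispose();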
acc + 1 : acc), 0);\n }\n static processWords(words) {\n const filtered = TFIDF.removeStopWordsAndStem(words).map(w => TFIDF.lemmatize(w));\n const bigrams = TFIDF.generateNGrams(filtered, 2);\n const trigrams = TFIDF.generateNGrams(filtered, 3);\n return [...filtered, ...bigrams, ...trigrams];\n }\n static removeStopWordsAndStem(words) {\n const stopWords = new Set([\n \"a\", \"and\", \"the\", \"is\", \"to\", \"of\", \"in\", \"it\", \"that\", \"you\",\n \"this\", \"for\", \"on\", \"are\", \"with\", \"as\", \"be\", \"by\", \"at\", \"from\",\n \"or\", \"an\", \"but\", \"not\", \"we\"\n ]);\n return words.filter(w => !stopWords.has(w)).map(w => TFIDF.advancedStem(w));\n }\n static advancedStem(word) {\n const programmingKeywords = new Set([\n \"func\", \"package\", \"import\", \"interface\", \"go\",\n \"goroutine\", \"channel\", \"select\", \"struct\",\n \"map\", \"slice\", \"var\", \"const\", \"type\",\n \"defer\", \"fallthrough\"\n ]);\n if (programmingKeywords.has(word))\n return word;\n const suffixes = [\"es\", \"ed\", \"ing\", \"s\", \"ly\", \"ment\", \"ness\", \"ity\", \"ism\", \"er\"];\n for (const suffix of suffixes) {\n if (word.endsWith(suffix)) {\n if (suffix === \"es\" && word.length > 2 && word[word.length - 3] === \"i\") {\n return word.slice(0, -2);\n }\n return word.slice(0, -suffix.length);\n }\n }\n return word;\n }\n static lemmatize(word) {\n const rules = {\n execute: \"execute\",\n running: \"run\",\n returns: \"return\",\n defined: \"define\",\n compiles: \"compile\",\n calls: \"call\",\n creating: \"create\",\n invoke: \"invoke\",\n declares: \"declare\",\n references: \"reference\",\n implements: \"implement\",\n utilizes: \"utilize\",\n tests: \"test\",\n loops: \"loop\",\n deletes: \"delete\",\n functions: \"function\"\n };\n if (rules[word])\n return rules[word];\n if (word.endsWith(\"ing\"))\n return word.slice(0, -3);\n if (word.endsWith(\"ed\"))\n return word.slice(0, -2);\n return word;\n }\n static generateNGrams(tokens, n) {\n if (tokens.length < n)\n return [];\n const ngrams = [];\n for (let i = 0; i <= tokens.length - n; i++) {\n ngrams.push(tokens.slice(i, i + n).join(\" \"));\n }\n return ngrams;\n }\n calculateScores() {\n const totalWords = this.processedWords.length;\n const scores = {};\n this.processedWords.forEach(token => {\n const tf = this.termFrequency[token] || 0;\n scores[token] = (tf / totalWords) * (this.inverseDocFreq[token] || 0);\n });\n this.scores = scores;\n return scores;\n }\n extractKeywords(topN) {\n const entries = Object.entries(this.scores).sort((a, b) => b[1] - a[1]);\n return Object.fromEntries(entries.slice(0, topN));\n }\n processedWordsIndex(word) {\n return this.processedWords.indexOf(word);\n }\n}\nclass TFIDFVectorizer {\n constructor(docs, maxVocabSize = 2000) {\n this.docTexts = docs;\n this.tfidf = new TFIDF(docs);\n // Collect all unique terms with frequencies\n const termFreq = {};\n docs.forEach(doc => {\n const tokens = doc.split(/\\s+/);\n const cleaned = tokens.map(t => t.replace(/[^a-zA-Z0-9]+/g, \"\"));\n const processed = TFIDF.processWords(cleaned);\n processed.forEach(t => {\n termFreq[t] = (termFreq[t] || 0) + 1;\n });\n });\n // Sort terms by frequency descending\n const sortedTerms = Object.entries(termFreq)\n .sort((a, b) => b[1] - a[1])\n .slice(0, maxVocabSize)\n .map(([term]) => term);\n this.vocabulary = sortedTerms;\n console.log(`✅ TFIDFVectorizer vocabulary capped at: ${this.vocabulary.length} terms.`);\n }\n /**\n * Returns the dense TFIDF vector for a given document text.\n */\n 
vectorize(doc) {\n const tokens = doc.split(/\\s+/);\n const cleaned = tokens.map(t => t.replace(/[^a-zA-Z0-9]+/g, \"\"));\n const processed = TFIDF.processWords(cleaned);\n // Compute term frequency in this document\n const termFreq = {};\n processed.forEach(token => {\n termFreq[token] = (termFreq[token] || 0) + 1;\n });\n const totalTerms = processed.length;\n return this.vocabulary.map(term => {\n const tf = totalTerms > 0 ? (termFreq[term] || 0) / totalTerms : 0;\n const idf = this.tfidf.inverseDocFreq[term] || 0;\n return tf * idf;\n });\n }\n /**\n * Returns vectors for all original training docs.\n */\n vectorizeAll() {\n return this.docTexts.map(doc => this.vectorize(doc));\n }\n /**\n * Optional L2 normalization utility.\n */\n static l2normalize(vec) {\n const norm = Math.sqrt(vec.reduce((s, x) => s + x * x, 0));\n return norm === 0 ? vec : vec.map(x => x / norm);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass KNN {\n /**\n * Compute cosine similarity between two numeric vectors.\n */\n static cosineSimilarity(vec1, vec2) {\n let dot = 0, norm1 = 0, norm2 = 0;\n for (let i = 0; i < vec1.length; i++) {\n dot += vec1[i] * vec2[i];\n norm1 += vec1[i] * vec1[i];\n norm2 += vec2[i] * vec2[i];\n }\n if (norm1 === 0 || norm2 === 0)\n return 0;\n return dot / (Math.sqrt(norm1) * Math.sqrt(norm2));\n }\n /**\n * Compute Euclidean distance between two numeric vectors.\n */\n static euclideanDistance(vec1, vec2) {\n let sum = 0;\n for (let i = 0; i < vec1.length; i++) {\n const diff = vec1[i] - vec2[i];\n sum += diff * diff;\n }\n return Math.sqrt(sum);\n }\n /**\n * Find k nearest neighbors.\n * @param queryVec - Query vector\n * @param dataset - Dataset to search\n * @param k - Number of neighbors\n * @param topX - Number of top results to return\n * @param metric - Similarity metric\n */\n static find(queryVec, dataset, k = 5, topX = 3, metric = \"cosine\") {\n const similarities = dataset.map((item, idx) => {\n let score;\n if (metric === \"cosine\") {\n score = this.cosineSimilarity(queryVec, item.vector);\n }\n else {\n // For Euclidean, invert distance so higher = closer\n const dist = this.euclideanDistance(queryVec, item.vector);\n score = -dist;\n }\n return { index: idx, score };\n });\n similarities.sort((a, b) => b.score - a.score);\n const labelWeights = {};\n for (let i = 0; i < Math.min(k, similarities.length); i++) {\n const label = dataset[similarities[i].index].label;\n const weight = similarities[i].score;\n labelWeights[label] = (labelWeights[label] || 0) + weight;\n }\n const weightedLabels = Object.entries(labelWeights)\n .map(([label, weight]) => ({ label, weight }))\n .sort((a, b) => b.weight - a.weight);\n return weightedLabels.slice(0, topX);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// BindUI.ts - Utility to bind ELM model to HTML inputs and outputs\nfunction bindAutocompleteUI({ model, inputElement, outputElement, topK = 5 }) {\n inputElement.addEventListener('input', () => {\n const typed = inputElement.value.trim();\n if (typed.length === 0) {\n outputElement.innerHTML = '<em>Start typing...</em>';\n return;\n }\n try {\n const results = model.predict(typed, topK);\n outputElement.innerHTML = results.map(r => `\n <div><strong>${r.label}</strong>: ${(r.prob * 100).toFixed(1)}%</div>\n `).join('');\n }\n catch (e) {\n const message = e instanceof Error ? 
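/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Feeds TF-IDF vectors into the KNN helper above; 'docs' and 'labels' are\n * hypothetical aligned arrays of strings.\n * ========================= */\n// const vectorizer = new TFIDFVectorizer(docs, 2000);\n// const dataset = docs.map((d, i) => ({ vector: TFIDFVectorizer.l2normalize(vectorizer.vectorize(d)), label: labels[i] }));\n// const query = TFIDFVectorizer.l2normalize(vectorizer.vectorize('how do goroutines work'));\n// console.log(KNN.find(query, dataset, 5, 3, 'cosine')); // [{ label, weight }, ...]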
e.message : 'Unknown error';\n outputElement.innerHTML = `<span style=\"color: red;\">Error: ${message}</span>`;\n }\n });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Presets.ts — Reusable configuration presets for ELM (updated for new ELMConfig union)\n/**\n * NOTE:\n * - These are TEXT presets (token-mode). They set `useTokenizer: true`.\n * - If you need char-level, create an inline config where `useTokenizer: false`\n * and pass it directly to ELM (numeric presets generally need an explicit inputSize).\n */\n/** English token-level preset */\nconst EnglishTokenPreset = {\n useTokenizer: true,\n maxLen: 20,\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n tokenizerDelimiter: /[\\s,.;!?()\\[\\]{}\"']+/};\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ✅ AutoComplete.ts — ELM | KernelELM (Nyström+whiten) | OnlineELM\n// Fixes:\n// • Avoids union narrowing on EnglishTokenPreset by shimming preset fields (no ExtendedELMConfig maxLen error)\n// • activation typed as Activation (not string)\n// • Removed non-existent \"task\" option in trainFromData()\n/** Safe accessor for preset fields (avoids type errors on ExtendedELMConfig) */\nconst PRESET = (() => {\n var _a, _b, _c, _d;\n const p = EnglishTokenPreset;\n return {\n maxLen: (_a = p === null || p === void 0 ? void 0 : p.maxLen) !== null && _a !== void 0 ? _a : 30,\n charSet: (_b = p === null || p === void 0 ? void 0 : p.charSet) !== null && _b !== void 0 ? _b : 'abcdefghijklmnopqrstuvwxyz',\n useTokenizer: (_c = p === null || p === void 0 ? void 0 : p.useTokenizer) !== null && _c !== void 0 ? _c : true,\n tokenizerDelimiter: (_d = p === null || p === void 0 ? void 0 : p.tokenizerDelimiter) !== null && _d !== void 0 ? _d : /\\s+/\n };\n})();\nfunction oneHot(idx, n) {\n const v = new Array(n).fill(0);\n if (idx >= 0 && idx < n)\n v[idx] = 1;\n return v;\n}\nfunction sortTopK(labels, probs, k) {\n return probs\n .map((p, i) => ({ label: labels[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k);\n}\nclass AutoComplete {\n constructor(pairs, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5;\n this.trainPairs = pairs;\n this.activation = (_a = options.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.engine = (_b = options.engine) !== null && _b !== void 0 ? _b : 'elm';\n this.topKDefault = (_c = options.topK) !== null && _c !== void 0 ? _c : 5;\n // Labels\n this.categories = Array.from(new Set(pairs.map(p => p.label)));\n // Text → numeric encoder (Kernel/Online need numeric; ELM can also consume numeric directly)\n this.encoder = new UniversalEncoder({\n charSet: PRESET.charSet,\n maxLen: PRESET.maxLen,\n useTokenizer: PRESET.useTokenizer,\n tokenizerDelimiter: PRESET.tokenizerDelimiter,\n mode: (PRESET.useTokenizer ? 'token' : 'char'),\n });\n const hiddenUnits = (_d = options.hiddenUnits) !== null && _d !== void 0 ? _d : 128;\n const ridgeLambda = (_e = options.ridgeLambda) !== null && _e !== void 0 ? _e : 1e-2;\n const weightInit = (_f = options.weightInit) !== null && _f !== void 0 ? _f : 'xavier';\n const verbose = (_g = options.verbose) !== null && _g !== void 0 ? _g : false;\n if (this.engine === 'kernel') {\n const D = this.encoder.getVectorSize();\n const ktype = (_j = (_h = options.kernel) === null || _h === void 0 ? void 0 : _h.type) !== null && _j !== void 0 ? _j : 'rbf';\n const kernel = ktype === 'poly'\n ? 
{ type: 'poly', gamma: (_l = (_k = options.kernel) === null || _k === void 0 ? void 0 : _k.gamma) !== null && _l !== void 0 ? _l : (1 / Math.max(1, D)), degree: (_o = (_m = options.kernel) === null || _m === void 0 ? void 0 : _m.degree) !== null && _o !== void 0 ? _o : 2, coef0: (_q = (_p = options.kernel) === null || _p === void 0 ? void 0 : _p.coef0) !== null && _q !== void 0 ? _q : 1 }\n : ktype === 'linear'\n ? { type: 'linear' }\n : ktype === 'laplacian'\n ? { type: 'laplacian', gamma: (_s = (_r = options.kernel) === null || _r === void 0 ? void 0 : _r.gamma) !== null && _s !== void 0 ? _s : (1 / Math.max(1, D)) }\n : { type: 'rbf', gamma: (_u = (_t = options.kernel) === null || _t === void 0 ? void 0 : _t.gamma) !== null && _u !== void 0 ? _u : (1 / Math.max(1, D)) };\n this.model = new KernelELM({\n outputDim: this.categories.length,\n kernel,\n ridgeLambda,\n task: 'classification',\n mode: 'nystrom',\n nystrom: {\n m: (_v = options.kernel) === null || _v === void 0 ? void 0 : _v.m,\n strategy: (_x = (_w = options.kernel) === null || _w === void 0 ? void 0 : _w.strategy) !== null && _x !== void 0 ? _x : 'uniform',\n seed: (_z = (_y = options.kernel) === null || _y === void 0 ? void 0 : _y.seed) !== null && _z !== void 0 ? _z : 1337,\n preset: (_0 = options.kernel) === null || _0 === void 0 ? void 0 : _0.preset,\n whiten: (_2 = (_1 = options.kernel) === null || _1 === void 0 ? void 0 : _1.whiten) !== null && _2 !== void 0 ? _2 : true,\n jitter: (_4 = (_3 = options.kernel) === null || _3 === void 0 ? void 0 : _3.jitter) !== null && _4 !== void 0 ? _4 : 1e-10,\n },\n log: { modelName: 'AutoComplete-KELM', verbose }\n });\n }\n else if (this.engine === 'online') {\n const inputDim = this.encoder.getVectorSize();\n this.model = new OnlineELM({\n inputDim,\n outputDim: this.categories.length,\n hiddenUnits,\n activation: this.activation,\n ridgeLambda,\n weightInit: (_5 = weightInit) !== null && _5 !== void 0 ? _5 : 'he',\n forgettingFactor: 0.997,\n log: { modelName: 'AutoComplete-OnlineELM', verbose }\n });\n }\n else {\n // Classic ELM — use TextConfig branch explicitly\n this.model = new ELM({\n categories: this.categories,\n hiddenUnits,\n activation: this.activation,\n ridgeLambda,\n weightInit: weightInit === 'he' ? 'xavier' : weightInit, // map 'he' to 'xavier' if needed\n // Text branch fields:\n useTokenizer: true,\n maxLen: PRESET.maxLen,\n charSet: PRESET.charSet,\n tokenizerDelimiter: PRESET.tokenizerDelimiter,\n // Logging / export\n metrics: options.metrics,\n log: { modelName: 'AutoComplete', verbose },\n exportFileName: options.exportFileName\n });\n }\n // Bind UI to a small adapter that calls our predict()\n bindAutocompleteUI({\n model: {\n predict: (text, k = this.topKDefault) => this.predict(text, k)\n },\n inputElement: options.inputElement,\n outputElement: options.outputElement,\n topK: options.topK\n });\n }\n /* ============= Training ============= */\n train() {\n // Build numeric X/Y\n const X = [];\n const Y = [];\n for (const { input, label } of this.trainPairs) {\n const vec = this.encoder.normalize(this.encoder.encode(input));\n const idx = this.categories.indexOf(label);\n if (idx === -1)\n continue;\n X.push(vec);\n Y.push(oneHot(idx, this.categories.length));\n }\n if (this.engine === 'kernel') {\n this.model.fit(X, Y);\n return;\n }\n if (this.engine === 'online') {\n this.model.init(X, Y); // then .update() for new batches\n return;\n }\n // Classic ELM — options: { reuseWeights?, weights? 
}; do NOT pass \"task\"\n this.model.trainFromData(X, Y);\n }\n /* ============= Prediction ============= */\n predict(input, topN = 1) {\n const k = Math.max(1, topN);\n if (this.engine === 'elm') {\n const out = this.model.predict(input, k);\n return out.map(p => ({ completion: p.label, prob: p.prob }));\n }\n const x = this.encoder.normalize(this.encoder.encode(input));\n if (this.engine === 'kernel') {\n const probs = this.model.predictProbaFromVectors([x])[0];\n return sortTopK(this.categories, probs, k).map(p => ({ completion: p.label, prob: p.prob }));\n }\n const probs = this.model.predictProbaFromVector(x);\n return sortTopK(this.categories, probs, k).map(p => ({ completion: p.label, prob: p.prob }));\n }\n /* ============= Persistence ============= */\n getModel() { return this.model; }\n loadModelFromJSON(json) {\n if (this.model.fromJSON) {\n this.model.fromJSON(json);\n }\n else if (this.model.loadModelFromJSON) {\n this.model.loadModelFromJSON(json);\n }\n else if (this.model.loadFromJSON) {\n this.model.loadFromJSON(json);\n }\n else {\n console.warn('No compatible load method found on model.');\n }\n }\n saveModelAsJSONFile(filename = 'model.json') {\n let payload;\n if (this.model.toJSON) {\n payload = this.model.toJSON(true); // OnlineELM supports includeP; KernelELM ignores extra arg\n }\n else if (this.model.savedModelJSON) {\n payload = this.model.savedModelJSON;\n }\n else {\n console.warn('No compatible toJSON/savedModelJSON on model; skipping export.');\n return;\n }\n const blob = new Blob([typeof payload === 'string' ? payload : JSON.stringify(payload, null, 2)], { type: 'application/json' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = filename;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n }\n /* ============= Evaluation helpers ============= */\n top1Accuracy(pairs) {\n var _a;\n let correct = 0;\n for (const { input, label } of pairs) {\n const [pred] = this.predict(input, 1);\n if (((_a = pred === null || pred === void 0 ? void 0 : pred.completion) === null || _a === void 0 ? void 0 : _a.toLowerCase().trim()) === label.toLowerCase().trim())\n correct++;\n }\n return correct / Math.max(1, pairs.length);\n }\n crossEntropy(pairs) {\n var _a;\n let total = 0;\n for (const { input, label } of pairs) {\n const preds = this.predict(input, this.categories.length);\n const match = preds.find(p => p.completion.toLowerCase().trim() === label.toLowerCase().trim());\n const prob = (_a = match === null || match === void 0 ? void 0 : match.prob) !== null && _a !== void 0 ? _a : 1e-12;\n total += -Math.log(prob);\n }\n return total / Math.max(1, pairs.length);\n }\n /** Internal CE via W/b/β (only for classic ELM); others fall back to external CE. 
*/\n internalCrossEntropy(verbose = false) {\n if (!(this.model instanceof ELM)) {\n const ce = this.crossEntropy(this.trainPairs);\n if (verbose)\n console.log(`📏 Internal CE not applicable to ${this.engine}; external CE: ${ce.toFixed(4)}`);\n return ce;\n }\n const elm = this.model;\n const { model, categories } = elm;\n if (!model) {\n if (verbose)\n console.warn('⚠️ Cannot compute internal cross-entropy: model not trained.');\n return Infinity;\n }\n const X = [];\n const Y = [];\n for (const { input, label } of this.trainPairs) {\n const vec = this.encoder.normalize(this.encoder.encode(input));\n const idx = categories.indexOf(label);\n if (idx === -1)\n continue;\n X.push(vec);\n Y.push(oneHot(idx, categories.length));\n }\n const { W, b, beta } = model; // W: hidden x in, b: hidden x 1, beta: hidden x out\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const act = Activations.get(this.activation);\n const H = tempH.map(row => row.map((v, j) => act(v + b[j][0])));\n const logits = Matrix.multiply(H, beta);\n const probs = logits.map(row => Activations.softmax(row));\n let total = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < Y[0].length; j++) {\n if (Y[i][j] === 1) {\n const p = Math.min(Math.max(probs[i][j], 1e-15), 1 - 1e-15);\n total += -Math.log(p);\n }\n }\n }\n const ce = total / Math.max(1, Y.length);\n if (verbose)\n console.log(`📏 Internal Cross-Entropy (ELM W/b/β): ${ce.toFixed(4)}`);\n return ce;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// CharacterLangEncoderELM.ts — robust char/token text encoder on top of ELM\n// Upgrades:\n// • Safe preset extraction (no union-type errors on maxLen/charSet)\n// • Proper (inputs, labels) training via trainFromData()\n// • Hidden-layer embeddings via elm.getEmbedding() (with matrix fallback)\n// • Batch encode(), JSON I/O passthrough, gentle logging\n// • Activation typed, no reliance on private fields\n// If you have a preset (optional). Otherwise remove this import.\n// import { EnglishTokenPreset } from '../config/Presets';\nclass CharacterLangEncoderELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n // Make sure we have the basics\n if (!config.hiddenUnits) {\n throw new Error('CharacterLangEncoderELM requires hiddenUnits');\n }\n // Activation defaults to 'relu' if not provided\n this.activation = (_a = config.activation) !== null && _a !== void 0 ? _a : 'relu';\n // Safely coerce into a *text* config (avoid NumericConfig branch)\n // We do not assume a preset exists; provide conservative defaults.\n const textMaxLen = (_b = config === null || config === void 0 ? void 0 : config.maxLen) !== null && _b !== void 0 ? _b : 64;\n const textCharSet = (_c = config === null || config === void 0 ? void 0 : config.charSet) !== null && _c !== void 0 ? _c : 'abcdefghijklmnopqrstuvwxyz';\n const textTokDelim = (_d = config === null || config === void 0 ? void 0 : config.tokenizerDelimiter) !== null && _d !== void 0 ? _d : /\\s+/;\n // Merge into a TEXT-leaning config object.\n // NOTE: We keep categories if provided, but we will override them in train() from labels.\n this.config = Object.assign(Object.assign({}, config), { \n // Force text branch:\n useTokenizer: true, maxLen: textMaxLen, charSet: textCharSet, tokenizerDelimiter: textTokDelim, activation: this.activation, \n // Make logging robust:\n log: {\n modelName: 'CharacterLangEncoderELM',\n verbose: (_f = (_e = config.log) === null || _e === void 0 ? 
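/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Wires the AutoComplete wrapper above to hypothetical DOM nodes; 'pairs' is a\n * hypothetical [{ input, label }] training set.\n * ========================= */\n// const ac = new AutoComplete(pairs, {\n//     engine: 'elm', hiddenUnits: 128, activation: 'relu', topK: 5,\n//     inputElement: document.querySelector('#query'), outputElement: document.querySelector('#suggestions')\n// });\n// ac.train();\n// console.log(ac.predict('hel', 3)); // [{ completion, prob }, ...]\n// console.log(ac.top1Accuracy(pairs), ac.crossEntropy(pairs));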
void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = config.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? _k : 'info',\n } }); // cast to any to avoid union friction\n this.elm = new ELM(this.config);\n // Forward thresholds/export if present\n if (config.metrics) {\n this.elm.metrics = config.metrics;\n }\n if (this.config.exportFileName) {\n this.elm.config.exportFileName = this.config.exportFileName;\n }\n }\n /**\n * Train on parallel arrays: inputs (strings) + labels (strings).\n * We:\n * • dedupe labels → categories\n * • encode inputs with the ELM’s text encoder\n * • one-hot the labels\n * • call trainFromData(X, Y)\n */\n train(inputStrings, labels) {\n var _a, _b, _c, _d;\n if (!(inputStrings === null || inputStrings === void 0 ? void 0 : inputStrings.length) || !(labels === null || labels === void 0 ? void 0 : labels.length) || inputStrings.length !== labels.length) {\n throw new Error('train() expects equal-length inputStrings and labels');\n }\n // Build categories from labels\n const categories = Array.from(new Set(labels));\n this.elm.setCategories(categories);\n // Get the encoder (support getEncoder() or .encoder)\n const enc = (_c = (_b = (_a = this.elm).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.elm.encoder;\n if (!(enc === null || enc === void 0 ? void 0 : enc.encode) || !(enc === null || enc === void 0 ? void 0 : enc.normalize)) {\n throw new Error('ELM text encoder is not available. Ensure useTokenizer/maxLen/charSet are set.');\n }\n const X = [];\n const Y = [];\n for (let i = 0; i < inputStrings.length; i++) {\n const x = enc.normalize(enc.encode(String((_d = inputStrings[i]) !== null && _d !== void 0 ? _d : '')));\n X.push(x);\n const li = categories.indexOf(labels[i]);\n const y = new Array(categories.length).fill(0);\n if (li >= 0)\n y[li] = 1;\n Y.push(y);\n }\n // Classic ELM closed-form training\n this.elm.trainFromData(X, Y);\n }\n /**\n * Returns a dense embedding for one string.\n * Uses ELM.getEmbedding() if available; otherwise computes H = act(XW^T + b).\n * By design this returns the *hidden* feature (length = hiddenUnits).\n */\n encode(text) {\n var _a, _b, _c;\n // Get encoder\n const enc = (_c = (_b = (_a = this.elm).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.elm.encoder;\n if (!(enc === null || enc === void 0 ? void 0 : enc.encode) || !(enc === null || enc === void 0 ? void 0 : enc.normalize)) {\n throw new Error('ELM text encoder is not available. Train or configure text settings first.');\n }\n const x = enc.normalize(enc.encode(String(text !== null && text !== void 0 ? 
text : '')));\n // Prefer official embedding API if present\n if (typeof this.elm.getEmbedding === 'function') {\n const E = this.elm.getEmbedding([x]);\n if (Array.isArray(E) && Array.isArray(E[0]))\n return E[0];\n }\n // Fallback: compute hidden act via model params (W,b)\n const model = this.elm.model;\n if (!model)\n throw new Error('Model not trained.');\n const { W, b } = model; // W: hidden x in, b: hidden x 1\n const tempH = Matrix.multiply([x], Matrix.transpose(W)); // (1 x hidden)\n const act = Activations.get(this.activation);\n const H = tempH.map(row => row.map((v, j) => act(v + b[j][0]))); // (1 x hidden)\n // Return hidden vector\n return H[0];\n }\n /** Batch encoding convenience */\n encodeBatch(texts) {\n return texts.map(t => this.encode(t));\n }\n /** Load/save passthroughs */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// FeatureCombinerELM.ts — combine encoder vectors + metadata, train numeric ELM\nclass FeatureCombinerELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.categories = [];\n const hidden = config.hiddenUnits;\n const act = config.activation;\n if (typeof hidden !== 'number') {\n throw new Error('FeatureCombinerELM requires config.hiddenUnits (number)');\n }\n if (!act) {\n throw new Error('FeatureCombinerELM requires config.activation');\n }\n // Force numeric mode (tokenizer off). Provide a safe inputSize placeholder;\n // ELM's trainFromData learns actual dims from X at train-time.\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: false, inputSize: (_b = config.inputSize) !== null && _b !== void 0 ? _b : 1, log: {\n modelName: 'FeatureCombinerELM',\n verbose: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : false,\n toFile: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.toFile) !== null && _f !== void 0 ? _f : false,\n // @ts-ignore optional level passthrough\n level: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.level) !== null && _h !== void 0 ? _h : 'info',\n } });\n this.elm = new ELM(this.config);\n // Optional thresholds/export passthrough\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /** Concatenate encoder vector + metadata vector */\n static combineFeatures(encodedVec, meta) {\n // Fast path avoids spread copies in tight loops\n const out = new Array(encodedVec.length + meta.length);\n let i = 0;\n for (; i < encodedVec.length; i++)\n out[i] = encodedVec[i];\n for (let j = 0; j < meta.length; j++)\n out[i + j] = meta[j];\n return out;\n }\n /** Convenience for batch combination */\n static combineBatch(encoded, metas) {\n if (encoded.length !== metas.length) {\n throw new Error(`combineBatch: encoded length ${encoded.length} != metas length ${metas.length}`);\n }\n const X = new Array(encoded.length);\n for (let i = 0; i < encoded.length; i++) {\n X[i] = FeatureCombinerELM.combineFeatures(encoded[i], metas[i]);\n }\n return X;\n }\n /** Train from encoder vectors + metadata + labels (classification) */\n train(encoded, metas, labels) {\n if (!(encoded === null || encoded === void 0 ? 
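/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Trains the character/token encoder above and reads hidden-layer embeddings.\n * ========================= */\n// const enc = new CharacterLangEncoderELM({ hiddenUnits: 128, activation: 'relu', maxLen: 32 });\n// enc.train(['open the pod bay doors', 'play some jazz'], ['command', 'music']);\n// const vec = enc.encode('open the door'); // dense vector, length === hiddenUnits\n// const batch = enc.encodeBatch(['turn it up', 'shut it down']);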
void 0 : encoded.length) || !(metas === null || metas === void 0 ? void 0 : metas.length) || !(labels === null || labels === void 0 ? void 0 : labels.length)) {\n throw new Error('train: empty encoded/metas/labels');\n }\n if (encoded.length !== metas.length || encoded.length !== labels.length) {\n throw new Error('train: lengths must match (encoded, metas, labels)');\n }\n const X = FeatureCombinerELM.combineBatch(encoded, metas);\n this.categories = Array.from(new Set(labels));\n this.elm.setCategories(this.categories);\n const Y = labels.map((lab) => {\n const idx = this.categories.indexOf(lab);\n const row = new Array(this.categories.length).fill(0);\n if (idx >= 0)\n row[idx] = 1;\n return row;\n });\n // Closed-form solve via ELM; no private internals needed\n this.elm.trainFromData(X, Y);\n }\n /** Predict top-K labels from a single (vec, meta) pair */\n predict(encodedVec, meta, topK = 1) {\n const input = [FeatureCombinerELM.combineFeatures(encodedVec, meta)];\n const batches = this.elm.predictFromVector(input, topK);\n return batches[0];\n }\n /** Predict the single best label + prob */\n predictLabel(encodedVec, meta) {\n const [top] = this.predict(encodedVec, meta, 1);\n return top;\n }\n /** Get hidden embedding for (vec, meta) pair (useful for chaining) */\n getEmbedding(encodedVec, meta) {\n const input = [FeatureCombinerELM.combineFeatures(encodedVec, meta)];\n const H = this.elm.getEmbedding(input);\n return H[0];\n }\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ConfidenceClassifierELM.ts — numeric confidence classifier on top of ELM\n// Upgrades:\n// • Numeric-only pipeline (no tokenizer)\n// • Proper trainFromData(X, Y) with one-hot labels\n// • Vector-safe prediction (predictFromVector)\n// • Score helpers, batch APIs, and simple evaluation\n// • Robust logging + safe handling of ELMConfig union\n/**\n * ConfidenceClassifierELM is a lightweight wrapper that classifies whether\n * an upstream model’s prediction is \"low\" or \"high\" confidence based on\n * (embedding, metadata) numeric features.\n */\nclass ConfidenceClassifierELM {\n constructor(baseConfig, opts = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;\n this.baseConfig = baseConfig;\n this.categories = (_a = opts.categories) !== null && _a !== void 0 ? _a : ['low', 'high'];\n this.activation = (_b = opts.activation) !== null && _b !== void 0 ? _b : ((_c = baseConfig.activation) !== null && _c !== void 0 ? _c : 'relu');\n // We force a numeric ELM config. Many ELM builds don’t require inputSize\n // at construction because trainFromData(X,Y) uses X[0].length to size W.\n // We still pass useTokenizer=false and categories to be explicit.\n const cfg = Object.assign(Object.assign({}, this.baseConfig), { useTokenizer: false, categories: this.categories, activation: this.activation, log: {\n modelName: 'ConfidenceClassifierELM',\n verbose: (_f = (_e = (_d = baseConfig.log) === null || _d === void 0 ? void 0 : _d.verbose) !== null && _e !== void 0 ? _e : opts.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = baseConfig.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = baseConfig.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? 
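/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Combines upstream embeddings with metadata features via FeatureCombinerELM above;\n * 'embeddings', 'metas', and 'labels' are hypothetical aligned arrays.\n * ========================= */\n// const fc = new FeatureCombinerELM({ hiddenUnits: 64, activation: 'relu' });\n// fc.train(embeddings, metas, labels); // X[i] = embeddings[i] concatenated with metas[i]\n// console.log(fc.predict(embeddings[0], metas[0], 3)); // top-3 { label, prob }\n// const h = fc.getEmbedding(embeddings[0], metas[0]); // hidden features for chaining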
_k : 'info',\n }, \n // Optional passthroughs:\n exportFileName: (_l = opts.exportFileName) !== null && _l !== void 0 ? _l : this.baseConfig.exportFileName });\n this.elm = new ELM(cfg);\n // Forward thresholds if present\n if (this.baseConfig.metrics) {\n this.elm.metrics = this.baseConfig.metrics;\n }\n }\n /** One-hot helper */\n oneHot(n, idx) {\n const v = new Array(n).fill(0);\n if (idx >= 0 && idx < n)\n v[idx] = 1;\n return v;\n }\n /**\n * Train from numeric (vector, meta) → combined features + labels.\n * `vectors[i]` and `metas[i]` must be aligned with `labels[i]`.\n */\n train(vectors, metas, labels) {\n if (!(vectors === null || vectors === void 0 ? void 0 : vectors.length) || !(metas === null || metas === void 0 ? void 0 : metas.length) || !(labels === null || labels === void 0 ? void 0 : labels.length)) {\n throw new Error('train: empty inputs');\n }\n if (vectors.length !== metas.length || vectors.length !== labels.length) {\n throw new Error('train: vectors, metas, labels must have same length');\n }\n // Ensure categories include all observed labels (keeps order of existing categories first)\n const uniq = Array.from(new Set(labels));\n const merged = Array.from(new Set([...this.categories, ...uniq]));\n this.categories = merged;\n this.elm.setCategories(this.categories);\n // Build X, Y\n const X = new Array(vectors.length);\n const Y = new Array(vectors.length);\n for (let i = 0; i < vectors.length; i++) {\n const x = FeatureCombinerELM.combineFeatures(vectors[i], metas[i]); // numeric feature vector\n X[i] = x;\n const li = this.categories.indexOf(labels[i]);\n Y[i] = this.oneHot(this.categories.length, li);\n }\n // Closed-form ELM training\n this.elm.trainFromData(X, Y);\n }\n /** Predict full distribution for a single (vec, meta). */\n predict(vec, meta, topK = 2) {\n var _a, _b;\n const x = FeatureCombinerELM.combineFeatures(vec, meta);\n // Prefer vector-safe API; most Astermind builds expose predictFromVector([x], topK)\n const fn = this.elm.predictFromVector;\n if (typeof fn === 'function') {\n const out = fn.call(this.elm, [x], topK); // PredictResult[][]\n return Array.isArray(out) && Array.isArray(out[0]) ? out[0] : (out !== null && out !== void 0 ? out : []);\n }\n // Fallback to predict() if it supports numeric vectors (some builds do)\n const maybe = (_b = (_a = this.elm).predict) === null || _b === void 0 ? void 0 : _b.call(_a, x, topK);\n if (Array.isArray(maybe))\n return maybe;\n throw new Error('No vector-safe predict available on underlying ELM.');\n }\n /** Probability the label is \"high\" (or the second category by default). */\n predictScore(vec, meta, positive = 'high') {\n var _a;\n const dist = this.predict(vec, meta, this.categories.length);\n const hit = dist.find(d => d.label === positive);\n return (_a = hit === null || hit === void 0 ? void 0 : hit.prob) !== null && _a !== void 0 ? _a : 0;\n }\n /** Predicted top-1 label. */\n predictLabel(vec, meta) {\n var _a, _b;\n const dist = this.predict(vec, meta, 1);\n return (_b = (_a = dist[0]) === null || _a === void 0 ? void 0 : _a.label) !== null && _b !== void 0 ? _b : this.categories[0];\n }\n /** Batch prediction (distributions). */\n predictBatch(vectors, metas, topK = 2) {\n if (vectors.length !== metas.length) {\n throw new Error('predictBatch: vectors and metas must have same length');\n }\n return vectors.map((v, i) => this.predict(v, metas[i], topK));\n }\n /* ============ Simple evaluation helpers ============ */\n /** Compute accuracy and confusion counts for a labeled set. 
*/\n evaluate(vectors, metas, labels) {\n if (vectors.length !== metas.length || vectors.length !== labels.length) {\n throw new Error('evaluate: inputs must have same length');\n }\n const confusion = {};\n for (const a of this.categories) {\n confusion[a] = {};\n for (const b of this.categories)\n confusion[a][b] = 0;\n }\n let correct = 0;\n for (let i = 0; i < vectors.length; i++) {\n const pred = this.predictLabel(vectors[i], metas[i]);\n const gold = labels[i];\n if (pred === gold)\n correct++;\n if (!confusion[gold])\n confusion[gold] = {};\n if (confusion[gold][pred] === undefined)\n confusion[gold][pred] = 0;\n confusion[gold][pred]++;\n }\n return { accuracy: correct / labels.length, confusion };\n }\n /* ============ I/O passthroughs ============ */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n /** Access underlying ELM if needed */\n getELM() {\n return this.elm;\n }\n /** Current category ordering used by the model */\n getCategories() {\n return this.categories.slice();\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// EncoderELM.ts — string→vector encoder using ELM (batch) + OnlineELM (incremental)\nclass EncoderELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n if (typeof config.hiddenUnits !== 'number') {\n throw new Error('EncoderELM requires config.hiddenUnits (number).');\n }\n if (!config.activation) {\n throw new Error('EncoderELM requires config.activation.');\n }\n // Force text-encoder mode by default (safe even if NumericConfig is passed:\n // ELM will ignore tokenizer fields in numeric flows)\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: (_b = config.useTokenizer) !== null && _b !== void 0 ? _b : true, \n // keep charSet/maxLen if caller provided; otherwise ELM defaults will kick in\n log: {\n modelName: 'EncoderELM',\n verbose: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : false,\n toFile: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.toFile) !== null && _f !== void 0 ? _f : false,\n level: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.level) !== null && _h !== void 0 ? _h : 'info',\n } });\n this.elm = new ELM(this.config);\n // Forward thresholds/file export if present\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /** Batch training for string → dense vector mapping. */\n train(inputStrings, targetVectors) {\n if (!(inputStrings === null || inputStrings === void 0 ? void 0 : inputStrings.length) || !(targetVectors === null || targetVectors === void 0 ? 
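/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Gates an upstream prediction on the confidence classifier above; 'vectors',\n * 'metas', and 'labels' are hypothetical aligned arrays with labels in ['low', 'high'].\n * ========================= */\n// const cc = new ConfidenceClassifierELM({ hiddenUnits: 32, activation: 'relu' }, { categories: ['low', 'high'] });\n// cc.train(vectors, metas, labels);\n// if (cc.predictScore(vectors[0], metas[0], 'high') < 0.5) {\n//     // fall back to a safer handler / ask for clarification\n// }\n// console.log(cc.evaluate(vectors, metas, labels).accuracy);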
void 0 : targetVectors.length)) {\n throw new Error('train: empty inputs');\n }\n if (inputStrings.length !== targetVectors.length) {\n throw new Error('train: inputStrings and targetVectors lengths differ');\n }\n const enc = this.elm.encoder;\n if (!enc || typeof enc.encode !== 'function') {\n throw new Error('EncoderELM: underlying ELM has no encoder; set useTokenizer/maxLen/charSet in config.');\n }\n // X = normalized encoded text; Y = dense targets\n const X = inputStrings.map(s => enc.normalize(enc.encode(s)));\n const Y = targetVectors;\n // Closed-form solve via ELM\n // (ELM learns W,b randomly and solves β; Y can be any numeric outputDim)\n this.elm.trainFromData(X, Y);\n }\n /** Encode a string into a dense feature vector using the trained model. */\n encode(text) {\n var _a;\n const enc = this.elm.encoder;\n if (!enc || typeof enc.encode !== 'function') {\n throw new Error('encode: underlying ELM has no encoder');\n }\n const model = this.elm.model;\n if (!model)\n throw new Error('EncoderELM model has not been trained yet.');\n const x = enc.normalize(enc.encode(text)); // 1 x D\n const { W, b, beta } = model;\n // H = act( x W^T + b )\n const tempH = Matrix.multiply([x], Matrix.transpose(W));\n const act = Activations.get((_a = this.config.activation) !== null && _a !== void 0 ? _a : 'relu');\n const H = Activations.apply(tempH.map(row => row.map((v, j) => v + b[j][0])), act);\n // y = H β\n return Matrix.multiply(H, beta)[0];\n }\n /* ===================== Online / Incremental API ===================== */\n /**\n * Begin an online OS-ELM run for string→vector encoding.\n * Provide outputDim and either inputDim OR a sampleText we can encode to infer inputDim.\n */\n beginOnline(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n const outputDim = opts.outputDim | 0;\n if (!(outputDim > 0))\n throw new Error('beginOnline: outputDim must be > 0');\n // Derive inputDim if not provided\n let inputDim = opts.inputDim;\n if (inputDim == null) {\n const enc = this.elm.encoder;\n if (!opts.sampleText || !enc) {\n throw new Error('beginOnline: provide inputDim or sampleText (and ensure encoder is available).');\n }\n inputDim = enc.normalize(enc.encode(opts.sampleText)).length;\n }\n const hiddenUnits = ((_a = opts.hiddenUnits) !== null && _a !== void 0 ? _a : this.config.hiddenUnits) | 0;\n if (!(hiddenUnits > 0))\n throw new Error('beginOnline: hiddenUnits must be > 0');\n const activation = ((_c = (_b = opts.activation) !== null && _b !== void 0 ? _b : this.config.activation) !== null && _c !== void 0 ? _c : 'relu');\n // Build OnlineELM with our new config-style constructor\n this.online = new OnlineELM({\n inputDim: inputDim,\n outputDim,\n hiddenUnits,\n activation,\n ridgeLambda: (_d = opts.ridgeLambda) !== null && _d !== void 0 ? _d : 1e-2,\n weightInit: (_e = opts.weightInit) !== null && _e !== void 0 ? _e : 'xavier',\n forgettingFactor: (_f = opts.forgettingFactor) !== null && _f !== void 0 ? _f : 1.0,\n seed: (_g = opts.seed) !== null && _g !== void 0 ? _g : 1337,\n log: { verbose: (_j = (_h = this.config.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? 
_j : false, modelName: 'EncoderELM-Online' },\n });\n this.onlineInputDim = inputDim;\n this.onlineOutputDim = outputDim;\n }\n /**\n * Online partial fit with *pre-encoded* numeric vectors.\n * If not initialized, this call seeds the model via `init`, else it performs an `update`.\n */\n partialTrainOnlineVectors(batch) {\n if (!this.online || this.onlineInputDim == null || this.onlineOutputDim == null) {\n throw new Error('partialTrainOnlineVectors: call beginOnline() first.');\n }\n if (!(batch === null || batch === void 0 ? void 0 : batch.length))\n return;\n const D = this.onlineInputDim, O = this.onlineOutputDim;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { x, y } = batch[i];\n if (x.length !== D)\n throw new Error(`x length ${x.length} != inputDim ${D}`);\n if (y.length !== O)\n throw new Error(`y length ${y.length} != outputDim ${O}`);\n X[i] = x;\n Y[i] = y;\n }\n if (!this.online.beta || !this.online.P) {\n this.online.init(X, Y);\n }\n else {\n this.online.update(X, Y);\n }\n }\n /**\n * Online partial fit with raw texts and dense numeric targets.\n * Texts are encoded + normalized internally.\n */\n partialTrainOnlineTexts(batch) {\n if (!this.online || this.onlineInputDim == null || this.onlineOutputDim == null) {\n throw new Error('partialTrainOnlineTexts: call beginOnline() first.');\n }\n if (!(batch === null || batch === void 0 ? void 0 : batch.length))\n return;\n const enc = this.elm.encoder;\n if (!enc)\n throw new Error('partialTrainOnlineTexts: encoder not available on underlying ELM');\n const D = this.onlineInputDim, O = this.onlineOutputDim;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { text, target } = batch[i];\n const x = enc.normalize(enc.encode(text));\n if (x.length !== D)\n throw new Error(`encoded text dim ${x.length} != inputDim ${D}`);\n if (target.length !== O)\n throw new Error(`target length ${target.length} != outputDim ${O}`);\n X[i] = x;\n Y[i] = target;\n }\n if (!this.online.beta || !this.online.P) {\n this.online.init(X, Y);\n }\n else {\n this.online.update(X, Y);\n }\n }\n /**\n * Finalize the online run by publishing learned weights into the standard ELM model.\n * After this, the normal encode() path works unchanged.\n */\n endOnline() {\n if (!this.online)\n return;\n const W = this.online.W;\n const b = this.online.b;\n const beta = this.online.beta;\n if (!W || !b || !beta) {\n throw new Error('endOnline: online model has no learned parameters (did you call init/fit/update?)');\n }\n this.elm.model = { W, b, beta };\n // Clear online state\n this.online = undefined;\n this.onlineInputDim = undefined;\n this.onlineOutputDim = undefined;\n }\n /* ===================== I/O passthrough ===================== */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. 
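/* =========================\n * Usage sketch (editorial, illustrative only; not part of the published bundle).\n * Batch string-to-vector training with EncoderELM above, followed by an\n * incremental online pass that is published back into the batch model.\n * ========================= */\n// const se = new EncoderELM({ hiddenUnits: 128, activation: 'relu' });\n// se.train(['red', 'green'], [[1, 0, 0], [0, 1, 0]]); // dense numeric targets\n// const v = se.encode('reddish');\n// se.beginOnline({ outputDim: 3, sampleText: 'red' }); // inputDim inferred from the encoder\n// se.partialTrainOnlineTexts([{ text: 'blue', target: [0, 0, 1] }]);\n// se.endOnline(); // publishes W/b/beta into the standard ELM model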
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// intentClassifier.ts — ELM-based intent classification (text → label)\nclass IntentClassifier {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.categories = [];\n // Basic guardrails (common footguns)\n const hidden = config.hiddenUnits;\n const act = config.activation;\n if (typeof hidden !== 'number') {\n throw new Error('IntentClassifier requires config.hiddenUnits (number)');\n }\n if (!act) {\n throw new Error('IntentClassifier requires config.activation');\n }\n // Force TEXT mode (tokenizer on). We set categories during train().\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: true, log: {\n modelName: 'IntentClassifier',\n verbose: (_c = (_b = config.log) === null || _b === void 0 ? void 0 : _b.verbose) !== null && _c !== void 0 ? _c : false,\n toFile: (_e = (_d = config.log) === null || _d === void 0 ? void 0 : _d.toFile) !== null && _e !== void 0 ? _e : false,\n // @ts-ignore: optional passthrough\n level: (_g = (_f = config.log) === null || _f === void 0 ? void 0 : _f.level) !== null && _g !== void 0 ? _g : 'info',\n } });\n this.model = new ELM(this.config);\n // Optional thresholds/export passthrough\n if (config.metrics)\n this.model.metrics = config.metrics;\n if (config.exportFileName)\n this.model.config.exportFileName = config.exportFileName;\n }\n /* ==================== Training ==================== */\n /**\n * Train from (text, label) pairs using closed-form ELM solve.\n * Uses the ELM's UniversalEncoder (token mode).\n */\n train(textLabelPairs, augmentation) {\n var _a, _b, _c, _d, _e;\n if (!(textLabelPairs === null || textLabelPairs === void 0 ? void 0 : textLabelPairs.length))\n throw new Error('train: empty training data');\n // Build label set\n this.categories = Array.from(new Set(textLabelPairs.map(p => p.label)));\n this.model.setCategories(this.categories);\n // Prepare encoder\n const enc = (_c = (_b = (_a = this.model).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.model.encoder;\n if (!enc)\n throw new Error('IntentClassifier: encoder unavailable on ELM instance.');\n // Inline augmentation (prefix/suffix/noise) — lightweight so we avoid importing Augment here\n const charSet = (augmentation === null || augmentation === void 0 ? void 0 : augmentation.charSet) ||\n enc.charSet ||\n 'abcdefghijklmnopqrstuvwxyz';\n const makeNoisy = (s, rate) => {\n var _a, _b;\n if (rate === void 0) { rate = (_a = augmentation === null || augmentation === void 0 ? void 0 : augmentation.noiseRate) !== null && _a !== void 0 ? _a : 0.05; }\n if (!(augmentation === null || augmentation === void 0 ? void 0 : augmentation.includeNoise) || rate <= 0)\n return [s];\n const arr = s.split('');\n for (let i = 0; i < arr.length; i++) {\n if (Math.random() < rate) {\n const r = Math.floor(Math.random() * charSet.length);\n arr[i] = (_b = charSet[r]) !== null && _b !== void 0 ? _b : arr[i];\n }\n }\n return [s, arr.join('')];\n };\n const expanded = [];\n for (const p of textLabelPairs) {\n const base = [p.text];\n const withPrefixes = ((_d = augmentation === null || augmentation === void 0 ? void 0 : augmentation.prefixes) !== null && _d !== void 0 ? _d : []).map(px => `${px}${p.text}`);\n const withSuffixes = ((_e = augmentation === null || augmentation === void 0 ? void 0 : augmentation.suffixes) !== null && _e !== void 0 ? 
_e : []).map(sx => `${p.text}${sx}`);\n const candidates = [...base, ...withPrefixes, ...withSuffixes];\n for (const c of candidates) {\n for (const v of makeNoisy(c)) {\n expanded.push({ text: v, label: p.label });\n }\n }\n }\n // Encode + one-hot\n const X = new Array(expanded.length);\n const Y = new Array(expanded.length);\n for (let i = 0; i < expanded.length; i++) {\n const { text, label } = expanded[i];\n const vec = enc.normalize(enc.encode(text));\n X[i] = vec;\n const row = new Array(this.categories.length).fill(0);\n const li = this.categories.indexOf(label);\n if (li >= 0)\n row[li] = 1;\n Y[i] = row;\n }\n // Closed-form ELM training\n this.model.trainFromData(X, Y);\n }\n /* ==================== Inference ==================== */\n /** Top-K predictions with an optional probability threshold */\n predict(text, topK = 1, threshold = 0) {\n const res = this.model.predict(text, Math.max(1, topK));\n return threshold > 0 ? res.filter(r => r.prob >= threshold) : res;\n }\n /** Batched predict */\n predictBatch(texts, topK = 1, threshold = 0) {\n return texts.map(t => this.predict(t, topK, threshold));\n }\n /** Convenience: best label + prob (or undefined if below threshold) */\n predictLabel(text, threshold = 0) {\n const [top] = this.predict(text, 1, threshold);\n return top;\n }\n /* ==================== Model I/O ==================== */\n loadModelFromJSON(json) {\n this.model.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.model.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// IO.ts - Import/export utilities for labeled training data\nclass IO {\n static importJSON(json) {\n try {\n const data = JSON.parse(json);\n if (!Array.isArray(data))\n throw new Error('Invalid format');\n return data.filter(item => typeof item.text === 'string' && typeof item.label === 'string');\n }\n catch (err) {\n console.error('Failed to parse training data JSON:', err);\n return [];\n }\n }\n static exportJSON(pairs) {\n return JSON.stringify(pairs, null, 2);\n }\n static importDelimited(text, delimiter = ',', hasHeader = true) {\n var _a, _b, _c, _d;\n const lines = text.trim().split('\\n');\n const examples = [];\n const headers = hasHeader\n ? lines[0].split(delimiter).map(h => h.trim().toLowerCase())\n : lines[0].split(delimiter).length === 1\n ? ['label']\n : ['text', 'label'];\n const startIndex = hasHeader ? 1 : 0;\n for (let i = startIndex; i < lines.length; i++) {\n const parts = lines[i].split(delimiter);\n if (parts.length === 1) {\n examples.push({ text: parts[0].trim(), label: parts[0].trim() });\n }\n else {\n const textIdx = headers.indexOf('text');\n const labelIdx = headers.indexOf('label');\n const text = textIdx !== -1 ? (_a = parts[textIdx]) === null || _a === void 0 ? void 0 : _a.trim() : (_b = parts[0]) === null || _b === void 0 ? void 0 : _b.trim();\n const label = labelIdx !== -1 ? (_c = parts[labelIdx]) === null || _c === void 0 ? void 0 : _c.trim() : (_d = parts[1]) === null || _d === void 0 ? void 0 : _d.trim();\n if (text && label) {\n examples.push({ text, label });\n }\n }\n }\n return examples;\n }\n static exportDelimited(pairs, delimiter = ',', includeHeader = true) {\n const header = includeHeader ? 
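A usage sketch for the IntentClassifier above. The import path mirrors the bundled module sources; the training pairs and augmentation options are illustrative.

import { IntentClassifier } from '@astermind/astermind-community';

const clf = new IntentClassifier({ hiddenUnits: 64, activation: 'relu' });
clf.train(
  [
    { text: 'turn on the lights', label: 'lights_on' },
    { text: 'what time is it', label: 'ask_time' },
  ],
  // Optional inline augmentation, matching the prefix/suffix/noise handling above
  { prefixes: ['please '], suffixes: [' now'], includeNoise: true, noiseRate: 0.05 }
);
const best = clf.predictLabel('could you turn on the lights', 0.3); // undefined if top prob < 0.3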
`text${delimiter}label\\n` : '';\n const rows = pairs.map(p => `${p.text.replace(new RegExp(delimiter, 'g'), '')}${delimiter}${p.label.replace(new RegExp(delimiter, 'g'), '')}`);\n return header + rows.join('\\n');\n }\n static importCSV(csv, hasHeader = true) {\n return this.importDelimited(csv, ',', hasHeader);\n }\n static exportCSV(pairs, includeHeader = true) {\n return this.exportDelimited(pairs, ',', includeHeader);\n }\n static importTSV(tsv, hasHeader = true) {\n return this.importDelimited(tsv, '\\t', hasHeader);\n }\n static exportTSV(pairs, includeHeader = true) {\n return this.exportDelimited(pairs, '\\t', includeHeader);\n }\n static inferSchemaFromCSV(csv) {\n var _a;\n const lines = csv.trim().split('\\n');\n if (lines.length === 0)\n return { fields: [] };\n const header = lines[0].split(',').map(h => h.trim().toLowerCase());\n const row = ((_a = lines[1]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];\n const fields = header.map((name, i) => {\n var _a;\n const sample = (_a = row[i]) === null || _a === void 0 ? void 0 : _a.trim();\n let type = 'unknown';\n if (!sample)\n type = 'unknown';\n else if (!isNaN(Number(sample)))\n type = 'number';\n else if (sample === 'true' || sample === 'false')\n type = 'boolean';\n else\n type = 'string';\n return { name, type };\n });\n const suggestedMapping = {\n text: header.find(h => h.includes('text') || h.includes('utterance') || h.includes('input')) || header[0],\n label: header.find(h => h.includes('label') || h.includes('intent') || h.includes('tag')) || header[1] || header[0],\n };\n return { fields, suggestedMapping };\n }\n static inferSchemaFromJSON(json) {\n try {\n const data = JSON.parse(json);\n if (!Array.isArray(data) || data.length === 0 || typeof data[0] !== 'object')\n return { fields: [] };\n const keys = Object.keys(data[0]);\n const fields = keys.map(key => {\n const val = data[0][key];\n let type = 'unknown';\n if (typeof val === 'string')\n type = 'string';\n else if (typeof val === 'number')\n type = 'number';\n else if (typeof val === 'boolean')\n type = 'boolean';\n return { name: key.toLowerCase(), type };\n });\n const suggestedMapping = {\n text: keys.find(k => k.toLowerCase().includes('text') || k.toLowerCase().includes('utterance') || k.toLowerCase().includes('input')) || keys[0],\n label: keys.find(k => k.toLowerCase().includes('label') || k.toLowerCase().includes('intent') || k.toLowerCase().includes('tag')) || keys[1] || keys[0],\n };\n return { fields, suggestedMapping };\n }\n catch (err) {\n console.error('Failed to infer schema from JSON:', err);\n return { fields: [] };\n }\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// LanguageClassifier.ts — upgraded for new ELM/OnlineELM APIs (with requireEncoder guard)\nclass LanguageClassifier {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f;\n this.config = Object.assign(Object.assign({}, config), { log: {\n modelName: 'LanguageClassifier',\n verbose: (_b = (_a = config.log) === null || _a === void 0 ? void 0 : _a.verbose) !== null && _b !== void 0 ? _b : false,\n toFile: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.toFile) !== null && _d !== void 0 ? _d : false,\n level: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.level) !== null && _f !== void 0 ? 
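The IO helpers above round-trip labeled pairs between JSON, CSV, and TSV; a small sketch (data illustrative, import path assumed from the bundle):

import { IO } from '@astermind/astermind-community';

const csv = 'text,label\nhi there,greeting\nsee you,farewell';
const pairs = IO.importCSV(csv);           // [{ text: 'hi there', label: 'greeting' }, ...]
const schema = IO.inferSchemaFromCSV(csv); // { fields, suggestedMapping: { text, label } }
const tsv = IO.exportTSV(pairs);           // tab-delimited, with a header row by default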
_f : 'info',\n } });\n this.elm = new ELM(this.config);\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /* ============== tiny helper to guarantee an encoder ============== */\n requireEncoder() {\n const enc = this.elm.encoder;\n if (!enc) {\n throw new Error('LanguageClassifier: encoder unavailable. Use text mode (useTokenizer=true with maxLen/charSet) ' +\n 'or pass a UniversalEncoder in the ELM config.');\n }\n return enc;\n }\n /* ================= I/O helpers ================= */\n loadTrainingData(raw, format = 'json') {\n switch (format) {\n case 'csv': return IO.importCSV(raw);\n case 'tsv': return IO.importTSV(raw);\n case 'json':\n default: return IO.importJSON(raw);\n }\n }\n /* ================= Supervised training ================= */\n /** Train from labeled text examples (uses internal encoder). */\n train(data) {\n if (!(data === null || data === void 0 ? void 0 : data.length))\n throw new Error('LanguageClassifier.train: empty dataset');\n const enc = this.requireEncoder();\n const categories = Array.from(new Set(data.map(d => d.label)));\n this.elm.setCategories(categories);\n const X = [];\n const Y = [];\n for (const { text, label } of data) {\n const x = enc.normalize(enc.encode(text));\n const yi = categories.indexOf(label);\n if (yi < 0)\n continue;\n X.push(x);\n Y.push(this.elm.oneHot(categories.length, yi));\n }\n this.elm.trainFromData(X, Y);\n }\n /** Predict from raw text (uses internal encoder). */\n predict(text, topK = 3) {\n // let ELM handle encode→predict (works in text mode)\n return this.elm.predict(text, topK);\n }\n /** Train using already-encoded numeric vectors (no text encoder). */\n trainVectors(data) {\n var _a;\n if (!(data === null || data === void 0 ? void 0 : data.length))\n throw new Error('LanguageClassifier.trainVectors: empty dataset');\n const categories = Array.from(new Set(data.map(d => d.label)));\n this.elm.setCategories(categories);\n const X = data.map(d => d.vector);\n const Y = data.map(d => this.elm.oneHot(categories.length, categories.indexOf(d.label)));\n if (typeof this.elm.trainFromData === 'function') {\n this.elm.trainFromData(X, Y);\n return;\n }\n // Fallback closed-form (compat)\n const hidden = this.config.hiddenUnits;\n const W = this.elm.randomMatrix(hidden, X[0].length);\n const b = this.elm.randomMatrix(hidden, 1);\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const act = Activations.get((_a = this.config.activation) !== null && _a !== void 0 ? _a : 'relu');\n const H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), act);\n const Hpinv = this.elm.pseudoInverse(H);\n const beta = Matrix.multiply(Hpinv, Y);\n this.elm.model = { W, b, beta };\n }\n /** Predict from an already-encoded vector (no text encoder). */\n predictFromVector(vec, topK = 1) {\n const out = this.elm.predictFromVector([vec], topK);\n return out[0];\n }\n /* ================= Online (incremental) API ================= */\n beginOnline(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n const cats = opts.categories.slice();\n const D = opts.inputDim | 0;\n if (!cats.length)\n throw new Error('beginOnline: categories must be non-empty');\n if (D <= 0)\n throw new Error('beginOnline: inputDim must be > 0');\n const H = ((_a = opts.hiddenUnits) !== null && _a !== void 0 ? 
_a : this.config.hiddenUnits) | 0;\n if (H <= 0)\n throw new Error('beginOnline: hiddenUnits must be > 0');\n const activation = (_c = (_b = opts.activation) !== null && _b !== void 0 ? _b : this.config.activation) !== null && _c !== void 0 ? _c : 'relu';\n const ridgeLambda = Math.max((_d = opts.lambda) !== null && _d !== void 0 ? _d : 1e-2, 1e-12);\n this.onlineMdl = new OnlineELM({\n inputDim: D,\n outputDim: cats.length,\n hiddenUnits: H,\n activation,\n ridgeLambda,\n seed: (_e = opts.seed) !== null && _e !== void 0 ? _e : 1337,\n weightInit: (_f = opts.weightInit) !== null && _f !== void 0 ? _f : 'xavier',\n forgettingFactor: (_g = opts.forgettingFactor) !== null && _g !== void 0 ? _g : 1.0,\n log: { verbose: (_j = (_h = this.config.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? _j : false, modelName: 'LanguageClassifier/Online' },\n });\n this.onlineCats = cats;\n this.onlineInputDim = D;\n }\n partialTrainVectorsOnline(batch) {\n if (!this.onlineMdl || !this.onlineCats || !this.onlineInputDim) {\n throw new Error('Call beginOnline() before partialTrainVectorsOnline().');\n }\n if (!batch.length)\n return;\n const D = this.onlineInputDim;\n const O = this.onlineCats.length;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { vector, label } = batch[i];\n if (vector.length !== D)\n throw new Error(`vector dim ${vector.length} != inputDim ${D}`);\n X[i] = vector.slice();\n const y = new Array(O).fill(0);\n const li = this.onlineCats.indexOf(label);\n if (li < 0)\n throw new Error(`Unknown label \"${label}\" for this online run.`);\n y[li] = 1;\n Y[i] = y;\n }\n if (this.onlineMdl.beta && this.onlineMdl.P) {\n this.onlineMdl.update(X, Y);\n }\n else {\n this.onlineMdl.init(X, Y);\n }\n }\n endOnline() {\n if (!this.onlineMdl || !this.onlineCats)\n return;\n const W = this.onlineMdl.W;\n const b = this.onlineMdl.b;\n const B = this.onlineMdl.beta;\n if (!W || !b || !B)\n throw new Error('endOnline: online model is not initialized.');\n this.elm.setCategories(this.onlineCats);\n this.elm.model = { W, b, beta: B };\n this.onlineMdl = undefined;\n this.onlineCats = undefined;\n this.onlineInputDim = undefined;\n }\n /* ================= Persistence ================= */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// RefinerELM.ts — numeric “refinement” classifier on top of arbitrary feature vectors\nclass RefinerELM {\n constructor(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n if (!Number.isFinite(opts.inputSize) || opts.inputSize <= 0) {\n throw new Error('RefinerELM: opts.inputSize must be a positive number.');\n }\n if (!Number.isFinite(opts.hiddenUnits) || opts.hiddenUnits <= 0) {\n throw new Error('RefinerELM: opts.hiddenUnits must be a positive number.');\n }\n // Build a *numeric* ELM config (no text fields here)\n const numericConfig = {\n // numeric discriminator:\n useTokenizer: false,\n inputSize: opts.inputSize,\n // required for ELM\n categories: (_a = opts.categories) !== null && _a !== void 0 ? _a : [],\n // base config\n hiddenUnits: opts.hiddenUnits,\n activation: (_b = opts.activation) !== null && _b !== void 0 ? 
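A sketch of both LanguageClassifier paths above: the text-mode closed-form solve, and the vector-based online run. Import path assumed from the bundle; dims and labels are illustrative.

import { LanguageClassifier } from '@astermind/astermind-community';

const lc = new LanguageClassifier({ hiddenUnits: 128, activation: 'relu', useTokenizer: true, maxLen: 40 });
lc.train(lc.loadTrainingData('text,label\nbonjour,French\nhello,English', 'csv'));
console.log(lc.predict('good morning', 3)); // top-3 { label, prob }

// Incremental run over pre-encoded vectors:
lc.beginOnline({ categories: ['English', 'French'], inputDim: 3 });
lc.partialTrainVectorsOnline([{ vector: [0.2, 0.7, 0.1], label: 'English' }]); // first batch calls init()
lc.endOnline(); // copies W/b/beta back into the standard ELM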
_b : 'relu',\n ridgeLambda: opts.ridgeLambda,\n dropout: opts.dropout,\n weightInit: opts.weightInit,\n // misc\n exportFileName: opts.exportFileName,\n log: {\n modelName: (_d = (_c = opts.log) === null || _c === void 0 ? void 0 : _c.modelName) !== null && _d !== void 0 ? _d : 'RefinerELM',\n verbose: (_f = (_e = opts.log) === null || _e === void 0 ? void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = opts.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = opts.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? _k : 'info',\n },\n };\n this.elm = new ELM(numericConfig);\n // Set metric thresholds on the instance (not inside the config)\n if (opts.metrics) {\n this.elm.metrics = opts.metrics;\n }\n }\n /** Train from feature vectors + string labels. */\n train(inputs, labels, opts) {\n var _a;\n if (!(inputs === null || inputs === void 0 ? void 0 : inputs.length) || !(labels === null || labels === void 0 ? void 0 : labels.length) || inputs.length !== labels.length) {\n throw new Error('RefinerELM.train: inputs/labels must be non-empty and aligned.');\n }\n // Allow overriding categories at train time\n const categories = (_a = opts === null || opts === void 0 ? void 0 : opts.categories) !== null && _a !== void 0 ? _a : Array.from(new Set(labels));\n this.elm.setCategories(categories);\n const Y = labels.map((label) => this.elm.oneHot(categories.length, categories.indexOf(label)));\n // Public training path; no 'task' key here\n const options = {};\n if ((opts === null || opts === void 0 ? void 0 : opts.reuseWeights) !== undefined)\n options.reuseWeights = opts.reuseWeights;\n if (opts === null || opts === void 0 ? void 0 : opts.sampleWeights)\n options.weights = opts.sampleWeights;\n this.elm.trainFromData(inputs, Y, options);\n }\n /** Full probability vector aligned to `this.elm.categories`. */\n predictProbaFromVector(vec) {\n // Use the vector-safe path provided by the core ELM\n const out = this.elm.predictFromVector([vec], /*topK*/ this.elm.categories.length);\n // predictFromVector returns Array<PredictResult[]>, i.e., topK sorted.\n // We want a dense prob vector in category order, so map from topK back:\n const probs = new Array(this.elm.categories.length).fill(0);\n if (out && out[0]) {\n for (const { label, prob } of out[0]) {\n const idx = this.elm.categories.indexOf(label);\n if (idx >= 0)\n probs[idx] = prob;\n }\n }\n return probs;\n }\n /** Top-K predictions ({label, prob}) for a single vector. */\n predict(vec, topK = 1) {\n const [res] = this.elm.predictFromVector([vec], topK);\n return res;\n }\n /** Batch top-K predictions for an array of vectors. */\n predictBatch(vectors, topK = 1) {\n return this.elm.predictFromVector(vectors, topK);\n }\n /** Hidden-layer embedding(s) — useful for chaining. */\n embed(vec) {\n return this.elm.getEmbedding([vec])[0];\n }\n embedBatch(vectors) {\n return this.elm.getEmbedding(vectors);\n }\n /** Persistence passthroughs */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. 
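RefinerELM is purely numeric, so it trains straight from feature vectors; a minimal sketch (values illustrative):

import { RefinerELM } from '@astermind/astermind-community';

const refiner = new RefinerELM({ inputSize: 4, hiddenUnits: 32 });
refiner.train(
  [[0.1, 0.9, 0.0, 0.2], [0.8, 0.1, 0.3, 0.0]], // feature rows
  ['positive', 'negative']                      // aligned string labels
);
console.log(refiner.predict([0.2, 0.7, 0.1, 0.1], 2));             // top-2 { label, prob }
console.log(refiner.predictProbaFromVector([0.2, 0.7, 0.1, 0.1])); // dense probs in category order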
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// VotingClassifierELM.ts — meta-classifier that learns to combine multiple ELMs' predictions\nclass VotingClassifierELM {\n // Keep constructor shape compatible with your existing calls\n constructor(baseConfig) {\n this.baseConfig = baseConfig;\n this.modelWeights = [];\n this.usesConfidence = false;\n this.categories = baseConfig.categories || ['English', 'French', 'Spanish'];\n }\n setModelWeights(weights) {\n this.modelWeights = weights.slice();\n }\n calibrateWeights(predictionLists, trueLabels) {\n var _a, _b;\n const numModels = predictionLists.length;\n const numExamples = trueLabels.length;\n const accuracies = new Array(numModels).fill(0);\n for (let m = 0; m < numModels; m++) {\n let correct = 0;\n for (let i = 0; i < numExamples; i++) {\n if (predictionLists[m][i] === trueLabels[i])\n correct++;\n }\n accuracies[m] = correct / Math.max(1, numExamples);\n }\n const total = accuracies.reduce((s, a) => s + a, 0) || 1;\n this.modelWeights = accuracies.map(a => a / total);\n if ((_b = (_a = this.baseConfig) === null || _a === void 0 ? void 0 : _a.log) === null || _b === void 0 ? void 0 : _b.verbose) {\n console.log('🔧 Calibrated model weights:', this.modelWeights);\n }\n }\n /** Train meta-classifier on model predictions (+ optional confidences) and true labels. */\n train(predictionLists, // shape: [numModels][numExamples]\n confidenceLists, trueLabels) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n if (!Array.isArray(predictionLists) || predictionLists.length === 0 || !trueLabels) {\n throw new Error('VotingClassifierELM.train: invalid inputs');\n }\n const numModels = predictionLists.length;\n const numExamples = predictionLists[0].length;\n for (const list of predictionLists) {\n if (list.length !== numExamples)\n throw new Error('Prediction list lengths must match');\n }\n this.usesConfidence = Array.isArray(confidenceLists);\n if (this.usesConfidence) {\n if (confidenceLists.length !== numModels)\n throw new Error('Confidence list count != numModels');\n for (const list of confidenceLists) {\n if (list.length !== numExamples)\n throw new Error('Confidence list length mismatch');\n }\n }\n if (!this.modelWeights.length || this.modelWeights.length !== numModels) {\n this.calibrateWeights(predictionLists, trueLabels);\n }\n // Categories (target space) => from true labels\n this.categories = Array.from(new Set(trueLabels));\n const C = this.categories.length;\n // Compute numeric input size for the meta-ELM:\n // per-model features = one-hot over C + (optional) 1 confidence\n const perModel = C + (this.usesConfidence ? 1 : 0);\n this.inputSize = numModels * perModel;\n // Build X, Y\n const X = new Array(numExamples);\n for (let i = 0; i < numExamples; i++) {\n let row = [];\n for (let m = 0; m < numModels; m++) {\n const predLabel = predictionLists[m][i];\n if (predLabel == null)\n throw new Error(`Invalid label at predictionLists[${m}][${i}]`);\n const w = (_a = this.modelWeights[m]) !== null && _a !== void 0 ? 
_a : 1;\n // one-hot over final categories (C)\n const idx = this.categories.indexOf(predLabel);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n row = row.concat(oh.map(x => x * w));\n if (this.usesConfidence) {\n const conf = confidenceLists[m][i];\n const norm = Math.max(0, Math.min(1, Number(conf) || 0));\n row.push(norm * w);\n }\n }\n X[i] = row;\n }\n const Y = trueLabels.map(lbl => {\n const idx = this.categories.indexOf(lbl);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n return oh;\n });\n // Construct numeric ELM config now that we know inputSize\n const cfg = {\n useTokenizer: false, // numeric mode\n inputSize: this.inputSize,\n categories: this.categories,\n hiddenUnits: (_b = this.baseConfig.hiddenUnits) !== null && _b !== void 0 ? _b : 64,\n activation: (_c = this.baseConfig.activation) !== null && _c !== void 0 ? _c : 'relu',\n ridgeLambda: this.baseConfig.ridgeLambda,\n dropout: this.baseConfig.dropout,\n weightInit: this.baseConfig.weightInit,\n exportFileName: this.baseConfig.exportFileName,\n log: {\n modelName: (_f = (_e = (_d = this.baseConfig) === null || _d === void 0 ? void 0 : _d.log) === null || _e === void 0 ? void 0 : _e.modelName) !== null && _f !== void 0 ? _f : 'VotingClassifierELM',\n verbose: (_j = (_h = (_g = this.baseConfig) === null || _g === void 0 ? void 0 : _g.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? _j : false,\n toFile: (_m = (_l = (_k = this.baseConfig) === null || _k === void 0 ? void 0 : _k.log) === null || _l === void 0 ? void 0 : _l.toFile) !== null && _m !== void 0 ? _m : false,\n level: (_q = (_p = (_o = this.baseConfig) === null || _o === void 0 ? void 0 : _o.log) === null || _p === void 0 ? void 0 : _p.level) !== null && _q !== void 0 ? _q : 'info',\n },\n };\n // Create (or recreate) the inner ELM with correct dims\n this.elm = new ELM(cfg);\n // Forward optional metrics gate\n if (this.baseConfig.metrics) {\n this.elm.metrics = this.baseConfig.metrics;\n }\n // Train numerically\n this.elm.trainFromData(X, Y);\n }\n /** Predict final label from a single stacked set of model labels (+ optional confidences). */\n predict(labels, confidences, topK = 1) {\n var _a;\n if (!this.elm)\n throw new Error('VotingClassifierELM: call train() before predict().');\n if (!(labels === null || labels === void 0 ? void 0 : labels.length))\n throw new Error('VotingClassifierELM.predict: empty labels');\n const C = this.categories.length;\n const numModels = labels.length;\n // Build numeric input row consistent with training\n let row = [];\n for (let m = 0; m < numModels; m++) {\n const w = (_a = this.modelWeights[m]) !== null && _a !== void 0 ? _a : 1;\n const idx = this.categories.indexOf(labels[m]);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n row = row.concat(oh.map(x => x * w));\n if (this.usesConfidence) {\n const norm = Math.max(0, Math.min(1, Number(confidences === null || confidences === void 0 ? 
void 0 : confidences[m]) || 0));\n row.push(norm * w);\n }\n }\n const [res] = this.elm.predictFromVector([row], topK);\n return res;\n }\n loadModelFromJSON(json) {\n var _a, _b, _c, _d, _e;\n if (!this.elm)\n this.elm = new ELM({\n // minimal placeholder; will be overwritten by fromJSON content\n useTokenizer: false,\n inputSize: 1,\n categories: ['_tmp'],\n hiddenUnits: 1,\n activation: 'relu',\n log: { modelName: 'VotingClassifierELM' },\n });\n this.elm.loadModelFromJSON(json);\n // Try to recover categories & inputSize from loaded model\n this.categories = (_a = this.elm.categories) !== null && _a !== void 0 ? _a : this.categories;\n this.inputSize = (_e = ((_d = (_c = (_b = this.elm.model) === null || _b === void 0 ? void 0 : _b.W) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.length)) !== null && _e !== void 0 ? _e : this.inputSize;\n }\n saveModelAsJSONFile(filename) {\n if (!this.elm)\n throw new Error('VotingClassifierELM: no model to save.');\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// adaptive-online-elm.ts — Adaptive Online ELM with dynamic hidden unit adjustment\n// Adjusts hidden units dynamically based on data complexity\n// Import OnlineELM directly - now that we're using ES modules, this works!\n/**\n * Adaptive Online ELM that dynamically adjusts hidden units\n * Features:\n * - Grows hidden units when error is high\n * - Shrinks hidden units when performance is stable\n * - Maintains efficiency while adapting to data complexity\n */\nclass AdaptiveOnlineELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n this.elm = null;\n this.trained = false;\n this.errorHistory = [];\n this.performanceHistory = [];\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n initialHiddenUnits: (_a = options.initialHiddenUnits) !== null && _a !== void 0 ? _a : 128,\n minHiddenUnits: (_b = options.minHiddenUnits) !== null && _b !== void 0 ? _b : 32,\n maxHiddenUnits: (_c = options.maxHiddenUnits) !== null && _c !== void 0 ? _c : 1024,\n growthThreshold: (_d = options.growthThreshold) !== null && _d !== void 0 ? _d : 0.3,\n shrinkThreshold: (_e = options.shrinkThreshold) !== null && _e !== void 0 ? _e : 0.1,\n growthFactor: (_f = options.growthFactor) !== null && _f !== void 0 ? _f : 1.5,\n shrinkFactor: (_g = options.shrinkFactor) !== null && _g !== void 0 ? _g : 0.8,\n activation: (_h = options.activation) !== null && _h !== void 0 ? _h : 'relu',\n maxLen: (_j = options.maxLen) !== null && _j !== void 0 ? _j : 100,\n useTokenizer: (_k = options.useTokenizer) !== null && _k !== void 0 ? 
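A sketch of the stacking flow above: each base model contributes a weighted one-hot block, plus an optional confidence scalar, per example. Toy data; import path assumed from the bundle.

import { VotingClassifierELM } from '@astermind/astermind-community';

const voter = new VotingClassifierELM({ hiddenUnits: 64, activation: 'relu' });
const preds = [                       // shape: [numModels][numExamples]
  ['English', 'French', 'French'],
  ['English', 'Spanish', 'French'],
];
const confs = [
  [0.9, 0.6, 0.8],
  [0.7, 0.5, 0.9],
];
voter.train(preds, confs, ['English', 'French', 'French']); // also calibrates per-model weights
console.log(voter.predict(['English', 'French'], [0.8, 0.7], 1)); // fused top-1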
_k : true,\n };\n this.currentHiddenUnits = this.options.initialHiddenUnits;\n this._initializeELM();\n }\n /**\n * Initialize or reinitialize ELM with current hidden units\n */\n _initializeELM(inputDim) {\n // inputDim must be provided if elm is null or needs reinitialization\n if (inputDim === undefined && this.elm && typeof this.elm.inputDim === 'number') {\n inputDim = this.elm.inputDim;\n }\n if (inputDim === undefined) {\n // Can't initialize without inputDim\n return;\n }\n this.elm = new OnlineELM({\n inputDim: inputDim,\n outputDim: this.categories.length,\n hiddenUnits: this.currentHiddenUnits,\n activation: this.options.activation,\n });\n }\n /**\n * Train with batch data\n */\n fit(X, y) {\n // Convert to one-hot if needed\n const oneHotY = this._toOneHot(y);\n // Initialize or reinitialize if needed\n if (!this.elm || (this.elm && typeof this.elm.inputDim === 'number' && this.elm.inputDim === 0)) {\n if (X.length > 0) {\n this._initializeELM(X[0].length);\n }\n }\n if (!this.elm) {\n throw new Error('Failed to initialize ELM model');\n }\n // Initial training with OnlineELM\n if (this.elm) {\n this.elm.fit(X, oneHotY);\n }\n // Evaluate and potentially adjust\n const error = this._evaluateError(X, oneHotY);\n this.errorHistory.push(error);\n // Adaptive adjustment (may reinitialize ELM)\n this._adaptHiddenUnits(error);\n this.trained = true;\n }\n /**\n * Incremental update with adaptive adjustment\n */\n update(x, y) {\n if (!this.trained || !this.elm) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Update model with OnlineELM\n if (this.elm) {\n this.elm.update([x], [oneHotY]);\n }\n else {\n throw new Error('Model not initialized');\n }\n // Evaluate recent performance\n const recentError = this._evaluateRecentError();\n this.errorHistory.push(recentError);\n // Keep history limited\n if (this.errorHistory.length > 100) {\n this.errorHistory.shift();\n }\n // Adaptive adjustment (may reinitialize ELM)\n this._adaptHiddenUnits(recentError);\n }\n /**\n * Predict with adaptive model\n */\n predict(x, topK = 3) {\n if (!this.trained || !this.elm) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? 
x : [x];\n const results = [];\n for (const xi of XArray) {\n if (!this.elm)\n continue;\n const predVec = this.elm.predictLogitsFromVector(xi);\n if (!predVec)\n continue;\n // Convert to probabilities\n const probs = this._softmax(Array.from(predVec));\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n results.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n }\n return results;\n }\n /**\n * Adapt hidden units based on error\n */\n _adaptHiddenUnits(currentError) {\n if (this.errorHistory.length < 5)\n return; // Need some history\n const avgError = this.errorHistory.slice(-10).reduce((a, b) => a + b, 0) / Math.min(10, this.errorHistory.length);\n const recentError = this.errorHistory.slice(-3).reduce((a, b) => a + b, 0) / Math.min(3, this.errorHistory.length);\n // Grow if error is high\n if (recentError > this.options.growthThreshold &&\n this.currentHiddenUnits < this.options.maxHiddenUnits) {\n const newUnits = Math.min(this.options.maxHiddenUnits, Math.floor(this.currentHiddenUnits * this.options.growthFactor));\n if (newUnits > this.currentHiddenUnits) {\n const oldInputDim = this.elm && typeof this.elm.inputDim === 'number'\n ? this.elm.inputDim\n : undefined;\n this.currentHiddenUnits = newUnits;\n if (oldInputDim !== undefined) {\n this._initializeELM(oldInputDim);\n }\n // Note: In practice, you'd want to store recent data for retraining\n // For now, model will need to be retrained\n }\n }\n // Shrink if error is low and stable\n if (recentError < this.options.shrinkThreshold &&\n avgError < this.options.shrinkThreshold &&\n this.currentHiddenUnits > this.options.minHiddenUnits) {\n const newUnits = Math.max(this.options.minHiddenUnits, Math.floor(this.currentHiddenUnits * this.options.shrinkFactor));\n if (newUnits < this.currentHiddenUnits) {\n const oldInputDim = this.elm && typeof this.elm.inputDim === 'number'\n ? this.elm.inputDim\n : undefined;\n this.currentHiddenUnits = newUnits;\n if (oldInputDim !== undefined) {\n this._initializeELM(oldInputDim);\n }\n }\n }\n }\n /**\n * Evaluate error on data\n */\n _evaluateError(X, y) {\n var _a, _b, _c, _d;\n if (!this.elm)\n return 1.0;\n let totalError = 0;\n let count = 0;\n for (let i = 0; i < Math.min(100, X.length); i++) {\n const pred = ((_b = (_a = this.elm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [X[i]])) || ((_d = (_c = this.elm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [X[i]]));\n const predVec = Array.isArray(pred) ? pred[0] : pred;\n if (!predVec)\n continue;\n const trueIdx = this._argmax(y[i]);\n const predIdx = this._argmax(Array.from(predVec));\n if (trueIdx !== predIdx)\n totalError++;\n count++;\n }\n return count > 0 ? 
totalError / count : 1.0;\n }\n /**\n * Evaluate recent error (for incremental updates)\n */\n _evaluateRecentError() {\n // Use last few predictions for error estimate\n // In practice, you'd track actual errors\n if (this.errorHistory.length === 0)\n return 0.5;\n return this.errorHistory[this.errorHistory.length - 1];\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n /**\n * Get current number of hidden units\n */\n getHiddenUnits() {\n return this.currentHiddenUnits;\n }\n /**\n * Get error history\n */\n getErrorHistory() {\n return [...this.errorHistory];\n }\n}\n\n// forgetting-online-elm.ts — Forgetting Online ELM with time-decay for concept drift\n// Handles concept drift by decaying old samples over time\n// Import OnlineELM directly - now that we're using ES modules, this works!\n/**\n * Forgetting Online ELM with time-decay for concept drift\n * Features:\n * - Exponential decay of old samples\n * - Time-based or sample-based forgetting\n * - Sliding window for memory efficiency\n * - Handles concept drift automatically\n */\nclass ForgettingOnlineELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.elm = null;\n this.samples = [];\n this.trained = false;\n this.currentTime = 0;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n decayRate: (_b = options.decayRate) !== null && _b !== void 0 ? _b : 0.99,\n windowSize: (_c = options.windowSize) !== null && _c !== void 0 ? _c : 1000,\n timeBasedDecay: (_d = options.timeBasedDecay) !== null && _d !== void 0 ? _d : false,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n };\n // inputDim will be set during first fit\n // Note: OnlineELM will be initialized during fit() when we have inputDim\n this.elm = null;\n }\n /**\n * Initial training with batch data\n */\n fit(X, y) {\n const oneHotY = this._toOneHot(y);\n // Store samples with timestamps\n for (let i = 0; i < X.length; i++) {\n this.samples.push({\n x: [...X[i]],\n y: [...oneHotY[i]],\n timestamp: this.currentTime++,\n weight: 1.0,\n });\n }\n // Train on all samples (will initialize OnlineELM if needed)\n this._retrain();\n this.trained = true;\n }\n /**\n * Incremental update with forgetting mechanism\n */\n update(x, y) {\n if (!this.trained) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? 
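Sketch of the adaptive loop above: fit() seeds the model, update() streams samples, and the hidden layer grows or shrinks against the error thresholds. Data is illustrative; note the class's own caveat that resizing reinitializes weights.

import { AdaptiveOnlineELM } from '@astermind/astermind-community';

const model = new AdaptiveOnlineELM({ categories: ['a', 'b'], initialHiddenUnits: 64 });
model.fit(
  [[0, 1], [1, 0], [0.9, 0.1]], // numeric feature rows
  [1, 0, 0]                     // class indices (one-hot rows also accepted)
);
model.update([0.2, 0.8], 1);    // incremental step; may trigger grow/shrink
console.log(model.predict([0.1, 0.9], 2), model.getHiddenUnits());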
y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Apply decay to existing samples\n this._applyDecay();\n // Add new sample\n this.samples.push({\n x: [...x],\n y: [...oneHotY],\n timestamp: this.currentTime++,\n weight: 1.0,\n });\n // Remove old samples if window exceeded\n if (this.samples.length > this.options.windowSize) {\n const removeCount = this.samples.length - this.options.windowSize;\n this.samples.splice(0, removeCount);\n }\n // Retrain with weighted samples\n this._retrain();\n }\n /**\n * Predict with forgetting model\n */\n predict(x, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const results = [];\n for (const xi of XArray) {\n const predVec = this.elm ? this.elm.predictLogitsFromVector(xi) : null;\n if (!predVec)\n continue;\n // Convert to probabilities\n const probs = this._softmax(Array.from(predVec));\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n results.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n }\n return results;\n }\n /**\n * Apply decay to all samples\n */\n _applyDecay() {\n if (this.options.timeBasedDecay) {\n // Time-based: decay based on time difference\n const currentTime = this.currentTime;\n for (const sample of this.samples) {\n const timeDiff = currentTime - sample.timestamp;\n sample.weight *= Math.pow(this.options.decayRate, timeDiff);\n }\n }\n else {\n // Sample-based: uniform decay\n for (const sample of this.samples) {\n sample.weight *= this.options.decayRate;\n }\n }\n }\n /**\n * Retrain model with weighted samples\n */\n _retrain() {\n if (this.samples.length === 0)\n return;\n // Get inputDim from first sample\n const inputDim = this.samples[0].x.length;\n // Reinitialize ELM if inputDim changed or not set\n if (!this.elm || (this.elm && this.elm.inputDim !== inputDim)) {\n this.elm = new OnlineELM({\n inputDim: inputDim,\n outputDim: this.categories.length,\n hiddenUnits: this.options.hiddenUnits,\n activation: this.options.activation,\n });\n }\n // Train with weighted samples\n // In practice, you'd use weighted training\n // For now, we'll use samples with weights above threshold\n const threshold = 0.01;\n const activeSamples = this.samples.filter(s => s.weight > threshold);\n // Batch samples for efficiency\n const X = [];\n const Y = [];\n for (const sample of activeSamples) {\n // Repeat samples based on weight (simplified approach)\n const repetitions = Math.max(1, Math.floor(sample.weight * 10));\n for (let i = 0; i < repetitions; i++) {\n X.push(sample.x);\n Y.push(sample.y);\n }\n }\n if (X.length > 0) {\n this.elm.fit(X, Y);\n }\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n /**\n * Get sample statistics\n */\n getSampleStats() {\n const active = this.samples.filter(s => s.weight > 0.01).length;\n const avgWeight = this.samples.length > 0\n ? 
this.samples.reduce((sum, s) => sum + s.weight, 0) / this.samples.length\n : 0;\n return {\n total: this.samples.length,\n active,\n avgWeight,\n };\n }\n}\n\n// hierarchical-elm.ts — Hierarchical ELM for tree-structured classification\n// Coarse-to-fine classification with hierarchical decision making\n/**\n * Hierarchical ELM for tree-structured classification\n * Features:\n * - Coarse-to-fine classification\n * - Tree-structured decision making\n * - Multi-level probability estimation\n * - Efficient hierarchical search\n */\nclass HierarchicalELM {\n constructor(options) {\n var _a, _b, _c, _d;\n this.elms = new Map();\n this.trained = false;\n this.hierarchy = new Map(Object.entries(options.hierarchy));\n this.rootCategories = options.rootCategories;\n this.options = {\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n activation: (_b = options.activation) !== null && _b !== void 0 ? _b : 'relu',\n maxLen: (_c = options.maxLen) !== null && _c !== void 0 ? _c : 100,\n useTokenizer: (_d = options.useTokenizer) !== null && _d !== void 0 ? _d : true,\n };\n // Initialize ELM for each level\n this._initializeELMs();\n }\n /**\n * Initialize ELMs for each level in hierarchy\n */\n _initializeELMs() {\n // Root level ELM\n this.elms.set('root', new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.rootCategories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n }));\n // Child level ELMs\n for (const [parent, children] of this.hierarchy.entries()) {\n this.elms.set(parent, new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: children,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n }));\n }\n }\n /**\n * Train hierarchical ELM\n * @param X Input features\n * @param yLabels Full hierarchical paths (e.g., ['root', 'parent', 'child'])\n */\n train(X, yLabels) {\n var _a, _b, _c, _d;\n // Group samples by level\n const levelData = new Map();\n // Root level\n const rootX = [];\n const rootY = [];\n for (let i = 0; i < X.length; i++) {\n if (yLabels[i].length > 0) {\n rootX.push(X[i]);\n rootY.push(this.rootCategories.indexOf(yLabels[i][0]));\n }\n }\n levelData.set('root', { X: rootX, y: rootY });\n // Child levels\n for (const [parent, children] of this.hierarchy.entries()) {\n const parentX = [];\n const parentY = [];\n for (let i = 0; i < X.length; i++) {\n const path = yLabels[i];\n const parentIdx = path.indexOf(parent);\n if (parentIdx >= 0 && parentIdx < path.length - 1) {\n const child = path[parentIdx + 1];\n if (children.includes(child)) {\n parentX.push(X[i]);\n parentY.push(children.indexOf(child));\n }\n }\n }\n if (parentX.length > 0) {\n levelData.set(parent, { X: parentX, y: parentY });\n }\n }\n // Train each ELM\n for (const [level, data] of levelData.entries()) {\n const elm = this.elms.get(level);\n if (elm && data.X.length > 0) {\n (_b = (_a = elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, level === 'root' ? this.rootCategories : this.hierarchy.get(level) || []);\n (_d = (_c = elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, data.X, data.y);\n }\n }\n this.trained = true;\n }\n /**\n * Predict with hierarchical model\n */\n predict(x, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? 
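Sketch of the drift-handling API above (decayRate/windowSize values illustrative, import path assumed):

import { ForgettingOnlineELM } from '@astermind/astermind-community';

const drift = new ForgettingOnlineELM({ categories: ['up', 'down'], decayRate: 0.95, windowSize: 500 });
drift.fit([[1, 0], [0, 1]], [0, 1]);
drift.update([0.9, 0.2], 0);         // older samples decay toward the 0.01 retrain cutoff
console.log(drift.getSampleStats()); // { total, active, avgWeight }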
x : [x];\n const allResults = [];\n for (const xi of XArray) {\n const results = this._predictHierarchical(xi, topK);\n allResults.push(...results);\n }\n return allResults;\n }\n /**\n * Hierarchical prediction from root to leaf\n */\n _predictHierarchical(x, topK) {\n var _a, _b, _c, _d;\n const rootELM = this.elms.get('root');\n const rootPred = ((_b = (_a = rootELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n const allPaths = [];\n // For each root prediction, explore children\n for (const rootPredItem of rootPred.slice(0, topK)) {\n const rootLabel = rootPredItem.label || this.rootCategories[rootPredItem.index || 0];\n const rootProb = rootPredItem.prob || 0;\n // Check if root has children\n const children = this.hierarchy.get(rootLabel);\n if (!children || children.length === 0) {\n // Leaf node\n allPaths.push({\n path: [rootLabel],\n label: rootLabel,\n prob: rootProb,\n levelProbs: [rootProb],\n });\n continue;\n }\n // Predict children\n const childELM = this.elms.get(rootLabel);\n if (childELM) {\n const childPred = ((_d = (_c = childELM).predictFromVector) === null || _d === void 0 ? void 0 : _d.call(_c, [x], topK)) || [];\n for (const childPredItem of childPred.slice(0, topK)) {\n const childLabel = childPredItem.label || children[childPredItem.index || 0];\n const childProb = childPredItem.prob || 0;\n const combinedProb = rootProb * childProb;\n allPaths.push({\n path: [rootLabel, childLabel],\n label: childLabel,\n prob: combinedProb,\n levelProbs: [rootProb, childProb],\n });\n }\n }\n else {\n // No child ELM, use root\n allPaths.push({\n path: [rootLabel],\n label: rootLabel,\n prob: rootProb,\n levelProbs: [rootProb],\n });\n }\n }\n // Sort by probability and return top-K\n allPaths.sort((a, b) => b.prob - a.prob);\n return allPaths.slice(0, topK);\n }\n /**\n * Get hierarchy structure\n */\n getHierarchy() {\n return new Map(this.hierarchy);\n }\n /**\n * Get root categories\n */\n getRootCategories() {\n return [...this.rootCategories];\n }\n}\n\n// attention-enhanced-elm.ts — Attention-Enhanced ELM with attention mechanisms\n// Query-key-value attention and self-attention for sequences\n/**\n * Attention-Enhanced ELM with attention mechanisms\n * Features:\n * - Query-key-value attention in hidden layer\n * - Self-attention for sequences\n * - Multi-head attention support\n * - Context-aware classification\n */\nclass AttentionEnhancedELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.attentionWeights = []; // [head][sequence][weight]\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n attentionHeads: (_b = options.attentionHeads) !== null && _b !== void 0 ? _b : 4,\n attentionDim: (_c = options.attentionDim) !== null && _c !== void 0 ? _c : 64,\n useSelfAttention: (_d = options.useSelfAttention) !== null && _d !== void 0 ? _d : true,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
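Sketch of the coarse-to-fine API above; note that, per the training loop, label paths start at the root category itself. Toy features; import path assumed from the bundle.

import { HierarchicalELM } from '@astermind/astermind-community';

const tree = new HierarchicalELM({
  rootCategories: ['animal', 'vehicle'],
  hierarchy: { animal: ['cat', 'dog'], vehicle: ['car', 'bike'] },
});
tree.train(
  [[1, 0, 0], [0, 1, 0], [0, 0, 1]],
  [['animal', 'cat'], ['animal', 'dog'], ['vehicle', 'car']]
);
console.log(tree.predict([1, 0, 0], 2)); // [{ path, label, prob, levelProbs }, ...]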
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with attention-enhanced features\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract features with attention\n const attentionFeatures = this._extractAttentionFeatures(X);\n // Train base ELM on attention-enhanced features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, attentionFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Predict with attention\n */\n predict(X, topK = 3, returnAttention = false) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Extract attention features\n const attentionFeatures = this._extractAttentionFeatures([x])[0];\n // Predict\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [attentionFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnAttention && this.attentionWeights.length > 0) {\n result.attentionWeights = this.attentionWeights[this.attentionWeights.length - 1];\n }\n results.push(result);\n }\n }\n return results;\n }\n /**\n * Extract features with attention mechanism\n */\n _extractAttentionFeatures(X) {\n const features = [];\n for (const x of X) {\n // Compute attention for each head\n const headFeatures = [];\n for (let head = 0; head < this.options.attentionHeads; head++) {\n const attentionOutput = this._computeAttention(x, head);\n headFeatures.push(attentionOutput);\n }\n // Concatenate all heads\n const concatenated = headFeatures.flat();\n // Project to hidden units size\n const projected = this._projectToHiddenSize(concatenated);\n features.push(projected);\n }\n return features;\n }\n /**\n * Compute attention for a sequence\n */\n _computeAttention(x, headIndex) {\n // Simple attention mechanism: Q, K, V projection\n const seqLen = x.length;\n const dim = this.options.attentionDim;\n // Generate Q, K, V (simplified - using random projections)\n const Q = this._project(x, dim, `Q_${headIndex}`);\n const K = this._project(x, dim, `K_${headIndex}`);\n const V = this._project(x, dim, `V_${headIndex}`);\n // Compute attention scores: Q * K^T\n const scores = [];\n for (let i = 0; i < seqLen; i++) {\n let score = 0;\n for (let j = 0; j < dim; j++) {\n score += Q[j] * K[j];\n }\n scores.push(score / Math.sqrt(dim)); // Scaled dot-product\n }\n // Softmax attention weights\n const weights = this._softmax(scores);\n // Apply attention to values\n const output = new Array(dim).fill(0);\n for (let i = 0; i < seqLen; i++) {\n for (let j = 0; j < dim; j++) {\n output[j] += weights[i] * V[j];\n }\n }\n // Store attention weights for this head\n if (!this.attentionWeights[headIndex]) {\n this.attentionWeights[headIndex] = [];\n }\n this.attentionWeights[headIndex].push(weights);\n return output;\n }\n /**\n * Project input to attention dimension\n */\n _project(x, dim, key) {\n // Simple linear projection (in 
practice, you'd use learned weights)\n const projected = new Array(dim).fill(0);\n const scale = Math.sqrt(2.0 / (x.length + dim));\n for (let i = 0; i < dim; i++) {\n for (let j = 0; j < x.length; j++) {\n // Simple hash-based projection for determinism\n const hash = this._hash(`${key}_${i}_${j}`);\n projected[i] += x[j] * (hash * scale);\n }\n }\n return projected;\n }\n /**\n * Project attention output to hidden units size\n */\n _projectToHiddenSize(attentionOutput) {\n const hiddenSize = this.options.hiddenUnits;\n const output = new Array(hiddenSize).fill(0);\n const scale = Math.sqrt(2.0 / (attentionOutput.length + hiddenSize));\n for (let i = 0; i < hiddenSize; i++) {\n for (let j = 0; j < attentionOutput.length; j++) {\n const hash = this._hash(`proj_${i}_${j}`);\n output[i] += attentionOutput[j] * (hash * scale);\n }\n // Apply activation\n if (this.options.activation === 'relu') {\n output[i] = Math.max(0, output[i]);\n }\n else if (this.options.activation === 'tanh') {\n output[i] = Math.tanh(output[i]);\n }\n else if (this.options.activation === 'sigmoid') {\n output[i] = 1 / (1 + Math.exp(-output[i]));\n }\n }\n return output;\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n _hash(str) {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = ((hash << 5) - hash) + char;\n hash = hash & hash; // Convert to 32-bit integer\n }\n return (hash / 2147483647); // Normalize to [-1, 1]\n }\n /**\n * Get attention weights for last prediction\n */\n getAttentionWeights() {\n return this.attentionWeights.map(head => [...head]);\n }\n}\n\n// variational-elm.ts — Variational ELM with uncertainty estimation\n// Probabilistic ELM with Bayesian inference and confidence intervals\n/**\n * Variational ELM with uncertainty estimation\n * Features:\n * - Probabilistic predictions with uncertainty\n * - Bayesian inference\n * - Confidence intervals\n * - Robust predictions with uncertainty quantification\n */\nclass VariationalELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.weightSamples = []; // Sampled weight matrices\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n priorVariance: (_b = options.priorVariance) !== null && _b !== void 0 ? _b : 1.0,\n posteriorSamples: (_c = options.posteriorSamples) !== null && _c !== void 0 ? _c : 10,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train variational ELM\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
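Sketch of the attention wrapper above; each input row is treated as the sequence the heads attend over. Toy values; import path assumed.

import { AttentionEnhancedELM } from '@astermind/astermind-community';

const att = new AttentionEnhancedELM({ categories: ['spam', 'ham'], attentionHeads: 2, attentionDim: 16 });
att.train([[0.1, 0.9, 0.3], [0.8, 0.2, 0.7]], ['spam', 'ham']);
const out = att.predict([0.2, 0.8, 0.4], 1, true); // third arg attaches per-head attention weights
console.log(out[0].label, out[0].attentionWeights);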
void 0 : _d.call(_c, X, labelIndices);\n // Sample weights for uncertainty estimation\n this._sampleWeights();\n this.trained = true;\n }\n /**\n * Predict with uncertainty estimation\n */\n predict(X, topK = 3, includeUncertainty = true) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Get base prediction\n const basePreds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n // Estimate uncertainty\n const uncertainty = includeUncertainty ? this._estimateUncertainty(x) : 0.5;\n for (const pred of basePreds.slice(0, topK)) {\n const prob = pred.prob || 0;\n const confidence = Math.max(0, Math.min(1, 1 - uncertainty));\n // Compute confidence interval\n const stdDev = Math.sqrt(uncertainty * prob * (1 - prob));\n const confidenceInterval = [\n Math.max(0, prob - 1.96 * stdDev),\n Math.min(1, prob + 1.96 * stdDev)\n ];\n allResults.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob,\n confidence,\n uncertainty,\n confidenceInterval,\n });\n }\n }\n return allResults;\n }\n /**\n * Estimate uncertainty using weight sampling\n */\n _estimateUncertainty(x) {\n if (this.weightSamples.length === 0) {\n return 0.5; // Default uncertainty\n }\n // Get predictions from multiple weight samples\n const predictions = [];\n for (const weights of this.weightSamples) {\n // Simplified: use variance in predictions as uncertainty measure\n // In practice, you'd compute actual predictions with sampled weights\n const pred = this._predictWithWeights(x, weights);\n predictions.push(pred);\n }\n // Compute variance as uncertainty measure\n const mean = predictions.reduce((a, b) => a + b, 0) / predictions.length;\n const variance = predictions.reduce((sum, p) => sum + Math.pow(p - mean, 2), 0) / predictions.length;\n // Normalize to [0, 1]\n return Math.min(1, variance);\n }\n /**\n * Predict with specific weight matrix (simplified)\n */\n _predictWithWeights(x, weights) {\n // Simplified prediction - in practice, you'd use the actual ELM forward pass\n // This is a placeholder for uncertainty estimation\n return 0.5;\n }\n /**\n * Sample weight matrices for uncertainty estimation\n */\n _sampleWeights() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const baseWeights = model.W;\n this.weightSamples = [];\n // Sample weights by adding Gaussian noise\n for (let s = 0; s < this.options.posteriorSamples; s++) {\n const sampled = [];\n for (let i = 0; i < baseWeights.length; i++) {\n sampled[i] = [];\n for (let j = 0; j < baseWeights[i].length; j++) {\n // Sample from posterior (Gaussian around base weight)\n const noise = this._gaussianRandom(0, this.options.priorVariance);\n sampled[i][j] = baseWeights[i][j] + noise;\n }\n }\n this.weightSamples.push(sampled);\n }\n }\n _gaussianRandom(mean, variance) {\n // Box-Muller transform\n const u1 = Math.random();\n const u2 = Math.random();\n const z0 = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);\n return mean + z0 * Math.sqrt(variance);\n }\n}\n\n// time-series-elm.ts — Time-Series ELM for temporal pattern recognition\n// Sequence-to-sequence ELM with temporal dependencies\n/**\n * Time-Series ELM for temporal pattern recognition\n * Features:\n * - Temporal pattern recognition\n * - Optional recurrent connections\n * - Sequence-to-sequence prediction\n * - Forecasting capabilities\n 
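Sketch of the uncertainty API above; predictions carry a variance-based uncertainty and a 95% interval built from the ±1.96·σ bound shown in predict(). Toy data; import path assumed.

import { VariationalELM } from '@astermind/astermind-community';

const velm = new VariationalELM({ categories: ['ok', 'fault'], posteriorSamples: 20 });
velm.train([[0.1, 0.2], [0.9, 0.8]], ['ok', 'fault']);
const [best] = velm.predict([0.85, 0.75], 1);
console.log(best.prob, best.uncertainty, best.confidenceInterval);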
*/\nclass TimeSeriesELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.history = []; // Store recent history for recurrent mode\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n sequenceLength: (_b = options.sequenceLength) !== null && _b !== void 0 ? _b : 10,\n lookbackWindow: (_c = options.lookbackWindow) !== null && _c !== void 0 ? _c : 5,\n useRecurrent: (_d = options.useRecurrent) !== null && _d !== void 0 ? _d : false,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on time-series data\n * @param X Sequences of features (each element is a time step)\n * @param y Labels for each sequence\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Flatten sequences to features\n const flattenedFeatures = this._flattenSequences(X);\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, flattenedFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Train on single sequences (convenience method)\n */\n trainSequences(sequences, labels) {\n this.train(sequences, labels);\n }\n /**\n * Predict from time-series sequence\n */\n predict(sequence, topK = 3) {\n var _a, _b, _c, _d;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const sequences = Array.isArray(sequence[0][0])\n ? sequence\n : [sequence];\n const allResults = [];\n for (const seq of sequences) {\n // Flatten sequence to features\n const features = this._flattenSequence(seq);\n // Update history if using recurrent mode\n if (this.options.useRecurrent) {\n this._updateHistory(features);\n // Use history-enhanced features\n const enhancedFeatures = this._enhanceWithHistory(features);\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [enhancedFeatures], topK)) || [];\n allResults.push(...preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n else {\n const preds = ((_d = (_c = this.elm).predictFromVector) === null || _d === void 0 ? 
void 0 : _d.call(_c, [features], topK)) || [];\n allResults.push(...preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n }\n return allResults;\n }\n /**\n * Forecast future values (for regression/forecasting tasks)\n */\n forecast(sequence, steps = 1) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before forecasting');\n }\n const forecasts = [];\n let currentSeq = [...sequence];\n for (let step = 0; step < steps; step++) {\n const features = this._flattenSequence(currentSeq);\n const pred = ((_b = (_a = this.elm).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, features)) || [];\n // Use prediction as next step (simplified - in practice, you'd have a regression head)\n forecasts.push([...pred]);\n // Update sequence for next step\n currentSeq.push(pred);\n if (currentSeq.length > this.options.sequenceLength) {\n currentSeq.shift();\n }\n }\n return forecasts;\n }\n /**\n * Flatten sequences to feature vectors\n */\n _flattenSequences(sequences) {\n return sequences.map(seq => this._flattenSequence(seq));\n }\n /**\n * Flatten a single sequence\n */\n _flattenSequence(sequence) {\n var _a;\n // Concatenate all time steps\n const flattened = [];\n // Take last lookbackWindow steps\n const relevantSteps = sequence.slice(-this.options.lookbackWindow);\n for (const step of relevantSteps) {\n flattened.push(...step);\n }\n // Pad if necessary\n while (flattened.length < this.options.lookbackWindow * (((_a = sequence[0]) === null || _a === void 0 ? void 0 : _a.length) || 1)) {\n flattened.push(0);\n }\n return flattened;\n }\n /**\n * Update history for recurrent mode\n */\n _updateHistory(features) {\n this.history.push([...features]);\n // Keep only recent history\n if (this.history.length > this.options.lookbackWindow) {\n this.history.shift();\n }\n }\n /**\n * Enhance features with history (recurrent mode)\n */\n _enhanceWithHistory(currentFeatures) {\n if (this.history.length === 0) {\n return currentFeatures;\n }\n // Concatenate history with current features\n const historyFeatures = this.history.flat();\n return [...historyFeatures, ...currentFeatures];\n }\n /**\n * Clear history (useful for new sequences)\n */\n clearHistory() {\n this.history = [];\n }\n}\n\n// transfer-learning-elm.ts — Transfer Learning ELM\n// Pre-trained ELM adaptation, domain adaptation, and few-shot learning\n/**\n * Transfer Learning ELM\n * Features:\n * - Pre-trained model adaptation\n * - Domain adaptation\n * - Few-shot learning\n * - Fine-tuning capabilities\n */\nclass TransferLearningELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.sourceModel = null;\n this.trained = false;\n this.categories = options.categories;\n this.sourceModel = options.sourceModel || null;\n this.options = {\n categories: options.categories,\n sourceModel: this.sourceModel,\n freezeBase: (_a = options.freezeBase) !== null && _a !== void 0 ? _a : false,\n fineTuneLayers: (_b = options.fineTuneLayers) !== null && _b !== void 0 ? _b : 1,\n hiddenUnits: (_c = options.hiddenUnits) !== null && _c !== void 0 ? _c : 256,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
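TimeSeriesELM, completed above, classifies a sequence by flattening its last lookbackWindow steps into one feature vector, optionally prepending a rolling history when useRecurrent is set; forecast() feeds the model's own logits back in as the next time step. A sketch under the assumption that the class is re-exported from the package entry point:

import { TimeSeriesELM } from '@astermind/astermind-community'; // assumed export

const ts = new TimeSeriesELM({
  categories: ['rising', 'falling'],
  lookbackWindow: 3, // only the last 3 steps become features
  useRecurrent: false,
});

// Each training example is a sequence of time steps, one number[] per step.
ts.train(
  [[[1], [2], [3], [4]], [[4], [3], [2], [1]]],
  ['rising', 'falling']
);

console.log(ts.predict([[2], [3], [4]], 1));  // classify a new sequence
console.log(ts.forecast([[2], [3], [4]], 2)); // roll forward 2 steps on its own logits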
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Transfer weights from source model if available\n if (this.sourceModel) {\n this._transferWeights();\n }\n }\n /**\n * Transfer weights from source model\n */\n _transferWeights() {\n var _a, _b;\n if (!this.sourceModel)\n return;\n const sourceModelData = this.sourceModel.model;\n const targetModel = this.elm.model;\n if (!sourceModelData || !targetModel)\n return;\n // Transfer hidden layer weights if dimensions match\n if (sourceModelData.W && targetModel.W) {\n const sourceW = sourceModelData.W;\n const targetW = targetModel.W;\n // Copy matching dimensions\n for (let i = 0; i < Math.min(sourceW.length, targetW.length); i++) {\n for (let j = 0; j < Math.min(((_a = sourceW[i]) === null || _a === void 0 ? void 0 : _a.length) || 0, ((_b = targetW[i]) === null || _b === void 0 ? void 0 : _b.length) || 0); j++) {\n if (!this.options.freezeBase) {\n targetW[i][j] = sourceW[i][j];\n }\n }\n }\n }\n // Transfer biases if available\n if (sourceModelData.b && targetModel.b) {\n const sourceB = sourceModelData.b;\n const targetB = targetModel.b;\n for (let i = 0; i < Math.min(sourceB.length, targetB.length); i++) {\n if (!this.options.freezeBase) {\n targetB[i] = sourceB[i];\n }\n }\n }\n }\n /**\n * Train with transfer learning\n * @param X Target domain features\n * @param y Target domain labels\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // If source model exists and we're not freezing, fine-tune\n if (this.sourceModel && !this.options.freezeBase) {\n // Fine-tune: train on new data with transferred weights\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices, {\n reuseWeights: true, // Reuse transferred weights\n });\n }\n else {\n // Standard training\n (_f = (_e = this.elm).setCategories) === null || _f === void 0 ? void 0 : _f.call(_e, this.options.categories);\n (_h = (_g = this.elm).trainFromData) === null || _h === void 0 ? void 0 : _h.call(_g, X, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Few-shot learning: train with very few examples\n */\n fewShotTrain(X, y, shots = 5) {\n if (!this.sourceModel) {\n throw new Error('Few-shot learning requires a pre-trained source model');\n }\n // Use only a few examples\n const limitedX = X.slice(0, shots);\n const limitedY = y.slice(0, shots);\n // Fine-tune on limited data\n this.train(limitedX, limitedY);\n }\n /**\n * Predict with transferred model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? 
void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n /**\n * Load pre-trained model\n */\n loadSourceModel(model) {\n this.sourceModel = model;\n this._transferWeights();\n }\n /**\n * Export current model for use as source in other transfers\n */\n exportModel() {\n return {\n model: this.elm.model,\n categories: this.options.categories,\n config: {\n hiddenUnits: this.options.hiddenUnits,\n activation: this.options.activation,\n },\n };\n }\n}\n\n// graph-elm.ts — Graph ELM for graph-structured data\n// Graph neural network + ELM for node/edge classification\n/**\n * Graph ELM for graph-structured data\n * Features:\n * - Node feature learning\n * - Graph structure encoding\n * - Edge-aware classification\n * - Graph convolution operations\n */\nclass GraphELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.trained = false;\n this.nodeFeatureMap = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n aggregationType: (_b = options.aggregationType) !== null && _b !== void 0 ? _b : 'mean',\n numLayers: (_c = options.numLayers) !== null && _c !== void 0 ? _c : 2,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on graph data\n * @param graphs Array of graphs\n * @param y Labels for each graph (or node labels)\n */\n train(graphs, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract graph features\n const graphFeatures = graphs.map(graph => this._extractGraphFeatures(graph));\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
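TransferLearningELM's transfer step is literal weight copying wherever source and target dimensions overlap, followed by fine-tuning unless freezeBase is set; fewShotTrain() is simply train() on the first N examples. A sketch chaining exportModel() into a second instance (import path assumed):

import { TransferLearningELM } from '@astermind/astermind-community'; // assumed export

const source = new TransferLearningELM({ categories: ['spam', 'ham'], hiddenUnits: 64 });
source.train([[1, 0], [0, 1]], ['spam', 'ham']);

// exportModel() returns { model, categories, config }, which _transferWeights
// can consume because it reads sourceModel.model.
const target = new TransferLearningELM({
  categories: ['spam', 'ham'],
  sourceModel: source.exportModel(),
  freezeBase: false, // false: transferred weights are fine-tuned on new data
  hiddenUnits: 64,   // matching dimensions maximize how much transfers
});
target.fewShotTrain([[0.9, 0.1], [0.1, 0.9]], ['spam', 'ham'], 2);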
void 0 : _d.call(_c, graphFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features from graph structure\n */\n _extractGraphFeatures(graph) {\n // Build adjacency map\n const adjacencyMap = new Map();\n for (const edge of graph.edges) {\n if (!adjacencyMap.has(edge.source)) {\n adjacencyMap.set(edge.source, []);\n }\n if (!adjacencyMap.has(edge.target)) {\n adjacencyMap.set(edge.target, []);\n }\n adjacencyMap.get(edge.source).push(String(edge.target));\n adjacencyMap.get(edge.target).push(String(edge.source));\n }\n // Compute node features through graph convolution\n const nodeFeatures = new Map();\n // Initialize with node features\n for (const node of graph.nodes) {\n nodeFeatures.set(node.id, [...node.features]);\n }\n // Graph convolution layers\n for (let layer = 0; layer < this.options.numLayers; layer++) {\n const newFeatures = new Map();\n for (const node of graph.nodes) {\n const neighbors = adjacencyMap.get(node.id) || [];\n const neighborFeatures = neighbors\n .map(nid => {\n const node = graph.nodes.find(n => String(n.id) === String(nid));\n return node ? nodeFeatures.get(node.id) : null;\n })\n .filter(f => f !== null);\n // Aggregate neighbor features\n const aggregated = this._aggregateNeighbors(neighborFeatures);\n // Combine with self features\n const selfFeatures = nodeFeatures.get(node.id) || [];\n const combined = this._combineFeatures(selfFeatures, aggregated);\n newFeatures.set(node.id, combined);\n }\n // Update features\n for (const [id, features] of newFeatures) {\n nodeFeatures.set(id, features);\n }\n }\n // Aggregate all node features to graph-level features\n const allNodeFeatures = Array.from(nodeFeatures.values());\n const graphFeatures = this._aggregateNodes(allNodeFeatures);\n return graphFeatures;\n }\n /**\n * Aggregate neighbor features\n */\n _aggregateNeighbors(neighborFeatures) {\n if (neighborFeatures.length === 0) {\n return [];\n }\n const dim = neighborFeatures[0].length;\n const aggregated = new Array(dim).fill(0);\n for (const features of neighborFeatures) {\n for (let i = 0; i < dim; i++) {\n if (this.options.aggregationType === 'mean') {\n aggregated[i] += features[i] / neighborFeatures.length;\n }\n else if (this.options.aggregationType === 'sum') {\n aggregated[i] += features[i];\n }\n else if (this.options.aggregationType === 'max') {\n aggregated[i] = Math.max(aggregated[i], features[i]);\n }\n }\n }\n return aggregated;\n }\n /**\n * Combine self and neighbor features\n */\n _combineFeatures(self, neighbors) {\n const dim = Math.max(self.length, neighbors.length);\n const combined = new Array(dim).fill(0);\n for (let i = 0; i < dim; i++) {\n const selfVal = i < self.length ? self[i] : 0;\n const neighborVal = i < neighbors.length ? 
neighbors[i] : 0;\n combined[i] = selfVal + neighborVal; // Simple addition\n }\n // Apply activation\n if (this.options.activation === 'relu') {\n return combined.map(x => Math.max(0, x));\n }\n else if (this.options.activation === 'tanh') {\n return combined.map(x => Math.tanh(x));\n }\n else if (this.options.activation === 'sigmoid') {\n return combined.map(x => 1 / (1 + Math.exp(-x)));\n }\n return combined;\n }\n /**\n * Aggregate all node features to graph level\n */\n _aggregateNodes(nodeFeatures) {\n if (nodeFeatures.length === 0) {\n return [];\n }\n const dim = nodeFeatures[0].length;\n const graphFeatures = new Array(dim).fill(0);\n for (const features of nodeFeatures) {\n for (let i = 0; i < dim; i++) {\n if (this.options.aggregationType === 'mean') {\n graphFeatures[i] += features[i] / nodeFeatures.length;\n }\n else if (this.options.aggregationType === 'sum') {\n graphFeatures[i] += features[i];\n }\n else if (this.options.aggregationType === 'max') {\n graphFeatures[i] = Math.max(graphFeatures[i], features[i]);\n }\n }\n }\n return graphFeatures;\n }\n /**\n * Predict on graph\n */\n predict(graph, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const graphs = Array.isArray(graph) ? graph : [graph];\n const results = [];\n for (const g of graphs) {\n const graphFeatures = this._extractGraphFeatures(g);\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [graphFeatures], topK)) || [];\n // Store node features for first node (for interpretability)\n const firstNodeFeatures = g.nodes.length > 0\n ? this.nodeFeatureMap.get(g.nodes[0].id) || g.nodes[0].features\n : undefined;\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n nodeFeatures: firstNodeFeatures,\n });\n }\n }\n return results;\n }\n}\n\n// adaptive-kernel-elm.ts — Adaptive Kernel ELM\n// Data-dependent kernel parameters with local kernel adaptation\n/**\n * Adaptive Kernel ELM with data-dependent kernel parameters\n * Features:\n * - Local kernel adaptation\n * - Sample-specific kernels\n * - Adaptive gamma/degree parameters\n * - Improved performance on non-stationary data\n */\nclass AdaptiveKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.trained = false;\n this.adaptiveKernels = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'rbf',\n adaptiveGamma: (_b = options.adaptiveGamma) !== null && _b !== void 0 ? _b : true,\n adaptiveDegree: (_c = options.adaptiveDegree) !== null && _c !== void 0 ? _c : false,\n baseGamma: (_d = options.baseGamma) !== null && _d !== void 0 ? _d : 1.0,\n baseDegree: (_e = options.baseDegree) !== null && _e !== void 0 ? _e : 2,\n baseCoef0: (_f = options.baseCoef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? 
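GraphELM runs numLayers rounds of neighbor aggregation over an adjacency list built from the edges, pools the node vectors into a single graph-level vector, and classifies that with the ELM. A sketch distinguishing two small topologies (import path assumed):

import { GraphELM } from '@astermind/astermind-community'; // assumed export

const gnn = new GraphELM({
  categories: ['chain', 'triangle'],
  aggregationType: 'mean', // 'mean' | 'sum' | 'max'
  numLayers: 2,
});

const nodes = [
  { id: 0, features: [1, 0] },
  { id: 1, features: [0, 1] },
  { id: 2, features: [1, 1] },
];
const chain = { nodes, edges: [{ source: 0, target: 1 }, { source: 1, target: 2 }] };
const triangle = { nodes, edges: [...chain.edges, { source: 2, target: 0 }] };

gnn.train([chain, triangle], ['chain', 'triangle']);
console.log(gnn.predict(chain, 1));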
true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.baseGamma,\n degree: this.options.baseDegree,\n coef0: this.options.baseCoef0,\n });\n }\n /**\n * Train with adaptive kernels\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Compute adaptive kernel parameters for each sample\n if (this.options.adaptiveGamma || this.options.adaptiveDegree) {\n this._computeAdaptiveKernels(X);\n }\n // Train base KernelELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices);\n this.trained = true;\n }\n /**\n * Compute adaptive kernel parameters\n */\n _computeAdaptiveKernels(X) {\n // Compute local statistics for each sample\n for (let i = 0; i < X.length; i++) {\n const x = X[i];\n const neighbors = this._findNeighbors(x, X, 5); // Find 5 nearest neighbors\n const params = {};\n if (this.options.adaptiveGamma) {\n // Adapt gamma based on local density\n const localDensity = this._computeLocalDensity(x, neighbors);\n params.gamma = this.options.baseGamma / (1 + localDensity);\n }\n if (this.options.adaptiveDegree) {\n // Adapt degree based on local complexity\n const localComplexity = this._computeLocalComplexity(neighbors);\n params.degree = Math.max(1, Math.round(this.options.baseDegree * localComplexity));\n }\n this.adaptiveKernels.set(i, params);\n }\n }\n /**\n * Find nearest neighbors\n */\n _findNeighbors(x, X, k) {\n const distances = X.map((xi, i) => ({\n index: i,\n dist: this._euclideanDistance(x, xi),\n }));\n distances.sort((a, b) => a.dist - b.dist);\n return distances.slice(1, k + 1).map(d => X[d.index]);\n }\n /**\n * Compute local density\n */\n _computeLocalDensity(x, neighbors) {\n if (neighbors.length === 0)\n return 1;\n const avgDist = neighbors.reduce((sum, n) => sum + this._euclideanDistance(x, n), 0) / neighbors.length;\n return avgDist;\n }\n /**\n * Compute local complexity\n */\n _computeLocalComplexity(neighbors) {\n if (neighbors.length < 2)\n return 1;\n // Compute variance in neighbors as complexity measure\n const variances = [];\n for (let i = 0; i < neighbors[0].length; i++) {\n const values = neighbors.map(n => n[i]);\n const mean = values.reduce((a, b) => a + b, 0) / values.length;\n const variance = values.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0) / values.length;\n variances.push(variance);\n }\n const avgVariance = variances.reduce((a, b) => a + b, 0) / variances.length;\n return Math.sqrt(avgVariance);\n }\n _euclideanDistance(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += Math.pow(a[i] - b[i], 2);\n }\n return Math.sqrt(sum);\n }\n /**\n * Predict with adaptive kernels\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Get base prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? 
void 0 : _b.call(_a, [x], topK)) || [];\n // Get adaptive kernel params for this sample (if available)\n const sampleIndex = XArray.indexOf(x);\n const kernelParams = this.adaptiveKernels.get(sampleIndex);\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n kernelParams,\n });\n }\n }\n return results;\n }\n}\n\n// sparse-kernel-elm.ts — Sparse Kernel ELM\n// Sparse kernel matrix approximation with landmark selection\n/**\n * Sparse Kernel ELM with landmark-based approximation\n * Features:\n * - Sparse kernel matrix approximation\n * - Landmark selection strategies\n * - Reduced computational complexity\n * - Scalable to large datasets\n */\nclass SparseKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.landmarks = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'rbf',\n numLandmarks: (_b = options.numLandmarks) !== null && _b !== void 0 ? _b : 100,\n landmarkSelection: (_c = options.landmarkSelection) !== null && _c !== void 0 ? _c : 'kmeans',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train with sparse kernel approximation\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Select landmarks\n this._selectLandmarks(X);\n // Train on landmarks (reduced dataset)\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
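AdaptiveKernelELM shrinks gamma where data is dense (gamma_i = baseGamma / (1 + mean distance to the 5 nearest neighbors)), so sparse regions get wider RBF kernels. One caveat visible in predict(): kernelParams is looked up by the sample's position in the prediction batch against parameters computed over the training set, so the echoed parameters only correspond when those indices happen to align. Sketch (import path assumed):

import { AdaptiveKernelELM } from '@astermind/astermind-community'; // assumed export

const akelm = new AdaptiveKernelELM({
  categories: ['a', 'b'],
  kernelType: 'rbf',
  adaptiveGamma: true, // per-sample gamma from local density
  baseGamma: 1.0,
});
akelm.train([[0, 0], [0.1, 0], [5, 5], [5.1, 5]], ['a', 'a', 'b', 'b']);

// kernelParams here is whatever was computed for training index 0.
console.log(akelm.predict([[0.05, 0]], 1));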
void 0 : _d.call(_c, this.landmarks, this._getLandmarkLabels(X, y, labelIndices));\n this.trained = true;\n }\n /**\n * Select landmark points\n */\n _selectLandmarks(X) {\n const numLandmarks = Math.min(this.options.numLandmarks, X.length);\n if (this.options.landmarkSelection === 'random') {\n // Random selection\n const indices = new Set();\n while (indices.size < numLandmarks) {\n indices.add(Math.floor(Math.random() * X.length));\n }\n this.landmarks = Array.from(indices).map(i => [...X[i]]);\n }\n else if (this.options.landmarkSelection === 'kmeans') {\n // K-means centroids as landmarks\n this.landmarks = this._kmeansLandmarks(X, numLandmarks);\n }\n else if (this.options.landmarkSelection === 'diverse') {\n // Diverse selection (maximize distance)\n this.landmarks = this._diverseLandmarks(X, numLandmarks);\n }\n else {\n // Default: first N points\n this.landmarks = X.slice(0, numLandmarks).map(x => [...x]);\n }\n }\n /**\n * K-means landmark selection\n */\n _kmeansLandmarks(X, k) {\n // Simplified k-means (in practice, use proper k-means)\n const centroids = [];\n const dim = X[0].length;\n // Initialize centroids randomly\n for (let i = 0; i < k; i++) {\n const idx = Math.floor(Math.random() * X.length);\n centroids.push([...X[idx]]);\n }\n // Simple iteration (simplified)\n for (let iter = 0; iter < 10; iter++) {\n const clusters = Array(k).fill(null).map(() => []);\n // Assign points to nearest centroid\n for (const x of X) {\n let minDist = Infinity;\n let nearest = 0;\n for (let i = 0; i < k; i++) {\n const dist = this._euclideanDistance(x, centroids[i]);\n if (dist < minDist) {\n minDist = dist;\n nearest = i;\n }\n }\n clusters[nearest].push(x);\n }\n // Update centroids\n for (let i = 0; i < k; i++) {\n if (clusters[i].length > 0) {\n const newCentroid = new Array(dim).fill(0);\n for (const point of clusters[i]) {\n for (let j = 0; j < dim; j++) {\n newCentroid[j] += point[j];\n }\n }\n for (let j = 0; j < dim; j++) {\n newCentroid[j] /= clusters[i].length;\n }\n centroids[i] = newCentroid;\n }\n }\n }\n return centroids;\n }\n /**\n * Diverse landmark selection\n */\n _diverseLandmarks(X, k) {\n const landmarks = [];\n // Start with random point\n let firstIdx = Math.floor(Math.random() * X.length);\n landmarks.push([...X[firstIdx]]);\n // Greedily select points that maximize minimum distance\n while (landmarks.length < k) {\n let maxMinDist = -1;\n let bestIdx = -1;\n for (let i = 0; i < X.length; i++) {\n const minDist = Math.min(...landmarks.map(l => this._euclideanDistance(X[i], l)));\n if (minDist > maxMinDist) {\n maxMinDist = minDist;\n bestIdx = i;\n }\n }\n if (bestIdx >= 0) {\n landmarks.push([...X[bestIdx]]);\n }\n else {\n break;\n }\n }\n return landmarks;\n }\n /**\n * Get labels for landmarks\n */\n _getLandmarkLabels(X, y, labelIndices) {\n const landmarkLabels = [];\n for (const landmark of this.landmarks) {\n // Find nearest point in X\n let minDist = Infinity;\n let nearestIdx = 0;\n for (let i = 0; i < X.length; i++) {\n const dist = this._euclideanDistance(landmark, X[i]);\n if (dist < minDist) {\n minDist = dist;\n nearestIdx = i;\n }\n }\n landmarkLabels.push(labelIndices[nearestIdx]);\n }\n return landmarkLabels;\n }\n _euclideanDistance(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += Math.pow(a[i] - b[i], 2);\n }\n return Math.sqrt(sum);\n }\n /**\n * Predict using sparse kernel\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before 
prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Use landmarks for prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n /**\n * Get selected landmarks\n */\n getLandmarks() {\n return this.landmarks.map(l => [...l]);\n }\n}\n\n// ensemble-kernel-elm.ts — Ensemble Kernel ELM\n// Multiple KELM models with different kernels, voting/weighted combination\n/**\n * Ensemble Kernel ELM\n * Features:\n * - Multiple KELM models with different kernels\n * - Voting/weighted combination\n * - Diversity promotion\n * - Robust predictions\n */\nclass EnsembleKernelELM {\n constructor(options) {\n var _a, _b, _c, _d;\n this.models = [];\n this.trained = false;\n this.categories = options.categories;\n // Default kernels if not provided\n const defaultKernels = options.kernels || [\n { type: 'rbf', gamma: 1.0, weight: 1.0 },\n { type: 'polynomial', degree: 2, coef0: 0, weight: 1.0 },\n { type: 'linear', weight: 1.0 },\n ];\n this.options = {\n categories: options.categories,\n kernels: defaultKernels.map(k => { var _a; return (Object.assign(Object.assign({}, k), { weight: (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 })); }),\n votingType: (_a = options.votingType) !== null && _a !== void 0 ? _a : 'weighted',\n activation: (_b = options.activation) !== null && _b !== void 0 ? _b : 'relu',\n maxLen: (_c = options.maxLen) !== null && _c !== void 0 ? _c : 100,\n useTokenizer: (_d = options.useTokenizer) !== null && _d !== void 0 ? _d : true,\n };\n // Initialize models for each kernel\n for (const kernel of this.options.kernels) {\n const kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: kernel.type,\n gamma: kernel.gamma,\n degree: kernel.degree,\n coef0: kernel.coef0,\n });\n this.models.push(kelm);\n }\n }\n /**\n * Train ensemble\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Train each model\n for (const model of this.models) {\n (_b = (_a = model).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = model).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Predict with ensemble voting\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Get predictions from all models\n const modelPredictions = [];\n for (const model of this.models) {\n const preds = ((_b = (_a = model).predictFromVector) === null || _b === void 0 ? 
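SparseKernelELM trades accuracy for scale by training on numLandmarks representative points, each labeled with the class of its nearest original sample; 'diverse' selection greedily maximizes the minimum distance to landmarks already chosen. Sketch (import path assumed):

import { SparseKernelELM } from '@astermind/astermind-community'; // assumed export

const skelm = new SparseKernelELM({
  categories: ['a', 'b'],
  numLandmarks: 2,              // fit on 2 points instead of all 4
  landmarkSelection: 'diverse', // 'random' | 'kmeans' | 'diverse'
});
skelm.train([[0, 0], [0.2, 0], [5, 5], [5.2, 5]], ['a', 'a', 'b', 'b']);

console.log(skelm.getLandmarks());         // the retained representatives
console.log(skelm.predict([[0.1, 0]], 1));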
void 0 : _b.call(_a, [x], topK)) || [];\n modelPredictions.push(preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n index: p.index || 0,\n })));\n }\n // Combine predictions\n const combined = this._combinePredictions(modelPredictions, topK);\n allResults.push(...combined);\n }\n return allResults;\n }\n /**\n * Combine predictions from multiple models\n */\n _combinePredictions(modelPredictions, topK) {\n // Aggregate predictions by label\n const labelScores = new Map();\n for (let modelIdx = 0; modelIdx < modelPredictions.length; modelIdx++) {\n const kernel = this.options.kernels[modelIdx];\n const weight = kernel.weight;\n for (const pred of modelPredictions[modelIdx]) {\n if (!labelScores.has(pred.label)) {\n labelScores.set(pred.label, { prob: 0, votes: 0, weight: 0 });\n }\n const score = labelScores.get(pred.label);\n if (this.options.votingType === 'majority') {\n score.votes += 1;\n }\n else if (this.options.votingType === 'weighted') {\n score.prob += pred.prob * weight;\n score.weight += weight;\n score.votes += 1;\n }\n else if (this.options.votingType === 'average') {\n score.prob += pred.prob;\n score.votes += 1;\n }\n }\n }\n // Normalize and sort\n const results = [];\n for (const [label, score] of labelScores) {\n let finalProb;\n if (this.options.votingType === 'majority') {\n finalProb = score.votes / this.models.length;\n }\n else if (this.options.votingType === 'weighted') {\n finalProb = score.weight > 0 ? score.prob / score.weight : 0;\n }\n else {\n finalProb = score.votes > 0 ? score.prob / score.votes : 0;\n }\n results.push({\n label,\n prob: finalProb,\n votes: score.votes,\n confidence: finalProb * (score.votes / this.models.length),\n });\n }\n // Sort by probability and return top K\n results.sort((a, b) => b.prob - a.prob);\n return results.slice(0, topK);\n }\n}\n\n// deep-kernel-elm.ts — Deep Kernel ELM\n// Multi-layer kernel transformations with hierarchical kernel learning\n/**\n * Deep Kernel ELM with multi-layer kernel transformations\n * Features:\n * - Hierarchical kernel learning\n * - Deep feature extraction\n * - Multi-layer kernel transformations\n * - Complex non-linear pattern learning\n */\nclass DeepKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.layers = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n numLayers: (_a = options.numLayers) !== null && _a !== void 0 ? _a : 3,\n kernelType: (_b = options.kernelType) !== null && _b !== void 0 ? _b : 'rbf',\n hiddenUnitsPerLayer: (_c = options.hiddenUnitsPerLayer) !== null && _c !== void 0 ? _c : 256,\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n // Initialize layers\n for (let i = 0; i < this.options.numLayers; i++) {\n const kelm = new KernelELM({\n useTokenizer: i === 0 && this.options.useTokenizer ? true : undefined,\n categories: i === this.options.numLayers - 1 ? 
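EnsembleKernelELM trains one KernelELM per kernel spec and merges per-label scores by majority vote, kernel-weighted average, or plain average; each result also reports vote counts and a confidence of prob * (votes / numModels). Sketch (import path assumed):

import { EnsembleKernelELM } from '@astermind/astermind-community'; // assumed export

const ens = new EnsembleKernelELM({
  categories: ['a', 'b'],
  votingType: 'weighted', // 'majority' | 'weighted' | 'average'
  kernels: [
    { type: 'rbf', gamma: 0.5, weight: 2.0 }, // trusted twice as much
    { type: 'linear', weight: 1.0 },
  ],
});
ens.train([[0, 0], [1, 1]], ['a', 'b']);

console.log(ens.predict([[0.9, 0.9]], 1)); // { label, prob, votes, confidence }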
this.options.categories : [],\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n this.layers.push(kelm);\n }\n }\n /**\n * Train deep kernel ELM\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Forward pass through layers\n let currentFeatures = X;\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n if (i === this.layers.length - 1) {\n // Final layer: train with labels\n (_b = (_a = layer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = layer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, currentFeatures, labelIndices);\n }\n else {\n // Intermediate layers: train autoencoder-style\n (_f = (_e = layer).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, currentFeatures, currentFeatures.map((_, idx) => idx));\n }\n // Extract features from this layer\n currentFeatures = this._extractLayerFeatures(currentFeatures, layer);\n }\n this.trained = true;\n }\n /**\n * Extract features from a layer\n */\n _extractLayerFeatures(X, layer) {\n var _a, _b;\n const features = [];\n for (const x of X) {\n // Get kernel features (simplified - in practice, you'd extract actual kernel features)\n const pred = ((_b = (_a = layer).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || [];\n features.push(pred.length > 0 ? pred : x); // Use prediction as features or fallback to input\n }\n return features;\n }\n /**\n * Predict with deep kernel\n */\n predict(X, topK = 3, returnLayerFeatures = false) {\n var _a, _b, _c, _d;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Forward pass through layers\n let currentFeatures = x;\n const layerFeatures = [];\n for (let i = 0; i < this.layers.length - 1; i++) {\n const layer = this.layers[i];\n const features = ((_b = (_a = layer).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, currentFeatures)) || currentFeatures;\n layerFeatures.push(features);\n currentFeatures = features;\n }\n // Final layer prediction\n const finalLayer = this.layers[this.layers.length - 1];\n const preds = ((_d = (_c = finalLayer).predictFromVector) === null || _d === void 0 ? void 0 : _d.call(_c, [currentFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnLayerFeatures) {\n result.layerFeatures = layerFeatures.map(f => [...f]);\n }\n allResults.push(result);\n }\n }\n return allResults;\n }\n}\n\n// robust-kernel-elm.ts — Robust Kernel ELM\n// Outlier-resistant kernels with robust loss functions\n/**\n * Robust Kernel ELM with outlier resistance\n * Features:\n * - Outlier-resistant kernels\n * - Robust loss functions\n * - Noise-tolerant learning\n * - Outlier detection\n */\nclass RobustKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.outlierIndices = new Set();\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? 
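DeepKernelELM trains its intermediate layers autoencoder-style (every sample is labeled with its own index) and feeds each layer's logits to the next; only the final layer sees the real categories. Sketch (import path assumed):

import { DeepKernelELM } from '@astermind/astermind-community'; // assumed export

const deep = new DeepKernelELM({
  categories: ['a', 'b'],
  numLayers: 3, // two feature layers plus one classifier layer
  kernelType: 'rbf',
});
deep.train([[0, 0], [1, 1]], ['a', 'b']);

// The third argument asks for the intermediate layerFeatures as well.
console.log(deep.predict([[0.1, 0.1]], 1, true));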
_a : 'rbf',\n robustLoss: (_b = options.robustLoss) !== null && _b !== void 0 ? _b : 'huber',\n outlierThreshold: (_c = options.outlierThreshold) !== null && _c !== void 0 ? _c : 2.0,\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train with robust loss\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Detect outliers\n this._detectOutliers(X);\n // Filter outliers for training (or use robust weighting)\n const filteredX = [];\n const filteredY = [];\n for (let i = 0; i < X.length; i++) {\n if (!this.outlierIndices.has(i)) {\n filteredX.push(X[i]);\n filteredY.push(labelIndices[i]);\n }\n }\n // Train on filtered data\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, filteredX.length > 0 ? filteredX : X, filteredY.length > 0 ? filteredY : labelIndices);\n this.trained = true;\n }\n /**\n * Detect outliers using statistical methods\n */\n _detectOutliers(X) {\n this.outlierIndices.clear();\n if (X.length === 0)\n return;\n // Compute mean and std for each dimension\n const dim = X[0].length;\n const means = new Array(dim).fill(0);\n const stds = new Array(dim).fill(0);\n // Compute means\n for (const x of X) {\n for (let i = 0; i < dim; i++) {\n means[i] += x[i] || 0;\n }\n }\n for (let i = 0; i < dim; i++) {\n means[i] /= X.length;\n }\n // Compute standard deviations\n for (const x of X) {\n for (let i = 0; i < dim; i++) {\n stds[i] += Math.pow((x[i] || 0) - means[i], 2);\n }\n }\n for (let i = 0; i < dim; i++) {\n stds[i] = Math.sqrt(stds[i] / X.length);\n }\n // Detect outliers (points far from mean)\n for (let i = 0; i < X.length; i++) {\n const x = X[i];\n let maxZScore = 0;\n for (let j = 0; j < dim; j++) {\n if (stds[j] > 0) {\n const zScore = Math.abs((x[j] || 0) - means[j]) / stds[j];\n maxZScore = Math.max(maxZScore, zScore);\n }\n }\n if (maxZScore > this.options.outlierThreshold) {\n this.outlierIndices.add(i);\n }\n }\n }\n /**\n * Apply robust loss function\n */\n _robustLoss(error) {\n if (this.options.robustLoss === 'huber') {\n const delta = 1.0;\n if (Math.abs(error) <= delta) {\n return 0.5 * error * error;\n }\n else {\n return delta * (Math.abs(error) - 0.5 * delta);\n }\n }\n else if (this.options.robustLoss === 'hinge') {\n return Math.max(0, 1 - error);\n }\n else if (this.options.robustLoss === 'epsilon-insensitive') {\n const epsilon = 0.1;\n return Math.max(0, Math.abs(error) - epsilon);\n }\n return error * error; // Default: squared loss\n }\n /**\n * Predict with outlier detection\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained 
before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Check if input is outlier\n const isOutlier = this._isOutlier(x);\n // Get prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const prob = pred.prob || 0;\n const robustness = isOutlier ? 0.5 : 1.0; // Lower robustness for outliers\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob,\n isOutlier,\n robustness,\n });\n }\n }\n return results;\n }\n /**\n * Check if a point is an outlier\n */\n _isOutlier(x) {\n // Simplified outlier check (in practice, use trained model statistics)\n const mean = x.reduce((a, b) => a + b, 0) / x.length;\n const std = Math.sqrt(x.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0) / x.length);\n if (std === 0)\n return false;\n const maxZScore = Math.max(...x.map(v => Math.abs((v - mean) / std)));\n return maxZScore > this.options.outlierThreshold;\n }\n}\n\n// elm-kelm-cascade.ts — ELM-KELM Cascade\n// ELM feature extraction → KELM classification\n/**\n * ELM-KELM Cascade\n * Features:\n * - ELM for feature extraction\n * - KELM for classification\n * - Hierarchical learning\n * - Efficiency + accuracy\n */\nclass ELMKELMCascade {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n elmHiddenUnits: (_a = options.elmHiddenUnits) !== null && _a !== void 0 ? _a : 256,\n kelmKernel: (_b = options.kelmKernel) !== null && _b !== void 0 ? _b : 'rbf',\n kelmGamma: (_c = options.kelmGamma) !== null && _c !== void 0 ? _c : 1.0,\n kelmDegree: (_d = options.kelmDegree) !== null && _d !== void 0 ? _d : 2,\n kelmCoef0: (_e = options.kelmCoef0) !== null && _e !== void 0 ? _e : 0,\n activation: (_f = options.activation) !== null && _f !== void 0 ? _f : 'relu',\n maxLen: (_g = options.maxLen) !== null && _g !== void 0 ? _g : 100,\n useTokenizer: (_h = options.useTokenizer) !== null && _h !== void 0 ? _h : true,\n };\n // Initialize ELM for feature extraction\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.elmHiddenUnits,\n categories: [], // No categories for feature extraction\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize KELM for classification\n this.kelm = new KernelELM({\n useTokenizer: false, // Already tokenized by ELM\n categories: this.options.categories,\n maxLen: undefined,\n kernel: this.options.kelmKernel,\n gamma: this.options.kelmGamma,\n degree: this.options.kelmDegree,\n coef0: this.options.kelmCoef0,\n });\n }\n /**\n * Train cascade\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Step 1: Train ELM for feature extraction (autoencoder-style)\n (_b = (_a = this.elm).trainFromData) === null || _b === void 0 ? void 0 : _b.call(_a, X, X.map((_, i) => i)); // Self-supervised\n // Step 2: Extract features using ELM\n const extractedFeatures = this._extractFeatures(X);\n // Step 3: Train KELM on extracted features\n (_d = (_c = this.kelm).setCategories) === null || _d === void 0 ? 
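RobustKernelELM screens training samples by per-dimension z-score against the batch statistics and drops anything whose worst dimension exceeds outlierThreshold; at predict time a cruder, per-input z-score check flags outliers and halves their robustness score. Sketch (import path assumed):

import { RobustKernelELM } from '@astermind/astermind-community'; // assumed export

const robust = new RobustKernelELM({
  categories: ['a', 'b'],
  robustLoss: 'huber',   // 'huber' | 'hinge' | 'epsilon-insensitive'
  outlierThreshold: 2.0, // max |z-score| before a sample is excluded
});
robust.train([[0, 0], [0.1, 0.1], [1, 1], [1.1, 0.9]], ['a', 'a', 'b', 'b']);

// Results carry isOutlier plus robustness (0.5 when flagged, 1.0 otherwise).
console.log(robust.predict([[0.5, 0.5]], 1));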
void 0 : _d.call(_c, this.options.categories);\n (_f = (_e = this.kelm).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, extractedFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features using ELM\n */\n _extractFeatures(X) {\n var _a, _b;\n const features = [];\n for (const x of X) {\n // Get hidden layer activations as features\n const logits = ((_b = (_a = this.elm).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || [];\n features.push(logits.length > 0 ? logits : x); // Fallback to input if no logits\n }\n return features;\n }\n /**\n * Predict with cascade\n */\n predict(X, topK = 3, returnFeatures = false) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Step 1: Extract features\n const extractedFeatures = this._extractFeatures([x])[0];\n // Step 2: Classify with KELM\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [extractedFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnFeatures) {\n result.extractedFeatures = [...extractedFeatures];\n }\n results.push(result);\n }\n }\n return results;\n }\n}\n\n// string-kernel-elm.ts — String Kernel ELM\n// String kernels for text/DNA/protein sequences\n/**\n * String Kernel ELM for sequence data\n * Features:\n * - N-gram kernels\n * - Subsequence kernels\n * - Spectrum kernels\n * - Text/DNA/protein sequence analysis\n */\nclass StringKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.trained = false;\n this.vocabulary = new Set();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'ngram',\n n: (_b = options.n) !== null && _b !== void 0 ? _b : 3,\n lambda: (_c = options.lambda) !== null && _c !== void 0 ? _c : 0.5,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n };\n // Use polynomial kernel as base (will be adapted for strings)\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: 'polynomial',\n degree: this.options.n,\n });\n }\n /**\n * Train on string sequences\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Convert strings to feature vectors\n const stringX = X;\n const featureVectors = this._stringsToFeatures(stringX);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
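ELMKELMCascade first fits the ELM self-supervised (each sample labeled with its own index), then uses that model's logits as the feature space for the kernel classifier. Sketch (import path assumed):

import { ELMKELMCascade } from '@astermind/astermind-community'; // assumed export

const cascade = new ELMKELMCascade({
  categories: ['a', 'b'],
  elmHiddenUnits: 64, // size of the feature-extraction stage
  kelmKernel: 'rbf',
});
cascade.train([[0, 0], [1, 1]], ['a', 'b']);

// The third argument returns the extracted feature vector with each result.
console.log(cascade.predict([[0.9, 0.9]], 1, true));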
void 0 : _d.call(_c, featureVectors, labelIndices);\n this.trained = true;\n }\n /**\n * Convert strings to feature vectors using string kernels\n */\n _stringsToFeatures(strings) {\n // Build vocabulary\n this.vocabulary.clear();\n for (const s of strings) {\n const ngrams = this._extractNgrams(s);\n for (const ngram of ngrams) {\n this.vocabulary.add(ngram);\n }\n }\n const vocabArray = Array.from(this.vocabulary);\n const features = [];\n for (const s of strings) {\n const feature = new Array(vocabArray.length).fill(0);\n const ngrams = this._extractNgrams(s);\n for (const ngram of ngrams) {\n const idx = vocabArray.indexOf(ngram);\n if (idx >= 0) {\n feature[idx] += 1;\n }\n }\n // Normalize\n const sum = feature.reduce((a, b) => a + b, 0);\n if (sum > 0) {\n for (let i = 0; i < feature.length; i++) {\n feature[i] /= sum;\n }\n }\n features.push(feature);\n }\n return features;\n }\n /**\n * Extract n-grams from string\n */\n _extractNgrams(s) {\n const ngrams = [];\n if (this.options.kernelType === 'ngram' || this.options.kernelType === 'spectrum') {\n // N-gram extraction\n for (let i = 0; i <= s.length - this.options.n; i++) {\n ngrams.push(s.substring(i, i + this.options.n));\n }\n }\n else if (this.options.kernelType === 'subsequence') {\n // Subsequence extraction (simplified)\n for (let i = 0; i <= s.length - this.options.n; i++) {\n ngrams.push(s.substring(i, i + this.options.n));\n }\n }\n return ngrams;\n }\n /**\n * Predict on strings\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const stringX = X;\n const featureVectors = this._stringsToFeatures(stringX);\n const results = [];\n for (const features of featureVectors) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [features], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// convolutional-elm.ts — Convolutional ELM (C-ELM)\n// Convolutional layers + ELM for image/sequence processing\n/**\n * Convolutional ELM\n * Features:\n * - Convolutional layers for feature extraction\n * - ELM for classification\n * - Translation invariance\n * - Image/sequence processing\n */\nclass ConvolutionalELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n inputShape: (_a = options.inputShape) !== null && _a !== void 0 ? _a : [28, 28, 1],\n filters: (_b = options.filters) !== null && _b !== void 0 ? _b : [32, 64],\n kernelSizes: (_c = options.kernelSizes) !== null && _c !== void 0 ? _c : [3, 3],\n poolSizes: (_d = options.poolSizes) !== null && _d !== void 0 ? _d : [2, 2],\n hiddenUnits: (_e = options.hiddenUnits) !== null && _e !== void 0 ? _e : 256,\n activation: (_f = options.activation) !== null && _f !== void 0 ? _f : 'relu',\n maxLen: (_g = options.maxLen) !== null && _g !== void 0 ? _g : 100,\n useTokenizer: (_h = options.useTokenizer) !== null && _h !== void 0 ? _h : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
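StringKernelELM turns each string into a normalized n-gram count vector over a vocabulary built from the incoming batch. Worth noting from the code: predict() calls _stringsToFeatures too, which clears and rebuilds the vocabulary from the query strings alone, so the prediction-time feature space need not match the training-time one. Sketch (import path assumed):

import { StringKernelELM } from '@astermind/astermind-community'; // assumed export

const str = new StringKernelELM({
  categories: ['greeting', 'farewell'],
  kernelType: 'ngram', // 'ngram' | 'subsequence' | 'spectrum'
  n: 3,                // character trigrams
});
str.train(
  ['hello there', 'hi there', 'goodbye now', 'bye now'],
  ['greeting', 'greeting', 'farewell', 'farewell']
);

console.log(str.predict(['hello friend'], 1));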
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on image/sequence data\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract convolutional features\n const images = Array.isArray(X[0][0]) ? X : X.map(x => [x]);\n const features = this._extractConvolutionalFeatures(images);\n // Train ELM on features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features using convolutional layers\n */\n _extractConvolutionalFeatures(images) {\n const features = [];\n for (const image of images) {\n let current = image;\n // Apply convolutional layers\n for (let layer = 0; layer < this.options.filters.length; layer++) {\n const convInput = Array.isArray(current[0][0])\n ? current[0]\n : current;\n const convOutput = this._convLayer(convInput, this.options.filters[layer], this.options.kernelSizes[layer] || 3);\n current = this._poolLayer(convOutput, this.options.poolSizes[layer] || 2);\n }\n // Flatten\n const flattened = this._flatten(current);\n features.push(flattened);\n }\n return features;\n }\n /**\n * Convolutional layer (simplified)\n */\n _convLayer(input, numFilters, kernelSize) {\n // Simplified convolution (in practice, use proper convolution)\n const output = [];\n for (let f = 0; f < numFilters; f++) {\n const featureMap = [];\n for (let i = 0; i < input.length; i++) {\n featureMap[i] = [];\n for (let j = 0; j < input[i].length; j++) {\n // Simple convolution (simplified)\n let sum = 0;\n for (let ki = 0; ki < kernelSize; ki++) {\n for (let kj = 0; kj < kernelSize; kj++) {\n const row = i + ki - Math.floor(kernelSize / 2);\n const col = j + kj - Math.floor(kernelSize / 2);\n if (row >= 0 && row < input.length && col >= 0 && col < input[i].length) {\n sum += input[row][col] || 0;\n }\n }\n }\n featureMap[i][j] = Math.max(0, sum / (kernelSize * kernelSize)); // ReLU\n }\n }\n output.push(featureMap);\n }\n return output;\n }\n /**\n * Pooling layer\n */\n _poolLayer(input, poolSize) {\n // Simplified pooling\n const images = Array.isArray(input[0][0])\n ? 
input\n : [input];\n const pooled = [];\n for (const img of images) {\n const pooledImg = [];\n for (let i = 0; i < img.length; i += poolSize) {\n pooledImg[i / poolSize] = [];\n for (let j = 0; j < img[i].length; j += poolSize) {\n // Max pooling\n let max = -Infinity;\n for (let pi = 0; pi < poolSize && i + pi < img.length; pi++) {\n for (let pj = 0; pj < poolSize && j + pj < img[i].length; pj++) {\n max = Math.max(max, img[i + pi][j + pj] || 0);\n }\n }\n pooledImg[i / poolSize][j / poolSize] = max;\n }\n }\n pooled.push(pooledImg);\n }\n return pooled;\n }\n /**\n * Flatten feature maps\n */\n _flatten(featureMaps) {\n if (Array.isArray(featureMaps[0][0])) {\n const maps = featureMaps;\n const flattened = [];\n for (const map of maps) {\n for (const row of map) {\n flattened.push(...row);\n }\n }\n return flattened;\n }\n else {\n const map = featureMaps;\n const flattened = [];\n for (const row of map) {\n flattened.push(...row);\n }\n return flattened;\n }\n }\n /**\n * Predict\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const images = Array.isArray(X[0][0]) ? X : X.map(x => [x]);\n const features = this._extractConvolutionalFeatures(images);\n const results = [];\n for (const feature of features) {\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [feature], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// recurrent-elm.ts — Recurrent ELM (R-ELM)\n// Recurrent connections in ELM for sequence modeling\n/**\n * Recurrent ELM for sequence modeling\n * Features:\n * - Recurrent connections\n * - Sequence modeling\n * - Temporal dependencies\n * - Memory of past inputs\n */\nclass RecurrentELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.hiddenState = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n recurrentUnits: (_b = options.recurrentUnits) !== null && _b !== void 0 ? _b : 128,\n sequenceLength: (_c = options.sequenceLength) !== null && _c !== void 0 ? _c : 10,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'tanh',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize hidden state\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n }\n /**\n * Train on sequences\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Process sequences with recurrent connections\n const features = this._processSequences(X);\n // Train ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
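ConvolutionalELM's simplified conv stage has no learned filter weights: every entry in filters[layer] yields the same box-filter-plus-ReLU map, which max pooling then shrinks before the flattened maps reach the ELM. Sketch with tiny 4x4 single-channel inputs (import path assumed):

import { ConvolutionalELM } from '@astermind/astermind-community'; // assumed export

const conv = new ConvolutionalELM({
  categories: ['dark', 'bright'],
  filters: [4],     // four (identical) feature maps in this simplified conv
  kernelSizes: [3],
  poolSizes: [2],   // 2x2 max pooling
});

const dark = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]];
const bright = [[9, 9, 9, 9], [9, 9, 9, 9], [9, 9, 9, 9], [9, 9, 9, 9]];
conv.train([dark, bright], ['dark', 'bright']);

console.log(conv.predict([bright], 1));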
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Process sequences with recurrent connections\n */\n _processSequences(sequences) {\n const features = [];\n for (const sequence of sequences) {\n // Reset hidden state for each sequence\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n // Process sequence step by step\n for (const step of sequence) {\n // Combine input with hidden state\n [...step, ...this.hiddenState];\n // Update hidden state (simplified recurrent update)\n this._updateHiddenState(step);\n }\n // Use final hidden state + last input as features\n const finalFeatures = [...sequence[sequence.length - 1] || [], ...this.hiddenState];\n features.push(finalFeatures);\n }\n return features;\n }\n /**\n * Update hidden state (recurrent connection)\n */\n _updateHiddenState(input) {\n // Simplified recurrent update: h_t = tanh(W * [x_t; h_{t-1}])\n const combined = [...input, ...this.hiddenState];\n const newState = new Array(this.options.recurrentUnits).fill(0);\n // Simple linear transformation (in practice, use learned weights)\n for (let i = 0; i < this.options.recurrentUnits; i++) {\n let sum = 0;\n for (let j = 0; j < combined.length; j++) {\n // Simple hash-based weight (in practice, use learned weights)\n const hash = this._hash(`recurrent_${i}_${j}`);\n sum += combined[j] * hash;\n }\n // Apply activation\n if (this.options.activation === 'tanh') {\n newState[i] = Math.tanh(sum);\n }\n else if (this.options.activation === 'relu') {\n newState[i] = Math.max(0, sum);\n }\n else if (this.options.activation === 'sigmoid') {\n newState[i] = 1 / (1 + Math.exp(-sum));\n }\n else {\n newState[i] = sum;\n }\n }\n this.hiddenState = newState;\n }\n _hash(str) {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = ((hash << 5) - hash) + char;\n hash = hash & hash;\n }\n return (hash / 2147483647) * 0.1; // Small weights\n }\n /**\n * Predict on sequence\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const sequences = Array.isArray(X[0][0])\n ? X\n : [X];\n const results = [];\n for (const sequence of sequences) {\n // Process sequence\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n for (const step of sequence) {\n this._updateHiddenState(step);\n }\n const finalFeatures = [...sequence[sequence.length - 1] || [], ...this.hiddenState];\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [finalFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n hiddenState: [...this.hiddenState],\n });\n }\n }\n return results;\n }\n /**\n * Reset hidden state\n */\n resetState() {\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n }\n}\n\n// fuzzy-elm.ts — Fuzzy ELM\n// Fuzzy logic + ELM for uncertainty handling and soft classification\n/**\n * Fuzzy ELM\n * Features:\n * - Fuzzy logic integration\n * - Uncertainty handling\n * - Soft classification\n * - Membership functions\n */\nclass FuzzyELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.trained = false;\n this.membershipParams = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? 
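/* Sketch (illustrative): RecurrentELM above approximates the classical
   update h_t = tanh(W * [x_t; h_{t-1}]) but derives every weight
   deterministically from a string hash instead of learning it. The same
   idea in isolation:

     function hashWeight(i, j) {
       const s = `recurrent_${i}_${j}`;
       let h = 0;
       for (let k = 0; k < s.length; k++) {
         h = ((h << 5) - h) + s.charCodeAt(k);
         h = h & h; // force 32-bit overflow, as in _hash
       }
       return (h / 2147483647) * 0.1; // small fixed pseudo-weights
     }
     // h_t[i] = activation( sum_j hashWeight(i, j) * [x_t; h_{t-1}][j] )

   Because the weights are reproducible, predict() re-runs exactly the
   dynamics the features were trained on; resetState() only clears h. */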
_a : 256,\n fuzzyMembership: (_b = options.fuzzyMembership) !== null && _b !== void 0 ? _b : 'gaussian',\n fuzzificationLevel: (_c = options.fuzzificationLevel) !== null && _c !== void 0 ? _c : 0.5,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with fuzzy logic\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Fuzzify input features\n const fuzzifiedX = this._fuzzifyFeatures(X);\n // Compute membership parameters\n this._computeMembershipParams(X, labelIndices);\n // Train ELM on fuzzified features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, fuzzifiedX, labelIndices);\n this.trained = true;\n }\n /**\n * Fuzzify input features\n */\n _fuzzifyFeatures(X) {\n const fuzzified = [];\n for (const x of X) {\n const fuzzy = x.map(val => this._fuzzifyValue(val));\n fuzzified.push(fuzzy);\n }\n return fuzzified;\n }\n /**\n * Fuzzify a single value\n */\n _fuzzifyValue(value) {\n // Apply fuzzification based on membership function\n if (this.options.fuzzyMembership === 'triangular') {\n // Triangular membership\n const center = 0;\n const width = this.options.fuzzificationLevel;\n if (Math.abs(value - center) <= width) {\n return 1 - Math.abs(value - center) / width;\n }\n return 0;\n }\n else if (this.options.fuzzyMembership === 'gaussian') {\n // Gaussian membership\n const center = 0;\n const sigma = this.options.fuzzificationLevel;\n return Math.exp(-Math.pow(value - center, 2) / (2 * sigma * sigma));\n }\n else if (this.options.fuzzyMembership === 'trapezoidal') {\n // Trapezoidal membership\n const center = 0;\n const width = this.options.fuzzificationLevel;\n const dist = Math.abs(value - center);\n if (dist <= width * 0.5) {\n return 1;\n }\n else if (dist <= width) {\n return 1 - (dist - width * 0.5) / (width * 0.5);\n }\n return 0;\n }\n return value; // Default: no fuzzification\n }\n /**\n * Compute membership parameters for each category\n */\n _computeMembershipParams(X, y) {\n // Compute mean and std for each category\n const categoryData = new Map();\n for (let i = 0; i < X.length; i++) {\n const label = y[i];\n if (!categoryData.has(label)) {\n categoryData.set(label, []);\n }\n categoryData.get(label).push(X[i]);\n }\n for (const [label, data] of categoryData) {\n const mean = this._computeMean(data);\n const std = this._computeStd(data, mean);\n this.membershipParams.set(this.options.categories[label], {\n center: mean,\n width: std * 2, // 2 standard deviations\n });\n }\n }\n _computeMean(data) {\n var _a;\n if (data.length === 0)\n return 0;\n const sum = data.reduce((s, x) => s + x.reduce((a, b) => a + b, 0), 0);\n const count = data.length * (((_a = data[0]) === null || _a === void 0 ? 
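/* Worked sketch (illustrative): with the defaults above
   (fuzzyMembership 'gaussian', fuzzificationLevel 0.5), _fuzzifyValue maps
   each raw feature v to exp(-v^2 / (2 * 0.5^2)):

     // _fuzzifyValue(0)   → 1.0          (full membership at the centre)
     // _fuzzifyValue(0.5) → exp(-0.5) ≈ 0.607
     // _fuzzifyValue(1)   → exp(-2)   ≈ 0.135

   The triangular and trapezoidal variants instead clip to 0 outside width
   `fuzzificationLevel`, producing sparser fuzzified vectors. */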
void 0 : _a.length) || 1);\n return sum / count;\n }\n _computeStd(data, mean) {\n var _a;\n if (data.length === 0)\n return 1;\n const variance = data.reduce((s, x) => s + x.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0), 0) / (data.length * (((_a = data[0]) === null || _a === void 0 ? void 0 : _a.length) || 1));\n return Math.sqrt(variance);\n }\n /**\n * Compute fuzzy membership for a prediction\n */\n _computeMembership(label, features) {\n const params = this.membershipParams.get(label);\n if (!params)\n return 0.5; // Default membership\n const mean = features.reduce((a, b) => a + b, 0) / features.length;\n const dist = Math.abs(mean - params.center);\n if (this.options.fuzzyMembership === 'gaussian') {\n return Math.exp(-Math.pow(dist, 2) / (2 * params.width * params.width));\n }\n else {\n // Triangular\n if (dist <= params.width) {\n return 1 - dist / params.width;\n }\n return 0;\n }\n }\n /**\n * Predict with fuzzy logic\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Fuzzify input\n const fuzzified = this._fuzzifyFeatures([x])[0];\n // Get base prediction\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [fuzzified], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const label = pred.label || this.options.categories[pred.index || 0];\n const prob = pred.prob || 0;\n // Compute fuzzy membership\n const membership = this._computeMembership(label, x);\n // Combine probability with membership\n const confidence = prob * membership;\n results.push({\n label,\n prob,\n membership,\n confidence,\n });\n }\n }\n return results;\n }\n}\n\n// quantum-inspired-elm.ts — Quantum-Inspired ELM\n// Quantum computing principles for feature maps and optimization\n/**\n * Quantum-Inspired ELM\n * Features:\n * - Quantum feature maps\n * - Quantum superposition\n * - Quantum entanglement\n * - Quantum kernel methods\n */\nclass QuantumInspiredELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.quantumStates = [];\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n quantumLayers: (_b = options.quantumLayers) !== null && _b !== void 0 ? _b : 2,\n entanglement: (_c = options.entanglement) !== null && _c !== void 0 ? _c : true,\n superposition: (_d = options.superposition) !== null && _d !== void 0 ? _d : true,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with quantum-inspired features\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Apply quantum feature maps\n const quantumFeatures = this._applyQuantumFeatureMap(X);\n // Train ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? 
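/* Sketch (illustrative, hypothetical numbers): FuzzyELM.predict above
   reports confidence = prob * membership, i.e. the ELM's probability damped
   by how typical the raw input looks for that category:

     // ELM output: { label: 'spam', prob: 0.8 }
     // input's feature mean sits 1.5 widths from the 'spam' centre,
     // gaussian membership = exp(-1.5^2 / 2) ≈ 0.32
     // confidence = 0.8 * 0.32 ≈ 0.26

   A confident prediction on an atypical input is therefore down-weighted
   rather than reported at face value. */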
void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, quantumFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Apply quantum feature map\n */\n _applyQuantumFeatureMap(X) {\n const features = [];\n for (const x of X) {\n let quantumState = this._encodeToQuantumState(x);\n // Apply quantum layers\n for (let layer = 0; layer < this.options.quantumLayers; layer++) {\n quantumState = this._applyQuantumLayer(quantumState, layer);\n }\n // Measure quantum state (convert to classical features)\n const measured = this._measureQuantumState(quantumState);\n features.push(measured);\n }\n return features;\n }\n /**\n * Encode classical data to quantum state\n */\n _encodeToQuantumState(x) {\n // Quantum state encoding (amplitude encoding)\n const state = new Array(Math.pow(2, Math.ceil(Math.log2(x.length)))).fill(0);\n // Normalize input\n const norm = Math.sqrt(x.reduce((sum, v) => sum + v * v, 0));\n if (norm > 0) {\n for (let i = 0; i < x.length; i++) {\n state[i] = x[i] / norm;\n }\n }\n return state;\n }\n /**\n * Apply quantum layer (quantum gates simulation)\n */\n _applyQuantumLayer(state, layer) {\n let newState = [...state];\n // Apply quantum gates (simplified simulation)\n if (this.options.superposition) {\n // Hadamard-like transformation (superposition)\n newState = this._applySuperposition(newState);\n }\n if (this.options.entanglement) {\n // Entanglement (CNOT-like)\n newState = this._applyEntanglement(newState);\n }\n // Rotation gates\n newState = this._applyRotation(newState, layer);\n return newState;\n }\n /**\n * Apply superposition (Hadamard-like)\n */\n _applySuperposition(state) {\n const newState = new Array(state.length).fill(0);\n const factor = 1 / Math.sqrt(2);\n for (let i = 0; i < state.length; i++) {\n for (let j = 0; j < state.length; j++) {\n // Simplified Hadamard transformation\n const phase = (i === j) ? factor : factor * Math.cos(Math.PI * i * j / state.length);\n newState[i] += state[j] * phase;\n }\n }\n return newState;\n }\n /**\n * Apply entanglement (CNOT-like)\n */\n _applyEntanglement(state) {\n const newState = [...state];\n // Entangle pairs of qubits\n for (let i = 0; i < state.length - 1; i += 2) {\n const temp = newState[i];\n newState[i] = newState[i + 1];\n newState[i + 1] = temp;\n }\n return newState;\n }\n /**\n * Apply rotation gates\n */\n _applyRotation(state, layer) {\n const newState = new Array(state.length).fill(0);\n const angle = Math.PI / (2 * (layer + 1));\n for (let i = 0; i < state.length; i++) {\n const cos = Math.cos(angle);\n const sin = Math.sin(angle);\n newState[i] = state[i] * cos - state[(i + 1) % state.length] * sin;\n }\n return newState;\n }\n /**\n * Measure quantum state (convert to classical)\n */\n _measureQuantumState(state) {\n // Measure by computing probabilities (amplitudes squared)\n const probabilities = state.map(amp => amp * amp);\n // Project to hidden units dimension\n const hiddenDim = this.options.hiddenUnits;\n const features = new Array(hiddenDim).fill(0);\n for (let i = 0; i < hiddenDim; i++) {\n const idx = i % probabilities.length;\n features[i] = probabilities[idx];\n }\n return features;\n }\n /**\n * Predict with quantum-inspired model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? 
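/* Worked sketch (illustrative): _encodeToQuantumState above is amplitude
   encoding: the input is L2-normalised into a state vector padded to the
   next power of two:

     // x = [3, 4]    → state length 2, norm 5 → state = [0.6, 0.8]
     // x = [1, 1, 1] → state length 4 (2^ceil(log2 3)), last slot 0

   _measureQuantumState then squares the amplitudes (Born-rule style
   probabilities) and tiles them out to hiddenUnits entries, so the ELM
   always receives a fixed-width classical feature vector. */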
X : [X];\n const results = [];\n for (const x of XArray) {\n // Apply quantum feature map\n const quantumFeatures = this._applyQuantumFeatureMap([x])[0];\n // Predict\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [quantumFeatures], topK)) || [];\n // Get quantum state for this input\n let quantumState = this._encodeToQuantumState(x);\n for (let layer = 0; layer < this.options.quantumLayers; layer++) {\n quantumState = this._applyQuantumLayer(quantumState, layer);\n }\n const amplitude = Math.sqrt(quantumState.reduce((sum, v) => sum + v * v, 0));\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n quantumState: [...quantumState],\n amplitude,\n });\n }\n }\n return results;\n }\n}\n\n// graph-kernel-elm.ts — Graph Kernel ELM\n// Graph kernels (Weisfeiler-Lehman, etc.) for graph structure encoding\n/**\n * Graph Kernel ELM\n * Features:\n * - Graph kernels (Weisfeiler-Lehman, shortest-path, random-walk)\n * - Graph structure encoding\n * - Node classification/regression\n */\nclass GraphKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'weisfeiler-lehman',\n wlIterations: (_b = options.wlIterations) !== null && _b !== void 0 ? _b : 3,\n kernel: (_c = options.kernel) !== null && _c !== void 0 ? _c : 'rbf',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n };\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: this.options.kernel,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train on graphs\n */\n train(graphs, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Compute graph kernel features\n const features = this._computeGraphKernelFeatures(graphs);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
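/* Sketch (field names taken from the kernel code below; the variable name
   gk and the labels are hypothetical): GraphKernelELM consumes graphs as
   { nodes, edges } objects with node.id, node.features, edge.source and
   edge.target:

     const g = {
       nodes: [
         { id: 'a', features: [1, 0] },
         { id: 'b', features: [0, 1] },
         { id: 'c', features: [1, 1] },
       ],
       edges: [
         { source: 'a', target: 'b' },
         { source: 'b', target: 'c' },
       ],
     };
     // gk.train([g], ['benign']); gk.predict(g, 3);

   options.kernelType selects 'weisfeiler-lehman', 'shortest-path', or the
   random-walk fallback. */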
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Compute graph kernel features\n */\n _computeGraphKernelFeatures(graphs) {\n const features = [];\n for (const graph of graphs) {\n let graphFeatures;\n if (this.options.kernelType === 'weisfeiler-lehman') {\n graphFeatures = this._weisfeilerLehmanKernel(graph);\n }\n else if (this.options.kernelType === 'shortest-path') {\n graphFeatures = this._shortestPathKernel(graph);\n }\n else {\n graphFeatures = this._randomWalkKernel(graph);\n }\n features.push(graphFeatures);\n }\n return features;\n }\n /**\n * Weisfeiler-Lehman kernel\n */\n _weisfeilerLehmanKernel(graph) {\n const features = [];\n const nodeLabels = new Map();\n // Initialize labels with node features\n for (const node of graph.nodes) {\n const label = node.features.join(',');\n nodeLabels.set(node.id, label);\n }\n // WL iterations\n for (let iter = 0; iter < this.options.wlIterations; iter++) {\n const newLabels = new Map();\n for (const node of graph.nodes) {\n // Get neighbor labels\n const neighbors = graph.edges\n .filter(e => e.source === node.id || e.target === node.id)\n .map(e => e.source === node.id ? e.target : e.source);\n const neighborLabels = neighbors\n .map(nid => nodeLabels.get(nid) || '')\n .sort()\n .join(',');\n // New label = current label + sorted neighbor labels\n const newLabel = `${nodeLabels.get(node.id)}|${neighborLabels}`;\n newLabels.set(node.id, newLabel);\n }\n // Count label frequencies\n const labelCounts = new Map();\n for (const label of newLabels.values()) {\n labelCounts.set(label, (labelCounts.get(label) || 0) + 1);\n }\n // Add to features\n for (const [label, count] of labelCounts) {\n features.push(count);\n }\n nodeLabels.clear();\n for (const [id, label] of newLabels) {\n nodeLabels.set(id, label);\n }\n }\n return features.length > 0 ? features : new Array(10).fill(0);\n }\n /**\n * Shortest-path kernel\n */\n _shortestPathKernel(graph) {\n // Compute shortest paths between all pairs\n const distances = this._computeShortestPaths(graph);\n // Create histogram of distances\n const maxDist = Math.max(...distances.flat().filter(d => d < Infinity));\n const bins = Math.min(10, maxDist + 1);\n const histogram = new Array(bins).fill(0);\n for (const row of distances) {\n for (const dist of row) {\n if (dist < Infinity) {\n const bin = Math.min(Math.floor(dist), bins - 1);\n histogram[bin]++;\n }\n }\n }\n return histogram;\n }\n /**\n * Random-walk kernel\n */\n _randomWalkKernel(graph) {\n // Simplified random-walk kernel\n const features = [];\n // Node degree distribution\n const degrees = new Map();\n for (const edge of graph.edges) {\n degrees.set(edge.source, (degrees.get(edge.source) || 0) + 1);\n degrees.set(edge.target, (degrees.get(edge.target) || 0) + 1);\n }\n const degreeHist = new Array(10).fill(0);\n for (const degree of degrees.values()) {\n const bin = Math.min(degree, 9);\n degreeHist[bin]++;\n }\n features.push(...degreeHist);\n // Graph statistics\n features.push(graph.nodes.length);\n features.push(graph.edges.length);\n features.push(graph.nodes.length > 0 ? 
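/* Worked sketch (illustrative): one Weisfeiler-Lehman iteration on the path
   graph a-b-c with initial labels drawn from node.features:

     // initial labels:  a:'1,0'   b:'0,1'   c:'1,1'
     // newLabel = ownLabel + '|' + sorted neighbour labels
     //   a → '1,0|0,1'
     //   b → '0,1|1,0,1,1'   (neighbours a and c, labels sorted)
     //   c → '1,1|0,1'
     // label histogram → feature counts [1, 1, 1]

   Each further round folds in one more hop of neighbourhood structure, and
   the concatenated histograms across wlIterations rounds become the graph's
   kernel feature vector. */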
graph.edges.length / graph.nodes.length : 0);\n return features;\n }\n /**\n * Compute shortest paths (Floyd-Warshall simplified)\n */\n _computeShortestPaths(graph) {\n const n = graph.nodes.length;\n const dist = Array(n).fill(null).map(() => Array(n).fill(Infinity));\n // Initialize\n for (let i = 0; i < n; i++) {\n dist[i][i] = 0;\n }\n // Add edges\n for (const edge of graph.edges) {\n const srcIdx = graph.nodes.findIndex(n => n.id === edge.source);\n const tgtIdx = graph.nodes.findIndex(n => n.id === edge.target);\n if (srcIdx >= 0 && tgtIdx >= 0) {\n dist[srcIdx][tgtIdx] = 1;\n dist[tgtIdx][srcIdx] = 1;\n }\n }\n // Floyd-Warshall\n for (let k = 0; k < n; k++) {\n for (let i = 0; i < n; i++) {\n for (let j = 0; j < n; j++) {\n if (dist[i][k] + dist[k][j] < dist[i][j]) {\n dist[i][j] = dist[i][k] + dist[k][j];\n }\n }\n }\n }\n return dist;\n }\n /**\n * Predict on graphs\n */\n predict(graphs, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const graphArray = Array.isArray(graphs) ? graphs : [graphs];\n const features = this._computeGraphKernelFeatures(graphArray);\n const results = [];\n for (const feature of features) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [feature], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// tensor-kernel-elm.ts — Tensor Kernel ELM\n// Multi-dimensional kernel learning with tensor factorization\n/**\n * Tensor Kernel ELM\n * Features:\n * - Multi-dimensional kernel learning\n * - Tensor factorization\n * - Multi-modal data fusion\n * - Complex relationship modeling\n */\nclass TensorKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.tensorFactors = []; // [sample][mode][dim][rank] - each sample has multiple modes, each mode is a 2D matrix\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n tensorRank: (_a = options.tensorRank) !== null && _a !== void 0 ? _a : 10,\n modes: (_b = options.modes) !== null && _b !== void 0 ? _b : [10, 10, 10],\n kernel: (_c = options.kernel) !== null && _c !== void 0 ? _c : 'rbf',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n };\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: this.options.kernel,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train on tensor data\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Factorize tensors\n const tensorX = Array.isArray(X[0][0]) ? X : this._reshapeToTensors(X);\n this._factorizeTensors(tensorX);\n // Extract features from tensor factorization\n const features = this._extractTensorFeatures(tensorX);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
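/* Worked sketch (illustrative): _computeShortestPaths treats every edge as
   undirected with unit weight and runs Floyd-Warshall. For the same path
   graph a-b-c:

     // initial: d(a,b) = d(b,c) = 1, d(a,c) = Infinity, diagonal 0
     // relaxing through k = b: d(a,c) = d(a,b) + d(b,c) = 2
     // final matrix: [[0, 1, 2], [1, 0, 1], [2, 1, 0]]

   _shortestPathKernel then histograms the finite distances into at most 10
   bins, yielding a size-invariant signature of the graph. */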
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Reshape 2D data to 3D tensors\n */\n _reshapeToTensors(X) {\n const [h, w, c] = this.options.modes;\n const result = [];\n for (const x of X) {\n const tensor = [];\n let idx = 0;\n for (let k = 0; k < c; k++) {\n const matrix = [];\n for (let i = 0; i < h; i++) {\n const row = [];\n for (let j = 0; j < w; j++) {\n row.push(x[idx % x.length] || 0);\n idx++;\n }\n matrix.push(row);\n }\n tensor.push(matrix);\n }\n result.push(tensor);\n }\n return result;\n }\n /**\n * Factorize tensors using CP decomposition\n */\n _factorizeTensors(tensors) {\n // Simplified CP (CANDECOMP/PARAFAC) decomposition\n this.tensorFactors = [];\n for (const tensor of tensors) {\n const factors = [];\n // Factorize each mode\n for (let mode = 0; mode < this.options.modes.length; mode++) {\n const factor = new Array(this.options.modes[mode]).fill(0).map(() => new Array(this.options.tensorRank).fill(0).map(() => Math.random() * 0.1));\n factors.push(factor);\n }\n this.tensorFactors.push(factors);\n }\n }\n /**\n * Extract features from tensor factorization\n */\n _extractTensorFeatures(tensors) {\n const features = [];\n for (let i = 0; i < tensors.length; i++) {\n const factors = this.tensorFactors[i] || [];\n // Flatten factors\n const feature = [];\n for (const factor of factors) {\n for (const row of factor) {\n for (const val of row) {\n feature.push(val);\n }\n }\n }\n // Add tensor statistics\n const tensor = tensors[i];\n if (tensor && tensor.length > 0) {\n feature.push(tensor.length); // Height\n if (Array.isArray(tensor[0])) {\n feature.push(tensor[0].length); // Width\n if (Array.isArray(tensor[0][0])) {\n feature.push(tensor[0][0].length); // Channels\n }\n else {\n feature.push(1);\n }\n }\n else {\n feature.push(1);\n feature.push(1);\n }\n // Add tensor norm\n let norm = 0;\n for (const matrix of tensor) {\n if (Array.isArray(matrix)) {\n for (const row of matrix) {\n if (Array.isArray(row)) {\n for (const val of row) {\n norm += val * val;\n }\n }\n else {\n norm += row * row;\n }\n }\n }\n }\n feature.push(Math.sqrt(norm));\n }\n else {\n feature.push(0, 0, 0, 0);\n }\n features.push(feature);\n }\n return features;\n }\n /**\n * Predict on tensor data\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const tensorX = Array.isArray(X[0][0]) ? X : this._reshapeToTensors(X);\n const features = this._extractTensorFeatures(tensorX);\n const results = [];\n for (let i = 0; i < features.length; i++) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? 
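/* Sketch (illustrative): the "CP decomposition" above is deliberately
   simplified. For modes [h, w, c] and rank R it allocates one h x R, one
   w x R and one c x R matrix of small random values per sample; no ALS
   fitting takes place. With the defaults (modes [10, 10, 10],
   tensorRank 10) each sample contributes:

     // 3 factor matrices * (10 * 10) = 300 flattened factor values
     // + 4 statistics (three dimension sizes and the Frobenius norm)
     // → a 304-dimensional feature vector for the KernelELM

   The discriminative signal therefore comes mainly from the appended
   statistics and the kernel, not from the random factors themselves. */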
void 0 : _b.call(_a, [features[i]], topK)) || [];\n const factors = this.tensorFactors[i] || [];\n for (const pred of preds.slice(0, topK)) {\n const factorCopy = factors.map((f) => f.map((row) => row.slice()));\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n tensorFactors: factorCopy,\n });\n }\n }\n return results;\n }\n}\n\nfunction buildRFF(d, D, sigma = 1.0, rng = Math.random) {\n const W = new Float64Array(D * d);\n const b = new Float64Array(D);\n const s = 1 / Math.max(1e-12, sigma); // N(0, 1/sigma^2)\n for (let i = 0; i < D * d; i++)\n W[i] = gauss$1(rng) * s;\n for (let i = 0; i < D; i++)\n b[i] = rng() * 2 * Math.PI;\n return { W, b, D, d, sigma };\n}\nfunction mapRFF(rff, x) {\n const { W, b, D, d } = rff;\n const z = new Float64Array(2 * D);\n for (let k = 0; k < D; k++) {\n let dot = b[k];\n const off = k * d;\n for (let j = 0; j < d; j++)\n dot += W[off + j] * (x[j] || 0);\n z[k] = Math.cos(dot);\n z[D + k] = Math.sin(dot);\n }\n // L2 normalize block to keep ridge well-conditioned\n let s = 0;\n for (let i = 0; i < z.length; i++)\n s += z[i] * z[i];\n const inv = 1 / Math.sqrt(Math.max(s, 1e-12));\n for (let i = 0; i < z.length; i++)\n z[i] *= inv;\n return z;\n}\n// Box-Muller\nfunction gauss$1(rng) {\n let u = 0, v = 0;\n while (u === 0)\n u = rng();\n while (v === 0)\n v = rng();\n return Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);\n}\n\n// online_ridge.ts — maintain (Φ^T Φ + λI)^{-1} and β for linear ridge\nclass OnlineRidge {\n constructor(p, m, lambda = 1e-4) {\n this.p = p;\n this.m = m;\n this.lambda = lambda;\n this.Ainv = new Float64Array(p * p);\n this.Beta = new Float64Array(p * m);\n // Ainv = (λ I)^-1 = (1/λ) I\n const inv = 1 / Math.max(1e-12, lambda);\n for (let i = 0; i < p; i++)\n this.Ainv[i * p + i] = inv;\n }\n // rank-1 update with a single sample (φ, y)\n update(phi, y) {\n const { p, m, Ainv, Beta } = this;\n // u = Ainv * phi\n const u = new Float64Array(p);\n for (let i = 0; i < p; i++) {\n let s = 0, row = i * p;\n for (let j = 0; j < p; j++)\n s += Ainv[row + j] * phi[j];\n u[i] = s;\n }\n // denom = 1 + phi^T u\n let denom = 1;\n for (let j = 0; j < p; j++)\n denom += phi[j] * u[j];\n denom = Math.max(denom, 1e-12);\n const scale = 1 / denom;\n // Ainv <- Ainv - (u u^T) * scale\n for (let i = 0; i < p; i++) {\n const ui = u[i] * scale;\n for (let j = 0; j < p; j++)\n Ainv[i * p + j] -= ui * u[j];\n }\n // Beta <- Beta + Ainv * (phi * y^T)\n // compute t = Ainv * phi (reuse u after Ainv update)\n for (let i = 0; i < p; i++) {\n let s = 0, row = i * p;\n for (let j = 0; j < p; j++)\n s += Ainv[row + j] * phi[j];\n u[i] = s; // reuse u as t\n }\n // Beta += outer(u, y)\n for (let i = 0; i < p; i++) {\n const ui = u[i];\n for (let c = 0; c < m; c++)\n Beta[i * m + c] += ui * y[c];\n }\n }\n // yhat = φ^T Beta\n predict(phi) {\n const { p, m, Beta } = this;\n const out = new Float64Array(m);\n for (let c = 0; c < m; c++) {\n let s = 0;\n for (let i = 0; i < p; i++)\n s += phi[i] * Beta[i * m + c];\n out[c] = s;\n }\n return out;\n }\n}\n\nfunction isFiniteMatrix(M) {\n for (let i = 0; i < M.length; i++) {\n const row = M[i];\n if (!row || row.length !== M[0].length)\n return false;\n for (let j = 0; j < row.length; j++) {\n const v = row[j];\n if (!Number.isFinite(v))\n return false;\n }\n }\n return true;\n}\nfunction symmetrize(A) {\n const n = A.length;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = 0.5 * (A[i][j] + A[j][i]);\n A[i][j] = v;\n 
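/* Sketch (illustrative; the variable name ridge is hypothetical):
   OnlineRidge above keeps Ainv = (Phi^T Phi + lambda I)^{-1} current via
   the Sherman-Morrison identity, so each sample costs O(p^2) instead of a
   fresh O(p^3) solve:

     // Ainv_new = Ainv - (Ainv phi)(Ainv phi)^T / (1 + phi^T Ainv phi)

     const ridge = new OnlineRidge(4, 1, 1e-4); // p = 4 features, m = 1 output
     ridge.update(Float64Array.from([1, 0, 0, 0]), [1]);
     ridge.predict(Float64Array.from([1, 0, 0, 0])); // ≈ [1] for small lambda

   Combined with buildRFF/mapRFF (random frequencies drawn once, then
   normalised cos/sin features per input) this gives a streaming
   approximation of kernel ridge regression. */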
A[j][i] = v;\n }\n }\n}\nfunction choleskySolve(A, Y) {\n const n = A.length, m = Y[0].length;\n // L\n const L = Array.from({ length: n }, () => Array(n).fill(0));\n for (let i = 0; i < n; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n if (i === j) {\n if (!(sum > 0) || !Number.isFinite(sum))\n return null; // not PD\n L[i][j] = Math.sqrt(sum);\n }\n else {\n L[i][j] = sum / L[j][j];\n }\n }\n }\n // forward solve: L Z = Y\n const Z = Array.from({ length: n }, () => Array(m).fill(0));\n for (let c = 0; c < m; c++) {\n for (let i = 0; i < n; i++) {\n let s = Y[i][c];\n for (let k = 0; k < i; k++)\n s -= L[i][k] * Z[k][c];\n Z[i][c] = s / L[i][i];\n }\n }\n // back solve: L^T Θ = Z\n const Theta = Array.from({ length: n }, () => Array(m).fill(0));\n for (let c = 0; c < m; c++) {\n for (let i = n - 1; i >= 0; i--) {\n let s = Z[i][c];\n for (let k = i + 1; k < n; k++)\n s -= L[k][i] * Theta[k][c];\n Theta[i][c] = s / L[i][i];\n }\n }\n return { Theta, L };\n}\n// CG fallback for SPD system A x = b, where A is given as matrix\nfunction cgSolve(A, b, tol, maxIter) {\n const n = A.length;\n const x = new Array(n).fill(0);\n const r = b.slice(); // r = b - A x = b initially\n const p = r.slice();\n let rsold = dot$1(r, r);\n let it = 0;\n for (; it < maxIter; it++) {\n const Ap = matvec(A, p);\n const alpha = rsold / Math.max(1e-300, dot$1(p, Ap));\n for (let i = 0; i < n; i++)\n x[i] += alpha * p[i];\n for (let i = 0; i < n; i++)\n r[i] -= alpha * Ap[i];\n const rsnew = dot$1(r, r);\n if (Math.sqrt(rsnew) <= tol)\n break;\n const beta = rsnew / Math.max(1e-300, rsold);\n for (let i = 0; i < n; i++)\n p[i] = r[i] + beta * p[i];\n rsold = rsnew;\n }\n return { x, iters: it + 1 };\n}\nfunction dot$1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction matvec(A, x) {\n const n = A.length, out = new Array(n).fill(0);\n for (let i = 0; i < n; i++) {\n const Ai = A[i];\n let s = 0;\n for (let j = 0; j < n; j++)\n s += Ai[j] * x[j];\n out[i] = s;\n }\n return out;\n}\n/**\n * Production-grade ridge regression solver:\n * Solves (K + λ I) Θ = Y, with symmetry enforcement, adaptive jitter, and CG fallback.\n */\nfunction ridgeSolvePro(K, Y, opts = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n const info = [];\n const n = K.length;\n if (n === 0)\n return { Theta: [], usedLambda: (_a = opts.lambda) !== null && _a !== void 0 ? _a : 1e-4, method: \"cholesky\", info: [\"empty system\"] };\n if (!isFiniteMatrix(K))\n throw new Error(\"K contains NaN/Inf or ragged rows\");\n if (!Array.isArray(Y) || Y.length !== n || Y[0].length === undefined)\n throw new Error(\"Y shape mismatch\");\n if (!isFiniteMatrix(Y))\n throw new Error(\"Y contains NaN/Inf\");\n const m = Y[0].length;\n const baseLambda = Math.max(0, (_b = opts.lambda) !== null && _b !== void 0 ? _b : 1e-4);\n const ensureSym = (_c = opts.ensureSymmetry) !== null && _c !== void 0 ? _c : true;\n let jitter = (_d = opts.jitterInit) !== null && _d !== void 0 ? _d : 1e-10;\n const jitterMax = (_e = opts.jitterMax) !== null && _e !== void 0 ? _e : 1e-1;\n const jitterFactor = (_f = opts.jitterFactor) !== null && _f !== void 0 ? _f : 10;\n // Build A = (symmetrized K) + (lambda + jitter) I\n const A = Array.from({ length: n }, (_, i) => K[i].slice());\n if (ensureSym)\n symmetrize(A);\n // Try Cholesky with increasing jitter\n let usedLambda = baseLambda;\n while (true) {\n if ((_g = opts.abortSignal) === null || _g === void 0 ? 
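/* Worked sketch (illustrative): choleskySolve factors the SPD matrix once
   (A = L L^T) and solves every right-hand-side column with one forward and
   one backward substitution:

     const A = [[4, 2], [2, 3]];   // symmetric positive definite
     const Y = [[2], [1]];
     // L = [[2, 0], [1, sqrt(2)]]
     // forward  L Z = Y       → Z = [[1], [0]]
     // backward L^T Theta = Z → Theta = [[0.5], [0]]
     // check: A * Theta = [[2], [1]] = Y

   Returning null (rather than throwing) on a non-positive pivot is what
   lets ridgeSolvePro below escalate jitter and finally fall back to
   cgSolve's conjugate-gradient iteration. */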
void 0 : _g.aborted)\n throw new Error(\"ridgeSolvePro aborted\");\n // add diag\n for (let i = 0; i < n; i++)\n A[i][i] = (ensureSym ? A[i][i] : (A[i][i] + A[i][i]) * 0.5) + usedLambda;\n const chol = choleskySolve(A, Y);\n if (chol) {\n info.push(`Cholesky ok with lambda=${usedLambda.toExponential(2)}`);\n return { Theta: chol.Theta, usedLambda, method: \"cholesky\", info };\n }\n else {\n // remove the just-added lambda before next try\n for (let i = 0; i < n; i++)\n A[i][i] -= usedLambda;\n if (jitter > jitterMax) {\n info.push(`Cholesky failed up to jitter=${jitterMax}; falling back to CG`);\n break;\n }\n usedLambda = baseLambda + jitter;\n info.push(`Cholesky failed; retry with lambda=${usedLambda.toExponential(2)}`);\n jitter *= jitterFactor;\n }\n }\n // CG fallback: solve A Θ = Y column-wise\n // Rebuild A once with final usedLambda\n for (let i = 0; i < n; i++)\n A[i][i] = (ensureSym ? A[i][i] : (A[i][i] + A[i][i]) * 0.5) + usedLambda;\n const tol = (_h = opts.cgTol) !== null && _h !== void 0 ? _h : 1e-6;\n const maxIter = (_j = opts.cgMaxIter) !== null && _j !== void 0 ? _j : Math.min(1000, n * 3);\n const Theta = Array.from({ length: n }, () => Array(m).fill(0));\n let maxIters = 0;\n for (let c = 0; c < m; c++) {\n if ((_k = opts.abortSignal) === null || _k === void 0 ? void 0 : _k.aborted)\n throw new Error(\"ridgeSolvePro aborted\");\n const b = new Array(n);\n for (let i = 0; i < n; i++)\n b[i] = Y[i][c];\n const { x, iters } = cgSolve(A, b, tol, maxIter);\n maxIters = Math.max(maxIters, iters);\n for (let i = 0; i < n; i++)\n Theta[i][c] = x[i];\n }\n info.push(`CG solved columns with tol=${tol}, maxIter=${maxIter}, max iters used=${maxIters}`);\n return { Theta, usedLambda, method: \"cg\", iters: maxIters, info };\n}\n\n// src/math/index.ts — production-grade numerics for Ω\n// Backward compatible with previous exports; adds robust, stable helpers.\n// ---------- Constants\nconst EPS = 1e-12; // general epsilon for divides/sqrt\nconst DISK_EPS = 0.95; // strict radius for Poincaré-like ops\nconst MAX_EXP = 709; // ~ ln(Number.MAX_VALUE)\nconst MIN_EXP = -745; // ~ ln(Number.MIN_VALUE)\n// ---------- Constructors / guards\nfunction zeros(n) { return new Float64Array(n); }\nfunction isFiniteVec(a) {\n const n = a.length;\n for (let i = 0; i < n; i++)\n if (!Number.isFinite(a[i]))\n return false;\n return true;\n}\nfunction asVec(a) {\n // Copy into Float64Array for consistent math perf\n return a instanceof Float64Array ? 
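/* Sketch (illustrative): ridgeSolvePro's retry loop walks a geometric
   jitter ladder, multiplying by jitterFactor after each failed Cholesky,
   before conceding to conjugate gradients:

     // attempt 1: lambda = baseLambda
     // attempt 2: lambda = baseLambda + 1e-10   (jitterInit)
     // attempt 3: lambda = baseLambda + 1e-9
     //   ... up to baseLambda + jitterMax (default 1e-1)
     // then: column-by-column CG on the last regularised A

   The added diagonal trades a small bias for a guaranteed solve, and the
   returned `info` array records which rung succeeded. */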
a : new Float64Array(Array.from(a));\n}\n// ---------- Basic algebra (pure, allocation)\nfunction dot(a, b) {\n const n = Math.min(a.length, b.length);\n let s = 0;\n for (let i = 0; i < n; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction add(a, b) {\n const n = Math.min(a.length, b.length);\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] + b[i];\n return o;\n}\nfunction scal(a, k) {\n const n = a.length;\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] * k;\n return o;\n}\nfunction hadamard(a, b) {\n const n = Math.min(a.length, b.length);\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] * b[i];\n return o;\n}\nfunction tanhVec(a) {\n const n = a.length;\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = Math.tanh(a[i]);\n return o;\n}\n// ---------- In-place variants (underscore suffix) to reduce GC\nfunction add_(out, a, b) {\n const n = Math.min(out.length, a.length, b.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] + b[i];\n return out;\n}\nfunction scal_(out, a, k) {\n const n = Math.min(out.length, a.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] * k;\n return out;\n}\nfunction hadamard_(out, a, b) {\n const n = Math.min(out.length, a.length, b.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] * b[i];\n return out;\n}\nfunction tanhVec_(out, a) {\n const n = Math.min(out.length, a.length);\n for (let i = 0; i < n; i++)\n out[i] = Math.tanh(a[i]);\n return out;\n}\n// ---------- Norms / normalization\nfunction l2$1(a) {\n // robust L2 (avoids NaN on weird input)\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * a[i];\n return Math.sqrt(Math.max(0, s));\n}\nfunction normalizeL2(a, eps = EPS) {\n const nrm = l2$1(a);\n if (!(nrm > eps) || !Number.isFinite(nrm))\n return new Float64Array(a.length); // zero vec\n const o = new Float64Array(a.length);\n const inv = 1 / nrm;\n for (let i = 0; i < a.length; i++)\n o[i] = a[i] * inv;\n return o;\n}\nfunction clampVec(a, lo = -Infinity, hi = Infinity) {\n const n = a.length, o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = Math.min(hi, Math.max(lo, a[i]));\n return o;\n}\n// ---------- Stats\nfunction mean(a) {\n if (a.length === 0)\n return 0;\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i];\n return s / a.length;\n}\nfunction variance(a, mu = mean(a)) {\n if (a.length === 0)\n return 0;\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - mu;\n s += d * d;\n }\n return s / a.length;\n}\nfunction standardize(a) {\n const mu = mean(a);\n const v = variance(a, mu);\n const sd = Math.sqrt(Math.max(v, 0));\n if (!(sd > EPS)) {\n // zero-variance edge: return zeros to avoid blowing up downstream\n return new Float64Array(a.length);\n }\n const o = new Float64Array(a.length);\n const inv = 1 / sd;\n for (let i = 0; i < a.length; i++)\n o[i] = (a[i] - mu) * inv;\n return o;\n}\n// ---------- Cosine (robust)\nfunction cosine$2(a, b) {\n var _a, _b;\n const n = Math.min(a.length, b.length);\n if (n === 0)\n return 0;\n let dotv = 0, na = 0, nb = 0;\n for (let i = 0; i < n; i++) {\n const ai = ((_a = a[i]) !== null && _a !== void 0 ? _a : 0), bi = ((_b = b[i]) !== null && _b !== void 0 ? _b : 0);\n dotv += ai * bi;\n na += ai * ai;\n nb += bi * bi;\n }\n const denom = Math.sqrt(Math.max(na * nb, EPS));\n const v = dotv / denom;\n return Number.isFinite(v) ? 
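/* Sketch (illustrative): the underscore-suffixed helpers above are the
   allocation-free twins of the pure versions; same math, caller-owned
   output buffer:

     const out = new Float64Array(3);
     add_(out, Float64Array.from([1, 2, 3]),
               Float64Array.from([4, 5, 6])); // out = [5, 7, 9]
     tanhVec_(out, out);                      // safe to alias out with a

   Note that normalizeL2 and standardize both return an all-zero vector on
   degenerate input (near-zero norm or variance) instead of dividing by ~0,
   which keeps downstream cosine and distance code finite. */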
v : 0;\n}\n// ---------- Stable softmax / log-sum-exp\nfunction logSumExp(a) {\n let m = -Infinity;\n for (let i = 0; i < a.length; i++)\n if (a[i] > m)\n m = a[i];\n if (!Number.isFinite(m))\n m = 0;\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, a[i] - m)));\n return m + Math.log(Math.max(s, EPS));\n}\nfunction softmax(a) {\n const out = new Float64Array(a.length);\n const lse = logSumExp(a);\n for (let i = 0; i < a.length; i++)\n out[i] = Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, a[i] - lse)));\n // tiny renorm to remove drift\n let s = 0;\n for (let i = 0; i < out.length; i++)\n s += out[i];\n const inv = 1 / Math.max(s, EPS);\n for (let i = 0; i < out.length; i++)\n out[i] *= inv;\n return out;\n}\n// ---------- Argmax / Top-K\nfunction argmax(a) {\n var _a, _b;\n if (a.length === 0)\n return -1;\n let idx = 0;\n let m = ((_a = a[0]) !== null && _a !== void 0 ? _a : -Infinity);\n for (let i = 1; i < a.length; i++) {\n const v = ((_b = a[i]) !== null && _b !== void 0 ? _b : -Infinity);\n if (v > m) {\n m = v;\n idx = i;\n }\n }\n return idx;\n}\nfunction topK(a, k) {\n var _a;\n const n = a.length;\n if (k <= 0 || n === 0)\n return [];\n const K = Math.min(k, n);\n // simple partial selection (O(nk)); fine for small k in UI\n const res = [];\n for (let i = 0; i < n; i++) {\n const v = ((_a = a[i]) !== null && _a !== void 0 ? _a : -Infinity);\n if (res.length < K) {\n res.push({ index: i, value: v });\n if (res.length === K)\n res.sort((x, y) => y.value - x.value);\n }\n else if (v > res[K - 1].value) {\n res[K - 1] = { index: i, value: v };\n res.sort((x, y) => y.value - x.value);\n }\n }\n return res;\n}\n// ---------- Safe exp/log/sigmoid\nfunction expSafe(x) {\n return Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, x)));\n}\nfunction log1pSafe(x) {\n // log(1+x) with guard (x>-1)\n const y = Math.max(x, -1 + EPS);\n return Math.log(1 + y);\n}\nfunction sigmoid$1(x) {\n if (x >= 0) {\n const z = Math.exp(-Math.min(x, MAX_EXP));\n return 1 / (1 + z);\n }\n else {\n const z = Math.exp(Math.max(x, MIN_EXP));\n return z / (1 + z);\n }\n}\n// ---------- Hyperbolic (proxy) distance with strict disk clamp\n// Assumes inputs are already bounded; still clamps defensively.\nfunction hDistProxy(a, b) {\n // clamp radii to avoid denom blow-ups\n let na = 0, nb = 0, sum = 0;\n for (let i = 0; i < a.length; i++) {\n const ai = Math.max(-DISK_EPS, Math.min(DISK_EPS, a[i]));\n const bi = Math.max(-DISK_EPS, Math.min(DISK_EPS, b[i]));\n na += ai * ai;\n nb += bi * bi;\n const d = ai - bi;\n sum += d * d;\n }\n const num = 2 * Math.sqrt(Math.max(0, sum));\n const den = Math.max(EPS, (1 - na) * (1 - nb));\n // smooth, monotone proxy; bounded growth; stable near boundary\n return Math.log1p(Math.min(2 * num / den, 1e12));\n}\n// ---------- Small utilities for UI formatting\nfunction fmtHead(a, n = 4, digits = 3) {\n return Array.from(a).slice(0, n).map(v => v.toFixed(digits)).join(\", \");\n}\n\n/******************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
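/* Worked sketch (illustrative): softmax above routes through logSumExp so
   large logits cannot overflow Math.exp:

     softmax(Float64Array.from([1000, 1001, 1002]));
     // naive exp(1000) → Infinity; here the max is subtracted first:
     // exp(-2), exp(-1), exp(0), renormalised ≈ [0.090, 0.245, 0.665]

   sigmoid$1 applies the same idea in scalar form, branching on the sign of
   x so the argument handed to Math.exp is never positive. */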
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise, SuppressedError, Symbol, Iterator */\r\n\r\n\r\nfunction __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\ntypeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\r\n var e = new Error(message);\r\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\r\n};\n\n// Omega.ts v2 — improved local reasoning + summarization\n// uses your math.ts, rff.ts, online_ridge.ts\n// -------- sentence + text helpers ----------\nfunction splitSentences$1(text) {\n return text\n .replace(/\\s+/g, \" \")\n .split(/(?<=[.?!])\\s+/)\n .map((s) => s.trim())\n .filter((s) => s.length > 8 && /\\w/.test(s));\n}\nfunction clean(text) {\n return text\n .replace(/```[\\s\\S]*?```/g, \" \")\n .replace(/`[^`]+`/g, \" \")\n .replace(/\\[[^\\]]*\\]\\([^)]*\\)/g, \"\") // strip markdown links\n .replace(/[-–>•→]/g, \" \")\n .replace(/\\s+/g, \" \")\n .trim();\n}\nfunction isMetaSentence(s) {\n // simple heuristics for table-of-contents or chapter headings\n return (/^(\\*|#)/.test(s) || // markdown markers\n /chapter/i.test(s) || // \"Chapter 11\", \"Chapters 11–15\"\n /part\\s*\\d+/i.test(s) || // \"Part 3\"\n /section/i.test(s) || // \"Section 2.3\"\n /^\\s*[A-Z]\\)\\s*$/.test(s) || // single-letter outlines\n s.length < 15 // very short stray lines\n );\n}\nfunction rewrite(summary) {\n return summary\n .replace(/\\s+[-–>•→]\\s+/g, \" \")\n .replace(/\\s+\\.\\s+/g, \". \")\n .replace(/([a-z]) - ([a-z])/gi, \"$1-$2\")\n .replace(/\\s{2,}/g, \" \")\n .trim();\n}\n// ------------------------------------------------------------\nfunction omegaComposeAnswer(question_1, items_1) {\n return __awaiter(this, arguments, void 0, function* (question, items, opts = {}) {\n // License check removed // Premium feature - requires valid license\n if (!(items === null || items === void 0 ? void 0 : items.length))\n return \"No results found.\";\n const { dim = 64, features = 32, sigma = 1.0, rounds = 3, topSentences = 8, personality = \"neutral\", } = opts;\n // ---------- 1. Clean + collect sentences ----------\n const allText = items.map((i) => clean(i.content)).join(\" \");\n let sentences = splitSentences$1(allText)\n .filter(s => !isMetaSentence(s))\n .slice(0, 120);\n if (sentences.length === 0)\n return clean(items[0].content).slice(0, 400);\n // ---------- 2. 
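/* Worked sketch (illustrative): clean() strips code fences, inline code and
   markdown links before sentence splitting, and isMetaSentence() then drops
   heading/TOC noise:

     clean('See `foo()` in [docs](https://x.y) - details.');
     // → 'See in details.'  (code span, link, and dash removed)

     isMetaSentence('Chapter 11');   // true  (heading pattern, short)
     isMetaSentence('# Overview');   // true  (markdown marker)
     isMetaSentence('Maps associate keys with values in constant time.');
     //                              // false (kept for scoring)

   Only the survivors (length > 8, containing a word character) reach the
   RFF/ridge scorer in omegaComposeAnswer. */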
Build encoder + ridge ----------\n const rff = buildRFF(dim, features, sigma);\n const ridge = new OnlineRidge(2 * features, 1, 1e-3);\n const encode = (s) => {\n const vec = new Float64Array(dim);\n const len = Math.min(s.length, dim);\n for (let i = 0; i < len; i++)\n vec[i] = s.charCodeAt(i) / 255;\n return mapRFF(rff, normalizeL2(vec));\n };\n const qVec = encode(question);\n const qTokens = question.toLowerCase().split(/\\W+/).filter((t) => t.length > 2);\n // ---------- 3. Score + select top sentences ----------\n const scored = sentences.map((s) => {\n const v = encode(s);\n let w = cosine$2(v, qVec);\n // small lexical bonus for overlapping words\n const lower = s.toLowerCase();\n for (const t of qTokens)\n if (lower.includes(t))\n w += 0.02;\n return { s, v, w };\n });\n scored.sort((a, b) => b.w - a.w);\n let top = scored.slice(0, topSentences);\n // ---------- 4. Recursive compression ----------\n let summary = top.map((t) => t.s).join(\" \");\n let meanVec = new Float64Array(2 * features);\n for (let r = 0; r < rounds; r++) {\n const subs = splitSentences$1(summary).slice(0, topSentences);\n const embeds = subs.map((s) => encode(s));\n const weights = embeds.map((v) => cosine$2(v, qVec));\n for (let i = 0; i < embeds.length; i++) {\n ridge.update(embeds[i], new Float64Array([weights[i]]));\n }\n // weighted mean vector\n meanVec.fill(0);\n for (let i = 0; i < embeds.length; i++) {\n const v = embeds[i], w = weights[i];\n for (let j = 0; j < v.length; j++)\n meanVec[j] += v[j] * w;\n }\n const norm = l2$1(meanVec) || 1;\n for (let j = 0; j < meanVec.length; j++)\n meanVec[j] /= norm;\n const rescored = subs.map((s) => ({\n s,\n w: cosine$2(encode(s), meanVec),\n }));\n rescored.sort((a, b) => b.w - a.w);\n summary = rescored\n .slice(0, Math.max(3, Math.floor(topSentences / 2)))\n .map((r) => r.s)\n .join(\" \");\n }\n // ---------- 5. Compose readable answer ----------\n summary = rewrite(summary);\n const firstChar = summary.charAt(0).toUpperCase() + summary.slice(1);\n const title = items[0].heading || \"Answer\";\n const prefix = personality === \"teacher\"\n ? \"Here’s a simple way to think about it:\\n\\n\"\n : personality === \"scientist\"\n ? \"From the retrieved material, we can infer:\\n\\n\"\n : \"\";\n return `${prefix}${firstChar}\\n\\n(${title}, Ω-synthesized)`;\n });\n}\n\n// Vectorization utilities for sparse and dense vectors\n// Extracted from workers for reuse\n/**\n * Compute TF-IDF vector from tokens\n */\nfunction toTfidf(tokens, idf, vmap, headingW = 1) {\n const counts = new Map();\n // crude heuristic: first 8 tokens considered heading-weighted\n for (let i = 0; i < tokens.length; i++) {\n const t = tokens[i];\n const id = vmap.get(t);\n if (id === undefined)\n continue;\n const w = (i < 8) ? 
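/* Sketch (illustrative): the encode() closure above embeds a sentence by
   writing its first `dim` character codes, scaled by 1/255, into a vector,
   L2-normalising, then lifting through the shared random-Fourier map:

     // 'Go maps' → [71/255, 111/255, 32/255, ...] → mapRFF(rff, ...)

   It is a deliberately cheap, deterministic embedding: no tokenizer and no
   learned weights, yet cosine(qVec, encode(s)) still favours sentences that
   share surface form with the question, and the +0.02 lexical bonus per
   overlapping query token breaks ties toward literal keyword matches. */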
headingW : 1;\n counts.set(id, (counts.get(id) || 0) + w);\n }\n const maxTf = Math.max(1, ...counts.values());\n const v = new Map();\n for (const [i, c] of counts) {\n const tf = 0.5 + 0.5 * (c / maxTf);\n v.set(i, tf * (idf[i] || 0));\n }\n return v;\n}\n/**\n * Cosine similarity between two sparse vectors\n */\nfunction cosineSparse(a, b) {\n let dot = 0, na = 0, nb = 0;\n for (const [i, av] of a) {\n na += av * av;\n const bv = b.get(i);\n if (bv)\n dot += av * bv;\n }\n for (const [, bv] of b)\n nb += bv * bv;\n if (!na || !nb)\n return 0;\n return dot / (Math.sqrt(na) * Math.sqrt(nb));\n}\n/**\n * Convert sparse vector to dense Float64Array\n */\nfunction sparseToDense(v, dim) {\n const x = new Float64Array(dim);\n for (const [i, val] of v)\n x[i] = val;\n return x;\n}\n/**\n * Dot product of two dense vectors\n */\nfunction dotProd$1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/**\n * Base kernel function (RBF, cosine, or poly2)\n */\nfunction baseKernel$1(a, b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd$1(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd$1(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\n/**\n * Kernel similarity between two dense vectors\n */\nfunction kernelSim(a, b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd$1(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd$1(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\n/**\n * Project sparse vector to dense using Nyström landmarks\n */\nfunction projectToDense(v, vocabSize, landmarkMat, kernel, sigma) {\n const x = sparseToDense(v, vocabSize);\n const feats = new Float64Array(landmarkMat.length);\n for (let j = 0; j < landmarkMat.length; j++) {\n const l = landmarkMat[j];\n feats[j] = baseKernel$1(x, l, kernel, sigma);\n }\n const n = Math.hypot(...feats);\n if (n > 0)\n for (let i = 0; i < feats.length; i++)\n feats[i] /= n;\n return feats;\n}\n\n// Tokenization and stemming utilities\n// Extracted from workers for reuse\n// Memo for speed\nconst STEM_CACHE = new Map();\nfunction normalizeWord(raw) {\n const k = raw;\n const cached = STEM_CACHE.get(k);\n if (cached)\n return cached;\n let w = raw.toLowerCase();\n w = w.replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, '');\n if (w.length <= 2) {\n STEM_CACHE.set(k, w);\n return w;\n }\n // plural → singular\n if (w.endsWith('ies') && w.length > 4) {\n w = w.slice(0, -3) + 'y';\n }\n else if (/(xes|ches|shes|zes|sses)$/.test(w) && w.length > 4) {\n w = w.replace(/(xes|ches|shes|zes|sses)$/, (m) => (m === 'sses' ? 
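/* Worked sketch (illustrative): toTfidf uses augmented term frequency,
   tf = 0.5 + 0.5 * (count / maxCount), so one very frequent token cannot
   drown out the rest, and the first 8 token positions are counted with the
   headingW multiplier:

     // counts { go: 4, map: 1 }, idf { go: 1.2, map: 2.0 }
     // tf(go)  = 0.5 + 0.5 * (4/4) = 1.0   → weight 1.0   * 1.2 = 1.2
     // tf(map) = 0.5 + 0.5 * (1/4) = 0.625 → weight 0.625 * 2.0 = 1.25

   cosineSparse then iterates only the Maps' non-zero entries, keeping the
   similarity O(nnz) rather than O(vocabSize). */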
'ss' : m.replace(/es$/, '')));\n }\n else if (w.endsWith('s') && !/(ss|us)$/.test(w) && w.length > 3) {\n w = w.slice(0, -1);\n }\n // conservative suffix trimming\n const rules = [\n [/ization$|isation$/, 'ize'],\n [/ational$/, 'ate'],\n [/fulness$/, 'ful'],\n [/ousness$/, 'ous'],\n [/iveness$/, 'ive'],\n [/ability$/, 'able'],\n [/ness$/, ''],\n [/ment$/, ''],\n [/ations?$/, 'ate'],\n [/izer$|iser$/, 'ize'],\n [/ally$/, 'al'],\n [/ically$/, 'ic'],\n [/ingly$|edly$/, ''],\n [/ing$|ed$/, ''],\n ];\n for (const [re, rep] of rules) {\n if (re.test(w) && w.length - rep.length >= 4) {\n w = w.replace(re, rep);\n break;\n }\n }\n STEM_CACHE.set(k, w);\n return w;\n}\nfunction tokenize$1(text, doStem) {\n const base = text.toLowerCase()\n .replace(/[`*_>~]/g, ' ')\n .replace(/[^a-z0-9]+/g, ' ')\n .split(/\\s+/)\n .filter(Boolean);\n if (!doStem)\n return base;\n const out = [];\n for (const t of base) {\n const n = normalizeWord(t);\n if (n && n.length > 1)\n out.push(n);\n }\n return out;\n}\nfunction expandQuery(q) {\n const adds = [];\n if (/\\bmap\\b/.test(q))\n adds.push('dict key value make');\n if (/\\bchan|channel\\b/.test(q))\n adds.push('goroutine concurrency select buffer');\n if (/\\berror\\b/.test(q))\n adds.push('fmt wrap unwrap sentinel try catch');\n if (/\\bstruct\\b/.test(q))\n adds.push('field method receiver init zero value');\n return q + ' ' + adds.join(' ');\n}\n\n// Index building utilities\n// Extracted from workers for reuse\n// License removed - all features are now free!\n/**\n * Build vocabulary and IDF from chunks\n */\nfunction buildVocabAndIdf(chunks, vocabSize, useStem) {\n const docsTokens = chunks.map(ch => tokenize$1((ch.heading + ' \\n' + ch.content), useStem));\n const df = new Map();\n for (const toks of docsTokens) {\n const unique = new Set(toks);\n for (const t of unique)\n df.set(t, (df.get(t) || 0) + 1);\n }\n const sorted = [...df.entries()].sort((a, b) => b[1] - a[1]).slice(0, vocabSize);\n const vocabMap = new Map(sorted.map(([tok], i) => [tok, i]));\n const idf = new Array(vocabMap.size).fill(0);\n const N = docsTokens.length;\n for (const [tok, i] of vocabMap.entries()) {\n const dfi = df.get(tok) || 1;\n idf[i] = Math.log((N + 1) / (dfi + 1)) + 1;\n }\n return { vocabMap, idf };\n}\n/**\n * Build TF-IDF vectors for all chunks\n */\nfunction buildTfidfDocs(chunks, vocabMap, idf, headingW, useStem) {\n return chunks.map(ch => {\n const toks = tokenize$1((ch.heading + ' \\n' + ch.content), useStem);\n return toTfidf(toks, idf, vocabMap, headingW);\n });\n}\n/**\n * Build Nyström landmarks from TF-IDF documents\n */\nfunction buildLandmarks(tfidfDocs, vocabSize, numLandmarks) {\n const L = Math.max(32, numLandmarks);\n const step = Math.max(1, Math.floor(Math.max(1, tfidfDocs.length) / L));\n const landmarksIdx = Array.from({ length: L }, (_, k) => Math.min(tfidfDocs.length - 1, k * step));\n const landmarkMat = landmarksIdx.map(i => sparseToDense(tfidfDocs[i], vocabSize));\n return { landmarksIdx, landmarkMat };\n}\n/**\n * Build dense projections for all TF-IDF documents\n */\nfunction buildDenseDocs(tfidfDocs, vocabSize, landmarkMat, kernel, sigma) {\n return tfidfDocs.map(v => {\n const x = sparseToDense(v, vocabSize);\n const feats = new Float64Array(landmarkMat.length);\n for (let j = 0; j < landmarkMat.length; j++) {\n const l = landmarkMat[j];\n feats[j] = baseKernel(x, l, kernel, sigma);\n }\n const n = Math.hypot(...feats);\n if (n > 0)\n for (let i = 0; i < feats.length; i++)\n feats[i] /= n;\n return feats;\n });\n}\nfunction baseKernel(a, 
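/* Worked sketch (illustrative): normalizeWord folds plurals, applies at
   most one conservative suffix rule (only when enough stem survives), and
   memoises results in STEM_CACHE:

     // normalizeWord('libraries')    → 'library'  (ies → y)
     // normalizeWord('optimization') → 'optimize' (ization → ize)
     // normalizeWord('running')      → 'runn'     (ing → '', no undoubling)
     // normalizeWord('bus')          → 'bus'      (us endings kept)

   It is a light stemmer, not Porter: breaking after the first matching
   rule limits over-stemming, at the cost of imperfect stems like 'runn'. */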
b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\nfunction dotProd(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/**\n * Build complete index from chunks\n */\nfunction buildIndex(opts) {\n // License check removed // Premium feature - requires valid license\n const { chunks, vocab, landmarks, headingW, useStem, kernel, sigma } = opts;\n // Build vocab and IDF\n const { vocabMap, idf } = buildVocabAndIdf(chunks, vocab, useStem);\n // Build TF-IDF vectors\n const tfidfDocs = buildTfidfDocs(chunks, vocabMap, idf, headingW, useStem);\n // Build landmarks\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabMap.size, landmarks);\n // Build dense projections\n const denseDocs = buildDenseDocs(tfidfDocs, vocabMap.size, landmarkMat, kernel, sigma);\n return {\n vocabMap,\n idf,\n tfidfDocs,\n landmarksIdx,\n landmarkMat,\n denseDocs,\n };\n}\n\n// Hybrid retrieval system (sparse + dense + keyword bonus)\n// Extracted from workers for reuse\n// License removed - all features are now free!\n/**\n * Compute keyword bonus scores for chunks\n */\nfunction keywordBonus(chunks, query) {\n const kws = Array.from(new Set(query.toLowerCase().split(/\\W+/).filter(t => t.length > 2)));\n const syntaxBoost = /\\b(define|declare|syntax|example|function|struct|map|interface)\\b/i.test(query);\n return chunks.map(c => {\n const text = c.rich || c.content || '';\n const lc = text.toLowerCase();\n let hit = 0;\n for (const k of kws)\n if (lc.includes(k))\n hit++;\n if (syntaxBoost && /```/.test(text))\n hit += 5; // strong bonus for code presence\n return Math.min(1.0, hit * 0.03);\n });\n}\n/**\n * Get top K indices from scores\n */\nfunction topKIndices(arr, k) {\n const idx = Array.from(arr, (_, i) => i);\n idx.sort((i, j) => (arr[j] - arr[i]));\n return idx.slice(0, k);\n}\n/**\n * Clamp value between min and max\n */\nfunction clamp$1(x, a, b) {\n return Math.max(a, Math.min(b, x));\n}\n/**\n * Perform hybrid retrieval (sparse + dense + keyword bonus)\n */\nfunction hybridRetrieve(opts) {\n // License check removed // Premium feature - requires valid license\n const { query, chunks, vocabMap, idf, tfidfDocs, denseDocs, landmarksIdx, landmarkMat, vocabSize, kernel, sigma, alpha, beta, ridge, headingW, useStem, expandQuery: shouldExpand, topK: k, prefilter, } = opts;\n // Expand query if needed\n const qexp = shouldExpand ? expandQuery(query) : query;\n const toks = tokenize$1(qexp, useStem);\n const qvec = toTfidf(toks, idf, vocabMap, headingW);\n const qdense = projectToDense(qvec, vocabSize, landmarkMat, kernel, sigma);\n // Compute sparse (TF-IDF) scores\n const tfidfScores = tfidfDocs.map(v => cosineSparse(v, qvec));\n // Compute dense (kernel) scores\n const denseScores = denseDocs.map((v) => kernelSim(v, qdense, kernel, sigma));\n // Compute keyword bonus\n const bonus = keywordBonus(chunks, query);\n // Hybrid scoring with ridge regularization\n const alphaClamped = clamp$1(alpha, 0, 1);\n const lambda = ridge !== null && ridge !== void 0 ? 
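/* Worked sketch (illustrative): keywordBonus caps each chunk at +1.0,
   granting 0.03 per distinct query keyword found plus a 5-hit bump when
   the query looks syntax-oriented and the chunk contains a fenced code
   block:

     // query: 'how to declare a map' → keywords ['how', 'declare', 'map']
     // chunk containing 'declare', 'map' and a code fence:
     //   hit = 2 + 5 = 7 → bonus = min(1.0, 7 * 0.03) = 0.21

   Inside hybridRetrieve this bonus enters the blended score scaled by
   beta, alongside the alpha-weighted dense and sparse components. */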
ridge : 0.08;\n const scores = denseScores.map((d, i) => {\n const t = tfidfScores[i];\n const b = beta * bonus[i];\n // Ridge damping on ALL components (dense, tfidf, and keyword bonus)\n const reg = 1 / (1 + lambda * (d * d + t * t + 0.5 * b * b));\n const s = reg * (alphaClamped * d + (1 - alphaClamped) * t + b);\n // soft clip extremes; helps prevent a single noisy dimension from dominating\n return Math.tanh(s);\n });\n // Pre-filter then final topK (retrieval stage)\n const pre = Math.max(k, prefilter !== null && prefilter !== void 0 ? prefilter : 0);\n const idxs = topKIndices(scores, pre);\n const finalIdxs = topKIndices(idxs.map(i => scores[i]), k).map(k => idxs[k]);\n // Build result items\n const items = finalIdxs.map(i => {\n const c = chunks[i];\n const body = (c.rich && c.rich.trim()) || (c.content && c.content.trim()) || '(see subsections)';\n return {\n score: scores[i],\n heading: c.heading,\n content: body,\n index: i,\n };\n });\n return {\n items,\n scores: finalIdxs.map(i => scores[i]),\n indices: finalIdxs,\n tfidfScores: finalIdxs.map(i => tfidfScores[i]),\n denseScores: finalIdxs.map(i => denseScores[i]),\n };\n}\n\n// OmegaRR.ts\n// Reranker + Reducer for AsterMind docs\n// - Extracts rich query–chunk features (sparse text + structural signals)\n// - Trains a tiny ridge model on-the-fly with weak supervision (per query)\n// - Produces score_rr and p_relevant\n// - Filters with threshold + MMR coverage under a character budget\n// - (v2) Optionally exposes engineered features (values + names) for TE/diagnostics\n/* ====================== Tokenization ======================= */\nconst STOP$1 = new Set([\n \"a\", \"an\", \"the\", \"and\", \"or\", \"but\", \"if\", \"then\", \"else\", \"for\", \"to\", \"of\", \"in\", \"on\", \"at\", \"by\", \"with\",\n \"is\", \"are\", \"was\", \"were\", \"be\", \"been\", \"being\", \"as\", \"from\", \"that\", \"this\", \"it\", \"its\", \"you\", \"your\",\n \"i\", \"we\", \"they\", \"he\", \"she\", \"them\", \"his\", \"her\", \"our\", \"us\", \"do\", \"does\", \"did\", \"done\", \"not\", \"no\",\n \"yes\", \"can\", \"could\", \"should\", \"would\", \"may\", \"might\", \"into\", \"about\", \"over\", \"under\", \"between\"\n]);\nfunction tokenize(s) {\n return s\n .toLowerCase()\n .replace(/[`*_#>~=\\[\\]{}()!?.:,;'\"<>|/\\\\+-]+/g, \" \")\n .split(/\\s+/)\n .filter(t => t && !STOP$1.has(t));\n}\nfunction unique(arr) { return Array.from(new Set(arr)); }\nfunction buildCorpusStats(docs) {\n const vocab = new Map();\n const tfs = [];\n const docLens = [];\n let nextId = 0;\n for (const d of docs) {\n const toks = tokenize(d);\n docLens.push(toks.length);\n const tf = new Map();\n for (const w of toks) {\n let id = vocab.get(w);\n if (id === undefined) {\n id = nextId++;\n vocab.set(w, id);\n }\n tf.set(id, (tf.get(id) || 0) + 1);\n }\n tfs.push(tf);\n }\n const N = docs.length;\n const df = Array(nextId).fill(0);\n for (const tf of tfs)\n for (const id of tf.keys())\n df[id] += 1;\n const idf = df.map(df_i => Math.log((N + 1) / (df_i + 1)) + 1);\n const avgLen = docLens.reduce((a, b) => a + b, 0) / Math.max(1, N);\n return { stats: { vocab, idf, avgLen, df }, tf: tfs, docLens };\n}\nfunction tfidfVector(tf, idf) {\n const out = new Map();\n let norm2 = 0;\n for (const [i, f] of tf) {\n const val = (f) * (idf[i] || 0);\n out.set(i, val);\n norm2 += val * val;\n }\n const norm = Math.sqrt(norm2) || 1e-12;\n for (const [i, v] of out)\n out.set(i, v / norm);\n return out;\n}\nfunction cosine$1(a, b) {\n const [small, large] = a.size < b.size ? 
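/* [Editor's note: usage sketch for hybridRetrieve, assembled from the option names destructured above; all values are illustrative assumptions.]\n const res = hybridRetrieve({\n   query: 'how do I declare a map in go',\n   chunks,\n   vocabMap: index.vocabMap, idf: index.idf, tfidfDocs: index.tfidfDocs,\n   denseDocs: index.denseDocs, landmarksIdx: index.landmarksIdx, landmarkMat: index.landmarkMat,\n   vocabSize: index.vocabMap.size,\n   kernel: 'rbf', sigma: 0.4,\n   alpha: 0.8,   // dense vs sparse blend, clamped to [0, 1]\n   beta: 0.2,    // keyword-bonus weight\n   ridge: 0.08,  // damping lambda; 0.08 is also the fallback when ridge is null\n   headingW: 2.5, useStem: true, expandQuery: true,\n   topK: 8, prefilter: 300, // prefilter widens the candidate pool before the final topK\n });\n // res.items: [{ score, heading, content, index }]; scores are tanh soft-clipped\n*/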
[a, b] : [b, a];\n let dot = 0;\n for (const [i, v] of small) {\n const u = large.get(i);\n if (u !== undefined)\n dot += v * u;\n }\n return dot;\n}\nfunction bm25Score(qTf, dTf, stats, dLen, k1 = 1.5, b = 0.75) {\n let score = 0;\n for (const [i] of qTf) {\n const f = dTf.get(i) || 0;\n if (f <= 0)\n continue;\n const idf = Math.log(((stats.df[i] || 0) + 0.5) / ((stats.idf.length - (stats.df[i] || 0)) + 0.5) + 1);\n const denom = f + k1 * (1 - b + b * (dLen / (stats.avgLen || 1)));\n score += idf * ((f * (k1 + 1)) / (denom || 1e-12));\n }\n return score;\n}\n/* ========== Light Random Projection from TF-IDF (dense hint) ========== */\nfunction projectSparse(vec, dim, seed = 1337) {\n // deterministic per (feature, j) hash: simple LCG/xorshift mix\n const out = new Float64Array(dim);\n for (const [i, v] of vec) {\n let s = (i * 2654435761) >>> 0;\n for (let j = 0; j < dim; j++) {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n const r = ((s >>> 0) / 4294967296) * 2 - 1; // [-1,1]\n out[j] += v * r;\n }\n }\n let n2 = 0;\n for (let j = 0; j < dim; j++)\n n2 += out[j] * out[j];\n const n = Math.sqrt(n2) || 1e-12;\n for (let j = 0; j < dim; j++)\n out[j] /= n;\n return out;\n}\n/* ===================== Structural Signals ===================== */\nfunction containsGoCodeBlock(s) {\n return /```+\\s*go([\\s\\S]*?)```/i.test(s) || /\\bfunc\\s+\\w+\\s*\\(.*\\)\\s*\\w*\\s*{/.test(s);\n}\nfunction containsCodeBlock(s) {\n return /```+/.test(s) || /{[^}]*}/.test(s);\n}\nfunction headingQueryMatch(head, q) {\n const ht = unique(tokenize(head));\n const qt = new Set(tokenize(q));\n if (ht.length === 0 || qt.size === 0)\n return 0;\n let hit = 0;\n for (const t of ht)\n if (qt.has(t))\n hit++;\n return hit / ht.length;\n}\nfunction jaccard$1(a, b) {\n const A = new Set(tokenize(a));\n const B = new Set(tokenize(b));\n let inter = 0;\n for (const t of A)\n if (B.has(t))\n inter++;\n const uni = A.size + B.size - inter;\n return uni === 0 ? 0 : inter / uni;\n}\nfunction golangSpecFlag(s) {\n return /(golang\\.org|go\\.dev|pkg\\.go\\.dev)/i.test(s) ? 1 : 0;\n}\nfunction buildFeatures$1(q, chunk, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, cLen, projQ, projC) {\n var _a;\n const f = [];\n const names = [];\n // 1) Sparse sims\n const cos = cosine$1(qTfIdf, cTfIdf);\n f.push(cos);\n names.push(\"cosine_tfidf\");\n const bm25 = bm25Score(qTfRaw, cTfRaw, stats, cLen);\n f.push(bm25);\n names.push(\"bm25\");\n // 2) Heading & lexical overlaps\n const hMatch = headingQueryMatch(chunk.heading || \"\", q);\n f.push(hMatch);\n names.push(\"heading_match_frac\");\n const jac = jaccard$1(q, chunk.content || \"\");\n f.push(jac);\n names.push(\"jaccard_tokens\");\n // 3) Structural flags\n const hasGo = containsGoCodeBlock(chunk.rich || chunk.content || \"\");\n const hasCode = containsCodeBlock(chunk.rich || chunk.content || \"\");\n f.push(hasGo ? 1 : 0);\n names.push(\"flag_go_code\");\n f.push(hasCode ? 1 : 0);\n names.push(\"flag_any_code\");\n // 4) Source cues\n f.push(golangSpecFlag(chunk.content || \"\") ? 1 : 0);\n names.push(\"flag_go_spec_link\");\n // 5) Prior score (baseline)\n f.push(((_a = chunk.score_base) !== null && _a !== void 0 ? 
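/* [Editor's note] The idf term inside bm25Score above appears inverted relative to the textbook BM25 weight, and it uses stats.idf.length (the vocabulary size) where the document count N normally appears:\n   standard: idf_i = ln( (N - df_i + 0.5) / (df_i + 0.5) + 1 )\n   above:    idf_i = ln( (df_i + 0.5) / (|vocab| - df_i + 0.5) + 1 )\n As written, frequent terms receive larger weights. Because bm25 is only one engineered feature fed to a per-query ridge model, which can learn a negative coefficient, reranking can still function, but the feature's sign is reversed from the usual convention.\n*/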
_a : 0));\n names.push(\"prior_score_base\");\n // 6) Length heuristics (prefer concise answers)\n const lenChars = (chunk.content || \"\").length;\n f.push(1 / Math.sqrt(1 + lenChars));\n names.push(\"len_inv_sqrt\");\n // 7) Dense hint from projection\n if (projQ && projC) {\n let dot = 0, l1 = 0;\n for (let i = 0; i < projQ.length; i++) {\n dot += projQ[i] * projC[i];\n l1 += Math.abs(projQ[i] - projC[i]);\n }\n f.push(dot);\n names.push(\"proj_dot\");\n f.push(l1 / projQ.length);\n names.push(\"proj_l1mean\");\n }\n return { names, values: f };\n}\n/* ======================== Ridge Model ======================== */\nclass Ridge {\n constructor() {\n this.w = null;\n this.mu = null;\n this.sigma = null;\n }\n fit(X, y, lambda = 1e-2) {\n var _a;\n const n = X.length;\n const d = ((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n if (n === 0 || d === 0) {\n this.w = new Float64Array(d);\n return;\n }\n // standardize\n const mu = new Float64Array(d);\n const sig = new Float64Array(d);\n for (let j = 0; j < d; j++) {\n let m = 0;\n for (let i = 0; i < n; i++)\n m += X[i][j];\n m /= n;\n mu[j] = m;\n let v = 0;\n for (let i = 0; i < n; i++) {\n const z = X[i][j] - m;\n v += z * z;\n }\n sig[j] = Math.sqrt(v / n) || 1;\n }\n const Z = Array.from({ length: n }, (_, i) => new Float64Array(d));\n for (let i = 0; i < n; i++)\n for (let j = 0; j < d; j++)\n Z[i][j] = (X[i][j] - mu[j]) / sig[j];\n // A = Z^T Z + λI, Zy = Z^T y\n const A = Array.from({ length: d }, () => new Float64Array(d));\n const Zy = new Float64Array(d);\n for (let i = 0; i < n; i++) {\n const zi = Z[i];\n const yi = y[i];\n for (let j = 0; j < d; j++) {\n Zy[j] += zi[j] * yi;\n const zij = zi[j];\n for (let k = 0; k <= j; k++)\n A[j][k] += zij * zi[k];\n }\n }\n for (let j = 0; j < d; j++) {\n for (let k = 0; k < j; k++)\n A[k][j] = A[j][k];\n A[j][j] += lambda;\n }\n // Cholesky solve\n const L = Array.from({ length: d }, () => new Float64Array(d));\n for (let i = 0; i < d; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n L[i][j] = (i === j) ? Math.sqrt(Math.max(sum, 1e-12)) : (sum / (L[j][j] || 1e-12));\n }\n }\n const z = new Float64Array(d);\n for (let i = 0; i < d; i++) {\n let s = Zy[i];\n for (let k = 0; k < i; k++)\n s -= L[i][k] * z[k];\n z[i] = s / (L[i][i] || 1e-12);\n }\n const w = new Float64Array(d);\n for (let i = d - 1; i >= 0; i--) {\n let s = z[i];\n for (let k = i + 1; k < d; k++)\n s -= L[k][i] * w[k];\n w[i] = s / (L[i][i] || 1e-12);\n }\n this.w = w;\n this.mu = mu;\n this.sigma = sig;\n }\n predict(x) {\n if (!this.w || !this.mu || !this.sigma)\n return 0;\n let s = 0;\n for (let j = 0; j < this.w.length; j++) {\n const z = (x[j] - this.mu[j]) / this.sigma[j];\n s += this.w[j] * z;\n }\n return s;\n }\n}\n/* ===================== Weak Supervision ===================== */\nfunction generateWeakLabel(q, chunk, feats) {\n var _a;\n const txt = (chunk.rich || chunk.content || \"\");\n let y = 0;\n const qIsGoFunc = /\\bgo\\b/.test(q.toLowerCase()) && /(define|declare|function|func)/i.test(q);\n if (qIsGoFunc && containsGoCodeBlock(txt))\n y = Math.max(y, 1.0);\n const headHit = headingQueryMatch(chunk.heading || \"\", q);\n if (headHit >= 0.34 && containsCodeBlock(txt))\n y = Math.max(y, 0.8);\n const cosIdx = feats.names.indexOf(\"cosine_tfidf\");\n const bm25Idx = feats.names.indexOf(\"bm25\");\n const cos = cosIdx >= 0 ? feats.values[cosIdx] : 0;\n const bm = bm25Idx >= 0 ? 
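/* [Editor's note: minimal sketch of the Ridge class above (standardize, normal equations, Cholesky solve); the data is illustrative.]\n const model = new Ridge();\n model.fit([[1, 0], [0, 1], [1, 1], [0, 0]], [1, 0, 1, 0], 1e-2); // y tracks the first feature\n model.predict([1, 0]); // should score above predict([0, 1]); no intercept is added back, so only relative order is meaningful\n*/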
feats.values[bm25Idx] : 0;\n if (cos > 0.25)\n y = Math.max(y, 0.6);\n if (bm > 1.0)\n y = Math.max(y, 0.6);\n const priorIdx = feats.names.indexOf(\"prior_score_base\");\n const prior = priorIdx >= 0 ? feats.values[priorIdx] : 0;\n if (((_a = chunk.score_base) !== null && _a !== void 0 ? _a : 0) > 0)\n y = Math.max(y, Math.min(0.6, 0.2 + 0.5 * prior));\n return y;\n}\nfunction sigmoid(x) {\n if (x >= 0) {\n const z = Math.exp(-x);\n return 1 / (1 + z);\n }\n else {\n const z = Math.exp(x);\n return z / (1 + z);\n }\n}\n/* ========================= MMR Filter ========================= */\nfunction mmrFilter(scored, lambda = 0.7, budgetChars = 1200) {\n const sel = [];\n const docs = scored.map(s => s.content || \"\");\n const { stats, tf: tfList } = buildCorpusStats(docs);\n const tfidf = tfList.map(tf => tfidfVector(tf, stats.idf));\n const selectedIdx = new Set();\n let used = 0;\n while (selectedIdx.size < scored.length) {\n let bestIdx = -1, bestVal = -Infinity;\n for (let i = 0; i < scored.length; i++) {\n if (selectedIdx.has(i))\n continue;\n const cand = scored[i];\n let red = 0;\n for (const j of selectedIdx) {\n const sim = cosine$1(tfidf[i], tfidf[j]);\n if (sim > red)\n red = sim;\n }\n const val = lambda * cand.score_rr - (1 - lambda) * red;\n if (val > bestVal) {\n bestVal = val;\n bestIdx = i;\n }\n }\n if (bestIdx < 0)\n break;\n const chosen = scored[bestIdx];\n const addLen = (chosen.content || \"\").length;\n if (used + addLen > budgetChars && sel.length > 0)\n break;\n sel.push(chosen);\n used += addLen;\n selectedIdx.add(bestIdx);\n }\n return sel;\n}\n/* ========================= Public API ========================= */\n/** Train per-query ridge model and score chunks. */\nfunction rerank(query, chunks, opts = {}) {\n var _a, _b;\n // License check removed // Premium feature - requires valid license\n const { lambdaRidge = 1e-2, randomProjDim = 32, exposeFeatures = true, attachFeatureNames = false, } = opts;\n const docs = [query, ...chunks.map(c => c.content || \"\")];\n const { stats, tf: tfRaw, docLens } = buildCorpusStats(docs);\n const tfidfAll = tfRaw.map(tf => tfidfVector(tf, stats.idf));\n const qTfRaw = tfRaw[0];\n const qTfIdf = tfidfAll[0];\n const projQ = randomProjDim > 0 ? projectSparse(qTfIdf, randomProjDim) : undefined;\n const X = [];\n const y = [];\n const featPacks = [];\n for (let i = 0; i < chunks.length; i++) {\n const c = chunks[i];\n const cTfRaw = tfRaw[i + 1];\n const cTfIdf = tfidfAll[i + 1];\n const projC = randomProjDim > 0 ? projectSparse(cTfIdf, randomProjDim, 1337 + i) : undefined;\n const feats = buildFeatures$1(query, c, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, docLens[i + 1] || 1, projQ, projC);\n featPacks.push(feats);\n X.push(feats.values);\n const label = generateWeakLabel(query, c, feats);\n y.push(label);\n }\n const allSame = y.every(v => Math.abs(v - y[0]) < 1e-9);\n if (allSame) {\n const cosIdx = featPacks[0].names.indexOf(\"cosine_tfidf\");\n if (cosIdx >= 0) {\n for (let i = 0; i < y.length; i++)\n y[i] = Math.max(0, Math.min(1, 0.2 + 0.6 * X[i][cosIdx]));\n }\n }\n const rr = new Ridge();\n rr.fit(X, y, lambdaRidge);\n let minS = Infinity, maxS = -Infinity;\n const rawScores = X.map(x => rr.predict(x));\n for (const s of rawScores) {\n if (s < minS)\n minS = s;\n if (s > maxS)\n maxS = s;\n }\n const range = Math.max(1e-9, maxS - minS);\n const featureNames = attachFeatureNames ? (_b = (_a = featPacks[0]) === null || _a === void 0 ? void 0 : _a.names) !== null && _b !== void 0 ? 
_b : [] : undefined;\n const scored = chunks.map((c, i) => {\n const s01 = (rawScores[i] - minS) / range;\n const p = sigmoid((rawScores[i] - 0.5 * (minS + maxS)) / (0.2 * range + 1e-6));\n const base = Object.assign(Object.assign({}, c), { score_rr: s01, p_relevant: p });\n if (exposeFeatures)\n base._features = X[i];\n if (featureNames)\n base._feature_names = featureNames;\n return base;\n });\n scored.sort((a, b) => b.score_rr - a.score_rr);\n return scored;\n}\n/** Filter scored chunks using probability/near-top thresholds and MMR coverage. */\nfunction filterMMR(scored, opts = {}) {\n // License check removed // Premium feature - requires valid license\n const { probThresh = 0.45, epsilonTop = 0.05, useMMR = true, mmrLambda = 0.7, budgetChars = 1200 } = opts;\n if (scored.length === 0)\n return [];\n const top = scored[0].score_rr;\n const bandKept = scored.filter(s => s.p_relevant >= probThresh && s.score_rr >= (top - epsilonTop));\n const seed = bandKept.length > 0 ? bandKept : [scored[0]];\n if (!useMMR) {\n const out = [];\n let used = 0;\n for (const s of seed) {\n const add = (s.content || \"\").length;\n if (used + add > budgetChars && out.length > 0)\n break;\n out.push(s);\n used += add;\n }\n return out;\n }\n const boosted = scored.map(s => (Object.assign(Object.assign({}, s), { score_rr: seed.includes(s) ? s.score_rr + 0.01 : s.score_rr })));\n return mmrFilter(boosted, mmrLambda, budgetChars);\n}\n/** Convenience: run rerank then filter. */\nfunction rerankAndFilter(query, chunks, opts = {}) {\n // License check removed // Premium feature - requires valid license\n const scored = rerank(query, chunks, opts);\n return filterMMR(scored, opts);\n}\n/* ========================= Debug Utilities ========================= */\nfunction explainFeatures(query, chunks, opts = {}) {\n var _a;\n const rpd = (_a = opts.randomProjDim) !== null && _a !== void 0 ? _a : 32;\n const docs = [query, ...chunks.map(c => c.content || \"\")];\n const { stats, tf: tfRaw } = buildCorpusStats(docs);\n const tfidfAll = tfRaw.map(tf => tfidfVector(tf, stats.idf));\n const projQ = rpd > 0 ? projectSparse(tfidfAll[0], rpd) : undefined;\n const namesRef = [];\n const rows = [];\n for (let i = 0; i < chunks.length; i++) {\n const feats = buildFeatures$1(query, chunks[i], tfidfAll[0], tfidfAll[i + 1], tfRaw[0], tfRaw[i + 1], stats, 1, projQ, rpd > 0 ? 
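/* [Editor's note: end-to-end sketch for the public API above; the option values restate the defaults visible in rerank and filterMMR.]\n const kept = rerankAndFilter('how do I define a function in go', chunks, {\n   lambdaRidge: 1e-2,  // per-query ridge strength\n   randomProjDim: 32,  // dense-hint projection size; 0 disables proj_dot and proj_l1mean\n   probThresh: 0.45,   // keep p_relevant >= 0.45 ...\n   epsilonTop: 0.05,   // ... and score_rr within 0.05 of the top\n   useMMR: true, mmrLambda: 0.7,\n   budgetChars: 1200,  // character budget for the kept set\n });\n // each kept chunk carries score_rr in [0, 1], p_relevant, and _features (exposeFeatures defaults to true)\n*/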
projectSparse(tfidfAll[i + 1], rpd, 1337 + i) : undefined);\n if (namesRef.length === 0)\n namesRef.push(...feats.names);\n rows.push({ heading: chunks[i].heading, features: feats.values });\n }\n return { names: namesRef, rows };\n}\n\n// OmegaSumDet.ts — Deterministic, context-locked summarizer (v2.2)\n// -----------------------------------------------------------------------------\n// Goals\n// - ONLY summarize from the already-kept, top-ranked chunks (no leakage).\n// - Deterministic ordering, scoring, and composition.\n// - Stable weighting with explicit, normalized features.\n// - Code is treated as atomic and only included when query-aligned.\n// - Section diversity is capped to keep answers focused.\n// - Scored, stemmed, stopword-aware heading alignment (Dice) + small intent & RR boosts.\n// - Intent-aware code gating (e.g., require `func` for \"define function\" queries).\n// -----------------------------------------------------------------------------\nconst DEFAULTS = {\n maxAnswerChars: 900,\n maxBullets: 6,\n preferCode: true,\n includeCitations: true,\n addFooter: true,\n teWeight: 0.25,\n queryWeight: 0.45,\n evidenceWeight: 0.20,\n rrWeight: 0.10,\n codeBonus: 0.05,\n headingBonus: 0.04,\n jaccardDedupThreshold: 0.6,\n allowOffTopic: false,\n minQuerySimForCode: 0.40,\n maxSectionsInAnswer: 1,\n focusTopAlignedHeadings: 2,\n};\nfunction summarizeDeterministic(query, kept, opts) {\n var _a, _b, _c;\n // License check removed // Premium feature - requires valid license\n const O = Object.assign(Object.assign({}, DEFAULTS), (opts || {}));\n // 0) Normalize kept list with stable rrRank/rrScore defaults\n const K = kept.map((c, i) => (Object.assign(Object.assign({}, c), { rrRank: (typeof c.rrRank === \"number\" ? c.rrRank : i), rrScore: (typeof c.rrScore === \"number\" ? c.rrScore : (kept.length - i) / Math.max(1, kept.length)) })));\n if (K.length === 0) {\n return { text: \"No answer could be composed from the provided context.\", cites: [] };\n }\n // 1) Scored, stemmed, stopword-aware heading alignment + RR + intent bumps\n const intent = detectIntent(query);\n // normalize rrScore across kept for a small deterministic boost\n let rrMin = Infinity, rrMax = -Infinity;\n for (const c of K) {\n rrMin = Math.min(rrMin, (_a = c.rrScore) !== null && _a !== void 0 ? _a : 0);\n rrMax = Math.max(rrMax, (_b = c.rrScore) !== null && _b !== void 0 ? _b : 0);\n }\n const rrSpan = (rrMax - rrMin) || 1;\n function intentHit(c) {\n const hay = (c.heading + ' ' + (c.content || '') + ' ' + (c.rich || '')).toLowerCase();\n let hit = 0;\n if (intent.function && /\\bfunc\\b|\\bfunction\\b/.test(hay))\n hit += 1;\n if (intent.variable && /\\bvar\\b|\\bvariable\\b|\\b:=\\b/.test(hay))\n hit += 1;\n if (intent.constant && /\\bconst\\b|\\bconstant\\b/.test(hay))\n hit += 1;\n if (intent.concurrency && /\\bgoroutine\\b|\\bgo\\s+func\\b|\\bchan(nel)?\\b|\\bselect\\b/.test(hay))\n hit += 1;\n if (intent.loop && /\\bfor\\b/.test(hay))\n hit += 1;\n return Math.min(1, hit / 2); // 0..1\n }\n const alignScores = K.map(ch => diceStemmed(query, ch.heading)); // 0..1\n const composite = K.map((c, i) => {\n var _a;\n const align = alignScores[i] || 0;\n const rrNorm = (((_a = c.rrScore) !== null && _a !== void 0 ? 
_a : 0) - rrMin) / rrSpan; // 0..1\n const ih = intentHit(c); // 0..1\n // alignment dominates; rr+intent provide gentle nudges\n return align + 0.15 * rrNorm + 0.20 * ih;\n });\n // rank by composite desc, break ties by rrRank asc\n const allByComposite = K.map((_, i) => i).sort((i, j) => {\n if (composite[j] !== composite[i])\n return composite[j] - composite[i];\n return (K[i].rrRank - K[j].rrRank);\n });\n // choose top-N aligned headings; ensure at least one is chosen\n const alignedIdxs = allByComposite.slice(0, Math.max(1, O.focusTopAlignedHeadings));\n const allowedChunkIdx = new Set(alignedIdxs);\n // 2) Candidate extraction: sentences + fenced code blocks; stable order\n const queryTok = tokens(query);\n const candidates = [];\n for (let i = 0; i < K.length; i++) {\n if (!allowedChunkIdx.has(i))\n continue; // HARD mask to top aligned headings\n const ch = K[i];\n const base = (_c = ch.rich) !== null && _c !== void 0 ? _c : ch.content;\n const parts = splitCodeAware(base); // preserves order; code blocks are atomic\n let localSentIdx = 0;\n for (const part of parts) {\n const hasCode = part.kind === \"code\";\n const sentList = hasCode ? [part.text] : splitSentences(part.text);\n for (const s of sentList) {\n const trimmed = s.trim();\n if (!trimmed)\n continue;\n const f = buildFeatures(trimmed, queryTok, ch, O, hasCode);\n candidates.push({\n sent: trimmed,\n chunkIdx: i,\n sentIdx: localSentIdx++,\n heading: ch.heading,\n hasCode,\n features: f,\n score: 0,\n });\n }\n }\n }\n if (candidates.length === 0) {\n return { text: \"No answer could be composed from the aligned context.\", cites: [] };\n }\n // 3) Normalize numeric features across candidates → [0,1]\n normalizeFeature(candidates, \"querySim\");\n normalizeFeature(candidates, \"teGain\");\n normalizeFeature(candidates, \"evidence\");\n normalizeFeature(candidates, \"rr\");\n // 4) Combine with explicit weights + strict, intent-aware gates (deterministic)\n for (const c of candidates) {\n const f = c.features;\n let s = O.queryWeight * f.querySim +\n O.teWeight * f.teGain +\n O.evidenceWeight * f.evidence +\n O.rrWeight * f.rr;\n // Intent-aware code gating\n if (c.hasCode) {\n const align = alignScores[c.chunkIdx] || 0;\n const txt = c.sent.toLowerCase();\n let intentOK = true;\n if (intent.function)\n intentOK = /\\bfunc\\b/.test(txt);\n if (intent.variable)\n intentOK = intentOK && (/\\bvar\\b/.test(txt) || /\\b:=\\b/.test(txt));\n if (intent.constant)\n intentOK = intentOK && /\\bconst\\b/.test(txt);\n if (intent.concurrency)\n intentOK = intentOK && (/\\bgoroutine\\b|\\bgo\\s+func\\b|\\bchan(nel)?\\b|\\bselect\\b/.test(txt));\n if (!intentOK || align < 0.25 || f.querySim < O.minQuerySimForCode || f.codeRelevance <= 0.2) {\n s *= 0.5; // neuter misaligned code\n }\n else if (O.preferCode) {\n s += O.codeBonus * Math.min(1, f.codeRelevance * 1.25) * align;\n }\n }\n // Heading bonus scaled by composite alignment\n const hb = Math.min(1, composite[c.chunkIdx] || 0);\n if (hb > 0)\n s += O.headingBonus * hb;\n // Off-topic heading handling (shouldn’t happen due to hard mask, but keep as fail-safe)\n if (hb === 0 && !O.allowOffTopic) {\n s *= 0.1; // near-zero\n }\n c.score = clamp01p5(s);\n }\n // 5) TOTAL order sort with explicit tie-breakers (stable)\n candidates.sort((a, b) => {\n if (b.score !== a.score)\n return b.score - a.score;\n const ar = K[a.chunkIdx].rrRank, br = K[b.chunkIdx].rrRank;\n if (ar !== br)\n return ar - br; // better reranker rank first\n if (a.chunkIdx !== b.chunkIdx)\n return a.chunkIdx - 
b.chunkIdx; // earlier chunk first\n if (a.sentIdx !== b.sentIdx)\n return a.sentIdx - b.sentIdx; // earlier sentence first\n return a.sent.localeCompare(b.sent); // final deterministic tie-breaker\n });\n // 6) Deterministic dedup (Jaccard) — keep first occurrence only\n const picked = [];\n const seen = [];\n for (const c of candidates) {\n const t = c.sent.toLowerCase();\n let dup = false;\n for (const s of seen) {\n if (jaccardText(t, s) >= O.jaccardDedupThreshold) {\n dup = true;\n break;\n }\n }\n if (!dup) {\n picked.push(c);\n seen.push(t);\n }\n }\n // 7) Compose answer under budget with section cap\n const out = [];\n const citesSet = new Set();\n let budget = O.maxAnswerChars;\n const usedHeadings = new Set();\n for (const c of picked) {\n const h = K[c.chunkIdx].heading;\n const alreadyUsed = usedHeadings.has(h);\n // Enforce max distinct headings\n if (!alreadyUsed && usedHeadings.size >= O.maxSectionsInAnswer)\n continue;\n const unit = (picked.length > 1 ? `- ${c.sent}` : c.sent);\n const cost = unit.length + (out.length ? 1 : 0);\n if (cost > budget)\n continue;\n out.push(unit);\n budget -= cost;\n usedHeadings.add(h);\n if (O.includeCitations)\n citesSet.add(h);\n if (out.length >= O.maxBullets)\n break;\n }\n // Fallback if nothing fits budget\n if (out.length === 0 && picked.length > 0) {\n const c = picked[0];\n out.push(c.sent);\n citesSet.add(K[c.chunkIdx].heading);\n }\n let text = picked.length > 1 ? out.join(\"\\n\") : out.join(\"\");\n const cites = [...citesSet].map(h => ({ heading: h }));\n if (O.addFooter && cites.length > 0) {\n text += `\\n\\n---\\n**Sources used:**\\n` + cites.map(c => `- ${c.heading}`).join(\"\\n\");\n }\n return { text, cites };\n}\n/* -------------------- helpers (deterministic) -------------------- */\nfunction clamp01p5(x) {\n if (!Number.isFinite(x))\n return 0;\n return Math.max(0, Math.min(1.5, x));\n}\nfunction tokens(s) {\n var _a;\n return (_a = s.toLowerCase().match(/[a-z0-9_]+/g)) !== null && _a !== void 0 ? _a : [];\n}\n// code-aware split: returns a sequence of {kind: \"code\"|\"text\", text}\nfunction splitCodeAware(raw) {\n const out = [];\n const re = /```([\\s\\S]*?)```/g;\n let last = 0, m;\n while ((m = re.exec(raw)) !== null) {\n const before = raw.slice(last, m.index);\n if (before.trim())\n out.push({ kind: \"text\", text: normalizeWS(before) });\n const code = m[1];\n if (code.trim())\n out.push({ kind: \"code\", text: \"```\" + normalizeWS(code) + \"```\" });\n last = m.index + m[0].length;\n }\n const tail = raw.slice(last);\n if (tail.trim())\n out.push({ kind: \"text\", text: normalizeWS(tail) });\n return out;\n}\n// conservative sentence splitter (period, question, exclamation)\nfunction splitSentences(text) {\n // split on sentence boundaries; also split on blank lines to avoid giant paragraphs\n const parts = text.split(/(?<=[\\.\\?\\!])\\s+(?=[A-Z0-9[`])/g);\n return parts.flatMap(p => p.split(/\\n{2,}/g)).map(s => s.trim()).filter(Boolean);\n}\nfunction normalizeWS(s) {\n return s.replace(/\\r/g, \"\").replace(/[ \\t]+/g, \" \").replace(/\\n{3,}/g, \"\\n\\n\").trim();\n}\nfunction bow(ts) {\n var _a;\n const m = new Map();\n for (const t of ts)\n m.set(t, ((_a = m.get(t)) !== null && _a !== void 0 ? _a : 0) + 1);\n return m;\n}\nfunction cosine(a, b) {\n let dot = 0, na = 0, nb = 0;\n for (const [, v] of a)\n na += v * v;\n for (const [, v] of b)\n nb += v * v;\n const n = Math.sqrt(na || 1e-9) * Math.sqrt(nb || 1e-9);\n if (n === 0)\n return 0;\n const smaller = a.size < b.size ? 
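/* [Editor's note: usage sketch for summarizeDeterministic; mapping the reranker output onto rrRank and rrScore is an assumption that mirrors the stable defaults applied above.]\n const { text, cites } = summarizeDeterministic(query, kept.map((c, i) => ({\n   heading: c.heading, content: c.content, rich: c.rich,\n   rrRank: i, rrScore: c.score_rr,\n })), { maxAnswerChars: 900, maxBullets: 6, maxSectionsInAnswer: 1 });\n // cites: [{ heading }] lists the distinct section headings used in the answer\n*/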
a : b;\n const larger = a.size < b.size ? b : a;\n for (const [k, v] of smaller) {\n const w = larger.get(k);\n if (w)\n dot += v * w;\n }\n const val = dot / n;\n return Number.isFinite(val) ? Math.max(0, Math.min(1, val)) : 0;\n}\n// normalize each named feature across candidates → [0,1] deterministically\nfunction normalizeFeature(cands, key) {\n var _a, _b;\n let min = Infinity, max = -Infinity;\n for (const c of cands) {\n const v = (_a = c.features[key]) !== null && _a !== void 0 ? _a : 0;\n const vv = Number.isFinite(v) ? v : 0;\n if (vv < min)\n min = vv;\n if (vv > max)\n max = vv;\n }\n const span = (max - min) || 1;\n for (const c of cands) {\n const v = (_b = c.features[key]) !== null && _b !== void 0 ? _b : 0;\n const vv = Number.isFinite(v) ? v : 0;\n c.features[key] = (vv - min) / span;\n }\n}\nfunction jaccardText(a, b) {\n const A = new Set(a.split(/\\W+/).filter(Boolean));\n const B = new Set(b.split(/\\W+/).filter(Boolean));\n let inter = 0;\n for (const x of A)\n if (B.has(x))\n inter++;\n return inter / Math.max(1, A.size + B.size - inter);\n}\n/* ---------- stopwords + intent ---------- */\nconst STOP = new Set([\n 'a', 'an', 'the', 'and', 'or', 'but', 'if', 'then', 'else', 'of', 'in', 'on', 'for', 'to', 'from', 'by',\n 'with', 'without', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'as', 'at', 'it', 'this', 'that',\n 'these', 'those', 'i', 'you', 'he', 'she', 'we', 'they', 'do', 'does', 'did', 'how', 'what', 'when',\n 'where', 'why', 'which', 'can', 'could', 'should', 'would'\n]);\nfunction filterStops(ts) {\n return ts.filter(t => !STOP.has(t));\n}\nfunction detectIntent(q) {\n const s = q.toLowerCase();\n return {\n function: /\\bfunc(tion|)\\b|\\bdefine\\b|\\bdeclar(e|ation)\\b|\\bprototype\\b/.test(s),\n variable: /\\bvar(iable)?\\b|\\bdeclare\\b/.test(s),\n constant: /\\bconst(ant)?\\b/.test(s),\n concurrency: /\\bconcurrency\\b|\\bgoroutine\\b|\\bchannel\\b|\\bselect\\b/.test(s),\n loop: /\\bfor\\s+loop\\b|\\bloop\\b|\\bfor\\b/.test(s),\n };\n}\n/* ---------- light stemming + stemmed Dice alignment (0..1) ---------- */\nfunction stemToken(w) {\n let s = w.toLowerCase().replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, '');\n if (s.length <= 2)\n return s;\n if (s.endsWith('ies') && s.length > 4)\n s = s.slice(0, -3) + 'y';\n else if (/(xes|ches|shes|zes|sses)$/.test(s) && s.length > 4)\n s = s.replace(/(xes|ches|shes|zes|sses)$/, (m) => (m === 'sses' ? 'ss' : m.replace(/es$/, '')));\n else if (s.endsWith('s') && !/(ss|us)$/.test(s) && s.length > 3)\n s = s.slice(0, -1);\n const rules = [\n [/ization$|isation$/, 'ize'],\n [/ational$/, 'ate'],\n [/fulness$/, 'ful'],\n [/ousness$/, 'ous'],\n [/iveness$/, 'ive'],\n [/ability$/, 'able'],\n [/ness$/, ''],\n [/ment$/, ''],\n [/ations?$/, 'ate'],\n [/izer$|iser$/, 'ize'],\n [/ally$/, 'al'],\n [/ically$/, 'ic'],\n [/ingly$|edly$/, ''],\n [/ing$|ed$/, ''],\n ];\n for (const [re, rep] of rules) {\n if (re.test(s) && s.length - rep.length >= 4) {\n s = s.replace(re, rep);\n break;\n }\n }\n return s;\n}\nfunction stemTokens(str) {\n var _a;\n const raw = ((_a = str.toLowerCase().match(/[a-z0-9_]+/g)) !== null && _a !== void 0 ? _a : []);\n const stemmed = raw.map(stemToken).filter(Boolean);\n return filterStops(stemmed);\n}\n// Dice coefficient over stemmed tokens (0..1). 
Robust for short strings.\nfunction diceStemmed(a, b) {\n const A = new Set(stemTokens(a));\n const B = new Set(stemTokens(b));\n if (A.size === 0 || B.size === 0)\n return 0;\n let inter = 0;\n for (const t of A)\n if (B.has(t))\n inter++;\n return (2 * inter) / (A.size + B.size);\n}\n// Overlap between code tokens and query tokens (fraction of code tokens in query)\nfunction cCodeRelevance(sentence, queryTokens) {\n if (!sentence.includes(\"```\"))\n return 0;\n const codeTokens = tokens(sentence.replace(/```/g, \"\"));\n if (codeTokens.length === 0)\n return 0;\n const Q = new Set(queryTokens);\n let overlap = 0;\n for (const t of codeTokens) {\n if (Q.has(t))\n overlap++;\n }\n return overlap / codeTokens.length;\n}\n// Feature builder (deterministic). If you have TE per chunk/sentence, inject it here.\nfunction buildFeatures(sentence, queryTokens, ch, _O, hasCode) {\n // querySim (raw) via cosine on hashed BoW; normalized later\n const qvec = bow(queryTokens);\n const svec = bow(tokens(sentence));\n const querySimRaw = cosine(qvec, svec); // 0..1\n // sentence↔heading local alignment (stemmed); treat ≥0.15 as aligned\n const localAlignScore = diceStemmed(sentence, ch.heading);\n const headingAligned = localAlignScore >= 0.15;\n // teGain: placeholder (replace with your TE if you have it)\n const teGainRaw = headingAligned ? 1 : 0;\n // evidence: proxy for coverage/utility (bounded length effect)\n const evRaw = Math.min(1, tokens(sentence).length / 40);\n const rrRaw = (typeof ch.rrScore === \"number\") ? ch.rrScore : 0;\n const codeRel = hasCode ? cCodeRelevance(sentence, queryTokens) : 0;\n return {\n querySim: querySimRaw,\n teGain: teGainRaw,\n evidence: evRaw,\n rr: rrRaw,\n headingAligned,\n codeRelevance: codeRel,\n };\n}\n\n// infoflow/TransferEntropy.ts\n// Phase-1: streaming Transfer Entropy (TE) with linear-Gaussian approximation.\n// TE(X→Y) ≈ 1/2 * log( Var[e | Y_past] / Var[e | Y_past, X_past] ), in nats (set bits=true for /ln2)\nfunction zscore(v) {\n const n = v.length || 1;\n let m = 0;\n for (const x of v)\n m += x;\n m /= n;\n let s2 = 0;\n for (const x of v) {\n const d = x - m;\n s2 += d * d;\n }\n const inv = 1 / Math.sqrt(s2 / Math.max(1, n - 1) || 1e-12);\n return v.map(x => (x - m) * inv);\n}\nfunction ridgeSolve(X, y, l2) {\n var _a;\n // Solve (X^T X + l2 I) beta = X^T y via Cholesky (d is small here).\n const n = X.length, d = ((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n if (!n || !d)\n return new Array(d).fill(0);\n const XtX = new Float64Array(d * d);\n const Xty = new Float64Array(d);\n for (let i = 0; i < n; i++) {\n const row = X[i];\n const yi = y[i];\n for (let j = 0; j < d; j++) {\n Xty[j] += row[j] * yi;\n for (let k = 0; k <= j; k++)\n XtX[j * d + k] += row[j] * row[k];\n }\n }\n for (let j = 0; j < d; j++) {\n for (let k = 0; k < j; k++)\n XtX[k * d + j] = XtX[j * d + k];\n XtX[j * d + j] += l2;\n }\n // Cholesky\n const L = new Float64Array(d * d);\n for (let i = 0; i < d; i++) {\n for (let j = 0; j <= i; j++) {\n let s = XtX[i * d + j];\n for (let k = 0; k < j; k++)\n s -= L[i * d + k] * L[j * d + k];\n L[i * d + j] = (i === j) ? 
Math.sqrt(Math.max(s, 1e-12)) : s / (L[j * d + j] || 1e-12);\n }\n }\n // Solve L z = Xty\n const z = new Float64Array(d);\n for (let i = 0; i < d; i++) {\n let s = Xty[i];\n for (let k = 0; k < i; k++)\n s -= L[i * d + k] * z[k];\n z[i] = s / (L[i * d + i] || 1e-12);\n }\n // Solve L^T beta = z\n const beta = new Float64Array(d);\n for (let i = d - 1; i >= 0; i--) {\n let s = z[i];\n for (let k = i + 1; k < d; k++)\n s -= L[k * d + i] * beta[k];\n beta[i] = s / (L[i * d + i] || 1e-12);\n }\n return Array.from(beta);\n}\nfunction mseResidual(X, y, beta) {\n const n = X.length || 1;\n let s = 0;\n for (let i = 0; i < n; i++) {\n const row = X[i];\n let p = 0;\n for (let j = 0; j < row.length; j++)\n p += row[j] * beta[j];\n const e = y[i] - p;\n s += e * e;\n }\n return s / n;\n}\n// Build supervised datasets for Y_t and regressors made of past Y/X lags.\nfunction makeDesign(ySeq, xSeq, L, LX) {\n // ySeq[i] and xSeq[i] are vectors at time i (we’ll average to 1D to keep it cheap)\n const y1d = ySeq.map(v => v.reduce((a, b) => a + b, 0) / Math.max(1, v.length));\n const x1d = xSeq.map(v => v.reduce((a, b) => a + b, 0) / Math.max(1, v.length));\n const N = y1d.length;\n const rowsY = [];\n const rowsYX = [];\n const target = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n // target: current Y (scalar)\n target.push([y1d[t]]);\n // past Y\n const ylags = [];\n for (let k = 1; k <= L; k++)\n ylags.push(y1d[t - k]);\n // past X\n const xlags = [];\n for (let k = 1; k <= LX; k++)\n xlags.push(x1d[t - k]);\n rowsY.push(ylags);\n rowsYX.push([...ylags, ...xlags]);\n }\n // standardize columns for stability\n const colZ = (M) => {\n var _a;\n const n = M.length, d = ((_a = M[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n const out = Array.from({ length: n }, () => new Array(d).fill(0));\n for (let j = 0; j < d; j++) {\n const col = new Array(n);\n for (let i = 0; i < n; i++)\n col[i] = M[i][j];\n const zs = zscore(col);\n for (let i = 0; i < n; i++)\n out[i][j] = zs[i];\n }\n return out;\n };\n return { XY: colZ(rowsY), XYX: colZ(rowsYX), y: target.map(v => v[0]) };\n}\nclass TransferEntropy {\n constructor(opts = {}) {\n this.xBuf = [];\n this.yBuf = [];\n this.opts = Object.assign({ window: 256, condLags: 1, xLags: 1, ridge: 1e-3, bits: true }, opts);\n }\n /** Push a synchronized sample pair (vectors OK). */\n push(x, y) {\n const X = Array.isArray(x) ? x : [x];\n const Y = Array.isArray(y) ? y : [y];\n this.xBuf.push(X);\n this.yBuf.push(Y);\n const W = this.opts.window;\n if (this.xBuf.length > W) {\n this.xBuf.shift();\n this.yBuf.shift();\n }\n }\n /** Estimate TE(X→Y) over the current window. */\n estimate() {\n const n = this.xBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (n < Math.max(L, LX) + 5)\n return 0;\n const { XY, XYX, y } = makeDesign(this.yBuf, this.xBuf, L, LX);\n if (!XY.length || !XYX.length)\n return 0;\n // H1: regress Y_t on Y_{t-1..t-L}\n const b1 = ridgeSolve(XY, y, this.opts.ridge);\n const v1 = mseResidual(XY, y, b1);\n // H2: regress Y_t on [Y_{t-1..t-L}, X_{t-1..t-L}]\n const b2 = ridgeSolve(XYX, y, this.opts.ridge);\n const v2 = mseResidual(XYX, y, b2);\n // TE ≈ 0.5 * log( v1 / v2 )\n const teNats = 0.5 * Math.log(Math.max(1e-12, v1) / Math.max(1e-12, v2));\n const te = Math.max(0, teNats); // no negatives (numerical guard)\n return this.opts.bits ? 
(te / Math.LN2) : te;\n }\n}\nclass InfoFlowGraph {\n constructor(defaultOpts = {}) {\n this.defaultOpts = defaultOpts;\n this.monitors = new Map();\n // License check removed // Premium feature - requires valid license\n }\n get(name) {\n if (!this.monitors.has(name))\n this.monitors.set(name, new TransferEntropy(this.defaultOpts));\n return this.monitors.get(name);\n }\n snapshot() {\n const out = {};\n for (const [k, mon] of this.monitors)\n out[k] = Number(mon.estimate().toFixed(4));\n return out;\n }\n}\n\n// src/infoflow/TransferEntropyPWS.ts\n// Phase-2 TE-PWS: importance sampling for rare events + path-weight sampling (PWS)\n// API mirrors Phase-1 so it plugs in with minimal edits.\n// --- small helpers ---\nfunction meanStd(arr) {\n if (arr.length === 0)\n return { m: 0, s: 0 };\n let m = 0;\n for (const v of arr)\n m += v;\n m /= arr.length;\n let v = 0;\n for (const x of arr) {\n const d = x - m;\n v += d * d;\n }\n return { m, s: Math.sqrt(v / Math.max(1, arr.length)) || 1e-12 };\n}\nfunction l2(a) { let s = 0; for (let i = 0; i < a.length; i++)\n s += a[i] * a[i]; return Math.sqrt(s); }\nfunction sub(a, b) { const n = Math.min(a.length, b.length); const o = new Array(n); for (let i = 0; i < n; i++)\n o[i] = a[i] - b[i]; return o; }\nfunction concat(a, b) { const o = new Array(a.length + b.length); let k = 0; for (const v of a)\n o[k++] = v; for (const v of b)\n o[k++] = v; return o; }\nfunction gaussianVec(a, b, s) {\n // product kernel with shared bandwidth\n const n = Math.min(a.length, b.length);\n let q = 0;\n for (let i = 0; i < n; i++) {\n const d = a[i] - b[i];\n q += d * d;\n }\n const ss = s * s || 1e-12;\n return Math.exp(-0.5 * q / ss) / Math.pow(Math.sqrt(2 * Math.PI * ss), n);\n}\nclass TransferEntropyPWS {\n constructor(opts = {}) {\n this.xBuf = [];\n this.yBuf = [];\n this.yDiffBuf = []; // ||ΔY|| magnitude for rarity\n this.wBuf = []; // per-sample weights (importance * decay)\n this.opts = Object.assign({ window: 256, condLags: 1, xLags: 1, normalize: true, tailQuantile: 0.9, tailBoost: 4, decay: 1.0, usePWS: false, jitterSigma: 0.15, pwsIters: 8, bandwidth: 0, ridge: 1e-6, bits: true }, opts);\n }\n /** Push one synchronized sample (vectors OK). */\n push(x, y) {\n const X = Array.isArray(x) ? x.slice() : [x];\n const Y = Array.isArray(y) ? y.slice() : [y];\n // Δ||Y|| for rarity\n const prev = this.yBuf.length ? this.yBuf[this.yBuf.length - 1] : Y;\n const d = l2(sub(Y, prev));\n this.xBuf.push(X);\n this.yBuf.push(Y);\n this.yDiffBuf.push(d);\n // time decay (most recent → weight 1)\n const tDecay = this.opts.decay;\n const wDecay = tDecay < 1 && this.xBuf.length > 1\n ? Math.pow(tDecay, this.xBuf.length - 1)\n : 1;\n // placeholder weight now; we’ll update after we know tail threshold\n this.wBuf.push(wDecay);\n // maintain window\n while (this.xBuf.length > this.opts.window) {\n this.xBuf.shift();\n this.yBuf.shift();\n this.yDiffBuf.shift();\n this.wBuf.shift();\n }\n }\n /** Basic Phase-2 call: choose PWS or vanilla IS+KDE based on opts.usePWS */\n estimate() {\n return this.opts.usePWS ? this.estimatePWS() : this.estimateIS();\n }\n /** Vanilla importance-weighted TE via KDE (no path jitter). 
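   [Editor's note: illustrative usage sketch for TransferEntropyPWS, not part of the published package.]\n   const te = new TransferEntropyPWS({ window: 256, tailQuantile: 0.9, tailBoost: 4 }); // restating constructor defaults\n   for (let t = 0; t < xs.length; t++) te.push(xs[t], ys[t]); // xs, ys: assumed synchronized sample streams\n   const teBits = te.estimate(); // usePWS defaults to false, so this dispatches to estimateIS(); result is in bits while opts.bits is true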
*/\n estimateIS() {\n const N = this.yBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (N <= Math.max(L, LX) + 2)\n return 0;\n // compute tail threshold on recent Δ||Y||\n const diffs = this.yDiffBuf.slice();\n const thr = quantile(diffs, this.opts.tailQuantile);\n // update importance weights\n for (let i = 0; i < this.wBuf.length; i++) {\n const tail = diffs[i] >= thr ? this.opts.tailBoost : 1;\n this.wBuf[i] = Math.max(1e-8, this.wBuf[i] * tail);\n }\n // Build contexts\n const samples = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n const y = this.yBuf[t];\n const yPast = stackPast(this.yBuf, t, L);\n const xPast = stackPast(this.xBuf, t, LX);\n samples.push({ y, yPast, xPast, w: this.wBuf[t] });\n }\n if (samples.length < 4)\n return 0;\n // bandwidth selection\n const ySc = flatten(samples.map(s => s.y));\n const b = this.opts.bandwidth > 0 ? this.opts.bandwidth\n : silverman(ySc);\n // H(Y|Ypast) and H(Y|Ypast,Xpast) via KDE density ratio\n const HY_Y = condEntropyKDE(samples, 'yPast', b, this.opts.ridge);\n const HY_YX = condEntropyKDE(samples, 'yPast+xPast', b, this.opts.ridge);\n const te = Math.max(0, HY_Y - HY_YX); // >= 0 numerically clipped\n return this.opts.bits ? te / Math.log(2) : te;\n }\n /** Path-Weight Sampling: jitter past contexts, average conditional entropies. */\n estimatePWS() {\n const N = this.yBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (N <= Math.max(L, LX) + 2)\n return 0;\n // tail-aware importance weights\n const diffs = this.yDiffBuf.slice();\n const thr = quantile(diffs, this.opts.tailQuantile);\n for (let i = 0; i < this.wBuf.length; i++) {\n const tail = diffs[i] >= thr ? this.opts.tailBoost : 1;\n this.wBuf[i] = Math.max(1e-8, this.wBuf[i] * tail);\n }\n const samples = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n const y = this.yBuf[t];\n const yPast = stackPast(this.yBuf, t, L);\n const xPast = stackPast(this.xBuf, t, LX);\n samples.push({ y, yPast, xPast, w: this.wBuf[t] });\n }\n if (samples.length < 4)\n return 0;\n const ySc = flatten(samples.map(s => s.y));\n const b = this.opts.bandwidth > 0 ? this.opts.bandwidth : silverman(ySc);\n const J = Math.max(1, this.opts.pwsIters | 0);\n const jSig = this.opts.jitterSigma;\n // baseline entropies\n const baseHY_Y = condEntropyKDE(samples, 'yPast', b, this.opts.ridge);\n const baseHY_YX = condEntropyKDE(samples, 'yPast+xPast', b, this.opts.ridge);\n // jittered contexts\n let accY = 0, accYX = 0;\n for (let j = 0; j < J; j++) {\n const jittered = jitterSamples(samples, jSig);\n accY += condEntropyKDE(jittered, 'yPast', b, this.opts.ridge);\n accYX += condEntropyKDE(jittered, 'yPast+xPast', b, this.opts.ridge);\n }\n const HY_Y = 0.5 * baseHY_Y + 0.5 * (accY / J);\n const HY_YX = 0.5 * baseHY_YX + 0.5 * (accYX / J);\n const te = Math.max(0, HY_Y - HY_YX);\n return this.opts.bits ? te / Math.log(2) : te;\n }\n}\n/** Manage many labeled links, PWS-enabled. Same API as Phase-1. 
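   [Editor's note: illustrative sketch; one monitor per named link, and snapshot() re-estimates each on demand.]\n   const g = new InfoFlowGraphPWS({ window: 256, usePWS: true, pwsIters: 8 });\n   g.get('Retriever:Q->Score').push(queryVec, scoreVec); // queryVec, scoreVec: assumed signals; the link name mirrors one read by TEController below\n   const snap = g.snapshot(); // e.g. { 'Retriever:Q->Score': 0.042 } (value illustrative)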
*/\nclass InfoFlowGraphPWS {\n constructor(defaultOpts = {}) {\n this.defaultOpts = defaultOpts;\n this.monitors = new Map();\n // License check removed // Premium feature - requires valid license\n }\n get(name) {\n if (!this.monitors.has(name))\n this.monitors.set(name, new TransferEntropyPWS(this.defaultOpts));\n return this.monitors.get(name);\n }\n snapshot() {\n const out = {};\n for (const [k, mon] of this.monitors)\n out[k] = mon.estimate();\n return out;\n }\n}\n// ========================= internals =========================\nfunction stackPast(buf, t, L) {\n var _a;\n const out = [];\n for (let l = 1; l <= L; l++) {\n const v = (_a = buf[t - l]) !== null && _a !== void 0 ? _a : buf[0];\n for (let i = 0; i < v.length; i++)\n out.push(v[i]);\n }\n return out;\n}\nfunction flatten(mats) {\n const out = [];\n for (const v of mats)\n for (const x of v)\n out.push(x);\n return out;\n}\nfunction silverman(vals) {\n // Silverman's rule-of-thumb for Gaussian KDE (per-dim averaged)\n if (vals.length < 2)\n return 1;\n const { s } = meanStd(vals);\n const n = vals.length;\n return 1.06 * s * Math.pow(n, -1 / 5); // scalar, used for product kernel\n}\nfunction quantile(arr, q) {\n if (arr.length === 0)\n return 0;\n const a = arr.slice().sort((x, y) => x - y);\n const idx = Math.min(a.length - 1, Math.max(0, Math.floor(q * (a.length - 1))));\n return a[idx];\n}\nfunction condEntropyKDE(samples, mode, bw, ridge) {\n // H(Y|C) ≈ E[-log p(y|c)] with KDE ratio: p(y,c)/p(c)\n // Use importance weights w and product Gaussian kernels with shared bw.\n const useXY = mode === 'yPast+xPast';\n let totalW = 0, acc = 0;\n // Pre-extract contexts\n const C = samples.map(s => useXY ? concat(s.yPast, s.xPast) : s.yPast);\n const Y = samples.map(s => s.y);\n const W = samples.map(s => s.w);\n for (let i = 0; i < samples.length; i++) {\n const ci = C[i], yi = Y[i], wi = W[i];\n // joint density p(y,c) ~ sum_j w_j K_c(ci,cj) K_y(yi,yj)\n // context density p(c) ~ sum_j w_j K_c(ci,cj)\n let num = 0, den = 0;\n for (let j = 0; j < samples.length; j++) {\n const kc = gaussianVec(ci, C[j], bw);\n den += W[j] * kc;\n num += W[j] * kc * gaussianVec(yi, Y[j], bw);\n }\n const p = Math.max(ridge, num / Math.max(ridge, den));\n acc += -Math.log(p) * wi;\n totalW += wi;\n }\n return (totalW > 0) ? acc / totalW : 0;\n}\nfunction jitterSamples(samples, sigmaFrac) {\n var _a, _b;\n if (sigmaFrac <= 0)\n return samples;\n // Estimate per-dim std of yPast across buffer to scale jitter\n const allYp = samples.map(s => s.yPast);\n const dims = ((_a = allYp[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n const perDim = new Array(dims).fill(0);\n // compute std per dim\n for (let d = 0; d < dims; d++) {\n const vals = [];\n for (const v of allYp)\n vals.push((_b = v[d]) !== null && _b !== void 0 ? 
_b : 0);\n perDim[d] = meanStd(vals).s || 1e-3;\n }\n // jitter\n const out = new Array(samples.length);\n for (let i = 0; i < samples.length; i++) {\n const s = samples[i];\n const yp = s.yPast.slice();\n for (let d = 0; d < yp.length; d++) {\n const z = gauss() * sigmaFrac * perDim[d];\n yp[d] += z;\n }\n out[i] = { y: s.y, yPast: yp, xPast: s.xPast, w: s.w };\n }\n return out;\n}\nfunction gauss() {\n // Box-Muller\n let u = 0, v = 0;\n while (u === 0)\n u = Math.random();\n while (v === 0)\n v = Math.random();\n return Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);\n}\n\n// TEController.ts — TE-PWS closed-loop tuner for Ω\n/* ------------------------ utils ------------------------ */\nfunction clampNumber(x, lo, hi) {\n return Math.max(lo, Math.min(hi, x));\n}\nfunction withinBand(v, band) {\n return v >= band[0] && v <= band[1];\n}\n/* ------------------------ controller ------------------------ */\nclass TEController {\n constructor(params = {}) {\n this.qCount = 0;\n this.emaBeta = 0.2; // EMA smoothing for TE\n // License check removed // Premium feature - requires valid license\n const defaultLimits = {\n alpha: [0.4, 0.98],\n sigma: [0.12, 1.0],\n ridge: [0.01, 0.2],\n probThresh: [0.3, 0.7],\n mmrLambda: [0.4, 0.9],\n budgetChars: [600, 2400],\n };\n const defaultStep = {\n alpha: 0.03,\n sigma: 0.04,\n ridge: 0.01,\n probThresh: 0.03,\n mmrLambda: 0.05,\n budgetChars: 120,\n };\n const defaults = {\n targets: {\n q2score: [0.01, 0.10],\n feat2score: [0.01, 0.10],\n kept2sum: [0.01, 0.10],\n loopMax: 0.25,\n },\n limits: defaultLimits,\n step: defaultStep,\n cooldown: 2,\n maxPerSessionAdjusts: 24,\n trustMinSamples: 8,\n };\n this.p = Object.assign(Object.assign(Object.assign({}, defaults), params), { targets: Object.assign(Object.assign({}, defaults.targets), (params.targets || {})), limits: Object.assign(Object.assign({}, defaultLimits), (params.limits || {})), step: Object.assign(Object.assign({}, defaultStep), (params.step || {})) });\n this.s = { lastAdjustAt: -999, totalAdjusts: 0, ema: {}, history: [] };\n }\n /** Update EMA from a TE snapshot. */\n pushTE(teSnap) {\n var _a;\n this.qCount++;\n for (const [k, v] of Object.entries(teSnap || {})) {\n const prev = (_a = this.s.ema[k]) !== null && _a !== void 0 ? _a : v;\n this.s.ema[k] = prev + this.emaBeta * (v - prev);\n }\n }\n /** Try one adjustment; returns {knobs?, note?}. Only adjusts if safe. */\n maybeAdjust(current) {\n var _a, _b, _c, _d;\n if (this.qCount < this.p.trustMinSamples)\n return {};\n if (this.s.totalAdjusts >= this.p.maxPerSessionAdjusts)\n return {};\n if (this.qCount - this.s.lastAdjustAt < this.p.cooldown)\n return {};\n const te = this.s.ema;\n const { q2score, feat2score, kept2sum, loopMax } = this.p.targets;\n const out = Object.assign({}, current);\n let changed = null;\n const pick = (cand) => {\n if (!changed)\n changed = cand; // single-knob change per step\n };\n const tQS = (_a = te['Retriever:Q->Score']) !== null && _a !== void 0 ? _a : 0;\n const tFS = (_b = te['OmegaRR:Feat->Score']) !== null && _b !== void 0 ? _b : 0;\n const tKS = (_c = te['Omega:Kept->Summary']) !== null && _c !== void 0 ? _c : 0;\n const tLoop = (_d = te['Reservoir:Loop']) !== null && _d !== void 0 ? 
_d : 0; // optional if you wire it\n // 1) Retrieval signal shaping\n if (!withinBand(tQS, q2score)) {\n if (tQS < q2score[0]) {\n pick({ param: 'alpha', delta: +this.p.step.alpha, why: `Q→Score low (${tQS.toFixed(3)} < ${q2score[0]})` });\n if (!changed)\n pick({ param: 'sigma', delta: -this.p.step.sigma, why: `Q→Score low, sharpen σ` });\n }\n else {\n pick({ param: 'sigma', delta: +this.p.step.sigma, why: `Q→Score high (${tQS.toFixed(3)} > ${q2score[1]})` });\n if (!changed)\n pick({ param: 'alpha', delta: -this.p.step.alpha, why: `Q→Score high, blend TF-IDF more` });\n }\n }\n // 2) Reranker feature effectiveness via ridge\n if (!changed && !withinBand(tFS, feat2score)) {\n if (tFS < feat2score[0]) {\n pick({ param: 'ridge', delta: -this.p.step.ridge, why: `Feat→Score low (${tFS.toFixed(3)}): loosen λ` });\n }\n else {\n pick({ param: 'ridge', delta: +this.p.step.ridge, why: `Feat→Score high (${tFS.toFixed(3)}): stabilize λ` });\n }\n }\n // 3) Grounding strength into summary via kept set\n if (!changed && !withinBand(tKS, kept2sum)) {\n if (tKS < kept2sum[0]) {\n pick({ param: 'probThresh', delta: -this.p.step.probThresh, why: `Kept→Summary low (${tKS.toFixed(3)}): expand kept` });\n if (!changed)\n pick({ param: 'budgetChars', delta: +this.p.step.budgetChars, why: `Kept→Summary low: widen budget` });\n }\n else {\n pick({ param: 'probThresh', delta: +this.p.step.probThresh, why: `Kept→Summary high: tighten kept` });\n }\n }\n // 4) Optional loop stability guard\n if (!changed && loopMax != null && tLoop > loopMax) {\n pick({ param: 'ridge', delta: +this.p.step.ridge, why: `Loop TE ${tLoop.toFixed(3)} > ${loopMax}: damp` });\n if (!changed)\n pick({ param: 'alpha', delta: -this.p.step.alpha, why: `Loop TE high: reduce dense gain` });\n }\n if (!changed)\n return {}; // nothing to do\n // ---- APPLY CHANGE (narrowed & typed) ----\n const change = changed; // non-null\n const limitsTuple = this.p.limits[change.param];\n const lo = limitsTuple[0];\n const hi = limitsTuple[1];\n const cur = out[change.param];\n const next = clampNumber(cur + change.delta, lo, hi);\n out[change.param] = next;\n // commit\n this.s.lastAdjustAt = this.qCount;\n this.s.totalAdjusts++;\n this.s.history.push({ param: change.param, oldVal: current[change.param], newVal: next, why: change.why });\n const note = `auto-adjust ${String(change.param)}: ${current[change.param]} → ${next} (${change.why})`;\n return { knobs: out, note };\n }\n getHistory() { return this.s.history.slice(-8); } // recent changes\n reset() {\n this.s = { lastAdjustAt: -999, totalAdjusts: 0, ema: {}, history: [] };\n this.qCount = 0;\n }\n}\n\n// Markdown parsing utilities\n// Extracted from workers for reuse\nconst FENCE_RE = /```[\\s\\S]*?```/g;\nconst LINK_RE = /\\[([^\\]]+)\\]\\(([^)]+)\\)/g;\nfunction stripForIndex(md, opts) {\n let s = md;\n if (opts.stripCode) {\n // Preserve a 1-line signature from the first non-empty line inside each fenced block.\n s = s.replace(FENCE_RE, m => {\n const lines = m.split('\\n').slice(1, -1);\n const sig = (lines.find(l => l.trim()) || '').trim();\n return sig ? 
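/* [Editor's note: closed-loop tuning sketch built from the TEController class above; applySettings is hypothetical glue code, and the knob values are illustrative.]\n const ctl = new TEController({ cooldown: 2, trustMinSamples: 8 }); // restating the defaults\n ctl.pushTE(g.snapshot()); // EMA-smooth the TE readings once per query\n const { knobs, note } = ctl.maybeAdjust({ alpha: 0.8, sigma: 0.4, ridge: 0.08, probThresh: 0.45, mmrLambda: 0.7, budgetChars: 1200 });\n if (knobs) applySettings(knobs); // at most one knob changes per call, clamped to p.limits\n*/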
`\\n${sig}\\n` : '\\n<code omitted>\\n';\n });\n }\n if (opts.stripLinks) {\n // Keep anchor text, drop target\n s = s.replace(LINK_RE, '$1');\n }\n // Light cleanup\n s = s.replace(/[ \\t]+/g, ' ')\n .replace(/\\n{3,}/g, '\\n\\n')\n .trim();\n return s;\n}\nfunction parseMarkdownToSections(md, opts = { stripCode: true, stripLinks: true }) {\n const lines = md.split(/\\r?\\n/);\n const root = { id: 0, level: 1, heading: '(root)', content: '', rich: '', children: [] };\n let current = null;\n const stack = [root];\n let nextId = 1;\n let buf = [];\n const flush = (buf, target) => {\n if (!target)\n return;\n const rich = buf.join('\\n').trim();\n target.rich = rich;\n target.content = stripForIndex(rich, opts);\n };\n for (const line of lines) {\n const mH = /^(#{2,6})\\s+(.*)$/.exec(line);\n if (mH) {\n // heading line\n flush(buf, current);\n buf = [];\n const level = mH[1].length;\n const heading = mH[2].trim();\n const sec = { id: nextId++, level, heading, content: '', rich: '', children: [] };\n // Find proper parent\n while (stack.length && stack[stack.length - 1].level >= level)\n stack.pop();\n const parent = stack[stack.length - 1] || root;\n parent.children.push(sec);\n sec.parent = parent.id;\n stack.push(sec);\n current = sec;\n }\n else {\n buf.push(line);\n }\n }\n flush(buf, current);\n return root;\n}\nfunction backfillEmptyParents(root) {\n const visit = (s) => {\n var _a;\n s.children.forEach(visit);\n // Backfill typical chapter parents (##) only; adjust as needed\n if (s.level === 2) {\n const isEmpty = !s.content || !s.content.trim();\n if (isEmpty) {\n const childSummaries = s.children\n .filter(c => (c.content || c.rich).trim())\n .slice(0, 2)\n .map(c => {\n const body = (c.content || c.rich).split('\\n').slice(0, 3).join('\\n');\n return `### ${c.heading}\\n${body}`;\n });\n if (childSummaries.length) {\n s.content = childSummaries.join('\\n\\n');\n if (!((_a = s.rich) === null || _a === void 0 ? void 0 : _a.trim())) {\n s.rich = `> Summary of subsections:\\n\\n${childSummaries.join('\\n\\n')}`;\n }\n }\n }\n }\n };\n visit(root);\n}\nfunction flattenSections(root) {\n const out = [];\n const walk = (s) => {\n if (s.id !== 0 && s.heading) {\n out.push({ heading: s.heading, content: s.content, rich: s.rich, secId: s.id, level: s.level });\n }\n s.children.forEach(walk);\n };\n walk(root);\n return out;\n}\n\n// Auto-tuning utilities for hyperparameter optimization\n// Extracted from dev-worker for reuse\n/**\n * Sample queries from corpus\n */\nfunction sampleQueriesFromCorpus(chunks, n, useStem) {\n const out = [];\n for (let i = 0; i < n; i++) {\n const s = chunks[Math.floor(Math.random() * chunks.length)];\n // short synthetic queries from headings + nouns-ish tokens\n const toks = tokenize$1((s.heading + ' ' + s.content).slice(0, 400), useStem)\n .filter(t => t.length > 3)\n .slice(0, 40);\n const uniq = Array.from(new Set(toks));\n out.push(uniq.slice(0, 6).join(' '));\n }\n return out;\n}\n/**\n * Compute penalty for configuration complexity\n */\nfunction penalty(cfg) {\n const lmCost = (cfg.landmarks - 128) / 512;\n const vocabCost = (cfg.vocab - 8000) / 24000;\n const preCost = (cfg.prefilter - 200) / 1200;\n return 0.02 * (lmCost + vocabCost + preCost);\n}\n/**\n * Jaccard similarity between two index arrays\n */\nfunction jaccard(a, b) {\n const A = new Set(a);\n const B = new Set(b);\n let inter = 0;\n for (const x of A)\n if (B.has(x))\n inter++;\n const uni = new Set([...A, ...B]).size;\n return uni ? 
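/* [Editor's note: markdown-to-chunks sketch using the three helpers above; md is an assumed input string.]\n const root = parseMarkdownToSections(md, { stripCode: true, stripLinks: true }); // only ## through ###### open sections; single-# lines remain body text\n backfillEmptyParents(root); // fills empty ## parents with a short summary of their first subsections\n const chunks = flattenSections(root); // -> [{ heading, content, rich, secId, level }]\n*/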
inter / uni : 0;\n}\n/**\n * Clamp value between min and max\n */\nfunction clamp(x, a, b) {\n return Math.max(a, Math.min(b, x));\n}\n/**\n * Pick random element from array\n */\nfunction pick(arr) {\n return arr[Math.floor(Math.random() * arr.length)];\n}\n/**\n * Random number in range\n */\nfunction randRange(a, b) {\n return a + Math.random() * (b - a);\n}\n/**\n * Mutate object with patch\n */\nfunction mutate(base, patch) {\n return Object.assign({}, base, patch);\n}\n/**\n * Auto-tune hyperparameters\n */\nfunction autoTune(opts, onProgress) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b;\n // License check removed // Premium feature - requires valid license\n const { chunks, vocabMap, idf, tfidfDocs, vocabSize, budget = 40, sampleQueries: Qn = 24, currentSettings, } = opts;\n const budgetClamped = Math.max(10, Math.min(200, budget));\n const QnClamped = Math.max(8, Math.min(60, Qn));\n const useStem = ((_a = currentSettings.useStem) !== null && _a !== void 0 ? _a : true);\n const queries = sampleQueriesFromCorpus(chunks, QnClamped, useStem);\n // Pre-compute TF-IDF top-K for each query (baseline)\n const tfidfTops = queries.map(q => {\n var _a;\n const qv = toTfidf(tokenize$1(q, useStem), idf, vocabMap, 1);\n const scores = tfidfDocs.map(v => cosineSparse(v, qv));\n return topKIndices(scores, ((_a = currentSettings.topK) !== null && _a !== void 0 ? _a : 8));\n });\n let best = { score: -Infinity, cfg: Object.assign({}, currentSettings) };\n // Cache for dense docs (keyed by kernel params)\n const denseCache = new Map();\n const denseDocsFor = (cfg) => {\n // ridge doesn't affect projection; key on kernel params only\n const key = `${cfg.kernel}:${cfg.landmarks}:${cfg.sigma}`;\n let dd = denseCache.get(key);\n if (!dd) {\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabSize, cfg.landmarks);\n dd = buildDenseDocs(tfidfDocs, vocabSize, landmarkMat, cfg.kernel, cfg.sigma);\n denseCache.set(key, dd);\n }\n return dd;\n };\n let trial = 0;\n const tryCfg = (cfg, note) => {\n var _a;\n const jScores = [];\n const dd = denseDocsFor(cfg);\n const alpha = clamp(cfg.alpha, 0, 1);\n const lambda = ((_a = cfg.ridge) !== null && _a !== void 0 ? 
_a : 0.05);\n for (let qi = 0; qi < queries.length; qi++) {\n const q = queries[qi];\n const qv = toTfidf(tokenize$1(q, cfg.useStem), idf, vocabMap, 1);\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabSize, cfg.landmarks);\n const qd = projectToDense(qv, vocabSize, landmarkMat, cfg.kernel, cfg.sigma);\n const tfidfScores = tfidfDocs.map(v => cosineSparse(v, qv));\n // Compute dense scores using kernel similarity\n const denseScoresSimple = dd.map((v) => kernelSim(v, qd, cfg.kernel, cfg.sigma));\n // ridge-regularized hybrid (bonus off during tuning)\n const hybrid = denseScoresSimple.map((d, i) => {\n const t = tfidfScores[i];\n const reg = 1 / (1 + lambda * (d * d + t * t));\n return reg * (alpha * d + (1 - alpha) * t);\n });\n const idxs = topKIndices(hybrid, cfg.topK);\n jScores.push(jaccard(tfidfTops[qi], idxs));\n }\n const score = (jScores.reduce((a, b) => a + b, 0) / jScores.length) - penalty(cfg);\n if (score > best.score)\n best = { score, cfg: Object.assign({}, cfg) };\n if (onProgress)\n onProgress(++trial, best.score, note);\n };\n // random warmup\n for (let i = 0; i < Math.floor(budgetClamped * 0.6); i++) {\n const cfg = mutate(currentSettings, {\n alpha: randRange(0.55, 0.95),\n beta: randRange(0.0, 0.35),\n sigma: randRange(0.18, 0.75),\n kernel: pick(['rbf', 'cosine', 'poly2']),\n vocab: pick([8000, 10000, 12000, 15000]),\n landmarks: pick([128, 192, 256, 320, 384]),\n prefilter: pick([200, 300, 400, 600]),\n topK: pick([4, 6, 8]),\n headingW: randRange(1.5, 4.5),\n chunk: pick([450, 550, 650]),\n overlap: pick([50, 75, 100]),\n penalizeLinks: true,\n stripCode: true,\n expandQuery: true,\n useStem: true,\n ridge: randRange(0.02, 0.18),\n });\n tryCfg(cfg, 'random');\n }\n // refinement\n for (let i = trial; i < budgetClamped; i++) {\n const b = best.cfg;\n const cfg = mutate(b, {\n alpha: clamp(b.alpha + randRange(-0.1, 0.1), 0.4, 0.98),\n beta: clamp(b.beta + randRange(-0.1, 0.1), 0, 0.4),\n sigma: clamp(b.sigma + randRange(-0.08, 0.08), 0.12, 1.0),\n kernel: b.kernel,\n vocab: b.vocab,\n landmarks: b.landmarks,\n prefilter: b.prefilter,\n topK: b.topK,\n headingW: clamp(b.headingW + randRange(-0.4, 0.4), 1.0, 6.0),\n chunk: b.chunk,\n overlap: b.overlap,\n penalizeLinks: b.penalizeLinks,\n stripCode: b.stripCode,\n expandQuery: b.expandQuery,\n useStem: b.useStem,\n ridge: clamp(((_b = b.ridge) !== null && _b !== void 0 ? 
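// Example (sketch): the ridge-regularized hybrid score from tryCfg, isolated.
// d is the dense (kernel) score, t the TF-IDF cosine, alpha the blend; the
// 1 / (1 + lambda * (d^2 + t^2)) factor damps documents that score extremely
// on either channel.
function hybridScore(d, t, alpha, lambda) {
    const reg = 1 / (1 + lambda * (d * d + t * t));
    return reg * (alpha * d + (1 - alpha) * t);
}
// hybridScore(0.9, 0.5, 0.7, 0.05) ≈ 0.741, vs. the raw blend 0.78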
_b : 0.05) + randRange(-0.02, 0.02), 0.0, 0.2),\n });\n tryCfg(cfg, 'refine');\n }\n return {\n bestSettings: best.cfg,\n bestScore: best.score,\n trials: trial,\n };\n });\n}\n\n// Model serialization utilities\n// Extracted from workers for reuse\n/**\n * Small, deterministic hash (not cryptographic)\n */\nfunction quickHash(s) {\n let h1 = 0x9e3779b1, h2 = 0x85ebca6b;\n for (let i = 0; i < s.length; i++) {\n const c = s.charCodeAt(i);\n h1 = Math.imul(h1 ^ c, 0x85ebca6b);\n h2 = Math.imul(h2 ^ c, 0xc2b2ae35);\n }\n h1 = (h1 ^ (h2 >>> 15)) >>> 0;\n return ('00000000' + h1.toString(16)).slice(-8);\n}\n/**\n * Export model to serialized format\n */\nfunction exportModel(opts) {\n // License check removed // Premium feature - requires valid license\n const { settings, vocabMap, idf, chunks, tfidfDocs, landmarksIdx, landmarkMat, denseDocs, includeRich = true, includeDense = false, } = opts;\n // 1) settings snapshot (clone to avoid accidental mutation)\n const settingsSnap = JSON.parse(JSON.stringify(settings || {}));\n // 2) vocab\n const vocab = Array.from(vocabMap.entries());\n // 3) chunks (minimal text)\n const chunksSnap = chunks.map(c => ({\n heading: c.heading,\n content: c.content || '',\n rich: includeRich ? (c.rich || undefined) : undefined,\n level: c.level,\n secId: c.secId,\n }));\n // 4) tfidfDocs → array of pairs\n const tfidfPairs = tfidfDocs.map((m) => {\n const row = [];\n for (const [i, v] of m)\n row.push([i, v]);\n // sort indices for determinism\n row.sort((a, b) => a[0] - b[0]);\n return row;\n });\n // 5) Nyström landmarks and (optional) denseDocs\n const landmarkMatArr = landmarkMat.map(v => Array.from(v));\n const denseDocsArr = includeDense ?\n ((denseDocs === null || denseDocs === void 0 ? void 0 : denseDocs.map(v => Array.from(v))) || undefined) : undefined;\n const payload = {\n version: 'astermind-pro-v1',\n savedAt: new Date().toISOString(),\n settings: settingsSnap,\n vocab,\n idf: Array.from(idf),\n chunks: chunksSnap,\n tfidfDocs: tfidfPairs,\n landmarksIdx: Array.from(landmarksIdx),\n landmarkMat: landmarkMatArr,\n denseDocs: denseDocsArr,\n };\n // (Optional) quick content hash for sanity (small & deterministic)\n payload.hash = quickHash(JSON.stringify({\n idf: payload.idf.slice(0, 64),\n vi: payload.vocab.length,\n ci: payload.chunks.length,\n lm: payload.landmarksIdx.length\n }));\n return payload;\n}\nfunction importModel(model, opts) {\n // License check removed // Premium feature - requires valid license\n if (model.version !== 'astermind-pro-v1' && model.version !== 'astermind-elm-v1') {\n throw new Error(`Unsupported model version: ${model.version}. Expected 'astermind-pro-v1' or 'astermind-elm-v1'`);\n }\n // 1) restore settings\n const settings = JSON.parse(JSON.stringify(model.settings || {}));\n // 2) vocab & idf\n const vocabMap = new Map(model.vocab);\n const idf = Float64Array.from(model.idf); // keep as number[] for compatibility\n // 3) chunks\n const chunks = model.chunks.map(c => ({\n heading: c.heading,\n content: c.content || '',\n rich: c.rich,\n level: c.level,\n secId: c.secId\n }));\n // 4) tfidfDocs from pairs\n const tfidfDocs = model.tfidfDocs.map(row => {\n const m = new Map();\n for (const [i, v] of row)\n m.set(i, v);\n return m;\n });\n // 5) Nyström landmarks\n const landmarksIdx = Array.from(model.landmarksIdx);\n const landmarkMat = model.landmarkMat.map(a => Float64Array.from(a));\n // 6) denseDocs: use stored or recompute\n const needRecompute = ((opts === null || opts === void 0 ? 
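// Example (sketch): the exported hash is a cheap sanity check, not a signature.
// quickHash is deterministic, so re-serializing identical model stats must
// reproduce the same 8-hex-digit value.
const statsA = { idf: [0.1, 0.2], vi: 1000, ci: 42, lm: 128 };
const statsB = { idf: [0.1, 0.2], vi: 1000, ci: 42, lm: 128 };
console.assert(quickHash(JSON.stringify(statsA)) === quickHash(JSON.stringify(statsB)));
console.assert(quickHash('a') !== quickHash('b')); // distinct with high likelihood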
void 0 : opts.recomputeDense) === true) || !model.denseDocs || model.denseDocs.length !== tfidfDocs.length;\n let denseDocs;\n if (needRecompute && (opts === null || opts === void 0 ? void 0 : opts.buildDense)) {\n denseDocs = opts.buildDense(tfidfDocs, vocabMap.size, landmarkMat, settings.kernel || 'rbf', settings.sigma || 1.0);\n }\n else if (needRecompute) {\n throw new Error('recomputeDense=true but buildDense function not provided');\n }\n else {\n denseDocs = model.denseDocs.map(a => Float64Array.from(a));\n }\n return {\n settings,\n vocabMap,\n idf,\n chunks,\n tfidfDocs,\n landmarksIdx,\n landmarkMat,\n denseDocs,\n };\n}\n\n// elm_scorer.ts — tiny, self-contained ELM scorer for (query, chunk) relevance\n// Uses a random single hidden layer + ridge (closed form via OnlineRidge).\n// \n// NOTE: You can also use astermind's ELM or OnlineELM classes from the local build:\n// import { ELM, OnlineELM, defaultNumericConfig } from '@astermind/astermind-elm';\n// License removed - all features are now free!\nfunction rngFactory(seed = 1337) {\n // xorshift32\n let x = (seed >>> 0) || 1;\n return () => {\n x ^= x << 13;\n x ^= x >> 17;\n x ^= x << 5;\n return ((x >>> 0) / 0xFFFFFFFF);\n };\n}\nclass ELMScorer {\n constructor(p, cfg) {\n var _a;\n // License check removed // License check - ELMScorer uses premium OnlineRidge\n this.p = p;\n this.dim = Math.max(8, cfg.dim | 0);\n this.lambda = Math.max(1e-6, cfg.lambda);\n const rng = rngFactory((_a = cfg.seed) !== null && _a !== void 0 ? _a : 1337);\n this.W = new Float64Array(this.dim * p);\n for (let i = 0; i < this.W.length; i++)\n this.W[i] = (rng() * 2 - 1) * Math.sqrt(2 / p);\n this.b = new Float64Array(this.dim);\n for (let i = 0; i < this.b.length; i++)\n this.b[i] = (rng() * 2 - 1);\n this.ridge = new OnlineRidge(this.dim, 1, this.lambda);\n this.ready = false;\n }\n hidden(x) {\n const h = new Float64Array(this.dim);\n for (let j = 0; j < this.dim; j++) {\n let s = this.b[j];\n const row = j * this.p;\n for (let i = 0; i < this.p; i++)\n s += this.W[row + i] * x[i];\n // GELU-ish smooth nonlinearity (fast approximate)\n const t = s;\n h[j] = 0.5 * t * (1 + Math.tanh(Math.sqrt(2 / Math.PI) * (t + 0.044715 * Math.pow(t, 3))));\n }\n return h;\n }\n partialFit(batchX, batchY) {\n if (!this.ridge)\n this.ridge = new OnlineRidge(this.dim, 1, this.lambda);\n for (let k = 0; k < batchX.length; k++) {\n const h = this.hidden(batchX[k]); // Float64Array\n const y = new Float64Array([batchY[k]]); // <-- make it Float64Array\n this.ridge.update(h, y);\n }\n this.ready = true;\n }\n fit(X, y, iters = 1, batch = 256) {\n const n = X.length;\n for (let t = 0; t < iters; t++) {\n for (let i = 0; i < n; i += batch) {\n const xb = X.slice(i, i + batch);\n const yb = y.slice(i, i + batch);\n this.partialFit(xb, yb);\n }\n }\n this.ready = true;\n }\n score(x) {\n if (!this.ready || !this.ridge)\n return 0;\n const h = this.hidden(x);\n // y = h^T Beta (single output)\n const Beta = this.ridge.Beta;\n let s = 0;\n for (let j = 0; j < this.dim; j++)\n s += h[j] * Beta[j];\n return s;\n }\n}\n\n// multi-kernel-elm.ts — Multi-Kernel ELM combining multiple kernel types\n// Combines RBF, polynomial, and linear kernels for improved accuracy\n/**\n * Multi-Kernel ELM that combines multiple kernel types\n * Uses weighted combination of kernels for improved accuracy\n */\nclass MultiKernelELM {\n constructor(categories, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.kelms = [];\n this.kernelWeights = [];\n this.categories = [];\n this.trained = 
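// Example (sketch): the nonlinearity in ELMScorer.hidden is the common tanh
// approximation of GELU, shown standalone here:
function geluApprox(t) {
    return 0.5 * t * (1 + Math.tanh(Math.sqrt(2 / Math.PI) * (t + 0.044715 * Math.pow(t, 3))));
}
// geluApprox(0) === 0, geluApprox(3) ≈ 2.996, geluApprox(-3) ≈ -0.004 — ReLU-like but smooth.
// Note: rngFactory uses a signed shift (x >> 17) where canonical xorshift32
// uses the unsigned x >>> 17; the generator is still deterministic per seed,
// just not the textbook xorshift sequence.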
false;\n // License check removed // Premium feature - requires valid license\n this.categories = categories;\n this.options = {\n kernels: options.kernels,\n ridgeLambda: (_a = options.ridgeLambda) !== null && _a !== void 0 ? _a : 0.001,\n learnWeights: (_b = options.learnWeights) !== null && _b !== void 0 ? _b : true,\n nystrom: {\n m: (_d = (_c = options.nystrom) === null || _c === void 0 ? void 0 : _c.m) !== null && _d !== void 0 ? _d : 100,\n strategy: (_f = (_e = options.nystrom) === null || _e === void 0 ? void 0 : _e.strategy) !== null && _f !== void 0 ? _f : 'uniform',\n },\n };\n // Initialize kernel ELMs\n for (const kernelConfig of this.options.kernels) {\n const kelm = new KernelELM({\n outputDim: categories.length,\n kernel: {\n type: kernelConfig.type === 'polynomial' ? 'rbf' : kernelConfig.type, // Map polynomial to rbf for now\n gamma: (_h = (_g = kernelConfig.params) === null || _g === void 0 ? void 0 : _g.gamma) !== null && _h !== void 0 ? _h : 0.01,\n },\n ridgeLambda: this.options.ridgeLambda,\n task: 'classification',\n mode: 'nystrom',\n nystrom: {\n m: this.options.nystrom.m,\n strategy: this.options.nystrom.strategy === 'random' ? 'uniform' : this.options.nystrom.strategy,\n },\n });\n this.kelms.push(kelm);\n }\n // Initialize kernel weights\n if (this.options.learnWeights) {\n this.kernelWeights = this.options.kernels.map((k, i) => { var _a; return (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 / this.options.kernels.length; });\n }\n else {\n this.kernelWeights = this.options.kernels.map((k) => { var _a; return (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 / this.options.kernels.length; });\n }\n }\n /**\n * Train the multi-kernel ELM\n */\n fit(X, y) {\n // Convert y to one-hot if needed\n const oneHotY = this._toOneHot(y);\n // Train each kernel ELM\n for (const kelm of this.kelms) {\n kelm.fit(X, oneHotY);\n }\n // Learn optimal kernel weights if enabled\n if (this.options.learnWeights && this.kelms.length > 1) {\n this._learnKernelWeights(X, oneHotY);\n }\n this.trained = true;\n }\n /**\n * Predict with multi-kernel combination\n */\n predict(X, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allPredictions = [];\n for (const x of XArray) {\n const predictions = [];\n // Get predictions from each kernel\n const kernelPredictions = this.kelms.map((kelm) => {\n var _a, _b, _c, _d;\n const pred = ((_b = (_a = kelm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [x])) || ((_d = (_c = kelm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [x]));\n return (Array.isArray(pred) ? 
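// Example (sketch): constructing the ensemble above, with hypothetical labels.
// Note the constructor currently maps 'polynomial' onto an RBF KernelELM (see
// the inline comment in the kernel loop), and the initial weights are uniform
// regardless of learnWeights; fit() only re-learns them when learnWeights is
// true and more than one kernel is configured.
const mkElm = new MultiKernelELM(['greeting', 'farewell'], {
    kernels: [
        { type: 'rbf', params: { gamma: 0.01 } },
        { type: 'polynomial', params: { gamma: 0.05 } },
    ],
    ridgeLambda: 0.001,
    learnWeights: true,
    nystrom: { m: 100, strategy: 'uniform' },
});
// mkElm.fit(X, y); mkElm.predict(x, 3) => [{ label, prob }, ...]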
pred[0] : pred) || new Float64Array(this.categories.length);\n });\n // Weighted combination\n const combined = new Float64Array(this.categories.length);\n for (let i = 0; i < this.kelms.length; i++) {\n const weight = this.kernelWeights[i];\n for (let j = 0; j < this.categories.length; j++) {\n combined[j] += kernelPredictions[i][j] * weight;\n }\n }\n // Convert to probabilities\n const probs = this._softmax(combined);\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n const topResults = [];\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n topResults.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n predictions.push(...topResults);\n allPredictions.push(...predictions);\n }\n return allPredictions;\n }\n /**\n * Learn optimal kernel weights using validation performance\n */\n _learnKernelWeights(X, y) {\n var _a, _b, _c, _d;\n // Simple approach: weight by validation accuracy\n // In practice, you might use cross-validation\n const weights = new Float64Array(this.kelms.length);\n for (let i = 0; i < this.kelms.length; i++) {\n const kelm = this.kelms[i];\n let correct = 0;\n let total = 0;\n // Evaluate on training data (in production, use validation set)\n for (let j = 0; j < Math.min(100, X.length); j++) {\n const pred = ((_b = (_a = kelm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [X[j]])) || ((_d = (_c = kelm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [X[j]]));\n const predVec = (Array.isArray(pred) ? pred[0] : pred) || new Float64Array(0);\n const predIdx = this._argmax(predVec);\n const trueIdx = this._argmax(y[j]);\n if (predIdx === trueIdx)\n correct++;\n total++;\n }\n weights[i] = total > 0 ? 
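// Example (sketch): the blend step inside predict(), isolated. Per-kernel
// output vectors are combined with the learned weights before softmax.
function combineKernelOutputs(kernelOutputs, kernelWeights) {
    const combined = new Float64Array(kernelOutputs[0].length);
    for (let i = 0; i < kernelOutputs.length; i++) {
        for (let j = 0; j < combined.length; j++) {
            combined[j] += kernelOutputs[i][j] * kernelWeights[i];
        }
    }
    return combined;
}
// combineKernelOutputs([[1, 0, 0.5], [0.2, 0.9, 0.1]], [0.75, 0.25])
//   => Float64Array [0.8, 0.225, 0.4] — the heavier kernel dominates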
correct / total : 1.0 / this.kelms.length;\n }\n // Normalize weights\n const sum = Array.from(weights).reduce((a, b) => a + b, 0);\n if (sum > 0) {\n for (let i = 0; i < weights.length; i++) {\n this.kernelWeights[i] = weights[i] / sum;\n }\n }\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...Array.from(logits));\n const exp = new Float64Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n for (let i = 0; i < exp.length; i++) {\n exp[i] /= sum;\n }\n return exp;\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n /**\n * Get current kernel weights\n */\n getKernelWeights() {\n return [...this.kernelWeights];\n }\n}\n\n// deep-elm-pro.ts — Improved Deep ELM with advanced features\n// Enhanced version of DeepELM with better training strategies and regularization\n/**\n * Improved Deep ELM with advanced training strategies\n * Features:\n * - Layer-wise training with autoencoder pretraining\n * - Dropout and batch normalization\n * - L1/L2/Elastic net regularization\n * - Better initialization strategies\n */\nclass DeepELMPro {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n this.layers = [];\n this.trained = false;\n this.featureExtractors = []; // For pretraining\n // License check removed // Premium feature - requires valid license\n this.options = {\n layers: options.layers,\n activation: (_a = options.activation) !== null && _a !== void 0 ? _a : 'relu',\n useDropout: (_b = options.useDropout) !== null && _b !== void 0 ? _b : false,\n dropoutRate: (_c = options.dropoutRate) !== null && _c !== void 0 ? _c : 0.2,\n useBatchNorm: (_d = options.useBatchNorm) !== null && _d !== void 0 ? _d : false,\n regularization: {\n type: (_f = (_e = options.regularization) === null || _e === void 0 ? void 0 : _e.type) !== null && _f !== void 0 ? _f : 'l2',\n lambda: (_h = (_g = options.regularization) === null || _g === void 0 ? void 0 : _g.lambda) !== null && _h !== void 0 ? _h : 0.0001,\n alpha: (_k = (_j = options.regularization) === null || _j === void 0 ? void 0 : _j.alpha) !== null && _k !== void 0 ? _k : 0.5,\n },\n layerWiseTraining: (_l = options.layerWiseTraining) !== null && _l !== void 0 ? _l : true,\n pretraining: (_m = options.pretraining) !== null && _m !== void 0 ? _m : true,\n categories: options.categories,\n maxLen: (_o = options.maxLen) !== null && _o !== void 0 ? _o : 100,\n };\n // Initialize layers\n for (let i = 0; i < this.options.layers.length; i++) {\n const deepELM = new DeepELM({\n layers: [{ hiddenUnits: this.options.layers[i], activation: this.options.activation }],\n maxLen: this.options.maxLen,\n useTokenizer: i === 0, // Only first layer uses tokenizer\n });\n // Set categories for last layer after construction\n if (i === this.options.layers.length - 1) {\n (_q = (_p = deepELM).setCategories) === null || _q === void 0 ? 
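// Example (sketch): the softmax used by these classes subtracts the max logit
// before exponentiating, avoiding overflow for large scores. Plain-array form:
function stableSoftmax(logits) {
    const max = Math.max(...logits);
    const exp = logits.map(v => Math.exp(v - max));
    const sum = exp.reduce((a, b) => a + b, 0);
    return exp.map(v => v / sum);
}
// stableSoftmax([1000, 1001]) => [≈0.269, ≈0.731]; naive Math.exp(1000) is Infinity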
void 0 : _q.call(_p, this.options.categories);\n }\n this.layers.push(deepELM);\n }\n // Initialize feature extractors for pretraining\n if (this.options.pretraining) {\n for (let i = 0; i < this.options.layers.length - 1; i++) {\n const extractor = new ELM({\n useTokenizer: i === 0 ? true : undefined,\n hiddenUnits: this.options.layers[i],\n categories: [],\n maxLen: this.options.maxLen,\n });\n this.featureExtractors.push(extractor);\n }\n }\n }\n /**\n * Train the deep ELM with improved strategies\n */\n train(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n // Step 1: Pretraining (if enabled)\n if (this.options.pretraining) {\n yield this._pretrain(X);\n }\n // Step 2: Layer-wise or joint training\n if (this.options.layerWiseTraining) {\n yield this._trainLayerWise(X, y);\n }\n else {\n yield this._trainJoint(X, y);\n }\n this.trained = true;\n });\n }\n /**\n * Predict with deep ELM\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const predictions = [];\n for (const x of XArray) {\n // Forward pass through layers\n let features = x;\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n // Apply batch normalization if enabled\n if (this.options.useBatchNorm && i > 0) {\n features = this._batchNormalize(features);\n }\n // Apply dropout if enabled (only during training, but we're in predict mode)\n // In practice, dropout is disabled during inference\n // Forward through layer\n if (i === this.layers.length - 1) {\n // Last layer: get predictions\n const pred = ((_b = (_a = layer).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [features], topK)) || [];\n predictions.push(...pred.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n else {\n // Hidden layers: extract features\n features = this._extractFeatures(layer, features);\n }\n }\n }\n return predictions;\n }\n /**\n * Pretrain layers as autoencoders\n */\n _pretrain(X) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b;\n let currentFeatures = X;\n for (let i = 0; i < this.featureExtractors.length; i++) {\n const extractor = this.featureExtractors[i];\n // Train as autoencoder (reconstruct input)\n const encoded = currentFeatures.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = extractor.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = extractor.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n });\n // Use encoded features as both input and target (autoencoder)\n (_b = (_a = extractor).trainFromData) === null || _b === void 0 ? void 0 : _b.call(_a, encoded, encoded.map((_, idx) => idx));\n // Extract features for next layer\n currentFeatures = encoded.map(x => {\n const hidden = this._extractFeaturesFromELM(extractor, x);\n return Array.from(hidden);\n });\n }\n });\n }\n /**\n * Train layers sequentially\n */\n _trainLayerWise(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b, _c, _d, _e, _f;\n let currentFeatures = X;\n const labelIndices = y.map(label => typeof label === 'number' ? 
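// Example (sketch): the inference path in DeepELMPro.predict is a plain stack —
// hidden layers act as feature extractors and only the last layer classifies.
// Schematic with hypothetical extractor/classifier functions:
function deepForward(x, hiddenExtractors, classify) {
    let features = x;
    for (const extract of hiddenExtractors) {
        features = extract(features); // analogous to _extractFeatures(layer, features)
    }
    return classify(features); // analogous to the last layer's predictFromVector
}
// deepForward([1, 2], [v => v.map(t => t * 2)], v => v.reduce((a, b) => a + b, 0)) === 6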
label : this.options.categories.indexOf(label));\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n // Prepare features\n const features = currentFeatures.map(x => {\n if (i === 0) {\n // First layer: use raw input\n return x;\n }\n else {\n // Subsequent layers: use previous layer output\n return this._extractFeatures(this.layers[i - 1], x);\n }\n });\n // Train layer\n if (i === this.layers.length - 1) {\n // Last layer: train with labels\n (_b = (_a = layer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = layer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, features, labelIndices);\n }\n else {\n // Hidden layers: train to extract features\n // Use next layer's input as target (unsupervised)\n const nextLayerFeatures = i < this.layers.length - 1\n ? features.map(f => this._extractFeatures(this.layers[i + 1], f))\n : features;\n (_f = (_e = layer).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, features, nextLayerFeatures.map((_, idx) => idx));\n }\n // Update features for next layer\n currentFeatures = features.map(f => this._extractFeatures(layer, f));\n }\n });\n }\n /**\n * Train all layers jointly\n */\n _trainJoint(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b, _c, _d;\n const labelIndices = y.map(label => typeof label === 'number' ? label : this.options.categories.indexOf(label));\n // Train the last layer with final features\n const lastLayer = this.layers[this.layers.length - 1];\n const finalFeatures = X.map(x => {\n let features = x;\n for (let i = 0; i < this.layers.length - 1; i++) {\n features = this._extractFeatures(this.layers[i], features);\n }\n return features;\n });\n (_b = (_a = lastLayer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = lastLayer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, finalFeatures, labelIndices);\n });\n }\n _extractFeatures(layer, input) {\n var _a, _b, _c, _d;\n // Extract hidden layer representation\n const hidden = (_b = (_a = layer).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [input], (_c = layer.model) === null || _c === void 0 ? void 0 : _c.W, (_d = layer.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? void 0 : hidden[0]) ? Array.from(hidden[0]) : input;\n }\n _extractFeaturesFromELM(elm, input) {\n var _a, _b, _c, _d;\n const hidden = (_b = (_a = elm).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [input], (_c = elm.model) === null || _c === void 0 ? void 0 : _c.W, (_d = elm.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? 
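// Example (sketch): _trainJoint only fits the final layer — inputs are pushed
// through every hidden layer first, then the last layer trains on the composed
// features. Equivalent pipeline shape, with hypothetical extractors:
function composeFeatures(X, extractors) {
    return X.map(x => extractors.reduce((features, extract) => extract(features), x));
}
// const finalFeatures = composeFeatures(X, hiddenExtractors);
// lastLayer.trainFromData(finalFeatures, labelIndices); // as in _trainJoint above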
void 0 : hidden[0]) || new Float64Array(input.length);\n }\n _batchNormalize(features) {\n const mean = features.reduce((a, b) => a + b, 0) / features.length;\n const variance = features.reduce((sum, x) => sum + Math.pow((x - mean), 2), 0) / features.length;\n const std = Math.sqrt(variance + 1e-8);\n return features.map(x => (x - mean) / std);\n }\n}\n\n// online-kernel-elm.ts — Online Kernel ELM for streaming data\n// Incremental kernel learning with forgetting mechanisms\n/**\n * Online Kernel ELM for real-time learning from streaming data\n * Features:\n * - Incremental kernel matrix updates\n * - Sliding window with forgetting\n * - Adaptive landmark selection\n * - Real-time prediction\n */\nclass OnlineKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n // Storage for streaming data\n this.landmarks = [];\n this.landmarkIndices = [];\n this.samples = [];\n this.labels = [];\n this.sampleWeights = [];\n // Online ridge for incremental updates\n this.onlineRidge = null;\n this.kernelMatrix = [];\n this.kernelMatrixInv = [];\n this.trained = false;\n // License check removed // Premium feature - requires valid license\n this.kernelType = options.kernel.type;\n this.kernelParams = {\n gamma: (_a = options.kernel.gamma) !== null && _a !== void 0 ? _a : 0.01,\n degree: (_b = options.kernel.degree) !== null && _b !== void 0 ? _b : 2,\n coef0: (_c = options.kernel.coef0) !== null && _c !== void 0 ? _c : 0,\n };\n this.categories = options.categories;\n this.ridgeLambda = (_d = options.ridgeLambda) !== null && _d !== void 0 ? _d : 0.001;\n this.windowSize = (_e = options.windowSize) !== null && _e !== void 0 ? _e : 1000;\n this.decayFactor = (_f = options.decayFactor) !== null && _f !== void 0 ? _f : 0.99;\n this.maxLandmarks = (_g = options.maxLandmarks) !== null && _g !== void 0 ? _g : 100;\n }\n /**\n * Initial training with batch data\n */\n fit(X, y) {\n const oneHotY = this._toOneHot(y);\n // Select landmarks\n this._selectLandmarks(X);\n // Compute initial kernel matrix\n this._computeKernelMatrix(X);\n // Initialize online ridge\n this.onlineRidge = new OnlineRidge(this.landmarks.length, this.categories.length, this.ridgeLambda);\n // Train on initial batch\n for (let i = 0; i < X.length; i++) {\n const phi = this._computeKernelFeatures(X[i]);\n const yVec = new Float64Array(oneHotY[i]);\n this.onlineRidge.update(phi, yVec);\n }\n // Store samples\n this.samples = X.map(x => [...x]);\n this.labels = Array.isArray(y[0])\n ? y.map(yy => this._argmax(yy))\n : y;\n this.sampleWeights = new Array(X.length).fill(1.0);\n this.trained = true;\n }\n /**\n * Incremental update with new sample\n */\n update(x, y) {\n if (!this.trained) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Add to samples\n this.samples.push([...x]);\n this.labels.push(Array.isArray(y) ? 
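// Example (sketch): _batchNormalize standardizes one feature vector to zero
// mean / unit variance with a 1e-8 epsilon (note: per sample across features,
// not across a batch, despite the name). Worked numbers:
// [1, 2, 3] -> mean 2, variance 2/3, std ≈ 0.8165 -> ≈ [-1.2247, 0, 1.2247]
function standardize(features) {
    const mean = features.reduce((a, b) => a + b, 0) / features.length;
    const variance = features.reduce((s, x) => s + (x - mean) ** 2, 0) / features.length;
    return features.map(x => (x - mean) / Math.sqrt(variance + 1e-8));
}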
this._argmax(y) : y);\n this.sampleWeights.push(1.0);\n // Apply decay to old samples\n for (let i = 0; i < this.sampleWeights.length; i++) {\n this.sampleWeights[i] *= this.decayFactor;\n }\n // Remove old samples if window exceeded\n if (this.samples.length > this.windowSize) {\n const removeCount = this.samples.length - this.windowSize;\n this.samples.splice(0, removeCount);\n this.labels.splice(0, removeCount);\n this.sampleWeights.splice(0, removeCount);\n }\n // Update landmarks if needed (adaptive strategy)\n if (this.landmarkStrategy === 'adaptive') {\n this._updateLandmarksAdaptive();\n }\n // Compute kernel features\n const phi = this._computeKernelFeatures(x);\n const yVec = new Float64Array(oneHotY);\n // Update online ridge\n if (this.onlineRidge) {\n this.onlineRidge.update(phi, yVec);\n }\n }\n /**\n * Predict with online model\n */\n predict(x, topK = 3) {\n if (!this.trained || !this.onlineRidge) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const allPredictions = [];\n for (const xi of XArray) {\n const predictions = [];\n const phi = this._computeKernelFeatures(xi);\n const logits = this.onlineRidge.predict(phi);\n // Convert to probabilities\n const probs = this._softmax(logits);\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n const topResults = [];\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n topResults.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n predictions.push(...topResults);\n allPredictions.push(...predictions);\n }\n return allPredictions;\n }\n /**\n * Select landmarks from data\n */\n _selectLandmarks(X) {\n const strategy = this.landmarkStrategy || 'uniform';\n const n = Math.min(this.maxLandmarks, X.length);\n if (strategy === 'uniform') {\n const step = Math.max(1, Math.floor(X.length / n));\n this.landmarkIndices = Array.from({ length: n }, (_, i) => Math.min(X.length - 1, i * step));\n }\n else if (strategy === 'random') {\n const indices = Array.from({ length: X.length }, (_, i) => i);\n for (let i = indices.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [indices[i], indices[j]] = [indices[j], indices[i]];\n }\n this.landmarkIndices = indices.slice(0, n);\n }\n else {\n // Adaptive: use first n samples initially\n this.landmarkIndices = Array.from({ length: n }, (_, i) => i);\n }\n this.landmarks = this.landmarkIndices.map(idx => [...X[idx]]);\n }\n /**\n * Compute kernel features for a sample\n */\n _computeKernelFeatures(x) {\n const features = new Float64Array(this.landmarks.length);\n for (let i = 0; i < this.landmarks.length; i++) {\n features[i] = this._kernel(x, this.landmarks[i]);\n }\n return features;\n }\n /**\n * Compute kernel between two vectors\n */\n _kernel(x1, x2) {\n if (this.kernelType === 'linear') {\n return this._dot(x1, x2);\n }\n else if (this.kernelType === 'rbf') {\n const dist = this._squaredDistance(x1, x2);\n return Math.exp(-this.kernelParams.gamma * dist);\n }\n else if (this.kernelType === 'polynomial') {\n const dot = this._dot(x1, x2);\n return Math.pow(dot + this.kernelParams.coef0, this.kernelParams.degree);\n }\n return 0;\n }\n _dot(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += a[i] * b[i];\n }\n return sum;\n }\n _squaredDistance(a, b) {\n let sum = 0;\n 
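// Example (sketch): the three kernels dispatched by _kernel, as plain
// functions (close analogues of _dot/_squaredDistance above):
const dot = (a, b) => a.reduce((s, v, i) => s + v * (b[i] || 0), 0);
const sqDist = (a, b) => a.reduce((s, v, i) => s + (v - (b[i] || 0)) ** 2, 0);
const linearK = (x1, x2) => dot(x1, x2);
const rbfK = (x1, x2, gamma) => Math.exp(-gamma * sqDist(x1, x2));
const polyK = (x1, x2, coef0, degree) => Math.pow(dot(x1, x2) + coef0, degree);
// rbfK(x, x, gamma) === 1 for any x; rbfK([0, 0], [3, 4], 0.01) === Math.exp(-0.25)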
for (let i = 0; i < Math.min(a.length, b.length); i++) {\n const diff = a[i] - b[i];\n sum += diff * diff;\n }\n return sum;\n }\n _computeKernelMatrix(X) {\n // For online learning, we don't need full kernel matrix\n // This is kept for compatibility\n this.kernelMatrix = [];\n }\n _updateLandmarksAdaptive() {\n // Adaptive landmark selection based on prediction error\n // In practice, you might replace landmarks with high error\n // For now, keep existing landmarks\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...Array.from(logits));\n const exp = new Float64Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n for (let i = 0; i < exp.length; i++) {\n exp[i] /= sum;\n }\n return exp;\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n get landmarkStrategy() {\n return 'adaptive'; // Default for online learning\n }\n}\n\n// multi-task-elm.ts — Multi-Task ELM for joint learning across related tasks\n// Shared hidden layer with task-specific output layers\n/**\n * Multi-Task ELM for joint learning across related tasks\n * Features:\n * - Shared feature extraction layer\n * - Task-specific output layers\n * - Task weighting for importance\n * - Joint optimization\n */\nclass MultiTaskELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.taskELMs = new Map();\n this.trained = false;\n // License check removed // Premium feature - requires valid license\n this.tasks = options.tasks.map((task) => {\n var _a;\n return ({\n name: task.name,\n categories: task.categories,\n weight: (_a = task.weight) !== null && _a !== void 0 ? _a : 1.0,\n });\n });\n this.options = {\n sharedHiddenUnits: (_a = options.sharedHiddenUnits) !== null && _a !== void 0 ? _a : 256,\n taskSpecificHiddenUnits: (_b = options.taskSpecificHiddenUnits) !== null && _b !== void 0 ? _b : options.tasks.map(() => 128),\n activation: (_c = options.activation) !== null && _c !== void 0 ? _c : 'relu',\n maxLen: (_d = options.maxLen) !== null && _d !== void 0 ? _d : 100,\n useTokenizer: (_e = options.useTokenizer) !== null && _e !== void 0 ? _e : true,\n };\n // Initialize shared ELM\n this.sharedELM = new ELM({\n useTokenizer: this.options.useTokenizer ? 
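// Example (sketch): the forgetting mechanism in update() multiplies every
// stored sample weight by decayFactor per new sample, so after n updates an
// old sample carries decayFactor^n — with the default 0.99 that is ≈0.366
// after 100 updates and ≈0.0066 after 500, well before the hard windowSize
// cutoff (default 1000) evicts it.
const weightAfter = (decayFactor, n) => Math.pow(decayFactor, n);
// weightAfter(0.99, 100) ≈ 0.366
// Note: as bundled, the landmarkStrategy getter always returns 'adaptive', so
// the 'uniform' and 'random' branches in _selectLandmarks are unreachable and
// _updateLandmarksAdaptive (currently a no-op) runs on every update.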
true : undefined,\n hiddenUnits: this.options.sharedHiddenUnits,\n categories: [], // No categories for shared layer\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize task-specific ELMs\n for (let i = 0; i < this.tasks.length; i++) {\n const task = this.tasks[i];\n const taskELM = new ELM({\n hiddenUnits: this.options.taskSpecificHiddenUnits[i],\n categories: task.categories,\n maxLen: this.options.sharedHiddenUnits, // Input size is shared layer output\n activation: this.options.activation,\n });\n this.taskELMs.set(task.name, taskELM);\n }\n }\n /**\n * Train multi-task ELM\n * @param X Input features\n * @param yTaskData Map of task name to labels\n */\n train(X, yTaskData) {\n var _a, _b, _c, _d;\n // Step 1: Train shared layer (use all tasks)\n const allFeatures = this._extractSharedFeatures(X);\n // Step 2: Train each task-specific layer\n for (const task of this.tasks) {\n const taskLabels = yTaskData.get(task.name);\n if (!taskLabels)\n continue;\n const taskELM = this.taskELMs.get(task.name);\n const labelIndices = taskLabels.map(label => typeof label === 'number'\n ? label\n : task.categories.indexOf(label));\n // Train task-specific ELM on shared features\n (_b = (_a = taskELM).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, task.categories);\n (_d = (_c = taskELM).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, allFeatures, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Predict for all tasks\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = new Map();\n for (const x of XArray) {\n // Extract shared features\n const sharedFeatures = this._extractSharedFeatures([x])[0];\n // Predict for each task\n for (const task of this.tasks) {\n const taskELM = this.taskELMs.get(task.name);\n const taskPreds = ((_b = (_a = taskELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [sharedFeatures], topK)) || [];\n const taskResults = taskPreds.map((pred) => ({\n task: task.name,\n label: pred.label || task.categories[pred.index || 0],\n prob: pred.prob || 0,\n }));\n if (!results.has(task.name)) {\n results.set(task.name, []);\n }\n results.get(task.name).push(...taskResults);\n }\n }\n return results;\n }\n /**\n * Predict for a specific task\n */\n predictTask(x, taskName, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const taskELM = this.taskELMs.get(taskName);\n if (!taskELM) {\n throw new Error(`Task ${taskName} not found`);\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const results = [];\n for (const xi of XArray) {\n // Extract shared features\n const sharedFeatures = this._extractSharedFeatures([xi])[0];\n // Predict with task-specific ELM\n const taskPreds = ((_b = (_a = taskELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [sharedFeatures], topK)) || [];\n results.push(...taskPreds.map((pred) => ({\n task: taskName,\n label: pred.label || this.tasks.find(t => t.name === taskName).categories[pred.index || 0],\n prob: pred.prob || 0,\n })));\n }\n return results;\n }\n /**\n * Extract features from shared layer\n */\n _extractSharedFeatures(X) {\n // Encode inputs if using tokenizer\n const encoded = this.options.useTokenizer\n ? 
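// Example (sketch): driving the multi-task model above — one shared feature
// layer, one head per task. Hypothetical tasks and data:
const mtElm = new MultiTaskELM({
    tasks: [
        { name: 'intent', categories: ['ask', 'buy'] },
        { name: 'sentiment', categories: ['pos', 'neg'], weight: 0.5 },
    ],
    sharedHiddenUnits: 256,
    useTokenizer: true,
});
// const labels = new Map([['intent', intentLabels], ['sentiment', sentimentLabels]]);
// mtElm.train(texts, labels);
// mtElm.predictTask('how much is it?', 'intent', 1) => [{ task: 'intent', label, prob }]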
X.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = this.sharedELM.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = this.sharedELM.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n })\n : X;\n // Extract hidden layer features\n return encoded.map(x => {\n var _a, _b, _c, _d;\n const hidden = (_b = (_a = this.sharedELM).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [x], (_c = this.sharedELM.model) === null || _c === void 0 ? void 0 : _c.W, (_d = this.sharedELM.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? void 0 : hidden[0]) ? Array.from(hidden[0]) : x;\n });\n }\n /**\n * Get task names\n */\n getTaskNames() {\n return this.tasks.map(t => t.name);\n }\n /**\n * Get task weights\n */\n getTaskWeights() {\n return new Map(this.tasks.map(t => [t.name, t.weight]));\n }\n}\n\n// sparse-elm.ts — Sparse ELM with L1/L2 regularization and feature selection\n// Efficient for high-dimensional data with interpretability\n/**\n * Sparse ELM with regularization and feature selection\n * Features:\n * - L1/L2/Elastic net regularization\n * - Weight pruning for sparsity\n * - Feature importance ranking\n * - Interpretable models\n */\nclass SparseELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.weightMask = []; // Track which weights are active\n this.featureImportance = [];\n // License check removed // Premium feature - requires valid license\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n maxLen: (_b = options.maxLen) !== null && _b !== void 0 ? _b : 100,\n useTokenizer: (_c = options.useTokenizer) !== null && _c !== void 0 ? _c : true,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n regularization: {\n type: options.regularization.type,\n lambda: options.regularization.lambda,\n alpha: (_e = options.regularization.alpha) !== null && _e !== void 0 ? _e : 0.5,\n },\n sparsityTarget: (_f = options.sparsityTarget) !== null && _f !== void 0 ? _f : 0.5,\n pruneThreshold: (_g = options.pruneThreshold) !== null && _g !== void 0 ? _g : 1e-6,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train sparse ELM with regularization\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Encode inputs\n const encoded = this.options.useTokenizer\n ? X.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = this.elm.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = this.elm.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n })\n : X;\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
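// Example (sketch): SparseELM configuration matching the option defaults read
// in the constructor above, with hypothetical categories:
const sparse = new SparseELM({
    categories: ['spam', 'ham'],
    hiddenUnits: 256,
    regularization: { type: 'elastic', lambda: 0.0001, alpha: 0.5 },
    sparsityTarget: 0.5,  // aim for at least 50% zeroed weights
    pruneThreshold: 1e-6, // anything smaller is zeroed outright
});
// sparse.train(texts, labels); sparse.getSparsityStats() then reports the ratio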
void 0 : _d.call(_c, encoded, labelIndices);\n // Apply regularization and sparsification\n this._applyRegularization();\n this._pruneWeights();\n this._computeFeatureImportance();\n this.trained = true;\n }\n /**\n * Predict with sparse model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n // Use base ELM for prediction (sparsity is in weights)\n const XArray = Array.isArray(X[0]) ? X : [X];\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, XArray, topK)) || [];\n return preds.map((pred) => ({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n }));\n }\n /**\n * Apply regularization to weights\n */\n _applyRegularization() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const lambda = this.options.regularization.lambda;\n const alpha = this.options.regularization.alpha || 0.5;\n // Apply regularization\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n const w = W[i][j];\n if (this.options.regularization.type === 'l1') {\n // L1: soft thresholding\n const sign = w >= 0 ? 1 : -1;\n W[i][j] = sign * Math.max(0, Math.abs(w) - lambda);\n }\n else if (this.options.regularization.type === 'l2') {\n // L2: shrinkage\n W[i][j] = w / (1 + lambda);\n }\n else if (this.options.regularization.type === 'elastic') {\n // Elastic net: combination\n const l1 = alpha * lambda;\n const l2 = (1 - alpha) * lambda;\n const sign = w >= 0 ? 1 : -1;\n const softThresh = sign * Math.max(0, Math.abs(w) - l1);\n W[i][j] = softThresh / (1 + l2);\n }\n }\n }\n }\n /**\n * Prune small weights for sparsity\n */\n _pruneWeights() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const threshold = this.options.pruneThreshold;\n this.weightMask = [];\n // Prune weights below threshold\n for (let i = 0; i < W.length; i++) {\n this.weightMask[i] = [];\n for (let j = 0; j < W[i].length; j++) {\n if (Math.abs(W[i][j]) < threshold) {\n W[i][j] = 0;\n this.weightMask[i][j] = false;\n }\n else {\n this.weightMask[i][j] = true;\n }\n }\n }\n // Enforce sparsity target\n const currentSparsity = this._computeSparsity();\n if (currentSparsity < this.options.sparsityTarget) {\n this._enforceSparsityTarget();\n }\n }\n /**\n * Compute current sparsity ratio\n */\n _computeSparsity() {\n if (this.weightMask.length === 0)\n return 0;\n let total = 0;\n let zeros = 0;\n for (const row of this.weightMask) {\n for (const active of row) {\n total++;\n if (!active)\n zeros++;\n }\n }\n return total > 0 ? zeros / total : 0;\n }\n /**\n * Enforce target sparsity by pruning more weights\n */\n _enforceSparsityTarget() {\n var _a;\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const target = this.options.sparsityTarget;\n // Collect all weights with their absolute values\n const weights = [];\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n if (Math.abs(W[i][j]) > 0) {\n weights.push({ i, j, abs: Math.abs(W[i][j]) });\n }\n }\n }\n // Sort by absolute value\n weights.sort((a, b) => a.abs - b.abs);\n // Prune smallest weights to reach target\n const totalWeights = W.length * (((_a = W[0]) === null || _a === void 0 ? 
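// Example (sketch): the three weight updates in _applyRegularization as pure
// functions — soft thresholding creates exact zeros (sparsity), shrinkage only
// scales weights down, elastic net combines both:
const softThreshold = (w, lambda) => Math.sign(w) * Math.max(0, Math.abs(w) - lambda);
const shrink = (w, lambda) => w / (1 + lambda);
const elasticNet = (w, lambda, alpha) =>
    softThreshold(w, alpha * lambda) / (1 + (1 - alpha) * lambda);
// softThreshold(0.03, 0.05) === 0     — pruned to an exact zero
// softThreshold(0.30, 0.05) === 0.25
// shrink(0.30, 0.05) ≈ 0.2857         — smaller, but never exactly zero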
void 0 : _a.length) || 0);\n const targetZeros = Math.floor(totalWeights * target);\n const currentZeros = totalWeights - weights.length;\n const needToPrune = targetZeros - currentZeros;\n for (let k = 0; k < Math.min(needToPrune, weights.length); k++) {\n const { i, j } = weights[k];\n W[i][j] = 0;\n if (this.weightMask[i]) {\n this.weightMask[i][j] = false;\n }\n }\n }\n /**\n * Compute feature importance based on weight magnitudes\n */\n _computeFeatureImportance() {\n var _a;\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const inputDim = ((_a = W[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n this.featureImportance = new Array(inputDim).fill(0);\n // Sum absolute weights for each input feature\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n this.featureImportance[j] += Math.abs(W[i][j]);\n }\n }\n // Normalize\n const max = Math.max(...this.featureImportance);\n if (max > 0) {\n for (let i = 0; i < this.featureImportance.length; i++) {\n this.featureImportance[i] /= max;\n }\n }\n }\n /**\n * Get feature importance scores\n */\n getFeatureImportance() {\n return [...this.featureImportance];\n }\n /**\n * Get sparsity statistics\n */\n getSparsityStats() {\n const model = this.elm.model;\n if (!model || !model.W) {\n return { sparsity: 0, activeWeights: 0, totalWeights: 0 };\n }\n const W = model.W;\n let total = 0;\n let active = 0;\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n total++;\n if (Math.abs(W[i][j]) > this.options.pruneThreshold) {\n active++;\n }\n }\n }\n return {\n sparsity: total > 0 ? 1 - active / total : 0,\n activeWeights: active,\n totalWeights: total,\n };\n }\n}\n\n/**\n * SyntheticFieldStore - Storage for labeled samples\n * Supports insert, get, and sample operations\n */\nclass SyntheticFieldStore {\n constructor() {\n this.store = new Map();\n }\n /**\n * Insert a labeled sample into the store\n */\n insert(sample) {\n if (!this.store.has(sample.label)) {\n this.store.set(sample.label, []);\n }\n this.store.get(sample.label).push(sample.value);\n }\n /**\n * Insert multiple samples at once\n */\n insertMany(samples) {\n for (const sample of samples) {\n this.insert(sample);\n }\n }\n /**\n * Get all values for a given label\n */\n get(label) {\n return this.store.get(label) || [];\n }\n /**\n * Sample k values uniformly at random for a given label\n */\n sample(label, k = 1) {\n const values = this.get(label);\n if (values.length === 0) {\n return [];\n }\n const result = [];\n const indices = new Set();\n // Simple uniform random sampling without replacement\n while (result.length < k && indices.size < values.length) {\n const idx = Math.floor(Math.random() * values.length);\n if (!indices.has(idx)) {\n indices.add(idx);\n result.push(values[idx]);\n }\n }\n return result;\n }\n /**\n * Check if a label exists in the store\n */\n hasLabel(label) {\n return this.store.has(label);\n }\n /**\n * Get all labels in the store\n */\n getLabels() {\n return Array.from(this.store.keys());\n }\n /**\n * Get the count of samples for a label\n */\n count(label) {\n return this.get(label).length;\n }\n /**\n * Clear all data\n */\n clear() {\n this.store.clear();\n }\n}\n\n/**\n * RetrievalGenerator - Simple deterministic retrieval sampler\n * Uniform random sampling from stored labeled samples\n */\n/**\n * Seeded random number generator for deterministic testing\n */\nlet SeededRNG$1 = class SeededRNG {\n constructor(seed = Date.now()) {\n 
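// Example (sketch): the SyntheticFieldStore above in use. Sampling is uniform
// and without replacement, so requesting more values than exist returns each
// stored value at most once.
const fieldStore = new SyntheticFieldStore();
fieldStore.insertMany([
    { label: 'color', value: 'red' },
    { label: 'color', value: 'green' },
    { label: 'color', value: 'blue' },
]);
fieldStore.count('color');     // 3
fieldStore.sample('color', 2); // e.g. ['blue', 'red'] — two distinct values
fieldStore.sample('color', 9); // all three values in random order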
this.seed = seed;\n }\n next() {\n // Linear congruential generator\n this.seed = (this.seed * 1664525 + 1013904223) % Math.pow(2, 32);\n return this.seed / Math.pow(2, 32);\n }\n setSeed(seed) {\n this.seed = seed;\n }\n};\nclass RetrievalGenerator {\n constructor(seed) {\n // Initialize and require license before allowing generator use\n this.store = new SyntheticFieldStore();\n this.seed = seed;\n this.rng = new SeededRNG$1(seed);\n }\n /**\n * Ingest labeled samples into the store\n */\n ingest(samples) {\n this.store.insertMany(samples);\n }\n /**\n * Sample k values for a given label\n * Returns empty array if label doesn't exist or has no samples\n */\n sample(label, k = 1) {\n const values = this.store.get(label);\n if (values.length === 0) {\n return [];\n }\n const result = [];\n const availableIndices = Array.from({ length: values.length }, (_, i) => i);\n // Sample k values (or all if k > available)\n const sampleCount = Math.min(k, values.length);\n for (let i = 0; i < sampleCount; i++) {\n const randomIndex = Math.floor(this.rng.next() * availableIndices.length);\n const selectedIndex = availableIndices.splice(randomIndex, 1)[0];\n result.push(values[selectedIndex]);\n }\n return result;\n }\n /**\n * Get a single sample (convenience method)\n */\n sampleOne(label) {\n const samples = this.sample(label, 1);\n return samples.length > 0 ? samples[0] : null;\n }\n /**\n * Check if a label has samples\n */\n hasLabel(label) {\n return this.store.hasLabel(label) && this.store.count(label) > 0;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.store.getLabels();\n }\n /**\n * Reset the generator (clears store and optionally resets seed)\n */\n reset(seed) {\n this.store.clear();\n if (seed !== undefined) {\n this.seed = seed;\n this.rng.setSeed(seed);\n }\n }\n}\n\n/**\n * CharVocab - Character vocabulary builder\n * Builds a vocabulary from character sets and training data\n */\nclass CharVocab {\n constructor() {\n this.charToIndex = new Map();\n this.indexToChar = new Map();\n this.size = 0;\n }\n /**\n * Build vocabulary from a set of strings\n * @param samples Array of strings to build vocabulary from\n * @param charSet Optional predefined character set (e.g., alphanumeric + punctuation)\n */\n build(samples, charSet) {\n const chars = new Set();\n // Add padding character first (index 0) - use null character\n // This ensures index 0 is always padding\n chars.add('\\0');\n // Add predefined character set if provided\n if (charSet) {\n for (const char of charSet) {\n // Skip null character if it's in the charSet (we already added it)\n if (char !== '\\0') {\n chars.add(char);\n }\n }\n }\n // Add all characters from samples\n for (const sample of samples) {\n for (const char of sample) {\n // Skip null characters from samples (we use it for padding)\n if (char !== '\\0') {\n chars.add(char);\n }\n }\n }\n // Sort characters for consistent ordering, but keep null char at index 0\n const sortedChars = Array.from(chars).sort((a, b) => {\n // Ensure null char is always first\n if (a === '\\0')\n return -1;\n if (b === '\\0')\n return 1;\n return a.localeCompare(b);\n });\n // Build mappings\n this.charToIndex.clear();\n this.indexToChar.clear();\n this.size = sortedChars.length;\n sortedChars.forEach((char, index) => {\n this.charToIndex.set(char, index);\n this.indexToChar.set(index, char);\n });\n }\n /**\n * Get index for a character\n */\n getIndex(char) {\n const index = this.charToIndex.get(char);\n if (index === undefined) {\n throw new 
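// Example (sketch): RetrievalGenerator's determinism comes from the LCG above,
// state -> (1664525 * state + 1013904223) mod 2^32. Because
// 1664525 * 2^32 + 1013904223 is below Number.MAX_SAFE_INTEGER, the
// double-precision arithmetic reproduces the 32-bit sequence exactly for any
// 32-bit state. Same seed, same sample order:
const genA = new RetrievalGenerator(42);
const genB = new RetrievalGenerator(42);
genA.ingest([{ label: 'city', value: 'Oslo' }, { label: 'city', value: 'Lima' }]);
genB.ingest([{ label: 'city', value: 'Oslo' }, { label: 'city', value: 'Lima' }]);
// genA.sample('city', 2) and genB.sample('city', 2) return the same order.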
Error(`Character '${char}' not in vocabulary`);\n }\n return index;\n }\n /**\n * Get character for an index\n */\n getChar(index) {\n const char = this.indexToChar.get(index);\n if (char === undefined) {\n throw new Error(`Index ${index} not in vocabulary`);\n }\n return char;\n }\n /**\n * Check if character exists in vocabulary\n */\n hasChar(char) {\n return this.charToIndex.has(char);\n }\n /**\n * Get vocabulary size\n */\n getSize() {\n return this.size;\n }\n /**\n * Get all characters in vocabulary\n */\n getChars() {\n return Array.from(this.charToIndex.keys()).sort();\n }\n /**\n * Get default character set (alphanumeric + common punctuation)\n */\n static getDefaultCharSet() {\n return 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +\n ' !\"#$%&\\'()*+,-./:;<=>?@[\\\\]^_`{|}~';\n }\n}\n\n/**\n * FixedLength - Utilities for fixed-length padding and truncation\n */\nclass FixedLength {\n /**\n * Pad or truncate an array to a fixed length\n * @param arr Array to pad/truncate\n * @param length Target length\n * @param padValue Value to use for padding (default: 0)\n */\n static padOrTruncate(arr, length, padValue = 0) {\n if (arr.length === length) {\n return [...arr];\n }\n if (arr.length > length) {\n // Truncate\n return arr.slice(0, length);\n }\n // Pad\n const result = [...arr];\n while (result.length < length) {\n result.push(padValue);\n }\n return result;\n }\n /**\n * Pad or truncate a string to a fixed length\n * @param str String to pad/truncate\n * @param length Target length\n * @param padChar Character to use for padding (default: space)\n */\n static padOrTruncateString(str, length, padChar = ' ') {\n if (str.length === length) {\n return str;\n }\n if (str.length > length) {\n // Truncate\n return str.slice(0, length);\n }\n // Pad\n return str + padChar.repeat(length - str.length);\n }\n}\n\n/**\n * OneHot - One-hot encoding utilities\n */\nclass OneHot {\n /**\n * Encode an index as a one-hot vector\n * @param index Index to encode\n * @param size Size of the one-hot vector\n */\n static encode(index, size) {\n if (index < 0 || index >= size) {\n throw new Error(`Index ${index} out of range [0, ${size})`);\n }\n const vector = new Array(size).fill(0);\n vector[index] = 1;\n return vector;\n }\n /**\n * Decode a one-hot vector to an index\n * @param vector One-hot vector\n */\n static decode(vector) {\n const index = vector.indexOf(1);\n if (index === -1) {\n throw new Error('Invalid one-hot vector: no element equals 1');\n }\n return index;\n }\n /**\n * Encode multiple indices as one-hot vectors\n * @param indices Array of indices\n * @param size Size of each one-hot vector\n */\n static encodeBatch(indices, size) {\n return indices.map(idx => this.encode(idx, size));\n }\n /**\n * Decode multiple one-hot vectors to indices\n * @param vectors Array of one-hot vectors\n */\n static decodeBatch(vectors) {\n return vectors.map(vec => this.decode(vec));\n }\n}\n\n/**\n * StringEncoder - Encodes strings to vectors and decodes back\n * Compatible with ELM/KELM pipelines\n */\nclass StringEncoder {\n constructor(config) {\n this.config = Object.assign({ useOneHot: false }, config);\n this.vocab = new CharVocab();\n }\n /**\n * Build vocabulary from training samples\n */\n buildVocab(samples) {\n this.vocab.build(samples, this.config.charSet || CharVocab.getDefaultCharSet());\n }\n /**\n * Encode a string to a vector\n * @param str String to encode\n * @returns Encoded vector (either indices or one-hot)\n */\n encode(str) {\n if (this.vocab.getSize() 
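// Example (sketch): the two building blocks StringEncoder composes below —
// fixed-length padding keeps vectors rectangular, one-hot maps an index to a
// basis vector:
FixedLength.padOrTruncate([5, 9], 4);          // [5, 9, 0, 0]
FixedLength.padOrTruncate([5, 9, 7, 1, 3], 4); // [5, 9, 7, 1]
OneHot.encode(2, 4);                           // [0, 0, 1, 0]
OneHot.decode([0, 0, 1, 0]);                   // 2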
=== 0) {\n throw new Error('Vocabulary not built. Call buildVocab() first.');\n }\n // Convert string to indices\n const indices = [];\n for (const char of str) {\n if (this.vocab.hasChar(char)) {\n indices.push(this.vocab.getIndex(char));\n }\n else {\n // For unknown characters, try to find a similar one or use space\n // If space is in vocab, use it; otherwise use 0 (which will be treated as padding)\n if (this.vocab.hasChar(' ')) {\n indices.push(this.vocab.getIndex(' '));\n }\n else {\n indices.push(0);\n }\n }\n }\n // Pad or truncate to fixed length\n const padded = FixedLength.padOrTruncate(indices, this.config.maxLength, 0);\n // Convert to one-hot if requested\n if (this.config.useOneHot) {\n const vocabSize = this.vocab.getSize();\n const oneHotVectors = [];\n for (const idx of padded) {\n oneHotVectors.push(...OneHot.encode(idx, vocabSize));\n }\n return oneHotVectors;\n }\n return padded;\n }\n /**\n * Decode a vector back to a string\n * @param vector Encoded vector\n * @returns Decoded string\n */\n decode(vector) {\n if (this.vocab.getSize() === 0) {\n throw new Error('Vocabulary not built. Call buildVocab() first.');\n }\n let indices;\n if (this.config.useOneHot) {\n // Decode one-hot vectors\n const vocabSize = this.vocab.getSize();\n indices = [];\n for (let i = 0; i < vector.length; i += vocabSize) {\n const oneHot = vector.slice(i, i + vocabSize);\n try {\n indices.push(OneHot.decode(oneHot));\n }\n catch (_a) {\n // If decoding fails, use argmax as fallback\n const maxIdx = oneHot.indexOf(Math.max(...oneHot));\n indices.push(maxIdx);\n }\n }\n // Truncate to maxLength\n indices = indices.slice(0, this.config.maxLength);\n }\n else {\n // Direct index-based decoding\n indices = vector.slice(0, this.config.maxLength);\n }\n // Convert indices to characters, stopping at first padding\n let result = '';\n const vocabSize = this.vocab.getSize();\n const paddingIdx = 0; // Padding is always index 0\n for (const idx of indices) {\n // Clamp index to valid range\n const clampedIdx = Math.max(0, Math.min(vocabSize - 1, Math.round(idx)));\n // Stop decoding at first padding index (0)\n if (clampedIdx === paddingIdx) {\n break;\n }\n // Try to get character for this index\n try {\n const char = this.vocab.getChar(clampedIdx);\n // Skip null characters and control characters (except space, tab, newline)\n if (char === '\\0' || (char.charCodeAt(0) < 32 && char !== ' ' && char !== '\\t' && char !== '\\n')) {\n break; // Stop at first invalid character\n }\n result += char;\n }\n catch (_b) {\n // Invalid index - stop decoding\n break;\n }\n }\n // Trim trailing whitespace but preserve internal spaces\n return result.trimEnd();\n }\n /**\n * Encode multiple strings\n */\n encodeBatch(strings) {\n return strings.map(str => this.encode(str));\n }\n /**\n * Decode multiple vectors\n */\n decodeBatch(vectors) {\n return vectors.map(vec => this.decode(vec));\n }\n /**\n * Get the output vector size\n */\n getVectorSize() {\n if (this.config.useOneHot) {\n return this.config.maxLength * this.vocab.getSize();\n }\n return this.config.maxLength;\n }\n /**\n * Get vocabulary size\n */\n getVocabSize() {\n return this.vocab.getSize();\n }\n /**\n * Get vocabulary\n */\n getVocab() {\n return this.vocab;\n }\n}\n\n/**\n * ELM utilities for OmegaSynth\n * Helper functions for working with ELM models\n */\n/**\n * Create one-hot vector for a label index\n */\nfunction oneHotLabel(labelIndex, numLabels) {\n const vector = new Array(numLabels).fill(0);\n if (labelIndex >= 0 && labelIndex < 
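// Example (sketch): a StringEncoder round trip. Index 0 is reserved for the
// padding character, unknown characters fall back to space (or 0), and decode
// stops at the first padding index, so short strings survive intact.
const enc = new StringEncoder({ maxLength: 8, useOneHot: false });
enc.buildVocab(['hello', 'world']);
const vec = enc.encode('hello'); // 8 indices, right-padded with 0
enc.decode(vec);                 // 'hello'
enc.getVectorSize();             // 8 — maxLength * vocabSize when useOneHot is true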
numLabels) {\n vector[labelIndex] = 1;\n }\n return vector;\n}\n/**\n * Generate random noise vector\n */\nfunction generateNoiseVector(size, seed) {\n const rng = seed !== undefined ? new SeededRNG(seed) : null;\n const noise = [];\n for (let i = 0; i < size; i++) {\n const value = rng ? rng.next() : Math.random();\n // Normalize to [-1, 1]\n noise.push(value * 2 - 1);\n }\n return noise;\n}\n/**\n * Seeded random number generator (linear congruential generator)\n */\nclass SeededRNG {\n constructor(seed) {\n this.seed = seed;\n }\n next() {\n this.seed = (this.seed * 1664525 + 1013904223) % Math.pow(2, 32);\n return this.seed / Math.pow(2, 32);\n }\n}\n\n/**\n * Label-specific validation and cleaning utilities\n */\n/**\n * Validate and clean a generated string based on its label type\n */\nfunction validateForLabel(label, value) {\n if (!value || value.length === 0) {\n return { isValid: false, cleaned: '', reason: 'Empty value' };\n }\n // Get label-specific validator\n const validator = getValidatorForLabel(label);\n return validator(value);\n}\n/**\n * Get validator function for a specific label\n */\nfunction getValidatorForLabel(label) {\n switch (label) {\n case 'first_name':\n case 'last_name':\n return validateName;\n case 'phone_number':\n return validatePhoneNumber;\n case 'email':\n return validateEmail;\n case 'street_address':\n return validateStreetAddress;\n case 'city':\n case 'state':\n case 'country':\n return validateLocation;\n case 'company_name':\n case 'job_title':\n case 'product_name':\n return validateText;\n case 'color':\n return validateColor;\n case 'uuid':\n return validateUUID;\n case 'date':\n return validateDate;\n case 'credit_card_type':\n case 'device_type':\n return validateText;\n default:\n return validateGeneric;\n }\n}\n/**\n * Validate name (first_name, last_name)\n * Rules: Letters only, optional hyphens/apostrophes, no numbers\n */\nfunction validateName(value) {\n // Check for placeholder patterns in the original value (before cleaning); the /i flag below makes this case-insensitive\n // Reject \"Name\" followed by numbers (e.g., \"Name97\", \"name123\")\n if (/^name\\d+$/i.test(value)) {\n return { isValid: false, cleaned: '', reason: 'Placeholder name with numbers' };\n }\n // Remove all non-letter characters except hyphens and apostrophes\n let cleaned = value.replace(/[^a-zA-Z\\-\\'\\s]/g, '');\n // Remove numbers completely\n cleaned = cleaned.replace(/[0-9]/g, '');\n // Remove excessive special characters\n cleaned = cleaned.replace(/[-']{2,}/g, '-'); // Multiple hyphens/apostrophes -> single\n cleaned = cleaned.replace(/^[-']+|[-']+$/g, ''); // Remove leading/trailing\n // Trim and normalize whitespace\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must be at least 2 characters and contain at least one letter\n if (cleaned.length < 2 || !/[a-zA-Z]/.test(cleaned)) {\n return { isValid: false, cleaned: '', reason: 'Too short or no letters' };\n }\n // Reject common placeholder names (case-insensitive) after cleaning\n const lowerCleaned = cleaned.toLowerCase();\n // Check for exact matches\n if (lowerCleaned === 'name' || lowerCleaned === 'firstname' || lowerCleaned === 'lastname' ||\n lowerCleaned === 'surname') {\n return { isValid: false, cleaned: '', reason: 'Placeholder name' };\n }\n // Check for \"name\" followed by very short variations\n if (lowerCleaned.startsWith('name') && lowerCleaned.length <= 6) {\n return { isValid: false, cleaned: '', reason: 'Placeholder name' };\n }\n // Max length check\n if (cleaned.length > 30) {\n cleaned = cleaned.substring(0, 30).trim();\n 
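/*
 Illustrative behaviour of the dispatcher above (inputs assumed):

   validateForLabel('first_name', 'Name97');
   // -> { isValid: false, cleaned: '', reason: 'Placeholder name with numbers' }
   validateForLabel('first_name', "O'Brien3");
   // -> { isValid: true, cleaned: "O'Brien" } (digits stripped)
*/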
}\n return { isValid: true, cleaned };\n}\n/**\n * Validate phone number\n * Rules: Digits, dashes, parentheses, dots, plus, spaces\n */\nfunction validatePhoneNumber(value) {\n // Keep only valid phone characters\n let cleaned = value.replace(/[^0-9\\-\\+\\(\\)\\.\\s]/g, '');\n // Remove excessive special characters\n cleaned = cleaned.replace(/[-\\.]{2,}/g, '-');\n cleaned = cleaned.replace(/\\s+/g, ' ');\n cleaned = cleaned.trim();\n // Count digits\n const digitCount = (cleaned.match(/\\d/g) || []).length;\n // Must have at least 7 digits (minimum phone number)\n if (digitCount < 7) {\n return { isValid: false, cleaned: '', reason: 'Too few digits' };\n }\n // Max length check\n if (cleaned.length > 25) {\n cleaned = cleaned.substring(0, 25).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate email\n * Rules: Must contain @, valid characters before and after\n */\nfunction validateEmail(value) {\n // Keep valid email characters\n let cleaned = value.replace(/[^a-zA-Z0-9@\\.\\-\\_]/g, '');\n // Must contain @\n if (!cleaned.includes('@')) {\n return { isValid: false, cleaned: '', reason: 'Missing @ symbol' };\n }\n const parts = cleaned.split('@');\n if (parts.length !== 2) {\n return { isValid: false, cleaned: '', reason: 'Invalid @ usage' };\n }\n const [local, domain] = parts;\n // Local part must have at least 1 character\n if (!local || local.length === 0) {\n return { isValid: false, cleaned: '', reason: 'Empty local part' };\n }\n // Domain must have at least 3 characters (x.y)\n if (!domain || domain.length < 3) {\n return { isValid: false, cleaned: '', reason: 'Invalid domain' };\n }\n // Domain must contain at least one dot\n if (!domain.includes('.')) {\n return { isValid: false, cleaned: '', reason: 'Domain missing dot' };\n }\n // Remove leading/trailing dots and hyphens\n const cleanLocal = local.replace(/^[\\.\\-]+|[\\.\\-]+$/g, '');\n const cleanDomain = domain.replace(/^[\\.\\-]+|[\\.\\-]+$/g, '');\n if (!cleanLocal || !cleanDomain) {\n return { isValid: false, cleaned: '', reason: 'Invalid format after cleaning' };\n }\n cleaned = `${cleanLocal}@${cleanDomain}`;\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50);\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate street address\n * Rules: Numbers, letters, spaces, common address characters\n */\nfunction validateStreetAddress(value) {\n // Keep valid address characters\n let cleaned = value.replace(/[^a-zA-Z0-9\\s\\-\\#\\.\\,]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 5 characters\n if (cleaned.length < 5) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate location (city, state, country)\n * Rules: Mostly letters, optional spaces/hyphens\n */\nfunction validateLocation(value) {\n // Keep letters, spaces, hyphens, apostrophes\n let cleaned = value.replace(/[^a-zA-Z\\s\\-\\']/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 2 characters and contain letters\n if (cleaned.length < 2 || !/[a-zA-Z]/.test(cleaned)) {\n return { isValid: false, cleaned: '', reason: 'Too short or no letters' };\n }\n // Max length check\n if (cleaned.length > 30) {\n cleaned = cleaned.substring(0, 30).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate text (company_name, job_title, product_name)\n * Rules: Letters, 
numbers, spaces, common punctuation\n */\nfunction validateText(value) {\n // Keep alphanumeric and common punctuation\n let cleaned = value.replace(/[^a-zA-Z0-9\\s\\-\\'\\.\\,]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 2 characters\n if (cleaned.length < 2) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate color\n * Rules: Letters only, maybe spaces\n */\nfunction validateColor(value) {\n // Keep letters and spaces only\n let cleaned = value.replace(/[^a-zA-Z\\s]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 3 characters\n if (cleaned.length < 3) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 20) {\n cleaned = cleaned.substring(0, 20).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate UUID\n * Rules: Should follow UUID format (8-4-4-4-12 hex digits with dashes)\n */\nfunction validateUUID(value) {\n // Keep hex characters and dashes\n let cleaned = value.replace(/[^0-9a-fA-F\\-]/g, '');\n // Try to format as UUID if it has enough characters\n const hexOnly = cleaned.replace(/-/g, '');\n if (hexOnly.length >= 32) {\n // Format as UUID: 8-4-4-4-12\n const formatted = [\n hexOnly.substring(0, 8),\n hexOnly.substring(8, 12),\n hexOnly.substring(12, 16),\n hexOnly.substring(16, 20),\n hexOnly.substring(20, 32)\n ].join('-');\n cleaned = formatted;\n }\n // Must have at least 32 hex characters\n const hexCount = cleaned.replace(/-/g, '').length;\n if (hexCount < 32) {\n return { isValid: false, cleaned: '', reason: 'Too few hex characters' };\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate date\n * Rules: Should follow date format (YYYY-MM-DD or similar)\n */\nfunction validateDate(value) {\n // Keep digits, dashes, slashes\n let cleaned = value.replace(/[^0-9\\-\\/]/g, '');\n // Must have at least 8 digits (YYYYMMDD)\n const digitCount = (cleaned.match(/\\d/g) || []).length;\n if (digitCount < 8) {\n return { isValid: false, cleaned: '', reason: 'Too few digits' };\n }\n // Max length check\n if (cleaned.length > 20) {\n cleaned = cleaned.substring(0, 20).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Generic validator for unknown labels\n */\nfunction validateGeneric(value) {\n // Remove control characters\n let cleaned = value.replace(/[\\x00-\\x1F\\x7F]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n if (cleaned.length < 1) {\n return { isValid: false, cleaned: '', reason: 'Empty after cleaning' };\n }\n return { isValid: true, cleaned };\n}\n\n/**\n * PatternCorrector - Post-processing pattern matching and correction\n * Learns patterns from training data and applies them to generated samples\n */\nclass PatternCorrector {\n constructor() {\n this.patterns = new Map();\n }\n /**\n * Learn patterns from training data\n */\n learnPatterns(samples) {\n const byLabel = new Map();\n // Group samples by label\n for (const sample of samples) {\n if (!byLabel.has(sample.label)) {\n byLabel.set(sample.label, []);\n }\n byLabel.get(sample.label).push(sample.value);\n }\n // Learn patterns for each label\n for (const [label, values] of byLabel.entries()) {\n this.learnPattern(label, values);\n }\n }\n /**\n * Learn pattern for a specific label\n */\n learnPattern(label, examples) {\n if (examples.length === 0)\n return;\n // 
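/*
 Sketch of the pattern record assembled by this method (field names match the
 this.patterns.set() call below; the concrete values are assumptions):

   {
     label: 'color',
     examples: ['blue', 'black', ...],
     commonPrefixes: ['b', 'bl', ...],        // each seen in >10% of examples
     commonSuffixes: ['e', 'ue', ...],
     charFrequency: Map { 'b' => 0.12, ... }, // normalized to sum to 1
     lengthDistribution: [4, 5, ...]
   }
*/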
Extract common prefixes (first 1-3 characters)\n const prefixCounts = new Map();\n const suffixCounts = new Map();\n const charFreq = new Map();\n const lengths = [];\n for (const example of examples) {\n lengths.push(example.length);\n // Prefixes\n for (let len = 1; len <= Math.min(3, example.length); len++) {\n const prefix = example.substring(0, len);\n prefixCounts.set(prefix, (prefixCounts.get(prefix) || 0) + 1);\n }\n // Suffixes\n for (let len = 1; len <= Math.min(3, example.length); len++) {\n const suffix = example.substring(example.length - len);\n suffixCounts.set(suffix, (suffixCounts.get(suffix) || 0) + 1);\n }\n // Character frequency\n for (const char of example) {\n charFreq.set(char, (charFreq.get(char) || 0) + 1);\n }\n }\n // Get common prefixes (appear in >10% of examples - lowered from 20% for better pattern matching)\n const commonPrefixes = Array.from(prefixCounts.entries())\n .filter(([_, count]) => count / examples.length > 0.1)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 15) // Increased from 10 to 15\n .map(([prefix]) => prefix);\n // Get common suffixes (appear in >10% of examples - lowered from 20% for better pattern matching)\n const commonSuffixes = Array.from(suffixCounts.entries())\n .filter(([_, count]) => count / examples.length > 0.1)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 15) // Increased from 10 to 15\n .map(([suffix]) => suffix);\n // Normalize character frequencies\n const totalChars = Array.from(charFreq.values()).reduce((a, b) => a + b, 0);\n for (const [char, count] of charFreq.entries()) {\n charFreq.set(char, count / totalChars);\n }\n this.patterns.set(label, {\n label,\n examples,\n commonPrefixes,\n commonSuffixes,\n charFrequency: charFreq,\n lengthDistribution: lengths,\n });\n }\n /**\n * Correct a generated string using learned patterns\n */\n correct(generated, label) {\n const pattern = this.patterns.get(label);\n if (!pattern) {\n return generated; // No pattern learned, return as-is\n }\n let corrected = generated;\n // 1. Check if it matches a known example (exact match)\n if (pattern.examples.includes(generated)) {\n return generated; // Already perfect\n }\n // Prefix/suffix patterns are used for scoring (see score() below), not for rewriting\n // 2. Check character frequency (remove unlikely characters)\n const charFreq = pattern.charFrequency;\n let cleaned = '';\n for (const char of corrected) {\n const freq = charFreq.get(char) || 0;\n // Keep character if it appears in >0.5% of training data (lowered from 1%), or if it's common (space, etc.)\n if (freq > 0.005 || /[a-zA-Z0-9\\s]/.test(char)) {\n cleaned += char;\n }\n }\n if (cleaned.length > 0) {\n corrected = cleaned;\n }\n // 3. Check length distribution and truncate if far beyond the longest training example\n const maxLength = Math.max(...pattern.lengthDistribution);\n if (corrected.length > maxLength * 1.5) {\n corrected = corrected.substring(0, Math.floor(maxLength * 1.2));\n }\n return corrected;\n }\n /**\n * Score how well a generated string matches the pattern\n */\n score(generated, label) {\n const pattern = this.patterns.get(label);\n if (!pattern) {\n return 0.5; // Unknown pattern, neutral score\n }\n let score = 0;\n // 1. Exact match bonus\n if (pattern.examples.includes(generated)) {\n return 1.0; // Perfect match\n }\n // 2. Prefix match (30% weight)\n const prefixMatch = pattern.commonPrefixes.some(prefix => generated.toLowerCase().startsWith(prefix.toLowerCase()));\n score += prefixMatch ? 0.3 : 0;\n // 3. Suffix match (20% weight)\n const suffixMatch = pattern.commonSuffixes.some(suffix => generated.toLowerCase().endsWith(suffix.toLowerCase()));\n score += suffixMatch ? 0.2 : 0;\n // 4. Character frequency match (30% weight)\n const charFreq = pattern.charFrequency;\n let charScore = 0;\n let charCount = 0;\n for (const char of generated) {\n const freq = charFreq.get(char) || 0;\n charScore += freq;\n charCount++;\n }\n score += (charCount > 0 ? charScore / charCount : 0) * 0.3;\n // 5. Length match (20% weight)\n const avgLength = pattern.lengthDistribution.reduce((a, b) => a + b, 0) / pattern.lengthDistribution.length;\n const lengthDiff = Math.abs(generated.length - avgLength) / avgLength;\n const lengthScore = Math.max(0, 1 - lengthDiff);\n score += lengthScore * 0.2;\n // The four weights above sum to 1.0, so the combined score is already in [0, 1]\n return score;\n }\n /**\n * Get pattern for a label\n */\n getPattern(label) {\n return this.patterns.get(label);\n }\n}\n\n/**\n * SequenceContext - Add sequence context to generation\n * Uses previous characters to inform next character prediction\n */\nclass SequenceContext {\n constructor(n = 3) {\n this.ngramPatterns = new Map();\n this.n = n;\n }\n /**\n * Learn n-gram patterns from training data\n */\n learnPatterns(samples) {\n this.ngramPatterns.clear();\n for (const sample of samples) {\n // Extract n-grams\n for (let i = 0; i <= sample.length - this.n; i++) {\n const ngram = sample.substring(i, i + this.n - 1); // Context (n-1 chars)\n const nextChar = sample[i + this.n - 1]; // Next character\n if (!this.ngramPatterns.has(ngram)) {\n this.ngramPatterns.set(ngram, new Map());\n }\n const charMap = this.ngramPatterns.get(ngram);\n charMap.set(nextChar, (charMap.get(nextChar) || 0) + 1);\n }\n }\n }\n /**\n * Get next character probabilities given context\n */\n getNextCharProbs(context) {\n // Use last n-1 characters as context\n const ctx = context.length >= this.n - 1\n ? 
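/*
 Illustrative n-gram walk-through: with n = 3, learnPatterns(['cat', 'car'])
 stores the 2-character context 'ca' -> Map { 't' => 1, 'r' => 1 }, so
 getNextCharProbs('ca') yields Map { 't' => 0.5, 'r' => 0.5 } and
 suggestNextChar('ca') returns 't' (the first of the tied maxima).
*/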
context.substring(context.length - (this.n - 1))\n : context;\n const charCounts = this.ngramPatterns.get(ctx);\n if (!charCounts || charCounts.size === 0) {\n return new Map();\n }\n // Convert counts to probabilities\n const total = Array.from(charCounts.values()).reduce((a, b) => a + b, 0);\n const probs = new Map();\n for (const [char, count] of charCounts.entries()) {\n probs.set(char, count / total);\n }\n return probs;\n }\n /**\n * Suggest next character based on context\n */\n suggestNextChar(context) {\n const probs = this.getNextCharProbs(context);\n if (probs.size === 0) {\n return null;\n }\n // Return most likely character\n let bestChar = '';\n let bestProb = 0;\n for (const [char, prob] of probs.entries()) {\n if (prob > bestProb) {\n bestProb = prob;\n bestChar = char;\n }\n }\n return bestChar;\n }\n /**\n * Score how well a character fits the context\n */\n scoreChar(context, char) {\n const probs = this.getNextCharProbs(context);\n return probs.get(char) || 0;\n }\n}\n\n/**\n * ELMGenerator - Label-conditioned string generator using ELM\n * Trains an ELM to generate encoded strings based on labels + noise\n */\nclass ELMGenerator {\n constructor(config) {\n var _a;\n this.elm = null;\n this.labels = [];\n this.patternCorrector = null;\n this.sequenceContext = null;\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ hiddenUnits: 128, activation: 'relu', ridgeLambda: 0.01, noiseSize: 32, useOneHot: false, useClassification: false, usePatternCorrection: true }, config);\n this.noiseSize = this.config.noiseSize;\n this.useClassification = this.config.useClassification;\n this.encoder = new StringEncoder({\n maxLength: config.maxLength,\n useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? 
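/*
 Minimal usage sketch for ELMGenerator (labels and values are assumptions):

   const gen = new ELMGenerator({ maxLength: 12, seed: 42 });
   gen.train([
     { label: 'color', value: 'red' },
     { label: 'color', value: 'blue' },
   ]);
   const one = gen.generate('color');          // validated, cleaned string
   const five = gen.generateBatch('color', 5); // scored and de-duplicated
*/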
_a : false, // Default to false for memory efficiency\n });\n if (this.config.usePatternCorrection) {\n this.patternCorrector = new PatternCorrector();\n }\n // Always use sequence context for better generation\n this.sequenceContext = new SequenceContext(3); // 3-grams\n }\n /**\n * Train the ELM generator on labeled samples\n */\n train(samples) {\n if (samples.length === 0) {\n throw new Error('Cannot train on empty dataset');\n }\n // Extract unique labels\n const uniqueLabels = Array.from(new Set(samples.map(s => s.label)));\n this.labels = uniqueLabels;\n // Extract all values for vocabulary building\n const allValues = samples.map(s => s.value);\n this.encoder.buildVocab(allValues);\n // Learn patterns if pattern correction is enabled\n if (this.patternCorrector) {\n this.patternCorrector.learnPatterns(samples);\n }\n // Learn sequence context\n if (this.sequenceContext) {\n this.sequenceContext.learnPatterns(allValues);\n }\n // Build training data\n const X = [];\n const Y = [];\n for (const sample of samples) {\n const labelIndex = this.labels.indexOf(sample.label);\n if (labelIndex === -1) {\n continue;\n }\n // Input: concat(oneHot(label), noiseVector)\n // Note: with a fixed seed, every sample gets the same noise vector, so variation comes from the label one-hot alone\n const labelOneHot = oneHotLabel(labelIndex, this.labels.length);\n const noise = generateNoiseVector(this.noiseSize, this.config.seed);\n const inputVector = [...labelOneHot, ...noise];\n X.push(inputVector);\n // Target: encoded(value)\n const encodedValue = this.encoder.encode(sample.value);\n Y.push(encodedValue);\n }\n if (X.length === 0) {\n throw new Error('No valid training samples after processing');\n }\n // Create ELM config\n const inputSize = this.labels.length + this.noiseSize;\n const elmConfig = {\n useTokenizer: false, // Numeric mode\n inputSize: inputSize,\n categories: [], // Categories are unused here; classification is handled via the output encoding\n hiddenUnits: this.config.hiddenUnits,\n activation: this.config.activation,\n // Use lower regularization for better pattern learning\n ridgeLambda: this.config.ridgeLambda * 0.1, // Reduce regularization\n task: this.useClassification ? 'classification' : 'regression',\n };\n // Create and train the ELM via the direct constructor\n this.elm = new ELM(elmConfig);\n this.elm.trainFromData(X, Y);\n }\n /**\n * Generate a string for a given label\n * @param label Label to generate for\n * @param noiseSeed Optional seed for noise generation (for deterministic output)\n */\n generate(label, noiseSeed) {\n var _a;\n if (!this.elm) {\n throw new Error('Model not trained. Call train() first.');\n }\n const labelIndex = this.labels.indexOf(label);\n if (labelIndex === -1) {\n throw new Error(`Label '${label}' not found in training data`);\n }\n // Create input: concat(oneHot(label), noiseVector)\n const labelOneHot = oneHotLabel(labelIndex, this.labels.length);\n const noise = generateNoiseVector(this.noiseSize, noiseSeed !== undefined ? 
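/*
 Sketch of the regression-mode quantization used in generate() below: raw
 logits are clamped, rounded, then clamped into [0, vocabSize - 1].

   const toIndex = (val, vocabSize) => {
     const clamped = Math.max(-vocabSize, Math.min(vocabSize * 2, val));
     return Math.max(0, Math.min(vocabSize - 1, Math.round(clamped)));
   };
   // toIndex(7.4, 30) === 7; toIndex(-3.2, 30) === 0; toIndex(95, 30) === 29
*/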
noiseSeed : this.config.seed);\n const inputVector = [...labelOneHot, ...noise];\n // Predict based on mode\n let decoded;\n if (this.useClassification && this.config.useOneHot && typeof this.elm.predictProbaFromVector === 'function') {\n // Classification mode with one-hot: use probabilities\n const vocabSize = this.encoder.getVocabSize();\n const maxLength = this.config.maxLength;\n // Get probabilities for each position\n const probs = this.elm.predictProbaFromVector(inputVector);\n // Reshape to [maxLength, vocabSize] and use argmax\n const indices = [];\n for (let pos = 0; pos < maxLength; pos++) {\n const posProbs = probs.slice(pos * vocabSize, (pos + 1) * vocabSize);\n const maxIdx = posProbs.indexOf(Math.max(...posProbs));\n indices.push(maxIdx);\n }\n decoded = this.encoder.decode(indices);\n }\n else {\n // Regression mode: use logits and round\n const prediction = this.elm.predictLogitsFromVector(inputVector);\n // Convert logits to indices with proper quantization\n const vocabSize = this.encoder.getVocabSize();\n const indices = prediction.map(val => {\n // Clamp value to reasonable range first (prevent extreme values)\n const clamped = Math.max(-vocabSize, Math.min(vocabSize * 2, val));\n // Round to nearest integer\n const rounded = Math.round(clamped);\n // Clamp to valid vocabulary range [0, vocabSize-1]\n const idx = Math.max(0, Math.min(vocabSize - 1, rounded));\n return idx;\n });\n decoded = this.encoder.decode(indices);\n }\n // Apply pattern correction if enabled\n let corrected = decoded;\n if (this.patternCorrector) {\n corrected = this.patternCorrector.correct(decoded, label);\n }\n // Apply sequence context refinement\n if (this.sequenceContext && corrected.length > 0) {\n corrected = this.refineWithSequenceContext(corrected, label);\n }\n // Validate and clean the decoded string using label-specific rules\n const validation = validateForLabel(label, corrected);\n // If validation fails, try to generate again with different noise (up to 3 attempts)\n if (!validation.isValid) {\n for (let attempt = 0; attempt < 3; attempt++) {\n const baseSeed = noiseSeed !== undefined ? noiseSeed : ((_a = this.config.seed) !== null && _a !== void 0 ? 
_a : Date.now());\n const newNoise = generateNoiseVector(this.noiseSize, baseSeed + attempt + 1000);\n const newInputVector = [...labelOneHot, ...newNoise];\n let newDecoded;\n if (this.useClassification && this.config.useOneHot && typeof this.elm.predictProbaFromVector === 'function') {\n const vocabSize = this.encoder.getVocabSize();\n const maxLength = this.config.maxLength;\n const probs = this.elm.predictProbaFromVector(newInputVector);\n const newIndices = [];\n for (let pos = 0; pos < maxLength; pos++) {\n const posProbs = probs.slice(pos * vocabSize, (pos + 1) * vocabSize);\n const maxIdx = posProbs.indexOf(Math.max(...posProbs));\n newIndices.push(maxIdx);\n }\n newDecoded = this.encoder.decode(newIndices);\n }\n else {\n const newPrediction = this.elm.predictLogitsFromVector(newInputVector);\n const vocabSize = this.encoder.getVocabSize();\n const newIndices = newPrediction.map(val => {\n const clamped = Math.max(-vocabSize, Math.min(vocabSize * 2, val));\n const rounded = Math.round(clamped);\n return Math.max(0, Math.min(vocabSize - 1, rounded));\n });\n newDecoded = this.encoder.decode(newIndices);\n }\n // Apply pattern correction\n if (this.patternCorrector) {\n newDecoded = this.patternCorrector.correct(newDecoded, label);\n }\n const newValidation = validateForLabel(label, newDecoded);\n if (newValidation.isValid) {\n return newValidation.cleaned;\n }\n }\n // If all attempts fail, return empty string\n return '';\n }\n return validation.cleaned;\n }\n /**\n * Generate multiple strings for a label with confidence-based selection\n */\n generateBatch(label, count) {\n const candidates = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 10; // Allow up to 10x attempts to get valid unique samples\n // Generate candidates with scoring\n while (attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n try {\n const generated = this.generate(label, seed);\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n // Score the candidate\n let score = 1.0;\n // Pattern match score\n if (this.patternCorrector) {\n score = this.patternCorrector.score(generated, label);\n }\n // Validation score (valid = 1.0, invalid = 0.0)\n const validation = validateForLabel(label, generated);\n if (!validation.isValid) {\n score = 0;\n }\n candidates.push({ value: generated, score });\n seen.add(generated.toLowerCase());\n }\n }\n catch (error) {\n // Skip errors\n }\n attempts++;\n }\n // Sort by score and return top candidates\n candidates.sort((a, b) => b.score - a.score);\n return candidates.slice(0, count).map(c => c.value);\n }\n /**\n * Refine generated string using sequence context\n */\n refineWithSequenceContext(generated, label) {\n if (!this.sequenceContext || generated.length === 0) {\n return generated;\n }\n // Try to improve the string by checking sequence context\n let refined = '';\n for (let i = 0; i < generated.length; i++) {\n const context = refined; // Use what we've built so far\n const currentChar = generated[i];\n // Check if current char fits the context\n const contextScore = this.sequenceContext.scoreChar(context, currentChar);\n // If score is very low, try to suggest better character\n if (contextScore < 0.1 && context.length > 0) {\n const suggested = this.sequenceContext.suggestNextChar(context);\n if (suggested && suggested !== currentChar) {\n // Only replace if it's a significant improvement\n refined += suggested;\n }\n else {\n refined += currentChar;\n }\n }\n else {\n refined += currentChar;\n }\n // Stop if we hit padding or invalid character\n if (currentChar === '\\0' || currentChar.charCodeAt(0) === 0) {\n break;\n }\n }\n return refined;\n }\n /**\n * Get all trained labels\n */\n getLabels() {\n return [...this.labels];\n }\n /**\n * Check if model is trained\n */\n isTrained() {\n return this.elm !== null;\n }\n}\n\n/**\n * HybridGenerator - Blends Retrieval + ELM jitter for realism + variation\n * 1. Retrieve real sample\n * 2. Encode\n * 3. Apply ELM noise\n * 4. Decode\n */\nclass HybridGenerator {\n constructor(config) {\n var _a;\n this.patternCorrector = null;\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.01, noiseSize: 32, jitterStrength: 0.05, exactMode: false, useOneHot: false, useClassification: false, usePatternCorrection: true }, config);\n // If exact mode, set jitter to 0\n if (this.config.exactMode) {\n this.jitterStrength = 0;\n }\n else {\n this.jitterStrength = this.config.jitterStrength;\n }\n this.retrieval = new RetrievalGenerator(config.seed);\n this.elm = new ELMGenerator({\n maxLength: config.maxLength,\n hiddenUnits: this.config.elmHiddenUnits,\n activation: this.config.elmActivation,\n ridgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n useOneHot: this.config.useOneHot,\n useClassification: this.config.useClassification,\n usePatternCorrection: this.config.usePatternCorrection,\n seed: config.seed,\n });\n this.encoder = new StringEncoder({\n maxLength: config.maxLength,\n useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? 
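/*
 Sketch of the retrieve -> encode -> jitter -> decode blend implemented by
 applyJitter() below (the effective jitter is capped at 5% there):

   const j = Math.min(jitterStrength, 0.05);
   const jittered = encoded.map((val, i) => (1 - j) * val + j * (elmOutput[i] || 0));
   // each element is then rounded and clamped into [0, vocabSize - 1]
*/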
_a : false, // Default to false for memory efficiency\n });\n if (this.config.usePatternCorrection) {\n this.patternCorrector = new PatternCorrector();\n }\n }\n /**\n * Train the hybrid generator on labeled samples\n */\n train(samples) {\n // Train retrieval\n this.retrieval.ingest(samples);\n // Build encoder vocabulary\n const allValues = samples.map(s => s.value);\n this.encoder.buildVocab(allValues);\n // Train ELM for jittering\n this.elm.train(samples);\n // Learn patterns if pattern correction is enabled\n if (this.patternCorrector) {\n this.patternCorrector.learnPatterns(samples);\n }\n }\n /**\n * Generate a hybrid sample (retrieval + jitter)\n * @param label Label to generate for\n * @param noiseSeed Optional seed for deterministic output\n */\n generate(label, noiseSeed) {\n // Step 1: Retrieve real sample\n const retrieved = this.retrieval.sampleOne(label);\n if (!retrieved) {\n // Fallback to pure ELM if no retrieval available\n return this.elm.generate(label, noiseSeed);\n }\n // Step 2: Encode\n const encoded = this.encoder.encode(retrieved);\n // Step 3: Apply ELM noise/jitter\n // Generate a jittered version using ELM\n const jittered = this.applyJitter(encoded, label, noiseSeed);\n // Step 4: Decode\n const decoded = this.encoder.decode(jittered);\n // Step 5: Apply pattern correction if enabled\n let corrected = decoded;\n if (this.patternCorrector) {\n corrected = this.patternCorrector.correct(decoded, label);\n }\n // Step 6: Validate and clean using label-specific rules\n const validation = validateForLabel(label, corrected);\n // If validation fails, try jittering again with different noise (up to 2 attempts)\n if (!validation.isValid) {\n for (let attempt = 0; attempt < 2; attempt++) {\n const newSeed = noiseSeed !== undefined ? 
noiseSeed + attempt + 1000 : undefined;\n const newJittered = this.applyJitter(encoded, label, newSeed);\n const newDecoded = this.encoder.decode(newJittered);\n let newCorrected = newDecoded;\n if (this.patternCorrector) {\n newCorrected = this.patternCorrector.correct(newDecoded, label);\n }\n const newValidation = validateForLabel(label, newCorrected);\n if (newValidation.isValid) {\n return newValidation.cleaned;\n }\n }\n // If all attempts fail, return original (retrieved is always valid)\n return retrieved;\n }\n return validation.cleaned;\n }\n /**\n * Apply jitter to an encoded vector\n */\n applyJitter(encoded, label, noiseSeed) {\n // Generate ELM output for the label\n const elmOutput = this.generateELMVector(label, noiseSeed);\n // If ELM output is empty or invalid, return original (no jitter)\n if (!elmOutput || elmOutput.length === 0 || elmOutput.every(v => v === 0)) {\n return encoded;\n }\n // Blend: (1 - jitterStrength) * original + jitterStrength * elmOutput\n // Use smaller jitter to preserve more of the original\n const effectiveJitter = Math.min(this.jitterStrength, 0.05); // Cap at 5% jitter\n const jittered = encoded.map((val, idx) => {\n const elmVal = elmOutput[idx] || 0;\n return (1 - effectiveJitter) * val + effectiveJitter * elmVal;\n });\n // Convert blended continuous values to integer indices\n // Round and clamp to valid vocabulary range\n const vocabSize = this.encoder.getVocabSize();\n const indices = jittered.map(val => {\n // Clamp value first\n const clamped = Math.max(0, Math.min(vocabSize - 1, val));\n const idx = Math.round(clamped);\n return Math.max(0, Math.min(vocabSize - 1, idx));\n });\n return indices;\n }\n /**\n * Generate an ELM vector for jittering\n */\n generateELMVector(label, noiseSeed) {\n try {\n // Try to get ELM prediction\n const elmGenerated = this.elm.generate(label, noiseSeed);\n // Only encode if we got a non-empty string\n if (elmGenerated && elmGenerated.length > 0) {\n return this.encoder.encode(elmGenerated);\n }\n // If empty, return zero vector (no jitter)\n return new Array(this.encoder.getVectorSize()).fill(0);\n }\n catch (_a) {\n // If ELM fails, return zero vector (no jitter)\n return new Array(this.encoder.getVectorSize()).fill(0);\n }\n }\n /**\n * Generate multiple hybrid samples\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 5; // Allow up to 5x attempts to get valid unique samples\n while (results.length < count && attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n const generated = this.generate(label, seed);\n // Only add if valid, non-empty, and unique\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n attempts++;\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.retrieval.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n return this.retrieval.hasLabel(this.getLabels()[0] || '') && this.elm.isTrained();\n }\n}\n\n/**\n * ExactGenerator - Perfect retrieval with pattern-based variations\n * Provides 100% realistic data by using exact training samples + pattern matching\n */\nclass ExactGenerator {\n constructor(config = {}) {\n this.trainingSamples = [];\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ usePatternMatching: true, maxVariations: 10 }, config);\n this.retrieval = new RetrievalGenerator(config.seed);\n this.patternCorrector = new PatternCorrector();\n }\n /**\n * Train the exact generator\n */\n train(samples) {\n this.trainingSamples = samples;\n this.retrieval.ingest(samples);\n if (this.config.usePatternMatching) {\n this.patternCorrector.learnPatterns(samples);\n }\n }\n /**\n * Generate an exact sample (100% realistic)\n */\n generate(label, seed) {\n // 1. Try exact retrieval first (100% realistic)\n const exact = this.retrieval.sampleOne(label);\n if (exact) {\n return exact; // ✅ 100% realistic\n }\n // 2. If pattern matching enabled, try pattern-based generation\n if (this.config.usePatternMatching) {\n const pattern = this.patternCorrector.getPattern(label);\n if (pattern && pattern.examples.length > 0) {\n // Return a random example from the pattern\n const randomIndex = seed !== undefined\n ? seed % pattern.examples.length\n : Math.floor(Math.random() * pattern.examples.length);\n return pattern.examples[randomIndex];\n }\n }\n throw new Error(`No samples found for label: ${label}`);\n }\n /**\n * Generate with pattern-based variations\n */\n generateWithVariation(label, seed) {\n // Get base sample\n const base = this.generate(label, seed);\n if (!this.config.usePatternMatching) {\n return base;\n }\n // Try to create variations using pattern matching\n const pattern = this.patternCorrector.getPattern(label);\n if (!pattern) {\n return base;\n }\n // Simple variation: combine prefix from one example with suffix from another\n if (pattern.examples.length >= 2) {\n const seed1 = seed !== undefined ? 
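/*
 Illustrative midpoint splice used by generateWithVariation() below (example
 values assumed): ex1 = 'Crimson', ex2 = 'Scarlet', mid = 3

   const variation = ex1.substring(0, mid) + ex2.substring(mid); // 'Crirlet'
   // kept only if validateForLabel() passes and the pattern score exceeds 0.6
*/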
seed : Date.now();\n const seed2 = seed1 + 1000;\n const idx1 = seed1 % pattern.examples.length;\n const idx2 = seed2 % pattern.examples.length;\n if (idx1 !== idx2) {\n const ex1 = pattern.examples[idx1];\n const ex2 = pattern.examples[idx2];\n // Try combining if they're similar length\n if (Math.abs(ex1.length - ex2.length) <= 2) {\n const mid = Math.floor(ex1.length / 2);\n const variation = ex1.substring(0, mid) + ex2.substring(mid);\n // Validate the variation\n const validation = validateForLabel(label, variation);\n if (validation.isValid) {\n // Score the variation\n const score = this.patternCorrector.score(variation, label);\n if (score > 0.6) { // Only use if reasonably good\n return validation.cleaned;\n }\n }\n }\n }\n }\n return base;\n }\n /**\n * Generate multiple exact samples\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n // Try to get unique exact samples\n for (let i = 0; i < count * 2 && results.length < count; i++) {\n const seed = this.config.seed !== undefined\n ? this.config.seed + i\n : Date.now() + i;\n let generated;\n if (i < count && this.config.usePatternMatching) {\n // First half: exact matches\n generated = this.generate(label, seed);\n }\n else {\n // Second half: try variations\n generated = this.generateWithVariation(label, seed);\n }\n if (generated && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.retrieval.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n return this.retrieval.getLabels().length > 0;\n }\n}\n\n/**\n * PerfectGenerator - Best of all worlds\n * Combines exact retrieval, pattern matching, and improved ELM generation\n * Provides highest realism with good variation\n */\nclass PerfectGenerator {\n constructor(config) {\n this.elm = null;\n this.trainingSamples = [];\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ preferExact: true, usePatternMatching: true, useImprovedELM: false, elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.001, noiseSize: 32 }, config);\n this.exact = new ExactGenerator({\n seed: config.seed,\n usePatternMatching: this.config.usePatternMatching,\n });\n this.hybrid = new HybridGenerator({\n maxLength: config.maxLength,\n seed: config.seed,\n exactMode: false, // Allow some jitter for variation\n jitterStrength: 0.02, // Very low jitter (2%)\n useOneHot: false, // Disable one-hot to reduce memory (was: this.config.useImprovedELM)\n useClassification: false, // Disable classification to reduce memory (was: this.config.useImprovedELM)\n usePatternCorrection: true,\n elmHiddenUnits: this.config.elmHiddenUnits, // Now uses reduced 128 instead of 256\n elmActivation: this.config.elmActivation,\n elmRidgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n });\n // Only create standalone ELM if explicitly requested AND useImprovedELM is true\n // This avoids duplicate ELM training (HybridGenerator already has one)\n if (this.config.useImprovedELM && config.useImprovedELM === true) {\n this.elm = new ELMGenerator({\n maxLength: config.maxLength,\n seed: config.seed,\n hiddenUnits: this.config.elmHiddenUnits,\n activation: this.config.elmActivation,\n ridgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n useOneHot: false, // Disable one-hot to reduce memory\n useClassification: false, // Disable 
classification to reduce memory\n usePatternCorrection: true,\n });\n }\n this.patternCorrector = new PatternCorrector();\n }\n /**\n * Train the perfect generator\n */\n train(samples) {\n this.trainingSamples = samples;\n // Train generators in order of priority (exact is fastest)\n this.exact.train(samples);\n // Only train hybrid if we need it (lazy training)\n // We'll train it on first use if needed\n // Learn patterns (lightweight)\n this.patternCorrector.learnPatterns(samples);\n }\n /**\n * Lazy train hybrid generator\n */\n ensureHybridTrained() {\n if (!this.hybrid.isTrained() && this.trainingSamples.length > 0) {\n this.hybrid.train(this.trainingSamples);\n }\n }\n /**\n * Lazy train ELM generator\n */\n ensureELMTrained() {\n if (this.elm && !this.elm.isTrained() && this.trainingSamples.length > 0) {\n this.elm.train(this.trainingSamples);\n }\n }\n /**\n * Generate with best strategy\n */\n generate(label, seed) {\n var _a;\n const candidates = [];\n // 1. Try exact retrieval first (100% realistic)\n try {\n const exact = this.exact.generate(label, seed);\n if (exact) {\n candidates.push({ value: exact, score: 1.0, source: 'exact' });\n }\n }\n catch (error) {\n // No exact match available\n }\n // 2. Try exact with variation (95-100% realistic)\n try {\n const exactVar = this.exact.generateWithVariation(label, seed);\n if (exactVar && exactVar !== ((_a = candidates[0]) === null || _a === void 0 ? void 0 : _a.value)) {\n const score = this.patternCorrector.score(exactVar, label);\n candidates.push({ value: exactVar, score: score * 0.95, source: 'exact-variation' });\n }\n }\n catch (error) {\n // Skip\n }\n // 3. Try hybrid (80-90% realistic) - lazy train if needed\n try {\n this.ensureHybridTrained();\n const hybrid = this.hybrid.generate(label, seed);\n if (hybrid && !candidates.some(c => c.value === hybrid)) {\n const score = this.patternCorrector.score(hybrid, label);\n const validation = validateForLabel(label, hybrid);\n const finalScore = validation.isValid ? score * 0.85 : score * 0.5;\n candidates.push({ value: hybrid, score: finalScore, source: 'hybrid' });\n }\n }\n catch (error) {\n // Skip\n }\n // 4. Try improved ELM if available (75-85% realistic) - lazy train if needed\n if (this.elm) {\n try {\n this.ensureELMTrained();\n const elmGen = this.elm.generate(label, seed);\n if (elmGen && !candidates.some(c => c.value === elmGen)) {\n const score = this.patternCorrector.score(elmGen, label);\n const validation = validateForLabel(label, elmGen);\n const finalScore = validation.isValid ? score * 0.8 : score * 0.4;\n candidates.push({ value: elmGen, score: finalScore, source: 'elm' });\n }\n }\n catch (error) {\n // Skip\n }\n }\n // 5. Select best candidate\n if (candidates.length === 0) {\n throw new Error(`No samples found for label: ${label}`);\n }\n // Sort by score (highest first)\n candidates.sort((a, b) => b.score - a.score);\n // If preferExact and we have exact match, use it\n if (this.config.preferExact) {\n const exactCandidate = candidates.find(c => c.source === 'exact');\n if (exactCandidate && exactCandidate.score >= 0.9) {\n return exactCandidate.value;\n }\n }\n // Return highest scoring candidate\n return candidates[0].value;\n }\n /**\n * Generate multiple samples with best strategy\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 5;\n while (results.length < count && attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n try {\n const generated = this.generate(label, seed);\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n }\n catch (error) {\n // Skip errors\n }\n attempts++;\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.exact.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n // At minimum, exact generator should be trained\n return this.exact.isTrained();\n }\n}\n\n/**\n * OmegaSynth - Main class\n * Unified interface for synthetic data generation\n */\nclass OmegaSynth {\n constructor(config) {\n this.generator = null;\n this.config = Object.assign({ maxLength: 32 }, config);\n this.seed = config.seed;\n // Initialize generator based on mode\n this.initializeGenerator();\n }\n initializeGenerator() {\n var _a, _b, _c, _d, _e, _f, _g;\n const commonConfig = {\n maxLength: this.config.maxLength || 32,\n seed: this.seed,\n };\n switch (this.config.mode) {\n case 'retrieval':\n this.generator = new RetrievalGenerator(this.seed);\n break;\n case 'elm':\n this.generator = new ELMGenerator(Object.assign(Object.assign({}, commonConfig), { hiddenUnits: 128, activation: 'relu', ridgeLambda: 0.01, noiseSize: 32, useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? _a : false, useClassification: (_b = this.config.useClassification) !== null && _b !== void 0 ? _b : false, usePatternCorrection: (_c = this.config.usePatternCorrection) !== null && _c !== void 0 ? _c : true }));\n break;\n case 'hybrid':\n this.generator = new HybridGenerator(Object.assign(Object.assign({}, commonConfig), { elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.01, noiseSize: 32, jitterStrength: this.config.exactMode ? 0 : 0.05, exactMode: (_d = this.config.exactMode) !== null && _d !== void 0 ? _d : false, useOneHot: (_e = this.config.useOneHot) !== null && _e !== void 0 ? _e : false, useClassification: (_f = this.config.useClassification) !== null && _f !== void 0 ? _f : false, usePatternCorrection: (_g = this.config.usePatternCorrection) !== null && _g !== void 0 ? 
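/*
 Minimal usage sketch for the OmegaSynth facade (training data is assumed):

   const synth = new OmegaSynth({ mode: 'hybrid', maxLength: 32, seed: 7 });
   await synth.train([{ label: 'email', value: 'ada@example.com' }]);
   const one = await synth.generate('email');
   const many = await synth.generateBatch('email', 10);
*/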
_g : true }));\n break;\n case 'exact':\n this.generator = new ExactGenerator({\n seed: this.seed,\n usePatternMatching: true,\n });\n break;\n case 'perfect':\n this.generator = new PerfectGenerator(Object.assign(Object.assign({}, commonConfig), { preferExact: true, usePatternMatching: true, useImprovedELM: true, elmHiddenUnits: 256, elmActivation: 'relu', elmRidgeLambda: 0.001, noiseSize: 32 }));\n break;\n default:\n throw new Error(`Unknown mode: ${this.config.mode}`);\n }\n }\n /**\n * Train the generator on a dataset\n * @param dataset Array of labeled samples\n */\n train(dataset) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized');\n }\n if (this.config.mode === 'retrieval') {\n this.generator.ingest(dataset);\n }\n else if (this.config.mode === 'elm') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'hybrid') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'exact') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'perfect') {\n this.generator.train(dataset);\n }\n });\n }\n /**\n * Generate a synthetic value for a given label\n * @param label Label to generate for\n * @param seed Optional seed for deterministic generation\n */\n generate(label, seed) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized. Call train() first.');\n }\n if (this.config.mode === 'retrieval') {\n const result = this.generator.sampleOne(label);\n if (!result) {\n throw new Error(`No samples found for label: ${label}`);\n }\n return result;\n }\n else if (this.config.mode === 'elm') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'exact') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.generate(label, seed);\n }\n throw new Error(`Unknown mode: ${this.config.mode}`);\n });\n }\n /**\n * Generate multiple synthetic values for a label\n * @param label Label to generate for\n * @param count Number of samples to generate\n */\n generateBatch(label, count) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized. 
Call train() first.');\n }\n if (this.config.mode === 'retrieval') {\n return this.generator.sample(label, count);\n }\n else if (this.config.mode === 'elm') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'exact') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.generateBatch(label, count);\n }\n throw new Error(`Unknown mode: ${this.config.mode}`);\n });\n }\n /**\n * Get all available labels\n */\n getLabels() {\n if (!this.generator) {\n return [];\n }\n if (this.config.mode === 'retrieval') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'elm') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'exact') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.getLabels();\n }\n return [];\n }\n /**\n * Check if the generator is trained\n */\n isTrained() {\n if (!this.generator) {\n return false;\n }\n if (this.config.mode === 'retrieval') {\n const labels = this.generator.getLabels();\n return labels.length > 0;\n }\n else if (this.config.mode === 'elm') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'exact') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.isTrained();\n }\n return false;\n }\n /**\n * Set seed for deterministic generation\n */\n setSeed(seed) {\n this.seed = seed;\n // Reinitialize generator with new seed\n this.initializeGenerator();\n }\n}\n\n/**\n * loadPretrained - Load pretrained synthetic data generator\n * Instantiates OmegaSynth with pretrained data for common labels\n */\n/**\n * Load pretrained OmegaSynth instance\n * @param mode Generation mode ('retrieval', 'elm', or 'hybrid')\n * @param config Optional configuration overrides\n */\nfunction loadPretrained(mode = 'retrieval', config) {\n // Initialize license before creating instance\n const synth = new OmegaSynth({\n mode,\n maxLength: (config === null || config === void 0 ? void 0 : config.maxLength) || 32,\n seed: config === null || config === void 0 ? 
void 0 : config.seed,\n });\n // Load default data\n // Try multiple possible locations for the model file\n let modelPath = null;\n // Helper to find package root by looking for package.json\n function findPackageRoot(startDir) {\n let current = startDir;\n while (current !== path.dirname(current)) {\n const pkgPath = path.join(current, 'package.json');\n if (fs.existsSync(pkgPath)) {\n try {\n const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));\n if (pkg.name === '@astermind/astermind-synth') {\n return current;\n }\n }\n catch (_a) {\n // Continue searching\n }\n }\n current = path.dirname(current);\n }\n return null;\n }\n // Find package root first - this is more reliable than using __dirname\n // since we're looking for files relative to package root, not the current file\n const packageRoot = findPackageRoot(process.cwd());\n const possiblePaths = [];\n // Add paths relative to package root if found\n if (packageRoot) {\n possiblePaths.push(path.join(packageRoot, 'dist/omegasynth/models/default_synth.json'), // Bundled location (npm package)\n path.join(packageRoot, 'src/omegasynth/models/default_synth.json') // Source location (development)\n );\n }\n // Also try common npm package locations (when installed as dependency)\n possiblePaths.push(path.join(process.cwd(), 'node_modules/@astermind/astermind-synth/dist/omegasynth/models/default_synth.json'));\n // Try relative to current working directory (for development)\n possiblePaths.push(path.join(process.cwd(), 'dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'src/omegasynth/models/default_synth.json'));\n for (const possiblePath of possiblePaths) {\n if (fs.existsSync(possiblePath)) {\n modelPath = possiblePath;\n break;\n }\n }\n if (!modelPath) {\n throw new Error('default_synth.json not found. 
Tried paths: ' + possiblePaths.join(', '));\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n // Convert pretrained data to LabeledSample format\n const samples = [];\n for (const [label, values] of Object.entries(modelData.labels)) {\n for (const value of values) {\n samples.push({ label, value });\n }\n }\n // Train the generator synchronously for immediate use\n // Note: This is a simplified approach - in production you might want async\n (() => __awaiter(this, void 0, void 0, function* () {\n try {\n yield synth.train(samples);\n }\n catch (err) {\n console.error('Error training pretrained model:', err);\n }\n }))();\n return synth;\n}\n/**\n * Load a fully versioned OmegaSynth model from dist/models/vX.Y.Z\n *\n * This function:\n * - Reads model.json, training_data.json, and elm_model.json from the version directory\n * - Rebuilds the retrieval store from training_data.json\n * - Hydrates the internal ELM from elm_model.json (for elm/hybrid modes) if possible\n *\n * NOTE:\n * - We avoid calling synth.train() here to prevent re-training; instead we:\n * - Directly ingest training samples into the retrieval generator\n * - Attempt to load ELM weights via loadModelFromJSON if available\n */\nfunction loadPretrainedFromVersion(versionDir) {\n var _a;\n // Initialize license before creating instance\n const manifestPath = path.join(versionDir, 'manifest.json');\n const modelPath = path.join(versionDir, 'model.json');\n const trainingDataPath = path.join(versionDir, 'training_data.json');\n const elmModelPath = path.join(versionDir, 'elm_model.json');\n let manifest = null;\n if (fs.existsSync(manifestPath)) {\n manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n const configFromModel = (_a = manifest === null || manifest === void 0 ? void 0 : manifest.config) !== null && _a !== void 0 ? _a : modelData.config;\n // Load training samples\n if (!fs.existsSync(trainingDataPath)) {\n throw new Error(`training_data.json not found in version directory: ${trainingDataPath}`);\n }\n const trainingSamples = JSON.parse(fs.readFileSync(trainingDataPath, 'utf-8'));\n // Create OmegaSynth.\n // IMPORTANT: For pretrained loading we prefer 'retrieval' mode here:\n // - We only need high-quality samples for downstream ELM/KELM training.\n // - Retrieval over the saved training_data.json gives 100% realistic data\n // without requiring vocab building or ELM retraining.\n //\n // If you ever need to use the original mode (e.g. 'hybrid' or 'elm'),\n // you can swap this back to configFromModel.mode.\n const mode = 'retrieval';\n const synth = new OmegaSynth({\n mode,\n maxLength: configFromModel.maxLength || 50,\n seed: configFromModel.seed,\n });\n // Ingest training samples directly into the retrieval generator\n // For hybrid/elm modes, this ensures retrieval works without retraining\n try {\n const generator = synth.generator;\n if (generator) {\n if (generator.ingest) {\n // RetrievalGenerator\n generator.ingest(trainingSamples);\n }\n else if (generator.retrieval && typeof generator.retrieval.ingest === 'function') {\n // HybridGenerator (has .retrieval)\n generator.retrieval.ingest(trainingSamples);\n }\n }\n }\n catch (err) {\n console.warn('Could not ingest training samples into OmegaSynth generator:', err);\n }\n // Hydrate ELM weights if available and applicable (elm/hybrid modes).\n // NOTE: Since we currently force mode = 'retrieval' above for stability,\n // this block will not run. 
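/*
 Expected layout of a version directory consumed here (file names taken from
 the reads in this function; the version number is an example):

   dist/models/v1.0.0/
     manifest.json        (optional; its config overrides model.json's)
     model.json           (mode, maxLength, seed, training stats)
     training_data.json   (LabeledSample[] re-ingested for retrieval)
     elm_model.json       (optional ELM weights for elm/hybrid modes)
*/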
It is left here for future use if you decide\n // to re-enable elm/hybrid loading via configFromModel.mode.\n if (fs.existsSync(elmModelPath) && (configFromModel.mode === 'elm' || configFromModel.mode === 'hybrid')) {\n try {\n const elmModelJSON = fs.readFileSync(elmModelPath, 'utf-8');\n const generator = synth.generator;\n if (generator) {\n let elmInstance = null;\n if (configFromModel.mode === 'hybrid' && generator.elm && generator.elm.elm) {\n // HybridGenerator -> ELMGenerator -> elm\n elmInstance = generator.elm.elm;\n }\n else if (configFromModel.mode === 'elm' && generator.elm) {\n // ELMGenerator -> elm\n elmInstance = generator.elm;\n }\n if (elmInstance && typeof elmInstance.loadModelFromJSON === 'function') {\n elmInstance.loadModelFromJSON(elmModelJSON);\n console.log('✅ ELM weights loaded from elm_model.json into OmegaSynth');\n }\n else {\n console.warn('Could not load ELM weights: loadModelFromJSON not available on ELM instance');\n }\n }\n }\n catch (err) {\n console.warn('Could not hydrate ELM from elm_model.json:', err);\n }\n }\n return synth;\n}\n/**\n * Load pretrained model from custom JSON data\n * @param modelData Custom model data\n * @param mode Generation mode\n * @param config Optional configuration\n */\nfunction loadPretrainedFromData(modelData, mode = 'retrieval', config) {\n // Initialize license before creating instance\n const synth = new OmegaSynth({\n mode,\n maxLength: (config === null || config === void 0 ? void 0 : config.maxLength) || 32,\n seed: config === null || config === void 0 ? void 0 : config.seed,\n });\n const samples = [];\n for (const [label, values] of Object.entries(modelData.labels)) {\n for (const value of values) {\n samples.push({ label, value });\n }\n }\n (() => __awaiter(this, void 0, void 0, function* () {\n try {\n yield synth.train(samples);\n }\n catch (err) {\n console.error('Error training custom model:', err);\n }\n }))();\n return synth;\n}\n/**\n * Get available pretrained labels\n */\nfunction getPretrainedLabels() {\n try {\n // Helper to find package root\n function findPackageRoot(startDir) {\n let current = startDir;\n while (current !== path.dirname(current)) {\n const pkgPath = path.join(current, 'package.json');\n if (fs.existsSync(pkgPath)) {\n try {\n const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));\n if (pkg.name === '@astermind/astermind-synth') {\n return current;\n }\n }\n catch (_a) {\n // Continue searching\n }\n }\n current = path.dirname(current);\n }\n return null;\n }\n // Try multiple possible locations for the model file\n const packageRoot = findPackageRoot(process.cwd());\n const possiblePaths = [];\n if (packageRoot) {\n possiblePaths.push(path.join(packageRoot, 'dist/omegasynth/models/default_synth.json'), path.join(packageRoot, 'src/omegasynth/models/default_synth.json'));\n }\n possiblePaths.push(path.join(process.cwd(), 'node_modules/@astermind/astermind-synth/dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'src/omegasynth/models/default_synth.json'));\n let modelPath = null;\n for (const possiblePath of possiblePaths) {\n if (fs.existsSync(possiblePath)) {\n modelPath = possiblePath;\n break;\n }\n }\n if (!modelPath) {\n throw new Error('Model file not found');\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n return Object.keys(modelData.labels);\n }\n catch (_a) {\n // Fallback if file not found\n return [\n 'first_name', 'last_name', 'phone_number', 'email', 
/**
 * Get available pretrained labels
 */
function getPretrainedLabels() {
    try {
        // Helper to find package root
        function findPackageRoot(startDir) {
            let current = startDir;
            while (current !== path.dirname(current)) {
                const pkgPath = path.join(current, 'package.json');
                if (fs.existsSync(pkgPath)) {
                    try {
                        const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
                        if (pkg.name === '@astermind/astermind-synth') {
                            return current;
                        }
                    }
                    catch (_a) {
                        // Continue searching
                    }
                }
                current = path.dirname(current);
            }
            return null;
        }
        // Try multiple possible locations for the model file
        const packageRoot = findPackageRoot(process.cwd());
        const possiblePaths = [];
        if (packageRoot) {
            possiblePaths.push(path.join(packageRoot, 'dist/omegasynth/models/default_synth.json'), path.join(packageRoot, 'src/omegasynth/models/default_synth.json'));
        }
        possiblePaths.push(path.join(process.cwd(), 'node_modules/@astermind/astermind-synth/dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'src/omegasynth/models/default_synth.json'));
        let modelPath = null;
        for (const possiblePath of possiblePaths) {
            if (fs.existsSync(possiblePath)) {
                modelPath = possiblePath;
                break;
            }
        }
        if (!modelPath) {
            throw new Error('Model file not found');
        }
        const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));
        return Object.keys(modelData.labels);
    }
    catch (_a) {
        // Fallback if file not found
        return [
            'first_name', 'last_name', 'phone_number', 'email', 'street_address',
            'city', 'state', 'country', 'company_name', 'job_title', 'product_name',
            'color', 'uuid', 'date', 'credit_card_type', 'device_type'
        ];
    }
}
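For completeness, a sketch of querying the available labels (same import assumption as above). The function walks up from process.cwd() looking for the `@astermind/astermind-synth` package root, probes several candidate paths for default_synth.json, and silently falls back to the hard-coded list if nothing is found:

// Sketch only: logs whichever label set is found, or the fallback list.
import { getPretrainedLabels } from '@astermind/astermind-community';

const labels = getPretrainedLabels();
console.log(`Available labels (${labels.length}): ${labels.join(', ')}`);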
/**
 * Utilities for saving trained OmegaSynth models
 */
/**
 * Save a trained OmegaSynth model to disk
 *
 * @param synth The trained OmegaSynth instance
 * @param trainingData The training data used to train the model (required for saving)
 * @param outputDir Directory where the model will be saved
 * @param version Optional version string (default: '1.0.0')
 * @returns Path to the saved model directory
 */
function saveTrainedModel(synth_1, trainingData_1, outputDir_1) {
    return __awaiter(this, arguments, void 0, function* (synth, trainingData, outputDir, version = '1.0.0') {
        if (!synth.isTrained()) {
            throw new Error('Model must be trained before saving. Call train() first.');
        }
        if (trainingData.length === 0) {
            throw new Error('Training data is required to save the model.');
        }
        // Create version directory
        const versionDir = path.join(outputDir, `v${version}`);
        if (!fs.existsSync(versionDir)) {
            fs.mkdirSync(versionDir, { recursive: true });
        }
        // Calculate training stats
        const labels = Array.from(new Set(trainingData.map(s => s.label)));
        const samplesPerLabel = {};
        for (const label of labels) {
            samplesPerLabel[label] = trainingData.filter(s => s.label === label).length;
        }
        // Get config from synth (we need to access private config)
        const config = synth.config || {};
        // Save model metadata
        const modelData = {
            config: {
                mode: config.mode || 'retrieval',
                maxLength: config.maxLength,
                seed: config.seed,
                exactMode: config.exactMode,
                useOneHot: config.useOneHot,
                useClassification: config.useClassification,
                usePatternCorrection: config.usePatternCorrection,
            },
            trainingStats: {
                totalSamples: trainingData.length,
                labels,
                samplesPerLabel,
            },
            timestamp: new Date().toISOString(),
        };
        const modelPath = path.join(versionDir, 'model.json');
        fs.writeFileSync(modelPath, JSON.stringify(modelData, null, 2));
        // Save training data (required for loading later)
        const trainingDataPath = path.join(versionDir, 'training_data.json');
        fs.writeFileSync(trainingDataPath, JSON.stringify(trainingData, null, 2));
        // Try to save ELM model weights if available (for elm/hybrid modes)
        try {
            const generator = synth.generator;
            if (generator) {
                let elmInstance = null;
                // Get ELM instance based on mode
                if (config.mode === 'hybrid' && generator.elm) {
                    elmInstance = generator.elm.elm; // HybridGenerator -> ELMGenerator -> elm
                }
                else if (config.mode === 'elm' && generator.elm) {
                    elmInstance = generator.elm; // ELMGenerator -> elm
                }
                if (elmInstance) {
                    let elmModelJSON;
                    // Try to get serialized model
                    if (elmInstance.savedModelJSON) {
                        elmModelJSON = elmInstance.savedModelJSON;
                    }
                    else if (elmInstance.model) {
                        // Manually serialize
                        const serialized = {
                            config: elmInstance.config,
                            W: elmInstance.model.W,
                            b: elmInstance.model.b,
                            B: elmInstance.model.beta,
                            categories: elmInstance.categories || [],
                        };
                        elmModelJSON = JSON.stringify(serialized);
                    }
                    if (elmModelJSON) {
                        const elmModelPath = path.join(versionDir, 'elm_model.json');
                        fs.writeFileSync(elmModelPath, elmModelJSON);
                        console.log(`✅ ELM model weights saved to: ${elmModelPath}`);
                    }
                }
            }
        }
        catch (error) {
            console.warn('⚠️ Could not save ELM model weights:', error);
            // Continue - ELM weights are optional
        }
        console.log(`\n✅ Model saved to: ${versionDir}`);
        console.log(` Version: ${version}`);
        console.log(` Training samples: ${trainingData.length}`);
        console.log(` Labels: ${labels.length} (${labels.join(', ')})`);
        console.log(`\n To load this model later, use:`);
        console.log(` loadPretrainedFromVersion('${versionDir}')`);
        return versionDir;
    });
}
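A hedged end-to-end sketch of the save path. Assumptions beyond what the source shows: the same import specifier as the earlier sketches, the `{ label, value }` sample shape (matching what `saveTrainedModel` filters on), and that `loadPretrainedFromVersion` accepts the returned directory as the log message above suggests; its exact return type is not shown here:

// Sketch only: run inside an ESM module (top-level await).
import { OmegaSynth, saveTrainedModel, loadPretrainedFromVersion } from '@astermind/astermind-community';

const samples = [
    { label: 'color', value: 'red' },
    { label: 'color', value: 'blue' },
    { label: 'city', value: 'Oslo' },
];

const synth = new OmegaSynth({ mode: 'retrieval', maxLength: 16 });
await synth.train(samples);

// Writes <outputDir>/v<version>/model.json and training_data.json,
// plus elm_model.json when ELM weights can be serialized.
const versionDir = yield_free_dir(); // placeholder removed below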
export { Activations, AdaptiveKernelELM, AdaptiveOnlineELM, AttentionEnhancedELM, Augment, AutoComplete, CharacterLangEncoderELM, ConfidenceClassifierELM, ConvolutionalELM, DISK_EPS, DeepELM, DeepELMPro, DeepKernelELM, DimError, ELM, ELMAdapter, ELMChain, ELMGenerator, ELMKELMCascade, ELMScorer, ELMWorkerClient, EPS, EmbeddingStore, EncoderELM, EnsembleKernelELM, FeatureCombinerELM, ForgettingOnlineELM, FuzzyELM, GraphELM, GraphKernelELM, HierarchicalELM, HybridGenerator, IO, InfoFlowGraph, InfoFlowGraphPWS, IntentClassifier, KNN, KernelELM, LanguageClassifier, MAX_EXP, MIN_EXP, Matrix, MultiKernelELM, MultiTaskELM, OmegaSynth, OnlineELM, OnlineKernelELM, OnlineRidge, QuantumInspiredELM, RecurrentELM, RefinerELM, RetrievalGenerator, RobustKernelELM, SparseELM, SparseKernelELM, StringEncoder, StringKernelELM, SyntheticFieldStore, TEController, TFIDF, TFIDFVectorizer, TensorKernelELM, TextEncoder, TimeSeriesELM, Tokenizer, TransferEntropy, TransferEntropyPWS, TransferLearningELM, UniversalEncoder, VariationalELM, VotingClassifierELM, add, add_, argmax, asVec, assertRect, autoTune, backfillEmptyParents, baseKernel$1 as baseKernel, binaryPR, binaryROC, bindAutocompleteUI, buildDenseDocs, buildIndex, buildLandmarks, buildRFF, buildTfidfDocs, buildVocabAndIdf, clampVec, confusionMatrixFromIndices, cosine$2 as cosine, cosineSparse, defaultNumericConfig, defaultTextConfig, deserializeTextBits, dot, dotProd$1 as dotProd, ensureRectNumber2D, evaluateClassification, evaluateEnsembleRetrieval, evaluateRegression, expSafe, expandQuery, explainFeatures, exportModel, filterMMR, flattenSections, fmtHead, formatClassificationReport, getPretrainedLabels, hDistProxy, hadamard, hadamard_, hybridRetrieve, importModel, isFiniteVec, isNumericConfig, isTextConfig, jaccard, kernelSim, keywordBonus, l2$1 as l2, loadPretrained, loadPretrainedFromData, loadPretrainedFromVersion, log1pSafe, logLoss, logSumExp, mapRFF, mean, normalizeConfig, normalizeL2, normalizeWord, omegaComposeAnswer, parseMarkdownToSections, penalty, projectToDense, quickHash, rerank, rerankAndFilter, ridgeSolvePro, sampleQueriesFromCorpus, saveTrainedModel, scal, scal_, sigmoid$1 as sigmoid, softmax, sparseToDense, standardize, summarizeDeterministic, tanhVec, tanhVec_, toTfidf, tokenize$1 as tokenize, topK, topKAccuracy, topKIndices, variance, wrapELM, zeros };
//# sourceMappingURL=astermind.esm.js.map

[End of embedded astermind.esm.js source. The remainder of the source map JSON — two null sourcesContent entries, an empty "names" array, and the VLQ-encoded "mappings" string — is machine-oriented position data and is not reproduced here.]
,OAAO;AAChB,4BAAA,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;4BAC5C,SAAS,EAAE,CAAC,CAAC;AAChB,yBAAA,CAAC,CAAC;AACH,wBAAA,OAAO,EAAE,IAAI;AACb,wBAAA,cAAc,EAAE;qBACnB;gBACL;;YAEJ;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,IAAI,CAAC,wDAAwD,EAAE,KAAK,CAAC;;YAEjF;QACJ;;QAGA,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,CAAC,eAAe,EAAE;QACtD,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,aAAa,GAAG,CAAC,EAAE;AAC3D,YAAA,IAAI;gBACA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;;gBAG/C,IAAI,UAAU,GAA8B,QAAQ;AACpD,gBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;oBACvB,UAAU,GAAG,KAAK;gBACtB;AAAO,qBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;AAC9B,oBAAA,UAAU,GAAG,QAAQ,CAAC;gBAC1B;;AAGA,gBAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;gBAEzE,OAAO;AACH,oBAAA,KAAK,EAAE,cAAc;oBACrB,UAAU;oBACV,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK;wBAC9B,KAAK,EAAE,CAAC,CAAC,KAAK;wBACd,OAAO,EAAE,CAAC,CAAC,OAAO;wBAClB,SAAS,EAAE,CAAC,CAAC;AAChB,qBAAA,CAAC,CAAC;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,cAAc,EAAE,cAAc,CAAC;AAC3B,0BAAE;AACF,0BAAE;iBACT;YACL;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,KAAK,CAAC,+BAA+B,EAAE,KAAK,CAAC;;YAEzD;QACJ;;QAGA,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QAE1C,IAAI,WAAW,CAAC,aAAa,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;YAC3D,OAAO;AACH,gBAAA,KAAK,EAAE,4GAA4G;AACnH,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,cAAc,EAAE;aACnB;QACL;AAEA,QAAA,IAAI;;YAEA,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;;YAGxC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE,EAAE;gBAC5B,MAAM,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;YACnC;;YAGA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;;YAG/C,IAAI,UAAU,GAA8B,QAAQ;AACpD,YAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;gBACvB,UAAU,GAAG,KAAK;YACtB;AAAO,iBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;AAC9B,gBAAA,UAAU,GAAG,QAAQ,CAAC;YAC1B;;AAGA,YAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;YAEzE,OAAO;AACH,gBAAA,KAAK,EAAE,cAAc;gBACrB,UAAU;gBACV,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK;oBAC9B,KAAK,EAAE,CAAC,CAAC,KAAK;oBACd,OAAO,EAAE,CAAC,CAAC,OAAO;oBAClB,SAAS,EAAE,CAAC,CAAC;AAChB,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,cAAc,EAAE,WAAW,CAAC;AACxB,sBAAE;AACF,sBAAE;aACT;QACL;QAAE,OAAO,KAAK,EAAE;AACZ,YAAA,OAAO,CAAC,KAAK,CAAC,+BAA+B,EAAE,KAAK,CAAC;YACrD,OAAO;AACH,gBAAA,KAAK,EAAE,iGAAiG;AACxG,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,cAAc,EAAE;aACnB;QACL;IACJ;AAEA;;AAEG;IACK,iBAAiB,GAAA;AACrB,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,MAAM,CAAC,gBAAgB,CAAC,QAAQ,EAAE,MAAK;gBACnC,IAAI,CAAC,SAAS,EAAE,CAAC,KAAK,CAAC,MAAK,EAAE,CAAC,CAAC;AACpC,YAAA,CAAC,CAAC;AAEF,YAAA,MAAM,CAAC,gBAAgB,CAAC,SAAS,EAAE,MAAK;AACpC,gBAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;AAC7B,YAAA,CAAC,CAAC;;AAGF,YAAA,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;AACnB,gBAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;YAC7B;QACJ;IACJ;AAEA;;AAEG;AACK,IAAA,SAAS,CAAC,MAAwB,EAAA;AACtC,QAAA,IAAI,IAAI,CAAC,MAAM,KAAK,MAAM,EAAE;AACxB,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC;QACtC;IACJ;AAEA;;AAEG;AACK,IAAA,cAAc,CAAC,KAAc,EAAA;AACjC,QAAA,IAAI,KAAK,YAAY,KAAK,EAAE;;AAExB,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;gBACzE,OAAO;AACH,oBAAA,IAAI,EAAE,YAAY;AAClB,oBAAA,OAAO,EAAE,4BAA4B;AACrC,oBAAA,aAAa,EAAE;iBAClB;YACL;AAEA,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE;gBACvE,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,oBAAoB,CAAC;gBACvD,OAAO;AACH,oBAAA,IAAI,EAAE,YAAY;AAClB,oBAAA,OAAO,EAAE,q
BAAqB;AAC9B,oBAAA,UAAU,EAAE,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,oBAAA,aAAa,EAAE;iBAClB;YACL;AAEA,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACjE,OAAO;AACH,oBAAA,IAAI,EAAE,cAAc;AACpB,oBAAA,OAAO,EAAE,uBAAuB;AAChC,oBAAA,aAAa,EAAE;iBAClB;YACL;YAEA,OAAO;AACH,gBAAA,IAAI,EAAE,eAAe;AACrB,gBAAA,OAAO,EAAE,KAAK,CAAC,OAAO,IAAI,eAAe;AACzC,gBAAA,aAAa,EAAE;aAClB;QACL;QAEA,OAAO;AACH,YAAA,IAAI,EAAE,eAAe;AACrB,YAAA,OAAO,EAAE;SACZ;IACL;AAEA;;AAEG;AACK,IAAA,mBAAmB,CACvB,OAAe,EACf,UAA0B,EAC1B,UAAmB,EAAA;QAEnB,OAAO;AACH,YAAA,KAAK,EAAE,OAAO;YACd,UAAU;AACV,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,OAAO,EAAE,KAAK;YACd;SACH;IACL;AAEA;;AAEG;AACK,IAAA,KAAK,CAAC,EAAU,EAAA;AACpB,QAAA,OAAO,IAAI,OAAO,CAAC,OAAO,IAAI,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAC1D;AACH;;ACxkCD;AACA;AAIA;AACA,MAAM,eAAe,GAAG,0BAA0B;AAElD;;AAEG;AACG,SAAU,cAAc,CAAC,MAAe,EAAA;IAC1C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AACvC,QAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC;IAC/C;IAEA,MAAM,CAAC,GAAG,MAAiC;AAE3C,IAAA,IAAI,CAAC,CAAC,CAAC,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,KAAK,QAAQ,EAAE;AAC3C,QAAA,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC;IAC9D;AAEA,IAAA,IAAI,CAAC,CAAC,CAAC,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,KAAK,QAAQ,EAAE;AAC3C,QAAA,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC;IAC9D;IAEA,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AAC7B,QAAA,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC;IACnD;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,kBAAkB,GAAA;;IAEvB,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;AAChD,QAAA,OAAO,IAAI;IACf;;AAGA,IAAA,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB;AAChD,IAAA,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,4BAA4B;IAEvD,IAAI,MAAM,EAAE;QACR,OAAO;YACH,MAAM;YACN,MAAM,EAAE,MAAM,IAAI,eAAe;AACjC,YAAA,OAAO,EAAE;SACZ;IACL;;AAGA,IAAA,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B;AAC7D,IAAA,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,sCAAsC;IAEpE,IAAI,SAAS,EAAE;QACX,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;YACjB,MAAM,EAAE,SAAS,IAAI,eAAe;AACpC,YAAA,OAAO,EAAE;SACZ;IACL;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;;AAGG;AACH,SAAS,eAAe,GAAA;;AAEpB,IAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;;AAE/B,QAAA,MAAM,QAAQ,GAAI,MAAc,CAAC,oBAAoB;QACrD,IAAI,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;YAC7D,OAAO;AACH,gBAAA,GAAG,QAAQ;AACX,gBAAA,MAAM,EAAE,QAAQ,CAAC,MAAM,IAAI,eAAe;AAC1C,gBAAA,OAAO,EAAE;aACZ;QACL;IACJ;;;AAIA,IAAA,IAAI;;QAEA,MAAM,UAAU,GAAI,UAAkB,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG;QACxD,IAAI,UAAU,EAAE;AACZ,YAAA,MAAM,MAAM,GAAG,UAAU,CAAC,0BAA0B;AACpD,YAAA,MAAM,MAAM,GAAG,UAAU,CAAC,iCAAiC;YAC3D,IAAI,MAAM,EAAE;gBACR,OAAO;oBACH,MAAM;oBACN,MAAM,EAAE,MAAM,IAAI,eAAe;AACjC,oBAAA,OAAO,EAAE;iBACZ;YACL;QACJ;IACJ;AAAE,IAAA,MAAM;;IAER;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,oBAAoB,GAAA;AACzB,IAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,QAAA,OAAO,IAAI;IACf;;AAGA,IAAA,MAAM,YAAY,GAAI,MAAc,CAAC,eAAe;IACpD,IAAI,YAAY,IAAI,OAAO,YAAY,KAAK,QAAQ,IAAI,YAAY,CAAC,MAAM,EAAE;QACzE,OAAO;AACH,YAAA,GAAG,YAAY;AACf,YAAA,MAAM,EAAE,YAAY,CAAC,MAAM,IAAI,eAAe;AAC9C,YAAA,OAAO,EAAE;SACZ;IACL;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,wBAAwB,GAAA;IAC7B,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;AAClE,QAAA,OAAO,IAAI;IACf;IAEA,MAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,4BAA4B,CAAC;IACnE,IAAI,MAAM,EAAE;QACR,MAAM,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,oBAAoB,CAAC;QACxD,IAAI,MAAM,EAAE;YACR,OAAO;gBACH,MAAM;gBACN,MAAM,EAAE,MAAM,CAAC,YAAY,CAAC,oBAAoB,CAAC,IAAI,eAAe;AACpE,gBAAA,OAAO,EAAE;aACZ;QACL;IACJ;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;;;;;;;;;AAUG;AACG,SAAU,UAAU,CAAC,OAAA,GAAyB,EAAE,EAAA;AAClD,IAAA,MAAM,EAAE,iBAAiB,GAAG,IAAI,EAAE,GAAG,OAAO;;AAG5C,IAAA,MAAM,SAAS,GAAG,kBAAkB,EAAE;IACtC,IAAI,SAAS,EAAE
;AACX,QAAA,OAAO,SAAS;IACpB;;AAGA,IAAA,MAAM,UAAU,GAAG,eAAe,EAAE;IACpC,IAAI,UAAU,EAAE;AACZ,QAAA,OAAO,UAAU;IACrB;;AAGA,IAAA,MAAM,YAAY,GAAG,oBAAoB,EAAE;IAC3C,IAAI,YAAY,EAAE;QACd,cAAc,CAAC,YAAY,CAAC;AAC5B,QAAA,OAAO,YAAY;IACvB;;AAGA,IAAA,MAAM,YAAY,GAAG,wBAAwB,EAAE;IAC/C,IAAI,YAAY,EAAE;AACd,QAAA,OAAO,YAAY;IACvB;;IAGA,IAAI,iBAAiB,EAAE;QACnB,MAAM,IAAI,KAAK,CACX,wEAAwE;YACxE,mEAAmE;YACnE,uEAAuE;YACvE,yEAAyE;YACzE,4DAA4D;AAC5D,YAAA,6EAA6E,CAChF;IACL;SAAO;AACH,QAAA,OAAO,CAAC,IAAI,CAAC,2EAA2E,CAAC;AACzF,QAAA,OAAO,IAAI;IACf;AACJ;AAEA;;AAEG;AACI,MAAM,aAAa,GAAG;IACzB,kBAAkB;IAClB,eAAe;IACf,oBAAoB;IACpB;;;ACxNJ;AACA;AAgBA;;AAEG;AACH,MAAM,eAAe,GAAwE;AACzF,IAAA,QAAQ,EAAE;QACN,EAAE,OAAO,EAAE,+DAA+D,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;QAC3G,EAAE,OAAO,EAAE,qDAAqD,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;AAClG,QAAA,EAAE,OAAO,EAAE,qCAAqC,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;AACpF,KAAA;AACD,IAAA,QAAQ,EAAE;QACN,EAAE,OAAO,EAAE,2CAA2C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;QACxF,EAAE,OAAO,EAAE,6CAA6C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;QAC1F,EAAE,OAAO,EAAE,8CAA8C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;AAC7F,KAAA;AACD,IAAA,YAAY,EAAE;QACV,EAAE,OAAO,EAAE,yDAAyD,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,EAAE;QACzG,EAAE,OAAO,EAAE,4CAA4C,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,IAAI,EAAE;AAChG,KAAA;AACD,IAAA,YAAY,EAAE;QACV,EAAE,OAAO,EAAE,qEAAqE,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,EAAE;QACrH,EAAE,OAAO,EAAE,qDAAqD,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,IAAI,EAAE;AACzG,KAAA;AACD,IAAA,MAAM,EAAE;QACJ,EAAE,OAAO,EAAE,mEAAmE,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE;QAC9G,EAAE,OAAO,EAAE,8DAA8D,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE;AAC3G,KAAA;AACD,IAAA,SAAS,EAAE;QACP,EAAE,OAAO,EAAE,yDAAyD,EAAE,IAAI,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE;AACzG,KAAA;AACD,IAAA,MAAM,EAAE;QACJ,EAAE,OAAO,EAAE,yCAAyC,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE;AACvF;CACJ;AAED;;;;;AAKG;MACU,0BAA0B,CAAA;AAQnC,IAAA,WAAA,CAAY,MAAmB,EAAA;QAHvB,IAAA,CAAA,UAAU,GAAyB,EAAE;AACrC,QAAA,IAAA,CAAA,aAAa,GAA0B,IAAI,GAAG,EAAE,CAAC;AAGrD,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,GAAG,EAAE;AAC7B,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,GAAG,EAAE;AAC1B,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,GAAG,EAAE;;QAG3B,IAAI,CAAC,YAAY,EAAE;IACvB;AAEA;;AAEG;IACK,YAAY,GAAA;;AAEhB,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACrB,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;;AAErC,gBAAA,MAAM,IAAI,GAAG;AACT,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;oBACpB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;gBACrC;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;AACnB,YAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;AAC1D,gBAAA,MAAM,IAAI,GAAG;oBACT,GAAG,CAAC,WAAW,EAAE;AACjB,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBAClB,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC;gBAChC;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AACpB,YAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;AAC3D,gBAAA,MAAM,IAAI,GAAG;oBACT,GAAG,CAAC,WAAW,EAAE;AACjB,oBAAA,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE;AACtB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBAClB,IAAI,CAAC,UAAU
,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC;gBACjC;YACJ;QACJ;IACJ;AAEA;;;;;AAKG;AACH,IAAA,QAAQ,CAAC,OAAe,EAAA;AACpB,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACtB,OAAO;AACH,gBAAA,MAAM,EAAE,IAAI;AACZ,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,aAAa,EAAE;aAClB;QACL;QAEA,MAAM,iBAAiB,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE;;AAGtD,QAAA,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;YAClE,KAAK,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,QAAQ,EAAE;gBACxC,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;gBACpC,IAAI,KAAK,EAAE;AACP,oBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,CAAC;oBAC1D,IAAI,MAAM,EAAE;AACR,wBAAA,OAAO,MAAM;oBACjB;gBACJ;YACJ;QACJ;;QAGA,MAAM,aAAa,GAAG,IAAI,CAAC,iBAAiB,CAAC,iBAAiB,CAAC;QAC/D,IAAI,aAAa,EAAE;AACf,YAAA,OAAO,aAAa;QACxB;;QAGA,MAAM,YAAY,GAAG,IAAI,CAAC,iBAAiB,CAAC,iBAAiB,CAAC;QAC9D,IAAI,YAAY,EAAE;AACd,YAAA,OAAO,YAAY;QACvB;;QAGA,OAAO;AACH,YAAA,MAAM,EAAE,IAAI;AACZ,YAAA,cAAc,EAAE,IAAI;AACpB,YAAA,aAAa,EAAE,CAAC;AAChB,YAAA,eAAe,EAAE;SACpB;IACL;AAEA;;AAEG;AACK,IAAA,WAAW,CACf,UAAkB,EAClB,KAAuB,EACvB,UAAkB,EAAA;QAElB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;QAExD,QAAQ,UAAU;YACd,KAAK,UAAU,EAAE;gBACb,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;gBAE/C,IAAI,SAAS,EAAE;AACX,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,SAAS,CAAC,UAAU;oBACpD,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;AAChB,4BAAA,MAAM,EAAE,SAAS,CAAC,KAAK,CAAC,IAAI;4BAC5B,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,YAAA,EAAe,SAAS,CAAC,KAAK,CAAC,IAAI,CAAA;AACnD,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;wBACzB,eAAe,EAAE,CAAC,UAAU,EAAE,SAAS,CAAC,KAAK,CAAC,IAAI;qBACrD;gBACL;gBACA;YACJ;YAEA,KAAK,UAAU,EAAE;gBACb,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;AACxC,gBAAA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC;gBAEpD,IAAI,KAAK,EAAE;AACP,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;4BAChB,MAAM,EAAE,KAAK,CAAC,QAAQ;4BACtB,MAAM,EAAE,EAAE,KAAK,EAAE;4BACjB,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,MAAA,EAAS,KAAK,CAAC,IAAI,CAAA,QAAA,EAAW,KAAK,CAAA,CAAA;AACnD,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,UAAU,EAAE,KAAK,CAAC,IAAI;qBAC3C;gBACL;;gBAGA,IAAI,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE;AAC3B,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,GAAG;oBACnC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;4BAChB,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ;AACzC,4BAAA,MAAM,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE;4BACzB,UAAU;4BACV,WAAW,EAAE,CAAA,YAAA,EAAe,MAAM,CAAA,CAAA;AACrC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,UAAU,EAAE,QAAQ;qBACzC;gBACL;gBACA;YACJ;YAEA,KAAK,cAAc,EAAE;gBACjB,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBAC5C,MAAM,QAAQ,GAAG,IAAI,CAAC,kBAAkB,CAAC,UAAU,CAAC;gBAEpD,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,cAAc;AACpB,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,OAAA,EAAU,UAAU,CAAA,QAAA;AACpC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,cAAc,EAAE,UAAU;qBAC/C;gBACL;gBACA;YACJ;YAEA,KAAK,cAAc,EAAE;gBACjB,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBAC3C,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,CAAC;gBAE7C,IAAI,KAA
K,EAAE;AACP,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,cAAc;4BACpB,MAAM,EAAE,KAAK,CAAC,OAAO;4BACrB,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,KAAA,EAAQ,KAAK,CAAC,EAAE,CAAA,MAAA;AAChC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE;qBAC7C;gBACL;gBACA;YACJ;YAEA,KAAK,QAAQ,EAAE;gBACX,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;gBAE9C,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,GAAG;oBACnC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,QAAQ;AACd,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,UAAA,EAAa,MAAM,CAAA;AACnC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,MAAM;qBACrC;gBACL;gBACA;YACJ;YAEA,KAAK,WAAW,EAAE;;gBAEd,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,wBAAwB,CAAC,MAAM,CAAC;gBAEtD,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,UAAA,EAAa,MAAM,CAAA;AACnC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,WAAW,EAAE,MAAM;qBACxC;gBACL;gBACA;YACJ;YAEA,KAAK,QAAQ,EAAE;gBACX,MAAM,UAAU,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE;gBAC1C,IAAI,IAAI,CAAC,MAAM,CAAC,aAAa,GAAG,UAAU,CAAC,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,QAAQ;AACd,4BAAA,MAAM,EAAE,UAAU;4BAClB,UAAU;4BACV,WAAW,EAAE,CAAA,QAAA,EAAW,UAAU,CAAA,OAAA;AACrC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,UAAU;qBACzC;gBACL;gBACA;YACJ;;AAGJ,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,aAAa,CAAC,KAAuB,EAAA;;AAEzC,QAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,YAAA,IAAI,KAAK,CAAC,CAAC,CAAC,EAAE;AACV,gBAAA,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;YAC1B;QACJ;AACA,QAAA,OAAO,EAAE;IACb;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,MAAc,EAAA;AACnC,QAAA,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,cAAc,EAAE,EAAE,CAAC;;QAGzE,MAAM,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,gBAAgB,CAAC;QACrD,IAAI,KAAK,EAAE;YACP,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,GAAG,EAAE;QAC5C;;QAGA,IAAI,SAAS,GAAwB,IAAI;QACzC,IAAI,SAAS,GAAG,CAAC;QAEjB,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,EAAE,GAAG,CAAC;YAC7D,IAAI,KAAK,GAAG,SAAS,IAAI,KAAK,GAAG,GAAG,EAAE;gBAClC,SAAS,GAAG,KAAK;gBACjB,SAAS,GAAG,KAAK;YACrB;QACJ;QAEA,IAAI,SAAS,EAAE;YACX,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE;QACtD;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,OAAe,EAAA;QACrC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;;QAGxD,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;AAC1C,YAAA,IAAI,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AACvB,gBAAA,MAAM,UAAU,GAAG,IAAI,CAAC;gBACxB,OAAO;AACH,oBAAA,MAAM,EAAE;AACJ,wBAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,wBAAA,IAAI,EAAE,UAAU;wBAChB,MAAM,EAAE,KAAK,CAAC,IAAI;wBAClB,UAAU;AACV,wBAAA,WAAW,EAAE,CAAA,YAAA,EAAe,KAAK,CAAC,IAAI,CAAA;AACzC,qBAAA;oBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,oBAAA,aAAa,EAAE,UAAU;AACzB,oBAAA,eAAe,EAAE,CAAC,eAAe,EAAE
,GAAG;iBACzC;YACL;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,OAAe,EAAA;AACrC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa;AAAE,YAAA,OAAO,IAAI;QAE3C,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;AAExD,QAAA,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;YAC7D,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,EAAE;gBAC5C,MAAM,UAAU,GAAG,IAAI;gBACvB,OAAO;AACH,oBAAA,MAAM,EAAE;AACJ,wBAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,wBAAA,IAAI,EAAE,QAAQ;AACd,wBAAA,MAAM,EAAE,UAAU;wBAClB,UAAU;wBACV,WAAW,EAAE,CAAA,QAAA,EAAW,UAAU,CAAA;AACrC,qBAAA;oBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,oBAAA,aAAa,EAAE,UAAU;AACzB,oBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,UAAU;iBACzC;YACL;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,KAAuB,EAAA;AAI3C,QAAA,MAAM,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC;;QAGzB,KAAK,MAAM,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE;AACpC,YAAA,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;;AAE3D,gBAAA,MAAM,UAAU,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,CAAA,EAAG,KAAK,CAAC,IAAI,CAAA,WAAA,CAAa,EAAE,GAAG,CAAC,CAAC;gBAC9E,OAAO;oBACH,KAAK;oBACL,KAAK,EAAE,UAAU,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI;iBACrC;YACL;QACJ;AAEA,QAAA,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE;IAC5D;AAEA;;AAEG;AACK,IAAA,kBAAkB,CAAC,IAAY,EAAA;AACnC,QAAA,MAAM,cAAc,GAAG,IAAI,CAAC,WAAW,EAAE;;AAGzC,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;;YAEjC,OAAO,CAAA,iBAAA,EAAoB,IAAI,CAAA,EAAA,CAAI;QACvC;;AAGA,QAAA,MAAM,SAAS,GAAG;AACd,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,EAAA,CAAI;AACnC,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;AACrC,YAAA,CAAA,KAAA,EAAQ,cAAc,CAAA,CAAE;AACxB,YAAA,CAAA,CAAA,EAAI,cAAc,CAAA,OAAA,CAAS;AAC3B,YAAA,CAAA,OAAA,EAAU,cAAc,CAAA;SAC3B;;AAGD,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,IAAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;;YAER;QACJ;;QAGA,MAAM,OAAO,GAAG,QAAQ,CAAC,gBAAgB,CAAC,gCAAgC,CAAC;AAC3E,QAAA,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACvB,YAAA,IAAI,GAAG,CAAC,WAAW,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;AACzD,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC;YAC3C;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,IAAY,EAAA;AAChC,QAAA,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,IAAI,IAAI;IAC1D;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,MAAc,EAAA;AACnC,QAAA,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE;;QAG7C,IAAI,gBAAgB,KAAK,KAAK,IAAI,gBAAgB,KAAK,WAAW,EAAE;AAChE,YAAA,OAAO,MAAM;QACjB;QACA,IAAI,gBAAgB,KAAK,QAAQ,IAAI,gBAAgB,KAAK,KAAK,EAAE;AAC7D,YAAA,OAAO,iBAAiB;QAC5B;;AAGA,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,CAAE;QACtD;;AAGA,QAAA,MAAM,SAAS,GAAG;YACd,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,CAAE;AAC3C,YAAA,CAAA,eAAA,EAAkB,gBAAgB,CAAA,EAAA,CAAI;YACtC,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,QAAA,CAAU;AACnD,YAAA,CAAA,qBAAA,EAAwB,gBAAgB,CAAA,IAAA;SAC3C;AAED,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,IAAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;gBACJ;YACJ;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,wBAAwB,CAAC,WAAmB,EAAA;AAChD,QAAA,MAAM,cAAc,GAAG,WAAW,CAAC,WAAW,EAAE;;AAGhD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;QAChD;;AAGA,QAAA,MAAM,SAAS,GAAG;AACd,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;AACrC,YAAA,CAAA,eAAA,EAAkB,cAAc,CAAA,IAAA,CAAM;AACtC,YAAA,CAAA,SAAA,EAAY,cAAc,CAAA,IAAA;SAC7B;AAED,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,I
AAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;gBACJ;YACJ;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,sBAAsB,CAAC,OAAgB,EAAA;;AAE3C,QAAA,IAAI,OAAO,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAA,CAAA,EAAI,OAAO,CAAC,EAAE,EAAE;QAC3B;;AAGA,QAAA,IAAI,OAAO,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;YACrC,OAAO,CAAA,cAAA,EAAiB,OAAO,CAAC,YAAY,CAAC,aAAa,CAAC,IAAI;QACnE;;QAGA,MAAM,IAAI,GAAa,EAAE;QACzB,IAAI,OAAO,GAAmB,OAAO;QAErC,OAAO,OAAO,IAAI,OAAO,KAAK,QAAQ,CAAC,IAAI,EAAE;YACzC,IAAI,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,EAAE;YAE5C,IAAI,OAAO,CAAC,SAAS,IAAI,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ,EAAE;gBAC5D,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AAC/E,gBAAA,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACpB,oBAAA,QAAQ,IAAI,CAAA,CAAA,EAAI,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACnD;YACJ;AAEA,YAAA,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC;AACtB,YAAA,OAAO,GAAG,OAAO,CAAC,aAAa;QACnC;AAEA,QAAA,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;IAC3B;AAEA;;AAEG;IACK,mBAAmB,CAAC,CAAS,EAAE,CAAS,EAAA;AAC5C,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACpC,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAEpC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAChE,QAAA,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC,CAAC;AAEzC,QAAA,OAAO,YAAY,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI;IACzC;AAEA;;AAEG;IACH,SAAS,GAAA;QACL,OAAO,IAAI,CAAC,MAAM;IACtB;;AAIA;;;;;AAKG;IACH,MAAM,eAAe,CAAC,UAA+B,EAAA;;AAEjD,QAAA,IAAI,UAAU,CAAC,UAAU,EAAE;AACvB,YAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,UAAU;AACvC,YAAA,KAAK,MAAM,QAAQ,IAAI,UAAU,CAAC,UAAU,EAAE;;AAE1C,gBAAA,KAAK,MAAM,OAAO,IAAI,QAAQ,CAAC,QAAQ,EAAE;AACrC,oBAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE;oBAC5D,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC;oBACpC,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC;gBACnD;YACJ;QACJ;;QAGA,MAAM,SAAS,GAAG,IAAI,CAAC,0BAA0B,CAAC,UAAU,CAAC,SAAS,CAAC;QACvE,KAAK,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,IAAI,SAAS,EAAE;AACvC,YAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE;AACpD,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;AAC1B,YAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC1D;;AAGA,QAAA,IAAI,UAAU,CAAC,OAAO,EAAE;AACpB,YAAA,IAAI,CAAC,kBAAkB,CAAC,UAAU,CAAC,OAAO,CAAC;QAC/C;AAEA,QAAA,OAAO,CAAC,GAAG,CAAC,CAAA,0CAAA,EAA6C,IAAI,CAAC,UAAU,CAAC,MAAM,CAAA,aAAA,EAAgB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAA,OAAA,CAAS,CAAC;IACpI;AAEA;;;;AAIG;IACH,MAAM,WAAW,CAAC,MAAqB,EAAA;QACnC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE;;AAGrB,QAAA,IAAI,MAAM,CAAC,WAAW,EAAE;AACpB,YAAA,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,WAAW,CAAC;YAC3C;QACJ;;AAGA,QAAA,IAAI,MAAM,CAAC,UAAU,EAAE;AACnB,YAAA,IAAI;gBACA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC;AAC/C,gBAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;oBACd,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,QAAQ,CAAC,MAAM,CAAA,CAAE,CAAC;gBAClE;AACA,gBAAA,MAAM,IAAI,GAAyB,MAAM,QAAQ,CAAC,IAAI,EAAE;AACxD,gBAAA,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC;YACjC;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,KAAK,CAAC,sDAAsD,EAAE,KAAK,CAAC;YAChF;QACJ;IACJ;AAEA;;AAEG;AACK,IAAA,kBAAkB,CAAC,OAA6B,EAAA;AACpD,QAAA,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;;AAEzB,YAAA,MAAM,YAAY,GAAiB;gBAC/B,IAAI,EAAE,KAAK,CAAC,GAAG;gBACf,IAAI,EAAE,KAAK,CAAC,KAAK;gBACjB,WAAW,EAAE,KAAK,CAAC,WAAW;gBAC9B,OAAO,EAAE,KAAK,CAAC;aAClB;;AAGD,YAAA,MAAM,IAAI,GAAG;AACT,gBAAA,KAAK,CAAC,KAAK,CAAC,WAAW,EAAE;AACzB,gBAAA,KAAK,CAAC,GAAG,CAAC,WAAW,EAAE;AACvB,g
BAAA,GAAG,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;aACrD;AAED,YAAA,IAAI,KAAK,CAAC,WAAW,EAAE;;AAEnB,gBAAA,MAAM,SAAS,GAAG,KAAK,CAAC;AACnB,qBAAA,WAAW;qBACX,KAAK,CAAC,KAAK;qBACX,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AAC9B,gBAAA,IAAI,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;YAC3B;AAEA,YAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;gBACpB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,YAAY,CAAC;YAC5C;QACJ;QAEA,OAAO,CAAC,GAAG,CAAC,CAAA,oCAAA,EAAuC,OAAO,CAAC,MAAM,CAAA,gBAAA,CAAkB,CAAC;IACxF;AAEA;;AAEG;AACK,IAAA,0BAA0B,CAC9B,IAAmF,EAAA;AAEnF,QAAA,MAAM,MAAM,GAAG,IAAI,GAAG,EAAoB;AAE1C,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;;YAEpB,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE;YACrC,MAAM,QAAQ,GAAa,EAAE;;AAG7B,YAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,QAAQ,EAAE;gBACxB,QAAQ,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;YACrE;;AAGA,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC;AAClB,iBAAA,WAAW;AACX,iBAAA,OAAO,CAAC,cAAc,EAAE,EAAE;iBAC1B,KAAK,CAAC,KAAK;AACX,iBAAA,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACrD,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC;;AAG5B,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC;AACpB,iBAAA,WAAW;AACX,iBAAA,OAAO,CAAC,cAAc,EAAE,EAAE;iBAC1B,KAAK,CAAC,KAAK;AACX,iBAAA,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AAErD,YAAA,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAkB;AAC1C,YAAA,KAAK,MAAM,IAAI,IAAI,YAAY,EAAE;AAC7B,gBAAA,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACrD;;YAGA,MAAM,QAAQ,GAAG,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE;AAClC,iBAAA,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC1B,iBAAA,KAAK,CAAC,CAAC,EAAE,CAAC;iBACV,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC;AAE1B,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;AAE1B,YAAA,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC9C;AAEA,QAAA,OAAO,MAAM;IACjB;AAEA;;AAEG;AACK,IAAA,UAAU,CAAC,IAAY,EAAA;AAC3B,QAAA,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC;AACtB,YAAA,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK;AACnE,YAAA,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ;AACtE,YAAA,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvE,YAAA,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO;AACvE,YAAA,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO;AACtE,YAAA,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAC7D,YAAA,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;AACtE,YAAA,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM;AAChE,YAAA,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;AAClE,SAAA,CAAC;AACF,QAAA,OAAO,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC;IAC9B;AAEA;;AAEG;IACH,aAAa,GAAA;QACT,OAAO,IAAI,CAAC,UAAU;IAC1B;AAEA;;AAEG;IACH,gBAAgB,GAAA;QACZ,OAAO,IAAI,CAAC,aAAa;IAC7B;AAEA;;AAEG;IACH,SAAS,GAAA;AACL,QAAA,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,GAAG,CAAC;IACpE;AACH;;ACx1BD;AACA;AAUA;;;;;AAKG;MACU,eAAe,CAAA;AAOxB,IAAA,WAAA,CAAY,MAAmB,EAAA;QAJvB,IAAA,CAAA,gBAAgB,GAAuB,IAAI;QAC3C,IAAA,CAAA,WAAW,GAAG,CAAC;AACf,QAAA,IAAA,CAAA,eAAe,GAAG,IAAI,CAAC,GAAG,EAAE;AAGhC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;QACpB,IAAI,CAAC,UAAU,GAAG,IAAI,0BAA0B,CAAC,MAAM,CAAC;;AAGxD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,IAAI,CAAC,YAAY,EAAE;Q
ACvB;IACJ;AAEA;;;;;AAKG;AACH,IAAA,eAAe,CAAC,OAAe,EAAA;QAC3B,OAAO,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC;IAC5C;AAEA;;;;;AAKG;IACH,MAAM,aAAa,CAAC,MAAmB,EAAA;;AAEnC,QAAA,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE;QACtB,IAAI,GAAG,GAAG,IAAI,CAAC,eAAe,GAAG,KAAK,EAAE;AACpC,YAAA,IAAI,CAAC,WAAW,GAAG,CAAC;AACpB,YAAA,IAAI,CAAC,eAAe,GAAG,GAAG;QAC9B;;QAGA,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,iBAAiB,IAAI,CAAC;AACrD,QAAA,IAAI,IAAI,CAAC,WAAW,IAAI,UAAU,EAAE;YAChC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,6BAAA,EAAgC,UAAU,CAAA,oBAAA;aACtD;QACL;;QAGA,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,EAAE;YAC/B,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,CAAA,aAAA,EAAgB,MAAM,CAAC,IAAI,CAAA,gBAAA;aACvC;QACL;QAEA,IAAI,CAAC,WAAW,EAAE;AAElB,QAAA,QAAQ,MAAM,CAAC,IAAI;AACf,YAAA,KAAK,UAAU;AACX,gBAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;AAEtD,YAAA,KAAK,UAAU;AACX,gBAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,KAAe,CAAC;AAEvE,YAAA,KAAK,cAAc;gBACf,OAAO,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AAE3C,YAAA,KAAK,cAAc;gBACf,OAAO,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AAE3C,YAAA,KAAK,QAAQ;gBACT,OAAO,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;AAE9C,YAAA,KAAK,WAAW;gBACZ,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,MAAM,CAAC;AAE/C,YAAA,KAAK,QAAQ;AACT,gBAAA,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;AAEjE,YAAA;gBACI,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE,CAAA,qBAAA,EAAwB,MAAM,CAAC,IAAI,CAAA;iBAC/C;;IAEb;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,MAAmB,EAAA;AACvC,QAAA,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc;AACjD,QAAA,IAAI,CAAC,cAAc;YAAE,OAAO,IAAI,CAAC;AAEjC,QAAA,MAAM,aAAa,GAA2B;AAC1C,YAAA,UAAU,EAAE,UAAU;AACtB,YAAA,UAAU,EAAE,MAAM;AAClB,YAAA,cAAc,EAAE,OAAO;AACvB,YAAA,QAAQ,EAAE,QAAQ;YAClB,WAAW,EAAE,OAAO;AACpB,YAAA,cAAc,EAAE,OAAO;AACvB,YAAA,QAAQ,EAAE;SACb;QAED,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAkC;AAC9E,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,UAAU,CAAC;IAC9C;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,QAAgB,EAAA;;AAEtC,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,EAAE;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,EAAE;AAChD,gBAAA,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;AAC5B,oBAAA,OAAO,KAAK;gBAChB;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE;AACzE,YAAA,OAAO,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACnF;AAEA,QAAA,OAAO,IAAI;IACf;;AAIA;;AAEG;AACK,IAAA,MAAM,QAAQ,CAClB,IAAY,EACZ,MAAgC,EAAA;AAEhC,QAAA,IAAI;;YAEA,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,EAAE;gBACnC,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;YAGA,IAAI,GAAG,GAAG,IAAI;YACd,IAAI,MAAM,EAAE;AACR,gBAAA,MAAM,YAAY,GAAG,IAAI,eAAe,EAAE;AAC1C,gBAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;oBAC/C,IAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,EAAE;wBACvC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;oBACxC;gBACJ;AACA,gBAAA,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,EAAE;gBAC3C,IAAI,WAAW,EAAE;AACb,oBAAA,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,WAAW;gBACxD;YACJ;;AAGA,YAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;gBAC/B,OAAO;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,IAAI,CAAA,gCAAA;iBACjC;YACL;;AAGA,YAAA,IAAI,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,EAAE;;AAEzB,gBAAA,IAAI,IAAI,CAAC,uBAAuB,CAAC,GAAG,CAAC,EAAE;oBACnC,OAAO;AACH,wBAAA,OAAO,EAAE,IAAI;wBACb,OAAO,EAAE,CAAA,aAAA,EAAgB,IAAI,CAAA;qBAChC;gBACL;;AAGA,gBAAA,MAAM,CAAC,QAAQ,CAAC,IAAI,GAAG,GAAG;gBAC1B,OAAO;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,IAAI,CAAA,GAAA;iBACjC;YACL;;YAGA,MAAM,CAAC,IAAI,CAA
C,GAAG,EAAE,QAAQ,EAAE,qBAAqB,CAAC;YACjD,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,OAAA,EAAU,GAAG,CAAA,WAAA;aACzB;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;AACK,IAAA,MAAM,QAAQ,CAAC,QAAgB,EAAE,KAAa,EAAA;;QAElD,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;QAGA,MAAM,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;;AAGzD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE,CAAA,qBAAA,EAAwB,iBAAiB,CAAA,MAAA,EAAS,KAAK,CAAA,eAAA;aACnE;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,iBAAiB,CAAC;YAEzD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,iBAAiB,CAAA;iBACnD;YACL;AAEA,YAAA,IAAI,EAAE,OAAO,YAAY,gBAAgB;AACjC,gBAAA,OAAO,YAAY,mBAAmB;AACtC,gBAAA,OAAO,YAAY,iBAAiB,CAAC,EAAE;gBAC3C,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;YAGA,OAAO,CAAC,KAAK,EAAE;;AAGf,YAAA,IAAI,OAAO,YAAY,iBAAiB,EAAE;;gBAEtC,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,IAAI,CAC3C,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE;oBAC7C,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAC1D;gBACD,IAAI,MAAM,EAAE;AACR,oBAAA,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK;gBAChC;qBAAO;oBACH,OAAO;AACH,wBAAA,OAAO,EAAE,KAAK;wBACd,OAAO,EAAE,CAAA,QAAA,EAAW,KAAK,CAAA,WAAA;qBAC5B;gBACL;YACJ;iBAAO;AACH,gBAAA,OAAO,CAAC,KAAK,GAAG,KAAK;YACzB;;AAGA,YAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;AAC5D,YAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,QAAQ,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;;AAG7D,YAAA,MAAM,sBAAsB,GAAG,MAAM,CAAC,wBAAwB,CAC1D,MAAM,CAAC,gBAAgB,CAAC,SAAS,EAAE,OAAO,CAC7C,EAAE,GAAG;AACN,YAAA,IAAI,sBAAsB,IAAI,OAAO,YAAY,gBAAgB,EAAE;AAC/D,gBAAA,sBAAsB,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC;AAC3C,gBAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;YAChE;YAEA,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA;aACvC;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACK,MAAM,YAAY,CAAC,QAAgB,EAAA;;QAEvC,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;QAGA,MAAM,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;;AAGzD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,UAAA,EAAa,iBAAiB,CAAA,iCAAA;aAC1C;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,iBAAiB,CAAC;YAEzD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,iBAAiB,CAAA;iBACnD;YACL;AAEA,YAAA,IAAI,EAAE,OAAO,YAAY,WAAW,CAAC,EAAE;gBACnC,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;AAGA,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;;AAG/D,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC;;YAGrB,OAAO,CAAC,KAAK,EAAE;YAEf,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACK,MAAM,YAAY,CAAC,eAAuB,EAAA;AAC9C,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC;IAC7C;AAEA;;AAEG;IACK,MAAM,eAAe,CAAC,QAAgB,EAAA;;AAE1C,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YAC/B,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,
CAAA,WAAA,EAAc,QAAQ,CAAA,iCAAA;aAClC;QACL;AAEA,QAAA,IAAI;;AAEA,YAAA,IAAI,QAAQ,KAAK,MAAM,EAAE;AACrB,gBAAA,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC;gBAC/C,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,iBAAiB,EAAE;YACxD;AAEA,YAAA,IAAI,QAAQ,KAAK,iBAAiB,EAAE;AAChC,gBAAA,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,EAAE,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC;gBACxE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,oBAAoB,EAAE;YAC3D;YAEA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC;YAEhD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,QAAQ,CAAA;iBAC1C;YACL;AAEA,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;YAE9D,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACH,MAAM,gBAAgB,CAAC,QAAgB,EAAA;;QAEnC,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;AAGA,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,QAAQ,CAAA,8BAAA;aACrC;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC;YAEhD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,QAAQ,CAAA;iBAC1C;YACL;;YAGA,IAAI,CAAC,eAAe,EAAE;;AAGtB,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;;AAG/D,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC;;AAGrB,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,qBAAqB,EAAE;YAC5C,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7C,YAAA,OAAO,CAAC,SAAS,GAAG,6BAA6B;AACjD,YAAA,OAAO,CAAC,KAAK,CAAC,OAAO,GAAG;;uBAEb,IAAI,CAAC,GAAG,GAAG,CAAC,CAAA;wBACX,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;yBACZ,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;0BACb,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;;;;;;aAM5B;AAED,YAAA,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;AAClC,YAAA,IAAI,CAAC,gBAAgB,GAAG,OAAO;;YAG/B,UAAU,CAAC,MAAM,IAAI,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC;YAE9C,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;AACK,IAAA,MAAM,mBAAmB,CAC7B,UAAkB,EAClB,MAAgC,EAAA;;QAGhC,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,EAAE;YACzC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,eAAA,EAAkB,UAAU,CAAA,WAAA;aACxC;QACL;QAEA,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,aAAc,CAAC,UAAU,CAAC;AAEvD,QAAA,IAAI;AACA,YAAA,OAAO,MAAM,QAAQ,CAAC,MAAM,CAAC;QACjC;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,eAAA,EAAkB,UAAU,CAAA,QAAA,CAAU;AAC/C,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;;AAIA;;AAEG;AACK,IAAA,aAAa,CAAC,GAAW,EAAA;;QAE7B,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACtE,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI;AACA,YAAA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;YACjD,OAAO,MAAM,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,CAAC,MAAM;QACnD;AAAE,QAAA,MAAM;YACJ,OAAO,IAAI,CAAC;QAChB;IACJ;AAEA;;AAEG;AACK,IAAA,uBAAuB,CAAC,GAAW,EAAA;AACvC,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,KAAK;QAChB;;AAGA,QAAA,IAAI;YACA,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,CAAC;YACrC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,UAAU,CAAC,CAAC;;AAGnD,YAAA,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;gBACnB,MAAM,CAAC,aAAa,CAAC,IAAI,eAAe,CAAC,YAAY,CAAC,CAA
C;YAC3D;AAEA,YAAA,OAAO,IAAI;QACf;AAAE,QAAA,MAAM;AACJ,YAAA,OAAO,KAAK;QAChB;IACJ;AAEA;;AAEG;AACK,IAAA,qBAAqB,CAAC,GAAW,EAAA;;AAErC,QAAA,IAAI,sBAAsB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAClC,YAAA,OAAO,KAAK;QAChB;;AAGA,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI;AACA,YAAA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;AACjD,YAAA,OAAO,MAAM,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,CAAC,MAAM,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC;QAC1E;AAAE,QAAA,MAAM;AACJ,YAAA,OAAO,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC;QAC9B;IACJ;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,QAAgB,EAAA;;AAErC,QAAA,OAAO;AACF,aAAA,OAAO,CAAC,SAAS,EAAE,EAAE;AACrB,aAAA,OAAO,CAAC,eAAe,EAAE,EAAE;AAC3B,aAAA,OAAO,CAAC,SAAS,EAAE,EAAE;AACrB,aAAA,IAAI,EAAE;IACf;AAEA;;AAEG;AACK,IAAA,qBAAqB,CAAC,UAAkB,EAAA;AAC5C,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,aAAa,KAAK,SAAS;AACrC,YAAA,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,UAAU,CAAC;IACxF;AAEA;;AAEG;IACK,eAAe,GAAA;AACnB,QAAA,IAAI,IAAI,CAAC,gBAAgB,EAAE;AACvB,YAAA,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE;AAC9B,YAAA,IAAI,CAAC,gBAAgB,GAAG,IAAI;QAChC;IACJ;AAEA;;AAEG;IACK,YAAY,GAAA;AAChB,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC;QACJ;AAEA,QAAA,IAAI,QAAQ,CAAC,cAAc,CAAC,wBAAwB,CAAC,EAAE;YACnD;QACJ;QAEA,MAAM,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,OAAO,CAAC;AAC7C,QAAA,KAAK,CAAC,EAAE,GAAG,wBAAwB;QACnC,KAAK,CAAC,WAAW,GAAG;;;;;;;;;;;;;SAanB;AAED,QAAA,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;IACpC;AAEA;;AAEG;AACK,IAAA,KAAK,CAAC,EAAU,EAAA;AACpB,QAAA,OAAO,IAAI,OAAO,CAAC,OAAO,IAAI,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAC1D;AAEA;;AAEG;IACH,SAAS,GAAA;QACL,OAAO,IAAI,CAAC,MAAM;IACtB;AAEA;;AAEG;IACH,aAAa,GAAA;QACT,OAAO,IAAI,CAAC,UAAU;IAC1B;AAEA;;AAEG;IACH,gBAAgB,GAAA;AACZ,QAAA,IAAI,CAAC,WAAW,GAAG,CAAC;AACpB,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,GAAG,EAAE;IACrC;AACH;;ACvrBD;AACA;AAMA;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4BG;AACG,SAAU,2BAA2B,CAAC,MAAwB,EAAA;AAChE,IAAA,MAAM,YAAY,GAAwB;AACtC,QAAA,KAAK,EAAE,eAAe;AACtB,QAAA,gBAAgB,EAAE;KACrB;AACD,IAAA,MAAM,CAAC,eAAe,CAAC,YAAY,CAAC;AACxC;;ACmkBA;AACA;AACA;AACA;AACA,MAAM,WAAW,GAAG;AACpB,IAAI,WAAW,EAAE,EAAE;AACnB,IAAI,UAAU,EAAE,MAAM;AACtB,IAAI,WAAW,EAAE,IAAI;AACrB,IAAI,UAAU,EAAE,QAAQ;AACxB,IAAI,IAAI,EAAE,IAAI;AACd,IAAI,OAAO,EAAE,CAAC;AACd,IAAI,GAAG,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE,mBAAmB,EAAE,KAAK,EAAE,MAAM,EAAE;AACxF,CAAC;AAC4B,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,WAAW,CAAC,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE;AACxE,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,WAAW,CAAC,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,OAAO,EAAE,4BAA4B,EAAE,kBAAkB,EAAE,KAAK,EAAE;;AA2nQ5K,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,GAAG,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC,MAAM,EAAE;AACxD,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACjC,IAAI,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AACzC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC;AAC/B,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC9B,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,CAAC,EAAE;AAClC,IAAI,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AAChC;AACA,SAAS,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE;AACxB,IAAI,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG;AAC9B,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AAChC,QAAQ,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;AACtB,QAAQ,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC;AACzB,
QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,GAAG,IAAI,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC;AAC5B,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC;AAChC,IAAI;AACJ;AACA,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,MAAM,GAAG,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AACjD,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACnB,IAAI,OAAO,CAAC;AACZ;AACA;AACA,SAAS,OAAO,CAAC,GAAG,EAAE;AACtB,IAAI,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC;AACpB,IAAI,OAAO,CAAC,KAAK,CAAC;AAClB,QAAQ,CAAC,GAAG,GAAG,EAAE;AACjB,IAAI,OAAO,CAAC,KAAK,CAAC;AAClB,QAAQ,CAAC,GAAG,GAAG,EAAE;AACjB,IAAI,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC;AAClE;;AAEA;AACA,MAAM,WAAW,CAAC;AAClB,IAAI,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,EAAE;AACrC,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,MAAM,GAAG,MAAM;AAC5B,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AAC3C,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AAC3C;AACA,QAAQ,MAAM,GAAG,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC;AAC/C,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG;AACtC,IAAI;AACJ;AACA,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE;AACnB,QAAQ,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,IAAI;AACzC;AACA,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC3C,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACpB,QAAQ;AACR;AACA,QAAQ,IAAI,KAAK,GAAG,CAAC;AACrB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,KAAK,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;AACtC,QAAQ,MAAM,KAAK,GAAG,CAAC,GAAG,KAAK;AAC/B;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK;AACnC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC5C,QAAQ;AACR;AACA;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC3C,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACrB,QAAQ;AACR;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC5C,QAAQ;AACR,IAAI;AACJ;AACA,IAAI,OAAO,CAAC,GAAG,EAAE;AACjB,QAAQ,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,IAAI;AACnC,QAAQ,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,CAAC,CA
AC;AACvC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7C,YAAY,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,GAAG;AAClB,IAAI;AACJ;;AAuLA;AACA;AACA;AACA,MAAM,GAAG,GAAG,KAAK,CAAC;AA8ElB;AACA,SAAS,IAAI,CAAC,CAAC,EAAE;AACjB;AACA,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACpC;AACA,SAAS,WAAW,CAAC,CAAC,EAAE,GAAG,GAAG,GAAG,EAAE;AACnC,IAAI,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC;AACvB,IAAI,IAAI,EAAE,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC7C,QAAQ,OAAO,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;AAC1C,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC;AACxC,IAAI,MAAM,GAAG,GAAG,CAAC,GAAG,GAAG;AACvB,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AACzB,IAAI,OAAO,CAAC;AACZ;AAwCA;AACA,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE;AACxB,IAAI,IAAI,EAAE,EAAE,EAAE;AACd,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC;AAC1C,IAAI,IAAI,CAAC,KAAK,CAAC;AACf,QAAQ,OAAO,CAAC;AAChB,IAAI,IAAI,IAAI,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAChC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AAChC,QAAQ,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,CAAC,EAAE,EAAE,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,CAAC;AAC1H,QAAQ,IAAI,IAAI,EAAE,GAAG,EAAE;AACvB,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,CAAC,CAAC;AACnD,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,KAAK;AAC1B,IAAI,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC;AACrC;;AA4GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,SAAS,EAAE;AACtD,IAAI,SAAS,KAAK,CAAC,KAAK,EAAE,EAAE,OAAO,KAAK,YAAY,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC,UAAU,OAAO,EAAE,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChH,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;AAC/D,QAAQ,SAAS,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnG,QAAQ,SAAS,QAAQ,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtG,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;AACtH,QAAQ,IAAI,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,KAAK,CAAC,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;AAC9E,IAAI,CAAC,CAAC,CAAC;AACP,CAAC;AACD;AACA,OAAO,eAAe,KAAK,UAAU,GAAG,eAAe,GAAG,UAAU,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE;AAChG,IAAI,IAAI,CAAC,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;AAC/B,IAAI,OAAO,CAAC,CAAC,IAAI,GAAG,iBAAiB,EAAE,CAAC,CAAC,KAAK,GAAG,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG,UAAU,EAAE,CAAC,CAAC;AACrF,CAAC;;AAED;AACA;AACA;AACA,SAAS,gBAAgB,CAAC,IAAI,EAAE;AAChC,IAAI,OAAO;AACX,SAAS,O
AAO,CAAC,MAAM,EAAE,GAAG;AAC5B,SAAS,KAAK,CAAC,eAAe;AAC9B,SAAS,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE;AAC5B,SAAS,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpD;AACA,SAAS,KAAK,CAAC,IAAI,EAAE;AACrB,IAAI,OAAO;AACX,SAAS,OAAO,CAAC,iBAAiB,EAAE,GAAG;AACvC,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,sBAAsB,EAAE,EAAE,CAAC;AAC5C,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,MAAM,EAAE,GAAG;AAC5B,SAAS,IAAI,EAAE;AACf;AACA,SAAS,cAAc,CAAC,CAAC,EAAE;AAC3B;AACA,IAAI,QAAQ,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,QAAQ,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,QAAQ,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,QAAQ,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,QAAQ,iBAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;AACjC,QAAQ,CAAC,CAAC,MAAM,GAAG,EAAE;AACrB;AACA;AACA,SAAS,OAAO,CAAC,OAAO,EAAE;AAC1B,IAAI,OAAO;AACX,SAAS,OAAO,CAAC,gBAAgB,EAAE,GAAG;AACtC,SAAS,OAAO,CAAC,WAAW,EAAE,IAAI;AAClC,SAAS,OAAO,CAAC,qBAAqB,EAAE,OAAO;AAC/C,SAAS,OAAO,CAAC,SAAS,EAAE,GAAG;AAC/B,SAAS,IAAI,EAAE;AACf;AACA;AACA,SAAS,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE;AACjD,IAAI,OAAO,SAAS,CAAC,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,QAAQ,EAAE,KAAK,EAAE,IAAI,GAAG,EAAE,EAAE;AACrF;AACA,QAAQ,IAAI,EAAE,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;AACzE,YAAY,OAAO,mBAAmB;AACtC,QAAQ,MAAM,EAAE,GAAG,GAAG,EAAE,EAAE,QAAQ,GAAG,EAAE,EAAE,KAAK,GAAG,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE,YAAY,GAAG,CAAC,EAAE,WAAW,GAAG,SAAS,GAAG,GAAG,IAAI;AACrH;AACA,QAAQ,MAAM,OAAO,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACpE,QAAQ,IAAI,SAAS,GAAG,gBAAgB,CAAC,OAAO;AAChD,aAAa,MAAM,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC;AAC3C,aAAa,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;AAC1B,QAAQ,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC;AAClC,YAAY,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;AACxD;AACA,QAAQ,MAAM,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,KAAK,CAAC;AAClD,QAAQ,MAAM,KAAK,GAAG,IAAI,WAAW,CAAC,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC;AAC5D,QAAQ,MAAM,MAAM,GAAG,CAAC,CAAC,KAAK;AAC9B,YAAY,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AAC7C,YAAY,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,GAAG,CAAC;AAC/C,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE;AACxC,gBAAgB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,GAAG;AAC9C,YAAY,OAAO,MAAM,CAAC,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC;AAChD,QAAQ,CAAC;AACT,QAAQ,MAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC;AACrC,QAAQ,MAAM,OAAO,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AACvF;AACA,QAAQ,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC5C,YAAY,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC/B,YAAY,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC;AACrC;AACA,YAAY,MAAM,KAAK,GAAG,CAAC,CAAC,WAAW,EAAE;AACzC,YAAY,KAAK,MAAM,CAAC,IAAI,OAAO;AACnC,gBAAgB,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;AACrC,oBAAoB,CAAC,IAAI,IAAI;AAC7B,YAAY,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAC9B,QAAQ,CAAC,CAAC;AACV,QAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxC,QAAQ,IAAI,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC;AAC/C;AACA,QAAQ,IAAI,OAAO,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACnD,QAAQ,IAAI,OAAO,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,QAAQ,CAAC;AACpD,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;AACzC,YAAY,MAAM,IAAI,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC;AACzE,YAAY,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC,CAAC,CAAC,CAAC;AACrD,YAAY,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC
,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AAChE,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpD,gBAAgB,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,YAAY;AACZ;AACA,YAAY,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpD,gBAAgB,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;AACnD,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACjD,oBAAoB,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1C,YAAY;AACZ,YAAY,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC;AAC3C,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE;AACnD,gBAAgB,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI;AAClC,YAAY,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAC9C,gBAAgB,CAAC;AACjB,gBAAgB,CAAC,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;AAC/C,aAAa,CAAC,CAAC;AACf,YAAY,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9C,YAAY,OAAO,GAAG;AACtB,iBAAiB,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;AACnE,iBAAiB,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC/B,iBAAiB,IAAI,CAAC,GAAG,CAAC;AAC1B,QAAQ;AACR;AACA,QAAQ,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AAClC,QAAQ,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;AAC5E,QAAQ,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,IAAI,QAAQ;AAClD,QAAQ,MAAM,MAAM,GAAG,WAAW,KAAK;AACvC,cAAc;AACd,cAAc,WAAW,KAAK;AAC9B,kBAAkB;AAClB,kBAAkB,EAAE;AACpB,QAAQ,OAAO,CAAC,EAAE,MAAM,CAAC,EAAE,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,gBAAgB,CAAC;AACnE,IAAI,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACA;AACA,SAAS,OAAO,CAAC,MAAM,EAAE,GAAG,EAAE,IAAI,EAAE,QAAQ,GAAG,CAAC,EAAE;AAClD,IAAI,MAAM,MAAM,GAAG,IAAI,GAAG,EAAE;AAC5B;AACA,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC9B,QAAQ,IAAI,EAAE,KAAK,SAAS;AAC5B,YAAY;AACZ,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,QAAQ,GAAG,CAAC;AACxC,QAAQ,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjD,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;AACjD,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,EAAE;AACvB,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,EAAE;AACjC,QAAQ,MAAM,EAAE,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG,KAAK,CAAC;AAC1C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACpC,IAAI;AACJ,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE;AAC5B,IAAI,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAC/B,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE;AAC7B,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,QAAQ,MAAM,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3B,QAAQ,IAAI,EAAE;AACd,YAAY,GAAG,IAAI,EAAE,GAAG,EAAE;AAC1B,IAAI;AACJ,IAAI,KAAK,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC;AAC1B,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,IAAI,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE;AAClB,QAAQ,OAAO,CAAC;AAChB,IAAI,OAAO,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAChD;AACA;AACA;AACA;AACA,SAAS,aAAa,CAAC,CAAC,EAAE,GAAG,EAAE;AAC/B,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AACnC,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC;AAC5B,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AAClB,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;AACzB,IAAI,IAAI,CAAC,G
AAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AACtC,IAAI,IAAI,CAAC,KAAK,QAAQ,EAAE;AACxB,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACjF,QAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC;AACjD,IAAI;AACJ,SAAS,IAAI,CAAC,KAAK,OAAO,EAAE;AAC5B,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;AACnC,QAAQ,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI;AACJ,SAAS;AACT,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC3C,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,GAAG,KAAK,CAAC,CAAC;AAC/D,IAAI;AACJ;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AACnC,IAAI,IAAI,CAAC,KAAK,QAAQ,EAAE;AACxB,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACjF,QAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC;AACjD,IAAI;AACJ,SAAS,IAAI,CAAC,KAAK,OAAO,EAAE;AAC5B,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;AACnC,QAAQ,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI;AACJ,SAAS;AACT,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC3C,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,GAAG,KAAK,CAAC,CAAC;AAC/D,IAAI;AACJ;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,CAAC,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE;AAClE,IAAI,MAAM,CAAC,GAAG,aAAa,CAAC,CAAC,EAAE,SAAS,CAAC;AACzC,IAAI,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,WAAW,CAAC,MAAM,CAAC;AACtD,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACjD,QAAQ,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;AAChC,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC;AACpD,IAAI;AACJ,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;AAClC,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;AAC7C,YAAY,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,IAAI,OAAO,KAAK;AAChB;;AAEA;AACA;AACA;AACA,MAAM,UAAU,GAAG,IAAI,GAAG,EAAE;AAC5B,SAAS,aAAa,CAAC,GAAG,EAAE;AAC5B,IAAI,MAAM,CAAC,GAAG,GAAG;AACjB,IAAI,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACpC,IAAI,IAAI,MAAM;AACd,QAAQ,OAAO,MAAM;AACrB,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,WAAW,EAAE;AAC7B,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,0BAA0B,EAAE,EAAE,CAAC;AACjD,IAAI,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,EAAE;AACvB,QAAQ,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5B,QAAQ,OAAO,CAAC;AAChB,IAAI;AACJ;AACA,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3C,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG;AAChC,IAAI;AACJ,SAAS,IAAI,2BAA2B,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAClE,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,2BAA2B,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;AACvG,IAAI;AACJ,SAAS,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC
,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AACrE,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC;AAC1B,IAAI;AACJ;AACA,IAAI,MAAM,KAAK,GAAG;AAClB,QAAQ,CAAC,mBAAmB,EAAE,KAAK,CAAC;AACpC,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;AAC5B,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;AACrB,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;AACrB,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,aAAa,EAAE,KAAK,CAAC;AAC9B,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC;AACvB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;AACzB,QAAQ,CAAC,cAAc,EAAE,EAAE,CAAC;AAC5B,QAAQ,CAAC,UAAU,EAAE,EAAE,CAAC;AACxB,KAAK;AACL,IAAI,KAAK,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,IAAI,KAAK,EAAE;AACnC,QAAQ,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,EAAE;AACtD,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,GAAG,CAAC;AAClC,YAAY;AACZ,QAAQ;AACR,IAAI;AACJ,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACxB,IAAI,OAAO,CAAC;AACZ;AACA,SAAS,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE;AAClC,IAAI,MAAM,IAAI,GAAG,IAAI,CAAC,WAAW;AACjC,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,aAAa,EAAE,GAAG;AACnC,SAAS,KAAK,CAAC,KAAK;AACpB,SAAS,MAAM,CAAC,OAAO,CAAC;AACxB,IAAI,IAAI,CAAC,MAAM;AACf,QAAQ,OAAO,IAAI;AACnB,IAAI,MAAM,GAAG,GAAG,EAAE;AAClB,IAAI,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC1B,QAAQ,MAAM,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC;AAClC,QAAQ,IAAI,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC;AAC7B,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACvB,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA,SAAS,WAAW,CAAC,CAAC,EAAE;AACxB,IAAI,MAAM,IAAI,GAAG,EAAE;AACnB,IAAI,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AACzB,QAAQ,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC;AACxC,IAAI,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC;AAClC,QAAQ,IAAI,CAAC,IAAI,CAAC,qCAAqC,CAAC;AACxD,IAAI,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,QAAQ,IAAI,CAAC,IAAI,CAAC,oCAAoC,CAAC;AACvD,IAAI,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5B,QAAQ,IAAI,CAAC,IAAI,CAAC,uCAAuC,CAAC;AAC1D,IAAI,OAAO,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;AACnC;;AA+GA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,MAAM,EAAE,KAAK,EAAE;AACrC,IAAI,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/F,IAAI,MAAM,WAAW,GAAG,oEAAoE,CAAC,IAAI,CAAC,KAAK,CAAC;AACxG,IAAI,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI;AAC3B,QAAQ,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE;AAC9C,QAAQ,MAAM,EAAE,GAAG,IAAI,CAAC,WAAW,EAAE;AACrC,QAAQ,IAAI,GAAG,GAAG,CAAC;AACnB,QAAQ,KAAK,MAAM,CAAC,IAAI,GAAG;AAC3B,YAAY,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC9B,gBAAgB,GAAG,EAAE;AACrB,QAAQ,IAAI,WAAW,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAC3C,YAAY,GAAG,IAAI,CAAC,CAAC;AACrB,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;AACxC,IAAI,CAAC,CAAC;AACN;AACA;AACA;AACA;AACA,SAAS,WAAW,CAAC,GAAG,EAAE,CAAC,EAAE;AAC7B,IAAI,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC;AAC5C,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,IAAI,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;AAC1B;AACA;AACA;AACA;AACA,SAAS,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAC1B,IAAI,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACtC;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,IAAI,EAAE;AAC9B;AACA,IAAI,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,EAAE,SAAS,GA
AG,GAAG,IAAI;AACnN;AACA,IAAI,MAAM,IAAI,GAAG,YAAY,GAAG,WAAW,CAAC,KAAK,CAAC,GAAG,KAAK;AAC1D,IAAI,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;AAC1C,IAAI,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,EAAE,QAAQ,EAAE,QAAQ,CAAC;AACvD,IAAI,MAAM,MAAM,GAAG,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,CAAC;AAC9E;AACA,IAAI,MAAM,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACjE;AACA,IAAI,MAAM,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;AACjF;AACA,IAAI,MAAM,KAAK,GAAG,YAAY,CAAC,MAAM,EAAE,KAAK,CAAC;AAC7C;AACA,IAAI,MAAM,YAAY,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;AAC7C,IAAI,MAAM,MAAM,GAAG,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,KAAK,GAAG,IAAI;AACpE,IAAI,MAAM,MAAM,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AAC7C,QAAQ,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;AAChC,QAAQ,MAAM,CAAC,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC;AACjC;AACA,QAAQ,MAAM,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,QAAQ,MAAM,CAAC,GAAG,GAAG,IAAI,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,GAAG,CAAC,CAAC;AACvE;AACA,QAAQ,OAAO,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,IAAI,CAAC,CAAC;AACN;AACA,IAAI,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,KAAK,IAAI,IAAI,SAAS,KAAK,MAAM,GAAG,SAAS,GAAG,CAAC,CAAC;AACvF,IAAI,MAAM,IAAI,GAAG,WAAW,CAAC,MAAM,EAAE,GAAG,CAAC;AACzC,IAAI,MAAM,SAAS,GAAG,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;AAChF;AACA,IAAI,MAAM,KAAK,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI;AACrC,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,OAAO,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,IAAI,mBAAmB;AACxG,QAAQ,OAAO;AACf,YAAY,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5B,YAAY,OAAO,EAAE,CAAC,CAAC,OAAO;AAC9B,YAAY,OAAO,EAAE,IAAI;AACzB,YAAY,KAAK,EAAE,CAAC;AACpB,SAAS;AACT,IAAI,CAAC,CAAC;AACN,IAAI,OAAO;AACX,QAAQ,KAAK;AACb,QAAQ,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;AAC7C,QAAQ,OAAO,EAAE,SAAS;AAC1B,QAAQ,WAAW,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,WAAW,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC;AACvD,KAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC;AACvB,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM;AACjH,IAAI,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM;AAC/G,IAAI,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI;AAC/G,IAAI,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE;AAChG,CAAC,CAAC;AACF,SAAS,QAAQ,CAAC,CAAC,EAAE;AACrB,IAAI,OAAO;AACX,SAAS,WAAW;AACpB,SAAS,OAAO,CAAC,qCAAqC,EAAE,GAAG;AAC3D,SAAS,KAAK,CAAC,KAAK;AACpB,SAAS,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACzC;AACA,SAAS,MAAM,CAAC,GAAG,EAAE,EAAE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxD,SAAS,gBAAgB,CAAC,IAAI,EAAE;AAChC,IAAI,MAAM,KAAK,GAAG,IAAI,GAAG,EAAE;AAC3B,IAAI,MAAM,GAAG,GAAG,EAAE;AAClB,IAAI,MAAM,OAAO,GAAG,EAAE;AACtB,IAAI,IAAI,MAAM,GAAG,CAAC;AAClB,IAAI,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC1B,QAAQ,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAA
C;AAChC,QAAQ,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;AACjC,QAAQ,MAAM,EAAE,GAAG,IAAI,GAAG,EAAE;AAC5B,QAAQ,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC9B,YAAY,IAAI,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AACjC,YAAY,IAAI,EAAE,KAAK,SAAS,EAAE;AAClC,gBAAgB,EAAE,GAAG,MAAM,EAAE;AAC7B,gBAAgB,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;AAChC,YAAY;AACZ,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC7C,QAAQ;AACR,QAAQ,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;AACpB,IAAI;AACJ,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM;AACzB,IAAI,MAAM,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACpC,IAAI,KAAK,MAAM,EAAE,IAAI,GAAG;AACxB,QAAQ,KAAK,MAAM,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE;AAClC,YAAY,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC;AACvB,IAAI,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAClE,IAAI,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACtE,IAAI,OAAO,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,OAAO,EAAE;AAClE;AACA,SAAS,WAAW,CAAC,EAAE,EAAE,GAAG,EAAE;AAC9B,IAAI,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE;AACzB,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,EAAE;AAC7B,QAAQ,MAAM,GAAG,GAAG,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACvC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AACvB,QAAQ,KAAK,IAAI,GAAG,GAAG,GAAG;AAC1B,IAAI;AACJ,IAAI,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,KAAK;AAC1C,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG;AAC5B,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC;AAC5B,IAAI,OAAO,GAAG;AACd;AACA,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE;AACxB,IAAI,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5D,IAAI,IAAI,GAAG,GAAG,CAAC;AACf,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK,EAAE;AAChC,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AAC9B,QAAQ,IAAI,CAAC,KAAK,SAAS;AAC3B,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC;AACxB,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA,SAAS,SAAS,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,GAAG,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE;AAC9D,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE;AAC3B,QAAQ,MAAM,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC,QAAQ,IAAI,CAAC,IAAI,CAAC;AAClB,YAAY;AACZ,QAAQ,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,IAAI,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC;AAC9G,QAAQ,MAAM,KAAK,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;AACzE,QAAQ,KAAK,IAAI,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,KAAK,KAAK,IAAI,KAAK,CAAC,CAAC;AAC1D,IAAI;AACJ,IAAI,OAAO,KAAK;AAChB;AACA;AACA,SAAS,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,GAAG,IAAI,EAAE;AAC9C;AACA,IAAI,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AACrC,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG,EAAE;AAC9B,QAAQ,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,MAAM,CAAC;AACtC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;AACtC,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE;AACxB,YAAY,CAAC,IAAI,CAAC,KAAK,EAAE;AACzB,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;AACvB,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,UAAU,IAAI,CAAC,GAAG,CAAC,CAAC;AACvD,YAAY,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AAC3B,QAAQ;AACR,IAAI;AACJ,IAAI,IAAI,EAAE,GAAG,CAAC;AACd,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,C
AAC,EAAE;AAChC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC7B,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,KAAK;AACpC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE;AAChC,QAAQ,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AACnB,IAAI,OAAO,GAAG;AACd;AACA;AACA,SAAS,mBAAmB,CAAC,CAAC,EAAE;AAChC,IAAI,OAAO,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC;AACzF;AACA,SAAS,iBAAiB,CAAC,CAAC,EAAE;AAC9B,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9C;AACA,SAAS,iBAAiB,CAAC,IAAI,EAAE,CAAC,EAAE;AACpC,IAAI,MAAM,EAAE,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;AACrC,IAAI,MAAM,EAAE,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AACnC,IAAI,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,CAAC;AACxC,QAAQ,OAAO,CAAC;AAChB,IAAI,IAAI,GAAG,GAAG,CAAC;AACf,IAAI,KAAK,MAAM,CAAC,IAAI,EAAE;AACtB,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AACrB,YAAY,GAAG,EAAE;AACjB,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM;AAC1B;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;AACzB,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAClC,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAClC,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,IAAI,CAAC;AACrB,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACpB,YAAY,KAAK,EAAE;AACnB,IAAI,MAAM,GAAG,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,KAAK;AACvC,IAAI,OAAO,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,KAAK,GAAG,GAAG;AACtC;AACA,SAAS,cAAc,CAAC,CAAC,EAAE;AAC3B,IAAI,OAAO,qCAAqC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC;AAChE;AACA,SAAS,eAAe,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE;AAC9F,IAAI,IAAI,EAAE;AACV,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,KAAK,GAAG,EAAE;AACpB;AACA,IAAI,MAAM,GAAG,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;AACxC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACf,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B,IAAI,MAAM,IAAI,GAAG,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,CAAC;AACvD,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;AAChB,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;AACtB;AACA,IAAI,MAAM,MAAM,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,CAAC,CAAC;AAC5D,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AAClB,IAAI,KAAK,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACpC,IAAI,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACjD,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACf,IAAI,KAAK,CAAC,IAAI,CAAC,gBAAgB,CAAC;AAChC;AACA,IAAI,MAAM,KAAK,GAAG,mBAAmB,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACxE,IAAI,MAAM,OAAO,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACxE,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;AACzB,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AAC3B,IAAI,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC;AAC/B;AACA,IAAI,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AACvD,IAAI,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC;AACnC;AACA,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,GAAG,KAAK,CAAC,UAAU,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,EAAE;AACxE,IAAI,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAClC;AACA,IAAI,MAAM,QAAQ,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AACjD,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC;AACvC,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B;AACA,IAAI,IAAI,KAAK,IAAI,KAAK,EAAE;AACxB,QAAQ,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAC3B,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC/C,YAAY,GAAG,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,C
AAC,CAAC,CAAC;AAC/C,QAAQ;AACR,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACnB,QAAQ,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;AAC9B,QAAQ,CAAC,CAAC,IAAI,CAAC,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;AACjC,QAAQ,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC;AACjC,IAAI;AACJ,IAAI,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE;AAC/B;AACA;AACA,MAAM,KAAK,CAAC;AACZ,IAAI,WAAW,GAAG;AAClB,QAAQ,IAAI,CAAC,CAAC,GAAG,IAAI;AACrB,QAAQ,IAAI,CAAC,EAAE,GAAG,IAAI;AACtB,QAAQ,IAAI,CAAC,KAAK,GAAG,IAAI;AACzB,IAAI;AACJ,IAAI,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,EAAE;AAC7B,QAAQ,IAAI,EAAE;AACd,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAC1B,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,MAAM,KAAK,CAAC;AACnF,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;AAChC,YAAY,IAAI,CAAC,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACxC,YAAY;AACZ,QAAQ;AACR;AACA,QAAQ,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACvC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,YAAY,CAAC,IAAI,CAAC;AAClB,YAAY,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AACrB,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACrC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC1B,YAAY;AACZ,YAAY,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC;AAC1C,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1E,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC;AACpD;AACA,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,MAAM,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AACtE,QAAQ,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACtC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,gBAAgB,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE;AACnC,gBAAgB,MAAM,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AACjC,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE;AAC3C,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AAC1C,YAAY;AACZ,QAAQ;AACR,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,MAAM;AAC7B,QAAQ;AACR;AACA,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,MAAM,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AACtE,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACzC,gBAAgB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC1C,oBAAoB,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAA
C,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC;AAClG,YAAY;AACZ,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;AACzC,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACzC,YAAY,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC1C,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;AACzC,QAAQ;AACR,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,EAAE,GAAG,EAAE;AACpB,QAAQ,IAAI,CAAC,KAAK,GAAG,GAAG;AACxB,IAAI;AACJ,IAAI,OAAO,CAAC,CAAC,EAAE;AACf,QAAQ,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,KAAK;AAC9C,YAAY,OAAO,CAAC;AACpB,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAChD,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC;AACzD,YAAY,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC9B,QAAQ;AACR,QAAQ,OAAO,CAAC;AAChB,IAAI;AACJ;AACA;AACA,SAAS,iBAAiB,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE;AAC5C,IAAI,IAAI,EAAE;AACV,IAAI,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACnD,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC;AACjG,IAAI,IAAI,SAAS,IAAI,mBAAmB,CAAC,GAAG,CAAC;AAC7C,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,OAAO,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,CAAC,CAAC;AAC7D,IAAI,IAAI,OAAO,IAAI,IAAI,IAAI,iBAAiB,CAAC,GAAG,CAAC;AACjD,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC;AACtD,IAAI,MAAM,OAAO,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;AAC/C,IAAI,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC;AACtD,IAAI,MAAM,EAAE,GAAG,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC;AACvD,IAAI,IAAI,GAAG,GAAG,IAAI;AAClB,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,IAAI,EAAE,GAAG,GAAG;AAChB,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAC5D,IAAI,MAAM,KAAK,GAAG,QAAQ,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC5D,IAAI,IAAI,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,UAAU,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,IAAI,CAAC;AACxE,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,GAAG,GAAG,KAAK,CAAC,CAAC;AACzD,IAAI,OAAO,CAAC;AACZ;AACA,SAAS,OAAO,CAAC,CAAC,EAAE;AACpB,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE;AAChB,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1B,IAAI;AACJ,SAAS;AACT,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7B,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1B,IAAI;AACJ;AACA;AACA,SAAS,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,GAAG,EAAE,WAAW,GAAG,IAAI,EAAE;AAC7D,IAAI,MAAM,GA
AG,GAAG,EAAE;AAClB,IAAI,MAAM,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC;AACjD,IAAI,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,MAAM,EAAE,GAAG,gBAAgB,CAAC,IAAI,CAAC;AACxD,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,EAAE,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;AAC9D,IAAI,MAAM,WAAW,GAAG,IAAI,GAAG,EAAE;AACjC,IAAI,IAAI,IAAI,GAAG,CAAC;AAChB,IAAI,OAAO,WAAW,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE;AAC7C,QAAQ,IAAI,OAAO,GAAG,EAAE,EAAE,OAAO,GAAG,CAAC,QAAQ;AAC7C,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAChD,YAAY,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AAClC,gBAAgB;AAChB,YAAY,MAAM,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC;AAClC,YAAY,IAAI,GAAG,GAAG,CAAC;AACvB,YAAY,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;AACzC,gBAAgB,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;AACxD,gBAAgB,IAAI,GAAG,GAAG,GAAG;AAC7B,oBAAoB,GAAG,GAAG,GAAG;AAC7B,YAAY;AACZ,YAAY,MAAM,GAAG,GAAG,MAAM,GAAG,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,GAAG,MAAM,IAAI,GAAG;AACnE,YAAY,IAAI,GAAG,GAAG,OAAO,EAAE;AAC/B,gBAAgB,OAAO,GAAG,GAAG;AAC7B,gBAAgB,OAAO,GAAG,CAAC;AAC3B,YAAY;AACZ,QAAQ;AACR,QAAQ,IAAI,OAAO,GAAG,CAAC;AACvB,YAAY;AACZ,QAAQ,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;AACtC,QAAQ,MAAM,MAAM,GAAG,CAAC,MAAM,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AACpD,QAAQ,IAAI,IAAI,GAAG,MAAM,GAAG,WAAW,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC;AACzD,YAAY;AACZ,QAAQ,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC;AACxB,QAAQ,IAAI,IAAI,MAAM;AACtB,QAAQ,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC;AAChC,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA;AACA;AACA,SAAS,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AAC1C,IAAI,IAAI,EAAE,EAAE,EAAE;AACd;AACA,IAAI,MAAM,EAAE,WAAW,GAAG,IAAI,EAAE,aAAa,GAAG,EAAE,EAAE,cAAc,GAAG,IAAI,EAAE,kBAAkB,GAAG,KAAK,GAAG,GAAG,IAAI;AAC/G,IAAI,MAAM,IAAI,GAAG,CAAC,KAAK,EAAE,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC;AAC7D,IAAI,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAAC,IAAI,CAAC;AAChE,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,EAAE,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;AAChE,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC;AAC3B,IAAI,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC9B,IAAI,MAAM,KAAK,GAAG,aAAa,GAAG,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,aAAa,CAAC,GAAG,SAAS;AACtF,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,SAAS,GAAG,EAAE;AACxB,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;AACnC,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;AACtC,QAAQ,MAAM,KAAK,GAAG,aAAa,GAAG,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,aAAa,EAAE,IAAI,GAAG,CAAC,CAAC,GAAG,SAAS;AACpG,QAAQ,MAAM,KAAK,GAAG,eAAe,CAAC,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC;AACzH,QAAQ,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC;AAC5B,QAAQ,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,CAAC;AACxD,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;AACrB,IAAI;AACJ,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;AAC3D,IAAI,IAAI,OAAO,EAAE;AACjB,QAAQ,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC;AACjE,QAAQ,IAAI,MAAM,IAAI,CAAC,EAAE;AACzB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AAC7C,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACzE,QAAQ;AACR,IAAI;AACJ,IAAI,MAAM,EAAE,GAAG,IAAI,KAAK,EAAE;
AAC1B,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,CAAC;AAC7B,IAAI,IAAI,IAAI,GAAG,QAAQ,EAAE,IAAI,GAAG,CAAC,QAAQ;AACzC,IAAI,MAAM,SAAS,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC/C,IAAI,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE;AAC/B,QAAQ,IAAI,CAAC,GAAG,IAAI;AACpB,YAAY,IAAI,GAAG,CAAC;AACpB,QAAQ,IAAI,CAAC,GAAG,IAAI;AACpB,YAAY,IAAI,GAAG,CAAC;AACpB,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,CAAC;AAC7C,IAAI,MAAM,YAAY,GAAG,kBAAkB,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,KAAK,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,EAAE,GAAG,SAAS;AACtK,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACxC,QAAQ,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,IAAI,KAAK;AACjD,QAAQ,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC;AACtF,QAAQ,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,GAAG,EAAE,UAAU,EAAE,CAAC,EAAE,CAAC;AAC1F,QAAQ,IAAI,cAAc;AAC1B,YAAY,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,IAAI,YAAY;AACxB,YAAY,IAAI,CAAC,cAAc,GAAG,YAAY;AAC9C,QAAQ,OAAO,IAAI;AACnB,IAAI,CAAC,CAAC;AACN,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ,CAAC;AAClD,IAAI,OAAO,MAAM;AACjB;AACA;AACA,SAAS,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AACtC;AACA,IAAI,MAAM,EAAE,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,EAAE,MAAM,GAAG,IAAI,EAAE,SAAS,GAAG,GAAG,EAAE,WAAW,GAAG,IAAI,EAAE,GAAG,IAAI;AAC7G,IAAI,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;AAC3B,QAAQ,OAAO,EAAE;AACjB,IAAI,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ;AAClC,IAAI,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,IAAI,UAAU,IAAI,CAAC,CAAC,QAAQ,KAAK,GAAG,GAAG,UAAU,CAAC,CAAC;AACvG,IAAI,MAAM,IAAI,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,GAAG,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC7D,IAAI,IAAI,CAAC,MAAM,EAAE;AACjB,QAAQ,MAAM,GAAG,GAAG,EAAE;AACtB,QAAQ,IAAI,IAAI,GAAG,CAAC;AACpB,QAAQ,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC9B,YAAY,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AAChD,YAAY,IAAI,IAAI,GAAG,GAAG,GAAG,WAAW,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC;AAC1D,gBAAgB;AAChB,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACvB,YAAY,IAAI,IAAI,GAAG;AACvB,QAAQ;AACR,QAAQ,OAAO,GAAG;AAClB,IAAI;AACJ,IAAI,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AAC3I,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,WAAW,CAAC;AACrD;AACA;AACA,SAAS,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AACnD;AACA,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC;AAC9C,IAAI,OAAO,SAAS,CAAC,MAAM,EAAE,IAAI,CAAC;AAClC;AAk2CA,SAAS,WAAW,CAAC,KAAK,EAAE,IAAI,EAAE;AAClC;AACA,IAAI,IAAI,KAAK,CAAC,OAAO,KAAK,kBAAkB,IAAI,KAAK,CAAC,OAAO,KAAK,kBAAkB,EAAE;AACtF,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC,2BAA2B,EAAE,KAAK,CAAC,OAAO,CAAC,mDAAmD,CAAC,CAAC;AACzH,IAAI;AACJ;AACA,IAAI,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC,CAAC;AACrE;AACA,IAAI,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC;AACzC,IAAI,MAAM,GAAG,GAAG,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C;AACA,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC1C,QAAQ,OAAO,EAAE,CAAC,CAAC,OAAO;AAC1B,QAAQ,OAAO,EAAE,CAAC,CAAC,OAAO,IAAI,EAAE;AAChC,QAAQ,IAAI,EAAE,CAAC,CAAC,IAAI;AACpB,QAAQ,KAAK,EAAE,CAAC,CAAC,KAAK;AACtB,QAAQ,KAAK,EAAE,CAAC,CAAC;AACjB,KAAK,CAAC,CAAC;AACP;AACA,IAAI,MAAM,SAAS,GAAG,
KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,IAAI;AACjD,QAAQ,MAAM,CAAC,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAQ,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG;AAChC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACvB,QAAQ,OAAO,CAAC;AAChB,IAAI,CAAC,CAAC;AACN;AACA,IAAI,MAAM,YAAY,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC;AACvD,IAAI,MAAM,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACxE;AACA,IAAI,MAAM,aAAa,GAAmF,CAAC,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,KAAK,SAAS,CAAC,MAAM;AACzK,IAAI,IAAI,SAAS;AACjB,IAGS,IAAI,aAAa,EAAE;AAC5B,QAAQ,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC;AACnF,IAAI;AACJ,SAAS;AACT,QAAQ,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClE,IAAI;AACJ,IAAI,OAAO;AACX,QAAQ,QAAQ;AAChB,QAAQ,QAAQ;AAChB,QAAQ,GAAG;AACX,QAAQ,MAAM;AACd,QAAQ,SAAS;AACjB,QAAQ,YAAY;AACpB,QAAQ,WAAW;AACnB,QAAQ,SAAS;AACjB,KAAK;AACL;;ACxoXA;AACA;AA8BA;;;;;AAKG;MACU,eAAe,CAAA;AAKxB,IAAA,WAAA,CAAY,MAA8B,EAAA;QAJlC,IAAA,CAAA,UAAU,GAA8B,IAAI;QAC5C,IAAA,CAAA,KAAK,GAA2B,IAAI;QAIxC,IAAI,CAAC,MAAM,GAAG;AACV,YAAA,SAAS,EAAE;AACP,gBAAA,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,IAAI,IAAI;AAC3C,gBAAA,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,IAAI,GAAG;AAClD,gBAAA,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,IAAI,IAAI;AACzC,gBAAA,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,IAAI,GAAG;AAC9C,gBAAA,UAAU,EAAE,MAAM,EAAE,SAAS,EAAE,UAAU,IAAI;AAChD,aAAA;AACD,YAAA,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,CAAC;AACvB,YAAA,OAAO,EAAE,MAAM,EAAE,OAAO,IAAI;SAC/B;IACL;AAEA;;AAEG;IACH,MAAM,SAAS,CAAC,KAAsB,EAAA;AAClC,QAAA,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE;AAEnC,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,UAAU,GAAG,WAAW,CAAC,KAAK,CAAC;AAEpC,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACrB,MAAM,QAAQ,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;AAC9C,YAAA,OAAO,CAAC,GAAG,CAAC,CAAA,kCAAA,EAAqC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA,EAAA,CAAI,CAAC;YACzE,OAAO,CAAC,GAAG,CAAC,CAAA,kBAAA,EAAqB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAA,SAAA,EAAY,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAA,MAAA,CAAQ,CAAC;QACpH;IACJ;AAEA;;AAEG;IACH,MAAM,gBAAgB,CAAC,GAAW,EAAA;AAC9B,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC;AACjC,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,CAAA,2BAAA,EAA8B,GAAG,CAAA,EAAA,EAAK,QAAQ,CAAC,MAAM,CAAA,CAAE,CAAC;QAC5E;AACA,QAAA,MAAM,KAAK,GAAoB,MAAM,QAAQ,CAAC,IAAI,EAAE;AACpD,QAAA,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC/B;AAEA;;AAEG;IACH,OAAO,GAAA;AACH,QAAA,OAAO,IAAI,CAAC,UAAU,KAAK,IAAI;IACnC;AAEA;;AAEG;IACH,YAAY,GAAA;QACR,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI,CAAC,UAAU;AAAE,YAAA,OAAO,IAAI;QAEhD,OAAO;AACH,YAAA,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO;AAC3B,YAAA,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO;AAC3B,YAAA,aAAa,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM;AAC5C,YAAA,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AAC7C,YAAA,aAAa,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,MAAM;YAClD,eAAe,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC;AACrD,YAAA,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;SACxB;IACL;AAEA;;AAEG;IACH,MAAM,GAAG,CAAC,KAAa,EAAA;AACnB,QAAA,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE;QAEnC,IAAI,CAAC,IAAI,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACjC,OAAO;AACH,gBAAA,MAAM,EAAE,oDAAoD;AAC5D,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,cAAc,EAAE,CAAC;AACjB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,cAAc,EAAE;aACnB;QACL;AAEA,QAAA,IAAI;;AAEA,YAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAmC;AAC/D,YAAA,MAAM,MAAM,GAAI,QAAQ,CAAC,MAAiB,IAAI,KAAK;AACnD,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,YAAA,MAAM,IAAI,GAAI,QAAQ,CAAC,IAAe,IAAI,GAAG;AAC7C,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,KAAK;AACjD,YAAA,MAAM,QAAQ,GAAI,QAAQ,CAA
C,QAAmB,IAAI,GAAG;AACrD,YAAA,MAAM,OAAO,GAAI,QAAQ,CAAC,OAAmB,IAAI,IAAI;AACrD,YAAA,MAAM,WAAW,GAAI,QAAQ,CAAC,WAAuB,IAAI,KAAK;AAC9D,YAAA,MAAM,SAAS,GAAI,QAAQ,CAAC,SAAoB,IAAI,GAAG;;YAGvD,MAAM,eAAe,GAAG,cAAc,CAAC;gBACnC,KAAK;AACL,gBAAA,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;AAC9B,gBAAA,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ;AAClC,gBAAA,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,GAAG;AACxB,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,gBAAA,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY;AAC1C,gBAAA,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,WAAW;AACxC,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AACxC,gBAAA,MAAM,EAAE,MAAoC;gBAC5C,KAAK;gBACL,KAAK;gBACL,IAAI;gBACJ,KAAK;gBACL,QAAQ;gBACR,OAAO;gBACP,WAAW;gBACX,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,GAAG,CAAC;gBAC1B;AACH,aAAA,CAAC;AAEF,YAAA,IAAI,OAA2B;;AAG/B,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,IAAI,eAAe,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;AACnE,gBAAA,MAAM,eAAe,GAAG,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;oBAC3F,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,oBAAA,UAAU,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACzC,iBAAA,CAAC,CAAC;AAEH,gBAAA,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,eAAe,EAAE;AACrD,oBAAA,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW;AAC9C,oBAAA,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM;AACpC,oBAAA,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS;AAC1C,oBAAA,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC;AACrC,iBAAA,CAAC;gBAEF,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,KAAoB,MAAM;oBACzE,OAAO,EAAE,KAAK,CAAC,OAAO;oBACtB,OAAO,EAAE,KAAK,CAAC,OAAO;oBACtB,IAAI,EAAE,KAAK,CAAC,IAAI;AAChB,oBAAA,KAAK,EAAE,KAAK,CAAC,UAAU,IAAI,CAAC;oBAC5B,WAAW,EAAE,KAAK,CAAC,QAAQ;oBAC3B,WAAW,EAAE,KAAK,CAAC;AACtB,iBAAA,CAAC,CAAC;YACP;iBAAO;gBACH,OAAO,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;oBACxG,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,oBAAA,KAAK,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACpC,iBAAA,CAAC,CAAC;YACP;;AAGA,YAAA,MAAM,MAAM,GAAG,MAAM,kBAAkB,CACnC,KAAK,EACL,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CACjF;YAED,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;;YAGpD,MAAM,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC;AACvC,YAAA,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM,CAAC;;YAGrE,MAAM,mBAAmB,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;YAErD,OAAO;gBACH,MAAM;gBACN,UAAU;gBACV,OAAO;gBACP,cAAc;AACd,gBAAA,QAAQ,EAAE,KAAK;gBACf;aACH;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;AACpD,YAAA,OAAO,CAAC,KAAK,CAAC,2CAA2C,EAAE,KAAK,CAAC;YAEjE,OAAO;AACH,gBAAA,MAAM,EAAE,2DAA2D;AACnE,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;gBACX,cAAc;AACd,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,cAAc,EAAE,KAAK,YAAY,KAAK,GAAG,KAAK,CAAC,OAAO,GAAG;aAC5D;QACL;IACJ;AAEA;;AAEG;AACH,IAAA,MAAM,QAAQ,CAAC,KAAa,EAAE,IAAa,EAAA;QACvC,IAAI,CAAC,IAAI,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAA,OAAO,EAAE;QACb;QAEA,MAAM,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI;;AAGlC,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAmC;AAC/D,QAAA,MAAM,MAAM,GAAI,QAAQ,CAAC,MAAiB,IAAI,KAAK;AACnD,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,QAAA,MAAM,IAAI,GAAI,QAAQ,CAAC,IAAe,IAAI,GAAG;AAC7C,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,KAAK;AACjD,QAAA,MAAM,QAAQ,GAAI,QAAQ,CAAC,QAAmB,IAAI,GAAG;AACrD,QAAA,MAAM,OAAO,GAAI,QAAQ,CAAC,OAAmB,IAAI
,IAAI;AACrD,QAAA,MAAM,WAAW,GAAI,QAAQ,CAAC,WAAuB,IAAI,KAAK;AAC9D,QAAA,MAAM,SAAS,GAAI,QAAQ,CAAC,SAAoB,IAAI,GAAG;QAEvD,MAAM,eAAe,GAAG,cAAc,CAAC;YACnC,KAAK;AACL,YAAA,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;AAC9B,YAAA,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ;AAClC,YAAA,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,GAAG;AACxB,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,YAAA,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY;AAC1C,YAAA,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,WAAW;AACxC,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AACxC,YAAA,MAAM,EAAE,MAAoC;YAC5C,KAAK;YACL,KAAK;YACL,IAAI;YACJ,KAAK;YACL,QAAQ;YACR,OAAO;YACP,WAAW;AACX,YAAA,IAAI,EAAE,CAAC;YACP;AACH,SAAA,CAAC;AAEF,QAAA,OAAO,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;YAC1E,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,YAAA,KAAK,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACpC,SAAA,CAAC,CAAC;IACP;AAEA;;AAEG;AACH,IAAA,aAAa,CAAC,KAAa,EAAA;AACvB,QAAA,MAAM,UAAU,GAAG,KAAK,CAAC,WAAW,EAAE;QACtC,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;;QAGrC,MAAM,eAAe,GAAG,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC;AAC7F,QAAA,MAAM,kBAAkB,GAAG,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,WAAW,CAAC;AACxF,QAAA,MAAM,kBAAkB,GAAG,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,SAAS,CAAC;AAC7F,QAAA,MAAM,kBAAkB,GAAG,CAAC,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;QAE9F,IAAI,IAAI,GAAqC,SAAS;QACtD,IAAI,UAAU,GAAG,GAAG;QACpB,MAAM,QAAQ,GAAa,EAAE;;AAG7B,QAAA,KAAK,MAAM,MAAM,IAAI,kBAAkB,EAAE;AACrC,YAAA,IAAI,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;gBAC7B,IAAI,GAAG,cAAc;gBACrB,UAAU,GAAG,GAAG;AAChB,gBAAA,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC;gBACrB;YACJ;QACJ;AAEA,QAAA,IAAI,IAAI,KAAK,SAAS,EAAE;;AAEpB,YAAA,MAAM,SAAS,GAAG,KAAK,CAAC,CAAC,CAAC;YAE1B,IAAI,kBAAkB,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;gBAChG,IAAI,GAAG,YAAY;gBACnB,UAAU,GAAG,GAAG;gBAChB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,MAAM,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5E;iBAAO,IAAI,kBAAkB,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;gBACvG,IAAI,GAAG,YAAY;gBACnB,UAAU,GAAG,GAAG;gBAChB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,MAAM,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5E;AAAO,iBAAA,IAAI,eAAe,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;gBAC5C,IAAI,GAAG,SAAS;gBAChB,UAAU,GAAG,GAAG;AAChB,gBAAA,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC;YAC5B;QACJ;AAEA,QAAA,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE;IACzC;AAEA;;AAEG;IACK,mBAAmB,CAAC,QAAgB,EAAE,WAAmB,EAAA;QAC7D,IAAI,WAAW,KAAK,CAAC;AAAE,YAAA,OAAO,MAAM;QACpC,IAAI,QAAQ,IAAI,GAAG;YAAE,OAAO,QAAQ,CAAC;QACrC,IAAI,QAAQ,IAAI,GAAG;AAAE,YAAA,OAAO,QAAQ;QACpC,IAAI,QAAQ,IAAI,GAAG;AAAE,YAAA,OAAO,KAAK;AACjC,QAAA,OAAO,KAAK;IAChB;AAEA;;AAEG;IACH,KAAK,GAAA;AACD,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI;AACtB,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI;IACrB;AACH;;;;;;;ACjWD;AACA;AAEA;AA0FM,SAAU,YAAY,CAAC,MAAwB,EAAA;AACjD,IAAA,OAAO,IAAI,gBAAgB,CAAC,MAAM,CAAC;AACvC;;;;","x_google_ignoreList":[1,2,14]}
+ {"version":3,"file":"cybernetic-chatbot-client.esm.js","sources":["../src/ApiClient.ts","../node_modules/idb/build/wrap-idb-value.js","../node_modules/idb/build/index.js","../src/CyberneticCache.ts","../src/CyberneticLocalRAG.ts","../src/CyberneticOfflineStorage.ts","../src/license/base64url.ts","../src/license/verifier.ts","../src/license/licenseManager.ts","../src/CyberneticClient.ts","../src/config.ts","../src/agentic/CyberneticIntentClassifier.ts","../src/agentic/CyberneticAgent.ts","../src/agentic/register.ts","../node_modules/@astermind/astermind-community/dist/astermind.esm.js","../src/omega/OmegaOfflineRAG.ts","../src/index.ts"],"sourcesContent":[null,"const instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c);\n\nlet idbProxyableTypes;\nlet cursorAdvanceMethods;\n// This is a function to prevent it throwing up in node environments.\nfunction getIdbProxyableTypes() {\n return (idbProxyableTypes ||\n (idbProxyableTypes = [\n IDBDatabase,\n IDBObjectStore,\n IDBIndex,\n IDBCursor,\n IDBTransaction,\n ]));\n}\n// This is a function to prevent it throwing up in node environments.\nfunction getCursorAdvanceMethods() {\n return (cursorAdvanceMethods ||\n (cursorAdvanceMethods = [\n IDBCursor.prototype.advance,\n IDBCursor.prototype.continue,\n IDBCursor.prototype.continuePrimaryKey,\n ]));\n}\nconst cursorRequestMap = new WeakMap();\nconst transactionDoneMap = new WeakMap();\nconst transactionStoreNamesMap = new WeakMap();\nconst transformCache = new WeakMap();\nconst reverseTransformCache = new WeakMap();\nfunction promisifyRequest(request) {\n const promise = new Promise((resolve, reject) => {\n const unlisten = () => {\n request.removeEventListener('success', success);\n request.removeEventListener('error', error);\n };\n const success = () => {\n resolve(wrap(request.result));\n unlisten();\n };\n const error = () => {\n reject(request.error);\n unlisten();\n };\n request.addEventListener('success', success);\n request.addEventListener('error', error);\n });\n promise\n .then((value) => {\n // Since cursoring reuses the IDBRequest (*sigh*), we cache it for later retrieval\n // (see wrapFunction).\n if (value instanceof IDBCursor) {\n cursorRequestMap.set(value, request);\n }\n // Catching to avoid \"Uncaught Promise exceptions\"\n })\n .catch(() => { });\n // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. 
This\n // is because we create many promises from a single IDBRequest.\n reverseTransformCache.set(promise, request);\n return promise;\n}\nfunction cacheDonePromiseForTransaction(tx) {\n // Early bail if we've already created a done promise for this transaction.\n if (transactionDoneMap.has(tx))\n return;\n const done = new Promise((resolve, reject) => {\n const unlisten = () => {\n tx.removeEventListener('complete', complete);\n tx.removeEventListener('error', error);\n tx.removeEventListener('abort', error);\n };\n const complete = () => {\n resolve();\n unlisten();\n };\n const error = () => {\n reject(tx.error || new DOMException('AbortError', 'AbortError'));\n unlisten();\n };\n tx.addEventListener('complete', complete);\n tx.addEventListener('error', error);\n tx.addEventListener('abort', error);\n });\n // Cache it for later retrieval.\n transactionDoneMap.set(tx, done);\n}\nlet idbProxyTraps = {\n get(target, prop, receiver) {\n if (target instanceof IDBTransaction) {\n // Special handling for transaction.done.\n if (prop === 'done')\n return transactionDoneMap.get(target);\n // Polyfill for objectStoreNames because of Edge.\n if (prop === 'objectStoreNames') {\n return target.objectStoreNames || transactionStoreNamesMap.get(target);\n }\n // Make tx.store return the only store in the transaction, or undefined if there are many.\n if (prop === 'store') {\n return receiver.objectStoreNames[1]\n ? undefined\n : receiver.objectStore(receiver.objectStoreNames[0]);\n }\n }\n // Else transform whatever we get back.\n return wrap(target[prop]);\n },\n set(target, prop, value) {\n target[prop] = value;\n return true;\n },\n has(target, prop) {\n if (target instanceof IDBTransaction &&\n (prop === 'done' || prop === 'store')) {\n return true;\n }\n return prop in target;\n },\n};\nfunction replaceTraps(callback) {\n idbProxyTraps = callback(idbProxyTraps);\n}\nfunction wrapFunction(func) {\n // Due to expected object equality (which is enforced by the caching in `wrap`), we\n // only create one new func per func.\n // Edge doesn't support objectStoreNames (booo), so we polyfill it here.\n if (func === IDBDatabase.prototype.transaction &&\n !('objectStoreNames' in IDBTransaction.prototype)) {\n return function (storeNames, ...args) {\n const tx = func.call(unwrap(this), storeNames, ...args);\n transactionStoreNamesMap.set(tx, storeNames.sort ? storeNames.sort() : [storeNames]);\n return wrap(tx);\n };\n }\n // Cursor methods are special, as the behaviour is a little more different to standard IDB. In\n // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the\n // cursor. It's kinda like a promise that can resolve with many values. 
That doesn't make sense\n // with real promises, so each advance methods returns a new promise for the cursor object, or\n // undefined if the end of the cursor has been reached.\n if (getCursorAdvanceMethods().includes(func)) {\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n func.apply(unwrap(this), args);\n return wrap(cursorRequestMap.get(this));\n };\n }\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n return wrap(func.apply(unwrap(this), args));\n };\n}\nfunction transformCachableValue(value) {\n if (typeof value === 'function')\n return wrapFunction(value);\n // This doesn't return, it just creates a 'done' promise for the transaction,\n // which is later returned for transaction.done (see idbObjectHandler).\n if (value instanceof IDBTransaction)\n cacheDonePromiseForTransaction(value);\n if (instanceOfAny(value, getIdbProxyableTypes()))\n return new Proxy(value, idbProxyTraps);\n // Return the same value back if we're not going to transform it.\n return value;\n}\nfunction wrap(value) {\n // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because\n // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.\n if (value instanceof IDBRequest)\n return promisifyRequest(value);\n // If we've already transformed this value before, reuse the transformed value.\n // This is faster, but it also provides object equality.\n if (transformCache.has(value))\n return transformCache.get(value);\n const newValue = transformCachableValue(value);\n // Not all types are transformed.\n // These may be primitive types, so they can't be WeakMap keys.\n if (newValue !== value) {\n transformCache.set(value, newValue);\n reverseTransformCache.set(newValue, value);\n }\n return newValue;\n}\nconst unwrap = (value) => reverseTransformCache.get(value);\n\nexport { reverseTransformCache as a, instanceOfAny as i, replaceTraps as r, unwrap as u, wrap as w };\n","import { w as wrap, r as replaceTraps } from './wrap-idb-value.js';\nexport { u as unwrap, w as wrap } from './wrap-idb-value.js';\n\n/**\n * Open a database.\n *\n * @param name Name of the database.\n * @param version Schema version.\n * @param callbacks Additional callbacks.\n */\nfunction openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) {\n const request = indexedDB.open(name, version);\n const openPromise = wrap(request);\n if (upgrade) {\n request.addEventListener('upgradeneeded', (event) => {\n upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event);\n });\n }\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event.newVersion, event));\n }\n openPromise\n .then((db) => {\n if (terminated)\n db.addEventListener('close', () => terminated());\n if (blocking) {\n db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event));\n }\n })\n .catch(() => { });\n return openPromise;\n}\n/**\n * Delete a database.\n *\n * @param name Name of the database.\n */\nfunction deleteDB(name, { blocked } = {}) {\n const request = indexedDB.deleteDatabase(name);\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to 
https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event));\n }\n return wrap(request).then(() => undefined);\n}\n\nconst readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];\nconst writeMethods = ['put', 'add', 'delete', 'clear'];\nconst cachedMethods = new Map();\nfunction getMethod(target, prop) {\n if (!(target instanceof IDBDatabase &&\n !(prop in target) &&\n typeof prop === 'string')) {\n return;\n }\n if (cachedMethods.get(prop))\n return cachedMethods.get(prop);\n const targetFuncName = prop.replace(/FromIndex$/, '');\n const useIndex = prop !== targetFuncName;\n const isWrite = writeMethods.includes(targetFuncName);\n if (\n // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.\n !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) ||\n !(isWrite || readMethods.includes(targetFuncName))) {\n return;\n }\n const method = async function (storeName, ...args) {\n // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(\n const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly');\n let target = tx.store;\n if (useIndex)\n target = target.index(args.shift());\n // Must reject if op rejects.\n // If it's a write operation, must reject if tx.done rejects.\n // Must reject with op rejection first.\n // Must resolve with op value.\n // Must handle both promises (no unhandled rejections)\n return (await Promise.all([\n target[targetFuncName](...args),\n isWrite && tx.done,\n ]))[0];\n };\n cachedMethods.set(prop, method);\n return method;\n}\nreplaceTraps((oldTraps) => ({\n ...oldTraps,\n get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),\n has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop),\n}));\n\nexport { deleteDB, openDB };\n",null,null,null,null,null,null,null,null,null,null,null,"import * as fs from 'fs';\nimport * as path from 'path';\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Matrix.ts — tolerant, safe helpers with dimension checks and stable ops\nclass DimError extends Error {\n constructor(msg) {\n super(msg);\n this.name = 'DimError';\n }\n}\nconst EPS$5 = 1e-12;\n/* ===================== Array-like coercion helpers ===================== */\n// ✅ Narrow to ArrayLike<number> so numeric indexing is allowed\nfunction isArrayLikeRow(row) {\n return row != null && typeof row.length === 'number';\n}\n/**\n * Coerce any 2D array-like into a strict rectangular number[][]\n * - If width is not provided, infer from the first row's length\n * - Pads/truncates to width\n * - Non-finite values become 0\n */\nfunction ensureRectNumber2D(M, width, name = 'matrix') {\n if (!M || typeof M.length !== 'number') {\n throw new DimError(`${name} must be a non-empty 2D array`);\n }\n const rows = Array.from(M);\n if (rows.length === 0)\n throw new DimError(`${name} is empty`);\n const first = rows[0];\n if (!isArrayLikeRow(first))\n throw new DimError(`${name} row 0 missing/invalid`);\n const C = ((width !== null && width !== void 0 ? width : first.length) | 0);\n if (C <= 0)\n throw new DimError(`${name} has zero width`);\n const out = new Array(rows.length);\n for (let r = 0; r < rows.length; r++) {\n const src = rows[r];\n const rr = new Array(C);\n if (isArrayLikeRow(src)) {\n const sr = src; // ✅ typed\n for (let c = 0; c < C; c++) {\n const v = sr[c];\n rr[c] = Number.isFinite(v) ? 
Number(v) : 0;\n }\n }\n else {\n for (let c = 0; c < C; c++)\n rr[c] = 0;\n }\n out[r] = rr;\n }\n return out;\n}\n/**\n * Relaxed rectangularity check:\n * - Accepts any array-like rows (typed arrays included)\n * - Verifies consistent width and finite numbers\n */\nfunction assertRect(A, name = 'matrix') {\n if (!A || typeof A.length !== 'number') {\n throw new DimError(`${name} must be a non-empty 2D array`);\n }\n const rows = A.length | 0;\n if (rows <= 0)\n throw new DimError(`${name} must be a non-empty 2D array`);\n const first = A[0];\n if (!isArrayLikeRow(first))\n throw new DimError(`${name} row 0 missing/invalid`);\n const C = first.length | 0;\n if (C <= 0)\n throw new DimError(`${name} must have positive column count`);\n for (let r = 0; r < rows; r++) {\n const rowAny = A[r];\n if (!isArrayLikeRow(rowAny)) {\n throw new DimError(`${name} row ${r} invalid`);\n }\n const row = rowAny; // ✅ typed\n if ((row.length | 0) !== C) {\n throw new DimError(`${name} has ragged rows: row 0 = ${C} cols, row ${r} = ${row.length} cols`);\n }\n for (let c = 0; c < C; c++) {\n const v = row[c];\n if (!Number.isFinite(v)) {\n throw new DimError(`${name} row ${r}, col ${c} is not finite: ${v}`);\n }\n }\n }\n}\nfunction assertMulDims(A, B) {\n assertRect(A, 'A');\n assertRect(B, 'B');\n const nA = A[0].length;\n const mB = B.length;\n if (nA !== mB) {\n throw new DimError(`matmul dims mismatch: A(${A.length}x${nA}) * B(${mB}x${B[0].length})`);\n }\n}\nfunction isSquare(A) {\n return isArrayLikeRow(A === null || A === void 0 ? void 0 : A[0]) && (A.length === (A[0].length | 0));\n}\nfunction isSymmetric(A, tol = 1e-10) {\n if (!isSquare(A))\n return false;\n const n = A.length;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n if (Math.abs(A[i][j] - A[j][i]) > tol)\n return false;\n }\n }\n return true;\n}\n/* ============================== Matrix ============================== */\nclass Matrix {\n /* ========= constructors / basics ========= */\n static shape(A) {\n assertRect(A, 'A');\n return [A.length, A[0].length];\n }\n static clone(A) {\n assertRect(A, 'A');\n return ensureRectNumber2D(A, A[0].length, 'A(clone)');\n }\n static zeros(rows, cols) {\n const out = new Array(rows);\n for (let i = 0; i < rows; i++)\n out[i] = new Array(cols).fill(0);\n return out;\n }\n static identity(n) {\n const I = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++)\n I[i][i] = 1;\n return I;\n }\n static transpose(A) {\n assertRect(A, 'A');\n const m = A.length, n = A[0].length;\n const T = Matrix.zeros(n, m);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n for (let j = 0; j < n; j++)\n T[j][i] = Number(Ai[j]);\n }\n return T;\n }\n /* ========= algebra ========= */\n static add(A, B) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertRect(A, 'A');\n assertRect(B, 'B');\n if (A.length !== B.length || A[0].length !== B[0].length) {\n throw new DimError(`add dims mismatch: A(${A.length}x${A[0].length}) vs B(${B.length}x${B[0].length})`);\n }\n const m = A.length, n = A[0].length;\n const C = Matrix.zeros(m, n);\n for (let i = 0; i < m; i++) {\n const Ai = A[i], Bi = B[i], Ci = C[i];\n for (let j = 0; j < n; j++)\n Ci[j] = Ai[j] + Bi[j];\n }\n return C;\n }\n /** Adds lambda to the diagonal (ridge regularization) */\n static addRegularization(A, lambda = 1e-6) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A)) {\n throw new DimError(`addRegularization expects square matrix, got 
${A.length}x${A[0].length}`);\n }\n const C = Matrix.clone(A);\n for (let i = 0; i < C.length; i++)\n C[i][i] += lambda;\n return C;\n }\n static multiply(A, B) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertMulDims(A, B);\n const m = A.length, n = B.length, p = B[0].length;\n const C = Matrix.zeros(m, p);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n for (let k = 0; k < n; k++) {\n const aik = Number(Ai[k]);\n const Bk = B[k];\n for (let j = 0; j < p; j++)\n C[i][j] += aik * Number(Bk[j]);\n }\n }\n return C;\n }\n static multiplyVec(A, v) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!v || typeof v.length !== 'number') {\n throw new DimError(`matvec expects vector 'v' with length ${A[0].length}`);\n }\n if (A[0].length !== v.length) {\n throw new DimError(`matvec dims mismatch: A cols ${A[0].length} vs v len ${v.length}`);\n }\n const m = A.length, n = v.length;\n const out = new Array(m).fill(0);\n for (let i = 0; i < m; i++) {\n const Ai = A[i];\n let s = 0;\n for (let j = 0; j < n; j++)\n s += Number(Ai[j]) * Number(v[j]);\n out[i] = s;\n }\n return out;\n }\n /* ========= decompositions / solve ========= */\n static cholesky(A, jitter = 0) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A))\n throw new DimError(`cholesky expects square matrix, got ${A.length}x${A[0].length}`);\n const n = A.length;\n const L = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n if (i === j) {\n const v = sum + jitter;\n L[i][j] = Math.sqrt(Math.max(v, EPS$5));\n }\n else {\n L[i][j] = sum / L[j][j];\n }\n }\n }\n return L;\n }\n static solveCholesky(A, B, jitter = 1e-10) {\n A = ensureRectNumber2D(A, undefined, 'A');\n B = ensureRectNumber2D(B, undefined, 'B');\n assertRect(A, 'A');\n assertRect(B, 'B');\n if (!isSquare(A) || A.length !== B.length) {\n throw new DimError(`solveCholesky dims: A(${A.length}x${A[0].length}) vs B(${B.length}x${B[0].length})`);\n }\n const n = A.length, k = B[0].length;\n const L = Matrix.cholesky(A, jitter);\n // Solve L Z = B (forward)\n const Z = Matrix.zeros(n, k);\n for (let i = 0; i < n; i++) {\n for (let c = 0; c < k; c++) {\n let s = B[i][c];\n for (let p = 0; p < i; p++)\n s -= L[i][p] * Z[p][c];\n Z[i][c] = s / L[i][i];\n }\n }\n // Solve L^T X = Z (backward)\n const X = Matrix.zeros(n, k);\n for (let i = n - 1; i >= 0; i--) {\n for (let c = 0; c < k; c++) {\n let s = Z[i][c];\n for (let p = i + 1; p < n; p++)\n s -= L[p][i] * X[p][c];\n X[i][c] = s / L[i][i];\n }\n }\n return X;\n }\n static inverse(A) {\n A = ensureRectNumber2D(A, undefined, 'A');\n assertRect(A, 'A');\n if (!isSquare(A))\n throw new DimError(`inverse expects square matrix, got ${A.length}x${A[0].length}`);\n const n = A.length;\n const M = Matrix.clone(A);\n const I = Matrix.identity(n);\n // Augment [M | I]\n const aug = new Array(n);\n for (let i = 0; i < n; i++)\n aug[i] = M[i].concat(I[i]);\n const cols = 2 * n;\n for (let p = 0; p < n; p++) {\n // Pivot\n let maxRow = p, maxVal = Math.abs(aug[p][p]);\n for (let r = p + 1; r < n; r++) {\n const v = Math.abs(aug[r][p]);\n if (v > maxVal) {\n maxVal = v;\n maxRow = r;\n }\n }\n if (maxVal < EPS$5)\n throw new Error('Matrix is singular or ill-conditioned');\n if (maxRow !== p) {\n const tmp = aug[p];\n aug[p] = aug[maxRow];\n aug[maxRow] = tmp;\n }\n // Normalize pivot row\n const piv = aug[p][p];\n const 
invPiv = 1 / piv;\n for (let c = 0; c < cols; c++)\n aug[p][c] *= invPiv;\n // Eliminate other rows\n for (let r = 0; r < n; r++) {\n if (r === p)\n continue;\n const f = aug[r][p];\n if (Math.abs(f) < EPS$5)\n continue;\n for (let c = 0; c < cols; c++)\n aug[r][c] -= f * aug[p][c];\n }\n }\n // Extract right half as inverse\n const inv = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n for (let j = 0; j < n; j++)\n inv[i][j] = aug[i][n + j];\n }\n return inv;\n }\n /* ========= helpers ========= */\n static inverseSPDOrFallback(A) {\n if (isSymmetric(A)) {\n try {\n return Matrix.solveCholesky(A, Matrix.identity(A.length), 1e-10);\n }\n catch (_a) {\n // fall through\n }\n }\n return Matrix.inverse(A);\n }\n /* ========= Symmetric Eigen (Jacobi) & Inverse Square Root ========= */\n static assertSquare(A, ctx = 'Matrix') {\n assertRect(A, ctx);\n if (!isSquare(A)) {\n throw new DimError(`${ctx}: expected square matrix, got ${A.length}x${A[0].length}`);\n }\n }\n static eigSym(A, maxIter = 64, tol = 1e-12) {\n A = ensureRectNumber2D(A, undefined, 'eigSym/A');\n Matrix.assertSquare(A, 'eigSym');\n const n = A.length;\n const B = Matrix.clone(A);\n let V = Matrix.identity(n);\n const abs = Math.abs;\n const offdiagNorm = () => {\n let s = 0;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = B[i][j];\n s += v * v;\n }\n }\n return Math.sqrt(s);\n };\n for (let it = 0; it < maxIter; it++) {\n if (offdiagNorm() <= tol)\n break;\n let p = 0, q = 1, max = 0;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = abs(B[i][j]);\n if (v > max) {\n max = v;\n p = i;\n q = j;\n }\n }\n }\n if (max <= tol)\n break;\n const app = B[p][p], aqq = B[q][q], apq = B[p][q];\n const tau = (aqq - app) / (2 * apq);\n const t = Math.sign(tau) / (abs(tau) + Math.sqrt(1 + tau * tau));\n const c = 1 / Math.sqrt(1 + t * t);\n const s = t * c;\n const Bpp = c * c * app - 2 * s * c * apq + s * s * aqq;\n const Bqq = s * s * app + 2 * s * c * apq + c * c * aqq;\n B[p][p] = Bpp;\n B[q][q] = Bqq;\n B[p][q] = B[q][p] = 0;\n for (let k = 0; k < n; k++) {\n if (k === p || k === q)\n continue;\n const aip = B[k][p], aiq = B[k][q];\n const new_kp = c * aip - s * aiq;\n const new_kq = s * aip + c * aiq;\n B[k][p] = B[p][k] = new_kp;\n B[k][q] = B[q][k] = new_kq;\n }\n for (let k = 0; k < n; k++) {\n const vip = V[k][p], viq = V[k][q];\n V[k][p] = c * vip - s * viq;\n V[k][q] = s * vip + c * viq;\n }\n }\n const vals = new Array(n);\n for (let i = 0; i < n; i++)\n vals[i] = B[i][i];\n const order = vals.map((v, i) => [v, i]).sort((a, b) => a[0] - b[0]).map(([, i]) => i);\n const values = order.map(i => vals[i]);\n const vectors = Matrix.zeros(n, n);\n for (let r = 0; r < n; r++) {\n for (let c = 0; c < n; c++)\n vectors[r][c] = V[r][order[c]];\n }\n return { values, vectors };\n }\n static invSqrtSym(A, eps = 1e-10) {\n A = ensureRectNumber2D(A, undefined, 'invSqrtSym/A');\n Matrix.assertSquare(A, 'invSqrtSym');\n const { values, vectors: U } = Matrix.eigSym(A);\n const n = values.length;\n const Dm12 = Matrix.zeros(n, n);\n for (let i = 0; i < n; i++) {\n const lam = Math.max(values[i], eps);\n Dm12[i][i] = 1 / Math.sqrt(lam);\n }\n const UD = Matrix.multiply(U, Dm12);\n return Matrix.multiply(UD, Matrix.transpose(U));\n }\n}\n\n// © 2026 AsterMind AI Co. 
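// --- Sanity-check sketch for the solvers above (illustrative 2x2 SPD matrix).
// For symmetric positive-definite A, solveCholesky(A, I) and inverse(A)
// should agree, and invSqrtSym(A) * A * invSqrtSym(A) should be close to I.
const Aspd = [[4, 1], [1, 3]];
const invChol = Matrix.solveCholesky(Aspd, Matrix.identity(2), 1e-10); // via L*Lᵀ
const invGJ = Matrix.inverse(Aspd);             // Gauss-Jordan, partial pivoting
const Whalf = Matrix.invSqrtSym(Aspd);          // A^{-1/2} via Jacobi eigensolve
console.log(invChol, invGJ);                    // the two inverses should match
console.log(Matrix.multiply(Matrix.multiply(Whalf, Aspd), Whalf)); // ≈ identity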
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// Activations.ts - Common activation functions (with derivatives)\nclass Activations {\n /* ========= Forward ========= */\n /** Rectified Linear Unit */\n static relu(x) {\n return x > 0 ? x : 0;\n }\n /** Leaky ReLU with configurable slope for x<0 (default 0.01) */\n static leakyRelu(x, alpha = 0.01) {\n return x >= 0 ? x : alpha * x;\n }\n /** Logistic sigmoid */\n static sigmoid(x) {\n return 1 / (1 + Math.exp(-x));\n }\n /** Hyperbolic tangent */\n static tanh(x) {\n return Math.tanh(x);\n }\n /** Linear / identity activation */\n static linear(x) {\n return x;\n }\n /**\n * GELU (Gaussian Error Linear Unit), tanh approximation.\n * 0.5 * x * (1 + tanh(√(2/π) * (x + 0.044715 x^3)))\n */\n static gelu(x) {\n const k = Math.sqrt(2 / Math.PI);\n const u = k * (x + 0.044715 * x * x * x);\n return 0.5 * x * (1 + Math.tanh(u));\n }\n /**\n * Softmax with numerical stability and optional temperature.\n * @param arr logits\n * @param temperature >0; higher = flatter distribution\n */\n static softmax(arr, temperature = 1) {\n const t = Math.max(temperature, 1e-12);\n let max = -Infinity;\n for (let i = 0; i < arr.length; i++) {\n const v = arr[i] / t;\n if (v > max)\n max = v;\n }\n const exps = new Array(arr.length);\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n const e = Math.exp(arr[i] / t - max);\n exps[i] = e;\n sum += e;\n }\n const denom = sum || 1e-12;\n for (let i = 0; i < exps.length; i++)\n exps[i] = exps[i] / denom;\n return exps;\n }\n /* ========= Derivatives (elementwise) ========= */\n /** d/dx ReLU */\n static dRelu(x) {\n // subgradient at 0 -> 0\n return x > 0 ? 1 : 0;\n }\n /** d/dx LeakyReLU */\n static dLeakyRelu(x, alpha = 0.01) {\n return x >= 0 ? 1 : alpha;\n }\n /** d/dx Sigmoid = s(x)*(1-s(x)) */\n static dSigmoid(x) {\n const s = Activations.sigmoid(x);\n return s * (1 - s);\n }\n /** d/dx tanh = 1 - tanh(x)^2 */\n static dTanh(x) {\n const t = Math.tanh(x);\n return 1 - t * t;\n }\n /** d/dx Linear = 1 */\n static dLinear(_) {\n return 1;\n }\n /**\n * d/dx GELU (tanh approximation)\n * 0.5*(1 + tanh(u)) + 0.5*x*(1 - tanh(u)^2) * du/dx\n * where u = k*(x + 0.044715 x^3), du/dx = k*(1 + 0.134145 x^2), k = sqrt(2/pi)\n */\n static dGelu(x) {\n const k = Math.sqrt(2 / Math.PI);\n const x2 = x * x;\n const u = k * (x + 0.044715 * x * x2);\n const t = Math.tanh(u);\n const sech2 = 1 - t * t;\n const du = k * (1 + 0.134145 * x2);\n return 0.5 * (1 + t) + 0.5 * x * sech2 * du;\n }\n /* ========= Apply helpers ========= */\n /** Apply an elementwise activation across a 2D matrix, returning a new matrix. */\n static apply(matrix, fn) {\n const out = new Array(matrix.length);\n for (let i = 0; i < matrix.length; i++) {\n const row = matrix[i];\n const r = new Array(row.length);\n for (let j = 0; j < row.length; j++)\n r[j] = fn(row[j]);\n out[i] = r;\n }\n return out;\n }\n /** Apply an elementwise derivative across a 2D matrix, returning a new matrix. */\n static applyDerivative(matrix, dfn) {\n const out = new Array(matrix.length);\n for (let i = 0; i < matrix.length; i++) {\n const row = matrix[i];\n const r = new Array(row.length);\n for (let j = 0; j < row.length; j++)\n r[j] = dfn(row[j]);\n out[i] = r;\n }\n return out;\n }\n /* ========= Getters ========= */\n /**\n * Get an activation function by name. 
Case-insensitive.\n * For leaky ReLU, you can pass { alpha } to override the negative slope.\n */\n static get(name, opts) {\n var _a;\n const key = name.toLowerCase();\n switch (key) {\n case 'relu': return this.relu;\n case 'leakyrelu':\n case 'leaky-relu': {\n const alpha = (_a = opts === null || opts === void 0 ? void 0 : opts.alpha) !== null && _a !== void 0 ? _a : 0.01;\n return (x) => this.leakyRelu(x, alpha);\n }\n case 'sigmoid': return this.sigmoid;\n case 'tanh': return this.tanh;\n case 'linear':\n case 'identity':\n case 'none': return this.linear;\n case 'gelu': return this.gelu;\n default:\n throw new Error(`Unknown activation: ${name}`);\n }\n }\n /** Get derivative function by name (mirrors get). */\n static getDerivative(name, opts) {\n var _a;\n const key = name.toLowerCase();\n switch (key) {\n case 'relu': return this.dRelu;\n case 'leakyrelu':\n case 'leaky-relu': {\n const alpha = (_a = opts === null || opts === void 0 ? void 0 : opts.alpha) !== null && _a !== void 0 ? _a : 0.01;\n return (x) => this.dLeakyRelu(x, alpha);\n }\n case 'sigmoid': return this.dSigmoid;\n case 'tanh': return this.dTanh;\n case 'linear':\n case 'identity':\n case 'none': return this.dLinear;\n case 'gelu': return this.dGelu;\n default:\n throw new Error(`Unknown activation derivative: ${name}`);\n }\n }\n /** Get both forward and derivative together. */\n static getPair(name, opts) {\n return { f: this.get(name, opts), df: this.getDerivative(name, opts) };\n }\n /* ========= Optional: Softmax Jacobian (for research/tools) ========= */\n /**\n * Given softmax probabilities p, returns the Jacobian J = diag(p) - p p^T\n * (Useful for analysis; not typically needed for ELM.)\n */\n static softmaxJacobian(p) {\n const n = p.length;\n const J = new Array(n);\n for (let i = 0; i < n; i++) {\n const row = new Array(n);\n for (let j = 0; j < n; j++) {\n row[j] = (i === j ? p[i] : 0) - p[i] * p[j];\n }\n J[i] = row;\n }\n return J;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMConfig.ts - Configuration interfaces, defaults, helpers for ELM-based models\n/* =========== Defaults =========== */\nconst defaultBase = {\n hiddenUnits: 50,\n activation: 'relu',\n ridgeLambda: 1e-2,\n weightInit: 'xavier',\n seed: 1337,\n dropout: 0,\n log: { verbose: true, toFile: false, modelName: 'Unnamed ELM Model', level: 'info' },\n};\nconst defaultNumericConfig = Object.assign(Object.assign({}, defaultBase), { useTokenizer: false });\nconst defaultTextConfig = Object.assign(Object.assign({}, defaultBase), { useTokenizer: true, maxLen: 30, charSet: 'abcdefghijklmnopqrstuvwxyz', tokenizerDelimiter: /\\s+/ });\n/* =========== Type guards =========== */\nfunction isTextConfig(cfg) {\n return cfg.useTokenizer === true;\n}\nfunction isNumericConfig(cfg) {\n return cfg.useTokenizer !== true;\n}\n/* =========== Helpers =========== */\n/**\n * Normalize a user config with sensible defaults depending on mode.\n * (Keeps the original structural type, only fills in missing optional fields.)\n */\nfunction normalizeConfig(cfg) {\n var _a, _b, _c, _d;\n if (isTextConfig(cfg)) {\n const merged = Object.assign(Object.assign(Object.assign({}, defaultTextConfig), cfg), { log: Object.assign(Object.assign({}, ((_a = defaultBase.log) !== null && _a !== void 0 ? _a : {})), ((_b = cfg.log) !== null && _b !== void 0 ? 
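// --- Sketch of the temperature behaviour of Activations.softmax above
// (illustrative logits). Lower temperature sharpens the distribution, higher
// flattens it; the max-subtraction keeps exp() numerically stable either way.
const logits = [2.0, 1.0, 0.1];
console.log(Activations.softmax(logits));       // t = 1 (default)
console.log(Activations.softmax(logits, 0.5));  // sharper
console.log(Activations.softmax(logits, 5));    // flatter
const { f: gelu, df: dGelu } = Activations.getPair('gelu');
console.log(gelu(1.0), dGelu(1.0));             // forward value and derivative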
_b : {})) });\n return merged;\n }\n else {\n const merged = Object.assign(Object.assign(Object.assign({}, defaultNumericConfig), cfg), { log: Object.assign(Object.assign({}, ((_c = defaultBase.log) !== null && _c !== void 0 ? _c : {})), ((_d = cfg.log) !== null && _d !== void 0 ? _d : {})) });\n return merged;\n }\n}\n/**\n * Rehydrate text-specific fields from a JSON-safe config\n * (e.g., convert tokenizerDelimiter source string → RegExp).\n */\nfunction deserializeTextBits(config) {\n var _a, _b, _c, _d;\n // If useTokenizer not true, assume numeric config\n if (config.useTokenizer !== true) {\n const nc = Object.assign(Object.assign(Object.assign({}, defaultNumericConfig), config), { log: Object.assign(Object.assign({}, ((_a = defaultBase.log) !== null && _a !== void 0 ? _a : {})), ((_b = config.log) !== null && _b !== void 0 ? _b : {})) });\n return nc;\n }\n // Text config: coerce delimiter\n const tDelim = config.tokenizerDelimiter;\n let delimiter = undefined;\n if (tDelim instanceof RegExp) {\n delimiter = tDelim;\n }\n else if (typeof tDelim === 'string' && tDelim.length > 0) {\n delimiter = new RegExp(tDelim);\n }\n else {\n delimiter = defaultTextConfig.tokenizerDelimiter;\n }\n const tc = Object.assign(Object.assign(Object.assign({}, defaultTextConfig), config), { tokenizerDelimiter: delimiter, log: Object.assign(Object.assign({}, ((_c = defaultBase.log) !== null && _c !== void 0 ? _c : {})), ((_d = config.log) !== null && _d !== void 0 ? _d : {})), useTokenizer: true });\n return tc;\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass Tokenizer {\n constructor(customDelimiter) {\n this.delimiter = customDelimiter || /[\\s,.;!?()\\[\\]{}\"']+/;\n }\n tokenize(text) {\n if (typeof text !== 'string') {\n console.warn('[Tokenizer] Expected a string, got:', typeof text, text);\n try {\n text = String(text !== null && text !== void 0 ? text : '');\n }\n catch (_a) {\n return [];\n }\n }\n return text\n .trim()\n .toLowerCase()\n .split(this.delimiter)\n .filter(Boolean);\n }\n ngrams(tokens, n) {\n if (n <= 0 || tokens.length < n)\n return [];\n const result = [];\n for (let i = 0; i <= tokens.length - n; i++) {\n result.push(tokens.slice(i, i + n).join(' '));\n }\n return result;\n }\n}\n\n// © 2026 AsterMind AI Co. 
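// --- Usage sketch for the Tokenizer above (example strings are illustrative).
// tokenize() lower-cases, splits on the delimiter, and drops empty tokens;
// ngrams() joins sliding windows of n tokens with spaces.
const tok = new Tokenizer(); // default delimiter: /[\s,.;!?()\[\]{}"']+/
const words = tok.tokenize('Hello, world! Hello again.');
// words → ['hello', 'world', 'hello', 'again']
console.log(tok.ngrams(words, 2));
// → ['hello world', 'world hello', 'hello again']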
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// TextEncoder.ts - Text preprocessing and one-hot encoding for ELM\nconst defaultTextEncoderConfig = {\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n maxLen: 15,\n useTokenizer: false\n};\nclass TextEncoder {\n constructor(config = {}) {\n const cfg = Object.assign(Object.assign({}, defaultTextEncoderConfig), config);\n this.charSet = cfg.charSet;\n this.charSize = cfg.charSet.length;\n this.maxLen = cfg.maxLen;\n this.useTokenizer = cfg.useTokenizer;\n if (this.useTokenizer) {\n this.tokenizer = new Tokenizer(config.tokenizerDelimiter);\n }\n }\n charToOneHot(c) {\n const index = this.charSet.indexOf(c.toLowerCase());\n const vec = Array(this.charSize).fill(0);\n if (index !== -1)\n vec[index] = 1;\n return vec;\n }\n textToVector(text) {\n let cleaned;\n if (this.useTokenizer && this.tokenizer) {\n const tokens = this.tokenizer.tokenize(text).join('');\n cleaned = tokens.slice(0, this.maxLen).padEnd(this.maxLen, ' ');\n }\n else {\n cleaned = text.toLowerCase().replace(new RegExp(`[^${this.charSet}]`, 'g'), '').padEnd(this.maxLen, ' ').slice(0, this.maxLen);\n }\n const vec = [];\n for (let i = 0; i < cleaned.length; i++) {\n vec.push(...this.charToOneHot(cleaned[i]));\n }\n return vec;\n }\n normalizeVector(v) {\n const norm = Math.sqrt(v.reduce((sum, x) => sum + x * x, 0));\n return norm > 0 ? v.map(x => x / norm) : v;\n }\n getVectorSize() {\n return this.charSize * this.maxLen;\n }\n getCharSet() {\n return this.charSet;\n }\n getMaxLen() {\n return this.maxLen;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// UniversalEncoder.ts - Automatically selects appropriate encoder (char or token based)\nconst defaultUniversalConfig = {\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n maxLen: 15,\n useTokenizer: false,\n mode: 'char'\n};\nclass UniversalEncoder {\n constructor(config = {}) {\n const merged = Object.assign(Object.assign({}, defaultUniversalConfig), config);\n const useTokenizer = merged.mode === 'token';\n this.encoder = new TextEncoder({\n charSet: merged.charSet,\n maxLen: merged.maxLen,\n useTokenizer,\n tokenizerDelimiter: config.tokenizerDelimiter\n });\n }\n encode(text) {\n return this.encoder.textToVector(text);\n }\n normalize(v) {\n return this.encoder.normalizeVector(v);\n }\n getVectorSize() {\n return this.encoder.getVectorSize();\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Augment.ts - Basic augmentation utilities for category training examples\nclass Augment {\n static addSuffix(text, suffixes) {\n return suffixes.map(suffix => `${text} ${suffix}`);\n }\n static addPrefix(text, prefixes) {\n return prefixes.map(prefix => `${prefix} ${text}`);\n }\n static addNoise(text, charSet, noiseRate = 0.1) {\n const chars = text.split('');\n for (let i = 0; i < chars.length; i++) {\n if (Math.random() < noiseRate) {\n const randomChar = charSet[Math.floor(Math.random() * charSet.length)];\n chars[i] = randomChar;\n }\n }\n return chars.join('');\n }\n static mix(text, mixins) {\n return mixins.map(m => `${text} ${m}`);\n }\n static generateVariants(text, charSet, options) {\n const variants = [text];\n if (options === null || options === void 0 ? void 0 : options.suffixes) {\n variants.push(...this.addSuffix(text, options.suffixes));\n }\n if (options === null || options === void 0 ? void 0 : options.prefixes) {\n variants.push(...this.addPrefix(text, options.prefixes));\n }\n if (options === null || options === void 0 ? 
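// --- Sketch: UniversalEncoder wraps TextEncoder. mode 'char' one-hot-encodes
// characters directly; mode 'token' tokenizes first and encodes the joined
// tokens. Either way the output length is charSet.length * maxLen.
const enc = new UniversalEncoder({ charSet: 'abcdefghijklmnopqrstuvwxyz', maxLen: 10, mode: 'char' });
const vec = enc.normalize(enc.encode('hello'));
console.log(vec.length, enc.getVectorSize()); // 260, 260 (26 chars x maxLen 10)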
void 0 : options.includeNoise) {\n variants.push(this.addNoise(text, charSet));\n }\n return variants;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELM.ts - Core ELM logic with TypeScript types (numeric & text modes)\n// Seeded PRNG (xorshift-ish) for deterministic init\nfunction makePRNG$2(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n return ((s >>> 0) / 0xffffffff);\n };\n}\nfunction clampInt(x, lo, hi) {\n const xi = x | 0;\n return xi < lo ? lo : (xi > hi ? hi : xi);\n}\nfunction isOneHot2D(Y) {\n return Array.isArray(Y) && Array.isArray(Y[0]) && Number.isFinite(Y[0][0]);\n}\nfunction maxLabel(y) {\n let m = -Infinity;\n for (let i = 0; i < y.length; i++) {\n const v = y[i] | 0;\n if (v > m)\n m = v;\n }\n return m === -Infinity ? 0 : m;\n}\n/** One-hot (clamped) */\nfunction toOneHotClamped(labels, k) {\n const K = k | 0;\n const Y = new Array(labels.length);\n for (let i = 0; i < labels.length; i++) {\n const j = clampInt(labels[i], 0, K - 1);\n const row = new Array(K).fill(0);\n row[j] = 1;\n Y[i] = row;\n }\n return Y;\n}\n/** (HᵀH + λI)B = HᵀY solved via Cholesky */\nfunction ridgeSolve$1(H, Y, lambda) {\n const Ht = Matrix.transpose(H);\n const A = Matrix.addRegularization(Matrix.multiply(Ht, H), lambda + 1e-10);\n const R = Matrix.multiply(Ht, Y);\n return Matrix.solveCholesky(A, R, 1e-10);\n}\n/* =========================\n * ELM class\n * ========================= */\nclass ELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;\n // Merge with mode-appropriate defaults\n const cfg = normalizeConfig(config);\n this.config = cfg;\n this.categories = cfg.categories;\n this.hiddenUnits = cfg.hiddenUnits;\n this.activation = (_a = cfg.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.useTokenizer = isTextConfig(cfg);\n this.maxLen = isTextConfig(cfg) ? cfg.maxLen : 0;\n this.charSet = isTextConfig(cfg) ? ((_b = cfg.charSet) !== null && _b !== void 0 ? _b : 'abcdefghijklmnopqrstuvwxyz') : 'abcdefghijklmnopqrstuvwxyz';\n this.tokenizerDelimiter = isTextConfig(cfg) ? cfg.tokenizerDelimiter : undefined;\n this.metrics = cfg.metrics;\n this.verbose = (_d = (_c = cfg.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : true;\n this.modelName = (_f = (_e = cfg.log) === null || _e === void 0 ? void 0 : _e.modelName) !== null && _f !== void 0 ? _f : 'Unnamed ELM Model';\n this.logToFile = (_h = (_g = cfg.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false;\n this.dropout = (_j = cfg.dropout) !== null && _j !== void 0 ? _j : 0;\n this.ridgeLambda = Math.max((_k = cfg.ridgeLambda) !== null && _k !== void 0 ? _k : 1e-2, 1e-8);\n // Seeded RNG\n const seed = (_l = cfg.seed) !== null && _l !== void 0 ? _l : 1337;\n this.rng = makePRNG$2(seed);\n // Create encoder only if tokenizer is enabled\n if (this.useTokenizer) {\n this.encoder = new UniversalEncoder({\n charSet: this.charSet,\n maxLen: this.maxLen,\n useTokenizer: this.useTokenizer,\n tokenizerDelimiter: this.tokenizerDelimiter,\n mode: this.useTokenizer ? 'token' : 'char'\n });\n }\n // Weights are allocated on first training call (inputDim known then)\n this.model = null;\n }\n /* ========= Encoder narrowing (Option A) ========= */\n assertEncoder() {\n if (!this.encoder) {\n throw new Error('Encoder is not initialized. 
Enable useTokenizer:true or construct an encoder.');\n }\n return this.encoder;\n }\n /* ========= initialization ========= */\n xavierLimit(fanIn, fanOut) {\n return Math.sqrt(6 / (fanIn + fanOut));\n }\n randomMatrix(rows, cols) {\n var _a;\n const weightInit = (_a = this.config.weightInit) !== null && _a !== void 0 ? _a : 'uniform';\n if (weightInit === 'xavier') {\n const limit = this.xavierLimit(cols, rows);\n if (this.verbose)\n console.log(`✨ Xavier init with limit sqrt(6/(${cols}+${rows})) ≈ ${limit.toFixed(4)}`);\n return Array.from({ length: rows }, () => Array.from({ length: cols }, () => (this.rng() * 2 - 1) * limit));\n }\n else {\n if (this.verbose)\n console.log(`✨ Uniform init [-1,1] (seeded)`);\n return Array.from({ length: rows }, () => Array.from({ length: cols }, () => (this.rng() * 2 - 1)));\n }\n }\n buildHidden(X, W, b) {\n const tempH = Matrix.multiply(X, Matrix.transpose(W)); // N x hidden\n const activationFn = Activations.get(this.activation);\n let H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), activationFn);\n if (this.dropout > 0) {\n const keepProb = 1 - this.dropout;\n for (let i = 0; i < H.length; i++) {\n for (let j = 0; j < H[0].length; j++) {\n if (this.rng() < this.dropout)\n H[i][j] = 0;\n else\n H[i][j] /= keepProb;\n }\n }\n }\n return H;\n }\n /* ========= public helpers ========= */\n oneHot(n, index) {\n return Array.from({ length: n }, (_, i) => (i === index ? 1 : 0));\n }\n setCategories(categories) {\n this.categories = categories;\n }\n loadModelFromJSON(json) {\n var _a, _b, _c, _d, _e;\n try {\n const parsed = JSON.parse(json);\n const cfg = deserializeTextBits(parsed.config);\n // Rebuild instance config\n this.config = cfg;\n this.categories = (_a = cfg.categories) !== null && _a !== void 0 ? _a : this.categories;\n this.hiddenUnits = (_b = cfg.hiddenUnits) !== null && _b !== void 0 ? _b : this.hiddenUnits;\n this.activation = (_c = cfg.activation) !== null && _c !== void 0 ? _c : this.activation;\n this.useTokenizer = cfg.useTokenizer === true;\n this.maxLen = (_d = cfg.maxLen) !== null && _d !== void 0 ? _d : this.maxLen;\n this.charSet = (_e = cfg.charSet) !== null && _e !== void 0 ? _e : this.charSet;\n this.tokenizerDelimiter = cfg.tokenizerDelimiter;\n if (this.useTokenizer) {\n this.encoder = new UniversalEncoder({\n charSet: this.charSet,\n maxLen: this.maxLen,\n useTokenizer: this.useTokenizer,\n tokenizerDelimiter: this.tokenizerDelimiter,\n mode: this.useTokenizer ? 'token' : 'char'\n });\n }\n else {\n this.encoder = undefined;\n }\n // Restore weights\n const { W, b, B } = parsed;\n this.model = { W, b, beta: B };\n this.savedModelJSON = json;\n if (this.verbose)\n console.log(`✅ ${this.modelName} Model loaded from JSON`);\n }\n catch (e) {\n console.error(`❌ Failed to load ${this.modelName} model from JSON:`, e);\n }\n }\n /* ========= Numeric training tolerance ========= */\n /** Decide output dimension from config/categories/labels/one-hot */\n resolveOutputDim(yOrY) {\n // Prefer explicit config\n const cfgOut = this.config.outputDim;\n if (Number.isFinite(cfgOut) && cfgOut > 0)\n return cfgOut | 0;\n // Then categories length if present\n if (Array.isArray(this.categories) && this.categories.length > 0)\n return this.categories.length | 0;\n // Infer from data\n if (isOneHot2D(yOrY))\n return (yOrY[0].length | 0) || 1;\n return (maxLabel(yOrY) + 1) | 0;\n }\n /** Coerce X, and turn labels→one-hot if needed. 
Always returns strict number[][] */\n coerceXY(X, yOrY) {\n const Xnum = ensureRectNumber2D(X, undefined, 'X');\n const outDim = this.resolveOutputDim(yOrY);\n let Ynum;\n if (isOneHot2D(yOrY)) {\n // Ensure rect with exact width outDim (pad/trunc to be safe)\n Ynum = ensureRectNumber2D(yOrY, outDim, 'Y(one-hot)');\n }\n else {\n // Labels → clamped one-hot\n Ynum = ensureRectNumber2D(toOneHotClamped(yOrY, outDim), outDim, 'Y(labels→one-hot)');\n }\n // If categories length mismatches inferred outDim, adjust categories (non-breaking)\n if (!this.categories || this.categories.length !== outDim) {\n this.categories = Array.from({ length: outDim }, (_, i) => { var _a, _b; return (_b = (_a = this.categories) === null || _a === void 0 ? void 0 : _a[i]) !== null && _b !== void 0 ? _b : String(i); });\n }\n return { Xnum, Ynum, outDim };\n }\n /* ========= Training on numeric vectors =========\n * y can be class indices OR one-hot.\n */\n trainFromData(X, y, options) {\n if (!(X === null || X === void 0 ? void 0 : X.length))\n throw new Error('trainFromData: X is empty');\n // Coerce & shape\n const { Xnum, Ynum, outDim } = this.coerceXY(X, y);\n const n = Xnum.length;\n const inputDim = Xnum[0].length;\n // init / reuse\n let W, b;\n const reuseWeights = (options === null || options === void 0 ? void 0 : options.reuseWeights) === true && this.model;\n if (reuseWeights && this.model) {\n W = this.model.W;\n b = this.model.b;\n if (this.verbose)\n console.log('🔄 Reusing existing weights/biases for training.');\n }\n else {\n W = this.randomMatrix(this.hiddenUnits, inputDim);\n b = this.randomMatrix(this.hiddenUnits, 1);\n if (this.verbose)\n console.log('✨ Initializing fresh weights/biases for training.');\n }\n // Hidden\n let H = this.buildHidden(Xnum, W, b);\n // Optional sample weights\n let Yw = Ynum;\n if (options === null || options === void 0 ? 
void 0 : options.weights) {\n const ww = options.weights;\n if (ww.length !== n) {\n throw new Error(`Weight array length ${ww.length} does not match sample count ${n}`);\n }\n H = H.map((row, i) => row.map(x => x * Math.sqrt(ww[i])));\n Yw = Ynum.map((row, i) => row.map(x => x * Math.sqrt(ww[i])));\n }\n // Solve ridge (stable)\n const beta = ridgeSolve$1(H, Yw, this.ridgeLambda);\n this.model = { W, b, beta };\n // Evaluate & maybe save\n const predictions = Matrix.multiply(H, beta);\n if (this.metrics) {\n const rmse = this.calculateRMSE(Ynum, predictions);\n const mae = this.calculateMAE(Ynum, predictions);\n const acc = this.calculateAccuracy(Ynum, predictions);\n const f1 = this.calculateF1Score(Ynum, predictions);\n const ce = this.calculateCrossEntropy(Ynum, predictions);\n const r2 = this.calculateR2Score(Ynum, predictions);\n const results = { rmse, mae, accuracy: acc, f1, crossEntropy: ce, r2 };\n let allPassed = true;\n if (this.metrics.rmse !== undefined && rmse > this.metrics.rmse)\n allPassed = false;\n if (this.metrics.mae !== undefined && mae > this.metrics.mae)\n allPassed = false;\n if (this.metrics.accuracy !== undefined && acc < this.metrics.accuracy)\n allPassed = false;\n if (this.metrics.f1 !== undefined && f1 < this.metrics.f1)\n allPassed = false;\n if (this.metrics.crossEntropy !== undefined && ce > this.metrics.crossEntropy)\n allPassed = false;\n if (this.metrics.r2 !== undefined && r2 < this.metrics.r2)\n allPassed = false;\n if (this.verbose)\n this.logMetrics(results);\n if (allPassed) {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model passed thresholds and was saved to JSON.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n else {\n if (this.verbose)\n console.log('❌ Model not saved: One or more thresholds not met.');\n }\n }\n else {\n // No metrics—always save\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model trained with no metrics—saved by default.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n return { epochs: 1, metrics: undefined };\n }\n /* ========= Training from category strings (text mode) ========= */\n train(augmentationOptions, weights) {\n if (!this.useTokenizer) {\n throw new Error('train(): text training requires useTokenizer:true');\n }\n const enc = this.assertEncoder();\n const X = [];\n let Y = [];\n this.categories.forEach((cat, i) => {\n const variants = Augment.generateVariants(cat, this.charSet, augmentationOptions);\n for (const variant of variants) {\n const vec = enc.normalize(enc.encode(variant));\n X.push(vec);\n Y.push(this.oneHot(this.categories.length, i));\n }\n });\n const inputDim = X[0].length;\n const W = this.randomMatrix(this.hiddenUnits, inputDim);\n const b = this.randomMatrix(this.hiddenUnits, 1);\n let H = this.buildHidden(X, W, b);\n if (weights) {\n if (weights.length !== H.length) {\n throw new Error(`Weight array length ${weights.length} does not match sample count ${H.length}`);\n }\n H = H.map((row, i) => row.map(x => x * Math.sqrt(weights[i])));\n Y = Y.map((row, i) => row.map(x => x * Math.sqrt(weights[i])));\n }\n const beta = ridgeSolve$1(H, Y, this.ridgeLambda);\n this.model = { W, b, beta };\n const predictions = Matrix.multiply(H, beta);\n if (this.metrics) {\n const rmse = this.calculateRMSE(Y, predictions);\n const mae = 
this.calculateMAE(Y, predictions);\n const acc = this.calculateAccuracy(Y, predictions);\n const f1 = this.calculateF1Score(Y, predictions);\n const ce = this.calculateCrossEntropy(Y, predictions);\n const r2 = this.calculateR2Score(Y, predictions);\n const results = { rmse, mae, accuracy: acc, f1, crossEntropy: ce, r2 };\n let allPassed = true;\n if (this.metrics.rmse !== undefined && rmse > this.metrics.rmse)\n allPassed = false;\n if (this.metrics.mae !== undefined && mae > this.metrics.mae)\n allPassed = false;\n if (this.metrics.accuracy !== undefined && acc < this.metrics.accuracy)\n allPassed = false;\n if (this.metrics.f1 !== undefined && f1 < this.metrics.f1)\n allPassed = false;\n if (this.metrics.crossEntropy !== undefined && ce > this.metrics.crossEntropy)\n allPassed = false;\n if (this.metrics.r2 !== undefined && r2 < this.metrics.r2)\n allPassed = false;\n if (this.verbose)\n this.logMetrics(results);\n if (allPassed) {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model passed thresholds and was saved to JSON.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n else {\n if (this.verbose)\n console.log('❌ Model not saved: One or more thresholds not met.');\n }\n }\n else {\n this.savedModelJSON = JSON.stringify({\n config: this.serializeConfig(),\n W, b, B: beta\n });\n if (this.verbose)\n console.log('✅ Model trained with no metrics—saved by default.');\n if (this.config.exportFileName)\n this.saveModelAsJSONFile(this.config.exportFileName);\n }\n return { epochs: 1, metrics: undefined };\n }\n /* ========= Prediction ========= */\n /** Text prediction (uses Option A narrowing) */\n predict(text, topK = 5) {\n if (!this.model)\n throw new Error('Model not trained.');\n if (!this.useTokenizer) {\n throw new Error('predict(text) requires useTokenizer:true');\n }\n const enc = this.assertEncoder();\n const vec = enc.normalize(enc.encode(text));\n const logits = this.predictLogitsFromVector(vec);\n const probs = Activations.softmax(logits);\n return probs\n .map((p, i) => ({ label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, topK);\n }\n /** Vector batch prediction (kept for back-compat) */\n predictFromVector(inputVecRows, topK = 5) {\n if (!this.model)\n throw new Error('Model not trained.');\n return inputVecRows.map(vec => {\n const logits = this.predictLogitsFromVector(vec);\n const probs = Activations.softmax(logits);\n return probs\n .map((p, i) => ({ label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, topK);\n });\n }\n /** Raw logits for a single numeric vector */\n predictLogitsFromVector(vec) {\n if (!this.model)\n throw new Error('Model not trained.');\n const { W, b, beta } = this.model;\n // Hidden\n const tempH = Matrix.multiply([vec], Matrix.transpose(W)); // 1 x hidden\n const activationFn = Activations.get(this.activation);\n const H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), activationFn); // 1 x hidden\n // Output logits\n return Matrix.multiply(H, beta)[0]; // 1 x outDim → vec\n }\n /** Raw logits for a batch of numeric vectors */\n predictLogitsFromVectors(X) {\n if (!this.model)\n throw new Error('Model not trained.');\n const { W, b, beta } = this.model;\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const activationFn = Activations.get(this.activation);\n const H = Activations.apply(tempH.map(row => 
row.map((val, j) => val + b[j][0])), activationFn);\n return Matrix.multiply(H, beta);\n }\n /** Probability vector (softmax) for a single numeric vector */\n predictProbaFromVector(vec) {\n return Activations.softmax(this.predictLogitsFromVector(vec));\n }\n /** Probability matrix (softmax per row) for a batch of numeric vectors */\n predictProbaFromVectors(X) {\n return this.predictLogitsFromVectors(X).map(Activations.softmax);\n }\n /** Top-K results for a single numeric vector */\n predictTopKFromVector(vec, k = 5) {\n const probs = this.predictProbaFromVector(vec);\n return probs\n .map((p, i) => ({ index: i, label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k);\n }\n /** Top-K results for a batch of numeric vectors */\n predictTopKFromVectors(X, k = 5) {\n return this.predictProbaFromVectors(X).map(row => row\n .map((p, i) => ({ index: i, label: this.categories[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k));\n }\n /* ========= Metrics ========= */\n calculateRMSE(Y, P) {\n const N = Y.length, C = Y[0].length;\n let sum = 0;\n for (let i = 0; i < N; i++)\n for (let j = 0; j < C; j++) {\n const d = Y[i][j] - P[i][j];\n sum += d * d;\n }\n return Math.sqrt(sum / (N * C));\n }\n calculateMAE(Y, P) {\n const N = Y.length, C = Y[0].length;\n let sum = 0;\n for (let i = 0; i < N; i++)\n for (let j = 0; j < C; j++) {\n sum += Math.abs(Y[i][j] - P[i][j]);\n }\n return sum / (N * C);\n }\n calculateAccuracy(Y, P) {\n let correct = 0;\n for (let i = 0; i < Y.length; i++) {\n const yMax = this.argmax(Y[i]);\n const pMax = this.argmax(P[i]);\n if (yMax === pMax)\n correct++;\n }\n return correct / Y.length;\n }\n calculateF1Score(Y, P) {\n let tp = 0, fp = 0, fn = 0;\n for (let i = 0; i < Y.length; i++) {\n const yIdx = this.argmax(Y[i]);\n const pIdx = this.argmax(P[i]);\n if (yIdx === pIdx)\n tp++;\n else {\n fp++;\n fn++;\n }\n }\n const precision = tp / (tp + fp || 1);\n const recall = tp / (tp + fn || 1);\n return 2 * (precision * recall) / (precision + recall || 1);\n }\n calculateCrossEntropy(Y, P) {\n let loss = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < Y[0].length; j++) {\n const pred = Math.min(Math.max(P[i][j], 1e-15), 1 - 1e-15);\n loss += -Y[i][j] * Math.log(pred);\n }\n }\n return loss / Y.length;\n }\n calculateR2Score(Y, P) {\n const C = Y[0].length;\n const mean = new Array(C).fill(0);\n for (let i = 0; i < Y.length; i++)\n for (let j = 0; j < C; j++)\n mean[j] += Y[i][j];\n for (let j = 0; j < C; j++)\n mean[j] /= Y.length;\n let ssRes = 0, ssTot = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < C; j++) {\n ssRes += Math.pow(Y[i][j] - P[i][j], 2);\n ssTot += Math.pow(Y[i][j] - mean[j], 2);\n }\n }\n return 1 - ssRes / ssTot;\n }\n /* ========= Hidden layer / embeddings ========= */\n computeHiddenLayer(X) {\n if (!this.model)\n throw new Error('Model not trained.');\n const WX = Matrix.multiply(X, Matrix.transpose(this.model.W));\n const WXb = WX.map(row => row.map((val, j) => val + this.model.b[j][0]));\n const activationFn = Activations.get(this.activation);\n return WXb.map(row => row.map(activationFn));\n }\n getEmbedding(X) {\n return this.computeHiddenLayer(X);\n }\n /* ========= Logging & export ========= */\n logMetrics(results) {\n var _a, _b, _c, _d, _e, _f;\n const logLines = [`📋 ${this.modelName} — Metrics Summary:`];\n const push = (label, value, threshold, cmp) => {\n if (threshold !== undefined)\n logLines.push(` ${label}: ${value.toFixed(4)} (threshold: ${cmp} 
${threshold})`);\n };\n push('RMSE', results.rmse, (_a = this.metrics) === null || _a === void 0 ? void 0 : _a.rmse, '<=');\n push('MAE', results.mae, (_b = this.metrics) === null || _b === void 0 ? void 0 : _b.mae, '<=');\n push('Accuracy', results.accuracy, (_c = this.metrics) === null || _c === void 0 ? void 0 : _c.accuracy, '>=');\n push('F1 Score', results.f1, (_d = this.metrics) === null || _d === void 0 ? void 0 : _d.f1, '>=');\n push('Cross-Entropy', results.crossEntropy, (_e = this.metrics) === null || _e === void 0 ? void 0 : _e.crossEntropy, '<=');\n push('R² Score', results.r2, (_f = this.metrics) === null || _f === void 0 ? void 0 : _f.r2, '>=');\n if (this.verbose)\n console.log('\\n' + logLines.join('\\n'));\n if (this.logToFile) {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const logFile = this.config.logFileName || `${this.modelName.toLowerCase().replace(/\\s+/g, '_')}_metrics_${timestamp}.txt`;\n const blob = new Blob([logLines.join('\\n')], { type: 'text/plain' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = logFile;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n }\n }\n saveModelAsJSONFile(filename) {\n if (!this.savedModelJSON) {\n if (this.verbose)\n console.warn('No model saved — did not meet metric thresholds.');\n return;\n }\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const fallback = `${this.modelName.toLowerCase().replace(/\\s+/g, '_')}_${timestamp}.json`;\n const finalName = filename || this.config.exportFileName || fallback;\n const blob = new Blob([this.savedModelJSON], { type: 'application/json' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = finalName;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n if (this.verbose)\n console.log(`📦 Model exported as ${finalName}`);\n }\n serializeConfig() {\n const cfg = Object.assign({}, this.config);\n // Remove non-serializable / volatile fields\n delete cfg.seed;\n delete cfg.log;\n delete cfg.encoder;\n // Serialize tokenizerDelimiter for JSON\n if (cfg.tokenizerDelimiter instanceof RegExp) {\n cfg.tokenizerDelimiter = cfg.tokenizerDelimiter.source;\n }\n return cfg;\n }\n argmax(arr) {\n let i = 0;\n for (let k = 1; k < arr.length; k++)\n if (arr[k] > arr[i])\n i = k;\n return i;\n }\n getEncoder() {\n return this.encoder;\n }\n}\n\n// © 2026 AsterMind AI Co. 
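// --- Numeric-mode training sketch for the ELM above (toy XOR data; values
// illustrative). Labels may be class indices or one-hot rows; coerceXY()
// converts them and infers the output dimension. Per-sample weights, when
// given, scale the rows of H and Y by sqrt(w_i), the standard reduction of
// the weighted ridge problem min_B Σ w_i‖y_i - h_i·B‖² + λ‖B‖² to plain ridge.
const elm = new ELM({ hiddenUnits: 32, activation: 'tanh', ridgeLambda: 1e-2, seed: 42, log: { verbose: false } });
elm.trainFromData(
  [[0, 0], [0, 1], [1, 0], [1, 1]], // X
  [0, 1, 1, 0]                      // y as class indices (one-hot also accepted)
);
console.log(elm.predictTopKFromVector([0, 1], 2)); // [{ index, label, prob }, ...]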
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// KernelELM.ts — Kernel Extreme Learning Machine (Exact + Nyström + Whitening)\n// Dependencies: Matrix (multiply, transpose, addRegularization, solveCholesky, identity, zeros)\nclass KernelRegistry {\n static register(name, fn) {\n if (!name || typeof fn !== 'function')\n throw new Error('KernelRegistry.register: invalid args');\n this.map.set(name, fn);\n }\n static get(name) {\n const f = this.map.get(name);\n if (!f)\n throw new Error(`KernelRegistry: kernel \"${name}\" not found`);\n return f;\n }\n}\nKernelRegistry.map = new Map();\nfunction l2sq(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return s;\n}\nfunction l1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.abs(a[i] - b[i]);\n return s;\n}\nfunction dot$4(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction softmaxRow(v) {\n const m = Math.max(...v);\n const ex = v.map(x => Math.exp(x - m));\n const s = ex.reduce((a, b) => a + b, 0) || 1;\n return ex.map(e => e / s);\n}\nfunction makePRNG$1(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => { s ^= s << 13; s ^= s >>> 17; s ^= s << 5; return (s >>> 0) / 0xffffffff; };\n}\nfunction buildKernel(spec, dim) {\n var _a, _b, _c, _d, _e;\n switch (spec.type) {\n case 'custom':\n if (!spec.name)\n throw new Error('custom kernel requires \"name\"');\n return KernelRegistry.get(spec.name);\n case 'linear':\n return (x, z) => dot$4(x, z);\n case 'poly': {\n const gamma = (_a = spec.gamma) !== null && _a !== void 0 ? _a : 1 / Math.max(1, dim);\n const degree = (_b = spec.degree) !== null && _b !== void 0 ? _b : 2;\n const coef0 = (_c = spec.coef0) !== null && _c !== void 0 ? _c : 1;\n return (x, z) => Math.pow(gamma * dot$4(x, z) + coef0, degree);\n }\n case 'laplacian': {\n const gamma = (_d = spec.gamma) !== null && _d !== void 0 ? _d : 1 / Math.max(1, dim);\n return (x, z) => Math.exp(-gamma * l1(x, z));\n }\n case 'rbf':\n default: {\n const gamma = (_e = spec.gamma) !== null && _e !== void 0 ? 
_e : 1 / Math.max(1, dim);\n return (x, z) => Math.exp(-gamma * l2sq(x, z));\n }\n }\n}\n/* ============== Landmark selection (Nyström) ============== */\nfunction pickUniform(X, m, seed = 1337) {\n const prng = makePRNG$1(seed);\n const N = X.length;\n const idx = Array.from({ length: N }, (_, i) => i);\n // Fisher–Yates (only first m)\n for (let i = 0; i < m; i++) {\n const j = i + Math.floor(prng() * (N - i));\n const t = idx[i];\n idx[i] = idx[j];\n idx[j] = t;\n }\n return idx.slice(0, m);\n}\nfunction pickKMeansPP(X, m, seed = 1337) {\n const prng = makePRNG$1(seed);\n const N = X.length;\n if (m >= N)\n return Array.from({ length: N }, (_, i) => i);\n const centers = [];\n centers.push(Math.floor(prng() * N));\n const D2 = new Float64Array(N).fill(Infinity);\n while (centers.length < m) {\n const c = centers[centers.length - 1];\n for (let i = 0; i < N; i++) {\n const d2 = l2sq(X[i], X[c]);\n if (d2 < D2[i])\n D2[i] = d2;\n }\n let sum = 0;\n for (let i = 0; i < N; i++)\n sum += D2[i];\n let r = prng() * (sum || 1);\n let next = 0;\n for (let i = 0; i < N; i++) {\n r -= D2[i];\n if (r <= 0) {\n next = i;\n break;\n }\n }\n centers.push(next);\n }\n return centers;\n}\n/* ====================== KernelELM ====================== */\nclass KernelELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s;\n // exact mode params\n this.Xtrain = [];\n this.alpha = [];\n // nystrom params\n this.Z = []; // landmarks (m x D)\n this.W = []; // weights in feature space (m x K)\n this.R = []; // symmetric whitener K_mm^{-1/2} (m x m) when whitening\n const resolved = {\n outputDim: config.outputDim,\n kernel: config.kernel,\n ridgeLambda: (_a = config.ridgeLambda) !== null && _a !== void 0 ? _a : 1e-2,\n task: (_b = config.task) !== null && _b !== void 0 ? _b : 'classification',\n mode: (_c = config.mode) !== null && _c !== void 0 ? _c : 'exact',\n nystrom: {\n m: (_d = config.nystrom) === null || _d === void 0 ? void 0 : _d.m,\n strategy: (_f = (_e = config.nystrom) === null || _e === void 0 ? void 0 : _e.strategy) !== null && _f !== void 0 ? _f : 'uniform',\n seed: (_h = (_g = config.nystrom) === null || _g === void 0 ? void 0 : _g.seed) !== null && _h !== void 0 ? _h : 1337,\n preset: (_j = config.nystrom) === null || _j === void 0 ? void 0 : _j.preset,\n whiten: (_l = (_k = config.nystrom) === null || _k === void 0 ? void 0 : _k.whiten) !== null && _l !== void 0 ? _l : false,\n jitter: (_o = (_m = config.nystrom) === null || _m === void 0 ? void 0 : _m.jitter) !== null && _o !== void 0 ? _o : 1e-10,\n },\n log: {\n modelName: (_q = (_p = config.log) === null || _p === void 0 ? void 0 : _p.modelName) !== null && _q !== void 0 ? _q : 'KernelELM',\n verbose: (_s = (_r = config.log) === null || _r === void 0 ? void 0 : _r.verbose) !== null && _s !== void 0 ? _s : false,\n },\n };\n this.cfg = resolved;\n this.verbose = this.cfg.log.verbose;\n this.name = this.cfg.log.modelName;\n }\n /* ------------------- Train ------------------- */\n fit(X, Y) {\n var _a, _b, _c, _d, _e;\n if (!(X === null || X === void 0 ? void 0 : X.length) || !((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length))\n throw new Error('KernelELM.fit: empty X');\n if (!(Y === null || Y === void 0 ? void 0 : Y.length) || !((_b = Y[0]) === null || _b === void 0 ? 
void 0 : _b.length))\n throw new Error('KernelELM.fit: empty Y');\n if (X.length !== Y.length)\n throw new Error(`KernelELM.fit: X rows ${X.length} != Y rows ${Y.length}`);\n if (Y[0].length !== this.cfg.outputDim) {\n throw new Error(`KernelELM.fit: Y dims ${Y[0].length} != outputDim ${this.cfg.outputDim}`);\n }\n const N = X.length, D = X[0].length, K = Y[0].length;\n this.kernel = buildKernel(this.cfg.kernel, D);\n if (this.cfg.mode === 'exact') {\n // Gram K (N x N)\n if (this.verbose)\n console.log(`🔧 [${this.name}] exact Gram: N=${N}, D=${D}`);\n const Kmat = new Array(N);\n for (let i = 0; i < N; i++) {\n const row = new Array(N);\n Kmat[i] = row;\n row[i] = 1;\n for (let j = i + 1; j < N; j++)\n row[j] = this.kernel(X[i], X[j]);\n }\n for (let i = 1; i < N; i++)\n for (let j = 0; j < i; j++)\n Kmat[i][j] = Kmat[j][i];\n // (K + λI) α = Y\n const A = Matrix.addRegularization(Kmat, this.cfg.ridgeLambda + 1e-10);\n const Alpha = Matrix.solveCholesky(A, Y, 1e-12); // (N x K)\n this.Xtrain = X.map(r => r.slice());\n this.alpha = Alpha;\n this.Z = [];\n this.W = [];\n this.R = [];\n if (this.verbose)\n console.log(`✅ [${this.name}] exact fit complete: alpha(${N}x${K})`);\n return;\n }\n // ---------- Nyström ----------\n const ny = this.cfg.nystrom;\n let Z;\n if (ny.strategy === 'preset' && (((_c = ny.preset) === null || _c === void 0 ? void 0 : _c.points) || ((_d = ny.preset) === null || _d === void 0 ? void 0 : _d.indices))) {\n Z = ny.preset.points ? ny.preset.points.map(r => r.slice())\n : ny.preset.indices.map(i => X[i]);\n }\n else {\n const m = (_e = ny.m) !== null && _e !== void 0 ? _e : Math.max(10, Math.min(300, Math.floor(Math.sqrt(N))));\n const idx = (ny.strategy === 'kmeans++') ? pickKMeansPP(X, m, ny.seed) : pickUniform(X, m, ny.seed);\n Z = idx.map(i => X[i]);\n }\n const m = Z.length;\n if (this.verbose)\n console.log(`🔹 [${this.name}] Nyström: m=${m}, strategy=${ny.strategy}, whiten=${ny.whiten ? 
'on' : 'off'}`);\n // K_nm (N x m)\n const Knm = new Array(N);\n for (let i = 0; i < N; i++) {\n const row = new Array(m), xi = X[i];\n for (let j = 0; j < m; j++)\n row[j] = this.kernel(xi, Z[j]);\n Knm[i] = row;\n }\n // Optional whitening with R = K_mm^{-1/2} (symmetric via eigen)\n let Phi = Knm;\n let R = [];\n if (ny.whiten) {\n // K_mm (m x m)\n const Kmm = new Array(m);\n for (let i = 0; i < m; i++) {\n const row = new Array(m);\n Kmm[i] = row;\n row[i] = 1;\n for (let j = i + 1; j < m; j++)\n row[j] = this.kernel(Z[i], Z[j]);\n }\n for (let i = 1; i < m; i++)\n for (let j = 0; j < i; j++)\n Kmm[i][j] = Kmm[j][i];\n // R = K_mm^{-1/2} with jitter\n const KmmJ = Matrix.addRegularization(Kmm, ny.jitter);\n R = Matrix.invSqrtSym(KmmJ, ny.jitter);\n Phi = Matrix.multiply(Knm, R); // (N x m)\n }\n // Ridge in feature space: W = (Φᵀ Φ + λ I)^-1 Φᵀ Y (m x K)\n const PhiT = Matrix.transpose(Phi);\n const G = Matrix.multiply(PhiT, Phi); // (m x m)\n const Greg = Matrix.addRegularization(G, this.cfg.ridgeLambda + 1e-10);\n const Rhs = Matrix.multiply(PhiT, Y); // (m x K)\n const W = Matrix.solveCholesky(Greg, Rhs, 1e-12); // (m x K)\n this.Z = Z;\n this.W = W;\n this.R = R; // empty when whiten=false\n this.Xtrain = [];\n this.alpha = [];\n if (this.verbose)\n console.log(`✅ [${this.name}] Nyström fit complete: Z(${m}x${D}), W(${m}x${K})`);\n }\n /* --------------- Features / Predict --------------- */\n featuresFor(X) {\n if (this.cfg.mode === 'exact') {\n const N = this.Xtrain.length, M = X.length;\n const Kqx = new Array(M);\n for (let i = 0; i < M; i++) {\n const row = new Array(N), xi = X[i];\n for (let j = 0; j < N; j++)\n row[j] = this.kernel(xi, this.Xtrain[j]);\n Kqx[i] = row;\n }\n return Kqx;\n }\n // Nyström\n if (!this.Z.length)\n throw new Error('featuresFor: Nyström model not fitted');\n const M = X.length, m = this.Z.length;\n const Kxm = new Array(M);\n for (let i = 0; i < M; i++) {\n const row = new Array(m), xi = X[i];\n for (let j = 0; j < m; j++)\n row[j] = this.kernel(xi, this.Z[j]);\n Kxm[i] = row;\n }\n return this.R.length ? Matrix.multiply(Kxm, this.R) : Kxm;\n }\n /** Raw logits for batch (M x K) */\n predictLogitsFromVectors(X) {\n const Phi = this.featuresFor(X);\n if (this.cfg.mode === 'exact') {\n if (!this.alpha.length)\n throw new Error('predict: exact model not fitted');\n return Matrix.multiply(Phi, this.alpha);\n }\n if (!this.W.length)\n throw new Error('predict: Nyström model not fitted');\n return Matrix.multiply(Phi, this.W);\n }\n /** Probabilities for classification; raw scores for regression */\n predictProbaFromVectors(X) {\n const logits = this.predictLogitsFromVectors(X);\n return this.cfg.task === 'classification' ? 
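// --- Nyström sketch: with whiten=true the feature map is Φ = K_nm · K_mm^{-1/2},
// so Φ·Φᵀ = K_nm · K_mm^{-1} · K_mn, the standard Nyström approximation of the
// full Gram matrix; ridge in the m-dimensional feature space then approximates
// exact kernel ridge at roughly O(N·m²) instead of O(N³). Toy usage:
const kelm = new KernelELM({
  outputDim: 2,
  kernel: { type: 'rbf', gamma: 0.5 },
  mode: 'nystrom',
  nystrom: { m: 4, strategy: 'kmeans++', whiten: true },
});
kelm.fit(
  [[0, 0], [0, 1], [1, 0], [1, 1]],        // X (N x D)
  [[1, 0], [0, 1], [0, 1], [1, 0]]         // Y one-hot; width must equal outputDim
);
console.log(kelm.predictTopKFromVectors([[0.9, 1.0]], 2));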
logits.map(softmaxRow) : logits;\n }\n /** Top-K for classification */\n predictTopKFromVectors(X, k = 5) {\n const P = this.predictProbaFromVectors(X);\n return P.map(row => row.map((p, i) => ({ index: i, prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k));\n }\n /** Embedding for chaining:\n * - exact: Φ = K(X, X_train) (M x N)\n * - nystrom: Φ = K(X, Z) (M x m) or K(X,Z)·R if whiten=true\n */\n getEmbedding(X) {\n return this.featuresFor(X);\n }\n /* -------------------- JSON I/O -------------------- */\n toJSON() {\n const base = { config: Object.assign(Object.assign({}, this.cfg), { __version: 'kelm-2.1.0' }) };\n if (this.cfg.mode === 'exact') {\n return Object.assign(Object.assign({}, base), { X: this.Xtrain, alpha: this.alpha });\n }\n return Object.assign(Object.assign({}, base), { Z: this.Z, W: this.W, R: this.R.length ? this.R : undefined });\n }\n fromJSON(payload) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n const obj = typeof payload === 'string' ? JSON.parse(payload) : payload;\n // Merge config (keep current defaults where missing)\n this.cfg.kernel = Object.assign({}, obj.config.kernel);\n this.cfg.ridgeLambda = (_a = obj.config.ridgeLambda) !== null && _a !== void 0 ? _a : this.cfg.ridgeLambda;\n this.cfg.task = ((_b = obj.config.task) !== null && _b !== void 0 ? _b : this.cfg.task);\n this.cfg.mode = ((_c = obj.config.mode) !== null && _c !== void 0 ? _c : this.cfg.mode);\n this.cfg.nystrom = Object.assign(Object.assign({}, this.cfg.nystrom), ((_d = obj.config.nystrom) !== null && _d !== void 0 ? _d : {}));\n // Restore params\n if (obj.X && obj.alpha) {\n this.Xtrain = obj.X.map(r => r.slice());\n this.alpha = obj.alpha.map(r => r.slice());\n this.Z = [];\n this.W = [];\n this.R = [];\n const D = (_f = (_e = this.Xtrain[0]) === null || _e === void 0 ? void 0 : _e.length) !== null && _f !== void 0 ? _f : 1;\n this.kernel = buildKernel(this.cfg.kernel, D);\n return;\n }\n if (obj.Z && obj.W) {\n this.Z = obj.Z.map(r => r.slice());\n this.W = obj.W.map(r => r.slice());\n this.R = obj.R ? obj.R.map(r => r.slice()) : [];\n this.Xtrain = [];\n this.alpha = [];\n const D = (_h = (_g = this.Z[0]) === null || _g === void 0 ? void 0 : _g.length) !== null && _h !== void 0 ? _h : 1;\n this.kernel = buildKernel(this.cfg.kernel, D);\n return;\n }\n throw new Error('KernelELM.fromJSON: invalid payload');\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// OnlineELM.ts — Online / OS-ELM with RLS updates\n/* ========== utils ========== */\nconst EPS$4 = 1e-10;\nfunction makePRNG(seed = 123456789) {\n let s = seed | 0 || 1;\n return () => {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n return ((s >>> 0) / 0xffffffff);\n };\n}\n/* ========== Online ELM (RLS) ========== */\nclass OnlineELM {\n constructor(cfg) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.inputDim = cfg.inputDim | 0;\n this.outputDim = cfg.outputDim | 0;\n this.hiddenUnits = cfg.hiddenUnits | 0;\n if (this.inputDim <= 0 || this.outputDim <= 0 || this.hiddenUnits <= 0) {\n throw new Error(`OnlineELM: invalid dims (inputDim=${this.inputDim}, outputDim=${this.outputDim}, hidden=${this.hiddenUnits})`);\n }\n this.activation = (_a = cfg.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.ridgeLambda = Math.max((_b = cfg.ridgeLambda) !== null && _b !== void 0 ? _b : 1e-2, EPS$4);\n this.weightInit = (_c = cfg.weightInit) !== null && _c !== void 0 ? 
_c : 'xavier';\n this.forgettingFactor = Math.max(Math.min((_d = cfg.forgettingFactor) !== null && _d !== void 0 ? _d : 1.0, 1.0), 1e-4);\n this.verbose = (_f = (_e = cfg.log) === null || _e === void 0 ? void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false;\n this.modelName = (_h = (_g = cfg.log) === null || _g === void 0 ? void 0 : _g.modelName) !== null && _h !== void 0 ? _h : 'Online ELM';\n const seed = (_j = cfg.seed) !== null && _j !== void 0 ? _j : 1337;\n this.rng = makePRNG(seed);\n this.actFn = Activations.get(this.activation);\n // Random features\n this.W = this.initW(this.hiddenUnits, this.inputDim);\n this.b = this.initB(this.hiddenUnits);\n // Not initialized yet — init() will set these\n this.beta = null;\n this.P = null;\n }\n /* ===== init helpers ===== */\n xavierLimit(fanIn, fanOut) { return Math.sqrt(6 / (fanIn + fanOut)); }\n heLimit(fanIn) { return Math.sqrt(6 / fanIn); }\n initW(rows, cols) {\n let limit = 1;\n if (this.weightInit === 'xavier') {\n limit = this.xavierLimit(cols, rows);\n if (this.verbose)\n console.log(`✨ [${this.modelName}] Xavier W ~ U(±${limit.toFixed(4)})`);\n }\n else if (this.weightInit === 'he') {\n limit = this.heLimit(cols);\n if (this.verbose)\n console.log(`✨ [${this.modelName}] He W ~ U(±${limit.toFixed(4)})`);\n }\n else if (this.verbose) {\n console.log(`✨ [${this.modelName}] Uniform W ~ U(±1)`);\n }\n const rnd = () => (this.rng() * 2 - 1) * limit;\n return Array.from({ length: rows }, () => Array.from({ length: cols }, rnd));\n }\n initB(rows) {\n const rnd = () => (this.rng() * 2 - 1) * 0.01;\n return Array.from({ length: rows }, () => [rnd()]);\n }\n hidden(X) {\n const tempH = Matrix.multiply(X, Matrix.transpose(this.W)); // (n x hidden)\n const f = this.actFn;\n return tempH.map(row => row.map((v, j) => f(v + this.b[j][0])));\n }\n /* ===== public API ===== */\n /** Initialize β and P from a batch (ridge): P0=(HᵀH+λI)^-1, β0=P0 HᵀY */\n init(X0, Y0) {\n if (!(X0 === null || X0 === void 0 ? void 0 : X0.length) || !(Y0 === null || Y0 === void 0 ? void 0 : Y0.length))\n throw new Error('init: empty X0 or Y0');\n if (X0.length !== Y0.length)\n throw new Error(`init: X0 rows ${X0.length} != Y0 rows ${Y0.length}`);\n if (X0[0].length !== this.inputDim)\n throw new Error(`init: X0 cols ${X0[0].length} != inputDim ${this.inputDim}`);\n if (Y0[0].length !== this.outputDim)\n throw new Error(`init: Y0 cols ${Y0[0].length} != outputDim ${this.outputDim}`);\n const H0 = this.hidden(X0); // (n x h)\n const Ht = Matrix.transpose(H0); // (h x n)\n const A = Matrix.addRegularization(Matrix.multiply(Ht, H0), this.ridgeLambda + 1e-10); // (h x h)\n const R = Matrix.multiply(Ht, Y0); // (h x k)\n const P0 = Matrix.solveCholesky(A, Matrix.identity(this.hiddenUnits), 1e-10); // A^-1\n const B0 = Matrix.multiply(P0, R); // (h x k)\n this.P = P0;\n this.beta = B0;\n if (this.verbose)\n console.log(`✅ [${this.modelName}] init: n=${X0.length}, hidden=${this.hiddenUnits}, out=${this.outputDim}`);\n }\n /** If not initialized, init(); otherwise RLS update. */\n fit(X, Y) {\n if (!(X === null || X === void 0 ? void 0 : X.length) || !(Y === null || Y === void 0 ? 
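\n/* Streaming sketch (hypothetical batches): with forgettingFactor ρ < 1, update() below divides P by ρ before each step, inflating the covariance so recent batches dominate; ρ = 1 recovers plain OS-ELM.\n for (const [Xb, Yb] of batches) oelm.fit(Xb, Yb); // init on the first batch, RLS thereafter\n*/\n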
void 0 : Y.length))\n throw new Error('fit: empty X or Y');\n if (X.length !== Y.length)\n throw new Error(`fit: X rows ${X.length} != Y rows ${Y.length}`);\n if (!this.P || !this.beta)\n this.init(X, Y);\n else\n this.update(X, Y);\n }\n /**\n * RLS / OS-ELM update with forgetting ρ:\n * S = I + HPHᵀ\n * K = P Hᵀ S^-1\n * β ← β + K (Y - Hβ)\n * P ← (P - K H P) / ρ\n */\n update(X, Y) {\n if (!(X === null || X === void 0 ? void 0 : X.length) || !(Y === null || Y === void 0 ? void 0 : Y.length))\n throw new Error('update: empty X or Y');\n if (X.length !== Y.length)\n throw new Error(`update: X rows ${X.length} != Y rows ${Y.length}`);\n if (!this.P || !this.beta)\n throw new Error('update: model not initialized (call init() first)');\n const n = X.length;\n const H = this.hidden(X); // (n x h)\n const Ht = Matrix.transpose(H); // (h x n)\n const rho = this.forgettingFactor;\n let P = this.P;\n if (rho < 1.0) {\n // Equivalent to P <- P / ρ (more responsive to new data)\n P = P.map(row => row.map(v => v / rho));\n }\n // S = I + H P Hᵀ (n x n, SPD)\n const HP = Matrix.multiply(H, P); // (n x h)\n const HPHt = Matrix.multiply(HP, Ht); // (n x n)\n const S = Matrix.add(HPHt, Matrix.identity(n));\n const S_inv = Matrix.solveCholesky(S, Matrix.identity(n), 1e-10);\n // K = P Hᵀ S^-1 (h x n)\n const PHt = Matrix.multiply(P, Ht); // (h x n)\n const K = Matrix.multiply(PHt, S_inv); // (h x n)\n // Innovation: (Y - Hβ) (n x k)\n const Hbeta = Matrix.multiply(H, this.beta);\n const innov = Y.map((row, i) => row.map((yij, j) => yij - Hbeta[i][j]));\n // β ← β + K * innov\n const Delta = Matrix.multiply(K, innov); // (h x k)\n this.beta = this.beta.map((row, i) => row.map((bij, j) => bij + Delta[i][j]));\n // P ← P - K H P\n const KH = Matrix.multiply(K, H); // (h x h)\n const KHP = Matrix.multiply(KH, P); // (h x h)\n this.P = P.map((row, i) => row.map((pij, j) => pij - KHP[i][j]));\n if (this.verbose) {\n const diagAvg = this.P.reduce((s, r, i) => s + r[i], 0) / this.P.length;\n console.log(`🔁 [${this.modelName}] update: n=${n}, avg diag(P)≈${diagAvg.toFixed(6)}`);\n }\n }\n /* ===== Prediction ===== */\n logitsFromVectors(X) {\n if (!this.beta)\n throw new Error('predict: model not initialized');\n const H = this.hidden(X);\n return Matrix.multiply(H, this.beta);\n }\n predictLogitsFromVector(x) {\n return this.logitsFromVectors([x])[0];\n }\n predictLogitsFromVectors(X) {\n return this.logitsFromVectors(X);\n }\n predictProbaFromVector(x) {\n return Activations.softmax(this.predictLogitsFromVector(x));\n }\n predictProbaFromVectors(X) {\n return this.predictLogitsFromVectors(X).map(Activations.softmax);\n }\n predictTopKFromVector(x, k = 5) {\n const p = this.predictProbaFromVector(x);\n const kk = Math.max(1, Math.min(k, p.length));\n return p.map((prob, index) => ({ index, prob }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, kk);\n }\n predictTopKFromVectors(X, k = 5) {\n return this.predictProbaFromVectors(X).map(p => {\n const kk = Math.max(1, Math.min(k, p.length));\n return p.map((prob, index) => ({ index, prob }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, kk);\n });\n }\n /* ===== Serialization ===== */\n toJSON(includeP = false) {\n if (!this.beta || !this.P)\n throw new Error('toJSON: model not initialized');\n const cfg = {\n hiddenUnits: this.hiddenUnits,\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n activation: this.activation,\n ridgeLambda: this.ridgeLambda,\n weightInit: this.weightInit,\n forgettingFactor: this.forgettingFactor,\n __version: 
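\n/* Round-trip sketch: loadFromJSON() below validates W, b, B against the receiving instance's dims, so construct the twin with the same shape (Xnext, Ynext are placeholders):\n const snap = oelm.toJSON(true); // includeP = true also keeps P so RLS can resume\n const twin = new OnlineELM({ inputDim: 8, outputDim: 3, hiddenUnits: 64 });\n twin.loadFromJSON(snap); // restores W, b, β (and P when present)\n twin.fit(Xnext, Ynext); // with P restored this updates instead of re-initializing\n*/\n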
'online-elm-1.0.0',\n };\n const o = { W: this.W, b: this.b, B: this.beta, config: cfg };\n if (includeP)\n o.P = this.P;\n return o;\n }\n loadFromJSON(json) {\n var _a;\n const parsed = typeof json === 'string' ? JSON.parse(json) : json;\n const { W, b, B, P, config } = parsed;\n if (!W || !b || !B)\n throw new Error('loadFromJSON: missing W/b/B');\n if (W.length !== this.hiddenUnits || W[0].length !== this.inputDim) {\n throw new Error(`loadFromJSON: mismatched W shape (${W.length}x${W[0].length})`);\n }\n if (b.length !== this.hiddenUnits || b[0].length !== 1) {\n throw new Error(`loadFromJSON: mismatched b shape (${b.length}x${b[0].length})`);\n }\n if (B.length !== this.hiddenUnits || B[0].length !== this.outputDim) {\n throw new Error(`loadFromJSON: mismatched B shape (${B.length}x${B[0].length})`);\n }\n this.W = W;\n this.b = b;\n this.beta = B;\n this.P = P !== null && P !== void 0 ? P : null;\n if (config === null || config === void 0 ? void 0 : config.activation) {\n this.activation = config.activation;\n this.actFn = Activations.get(this.activation); // refresh cache\n }\n if (config === null || config === void 0 ? void 0 : config.ridgeLambda)\n this.ridgeLambda = config.ridgeLambda;\n if (this.verbose)\n console.log(`✅ [${this.modelName}] model loaded (v=${(_a = config === null || config === void 0 ? void 0 : config.__version) !== null && _a !== void 0 ? _a : 'n/a'})`);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMChain.ts — simple encoder pipeline with checks, normalization, and profiling\nfunction l2NormalizeRows$1(M) {\n return M.map(row => {\n let s = 0;\n for (let i = 0; i < row.length; i++)\n s += row[i] * row[i];\n const n = Math.sqrt(s) || 1;\n const inv = 1 / n;\n return row.map(v => v * inv);\n });\n}\nfunction asBatch(x) {\n return Array.isArray(x[0]) ? x : [x];\n}\nfunction fromBatch(y, originalWasVector) {\n var _a;\n return originalWasVector ? ((_a = y[0]) !== null && _a !== void 0 ? _a : []) : y;\n}\nclass ELMChain {\n constructor(encoders = [], opts) {\n var _a, _b, _c, _d, _e;\n this.lastDims = []; // input dim -> stage dims (for summary)\n this.encoders = [...encoders];\n this.opts = {\n normalizeEach: (_a = opts === null || opts === void 0 ? void 0 : opts.normalizeEach) !== null && _a !== void 0 ? _a : false,\n normalizeFinal: (_b = opts === null || opts === void 0 ? void 0 : opts.normalizeFinal) !== null && _b !== void 0 ? _b : false,\n validate: (_c = opts === null || opts === void 0 ? void 0 : opts.validate) !== null && _c !== void 0 ? _c : true,\n strict: (_d = opts === null || opts === void 0 ? void 0 : opts.strict) !== null && _d !== void 0 ? _d : true,\n name: (_e = opts === null || opts === void 0 ? void 0 : opts.name) !== null && _e !== void 0 ? 
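\n/* Usage sketch for ELMChain (an encoder is anything exposing getEmbedding(X); the stub here is hypothetical):\n const stub = { name: 'sum2', getEmbedding: (X) => X.map(r => [r[0] + r[1], r[0] - r[1]]) };\n const chain = new ELMChain([stub], { normalizeFinal: true, name: 'demo' });\n const Z = chain.getEmbedding([[1, 2], [3, 4]]); // 2 x 2 output, rows L2-normalized\n console.log(chain.summary()); // stage dims are filled in after this first run\n*/\n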
_e : 'ELMChain',\n };\n }\n /** Add encoder at end */\n add(encoder) {\n this.encoders.push(encoder);\n }\n /** Insert encoder at position (0..length) */\n insertAt(index, encoder) {\n if (index < 0 || index > this.encoders.length)\n throw new Error('insertAt: index out of range');\n this.encoders.splice(index, 0, encoder);\n }\n /** Remove encoder at index; returns removed or undefined */\n removeAt(index) {\n if (index < 0 || index >= this.encoders.length)\n return undefined;\n return this.encoders.splice(index, 1)[0];\n }\n /** Remove all encoders */\n clear() {\n this.encoders.length = 0;\n this.lastDims.length = 0;\n }\n /** Number of stages */\n length() {\n return this.encoders.length;\n }\n /** Human-friendly overview (dims are filled after the first successful run) */\n summary() {\n const lines = [];\n lines.push(`📦 ${this.opts.name} — ${this.encoders.length} stage(s)`);\n this.encoders.forEach((enc, i) => {\n var _a, _b, _c;\n const nm = (_a = enc.name) !== null && _a !== void 0 ? _a : `Encoder#${i}`;\n const dimIn = (_b = this.lastDims[i]) !== null && _b !== void 0 ? _b : '?';\n const dimOut = (_c = this.lastDims[i + 1]) !== null && _c !== void 0 ? _c : '?';\n lines.push(` ${i}: ${nm} ${dimIn} → ${dimOut}`);\n });\n return lines.join('\\n');\n }\n getEmbedding(input) {\n var _a, _b;\n const wasVector = !Array.isArray(input[0]);\n const X0 = asBatch(input);\n if (this.opts.validate) {\n if (!X0.length || !((_a = X0[0]) === null || _a === void 0 ? void 0 : _a.length))\n throw new Error('ELMChain.getEmbedding: empty input');\n }\n let X = X0;\n this.lastDims = [X0[0].length];\n for (let i = 0; i < this.encoders.length; i++) {\n const enc = this.encoders[i];\n try {\n if (this.opts.validate) {\n // Ensure rows consistent\n const d = X[0].length;\n for (let r = 1; r < X.length; r++) {\n if (X[r].length !== d)\n throw new Error(`Stage ${i} input row ${r} has dim ${X[r].length} != ${d}`);\n }\n }\n let Y = enc.getEmbedding(X);\n if (this.opts.validate) {\n if (!Y.length || !((_b = Y[0]) === null || _b === void 0 ? void 0 : _b.length)) {\n throw new Error(`Stage ${i} produced empty output`);\n }\n }\n if (this.opts.normalizeEach) {\n Y = l2NormalizeRows$1(Y);\n }\n // Record dims for summary\n this.lastDims[i + 1] = Y[0].length;\n X = Y;\n }\n catch (err) {\n if (this.opts.strict)\n throw err;\n // Non-strict: return what we have so far\n return fromBatch(X, wasVector);\n }\n }\n if (this.opts.normalizeFinal && !this.opts.normalizeEach) {\n X = l2NormalizeRows$1(X);\n }\n return fromBatch(X, wasVector);\n }\n /**\n * Run once to collect per-stage timings (ms) and final dims.\n * Returns { timings, dims } where dims[i] is input dim to stage i,\n * dims[i+1] is that stage’s output dim.\n */\n profile(input) {\n let X = asBatch(input);\n const timings = [];\n const dims = [X[0].length];\n for (let i = 0; i < this.encoders.length; i++) {\n const t0 = performance.now();\n X = this.encoders[i].getEmbedding(X);\n const t1 = performance.now();\n timings.push(t1 - t0);\n dims[i + 1] = X[0].length;\n }\n // Don’t mutate options; just return diagnostics\n return { timings, dims };\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// ELMAdapter.ts — unify ELM / OnlineELM as EncoderLike for ELMChain\nfunction assertNonEmptyBatch(X, where) {\n if (!Array.isArray(X) || X.length === 0 || !Array.isArray(X[0]) || X[0].length === 0) {\n throw new Error(`${where}: expected non-empty (N x D) batch`);\n }\n}\nfunction matmulXWtAddB(X, // (N x D)\nW, // (H x D)\nb // (H x 1)\n) {\n var _a, _b, _c, _d, _e;\n const N = X.length, D = X[0].length, H = W.length;\n // quick shape sanity\n if (((_a = W[0]) === null || _a === void 0 ? void 0 : _a.length) !== D)\n throw new Error(`matmulXWtAddB: W is ${W.length}x${(_b = W[0]) === null || _b === void 0 ? void 0 : _b.length}, expected Hx${D}`);\n if (b.length !== H || ((_d = (_c = b[0]) === null || _c === void 0 ? void 0 : _c.length) !== null && _d !== void 0 ? _d : 0) !== 1)\n throw new Error(`matmulXWtAddB: b is ${b.length}x${(_e = b[0]) === null || _e === void 0 ? void 0 : _e.length}, expected Hx1`);\n const out = new Array(N);\n for (let n = 0; n < N; n++) {\n const xn = X[n];\n const row = new Array(H);\n for (let h = 0; h < H; h++) {\n const wh = W[h];\n let s = b[h][0] || 0;\n // unrolled dot\n for (let d = 0; d < D; d++)\n s += xn[d] * wh[d];\n row[h] = s;\n }\n out[n] = row;\n }\n return out;\n}\nclass ELMAdapter {\n constructor(target) {\n var _a, _b;\n this.target = target;\n this.mode = target.type === 'online' ? ((_a = target.mode) !== null && _a !== void 0 ? _a : 'hidden') : 'hidden';\n this.name = (_b = target.name) !== null && _b !== void 0 ? _b : (target.type === 'elm' ? 'ELM' : `OnlineELM(${this.mode})`);\n }\n /** Return embeddings for a batch (N x D) -> (N x H/L) */\n getEmbedding(X) {\n var _a, _b, _c, _d;\n assertNonEmptyBatch(X, `${this.name}.getEmbedding`);\n if (this.target.type === 'elm') {\n const m = this.target.model;\n // ELM already exposes getEmbedding()\n if (typeof m.getEmbedding !== 'function') {\n throw new Error(`${this.name}: underlying ELM lacks getEmbedding(X)`);\n }\n try {\n return m.getEmbedding(X);\n }\n catch (err) {\n // Helpful hint if model wasn’t trained\n if (m.model == null) {\n throw new Error(`${this.name}: model not trained/initialized (call train/trainFromData or load model).`);\n }\n throw err;\n }\n }\n // OnlineELM path\n const o = this.target.model;\n // Guard dims early\n const D = X[0].length;\n if (!Array.isArray(o.W) || ((_a = o.W[0]) === null || _a === void 0 ? void 0 : _a.length) !== D) {\n throw new Error(`${this.name}: input dim ${D} does not match model.W columns ${(_d = (_c = (_b = o.W) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.length) !== null && _d !== void 0 ? _d : 'n/a'}`);\n }\n if (this.mode === 'logits') {\n // Use public logits as an “embedding”\n try {\n return o.predictLogitsFromVectors(X);\n }\n catch (err) {\n if (o.beta == null) {\n throw new Error(`${this.name}: model not initialized (call init()/fit() before logits mode).`);\n }\n throw err;\n }\n }\n // mode === 'hidden' → compute hidden activations: act(X Wᵀ + b)\n const W = o.W;\n const BIAS = o.b;\n const actName = o.activation;\n const act = Activations.get((actName !== null && actName !== void 0 ? 
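\n/* Adapter sketch: expose an OnlineELM as a chain stage in either mode (oelm as above; wrapELM below does the same for a plain ELM):\n const hiddenEnc = new ELMAdapter({ type: 'online', model: oelm, mode: 'hidden' }); // act(X Wᵀ + b)\n const logitEnc = new ELMAdapter({ type: 'online', model: oelm, mode: 'logits' }); // requires init()/fit() first\n const chain2 = new ELMChain([hiddenEnc], { normalizeFinal: true });\n*/\n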
actName : 'relu').toLowerCase());\n const Hpre = matmulXWtAddB(X, W, BIAS);\n // apply activation in-place\n for (let n = 0; n < Hpre.length; n++) {\n const row = Hpre[n];\n for (let j = 0; j < row.length; j++)\n row[j] = act(row[j]);\n }\n return Hpre;\n }\n}\n/* -------- convenience helpers -------- */\nfunction wrapELM(model, name) {\n return new ELMAdapter({ type: 'elm', model, name });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// DeepELM.ts — stacked ELM autoencoders + top ELM classifier\nclass DeepELM {\n constructor(cfg) {\n this.aeLayers = [];\n this.chain = null;\n this.clf = null;\n this.cfg = Object.assign({ clfHiddenUnits: 0, clfActivation: 'linear', clfWeightInit: 'xavier', normalizeEach: false, normalizeFinal: true }, cfg);\n }\n /** Layer-wise unsupervised training with Y=X (autoencoder). Returns transformed X_L. */\n fitAutoencoders(X) {\n var _a, _b, _c, _d;\n let cur = X;\n this.aeLayers = [];\n for (let i = 0; i < this.cfg.layers.length; i++) {\n const spec = this.cfg.layers[i];\n // Minimal ELM config for numeric mode—categories aren’t used by trainFromData:\n const elm = new ELM({\n categories: ['ae'], // placeholder (unused in trainFromData)\n hiddenUnits: spec.hiddenUnits,\n activation: (_a = spec.activation) !== null && _a !== void 0 ? _a : 'relu',\n weightInit: (_b = spec.weightInit) !== null && _b !== void 0 ? _b : 'xavier',\n dropout: (_c = spec.dropout) !== null && _c !== void 0 ? _c : 0,\n log: { modelName: (_d = spec.name) !== null && _d !== void 0 ? _d : `AE#${i + 1}`, verbose: false },\n });\n // Autoencoder: targets are the inputs\n elm.trainFromData(cur, cur);\n this.aeLayers.push(elm);\n // Forward to next layer using hidden activations\n cur = elm.getEmbedding(cur);\n if (this.cfg.normalizeEach) {\n cur = l2NormalizeRows(cur);\n }\n }\n // Build chain for fast forward passes\n this.chain = new ELMChain(this.aeLayers.map((m, i) => {\n const a = wrapELM(m, m['modelName'] || `AE#${i + 1}`);\n return a;\n }), {\n normalizeEach: !!this.cfg.normalizeEach,\n normalizeFinal: !!this.cfg.normalizeFinal,\n name: 'DeepELM-Chain',\n });\n return this.transform(X);\n }\n /** Supervised training of a top classifier ELM on last-layer features. */\n fitClassifier(X, yOneHot) {\n var _a, _b;\n if (!this.chain)\n throw new Error('fitClassifier: call fitAutoencoders() first');\n const Z = this.chain.getEmbedding(X);\n // If clfHiddenUnits === 0, we mimic a “linear readout” by using a very small hidden layer with linear activation.\n const hidden = Math.max(1, this.cfg.clfHiddenUnits || 1);\n this.clf = new ELM({\n categories: Array.from({ length: this.cfg.numClasses }, (_, i) => String(i)),\n hiddenUnits: hidden,\n activation: (_a = this.cfg.clfActivation) !== null && _a !== void 0 ? _a : 'linear',\n weightInit: (_b = this.cfg.clfWeightInit) !== null && _b !== void 0 ? _b : 'xavier',\n log: { modelName: 'DeepELM-Classifier', verbose: false },\n });\n this.clf.trainFromData(Z, yOneHot);\n }\n /** One-shot convenience: train AEs then classifier. */\n fit(X, yOneHot) {\n this.fitAutoencoders(X);\n this.fitClassifier(X, yOneHot);\n }\n /** Forward through stacked AEs (no classifier). */\n transform(X) {\n if (!this.chain)\n throw new Error('transform: model not fitted');\n const Z = this.chain.getEmbedding(X);\n return Z;\n }\n /** Classifier probabilities (softmax) for a batch. 
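\n * Usage sketch (hypothetical data): deep.fit(X, Yonehot) trains the stacked autoencoders and then the top ELM;\n * deep.predictProba(Xq) then returns dense (N x numClasses) rows rebuilt from the classifier's top-K output via topKListToDense below.\n 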
*/\n predictProba(X) {\n if (!this.clf)\n throw new Error('predictProba: classifier not fitted');\n // Reuse existing ELM method on batch:\n const Z = this.transform(X);\n const res = this.clf.predictFromVector(Z, this.cfg.numClasses);\n // predictFromVector returns topK lists; convert back into dense probs when possible\n // If you’d rather have dense probs, expose a new method on ELM to return raw softmax scores for a batch.\n return topKListToDense(res, this.cfg.numClasses);\n }\n /** Utility: export all models for persistence. */\n toJSON() {\n var _a;\n return {\n cfg: this.cfg,\n layers: this.aeLayers.map(m => { var _a; return (_a = m.savedModelJSON) !== null && _a !== void 0 ? _a : JSON.stringify(m.model); }),\n clf: this.clf ? ((_a = this.clf.savedModelJSON) !== null && _a !== void 0 ? _a : JSON.stringify(this.clf.model)) : null,\n __version: 'deep-elm-1.0.0',\n };\n }\n /** Utility: load from exported payload. */\n fromJSON(payload) {\n const { cfg, layers, clf } = payload !== null && payload !== void 0 ? payload : {};\n if (!Array.isArray(layers))\n throw new Error('fromJSON: invalid payload');\n this.cfg = Object.assign(Object.assign({}, this.cfg), cfg);\n this.aeLayers = layers.map((j, i) => {\n const m = new ELM({ categories: ['ae'], hiddenUnits: 1 });\n m.loadModelFromJSON(j);\n return m;\n });\n this.chain = new ELMChain(this.aeLayers.map((m, i) => wrapELM(m, `AE#${i + 1}`)), {\n normalizeEach: !!this.cfg.normalizeEach,\n normalizeFinal: !!this.cfg.normalizeFinal,\n name: 'DeepELM-Chain',\n });\n if (clf) {\n const c = new ELM({ categories: Array.from({ length: this.cfg.numClasses }, (_, i) => String(i)), hiddenUnits: 1 });\n c.loadModelFromJSON(clf);\n this.clf = c;\n }\n }\n}\n/* ---------- helpers ---------- */\nfunction l2NormalizeRows(M) {\n return M.map(r => {\n let s = 0;\n for (let i = 0; i < r.length; i++)\n s += r[i] * r[i];\n const inv = 1 / (Math.sqrt(s) || 1);\n return r.map(v => v * inv);\n });\n}\nfunction topKListToDense(list, K) {\n // Convert the ELM.predictFromVector top-K output back to dense [N x K] probs if needed.\n // (If your ELM exposes a dense “predictProbaFromVectors” for the batch, prefer that.)\n return list.map(row => {\n const out = new Array(K).fill(0);\n for (const { label, prob } of row) {\n const idx = Number(label);\n if (Number.isFinite(idx) && idx >= 0 && idx < K)\n out[idx] = prob;\n }\n return out;\n });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// EmbeddingStore.ts — Powerful in-memory vector store with fast KNN, thresholds, and JSON I/O\nconst EPS$3 = 1e-12;\n/* ================= math utils ================= */\nfunction l2Norm$1(v) {\n let s = 0;\n for (let i = 0; i < v.length; i++)\n s += v[i] * v[i];\n return Math.sqrt(s);\n}\nfunction l1Dist(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.abs(a[i] - b[i]);\n return s;\n}\nfunction dot$3(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction normalizeToUnit(v) {\n const out = new Float32Array(v.length);\n const n = l2Norm$1(v);\n if (n < EPS$3)\n return out; // zero vector → stay zero; cosine with zero returns 0\n const inv = 1 / n;\n for (let i = 0; i < v.length; i++)\n out[i] = v[i] * inv;\n return out;\n}\n/** Quickselect (nth_element) on-place for top-k largest by score. Returns cutoff value index. 
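\n * e.g. quickselectTopK(hits, 3, h => h.score) partitions in average O(n) so the 3 largest-scoring hits occupy hits[0..2] (unordered).\n 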
*/\nfunction quickselectTopK(arr, k, scoreOf) {\n if (k <= 0 || k >= arr.length)\n return arr.length - 1;\n let left = 0, right = arr.length - 1;\n const target = k - 1; // 0-based index of kth largest after partition\n function swap(i, j) {\n const t = arr[i];\n arr[i] = arr[j];\n arr[j] = t;\n }\n function partition(l, r, pivotIdx) {\n const pivotScore = scoreOf(arr[pivotIdx]);\n swap(pivotIdx, r);\n let store = l;\n for (let i = l; i < r; i++) {\n if (scoreOf(arr[i]) > pivotScore) { // \">\" for largest-first\n swap(store, i);\n store++;\n }\n }\n swap(store, r);\n return store;\n }\n while (true) {\n const pivotIdx = Math.floor((left + right) / 2);\n const idx = partition(left, right, pivotIdx);\n if (idx === target)\n return idx;\n if (target < idx)\n right = idx - 1;\n else\n left = idx + 1;\n }\n}\n/* ================= store ================= */\nclass EmbeddingStore {\n constructor(dim, opts) {\n var _a, _b;\n // Data\n this.ids = [];\n this.metas = [];\n this.vecs = []; // if storeUnit=true -> unit vectors; else raw vectors\n // Index\n this.idToIdx = new Map();\n if (!Number.isFinite(dim) || dim <= 0)\n throw new Error(`EmbeddingStore: invalid dim=${dim}`);\n this.dim = dim | 0;\n this.storeUnit = (_a = opts === null || opts === void 0 ? void 0 : opts.storeUnit) !== null && _a !== void 0 ? _a : true;\n this.alsoStoreRaw = (_b = opts === null || opts === void 0 ? void 0 : opts.alsoStoreRaw) !== null && _b !== void 0 ? _b : this.storeUnit; // default: if normalizing, also keep raw so Euclidean is valid\n if ((opts === null || opts === void 0 ? void 0 : opts.capacity) !== undefined) {\n if (!Number.isFinite(opts.capacity) || opts.capacity <= 0)\n throw new Error(`capacity must be > 0`);\n this.capacity = Math.floor(opts.capacity);\n }\n if (this.alsoStoreRaw) {\n this.rawVecs = [];\n this.rawNorms = new Float32Array(0);\n }\n if (!this.storeUnit) {\n // storing raw in vecs → maintain norms for fast cosine\n this.norms = new Float32Array(0);\n }\n }\n /* ========== basic ops ========== */\n size() { return this.ids.length; }\n dimension() { return this.dim; }\n isUnitStored() { return this.storeUnit; }\n keepsRaw() { return !!this.rawVecs; }\n getCapacity() { return this.capacity; }\n setCapacity(capacity) {\n if (capacity === undefined) {\n this.capacity = undefined;\n return;\n }\n if (!Number.isFinite(capacity) || capacity <= 0)\n throw new Error(`capacity must be > 0`);\n this.capacity = Math.floor(capacity);\n this.enforceCapacity();\n }\n clear() {\n this.ids = [];\n this.vecs = [];\n this.metas = [];\n this.idToIdx.clear();\n if (this.rawVecs)\n this.rawVecs = [];\n if (this.norms)\n this.norms = new Float32Array(0);\n if (this.rawNorms)\n this.rawNorms = new Float32Array(0);\n }\n has(id) { return this.idToIdx.has(id); }\n get(id) {\n const idx = this.idToIdx.get(id);\n if (idx === undefined)\n return undefined;\n return {\n id,\n vec: this.vecs[idx],\n raw: this.rawVecs ? this.rawVecs[idx] : undefined,\n meta: this.metas[idx],\n };\n }\n /** Remove by id. Returns true if removed. 
*/\n remove(id) {\n const idx = this.idToIdx.get(id);\n if (idx === undefined)\n return false;\n // capture id, splice arrays\n this.ids.splice(idx, 1);\n this.vecs.splice(idx, 1);\n this.metas.splice(idx, 1);\n if (this.rawVecs)\n this.rawVecs.splice(idx, 1);\n if (this.norms)\n this.norms = this.removeFromNorms(this.norms, idx);\n if (this.rawNorms)\n this.rawNorms = this.removeFromNorms(this.rawNorms, idx);\n this.idToIdx.delete(id);\n this.rebuildIndex(idx);\n return true;\n }\n /** Add or replace an item by id. Returns true if added, false if replaced. */\n upsert(item) {\n var _a;\n const { id, vec, meta } = item;\n if (!id)\n throw new Error('upsert: id is required');\n if (!vec || vec.length !== this.dim) {\n throw new Error(`upsert: vector dim ${(_a = vec === null || vec === void 0 ? void 0 : vec.length) !== null && _a !== void 0 ? _a : 'n/a'} != store dim ${this.dim}`);\n }\n const raw = new Float32Array(vec);\n const unit = this.storeUnit ? normalizeToUnit(raw) : raw;\n const idx = this.idToIdx.get(id);\n if (idx !== undefined) {\n // replace in place\n this.vecs[idx] = unit;\n this.metas[idx] = meta;\n if (this.rawVecs)\n this.rawVecs[idx] = raw;\n if (this.norms && !this.storeUnit)\n this.norms[idx] = l2Norm$1(raw);\n if (this.rawNorms && this.rawVecs)\n this.rawNorms[idx] = l2Norm$1(raw);\n return false;\n }\n else {\n this.ids.push(id);\n this.vecs.push(unit);\n this.metas.push(meta);\n if (this.rawVecs)\n this.rawVecs.push(raw);\n if (this.norms && !this.storeUnit) {\n // append norm\n const n = l2Norm$1(raw);\n const newNorms = new Float32Array(this.ids.length);\n newNorms.set(this.norms, 0);\n newNorms[this.ids.length - 1] = n;\n this.norms = newNorms;\n }\n if (this.rawNorms && this.rawVecs) {\n const n = l2Norm$1(raw);\n const newNorms = new Float32Array(this.ids.length);\n newNorms.set(this.rawNorms, 0);\n newNorms[this.ids.length - 1] = n;\n this.rawNorms = newNorms;\n }\n this.idToIdx.set(id, this.ids.length - 1);\n this.enforceCapacity();\n return true;\n }\n }\n add(item) {\n const added = this.upsert(item);\n if (!added)\n throw new Error(`add: id \"${item.id}\" already exists (use upsert instead)`);\n }\n addAll(items, allowUpsert = true) {\n for (const it of items) {\n if (allowUpsert)\n this.upsert(it);\n else\n this.add(it);\n }\n }\n /** Merge another store (same dim & normalization strategy) into this one. */\n merge(other, allowOverwrite = true) {\n var _a;\n if (other.dimension() !== this.dim)\n throw new Error('merge: dimension mismatch');\n if (other.isUnitStored() !== this.storeUnit)\n throw new Error('merge: normalized flag mismatch');\n if (other.keepsRaw() !== this.keepsRaw())\n throw new Error('merge: raw retention mismatch');\n for (let i = 0; i < other.ids.length; i++) {\n const id = other.ids[i];\n const vec = other.vecs[i];\n const raw = (_a = other.rawVecs) === null || _a === void 0 ? void 0 : _a[i];\n const meta = other.metas[i];\n if (!allowOverwrite && this.has(id))\n continue;\n // Use upsert path, but avoid double-normalizing when both stores have unit vectors:\n this.upsert({ id, vec, meta });\n if (this.rawVecs && raw)\n this.rawVecs[this.idToIdx.get(id)] = new Float32Array(raw);\n }\n }\n /* ========== querying ========== */\n /** Top-K KNN query. For L2/L1 we return NEGATIVE distance so higher is better. 
*/\n query(queryVec, k = 10, opts) {\n var _a, _b, _c, _d, _e, _f;\n if (queryVec.length !== this.dim) {\n throw new Error(`query: vector dim ${queryVec.length} != store dim ${this.dim}`);\n }\n const metric = (_a = opts === null || opts === void 0 ? void 0 : opts.metric) !== null && _a !== void 0 ? _a : 'cosine';\n const filter = opts === null || opts === void 0 ? void 0 : opts.filter;\n const returnVectors = (_b = opts === null || opts === void 0 ? void 0 : opts.returnVectors) !== null && _b !== void 0 ? _b : false;\n const minScore = opts === null || opts === void 0 ? void 0 : opts.minScore;\n const maxDistance = opts === null || opts === void 0 ? void 0 : opts.maxDistance;\n const restrictSet = (opts === null || opts === void 0 ? void 0 : opts.restrictToIds) ? new Set(opts.restrictToIds) : undefined;\n let q;\n let qNorm = 0;\n if (metric === 'cosine') {\n // cosine → normalize query; stored data either unit (fast) or raw (use cached norms)\n q = normalizeToUnit(queryVec);\n }\n else if (metric === 'dot') {\n q = new Float32Array(queryVec);\n qNorm = l2Norm$1(q); // only used for potential future scoring transforms\n }\n else {\n // L2/L1 use RAW query\n q = new Float32Array(queryVec);\n qNorm = l2Norm$1(q);\n }\n const hits = [];\n const N = this.vecs.length;\n // helpers\n const pushHit = (i, score) => {\n if (restrictSet && !restrictSet.has(this.ids[i]))\n return;\n if (filter && !filter(this.metas[i], this.ids[i]))\n return;\n // Apply thresholds\n if (metric === 'euclidean' || metric === 'manhattan') {\n const dist = -score; // score is negative distance\n if (maxDistance !== undefined && dist > maxDistance)\n return;\n }\n else {\n if (minScore !== undefined && score < minScore)\n return;\n }\n hits.push(returnVectors\n ? { id: this.ids[i], score, index: i, meta: this.metas[i], vec: this.vecs[i] }\n : { id: this.ids[i], score, index: i, meta: this.metas[i] });\n };\n if (metric === 'cosine') {\n if (this.storeUnit) {\n // both unit → score = dot\n for (let i = 0; i < N; i++) {\n const s = dot$3(q, this.vecs[i]);\n pushHit(i, s);\n }\n }\n else {\n // stored raw in vecs → use cached norms (if available) for cos = dot / (||q||*||v||)\n if (!this.norms || this.norms.length !== N) {\n // build norms on-demand once\n this.norms = new Float32Array(N);\n for (let i = 0; i < N; i++)\n this.norms[i] = l2Norm$1(this.vecs[i]);\n }\n const qn = l2Norm$1(q) || 1; // guard\n for (let i = 0; i < N; i++) {\n const dn = this.norms[i] || 1;\n const s = dn < EPS$3 ? 0 : dot$3(q, this.vecs[i]) / (qn * dn);\n pushHit(i, s);\n }\n }\n }\n else if (metric === 'dot') {\n for (let i = 0; i < N; i++) {\n const s = dot$3(q, this.storeUnit ? this.vecs[i] : this.vecs[i]); // same storage\n pushHit(i, s);\n }\n }\n else if (metric === 'euclidean') {\n // Need RAW vectors\n const base = (_c = this.rawVecs) !== null && _c !== void 0 ? _c : (!this.storeUnit ? this.vecs : null);\n if (!base)\n throw new Error('euclidean query requires raw vectors; create store with alsoStoreRaw=true or storeUnit=false');\n // Use fast formula: ||q - v|| = sqrt(||q||^2 + ||v||^2 - 2 q·v)\n const vNorms = this.rawVecs ? ((_d = this.rawNorms) !== null && _d !== void 0 ? _d : this.buildRawNorms()) :\n (_e = this.norms) !== null && _e !== void 0 ? 
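\n/* Query sketch for EmbeddingStore (hypothetical 4-d vectors; euclidean needs raw vectors, hence alsoStoreRaw):\n const store = new EmbeddingStore(4, { storeUnit: true, alsoStoreRaw: true, capacity: 10000 });\n store.upsert({ id: 'a', vec: [1, 0, 0, 0], meta: { tag: 'x' } });\n const hits = store.query([0.9, 0.1, 0, 0], 5, { metric: 'cosine', minScore: 0.3 }); // [{ id, score, index, meta }]\n const near = store.query([0.9, 0.1, 0, 0], 5, { metric: 'euclidean', maxDistance: 1.5 }); // score = -distance\n*/\n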
_e : this.buildNorms();\n const q2 = qNorm * qNorm;\n for (let i = 0; i < N; i++) {\n const d2 = Math.max(q2 + vNorms[i] * vNorms[i] - 2 * dot$3(q, base[i]), 0);\n const dist = Math.sqrt(d2);\n pushHit(i, -dist); // NEGATIVE distance so higher is better\n }\n }\n else { // 'manhattan'\n const base = (_f = this.rawVecs) !== null && _f !== void 0 ? _f : (!this.storeUnit ? this.vecs : null);\n if (!base)\n throw new Error('manhattan query requires raw vectors; create store with alsoStoreRaw=true or storeUnit=false');\n for (let i = 0; i < N; i++) {\n const dist = l1Dist(q, base[i]);\n pushHit(i, -dist); // NEGATIVE distance\n }\n }\n if (hits.length === 0)\n return [];\n const kk = Math.max(1, Math.min(k, hits.length));\n // Use quickselect to avoid full O(n log n) sort\n quickselectTopK(hits, kk, (h) => h.score);\n // Now sort just the top-K region for stable ordering\n hits\n .slice(0, kk)\n .sort((a, b) => b.score - a.score)\n .forEach((h, i) => (hits[i] = h));\n return hits.slice(0, kk);\n }\n /** Batch query helper. Returns array of results aligned to input queries. */\n queryBatch(queries, k = 10, opts) {\n return queries.map(q => this.query(q, k, opts));\n }\n /** Convenience: query by id */\n queryById(id, k = 10, opts) {\n var _a;\n const rec = this.get(id);\n if (!rec)\n return [];\n const use = ((opts === null || opts === void 0 ? void 0 : opts.metric) === 'euclidean' || (opts === null || opts === void 0 ? void 0 : opts.metric) === 'manhattan')\n ? ((_a = rec.raw) !== null && _a !== void 0 ? _a : rec.vec) // prefer raw for distance\n : rec.vec;\n return this.query(use, k, opts);\n }\n /* ========== export / import ========== */\n toJSON() {\n const includeRaw = !!this.rawVecs;\n return {\n dim: this.dim,\n normalized: this.storeUnit,\n alsoStoredRaw: includeRaw,\n capacity: this.capacity,\n items: this.ids.map((id, i) => ({\n id,\n vec: Array.from(this.vecs[i]),\n raw: includeRaw ? Array.from(this.rawVecs[i]) : undefined,\n meta: this.metas[i],\n })),\n __version: 'embedding-store-2.0.0',\n };\n }\n static fromJSON(obj) {\n var _a, _b;\n const parsed = typeof obj === 'string' ? JSON.parse(obj) : obj;\n if (!parsed || !parsed.dim || !Array.isArray(parsed.items)) {\n throw new Error('EmbeddingStore.fromJSON: invalid payload');\n }\n const store = new EmbeddingStore(parsed.dim, {\n storeUnit: parsed.normalized,\n capacity: parsed.capacity,\n alsoStoreRaw: (_a = parsed.alsoStoredRaw) !== null && _a !== void 0 ? _a : false,\n });\n for (const it of parsed.items) {\n if (!it || typeof it.id !== 'string' || !Array.isArray(it.vec))\n continue;\n if (it.vec.length !== parsed.dim) {\n throw new Error(`fromJSON: vector dim ${it.vec.length} != dim ${parsed.dim} for id ${it.id}`);\n }\n // Use public API to keep norms consistent\n store.upsert({ id: it.id, vec: (_b = it.raw) !== null && _b !== void 0 ? _b : it.vec, meta: it.meta });\n // If payload includes both vec and raw, ensure both sides are *exactly* respected\n if (store.storeUnit && store.rawVecs && it.raw) {\n const idx = store.idToIdx.get(it.id);\n store.rawVecs[idx] = new Float32Array(it.raw);\n if (store.rawNorms) {\n const newNorms = new Float32Array(store.size());\n newNorms.set(store.rawNorms, 0);\n newNorms[idx] = l2Norm$1(store.rawVecs[idx]);\n store.rawNorms = newNorms;\n }\n }\n else if (!store.storeUnit && it.vec) ;\n }\n return store;\n }\n /* ========== diagnostics / utils ========== */\n /** Estimate memory footprint in bytes (arrays only; metadata excluded). 
*/\n memoryUsageBytes() {\n const f32 = 4;\n let bytes = 0;\n for (const v of this.vecs)\n bytes += v.length * f32;\n if (this.rawVecs)\n for (const v of this.rawVecs)\n bytes += v.length * f32;\n if (this.norms)\n bytes += this.norms.length * f32;\n if (this.rawNorms)\n bytes += this.rawNorms.length * f32;\n // ids + metas are JS objects; not included\n return bytes;\n }\n /** Re-normalize all vectors in-place (useful if you bulk-updated raw storage). */\n reNormalizeAll() {\n if (!this.storeUnit)\n return; // nothing to do\n for (let i = 0; i < this.vecs.length; i++) {\n const raw = this.rawVecs ? this.rawVecs[i] : this.vecs[i];\n this.vecs[i] = normalizeToUnit(raw);\n }\n }\n /** Iterate over all items */\n *entries() {\n var _a;\n for (let i = 0; i < this.ids.length; i++) {\n yield { id: this.ids[i], vec: this.vecs[i], raw: (_a = this.rawVecs) === null || _a === void 0 ? void 0 : _a[i], meta: this.metas[i] };\n }\n }\n /* ========== internals ========== */\n removeFromNorms(src, removeIdx) {\n const out = new Float32Array(src.length - 1);\n if (removeIdx > 0)\n out.set(src.subarray(0, removeIdx), 0);\n if (removeIdx < src.length - 1)\n out.set(src.subarray(removeIdx + 1), removeIdx);\n return out;\n }\n /** After a splice at 'start', rebuild id→index for shifted tail */\n rebuildIndex(start = 0) {\n if (start <= 0) {\n this.idToIdx.clear();\n for (let i = 0; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n return;\n }\n for (let i = start; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n }\n /** Enforce capacity by evicting oldest items (front of arrays) */\n enforceCapacity() {\n if (this.capacity === undefined)\n return;\n while (this.ids.length > this.capacity) {\n const removedId = this.ids[0];\n // shift( ) is O(n); for very large stores consider a circular buffer\n this.ids.shift();\n this.vecs.shift();\n this.metas.shift();\n if (this.rawVecs)\n this.rawVecs.shift();\n if (this.norms)\n this.norms = this.removeFromNorms(this.norms, 0);\n if (this.rawNorms)\n this.rawNorms = this.removeFromNorms(this.rawNorms, 0);\n this.idToIdx.delete(removedId);\n // rebuild full index (ids shifted)\n this.idToIdx.clear();\n for (let i = 0; i < this.ids.length; i++)\n this.idToIdx.set(this.ids[i], i);\n }\n }\n buildNorms() {\n const out = new Float32Array(this.vecs.length);\n for (let i = 0; i < this.vecs.length; i++)\n out[i] = l2Norm$1(this.vecs[i]);\n this.norms = out;\n return out;\n }\n buildRawNorms() {\n if (!this.rawVecs)\n throw new Error('no raw vectors to build norms for');\n const out = new Float32Array(this.rawVecs.length);\n for (let i = 0; i < this.rawVecs.length; i++)\n out[i] = l2Norm$1(this.rawVecs[i]);\n this.rawNorms = out;\n return out;\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// Evaluation.ts — Classification & Regression metrics (no deps)\nconst EPS$2 = 1e-12;\n/* =========================\n * Helpers\n * ========================= */\nfunction isOneHot(Y) {\n return Array.isArray(Y[0]);\n}\nfunction argmax$1(a) {\n let i = 0;\n for (let k = 1; k < a.length; k++)\n if (a[k] > a[i])\n i = k;\n return i;\n}\nfunction toIndexLabels(yTrue, yPred, numClasses) {\n let yTrueIdx;\n let yPredIdx;\n if (isOneHot(yTrue))\n yTrueIdx = yTrue.map(argmax$1);\n else\n yTrueIdx = yTrue;\n if (isOneHot(yPred))\n yPredIdx = yPred.map(argmax$1);\n else\n yPredIdx = yPred;\n const C = 1 + Math.max(Math.max(...yTrueIdx), Math.max(...yPredIdx));\n return { yTrueIdx, yPredIdx, C };\n}\n/* =========================\n * Confusion matrix\n * ========================= */\nfunction confusionMatrixFromIndices(yTrueIdx, yPredIdx, C) {\n if (yTrueIdx.length !== yPredIdx.length) {\n throw new Error(`confusionMatrix: length mismatch (${yTrueIdx.length} vs ${yPredIdx.length})`);\n }\n const classes = C !== null && C !== void 0 ? C : 1 + Math.max(Math.max(...yTrueIdx), Math.max(...yPredIdx));\n const M = Array.from({ length: classes }, () => new Array(classes).fill(0));\n for (let i = 0; i < yTrueIdx.length; i++) {\n const r = yTrueIdx[i] | 0;\n const c = yPredIdx[i] | 0;\n if (r >= 0 && r < classes && c >= 0 && c < classes)\n M[r][c]++;\n }\n return M;\n}\n/* =========================\n * Per-class metrics\n * ========================= */\nfunction perClassFromCM(M, labels) {\n var _a;\n const C = M.length;\n const totals = new Array(C).fill(0);\n const colTotals = new Array(C).fill(0);\n let N = 0;\n for (let i = 0; i < C; i++) {\n let rsum = 0;\n for (let j = 0; j < C; j++) {\n rsum += M[i][j];\n colTotals[j] += M[i][j];\n N += M[i][j];\n }\n totals[i] = rsum;\n }\n const perClass = [];\n for (let k = 0; k < C; k++) {\n const tp = M[k][k];\n const fp = colTotals[k] - tp;\n const fn = totals[k] - tp;\n const tn = N - tp - fp - fn;\n const precision = tp / (tp + fp + EPS$2);\n const recall = tp / (tp + fn + EPS$2);\n const f1 = (2 * precision * recall) / (precision + recall + EPS$2);\n perClass.push({\n label: (_a = labels === null || labels === void 0 ? void 0 : labels[k]) !== null && _a !== void 0 ? 
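\n/* Worked micro-example: confusionMatrixFromIndices([0,0,0,0,1,1,1,1,1,1], [0,0,0,1,0,0,1,1,1,1]) gives M = [[3,1],[2,4]];\n perClassFromCM(M) then yields class 0: tp=3, fp=2, fn=1 → precision ≈ 0.6, recall ≈ 0.75, f1 ≈ 0.667 (up to EPS). */\n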
_a : k,\n support: totals[k],\n tp, fp, fn, tn,\n precision, recall, f1\n });\n }\n return perClass;\n}\n/* =========================\n * Averages\n * ========================= */\nfunction averagesFromPerClass(per, accuracy) {\n const C = per.length;\n let sumP = 0, sumR = 0, sumF = 0;\n let sumWP = 0, sumWR = 0, sumWF = 0, total = 0;\n let tp = 0, fp = 0, fn = 0; // for micro\n for (const c of per) {\n sumP += c.precision;\n sumR += c.recall;\n sumF += c.f1;\n sumWP += c.precision * c.support;\n sumWR += c.recall * c.support;\n sumWF += c.f1 * c.support;\n total += c.support;\n tp += c.tp;\n fp += c.fp;\n fn += c.fn;\n }\n const microP = tp / (tp + fp + EPS$2);\n const microR = tp / (tp + fn + EPS$2);\n const microF = (2 * microP * microR) / (microP + microR + EPS$2);\n return {\n accuracy,\n macroPrecision: sumP / C,\n macroRecall: sumR / C,\n macroF1: sumF / C,\n microPrecision: microP,\n microRecall: microR,\n microF1: microF,\n weightedPrecision: sumWP / (total + EPS$2),\n weightedRecall: sumWR / (total + EPS$2),\n weightedF1: sumWF / (total + EPS$2)\n };\n}\n/* =========================\n * Log loss / cross-entropy\n * ========================= */\nfunction logLoss(yTrue, yPredProba) {\n if (!isOneHot(yTrue) || !isOneHot(yPredProba)) {\n throw new Error('logLoss expects one-hot ground truth and probability matrix (N x C).');\n }\n const Y = yTrue;\n const P = yPredProba;\n if (Y.length !== P.length)\n throw new Error('logLoss: length mismatch');\n const N = Y.length;\n let sum = 0;\n for (let i = 0; i < N; i++) {\n const yi = Y[i];\n const pi = P[i];\n if (yi.length !== pi.length)\n throw new Error('logLoss: class count mismatch');\n for (let j = 0; j < yi.length; j++) {\n if (yi[j] > 0) {\n const p = Math.min(Math.max(pi[j], EPS$2), 1 - EPS$2);\n sum += -Math.log(p);\n }\n }\n }\n return sum / N;\n}\n/* =========================\n * Top-K accuracy\n * ========================= */\nfunction topKAccuracy(yTrueIdx, yPredProba, k = 5) {\n const N = yTrueIdx.length;\n let correct = 0;\n for (let i = 0; i < N; i++) {\n const probs = yPredProba[i];\n const idx = probs.map((p, j) => j).sort((a, b) => probs[b] - probs[a]).slice(0, Math.max(1, Math.min(k, probs.length)));\n if (idx.includes(yTrueIdx[i]))\n correct++;\n }\n return correct / N;\n}\nfunction pairSortByScore(yTrue01, yScore) {\n const pairs = yScore.map((s, i) => [s, yTrue01[i]]);\n pairs.sort((a, b) => b[0] - a[0]);\n return pairs;\n}\nfunction binaryROC(yTrue01, yScore) {\n if (yTrue01.length !== yScore.length)\n throw new Error('binaryROC: length mismatch');\n const pairs = pairSortByScore(yTrue01, yScore);\n const P = yTrue01.reduce((s, v) => s + (v ? 1 : 0), 0);\n const N = yTrue01.length - P;\n let tp = 0, fp = 0;\n const tpr = [0], fpr = [0], thr = [Infinity];\n for (let i = 0; i < pairs.length; i++) {\n const [score, y] = pairs[i];\n if (y === 1)\n tp++;\n else\n fp++;\n tpr.push(tp / (P + EPS$2));\n fpr.push(fp / (N + EPS$2));\n thr.push(score);\n }\n tpr.push(1);\n fpr.push(1);\n thr.push(-Infinity);\n // Trapezoidal AUC\n let auc = 0;\n for (let i = 1; i < tpr.length; i++) {\n const dx = fpr[i] - fpr[i - 1];\n const yAvg = (tpr[i] + tpr[i - 1]) / 2;\n auc += dx * yAvg;\n }\n return { thresholds: thr, tpr, fpr, auc };\n}\nfunction binaryPR(yTrue01, yScore) {\n if (yTrue01.length !== yScore.length)\n throw new Error('binaryPR: length mismatch');\n const pairs = pairSortByScore(yTrue01, yScore);\n const P = yTrue01.reduce((s, v) => s + (v ? 
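\n/* ROC sketch on a toy, perfectly separable score set:\n const { auc } = binaryROC([1, 1, 0, 0], [0.9, 0.8, 0.3, 0.1]); // trapezoidal AUC ≈ 1.0 (EPS guards aside)\n*/\n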
1 : 0), 0);\n let tp = 0, fp = 0;\n const precision = [], recall = [], thr = [];\n // Add starting point\n precision.push(P > 0 ? P / (P + 0) : 1);\n recall.push(0);\n thr.push(Infinity);\n for (let i = 0; i < pairs.length; i++) {\n const [score, y] = pairs[i];\n if (y === 1)\n tp++;\n else\n fp++;\n const prec = tp / (tp + fp + EPS$2);\n const rec = tp / (P + EPS$2);\n precision.push(prec);\n recall.push(rec);\n thr.push(score);\n }\n // AUPRC via trapezoid over recall axis\n let auc = 0;\n for (let i = 1; i < precision.length; i++) {\n const dx = recall[i] - recall[i - 1];\n const yAvg = (precision[i] + precision[i - 1]) / 2;\n auc += dx * yAvg;\n }\n return { thresholds: thr, precision, recall, auc };\n}\n/* =========================\n * Main: evaluate classification\n * ========================= */\n/**\n * Evaluate multi-class classification.\n * - yTrue can be indices (N) or one-hot (N x C)\n * - yPred can be indices (N) or probabilities (N x C)\n * - If yPred are probabilities, we also compute logLoss and optional topK.\n */\nfunction evaluateClassification(yTrue, yPred, opts) {\n const labels = opts === null || opts === void 0 ? void 0 : opts.labels;\n const { yTrueIdx, yPredIdx, C } = toIndexLabels(yTrue, yPred);\n const M = confusionMatrixFromIndices(yTrueIdx, yPredIdx, C);\n const per = perClassFromCM(M, labels);\n const correct = yTrueIdx.reduce((s, yt, i) => s + (yt === yPredIdx[i] ? 1 : 0), 0);\n const accuracy = correct / yTrueIdx.length;\n const averages = averagesFromPerClass(per, accuracy);\n // Optional extras if we have probabilities\n if (isOneHot(yTrue) && isOneHot(yPred)) {\n try {\n averages.logLoss = logLoss(yTrue, yPred);\n if ((opts === null || opts === void 0 ? void 0 : opts.topK) && opts.topK > 1) {\n averages.topKAccuracy = topKAccuracy(yTrueIdx, yPred, opts.topK);\n }\n }\n catch ( /* ignore extras if shapes disagree */_a) { /* ignore extras if shapes disagree */ }\n }\n return { confusionMatrix: M, perClass: per, averages };\n}\n/* =========================\n * Regression\n * ========================= */\nfunction evaluateRegression(yTrue, yPred) {\n const Y = Array.isArray(yTrue[0]) ? yTrue : yTrue.map(v => [v]);\n const P = Array.isArray(yPred[0]) ? 
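\n/* Usage sketch: one-hot truth plus probability rows unlock the logLoss / top-K extras.\n const rep = evaluateClassification([[1, 0], [0, 1], [0, 1]], [[0.8, 0.2], [0.3, 0.7], [0.6, 0.4]], { labels: ['neg', 'pos'], topK: 2 });\n // rep.averages.accuracy = 2/3; logLoss and topKAccuracy (= 1 with only two classes) are filled in\n console.log(formatClassificationReport(rep)); // formatter defined further below\n*/\n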
yPred : yPred.map(v => [v]);\n if (Y.length !== P.length)\n throw new Error('evaluateRegression: length mismatch');\n const N = Y.length;\n const D = Y[0].length;\n const perOutput = [];\n let sumMSE = 0, sumMAE = 0, sumR2 = 0;\n for (let d = 0; d < D; d++) {\n let mse = 0, mae = 0;\n // mean of Y[:,d]\n let mean = 0;\n for (let i = 0; i < N; i++)\n mean += Y[i][d];\n mean /= N;\n let ssTot = 0, ssRes = 0;\n for (let i = 0; i < N; i++) {\n const y = Y[i][d], p = P[i][d];\n const e = y - p;\n mse += e * e;\n mae += Math.abs(e);\n ssRes += e * e;\n const dy = y - mean;\n ssTot += dy * dy;\n }\n mse /= N;\n const rmse = Math.sqrt(mse);\n mae /= N;\n const r2 = 1 - (ssRes / (ssTot + EPS$2));\n perOutput.push({ index: d, mse, rmse, mae, r2 });\n sumMSE += mse;\n sumMAE += mae;\n sumR2 += r2;\n }\n const mse = sumMSE / D;\n const rmse = Math.sqrt(mse);\n const mae = sumMAE / D;\n const r2 = sumR2 / D;\n return { perOutput, mse, rmse, mae, r2 };\n}\n/* =========================\n * Pretty report (optional)\n * ========================= */\nfunction formatClassificationReport(rep) {\n const lines = [];\n lines.push('Class\\tSupport\\tPrecision\\tRecall\\tF1');\n for (const c of rep.perClass) {\n lines.push(`${c.label}\\t${c.support}\\t${c.precision.toFixed(4)}\\t${c.recall.toFixed(4)}\\t${c.f1.toFixed(4)}`);\n }\n const a = rep.averages;\n lines.push('');\n lines.push(`Accuracy:\\t${a.accuracy.toFixed(4)}`);\n lines.push(`Macro P/R/F1:\\t${a.macroPrecision.toFixed(4)}\\t${a.macroRecall.toFixed(4)}\\t${a.macroF1.toFixed(4)}`);\n lines.push(`Micro P/R/F1:\\t${a.microPrecision.toFixed(4)}\\t${a.microRecall.toFixed(4)}\\t${a.microF1.toFixed(4)}`);\n lines.push(`Weighted P/R/F1:\\t${a.weightedPrecision.toFixed(4)}\\t${a.weightedRecall.toFixed(4)}\\t${a.weightedF1.toFixed(4)}`);\n if (a.logLoss !== undefined)\n lines.push(`LogLoss:\\t${a.logLoss.toFixed(6)}`);\n if (a.topKAccuracy !== undefined)\n lines.push(`TopK Acc:\\t${a.topKAccuracy.toFixed(4)}`);\n return lines.join('\\n');\n}\n\nconst EPS$1 = 1e-12;\n/* ---------- math helpers ---------- */\nfunction l2Norm(v) {\n let s = 0;\n for (let i = 0; i < v.length; i++)\n s += v[i] * v[i];\n return Math.sqrt(s);\n}\nfunction normalize(v) {\n const out = new Float32Array(v.length);\n const n = l2Norm(v);\n if (n < EPS$1)\n return out; // keep zeros; cosine with zero gives 0\n const inv = 1 / n;\n for (let i = 0; i < v.length; i++)\n out[i] = v[i] * inv;\n return out;\n}\nfunction dot$2(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/* ---------- main evaluation ---------- */\nfunction evaluateEnsembleRetrieval(queries, reference, chains, k, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n const metric = (_a = options === null || options === void 0 ? void 0 : options.metric) !== null && _a !== void 0 ? _a : \"cosine\";\n const aggregate = (_b = options === null || options === void 0 ? void 0 : options.aggregate) !== null && _b !== void 0 ? _b : \"mean\";\n const weights = options === null || options === void 0 ? void 0 : options.weights;\n const topK = Math.max(1, (_d = (_c = options === null || options === void 0 ? void 0 : options.k) !== null && _c !== void 0 ? _c : k) !== null && _d !== void 0 ? _d : 5);\n const ignoreUnlabeled = (_e = options === null || options === void 0 ? void 0 : options.ignoreUnlabeledQueries) !== null && _e !== void 0 ? _e : true;\n const reportPerLabel = (_f = options === null || options === void 0 ? 
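\n/* Regression sketch for evaluateRegression above: scalar arrays are auto-wrapped to (N x 1) and metrics averaged across output dims.\n const { rmse, r2 } = evaluateRegression([1, 2, 3], [0.9, 2.1, 3.2]); // rmse ≈ 0.141, r2 ≈ 0.97\n*/\n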
void 0 : options.reportPerLabel) !== null && _f !== void 0 ? _f : false;\n const returnRankings = (_g = options === null || options === void 0 ? void 0 : options.returnRankings) !== null && _g !== void 0 ? _g : false;\n const logEvery = Math.max(1, (_h = options === null || options === void 0 ? void 0 : options.logEvery) !== null && _h !== void 0 ? _h : 10);\n if (chains.length === 0) {\n throw new Error(\"evaluateEnsembleRetrieval: 'chains' must be non-empty.\");\n }\n if (aggregate === \"weighted\") {\n if (!weights || weights.length !== chains.length) {\n throw new Error(`aggregate='weighted' requires weights.length === chains.length`);\n }\n // normalize weights to sum=1 for interpretability\n const sumW = weights.reduce((s, w) => s + w, 0) || 1;\n for (let i = 0; i < weights.length; i++)\n weights[i] = weights[i] / sumW;\n }\n console.log(\"🔹 Precomputing embeddings...\");\n // Pull raw embeddings from each chain\n const chainQueryEmb = [];\n const chainRefEmb = [];\n for (let c = 0; c < chains.length; c++) {\n const qMat = chains[c].getEmbedding(queries.map(q => {\n const v = q.embedding;\n if (!v || v.length === 0)\n throw new Error(`Query ${c} has empty embedding`);\n return Array.from(v);\n }));\n const rMat = chains[c].getEmbedding(reference.map(r => {\n const v = r.embedding;\n if (!v || v.length === 0)\n throw new Error(`Reference has empty embedding`);\n return Array.from(v);\n }));\n // Validate dims & normalize if cosine\n const qArr = qMat.map(row => Float32Array.from(row));\n const rArr = rMat.map(row => Float32Array.from(row));\n if (metric === \"cosine\") {\n chainQueryEmb.push(qArr.map(normalize));\n chainRefEmb.push(rArr.map(normalize));\n }\n else {\n chainQueryEmb.push(qArr);\n chainRefEmb.push(rArr);\n }\n // Basic safety: check dimensions match across Q/R for this chain\n const dimQ = (_k = (_j = qArr[0]) === null || _j === void 0 ? void 0 : _j.length) !== null && _k !== void 0 ? _k : 0;\n const dimR = (_m = (_l = rArr[0]) === null || _l === void 0 ? void 0 : _l.length) !== null && _m !== void 0 ? _m : 0;\n if (dimQ === 0 || dimR === 0 || dimQ !== dimR) {\n throw new Error(`Chain ${c}: query/ref embedding dims mismatch (${dimQ} vs ${dimR})`);\n }\n }\n console.log(\"✅ Precomputation complete. Starting retrieval evaluation...\");\n let hitsAt1 = 0, hitsAtK = 0, reciprocalRanks = 0;\n let used = 0;\n const perLabelRaw = {};\n const rankings = [];\n for (let i = 0; i < queries.length; i++) {\n if (i % logEvery === 0)\n console.log(`🔍 Query ${i + 1}/${queries.length}`);\n const correctLabel = ((_o = queries[i].metadata.label) !== null && _o !== void 0 ? _o : \"\").toString();\n if (!correctLabel && ignoreUnlabeled) {\n continue; // skip this query entirely\n }\n // Accumulate ensemble scores per reference\n // We keep (label, score) per reference j\n const scores = new Array(reference.length);\n for (let j = 0; j < reference.length; j++) {\n let sAgg;\n if (aggregate === \"max\") {\n // Take max across chains\n let sMax = -Infinity;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n const s = metric === \"cosine\" || metric === \"dot\" ? dot$2(q, r) : dot$2(q, r); // only cosine/dot supported\n if (s > sMax)\n sMax = s;\n }\n sAgg = sMax;\n }\n else if (aggregate === \"sum\") {\n let sSum = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sSum += (metric === \"cosine\" || metric === \"dot\") ? 
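\n/* Sketch for evaluateEnsembleRetrieval (queries/reference items carry a precomputed .embedding and a .metadata.label; chainA/chainB are placeholder ELMChains):\n const res = evaluateEnsembleRetrieval(queries, reference, [chainA, chainB], 5, { aggregate: 'weighted', weights: [2, 1], reportPerLabel: true });\n // weights are renormalized to sum 1; res exposes usedQueries, recallAt1, recallAtK, mrr (and perLabel here)\n*/\n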
dot$2(q, r) : dot$2(q, r);\n }\n sAgg = sSum;\n }\n else if (aggregate === \"weighted\") {\n let sW = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sW += ((metric === \"cosine\" || metric === \"dot\") ? dot$2(q, r) : dot$2(q, r)) * weights[c];\n }\n sAgg = sW;\n }\n else { // \"mean\"\n let sSum = 0;\n for (let c = 0; c < chains.length; c++) {\n const q = chainQueryEmb[c][i];\n const r = chainRefEmb[c][j];\n sSum += (metric === \"cosine\" || metric === \"dot\") ? dot$2(q, r) : dot$2(q, r);\n }\n sAgg = sSum / chains.length;\n }\n scores[j] = {\n label: ((_p = reference[j].metadata.label) !== null && _p !== void 0 ? _p : \"\").toString(),\n score: sAgg\n };\n }\n // Sort by score desc\n scores.sort((a, b) => b.score - a.score);\n const rankedLabels = scores.map(s => s.label);\n // Update metrics\n const r1 = rankedLabels[0] === correctLabel ? 1 : 0;\n const rK = rankedLabels.slice(0, topK).includes(correctLabel) ? 1 : 0;\n const rank = rankedLabels.indexOf(correctLabel);\n const rr = rank === -1 ? 0 : 1 / (rank + 1);\n hitsAt1 += r1;\n hitsAtK += rK;\n reciprocalRanks += rr;\n used++;\n if (reportPerLabel) {\n const bucket = (_q = perLabelRaw[correctLabel]) !== null && _q !== void 0 ? _q : (perLabelRaw[correctLabel] = { count: 0, hitsAt1: 0, hitsAtK: 0, mrrSum: 0 });\n bucket.count++;\n bucket.hitsAt1 += r1;\n bucket.hitsAtK += rK;\n bucket.mrrSum += rr;\n }\n if (returnRankings) {\n rankings.push({\n queryIndex: i,\n queryId: queries[i].id,\n label: correctLabel,\n topK: scores.slice(0, topK),\n correctRank: rank\n });\n }\n }\n const denom = used || 1;\n const result = {\n usedQueries: used,\n recallAt1: hitsAt1 / denom,\n recallAtK: hitsAtK / denom,\n mrr: reciprocalRanks / denom\n };\n if (reportPerLabel) {\n const out = {};\n for (const [label, s] of Object.entries(perLabelRaw)) {\n out[label] = {\n support: s.count,\n recallAt1: s.hitsAt1 / (s.count || 1),\n recallAtK: s.hitsAtK / (s.count || 1),\n mrr: s.mrrSum / (s.count || 1)\n };\n }\n result.perLabel = out;\n }\n if (returnRankings)\n result.rankings = rankings;\n return result;\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// src/workers/ELMWorkerClient.ts\nclass ELMWorkerClient {\n constructor(worker) {\n this.pending = new Map();\n this.worker = worker;\n this.worker.onmessage = (ev) => {\n var _a;\n const msg = ev.data;\n // Progress event\n if ((msg === null || msg === void 0 ? void 0 : msg.type) === 'progress' && (msg === null || msg === void 0 ? void 0 : msg.id)) {\n const pend = this.pending.get(msg.id);\n (_a = pend === null || pend === void 0 ? void 0 : pend.onProgress) === null || _a === void 0 ? void 0 : _a.call(pend, msg);\n return;\n }\n // RPC response\n const id = msg === null || msg === void 0 ? 
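\n/* RPC sketch (hypothetical: assumes a worker script that answers the elm.* actions this client posts; X, Y are placeholders):\n const client = new ELMWorkerClient(new Worker('elm-worker.js'));\n await client.initELM({ categories: ['a', 'b'], hiddenUnits: 32 });\n await client.elmTrainFromData(X, Y, {}, (p) => console.log('progress', p)); // onProgress taps 'progress' messages by id\n const top5 = await client.elmPredict('hello', 5);\n*/\n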
void 0 : msg.id;\n if (!id)\n return;\n const pend = this.pending.get(id);\n if (!pend)\n return;\n this.pending.delete(id);\n if (msg.ok)\n pend.resolve(msg.result);\n else\n pend.reject(new Error(msg.error));\n };\n }\n call(action, payload, onProgress) {\n const id = Math.random().toString(36).slice(2);\n return new Promise((resolve, reject) => {\n this.pending.set(id, { resolve, reject, onProgress });\n this.worker.postMessage({ id, action, payload });\n });\n }\n // -------- lifecycle --------\n getKind() { return this.call('getKind'); }\n dispose() { return this.call('dispose'); }\n setVerbose(verbose) { return this.call('setVerbose', { verbose }); }\n // -------- ELM --------\n initELM(config) { return this.call('initELM', config); }\n elmTrain(opts, onProgress) {\n return this.call('elm.train', opts, onProgress);\n }\n elmTrainFromData(X, Y, options, onProgress) {\n return this.call('elm.trainFromData', { X, Y, options }, onProgress);\n }\n elmPredict(text, topK = 5) { return this.call('elm.predict', { text, topK }); }\n elmPredictFromVector(X, topK = 5) { return this.call('elm.predictFromVector', { X, topK }); }\n elmGetEmbedding(X) { return this.call('elm.getEmbedding', { X }); }\n elmToJSON() { return this.call('elm.toJSON'); }\n elmLoadJSON(json) { return this.call('elm.loadJSON', { json }); }\n // -------- OnlineELM --------\n initOnlineELM(config) { return this.call('initOnlineELM', config); }\n oelmInit(X0, Y0) { return this.call('oelm.init', { X0, Y0 }); }\n oelmFit(X, Y) { return this.call('oelm.fit', { X, Y }); }\n oelmUpdate(X, Y) { return this.call('oelm.update', { X, Y }); }\n oelmLogits(X) { return this.call('oelm.logits', { X }); }\n oelmToJSON() { return this.call('oelm.toJSON'); }\n oelmLoadJSON(json) { return this.call('oelm.loadJSON', { json }); }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass TFIDF {\n constructor(corpusDocs) {\n this.termFrequency = {};\n this.inverseDocFreq = {};\n this.wordsInDoc = [];\n this.processedWords = [];\n this.scores = {};\n this.corpus = \"\";\n this.corpus = corpusDocs.join(\" \");\n const wordsFinal = [];\n const re = /[^a-zA-Z0-9]+/g;\n corpusDocs.forEach(doc => {\n const tokens = doc.split(/\\s+/);\n tokens.forEach(word => {\n const cleaned = word.replace(re, \" \");\n wordsFinal.push(...cleaned.split(/\\s+/).filter(Boolean));\n });\n });\n this.wordsInDoc = wordsFinal;\n this.processedWords = TFIDF.processWords(wordsFinal);\n // Compute term frequency\n this.processedWords.forEach(token => {\n this.termFrequency[token] = (this.termFrequency[token] || 0) + 1;\n });\n // Compute inverse document frequency\n for (const term in this.termFrequency) {\n const count = TFIDF.countDocsContainingTerm(corpusDocs, term);\n this.inverseDocFreq[term] = Math.log(corpusDocs.length / (1 + count));\n }\n }\n static countDocsContainingTerm(corpusDocs, term) {\n return corpusDocs.reduce((acc, doc) => (doc.includes(term) ? 
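\n/* Keyword sketch for TFIDF (toy corpus; call calculateScores() before extracting):\n const tfidf = new TFIDF(['go func channels', 'go select struct']);\n tfidf.calculateScores(); // tf * idf over unigrams plus generated bi/trigrams\n const top5 = tfidf.extractKeywords(5); // { term: score } for the 5 highest\n*/\n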
acc + 1 : acc), 0);\n }\n static processWords(words) {\n const filtered = TFIDF.removeStopWordsAndStem(words).map(w => TFIDF.lemmatize(w));\n const bigrams = TFIDF.generateNGrams(filtered, 2);\n const trigrams = TFIDF.generateNGrams(filtered, 3);\n return [...filtered, ...bigrams, ...trigrams];\n }\n static removeStopWordsAndStem(words) {\n const stopWords = new Set([\n \"a\", \"and\", \"the\", \"is\", \"to\", \"of\", \"in\", \"it\", \"that\", \"you\",\n \"this\", \"for\", \"on\", \"are\", \"with\", \"as\", \"be\", \"by\", \"at\", \"from\",\n \"or\", \"an\", \"but\", \"not\", \"we\"\n ]);\n return words.filter(w => !stopWords.has(w)).map(w => TFIDF.advancedStem(w));\n }\n static advancedStem(word) {\n const programmingKeywords = new Set([\n \"func\", \"package\", \"import\", \"interface\", \"go\",\n \"goroutine\", \"channel\", \"select\", \"struct\",\n \"map\", \"slice\", \"var\", \"const\", \"type\",\n \"defer\", \"fallthrough\"\n ]);\n if (programmingKeywords.has(word))\n return word;\n const suffixes = [\"es\", \"ed\", \"ing\", \"s\", \"ly\", \"ment\", \"ness\", \"ity\", \"ism\", \"er\"];\n for (const suffix of suffixes) {\n if (word.endsWith(suffix)) {\n if (suffix === \"es\" && word.length > 2 && word[word.length - 3] === \"i\") {\n return word.slice(0, -2);\n }\n return word.slice(0, -suffix.length);\n }\n }\n return word;\n }\n static lemmatize(word) {\n const rules = {\n execute: \"execute\",\n running: \"run\",\n returns: \"return\",\n defined: \"define\",\n compiles: \"compile\",\n calls: \"call\",\n creating: \"create\",\n invoke: \"invoke\",\n declares: \"declare\",\n references: \"reference\",\n implements: \"implement\",\n utilizes: \"utilize\",\n tests: \"test\",\n loops: \"loop\",\n deletes: \"delete\",\n functions: \"function\"\n };\n if (rules[word])\n return rules[word];\n if (word.endsWith(\"ing\"))\n return word.slice(0, -3);\n if (word.endsWith(\"ed\"))\n return word.slice(0, -2);\n return word;\n }\n static generateNGrams(tokens, n) {\n if (tokens.length < n)\n return [];\n const ngrams = [];\n for (let i = 0; i <= tokens.length - n; i++) {\n ngrams.push(tokens.slice(i, i + n).join(\" \"));\n }\n return ngrams;\n }\n calculateScores() {\n const totalWords = this.processedWords.length;\n const scores = {};\n this.processedWords.forEach(token => {\n const tf = this.termFrequency[token] || 0;\n scores[token] = (tf / totalWords) * (this.inverseDocFreq[token] || 0);\n });\n this.scores = scores;\n return scores;\n }\n extractKeywords(topN) {\n const entries = Object.entries(this.scores).sort((a, b) => b[1] - a[1]);\n return Object.fromEntries(entries.slice(0, topN));\n }\n processedWordsIndex(word) {\n return this.processedWords.indexOf(word);\n }\n}\nclass TFIDFVectorizer {\n constructor(docs, maxVocabSize = 2000) {\n this.docTexts = docs;\n this.tfidf = new TFIDF(docs);\n // Collect all unique terms with frequencies\n const termFreq = {};\n docs.forEach(doc => {\n const tokens = doc.split(/\\s+/);\n const cleaned = tokens.map(t => t.replace(/[^a-zA-Z0-9]+/g, \"\"));\n const processed = TFIDF.processWords(cleaned);\n processed.forEach(t => {\n termFreq[t] = (termFreq[t] || 0) + 1;\n });\n });\n // Sort terms by frequency descending\n const sortedTerms = Object.entries(termFreq)\n .sort((a, b) => b[1] - a[1])\n .slice(0, maxVocabSize)\n .map(([term]) => term);\n this.vocabulary = sortedTerms;\n console.log(`✅ TFIDFVectorizer vocabulary capped at: ${this.vocabulary.length} terms.`);\n }\n /**\n * Returns the dense TFIDF vector for a given document text.\n */\n 
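// Worked example for the TFIDF class above (inputs invented): a token scores tf · idf with
// tf = count/totalTerms and idf = ln(N / (1 + docFreq)), so a term occurring in two of three
// docs gets idf = ln(3/3) = 0 and drops out, while rarer unigrams/bigrams keep positive weight.
const docs = ['go func channel', 'go struct map', 'func select defer'];
const tfidf = new TFIDF(docs);
tfidf.calculateScores();
console.log(tfidf.extractKeywords(5)); // e.g. 'channel', 'struct', 'go func' — but not 'go'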
vectorize(doc) {\n const tokens = doc.split(/\\s+/);\n const cleaned = tokens.map(t => t.replace(/[^a-zA-Z0-9]+/g, \"\"));\n const processed = TFIDF.processWords(cleaned);\n // Compute term frequency in this document\n const termFreq = {};\n processed.forEach(token => {\n termFreq[token] = (termFreq[token] || 0) + 1;\n });\n const totalTerms = processed.length;\n return this.vocabulary.map(term => {\n const tf = totalTerms > 0 ? (termFreq[term] || 0) / totalTerms : 0;\n const idf = this.tfidf.inverseDocFreq[term] || 0;\n return tf * idf;\n });\n }\n /**\n * Returns vectors for all original training docs.\n */\n vectorizeAll() {\n return this.docTexts.map(doc => this.vectorize(doc));\n }\n /**\n * Optional L2 normalization utility.\n */\n static l2normalize(vec) {\n const norm = Math.sqrt(vec.reduce((s, x) => s + x * x, 0));\n return norm === 0 ? vec : vec.map(x => x / norm);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\nclass KNN {\n /**\n * Compute cosine similarity between two numeric vectors.\n */\n static cosineSimilarity(vec1, vec2) {\n let dot = 0, norm1 = 0, norm2 = 0;\n for (let i = 0; i < vec1.length; i++) {\n dot += vec1[i] * vec2[i];\n norm1 += vec1[i] * vec1[i];\n norm2 += vec2[i] * vec2[i];\n }\n if (norm1 === 0 || norm2 === 0)\n return 0;\n return dot / (Math.sqrt(norm1) * Math.sqrt(norm2));\n }\n /**\n * Compute Euclidean distance between two numeric vectors.\n */\n static euclideanDistance(vec1, vec2) {\n let sum = 0;\n for (let i = 0; i < vec1.length; i++) {\n const diff = vec1[i] - vec2[i];\n sum += diff * diff;\n }\n return Math.sqrt(sum);\n }\n /**\n * Find k nearest neighbors.\n * @param queryVec - Query vector\n * @param dataset - Dataset to search\n * @param k - Number of neighbors\n * @param topX - Number of top results to return\n * @param metric - Similarity metric\n */\n static find(queryVec, dataset, k = 5, topX = 3, metric = \"cosine\") {\n const similarities = dataset.map((item, idx) => {\n let score;\n if (metric === \"cosine\") {\n score = this.cosineSimilarity(queryVec, item.vector);\n }\n else {\n // For Euclidean, invert distance so higher = closer\n const dist = this.euclideanDistance(queryVec, item.vector);\n score = -dist;\n }\n return { index: idx, score };\n });\n similarities.sort((a, b) => b.score - a.score);\n const labelWeights = {};\n for (let i = 0; i < Math.min(k, similarities.length); i++) {\n const label = dataset[similarities[i].index].label;\n const weight = similarities[i].score;\n labelWeights[label] = (labelWeights[label] || 0) + weight;\n }\n const weightedLabels = Object.entries(labelWeights)\n .map(([label, weight]) => ({ label, weight }))\n .sort((a, b) => b.weight - a.weight);\n return weightedLabels.slice(0, topX);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// BindUI.ts - Utility to bind ELM model to HTML inputs and outputs\nfunction bindAutocompleteUI({ model, inputElement, outputElement, topK = 5 }) {\n inputElement.addEventListener('input', () => {\n const typed = inputElement.value.trim();\n if (typed.length === 0) {\n outputElement.innerHTML = '<em>Start typing...</em>';\n return;\n }\n try {\n const results = model.predict(typed, topK);\n outputElement.innerHTML = results.map(r => `\n <div><strong>${r.label}</strong>: ${(r.prob * 100).toFixed(1)}%</div>\n `).join('');\n }\n catch (e) {\n const message = e instanceof Error ? 
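// Toy usage sketch for KNN.find above: it ranks by cosine (or negated Euclidean) similarity
// and lets the k nearest neighbors vote with their score as weight, so two moderately similar
// 'a' items outvote one dissimilar 'b' item. Vectors below are invented values.
const dataset = [
    { vector: [1, 0],     label: 'a' },
    { vector: [0.9, 0.1], label: 'a' },
    { vector: [0, 1],     label: 'b' },
];
console.log(KNN.find([1, 0.05], dataset, /*k*/ 3, /*topX*/ 2, 'cosine'));
// → [{ label: 'a', weight: ≈2.0 }, { label: 'b', weight: ≈0.05 }]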
e.message : 'Unknown error';\n outputElement.innerHTML = `<span style=\"color: red;\">Error: ${message}</span>`;\n }\n });\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// Presets.ts — Reusable configuration presets for ELM (updated for new ELMConfig union)\n/**\n * NOTE:\n * - These are TEXT presets (token-mode). They set `useTokenizer: true`.\n * - If you need char-level, create an inline config where `useTokenizer: false`\n * and pass it directly to ELM (numeric presets generally need an explicit inputSize).\n */\n/** English token-level preset */\nconst EnglishTokenPreset = {\n useTokenizer: true,\n maxLen: 20,\n charSet: 'abcdefghijklmnopqrstuvwxyz',\n tokenizerDelimiter: /[\\s,.;!?()\\[\\]{}\"']+/};\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ✅ AutoComplete.ts — ELM | KernelELM (Nyström+whiten) | OnlineELM\n// Fixes:\n// • Avoids union narrowing on EnglishTokenPreset by shimming preset fields (no ExtendedELMConfig maxLen error)\n// • activation typed as Activation (not string)\n// • Removed non-existent \"task\" option in trainFromData()\n/** Safe accessor for preset fields (avoids type errors on ExtendedELMConfig) */\nconst PRESET = (() => {\n var _a, _b, _c, _d;\n const p = EnglishTokenPreset;\n return {\n maxLen: (_a = p === null || p === void 0 ? void 0 : p.maxLen) !== null && _a !== void 0 ? _a : 30,\n charSet: (_b = p === null || p === void 0 ? void 0 : p.charSet) !== null && _b !== void 0 ? _b : 'abcdefghijklmnopqrstuvwxyz',\n useTokenizer: (_c = p === null || p === void 0 ? void 0 : p.useTokenizer) !== null && _c !== void 0 ? _c : true,\n tokenizerDelimiter: (_d = p === null || p === void 0 ? void 0 : p.tokenizerDelimiter) !== null && _d !== void 0 ? _d : /\\s+/\n };\n})();\nfunction oneHot(idx, n) {\n const v = new Array(n).fill(0);\n if (idx >= 0 && idx < n)\n v[idx] = 1;\n return v;\n}\nfunction sortTopK(labels, probs, k) {\n return probs\n .map((p, i) => ({ label: labels[i], prob: p }))\n .sort((a, b) => b.prob - a.prob)\n .slice(0, k);\n}\nclass AutoComplete {\n constructor(pairs, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5;\n this.trainPairs = pairs;\n this.activation = (_a = options.activation) !== null && _a !== void 0 ? _a : 'relu';\n this.engine = (_b = options.engine) !== null && _b !== void 0 ? _b : 'elm';\n this.topKDefault = (_c = options.topK) !== null && _c !== void 0 ? _c : 5;\n // Labels\n this.categories = Array.from(new Set(pairs.map(p => p.label)));\n // Text → numeric encoder (Kernel/Online need numeric; ELM can also consume numeric directly)\n this.encoder = new UniversalEncoder({\n charSet: PRESET.charSet,\n maxLen: PRESET.maxLen,\n useTokenizer: PRESET.useTokenizer,\n tokenizerDelimiter: PRESET.tokenizerDelimiter,\n mode: (PRESET.useTokenizer ? 'token' : 'char'),\n });\n const hiddenUnits = (_d = options.hiddenUnits) !== null && _d !== void 0 ? _d : 128;\n const ridgeLambda = (_e = options.ridgeLambda) !== null && _e !== void 0 ? _e : 1e-2;\n const weightInit = (_f = options.weightInit) !== null && _f !== void 0 ? _f : 'xavier';\n const verbose = (_g = options.verbose) !== null && _g !== void 0 ? _g : false;\n if (this.engine === 'kernel') {\n const D = this.encoder.getVectorSize();\n const ktype = (_j = (_h = options.kernel) === null || _h === void 0 ? void 0 : _h.type) !== null && _j !== void 0 ? _j : 'rbf';\n const kernel = ktype === 'poly'\n ? 
{ type: 'poly', gamma: (_l = (_k = options.kernel) === null || _k === void 0 ? void 0 : _k.gamma) !== null && _l !== void 0 ? _l : (1 / Math.max(1, D)), degree: (_o = (_m = options.kernel) === null || _m === void 0 ? void 0 : _m.degree) !== null && _o !== void 0 ? _o : 2, coef0: (_q = (_p = options.kernel) === null || _p === void 0 ? void 0 : _p.coef0) !== null && _q !== void 0 ? _q : 1 }\n : ktype === 'linear'\n ? { type: 'linear' }\n : ktype === 'laplacian'\n ? { type: 'laplacian', gamma: (_s = (_r = options.kernel) === null || _r === void 0 ? void 0 : _r.gamma) !== null && _s !== void 0 ? _s : (1 / Math.max(1, D)) }\n : { type: 'rbf', gamma: (_u = (_t = options.kernel) === null || _t === void 0 ? void 0 : _t.gamma) !== null && _u !== void 0 ? _u : (1 / Math.max(1, D)) };\n this.model = new KernelELM({\n outputDim: this.categories.length,\n kernel,\n ridgeLambda,\n task: 'classification',\n mode: 'nystrom',\n nystrom: {\n m: (_v = options.kernel) === null || _v === void 0 ? void 0 : _v.m,\n strategy: (_x = (_w = options.kernel) === null || _w === void 0 ? void 0 : _w.strategy) !== null && _x !== void 0 ? _x : 'uniform',\n seed: (_z = (_y = options.kernel) === null || _y === void 0 ? void 0 : _y.seed) !== null && _z !== void 0 ? _z : 1337,\n preset: (_0 = options.kernel) === null || _0 === void 0 ? void 0 : _0.preset,\n whiten: (_2 = (_1 = options.kernel) === null || _1 === void 0 ? void 0 : _1.whiten) !== null && _2 !== void 0 ? _2 : true,\n jitter: (_4 = (_3 = options.kernel) === null || _3 === void 0 ? void 0 : _3.jitter) !== null && _4 !== void 0 ? _4 : 1e-10,\n },\n log: { modelName: 'AutoComplete-KELM', verbose }\n });\n }\n else if (this.engine === 'online') {\n const inputDim = this.encoder.getVectorSize();\n this.model = new OnlineELM({\n inputDim,\n outputDim: this.categories.length,\n hiddenUnits,\n activation: this.activation,\n ridgeLambda,\n weightInit: (_5 = weightInit) !== null && _5 !== void 0 ? _5 : 'he',\n forgettingFactor: 0.997,\n log: { modelName: 'AutoComplete-OnlineELM', verbose }\n });\n }\n else {\n // Classic ELM — use TextConfig branch explicitly\n this.model = new ELM({\n categories: this.categories,\n hiddenUnits,\n activation: this.activation,\n ridgeLambda,\n weightInit: weightInit === 'he' ? 'xavier' : weightInit, // map 'he' to 'xavier' if needed\n // Text branch fields:\n useTokenizer: true,\n maxLen: PRESET.maxLen,\n charSet: PRESET.charSet,\n tokenizerDelimiter: PRESET.tokenizerDelimiter,\n // Logging / export\n metrics: options.metrics,\n log: { modelName: 'AutoComplete', verbose },\n exportFileName: options.exportFileName\n });\n }\n // Bind UI to a small adapter that calls our predict()\n bindAutocompleteUI({\n model: {\n predict: (text, k = this.topKDefault) => this.predict(text, k)\n },\n inputElement: options.inputElement,\n outputElement: options.outputElement,\n topK: options.topK\n });\n }\n /* ============= Training ============= */\n train() {\n // Build numeric X/Y\n const X = [];\n const Y = [];\n for (const { input, label } of this.trainPairs) {\n const vec = this.encoder.normalize(this.encoder.encode(input));\n const idx = this.categories.indexOf(label);\n if (idx === -1)\n continue;\n X.push(vec);\n Y.push(oneHot(idx, this.categories.length));\n }\n if (this.engine === 'kernel') {\n this.model.fit(X, Y);\n return;\n }\n if (this.engine === 'online') {\n this.model.init(X, Y); // then .update() for new batches\n return;\n }\n // Classic ELM — options: { reuseWeights?, weights? 
}; do NOT pass \"task\"\n this.model.trainFromData(X, Y);\n }\n /* ============= Prediction ============= */\n predict(input, topN = 1) {\n const k = Math.max(1, topN);\n if (this.engine === 'elm') {\n const out = this.model.predict(input, k);\n return out.map(p => ({ completion: p.label, prob: p.prob }));\n }\n const x = this.encoder.normalize(this.encoder.encode(input));\n if (this.engine === 'kernel') {\n const probs = this.model.predictProbaFromVectors([x])[0];\n return sortTopK(this.categories, probs, k).map(p => ({ completion: p.label, prob: p.prob }));\n }\n const probs = this.model.predictProbaFromVector(x);\n return sortTopK(this.categories, probs, k).map(p => ({ completion: p.label, prob: p.prob }));\n }\n /* ============= Persistence ============= */\n getModel() { return this.model; }\n loadModelFromJSON(json) {\n if (this.model.fromJSON) {\n this.model.fromJSON(json);\n }\n else if (this.model.loadModelFromJSON) {\n this.model.loadModelFromJSON(json);\n }\n else if (this.model.loadFromJSON) {\n this.model.loadFromJSON(json);\n }\n else {\n console.warn('No compatible load method found on model.');\n }\n }\n saveModelAsJSONFile(filename = 'model.json') {\n let payload;\n if (this.model.toJSON) {\n payload = this.model.toJSON(true); // OnlineELM supports includeP; KernelELM ignores extra arg\n }\n else if (this.model.savedModelJSON) {\n payload = this.model.savedModelJSON;\n }\n else {\n console.warn('No compatible toJSON/savedModelJSON on model; skipping export.');\n return;\n }\n const blob = new Blob([typeof payload === 'string' ? payload : JSON.stringify(payload, null, 2)], { type: 'application/json' });\n const url = URL.createObjectURL(blob);\n const a = document.createElement('a');\n a.href = url;\n a.download = filename;\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n URL.revokeObjectURL(url);\n }\n /* ============= Evaluation helpers ============= */\n top1Accuracy(pairs) {\n var _a;\n let correct = 0;\n for (const { input, label } of pairs) {\n const [pred] = this.predict(input, 1);\n if (((_a = pred === null || pred === void 0 ? void 0 : pred.completion) === null || _a === void 0 ? void 0 : _a.toLowerCase().trim()) === label.toLowerCase().trim())\n correct++;\n }\n return correct / Math.max(1, pairs.length);\n }\n crossEntropy(pairs) {\n var _a;\n let total = 0;\n for (const { input, label } of pairs) {\n const preds = this.predict(input, this.categories.length);\n const match = preds.find(p => p.completion.toLowerCase().trim() === label.toLowerCase().trim());\n const prob = (_a = match === null || match === void 0 ? void 0 : match.prob) !== null && _a !== void 0 ? _a : 1e-12;\n total += -Math.log(prob);\n }\n return total / Math.max(1, pairs.length);\n }\n /** Internal CE via W/b/β (only for classic ELM); others fall back to external CE. 
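// End-to-end sketch for the AutoComplete wrapper above (DOM ids and training pairs invented).
// The constructor wires an ELM ('elm' | 'kernel' | 'online') to input/output elements via
// bindAutocompleteUI; train() one-hots the pair labels and predict() returns ranked
// { completion, prob } results.
const ac = new AutoComplete(
    [
        { input: 'reset my password', label: 'account' },
        { input: 'download my invoice', label: 'billing' },
    ],
    {
        engine: 'elm',
        hiddenUnits: 128,
        topK: 3,
        inputElement: document.querySelector('#query') as HTMLInputElement,
        outputElement: document.querySelector('#suggestions') as HTMLElement,
    },
);
ac.train();
console.log(ac.predict('password reset', 2));
console.log(ac.top1Accuracy([{ input: 'reset my password', label: 'account' }]));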
*/\n internalCrossEntropy(verbose = false) {\n if (!(this.model instanceof ELM)) {\n const ce = this.crossEntropy(this.trainPairs);\n if (verbose)\n console.log(`📏 Internal CE not applicable to ${this.engine}; external CE: ${ce.toFixed(4)}`);\n return ce;\n }\n const elm = this.model;\n const { model, categories } = elm;\n if (!model) {\n if (verbose)\n console.warn('⚠️ Cannot compute internal cross-entropy: model not trained.');\n return Infinity;\n }\n const X = [];\n const Y = [];\n for (const { input, label } of this.trainPairs) {\n const vec = this.encoder.normalize(this.encoder.encode(input));\n const idx = categories.indexOf(label);\n if (idx === -1)\n continue;\n X.push(vec);\n Y.push(oneHot(idx, categories.length));\n }\n const { W, b, beta } = model; // W: hidden x in, b: hidden x 1, beta: hidden x out\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const act = Activations.get(this.activation);\n const H = tempH.map(row => row.map((v, j) => act(v + b[j][0])));\n const logits = Matrix.multiply(H, beta);\n const probs = logits.map(row => Activations.softmax(row));\n let total = 0;\n for (let i = 0; i < Y.length; i++) {\n for (let j = 0; j < Y[0].length; j++) {\n if (Y[i][j] === 1) {\n const p = Math.min(Math.max(probs[i][j], 1e-15), 1 - 1e-15);\n total += -Math.log(p);\n }\n }\n }\n const ce = total / Math.max(1, Y.length);\n if (verbose)\n console.log(`📏 Internal Cross-Entropy (ELM W/b/β): ${ce.toFixed(4)}`);\n return ce;\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// CharacterLangEncoderELM.ts — robust char/token text encoder on top of ELM\n// Upgrades:\n// • Safe preset extraction (no union-type errors on maxLen/charSet)\n// • Proper (inputs, labels) training via trainFromData()\n// • Hidden-layer embeddings via elm.getEmbedding() (with matrix fallback)\n// • Batch encode(), JSON I/O passthrough, gentle logging\n// • Activation typed, no reliance on private fields\n// If you have a preset (optional). Otherwise remove this import.\n// import { EnglishTokenPreset } from '../config/Presets';\nclass CharacterLangEncoderELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n // Make sure we have the basics\n if (!config.hiddenUnits) {\n throw new Error('CharacterLangEncoderELM requires hiddenUnits');\n }\n // Activation defaults to 'relu' if not provided\n this.activation = (_a = config.activation) !== null && _a !== void 0 ? _a : 'relu';\n // Safely coerce into a *text* config (avoid NumericConfig branch)\n // We do not assume a preset exists; provide conservative defaults.\n const textMaxLen = (_b = config === null || config === void 0 ? void 0 : config.maxLen) !== null && _b !== void 0 ? _b : 64;\n const textCharSet = (_c = config === null || config === void 0 ? void 0 : config.charSet) !== null && _c !== void 0 ? _c : 'abcdefghijklmnopqrstuvwxyz';\n const textTokDelim = (_d = config === null || config === void 0 ? void 0 : config.tokenizerDelimiter) !== null && _d !== void 0 ? _d : /\\s+/;\n // Merge into a TEXT-leaning config object.\n // NOTE: We keep categories if provided, but we will override them in train() from labels.\n this.config = Object.assign(Object.assign({}, config), { \n // Force text branch:\n useTokenizer: true, maxLen: textMaxLen, charSet: textCharSet, tokenizerDelimiter: textTokDelim, activation: this.activation, \n // Make logging robust:\n log: {\n modelName: 'CharacterLangEncoderELM',\n verbose: (_f = (_e = config.log) === null || _e === void 0 ? 
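// Restatement of the arithmetic in internalCrossEntropy above (the helper name is ours, not
// an exported API): for the classic ELM it recomputes the forward pass from stored parameters,
// H = act(X·Wᵀ + b), P = softmax(H·β), CE = −(1/n) Σᵢ log P[i, yᵢ], with each probability
// clamped to [1e-15, 1 − 1e-15] before the log. The per-row step, isolated:
function rowCrossEntropy(probRow: number[], yIndex: number): number {
    const p = Math.min(Math.max(probRow[yIndex], 1e-15), 1 - 1e-15);
    return -Math.log(p);
}
// Averaged over rows this reproduces the value logged as "Internal Cross-Entropy".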
void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = config.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? _k : 'info',\n } }); // cast to any to avoid union friction\n this.elm = new ELM(this.config);\n // Forward thresholds/export if present\n if (config.metrics) {\n this.elm.metrics = config.metrics;\n }\n if (this.config.exportFileName) {\n this.elm.config.exportFileName = this.config.exportFileName;\n }\n }\n /**\n * Train on parallel arrays: inputs (strings) + labels (strings).\n * We:\n * • dedupe labels → categories\n * • encode inputs with the ELM’s text encoder\n * • one-hot the labels\n * • call trainFromData(X, Y)\n */\n train(inputStrings, labels) {\n var _a, _b, _c, _d;\n if (!(inputStrings === null || inputStrings === void 0 ? void 0 : inputStrings.length) || !(labels === null || labels === void 0 ? void 0 : labels.length) || inputStrings.length !== labels.length) {\n throw new Error('train() expects equal-length inputStrings and labels');\n }\n // Build categories from labels\n const categories = Array.from(new Set(labels));\n this.elm.setCategories(categories);\n // Get the encoder (support getEncoder() or .encoder)\n const enc = (_c = (_b = (_a = this.elm).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.elm.encoder;\n if (!(enc === null || enc === void 0 ? void 0 : enc.encode) || !(enc === null || enc === void 0 ? void 0 : enc.normalize)) {\n throw new Error('ELM text encoder is not available. Ensure useTokenizer/maxLen/charSet are set.');\n }\n const X = [];\n const Y = [];\n for (let i = 0; i < inputStrings.length; i++) {\n const x = enc.normalize(enc.encode(String((_d = inputStrings[i]) !== null && _d !== void 0 ? _d : '')));\n X.push(x);\n const li = categories.indexOf(labels[i]);\n const y = new Array(categories.length).fill(0);\n if (li >= 0)\n y[li] = 1;\n Y.push(y);\n }\n // Classic ELM closed-form training\n this.elm.trainFromData(X, Y);\n }\n /**\n * Returns a dense embedding for one string.\n * Uses ELM.getEmbedding() if available; otherwise computes H = act(XW^T + b).\n * By design this returns the *hidden* feature (length = hiddenUnits).\n */\n encode(text) {\n var _a, _b, _c;\n // Get encoder\n const enc = (_c = (_b = (_a = this.elm).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.elm.encoder;\n if (!(enc === null || enc === void 0 ? void 0 : enc.encode) || !(enc === null || enc === void 0 ? void 0 : enc.normalize)) {\n throw new Error('ELM text encoder is not available. Train or configure text settings first.');\n }\n const x = enc.normalize(enc.encode(String(text !== null && text !== void 0 ? 
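// Usage sketch for CharacterLangEncoderELM above (strings, labels and config values invented).
// The class forces the text branch (useTokenizer: true), derives categories from the label set,
// trains the closed-form ELM, and encode() returns the hidden-layer activation
// (length = hiddenUnits) rather than class probabilities.
const langEnc = new CharacterLangEncoderELM({ hiddenUnits: 96, activation: 'relu', maxLen: 64 });
langEnc.train(
    ['bonjour le monde', 'hello world', 'hola mundo'],
    ['fr', 'en', 'es'],
);
const vec = langEnc.encode('good morning'); // hidden embedding, number[96]
console.log(vec.length);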
text : '')));\n // Prefer official embedding API if present\n if (typeof this.elm.getEmbedding === 'function') {\n const E = this.elm.getEmbedding([x]);\n if (Array.isArray(E) && Array.isArray(E[0]))\n return E[0];\n }\n // Fallback: compute hidden act via model params (W,b)\n const model = this.elm.model;\n if (!model)\n throw new Error('Model not trained.');\n const { W, b } = model; // W: hidden x in, b: hidden x 1\n const tempH = Matrix.multiply([x], Matrix.transpose(W)); // (1 x hidden)\n const act = Activations.get(this.activation);\n const H = tempH.map(row => row.map((v, j) => act(v + b[j][0]))); // (1 x hidden)\n // Return hidden vector\n return H[0];\n }\n /** Batch encoding convenience */\n encodeBatch(texts) {\n return texts.map(t => this.encode(t));\n }\n /** Load/save passthroughs */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// FeatureCombinerELM.ts — combine encoder vectors + metadata, train numeric ELM\nclass FeatureCombinerELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.categories = [];\n const hidden = config.hiddenUnits;\n const act = config.activation;\n if (typeof hidden !== 'number') {\n throw new Error('FeatureCombinerELM requires config.hiddenUnits (number)');\n }\n if (!act) {\n throw new Error('FeatureCombinerELM requires config.activation');\n }\n // Force numeric mode (tokenizer off). Provide a safe inputSize placeholder;\n // ELM's trainFromData learns actual dims from X at train-time.\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: false, inputSize: (_b = config.inputSize) !== null && _b !== void 0 ? _b : 1, log: {\n modelName: 'FeatureCombinerELM',\n verbose: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : false,\n toFile: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.toFile) !== null && _f !== void 0 ? _f : false,\n // @ts-ignore optional level passthrough\n level: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.level) !== null && _h !== void 0 ? _h : 'info',\n } });\n this.elm = new ELM(this.config);\n // Optional thresholds/export passthrough\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /** Concatenate encoder vector + metadata vector */\n static combineFeatures(encodedVec, meta) {\n // Fast path avoids spread copies in tight loops\n const out = new Array(encodedVec.length + meta.length);\n let i = 0;\n for (; i < encodedVec.length; i++)\n out[i] = encodedVec[i];\n for (let j = 0; j < meta.length; j++)\n out[i + j] = meta[j];\n return out;\n }\n /** Convenience for batch combination */\n static combineBatch(encoded, metas) {\n if (encoded.length !== metas.length) {\n throw new Error(`combineBatch: encoded length ${encoded.length} != metas length ${metas.length}`);\n }\n const X = new Array(encoded.length);\n for (let i = 0; i < encoded.length; i++) {\n X[i] = FeatureCombinerELM.combineFeatures(encoded[i], metas[i]);\n }\n return X;\n }\n /** Train from encoder vectors + metadata + labels (classification) */\n train(encoded, metas, labels) {\n if (!(encoded === null || encoded === void 0 ? 
void 0 : encoded.length) || !(metas === null || metas === void 0 ? void 0 : metas.length) || !(labels === null || labels === void 0 ? void 0 : labels.length)) {\n throw new Error('train: empty encoded/metas/labels');\n }\n if (encoded.length !== metas.length || encoded.length !== labels.length) {\n throw new Error('train: lengths must match (encoded, metas, labels)');\n }\n const X = FeatureCombinerELM.combineBatch(encoded, metas);\n this.categories = Array.from(new Set(labels));\n this.elm.setCategories(this.categories);\n const Y = labels.map((lab) => {\n const idx = this.categories.indexOf(lab);\n const row = new Array(this.categories.length).fill(0);\n if (idx >= 0)\n row[idx] = 1;\n return row;\n });\n // Closed-form solve via ELM; no private internals needed\n this.elm.trainFromData(X, Y);\n }\n /** Predict top-K labels from a single (vec, meta) pair */\n predict(encodedVec, meta, topK = 1) {\n const input = [FeatureCombinerELM.combineFeatures(encodedVec, meta)];\n const batches = this.elm.predictFromVector(input, topK);\n return batches[0];\n }\n /** Predict the single best label + prob */\n predictLabel(encodedVec, meta) {\n const [top] = this.predict(encodedVec, meta, 1);\n return top;\n }\n /** Get hidden embedding for (vec, meta) pair (useful for chaining) */\n getEmbedding(encodedVec, meta) {\n const input = [FeatureCombinerELM.combineFeatures(encodedVec, meta)];\n const H = this.elm.getEmbedding(input);\n return H[0];\n }\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// ConfidenceClassifierELM.ts — numeric confidence classifier on top of ELM\n// Upgrades:\n// • Numeric-only pipeline (no tokenizer)\n// • Proper trainFromData(X, Y) with one-hot labels\n// • Vector-safe prediction (predictFromVector)\n// • Score helpers, batch APIs, and simple evaluation\n// • Robust logging + safe handling of ELMConfig union\n/**\n * ConfidenceClassifierELM is a lightweight wrapper that classifies whether\n * an upstream model’s prediction is \"low\" or \"high\" confidence based on\n * (embedding, metadata) numeric features.\n */\nclass ConfidenceClassifierELM {\n constructor(baseConfig, opts = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;\n this.baseConfig = baseConfig;\n this.categories = (_a = opts.categories) !== null && _a !== void 0 ? _a : ['low', 'high'];\n this.activation = (_b = opts.activation) !== null && _b !== void 0 ? _b : ((_c = baseConfig.activation) !== null && _c !== void 0 ? _c : 'relu');\n // We force a numeric ELM config. Many ELM builds don’t require inputSize\n // at construction because trainFromData(X,Y) uses X[0].length to size W.\n // We still pass useTokenizer=false and categories to be explicit.\n const cfg = Object.assign(Object.assign({}, this.baseConfig), { useTokenizer: false, categories: this.categories, activation: this.activation, log: {\n modelName: 'ConfidenceClassifierELM',\n verbose: (_f = (_e = (_d = baseConfig.log) === null || _d === void 0 ? void 0 : _d.verbose) !== null && _e !== void 0 ? _e : opts.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = baseConfig.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = baseConfig.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? 
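// Pipeline sketch for FeatureCombinerELM above (the vectors are toy stand-ins for encoder
// output). combineFeatures appends metadata after each embedding, train() one-hots labels and
// solves the numeric ELM in closed form, and predict() routes through predictFromVector.
const combiner = new FeatureCombinerELM({ hiddenUnits: 64, activation: 'relu' });
const embeddings = [[0.1, 0.9], [0.8, 0.2]];
const metas = [[1, 0], [0, 1]];               // hand-built metadata flags (assumed meaning)
combiner.train(embeddings, metas, ['faq', 'smalltalk']);
console.log(FeatureCombinerELM.combineFeatures([0.2, 0.8], [1, 0])); // → [0.2, 0.8, 1, 0]
console.log(combiner.predictLabel([0.2, 0.8], [1, 0]));              // top-1 { label, prob }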
_k : 'info',\n }, \n // Optional passthroughs:\n exportFileName: (_l = opts.exportFileName) !== null && _l !== void 0 ? _l : this.baseConfig.exportFileName });\n this.elm = new ELM(cfg);\n // Forward thresholds if present\n if (this.baseConfig.metrics) {\n this.elm.metrics = this.baseConfig.metrics;\n }\n }\n /** One-hot helper */\n oneHot(n, idx) {\n const v = new Array(n).fill(0);\n if (idx >= 0 && idx < n)\n v[idx] = 1;\n return v;\n }\n /**\n * Train from numeric (vector, meta) → combined features + labels.\n * `vectors[i]` and `metas[i]` must be aligned with `labels[i]`.\n */\n train(vectors, metas, labels) {\n if (!(vectors === null || vectors === void 0 ? void 0 : vectors.length) || !(metas === null || metas === void 0 ? void 0 : metas.length) || !(labels === null || labels === void 0 ? void 0 : labels.length)) {\n throw new Error('train: empty inputs');\n }\n if (vectors.length !== metas.length || vectors.length !== labels.length) {\n throw new Error('train: vectors, metas, labels must have same length');\n }\n // Ensure categories include all observed labels (keeps order of existing categories first)\n const uniq = Array.from(new Set(labels));\n const merged = Array.from(new Set([...this.categories, ...uniq]));\n this.categories = merged;\n this.elm.setCategories(this.categories);\n // Build X, Y\n const X = new Array(vectors.length);\n const Y = new Array(vectors.length);\n for (let i = 0; i < vectors.length; i++) {\n const x = FeatureCombinerELM.combineFeatures(vectors[i], metas[i]); // numeric feature vector\n X[i] = x;\n const li = this.categories.indexOf(labels[i]);\n Y[i] = this.oneHot(this.categories.length, li);\n }\n // Closed-form ELM training\n this.elm.trainFromData(X, Y);\n }\n /** Predict full distribution for a single (vec, meta). */\n predict(vec, meta, topK = 2) {\n var _a, _b;\n const x = FeatureCombinerELM.combineFeatures(vec, meta);\n // Prefer vector-safe API; most Astermind builds expose predictFromVector([x], topK)\n const fn = this.elm.predictFromVector;\n if (typeof fn === 'function') {\n const out = fn.call(this.elm, [x], topK); // PredictResult[][]\n return Array.isArray(out) && Array.isArray(out[0]) ? out[0] : (out !== null && out !== void 0 ? out : []);\n }\n // Fallback to predict() if it supports numeric vectors (some builds do)\n const maybe = (_b = (_a = this.elm).predict) === null || _b === void 0 ? void 0 : _b.call(_a, x, topK);\n if (Array.isArray(maybe))\n return maybe;\n throw new Error('No vector-safe predict available on underlying ELM.');\n }\n /** Probability the label is \"high\" (or the second category by default). */\n predictScore(vec, meta, positive = 'high') {\n var _a;\n const dist = this.predict(vec, meta, this.categories.length);\n const hit = dist.find(d => d.label === positive);\n return (_a = hit === null || hit === void 0 ? void 0 : hit.prob) !== null && _a !== void 0 ? _a : 0;\n }\n /** Predicted top-1 label. */\n predictLabel(vec, meta) {\n var _a, _b;\n const dist = this.predict(vec, meta, 1);\n return (_b = (_a = dist[0]) === null || _a === void 0 ? void 0 : _a.label) !== null && _b !== void 0 ? _b : this.categories[0];\n }\n /** Batch prediction (distributions). */\n predictBatch(vectors, metas, topK = 2) {\n if (vectors.length !== metas.length) {\n throw new Error('predictBatch: vectors and metas must have same length');\n }\n return vectors.map((v, i) => this.predict(v, metas[i], topK));\n }\n /* ============ Simple evaluation helpers ============ */\n /** Compute accuracy and confusion counts for a labeled set. 
*/\n evaluate(vectors, metas, labels) {\n if (vectors.length !== metas.length || vectors.length !== labels.length) {\n throw new Error('evaluate: inputs must have same length');\n }\n const confusion = {};\n for (const a of this.categories) {\n confusion[a] = {};\n for (const b of this.categories)\n confusion[a][b] = 0;\n }\n let correct = 0;\n for (let i = 0; i < vectors.length; i++) {\n const pred = this.predictLabel(vectors[i], metas[i]);\n const gold = labels[i];\n if (pred === gold)\n correct++;\n if (!confusion[gold])\n confusion[gold] = {};\n if (confusion[gold][pred] === undefined)\n confusion[gold][pred] = 0;\n confusion[gold][pred]++;\n }\n return { accuracy: correct / labels.length, confusion };\n }\n /* ============ I/O passthroughs ============ */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n /** Access underlying ELM if needed */\n getELM() {\n return this.elm;\n }\n /** Current category ordering used by the model */\n getCategories() {\n return this.categories.slice();\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// EncoderELM.ts — string→vector encoder using ELM (batch) + OnlineELM (incremental)\nclass EncoderELM {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n if (typeof config.hiddenUnits !== 'number') {\n throw new Error('EncoderELM requires config.hiddenUnits (number).');\n }\n if (!config.activation) {\n throw new Error('EncoderELM requires config.activation.');\n }\n // Force text-encoder mode by default (safe even if NumericConfig is passed:\n // ELM will ignore tokenizer fields in numeric flows)\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: (_b = config.useTokenizer) !== null && _b !== void 0 ? _b : true, \n // keep charSet/maxLen if caller provided; otherwise ELM defaults will kick in\n log: {\n modelName: 'EncoderELM',\n verbose: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.verbose) !== null && _d !== void 0 ? _d : false,\n toFile: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.toFile) !== null && _f !== void 0 ? _f : false,\n level: (_h = (_g = config.log) === null || _g === void 0 ? void 0 : _g.level) !== null && _h !== void 0 ? _h : 'info',\n } });\n this.elm = new ELM(this.config);\n // Forward thresholds/file export if present\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /** Batch training for string → dense vector mapping. */\n train(inputStrings, targetVectors) {\n if (!(inputStrings === null || inputStrings === void 0 ? void 0 : inputStrings.length) || !(targetVectors === null || targetVectors === void 0 ? 
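// Usage sketch for ConfidenceClassifierELM above, with invented features and a minimal
// baseConfig (the exact required fields depend on the underlying ELM build). It defaults to
// the ['low', 'high'] label space, combines (embedding, meta) into one numeric vector per
// example, and predictScore() returns the probability mass on the positive class.
const confClf = new ConfidenceClassifierELM({ hiddenUnits: 32 });
confClf.train(
    [[0.9, 0.9], [0.1, 0.2], [0.8, 0.7]],   // upstream embeddings (toy)
    [[1], [0], [1]],                        // metadata, e.g. an upstream top-1 prob bucket
    ['high', 'low', 'high'],
);
console.log(confClf.predictScore([0.85, 0.8], [1]));       // P(label === 'high')
console.log(confClf.evaluate([[0.1, 0.1]], [[0]], ['low'])); // { accuracy, confusion }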
void 0 : targetVectors.length)) {\n throw new Error('train: empty inputs');\n }\n if (inputStrings.length !== targetVectors.length) {\n throw new Error('train: inputStrings and targetVectors lengths differ');\n }\n const enc = this.elm.encoder;\n if (!enc || typeof enc.encode !== 'function') {\n throw new Error('EncoderELM: underlying ELM has no encoder; set useTokenizer/maxLen/charSet in config.');\n }\n // X = normalized encoded text; Y = dense targets\n const X = inputStrings.map(s => enc.normalize(enc.encode(s)));\n const Y = targetVectors;\n // Closed-form solve via ELM\n // (ELM learns W,b randomly and solves β; Y can be any numeric outputDim)\n this.elm.trainFromData(X, Y);\n }\n /** Encode a string into a dense feature vector using the trained model. */\n encode(text) {\n var _a;\n const enc = this.elm.encoder;\n if (!enc || typeof enc.encode !== 'function') {\n throw new Error('encode: underlying ELM has no encoder');\n }\n const model = this.elm.model;\n if (!model)\n throw new Error('EncoderELM model has not been trained yet.');\n const x = enc.normalize(enc.encode(text)); // 1 x D\n const { W, b, beta } = model;\n // H = act( x W^T + b )\n const tempH = Matrix.multiply([x], Matrix.transpose(W));\n const act = Activations.get((_a = this.config.activation) !== null && _a !== void 0 ? _a : 'relu');\n const H = Activations.apply(tempH.map(row => row.map((v, j) => v + b[j][0])), act);\n // y = H β\n return Matrix.multiply(H, beta)[0];\n }\n /* ===================== Online / Incremental API ===================== */\n /**\n * Begin an online OS-ELM run for string→vector encoding.\n * Provide outputDim and either inputDim OR a sampleText we can encode to infer inputDim.\n */\n beginOnline(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n const outputDim = opts.outputDim | 0;\n if (!(outputDim > 0))\n throw new Error('beginOnline: outputDim must be > 0');\n // Derive inputDim if not provided\n let inputDim = opts.inputDim;\n if (inputDim == null) {\n const enc = this.elm.encoder;\n if (!opts.sampleText || !enc) {\n throw new Error('beginOnline: provide inputDim or sampleText (and ensure encoder is available).');\n }\n inputDim = enc.normalize(enc.encode(opts.sampleText)).length;\n }\n const hiddenUnits = ((_a = opts.hiddenUnits) !== null && _a !== void 0 ? _a : this.config.hiddenUnits) | 0;\n if (!(hiddenUnits > 0))\n throw new Error('beginOnline: hiddenUnits must be > 0');\n const activation = ((_c = (_b = opts.activation) !== null && _b !== void 0 ? _b : this.config.activation) !== null && _c !== void 0 ? _c : 'relu');\n // Build OnlineELM with our new config-style constructor\n this.online = new OnlineELM({\n inputDim: inputDim,\n outputDim,\n hiddenUnits,\n activation,\n ridgeLambda: (_d = opts.ridgeLambda) !== null && _d !== void 0 ? _d : 1e-2,\n weightInit: (_e = opts.weightInit) !== null && _e !== void 0 ? _e : 'xavier',\n forgettingFactor: (_f = opts.forgettingFactor) !== null && _f !== void 0 ? _f : 1.0,\n seed: (_g = opts.seed) !== null && _g !== void 0 ? _g : 1337,\n log: { verbose: (_j = (_h = this.config.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? 
_j : false, modelName: 'EncoderELM-Online' },\n });\n this.onlineInputDim = inputDim;\n this.onlineOutputDim = outputDim;\n }\n /**\n * Online partial fit with *pre-encoded* numeric vectors.\n * If not initialized, this call seeds the model via `init`, else it performs an `update`.\n */\n partialTrainOnlineVectors(batch) {\n if (!this.online || this.onlineInputDim == null || this.onlineOutputDim == null) {\n throw new Error('partialTrainOnlineVectors: call beginOnline() first.');\n }\n if (!(batch === null || batch === void 0 ? void 0 : batch.length))\n return;\n const D = this.onlineInputDim, O = this.onlineOutputDim;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { x, y } = batch[i];\n if (x.length !== D)\n throw new Error(`x length ${x.length} != inputDim ${D}`);\n if (y.length !== O)\n throw new Error(`y length ${y.length} != outputDim ${O}`);\n X[i] = x;\n Y[i] = y;\n }\n if (!this.online.beta || !this.online.P) {\n this.online.init(X, Y);\n }\n else {\n this.online.update(X, Y);\n }\n }\n /**\n * Online partial fit with raw texts and dense numeric targets.\n * Texts are encoded + normalized internally.\n */\n partialTrainOnlineTexts(batch) {\n if (!this.online || this.onlineInputDim == null || this.onlineOutputDim == null) {\n throw new Error('partialTrainOnlineTexts: call beginOnline() first.');\n }\n if (!(batch === null || batch === void 0 ? void 0 : batch.length))\n return;\n const enc = this.elm.encoder;\n if (!enc)\n throw new Error('partialTrainOnlineTexts: encoder not available on underlying ELM');\n const D = this.onlineInputDim, O = this.onlineOutputDim;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { text, target } = batch[i];\n const x = enc.normalize(enc.encode(text));\n if (x.length !== D)\n throw new Error(`encoded text dim ${x.length} != inputDim ${D}`);\n if (target.length !== O)\n throw new Error(`target length ${target.length} != outputDim ${O}`);\n X[i] = x;\n Y[i] = target;\n }\n if (!this.online.beta || !this.online.P) {\n this.online.init(X, Y);\n }\n else {\n this.online.update(X, Y);\n }\n }\n /**\n * Finalize the online run by publishing learned weights into the standard ELM model.\n * After this, the normal encode() path works unchanged.\n */\n endOnline() {\n if (!this.online)\n return;\n const W = this.online.W;\n const b = this.online.b;\n const beta = this.online.beta;\n if (!W || !b || !beta) {\n throw new Error('endOnline: online model has no learned parameters (did you call init/fit/update?)');\n }\n this.elm.model = { W, b, beta };\n // Clear online state\n this.online = undefined;\n this.onlineInputDim = undefined;\n this.onlineOutputDim = undefined;\n }\n /* ===================== I/O passthrough ===================== */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. 
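// Incremental-training sketch for EncoderELM's online path above (dims and batch contents
// invented; very small seed batches may be ill-conditioned in practice). beginOnline() builds
// an OS-ELM, inferring inputDim from a sample text when not given; partialTrainOnlineTexts()
// seeds with init() on the first batch and update() afterwards; endOnline() publishes
// { W, b, beta } back into the classic ELM so encode() keeps working unchanged.
const encoderElm = new EncoderELM({ hiddenUnits: 128, activation: 'relu' });
encoderElm.beginOnline({ outputDim: 8, sampleText: 'warm-up sample' });
encoderElm.partialTrainOnlineTexts([
    { text: 'first stream item',  target: [1, 0, 0, 0, 0, 0, 0, 0] },
    { text: 'second stream item', target: [0, 1, 0, 0, 0, 0, 0, 0] },
]);
encoderElm.endOnline();
console.log(encoderElm.encode('new text').length); // 8-dim dense vector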
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// intentClassifier.ts — ELM-based intent classification (text → label)\nclass IntentClassifier {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.categories = [];\n // Basic guardrails (common footguns)\n const hidden = config.hiddenUnits;\n const act = config.activation;\n if (typeof hidden !== 'number') {\n throw new Error('IntentClassifier requires config.hiddenUnits (number)');\n }\n if (!act) {\n throw new Error('IntentClassifier requires config.activation');\n }\n // Force TEXT mode (tokenizer on). We set categories during train().\n this.config = Object.assign(Object.assign({}, config), { categories: (_a = config.categories) !== null && _a !== void 0 ? _a : [], useTokenizer: true, log: {\n modelName: 'IntentClassifier',\n verbose: (_c = (_b = config.log) === null || _b === void 0 ? void 0 : _b.verbose) !== null && _c !== void 0 ? _c : false,\n toFile: (_e = (_d = config.log) === null || _d === void 0 ? void 0 : _d.toFile) !== null && _e !== void 0 ? _e : false,\n // @ts-ignore: optional passthrough\n level: (_g = (_f = config.log) === null || _f === void 0 ? void 0 : _f.level) !== null && _g !== void 0 ? _g : 'info',\n } });\n this.model = new ELM(this.config);\n // Optional thresholds/export passthrough\n if (config.metrics)\n this.model.metrics = config.metrics;\n if (config.exportFileName)\n this.model.config.exportFileName = config.exportFileName;\n }\n /* ==================== Training ==================== */\n /**\n * Train from (text, label) pairs using closed-form ELM solve.\n * Uses the ELM's UniversalEncoder (token mode).\n */\n train(textLabelPairs, augmentation) {\n var _a, _b, _c, _d, _e;\n if (!(textLabelPairs === null || textLabelPairs === void 0 ? void 0 : textLabelPairs.length))\n throw new Error('train: empty training data');\n // Build label set\n this.categories = Array.from(new Set(textLabelPairs.map(p => p.label)));\n this.model.setCategories(this.categories);\n // Prepare encoder\n const enc = (_c = (_b = (_a = this.model).getEncoder) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : this.model.encoder;\n if (!enc)\n throw new Error('IntentClassifier: encoder unavailable on ELM instance.');\n // Inline augmentation (prefix/suffix/noise) — lightweight so we avoid importing Augment here\n const charSet = (augmentation === null || augmentation === void 0 ? void 0 : augmentation.charSet) ||\n enc.charSet ||\n 'abcdefghijklmnopqrstuvwxyz';\n const makeNoisy = (s, rate) => {\n var _a, _b;\n if (rate === void 0) { rate = (_a = augmentation === null || augmentation === void 0 ? void 0 : augmentation.noiseRate) !== null && _a !== void 0 ? _a : 0.05; }\n if (!(augmentation === null || augmentation === void 0 ? void 0 : augmentation.includeNoise) || rate <= 0)\n return [s];\n const arr = s.split('');\n for (let i = 0; i < arr.length; i++) {\n if (Math.random() < rate) {\n const r = Math.floor(Math.random() * charSet.length);\n arr[i] = (_b = charSet[r]) !== null && _b !== void 0 ? _b : arr[i];\n }\n }\n return [s, arr.join('')];\n };\n const expanded = [];\n for (const p of textLabelPairs) {\n const base = [p.text];\n const withPrefixes = ((_d = augmentation === null || augmentation === void 0 ? void 0 : augmentation.prefixes) !== null && _d !== void 0 ? _d : []).map(px => `${px}${p.text}`);\n const withSuffixes = ((_e = augmentation === null || augmentation === void 0 ? void 0 : augmentation.suffixes) !== null && _e !== void 0 ? 
_e : []).map(sx => `${p.text}${sx}`);\n const candidates = [...base, ...withPrefixes, ...withSuffixes];\n for (const c of candidates) {\n for (const v of makeNoisy(c)) {\n expanded.push({ text: v, label: p.label });\n }\n }\n }\n // Encode + one-hot\n const X = new Array(expanded.length);\n const Y = new Array(expanded.length);\n for (let i = 0; i < expanded.length; i++) {\n const { text, label } = expanded[i];\n const vec = enc.normalize(enc.encode(text));\n X[i] = vec;\n const row = new Array(this.categories.length).fill(0);\n const li = this.categories.indexOf(label);\n if (li >= 0)\n row[li] = 1;\n Y[i] = row;\n }\n // Closed-form ELM training\n this.model.trainFromData(X, Y);\n }\n /* ==================== Inference ==================== */\n /** Top-K predictions with an optional probability threshold */\n predict(text, topK = 1, threshold = 0) {\n const res = this.model.predict(text, Math.max(1, topK));\n return threshold > 0 ? res.filter(r => r.prob >= threshold) : res;\n }\n /** Batched predict */\n predictBatch(texts, topK = 1, threshold = 0) {\n return texts.map(t => this.predict(t, topK, threshold));\n }\n /** Convenience: best label + prob (or undefined if below threshold) */\n predictLabel(text, threshold = 0) {\n const [top] = this.predict(text, 1, threshold);\n return top;\n }\n /* ==================== Model I/O ==================== */\n loadModelFromJSON(json) {\n this.model.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.model.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// IO.ts - Import/export utilities for labeled training data\nclass IO {\n static importJSON(json) {\n try {\n const data = JSON.parse(json);\n if (!Array.isArray(data))\n throw new Error('Invalid format');\n return data.filter(item => typeof item.text === 'string' && typeof item.label === 'string');\n }\n catch (err) {\n console.error('Failed to parse training data JSON:', err);\n return [];\n }\n }\n static exportJSON(pairs) {\n return JSON.stringify(pairs, null, 2);\n }\n static importDelimited(text, delimiter = ',', hasHeader = true) {\n var _a, _b, _c, _d;\n const lines = text.trim().split('\\n');\n const examples = [];\n const headers = hasHeader\n ? lines[0].split(delimiter).map(h => h.trim().toLowerCase())\n : lines[0].split(delimiter).length === 1\n ? ['label']\n : ['text', 'label'];\n const startIndex = hasHeader ? 1 : 0;\n for (let i = startIndex; i < lines.length; i++) {\n const parts = lines[i].split(delimiter);\n if (parts.length === 1) {\n examples.push({ text: parts[0].trim(), label: parts[0].trim() });\n }\n else {\n const textIdx = headers.indexOf('text');\n const labelIdx = headers.indexOf('label');\n const text = textIdx !== -1 ? (_a = parts[textIdx]) === null || _a === void 0 ? void 0 : _a.trim() : (_b = parts[0]) === null || _b === void 0 ? void 0 : _b.trim();\n const label = labelIdx !== -1 ? (_c = parts[labelIdx]) === null || _c === void 0 ? void 0 : _c.trim() : (_d = parts[1]) === null || _d === void 0 ? void 0 : _d.trim();\n if (text && label) {\n examples.push({ text, label });\n }\n }\n }\n return examples;\n }\n static exportDelimited(pairs, delimiter = ',', includeHeader = true) {\n const header = includeHeader ? 
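// Usage sketch for IntentClassifier above (utterances and augmentation settings invented).
// train() optionally expands each example with prefixes, suffixes and character noise before
// the closed-form solve; predict() supports a probability-threshold filter on the top-K list.
const intents = new IntentClassifier({ hiddenUnits: 64, activation: 'relu' });
intents.train(
    [
        { text: 'what is the weather', label: 'weather' },
        { text: 'play some music',     label: 'music' },
    ],
    { prefixes: ['please '], includeNoise: true, noiseRate: 0.05 },
);
console.log(intents.predict('play a song', 2, 0.1)); // [{ label, prob }] above threshold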
`text${delimiter}label\\n` : '';\n const rows = pairs.map(p => `${p.text.replace(new RegExp(delimiter, 'g'), '')}${delimiter}${p.label.replace(new RegExp(delimiter, 'g'), '')}`);\n return header + rows.join('\\n');\n }\n static importCSV(csv, hasHeader = true) {\n return this.importDelimited(csv, ',', hasHeader);\n }\n static exportCSV(pairs, includeHeader = true) {\n return this.exportDelimited(pairs, ',', includeHeader);\n }\n static importTSV(tsv, hasHeader = true) {\n return this.importDelimited(tsv, '\\t', hasHeader);\n }\n static exportTSV(pairs, includeHeader = true) {\n return this.exportDelimited(pairs, '\\t', includeHeader);\n }\n static inferSchemaFromCSV(csv) {\n var _a;\n const lines = csv.trim().split('\\n');\n if (lines.length === 0)\n return { fields: [] };\n const header = lines[0].split(',').map(h => h.trim().toLowerCase());\n const row = ((_a = lines[1]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];\n const fields = header.map((name, i) => {\n var _a;\n const sample = (_a = row[i]) === null || _a === void 0 ? void 0 : _a.trim();\n let type = 'unknown';\n if (!sample)\n type = 'unknown';\n else if (!isNaN(Number(sample)))\n type = 'number';\n else if (sample === 'true' || sample === 'false')\n type = 'boolean';\n else\n type = 'string';\n return { name, type };\n });\n const suggestedMapping = {\n text: header.find(h => h.includes('text') || h.includes('utterance') || h.includes('input')) || header[0],\n label: header.find(h => h.includes('label') || h.includes('intent') || h.includes('tag')) || header[1] || header[0],\n };\n return { fields, suggestedMapping };\n }\n static inferSchemaFromJSON(json) {\n try {\n const data = JSON.parse(json);\n if (!Array.isArray(data) || data.length === 0 || typeof data[0] !== 'object')\n return { fields: [] };\n const keys = Object.keys(data[0]);\n const fields = keys.map(key => {\n const val = data[0][key];\n let type = 'unknown';\n if (typeof val === 'string')\n type = 'string';\n else if (typeof val === 'number')\n type = 'number';\n else if (typeof val === 'boolean')\n type = 'boolean';\n return { name: key.toLowerCase(), type };\n });\n const suggestedMapping = {\n text: keys.find(k => k.toLowerCase().includes('text') || k.toLowerCase().includes('utterance') || k.toLowerCase().includes('input')) || keys[0],\n label: keys.find(k => k.toLowerCase().includes('label') || k.toLowerCase().includes('intent') || k.toLowerCase().includes('tag')) || keys[1] || keys[0],\n };\n return { fields, suggestedMapping };\n }\n catch (err) {\n console.error('Failed to infer schema from JSON:', err);\n return { fields: [] };\n }\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// LanguageClassifier.ts — upgraded for new ELM/OnlineELM APIs (with requireEncoder guard)\nclass LanguageClassifier {\n constructor(config) {\n var _a, _b, _c, _d, _e, _f;\n this.config = Object.assign(Object.assign({}, config), { log: {\n modelName: 'LanguageClassifier',\n verbose: (_b = (_a = config.log) === null || _a === void 0 ? void 0 : _a.verbose) !== null && _b !== void 0 ? _b : false,\n toFile: (_d = (_c = config.log) === null || _c === void 0 ? void 0 : _c.toFile) !== null && _d !== void 0 ? _d : false,\n level: (_f = (_e = config.log) === null || _e === void 0 ? void 0 : _e.level) !== null && _f !== void 0 ? 
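// Usage sketch over the IO helpers above: importDelimited maps 'text'/'label' headers (or
// positional columns) onto { text, label } pairs, and inferSchemaFromCSV guesses which column
// is which from header keywords. The CSV content is invented.
const csv = 'text,label\nhi there,greet\nsee you,bye';
const pairs = IO.importCSV(csv);          // [{ text: 'hi there', label: 'greet' }, ...]
console.log(IO.inferSchemaFromCSV(csv));  // { fields, suggestedMapping: { text, label } }
console.log(IO.exportTSV(pairs, true));   // round-trip to tab-separated form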
_f : 'info',\n } });\n this.elm = new ELM(this.config);\n if (config.metrics)\n this.elm.metrics = config.metrics;\n if (config.exportFileName)\n this.elm.config.exportFileName = config.exportFileName;\n }\n /* ============== tiny helper to guarantee an encoder ============== */\n requireEncoder() {\n const enc = this.elm.encoder;\n if (!enc) {\n throw new Error('LanguageClassifier: encoder unavailable. Use text mode (useTokenizer=true with maxLen/charSet) ' +\n 'or pass a UniversalEncoder in the ELM config.');\n }\n return enc;\n }\n /* ================= I/O helpers ================= */\n loadTrainingData(raw, format = 'json') {\n switch (format) {\n case 'csv': return IO.importCSV(raw);\n case 'tsv': return IO.importTSV(raw);\n case 'json':\n default: return IO.importJSON(raw);\n }\n }\n /* ================= Supervised training ================= */\n /** Train from labeled text examples (uses internal encoder). */\n train(data) {\n if (!(data === null || data === void 0 ? void 0 : data.length))\n throw new Error('LanguageClassifier.train: empty dataset');\n const enc = this.requireEncoder();\n const categories = Array.from(new Set(data.map(d => d.label)));\n this.elm.setCategories(categories);\n const X = [];\n const Y = [];\n for (const { text, label } of data) {\n const x = enc.normalize(enc.encode(text));\n const yi = categories.indexOf(label);\n if (yi < 0)\n continue;\n X.push(x);\n Y.push(this.elm.oneHot(categories.length, yi));\n }\n this.elm.trainFromData(X, Y);\n }\n /** Predict from raw text (uses internal encoder). */\n predict(text, topK = 3) {\n // let ELM handle encode→predict (works in text mode)\n return this.elm.predict(text, topK);\n }\n /** Train using already-encoded numeric vectors (no text encoder). */\n trainVectors(data) {\n var _a;\n if (!(data === null || data === void 0 ? void 0 : data.length))\n throw new Error('LanguageClassifier.trainVectors: empty dataset');\n const categories = Array.from(new Set(data.map(d => d.label)));\n this.elm.setCategories(categories);\n const X = data.map(d => d.vector);\n const Y = data.map(d => this.elm.oneHot(categories.length, categories.indexOf(d.label)));\n if (typeof this.elm.trainFromData === 'function') {\n this.elm.trainFromData(X, Y);\n return;\n }\n // Fallback closed-form (compat)\n const hidden = this.config.hiddenUnits;\n const W = this.elm.randomMatrix(hidden, X[0].length);\n const b = this.elm.randomMatrix(hidden, 1);\n const tempH = Matrix.multiply(X, Matrix.transpose(W));\n const act = Activations.get((_a = this.config.activation) !== null && _a !== void 0 ? _a : 'relu');\n const H = Activations.apply(tempH.map(row => row.map((val, j) => val + b[j][0])), act);\n const Hpinv = this.elm.pseudoInverse(H);\n const beta = Matrix.multiply(Hpinv, Y);\n this.elm.model = { W, b, beta };\n }\n /** Predict from an already-encoded vector (no text encoder). */\n predictFromVector(vec, topK = 1) {\n const out = this.elm.predictFromVector([vec], topK);\n return out[0];\n }\n /* ================= Online (incremental) API ================= */\n beginOnline(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n const cats = opts.categories.slice();\n const D = opts.inputDim | 0;\n if (!cats.length)\n throw new Error('beginOnline: categories must be non-empty');\n if (D <= 0)\n throw new Error('beginOnline: inputDim must be > 0');\n const H = ((_a = opts.hiddenUnits) !== null && _a !== void 0 ? 
_a : this.config.hiddenUnits) | 0;\n if (H <= 0)\n throw new Error('beginOnline: hiddenUnits must be > 0');\n const activation = (_c = (_b = opts.activation) !== null && _b !== void 0 ? _b : this.config.activation) !== null && _c !== void 0 ? _c : 'relu';\n const ridgeLambda = Math.max((_d = opts.lambda) !== null && _d !== void 0 ? _d : 1e-2, 1e-12);\n this.onlineMdl = new OnlineELM({\n inputDim: D,\n outputDim: cats.length,\n hiddenUnits: H,\n activation,\n ridgeLambda,\n seed: (_e = opts.seed) !== null && _e !== void 0 ? _e : 1337,\n weightInit: (_f = opts.weightInit) !== null && _f !== void 0 ? _f : 'xavier',\n forgettingFactor: (_g = opts.forgettingFactor) !== null && _g !== void 0 ? _g : 1.0,\n log: { verbose: (_j = (_h = this.config.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? _j : false, modelName: 'LanguageClassifier/Online' },\n });\n this.onlineCats = cats;\n this.onlineInputDim = D;\n }\n partialTrainVectorsOnline(batch) {\n if (!this.onlineMdl || !this.onlineCats || !this.onlineInputDim) {\n throw new Error('Call beginOnline() before partialTrainVectorsOnline().');\n }\n if (!batch.length)\n return;\n const D = this.onlineInputDim;\n const O = this.onlineCats.length;\n const X = new Array(batch.length);\n const Y = new Array(batch.length);\n for (let i = 0; i < batch.length; i++) {\n const { vector, label } = batch[i];\n if (vector.length !== D)\n throw new Error(`vector dim ${vector.length} != inputDim ${D}`);\n X[i] = vector.slice();\n const y = new Array(O).fill(0);\n const li = this.onlineCats.indexOf(label);\n if (li < 0)\n throw new Error(`Unknown label \"${label}\" for this online run.`);\n y[li] = 1;\n Y[i] = y;\n }\n if (this.onlineMdl.beta && this.onlineMdl.P) {\n this.onlineMdl.update(X, Y);\n }\n else {\n this.onlineMdl.init(X, Y);\n }\n }\n endOnline() {\n if (!this.onlineMdl || !this.onlineCats)\n return;\n const W = this.onlineMdl.W;\n const b = this.onlineMdl.b;\n const B = this.onlineMdl.beta;\n if (!W || !b || !B)\n throw new Error('endOnline: online model is not initialized.');\n this.elm.setCategories(this.onlineCats);\n this.elm.model = { W, b, beta: B };\n this.onlineMdl = undefined;\n this.onlineCats = undefined;\n this.onlineInputDim = undefined;\n }\n /* ================= Persistence ================= */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. – All Rights Reserved.\n// Patent Pending US 63/897,713\n// RefinerELM.ts — numeric “refinement” classifier on top of arbitrary feature vectors\nclass RefinerELM {\n constructor(opts) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n if (!Number.isFinite(opts.inputSize) || opts.inputSize <= 0) {\n throw new Error('RefinerELM: opts.inputSize must be a positive number.');\n }\n if (!Number.isFinite(opts.hiddenUnits) || opts.hiddenUnits <= 0) {\n throw new Error('RefinerELM: opts.hiddenUnits must be a positive number.');\n }\n // Build a *numeric* ELM config (no text fields here)\n const numericConfig = {\n // numeric discriminator:\n useTokenizer: false,\n inputSize: opts.inputSize,\n // required for ELM\n categories: (_a = opts.categories) !== null && _a !== void 0 ? _a : [],\n // base config\n hiddenUnits: opts.hiddenUnits,\n activation: (_b = opts.activation) !== null && _b !== void 0 ? 
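// Incremental sketch for LanguageClassifier's online path above, with toy 3-dim vectors.
// beginOnline() fixes the category set and input dimension, partialTrainVectorsOnline()
// one-hots labels and calls init()/update() on the OS-ELM, and endOnline() installs the
// learned weights so predictFromVector() serves predictions the usual way.
const lc = new LanguageClassifier({ hiddenUnits: 32, activation: 'relu' });
lc.beginOnline({ categories: ['en', 'fr'], inputDim: 3 });
lc.partialTrainVectorsOnline([
    { vector: [1, 0, 0], label: 'en' },
    { vector: [0, 1, 0], label: 'fr' },
]);
lc.endOnline();
console.log(lc.predictFromVector([0.9, 0.1, 0])); // ranked { label, prob }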
_b : 'relu',\n ridgeLambda: opts.ridgeLambda,\n dropout: opts.dropout,\n weightInit: opts.weightInit,\n // misc\n exportFileName: opts.exportFileName,\n log: {\n modelName: (_d = (_c = opts.log) === null || _c === void 0 ? void 0 : _c.modelName) !== null && _d !== void 0 ? _d : 'RefinerELM',\n verbose: (_f = (_e = opts.log) === null || _e === void 0 ? void 0 : _e.verbose) !== null && _f !== void 0 ? _f : false,\n toFile: (_h = (_g = opts.log) === null || _g === void 0 ? void 0 : _g.toFile) !== null && _h !== void 0 ? _h : false,\n level: (_k = (_j = opts.log) === null || _j === void 0 ? void 0 : _j.level) !== null && _k !== void 0 ? _k : 'info',\n },\n };\n this.elm = new ELM(numericConfig);\n // Set metric thresholds on the instance (not inside the config)\n if (opts.metrics) {\n this.elm.metrics = opts.metrics;\n }\n }\n /** Train from feature vectors + string labels. */\n train(inputs, labels, opts) {\n var _a;\n if (!(inputs === null || inputs === void 0 ? void 0 : inputs.length) || !(labels === null || labels === void 0 ? void 0 : labels.length) || inputs.length !== labels.length) {\n throw new Error('RefinerELM.train: inputs/labels must be non-empty and aligned.');\n }\n // Allow overriding categories at train time\n const categories = (_a = opts === null || opts === void 0 ? void 0 : opts.categories) !== null && _a !== void 0 ? _a : Array.from(new Set(labels));\n this.elm.setCategories(categories);\n const Y = labels.map((label) => this.elm.oneHot(categories.length, categories.indexOf(label)));\n // Public training path; no 'task' key here\n const options = {};\n if ((opts === null || opts === void 0 ? void 0 : opts.reuseWeights) !== undefined)\n options.reuseWeights = opts.reuseWeights;\n if (opts === null || opts === void 0 ? void 0 : opts.sampleWeights)\n options.weights = opts.sampleWeights;\n this.elm.trainFromData(inputs, Y, options);\n }\n /** Full probability vector aligned to `this.elm.categories`. */\n predictProbaFromVector(vec) {\n // Use the vector-safe path provided by the core ELM\n const out = this.elm.predictFromVector([vec], /*topK*/ this.elm.categories.length);\n // predictFromVector returns Array<PredictResult[]>, i.e., topK sorted.\n // We want a dense prob vector in category order, so map from topK back:\n const probs = new Array(this.elm.categories.length).fill(0);\n if (out && out[0]) {\n for (const { label, prob } of out[0]) {\n const idx = this.elm.categories.indexOf(label);\n if (idx >= 0)\n probs[idx] = prob;\n }\n }\n return probs;\n }\n /** Top-K predictions ({label, prob}) for a single vector. */\n predict(vec, topK = 1) {\n const [res] = this.elm.predictFromVector([vec], topK);\n return res;\n }\n /** Batch top-K predictions for an array of vectors. */\n predictBatch(vectors, topK = 1) {\n return this.elm.predictFromVector(vectors, topK);\n }\n /** Hidden-layer embedding(s) — useful for chaining. */\n embed(vec) {\n return this.elm.getEmbedding([vec])[0];\n }\n embedBatch(vectors) {\n return this.elm.getEmbedding(vectors);\n }\n /** Persistence passthroughs */\n loadModelFromJSON(json) {\n this.elm.loadModelFromJSON(json);\n }\n saveModelAsJSONFile(filename) {\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// © 2026 AsterMind AI Co. 
– All Rights Reserved.\n// Patent Pending US 63/897,713\n// VotingClassifierELM.ts — meta-classifier that learns to combine multiple ELMs' predictions\nclass VotingClassifierELM {\n // Keep constructor shape compatible with your existing calls\n constructor(baseConfig) {\n this.baseConfig = baseConfig;\n this.modelWeights = [];\n this.usesConfidence = false;\n this.categories = baseConfig.categories || ['English', 'French', 'Spanish'];\n }\n setModelWeights(weights) {\n this.modelWeights = weights.slice();\n }\n calibrateWeights(predictionLists, trueLabels) {\n var _a, _b;\n const numModels = predictionLists.length;\n const numExamples = trueLabels.length;\n const accuracies = new Array(numModels).fill(0);\n for (let m = 0; m < numModels; m++) {\n let correct = 0;\n for (let i = 0; i < numExamples; i++) {\n if (predictionLists[m][i] === trueLabels[i])\n correct++;\n }\n accuracies[m] = correct / Math.max(1, numExamples);\n }\n const total = accuracies.reduce((s, a) => s + a, 0) || 1;\n this.modelWeights = accuracies.map(a => a / total);\n if ((_b = (_a = this.baseConfig) === null || _a === void 0 ? void 0 : _a.log) === null || _b === void 0 ? void 0 : _b.verbose) {\n console.log('🔧 Calibrated model weights:', this.modelWeights);\n }\n }\n /** Train meta-classifier on model predictions (+ optional confidences) and true labels. */\n train(predictionLists, // shape: [numModels][numExamples]\n confidenceLists, trueLabels) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n if (!Array.isArray(predictionLists) || predictionLists.length === 0 || !trueLabels) {\n throw new Error('VotingClassifierELM.train: invalid inputs');\n }\n const numModels = predictionLists.length;\n const numExamples = predictionLists[0].length;\n for (const list of predictionLists) {\n if (list.length !== numExamples)\n throw new Error('Prediction list lengths must match');\n }\n this.usesConfidence = Array.isArray(confidenceLists);\n if (this.usesConfidence) {\n if (confidenceLists.length !== numModels)\n throw new Error('Confidence list count != numModels');\n for (const list of confidenceLists) {\n if (list.length !== numExamples)\n throw new Error('Confidence list length mismatch');\n }\n }\n if (!this.modelWeights.length || this.modelWeights.length !== numModels) {\n this.calibrateWeights(predictionLists, trueLabels);\n }\n // Categories (target space) => from true labels\n this.categories = Array.from(new Set(trueLabels));\n const C = this.categories.length;\n // Compute numeric input size for the meta-ELM:\n // per-model features = one-hot over C + (optional) 1 confidence\n const perModel = C + (this.usesConfidence ? 1 : 0);\n this.inputSize = numModels * perModel;\n // Build X, Y\n const X = new Array(numExamples);\n for (let i = 0; i < numExamples; i++) {\n let row = [];\n for (let m = 0; m < numModels; m++) {\n const predLabel = predictionLists[m][i];\n if (predLabel == null)\n throw new Error(`Invalid label at predictionLists[${m}][${i}]`);\n const w = (_a = this.modelWeights[m]) !== null && _a !== void 0 ? 
_a : 1;\n // one-hot over final categories (C)\n const idx = this.categories.indexOf(predLabel);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n row = row.concat(oh.map(x => x * w));\n if (this.usesConfidence) {\n const conf = confidenceLists[m][i];\n const norm = Math.max(0, Math.min(1, Number(conf) || 0));\n row.push(norm * w);\n }\n }\n X[i] = row;\n }\n const Y = trueLabels.map(lbl => {\n const idx = this.categories.indexOf(lbl);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n return oh;\n });\n // Construct numeric ELM config now that we know inputSize\n const cfg = {\n useTokenizer: false, // numeric mode\n inputSize: this.inputSize,\n categories: this.categories,\n hiddenUnits: (_b = this.baseConfig.hiddenUnits) !== null && _b !== void 0 ? _b : 64,\n activation: (_c = this.baseConfig.activation) !== null && _c !== void 0 ? _c : 'relu',\n ridgeLambda: this.baseConfig.ridgeLambda,\n dropout: this.baseConfig.dropout,\n weightInit: this.baseConfig.weightInit,\n exportFileName: this.baseConfig.exportFileName,\n log: {\n modelName: (_f = (_e = (_d = this.baseConfig) === null || _d === void 0 ? void 0 : _d.log) === null || _e === void 0 ? void 0 : _e.modelName) !== null && _f !== void 0 ? _f : 'VotingClassifierELM',\n verbose: (_j = (_h = (_g = this.baseConfig) === null || _g === void 0 ? void 0 : _g.log) === null || _h === void 0 ? void 0 : _h.verbose) !== null && _j !== void 0 ? _j : false,\n toFile: (_m = (_l = (_k = this.baseConfig) === null || _k === void 0 ? void 0 : _k.log) === null || _l === void 0 ? void 0 : _l.toFile) !== null && _m !== void 0 ? _m : false,\n level: (_q = (_p = (_o = this.baseConfig) === null || _o === void 0 ? void 0 : _o.log) === null || _p === void 0 ? void 0 : _p.level) !== null && _q !== void 0 ? _q : 'info',\n },\n };\n // Create (or recreate) the inner ELM with correct dims\n this.elm = new ELM(cfg);\n // Forward optional metrics gate\n if (this.baseConfig.metrics) {\n this.elm.metrics = this.baseConfig.metrics;\n }\n // Train numerically\n this.elm.trainFromData(X, Y);\n }\n /** Predict final label from a single stacked set of model labels (+ optional confidences). */\n predict(labels, confidences, topK = 1) {\n var _a;\n if (!this.elm)\n throw new Error('VotingClassifierELM: call train() before predict().');\n if (!(labels === null || labels === void 0 ? void 0 : labels.length))\n throw new Error('VotingClassifierELM.predict: empty labels');\n const C = this.categories.length;\n const numModels = labels.length;\n // Build numeric input row consistent with training\n let row = [];\n for (let m = 0; m < numModels; m++) {\n const w = (_a = this.modelWeights[m]) !== null && _a !== void 0 ? _a : 1;\n const idx = this.categories.indexOf(labels[m]);\n const oh = new Array(C).fill(0);\n if (idx >= 0)\n oh[idx] = 1;\n row = row.concat(oh.map(x => x * w));\n if (this.usesConfidence) {\n const norm = Math.max(0, Math.min(1, Number(confidences === null || confidences === void 0 ? 
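/* Editor's usage sketch: stacking three base models' label outputs (plus optional
   confidences) into the meta-classifier trained above. All arrays are hypothetical.
   const voter = new VotingClassifierELM({ hiddenUnits: 64, activation: 'relu' });
   voter.train(
     [predsA, predsB, predsC], // shape [numModels][numExamples], string labels
     [confA, confB, confC],    // per-model confidences in [0, 1], or undefined
     trueLabels,
   );
   const best = voter.predict(['English', 'French', 'English'], [0.9, 0.6, 0.7], 1);
*/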
void 0 : confidences[m]) || 0));\n row.push(norm * w);\n }\n }\n const [res] = this.elm.predictFromVector([row], topK);\n return res;\n }\n loadModelFromJSON(json) {\n var _a, _b, _c, _d, _e;\n if (!this.elm)\n this.elm = new ELM({\n // minimal placeholder; will be overwritten by fromJSON content\n useTokenizer: false,\n inputSize: 1,\n categories: ['_tmp'],\n hiddenUnits: 1,\n activation: 'relu',\n log: { modelName: 'VotingClassifierELM' },\n });\n this.elm.loadModelFromJSON(json);\n // Try to recover categories & inputSize from loaded model\n this.categories = (_a = this.elm.categories) !== null && _a !== void 0 ? _a : this.categories;\n this.inputSize = (_e = ((_d = (_c = (_b = this.elm.model) === null || _b === void 0 ? void 0 : _b.W) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.length)) !== null && _e !== void 0 ? _e : this.inputSize;\n }\n saveModelAsJSONFile(filename) {\n if (!this.elm)\n throw new Error('VotingClassifierELM: no model to save.');\n this.elm.saveModelAsJSONFile(filename);\n }\n}\n\n// adaptive-online-elm.ts — Adaptive Online ELM with dynamic hidden unit adjustment\n// Adjusts hidden units dynamically based on data complexity\n// Import OnlineELM directly - now that we're using ES modules, this works!\n/**\n * Adaptive Online ELM that dynamically adjusts hidden units\n * Features:\n * - Grows hidden units when error is high\n * - Shrinks hidden units when performance is stable\n * - Maintains efficiency while adapting to data complexity\n */\nclass AdaptiveOnlineELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n this.elm = null;\n this.trained = false;\n this.errorHistory = [];\n this.performanceHistory = [];\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n initialHiddenUnits: (_a = options.initialHiddenUnits) !== null && _a !== void 0 ? _a : 128,\n minHiddenUnits: (_b = options.minHiddenUnits) !== null && _b !== void 0 ? _b : 32,\n maxHiddenUnits: (_c = options.maxHiddenUnits) !== null && _c !== void 0 ? _c : 1024,\n growthThreshold: (_d = options.growthThreshold) !== null && _d !== void 0 ? _d : 0.3,\n shrinkThreshold: (_e = options.shrinkThreshold) !== null && _e !== void 0 ? _e : 0.1,\n growthFactor: (_f = options.growthFactor) !== null && _f !== void 0 ? _f : 1.5,\n shrinkFactor: (_g = options.shrinkFactor) !== null && _g !== void 0 ? _g : 0.8,\n activation: (_h = options.activation) !== null && _h !== void 0 ? _h : 'relu',\n maxLen: (_j = options.maxLen) !== null && _j !== void 0 ? _j : 100,\n useTokenizer: (_k = options.useTokenizer) !== null && _k !== void 0 ? 
_k : true,\n };\n this.currentHiddenUnits = this.options.initialHiddenUnits;\n this._initializeELM();\n }\n /**\n * Initialize or reinitialize ELM with current hidden units\n */\n _initializeELM(inputDim) {\n // inputDim must be provided if elm is null or needs reinitialization\n if (inputDim === undefined && this.elm && typeof this.elm.inputDim === 'number') {\n inputDim = this.elm.inputDim;\n }\n if (inputDim === undefined) {\n // Can't initialize without inputDim\n return;\n }\n this.elm = new OnlineELM({\n inputDim: inputDim,\n outputDim: this.categories.length,\n hiddenUnits: this.currentHiddenUnits,\n activation: this.options.activation,\n });\n }\n /**\n * Train with batch data\n */\n fit(X, y) {\n // Convert to one-hot if needed\n const oneHotY = this._toOneHot(y);\n // Initialize or reinitialize if needed\n if (!this.elm || (this.elm && typeof this.elm.inputDim === 'number' && this.elm.inputDim === 0)) {\n if (X.length > 0) {\n this._initializeELM(X[0].length);\n }\n }\n if (!this.elm) {\n throw new Error('Failed to initialize ELM model');\n }\n // Initial training with OnlineELM\n if (this.elm) {\n this.elm.fit(X, oneHotY);\n }\n // Evaluate and potentially adjust\n const error = this._evaluateError(X, oneHotY);\n this.errorHistory.push(error);\n // Adaptive adjustment (may reinitialize ELM)\n this._adaptHiddenUnits(error);\n this.trained = true;\n }\n /**\n * Incremental update with adaptive adjustment\n */\n update(x, y) {\n if (!this.trained || !this.elm) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Update model with OnlineELM\n if (this.elm) {\n this.elm.update([x], [oneHotY]);\n }\n else {\n throw new Error('Model not initialized');\n }\n // Evaluate recent performance\n const recentError = this._evaluateRecentError();\n this.errorHistory.push(recentError);\n // Keep history limited\n if (this.errorHistory.length > 100) {\n this.errorHistory.shift();\n }\n // Adaptive adjustment (may reinitialize ELM)\n this._adaptHiddenUnits(recentError);\n }\n /**\n * Predict with adaptive model\n */\n predict(x, topK = 3) {\n if (!this.trained || !this.elm) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? 
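/* Editor's usage sketch: fit() once to size the model from X0[0].length, then stream
   update() calls; hidden units grow or shrink based on the error history. The data
   and stream are hypothetical.
   const model = new AdaptiveOnlineELM({ categories: ['a', 'b', 'c'], initialHiddenUnits: 128 });
   model.fit(X0, y0);                        // X0: number[][], y0: class indices or one-hot rows
   for (const [x, y] of stream) model.update(x, y);
   console.log(model.getHiddenUnits(), model.predict(xNew, 3));
*/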
x : [x];\n const results = [];\n for (const xi of XArray) {\n if (!this.elm)\n continue;\n const predVec = this.elm.predictLogitsFromVector(xi);\n if (!predVec)\n continue;\n // Convert to probabilities\n const probs = this._softmax(Array.from(predVec));\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n results.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n }\n return results;\n }\n /**\n * Adapt hidden units based on error\n */\n _adaptHiddenUnits(currentError) {\n if (this.errorHistory.length < 5)\n return; // Need some history\n const avgError = this.errorHistory.slice(-10).reduce((a, b) => a + b, 0) / Math.min(10, this.errorHistory.length);\n const recentError = this.errorHistory.slice(-3).reduce((a, b) => a + b, 0) / Math.min(3, this.errorHistory.length);\n // Grow if error is high\n if (recentError > this.options.growthThreshold &&\n this.currentHiddenUnits < this.options.maxHiddenUnits) {\n const newUnits = Math.min(this.options.maxHiddenUnits, Math.floor(this.currentHiddenUnits * this.options.growthFactor));\n if (newUnits > this.currentHiddenUnits) {\n const oldInputDim = this.elm && typeof this.elm.inputDim === 'number'\n ? this.elm.inputDim\n : undefined;\n this.currentHiddenUnits = newUnits;\n if (oldInputDim !== undefined) {\n this._initializeELM(oldInputDim);\n }\n // Note: In practice, you'd want to store recent data for retraining\n // For now, model will need to be retrained\n }\n }\n // Shrink if error is low and stable\n if (recentError < this.options.shrinkThreshold &&\n avgError < this.options.shrinkThreshold &&\n this.currentHiddenUnits > this.options.minHiddenUnits) {\n const newUnits = Math.max(this.options.minHiddenUnits, Math.floor(this.currentHiddenUnits * this.options.shrinkFactor));\n if (newUnits < this.currentHiddenUnits) {\n const oldInputDim = this.elm && typeof this.elm.inputDim === 'number'\n ? this.elm.inputDim\n : undefined;\n this.currentHiddenUnits = newUnits;\n if (oldInputDim !== undefined) {\n this._initializeELM(oldInputDim);\n }\n }\n }\n }\n /**\n * Evaluate error on data\n */\n _evaluateError(X, y) {\n var _a, _b, _c, _d;\n if (!this.elm)\n return 1.0;\n let totalError = 0;\n let count = 0;\n for (let i = 0; i < Math.min(100, X.length); i++) {\n const pred = ((_b = (_a = this.elm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [X[i]])) || ((_d = (_c = this.elm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [X[i]]));\n const predVec = Array.isArray(pred) ? pred[0] : pred;\n if (!predVec)\n continue;\n const trueIdx = this._argmax(y[i]);\n const predIdx = this._argmax(Array.from(predVec));\n if (trueIdx !== predIdx)\n totalError++;\n count++;\n }\n return count > 0 ? 
totalError / count : 1.0;\n }\n /**\n * Evaluate recent error (for incremental updates)\n */\n _evaluateRecentError() {\n // Use last few predictions for error estimate\n // In practice, you'd track actual errors\n if (this.errorHistory.length === 0)\n return 0.5;\n return this.errorHistory[this.errorHistory.length - 1];\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n /**\n * Get current number of hidden units\n */\n getHiddenUnits() {\n return this.currentHiddenUnits;\n }\n /**\n * Get error history\n */\n getErrorHistory() {\n return [...this.errorHistory];\n }\n}\n\n// forgetting-online-elm.ts — Forgetting Online ELM with time-decay for concept drift\n// Handles concept drift by decaying old samples over time\n// Import OnlineELM directly - now that we're using ES modules, this works!\n/**\n * Forgetting Online ELM with time-decay for concept drift\n * Features:\n * - Exponential decay of old samples\n * - Time-based or sample-based forgetting\n * - Sliding window for memory efficiency\n * - Handles concept drift automatically\n */\nclass ForgettingOnlineELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.elm = null;\n this.samples = [];\n this.trained = false;\n this.currentTime = 0;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n decayRate: (_b = options.decayRate) !== null && _b !== void 0 ? _b : 0.99,\n windowSize: (_c = options.windowSize) !== null && _c !== void 0 ? _c : 1000,\n timeBasedDecay: (_d = options.timeBasedDecay) !== null && _d !== void 0 ? _d : false,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n };\n // inputDim will be set during first fit\n // Note: OnlineELM will be initialized during fit() when we have inputDim\n this.elm = null;\n }\n /**\n * Initial training with batch data\n */\n fit(X, y) {\n const oneHotY = this._toOneHot(y);\n // Store samples with timestamps\n for (let i = 0; i < X.length; i++) {\n this.samples.push({\n x: [...X[i]],\n y: [...oneHotY[i]],\n timestamp: this.currentTime++,\n weight: 1.0,\n });\n }\n // Train on all samples (will initialize OnlineELM if needed)\n this._retrain();\n this.trained = true;\n }\n /**\n * Incremental update with forgetting mechanism\n */\n update(x, y) {\n if (!this.trained) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? 
y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Apply decay to existing samples\n this._applyDecay();\n // Add new sample\n this.samples.push({\n x: [...x],\n y: [...oneHotY],\n timestamp: this.currentTime++,\n weight: 1.0,\n });\n // Remove old samples if window exceeded\n if (this.samples.length > this.options.windowSize) {\n const removeCount = this.samples.length - this.options.windowSize;\n this.samples.splice(0, removeCount);\n }\n // Retrain with weighted samples\n this._retrain();\n }\n /**\n * Predict with forgetting model\n */\n predict(x, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const results = [];\n for (const xi of XArray) {\n const predVec = this.elm ? this.elm.predictLogitsFromVector(xi) : null;\n if (!predVec)\n continue;\n // Convert to probabilities\n const probs = this._softmax(Array.from(predVec));\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n results.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n }\n return results;\n }\n /**\n * Apply decay to all samples\n */\n _applyDecay() {\n if (this.options.timeBasedDecay) {\n // Time-based: decay based on time difference\n const currentTime = this.currentTime;\n for (const sample of this.samples) {\n const timeDiff = currentTime - sample.timestamp;\n sample.weight *= Math.pow(this.options.decayRate, timeDiff);\n }\n }\n else {\n // Sample-based: uniform decay\n for (const sample of this.samples) {\n sample.weight *= this.options.decayRate;\n }\n }\n }\n /**\n * Retrain model with weighted samples\n */\n _retrain() {\n if (this.samples.length === 0)\n return;\n // Get inputDim from first sample\n const inputDim = this.samples[0].x.length;\n // Reinitialize ELM if inputDim changed or not set\n if (!this.elm || (this.elm && this.elm.inputDim !== inputDim)) {\n this.elm = new OnlineELM({\n inputDim: inputDim,\n outputDim: this.categories.length,\n hiddenUnits: this.options.hiddenUnits,\n activation: this.options.activation,\n });\n }\n // Train with weighted samples\n // In practice, you'd use weighted training\n // For now, we'll use samples with weights above threshold\n const threshold = 0.01;\n const activeSamples = this.samples.filter(s => s.weight > threshold);\n // Batch samples for efficiency\n const X = [];\n const Y = [];\n for (const sample of activeSamples) {\n // Repeat samples based on weight (simplified approach)\n const repetitions = Math.max(1, Math.floor(sample.weight * 10));\n for (let i = 0; i < repetitions; i++) {\n X.push(sample.x);\n Y.push(sample.y);\n }\n }\n if (X.length > 0) {\n this.elm.fit(X, Y);\n }\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n /**\n * Get sample statistics\n */\n getSampleStats() {\n const active = this.samples.filter(s => s.weight > 0.01).length;\n const avgWeight = this.samples.length > 0\n ? 
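/* Editor's usage sketch: under concept drift, each update() decays stored sample
   weights and retrains on the surviving window. The stream is hypothetical.
   const drift = new ForgettingOnlineELM({ categories: ['spam', 'ham'], decayRate: 0.99, windowSize: 500 });
   drift.fit(X0, y0);
   for (const [x, y] of stream) drift.update(x, y);
   console.log(drift.getSampleStats());      // { total, active, avgWeight }
*/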
this.samples.reduce((sum, s) => sum + s.weight, 0) / this.samples.length\n : 0;\n return {\n total: this.samples.length,\n active,\n avgWeight,\n };\n }\n}\n\n// hierarchical-elm.ts — Hierarchical ELM for tree-structured classification\n// Coarse-to-fine classification with hierarchical decision making\n/**\n * Hierarchical ELM for tree-structured classification\n * Features:\n * - Coarse-to-fine classification\n * - Tree-structured decision making\n * - Multi-level probability estimation\n * - Efficient hierarchical search\n */\nclass HierarchicalELM {\n constructor(options) {\n var _a, _b, _c, _d;\n this.elms = new Map();\n this.trained = false;\n this.hierarchy = new Map(Object.entries(options.hierarchy));\n this.rootCategories = options.rootCategories;\n this.options = {\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n activation: (_b = options.activation) !== null && _b !== void 0 ? _b : 'relu',\n maxLen: (_c = options.maxLen) !== null && _c !== void 0 ? _c : 100,\n useTokenizer: (_d = options.useTokenizer) !== null && _d !== void 0 ? _d : true,\n };\n // Initialize ELM for each level\n this._initializeELMs();\n }\n /**\n * Initialize ELMs for each level in hierarchy\n */\n _initializeELMs() {\n // Root level ELM\n this.elms.set('root', new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.rootCategories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n }));\n // Child level ELMs\n for (const [parent, children] of this.hierarchy.entries()) {\n this.elms.set(parent, new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: children,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n }));\n }\n }\n /**\n * Train hierarchical ELM\n * @param X Input features\n * @param yLabels Full hierarchical paths (e.g., ['root', 'parent', 'child'])\n */\n train(X, yLabels) {\n var _a, _b, _c, _d;\n // Group samples by level\n const levelData = new Map();\n // Root level\n const rootX = [];\n const rootY = [];\n for (let i = 0; i < X.length; i++) {\n if (yLabels[i].length > 0) {\n rootX.push(X[i]);\n rootY.push(this.rootCategories.indexOf(yLabels[i][0]));\n }\n }\n levelData.set('root', { X: rootX, y: rootY });\n // Child levels\n for (const [parent, children] of this.hierarchy.entries()) {\n const parentX = [];\n const parentY = [];\n for (let i = 0; i < X.length; i++) {\n const path = yLabels[i];\n const parentIdx = path.indexOf(parent);\n if (parentIdx >= 0 && parentIdx < path.length - 1) {\n const child = path[parentIdx + 1];\n if (children.includes(child)) {\n parentX.push(X[i]);\n parentY.push(children.indexOf(child));\n }\n }\n }\n if (parentX.length > 0) {\n levelData.set(parent, { X: parentX, y: parentY });\n }\n }\n // Train each ELM\n for (const [level, data] of levelData.entries()) {\n const elm = this.elms.get(level);\n if (elm && data.X.length > 0) {\n (_b = (_a = elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, level === 'root' ? this.rootCategories : this.hierarchy.get(level) || []);\n (_d = (_c = elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, data.X, data.y);\n }\n }\n this.trained = true;\n }\n /**\n * Predict with hierarchical model\n */\n predict(x, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? 
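/* Editor's usage sketch: the hierarchy is a parent-to-children map and labels are
   full root-to-leaf paths. The taxonomy below is hypothetical.
   const h = new HierarchicalELM({
     rootCategories: ['animal', 'vehicle'],
     hierarchy: { animal: ['cat', 'dog'], vehicle: ['car', 'bike'] },
   });
   h.train(X, paths);                        // paths[i] like ['animal', 'cat']
   const best = h.predict(xNew, 3);          // [{ path, label, prob, levelProbs }]
*/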
x : [x];\n const allResults = [];\n for (const xi of XArray) {\n const results = this._predictHierarchical(xi, topK);\n allResults.push(...results);\n }\n return allResults;\n }\n /**\n * Hierarchical prediction from root to leaf\n */\n _predictHierarchical(x, topK) {\n var _a, _b, _c, _d;\n const rootELM = this.elms.get('root');\n const rootPred = ((_b = (_a = rootELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n const allPaths = [];\n // For each root prediction, explore children\n for (const rootPredItem of rootPred.slice(0, topK)) {\n const rootLabel = rootPredItem.label || this.rootCategories[rootPredItem.index || 0];\n const rootProb = rootPredItem.prob || 0;\n // Check if root has children\n const children = this.hierarchy.get(rootLabel);\n if (!children || children.length === 0) {\n // Leaf node\n allPaths.push({\n path: [rootLabel],\n label: rootLabel,\n prob: rootProb,\n levelProbs: [rootProb],\n });\n continue;\n }\n // Predict children\n const childELM = this.elms.get(rootLabel);\n if (childELM) {\n const childPred = ((_d = (_c = childELM).predictFromVector) === null || _d === void 0 ? void 0 : _d.call(_c, [x], topK)) || [];\n for (const childPredItem of childPred.slice(0, topK)) {\n const childLabel = childPredItem.label || children[childPredItem.index || 0];\n const childProb = childPredItem.prob || 0;\n const combinedProb = rootProb * childProb;\n allPaths.push({\n path: [rootLabel, childLabel],\n label: childLabel,\n prob: combinedProb,\n levelProbs: [rootProb, childProb],\n });\n }\n }\n else {\n // No child ELM, use root\n allPaths.push({\n path: [rootLabel],\n label: rootLabel,\n prob: rootProb,\n levelProbs: [rootProb],\n });\n }\n }\n // Sort by probability and return top-K\n allPaths.sort((a, b) => b.prob - a.prob);\n return allPaths.slice(0, topK);\n }\n /**\n * Get hierarchy structure\n */\n getHierarchy() {\n return new Map(this.hierarchy);\n }\n /**\n * Get root categories\n */\n getRootCategories() {\n return [...this.rootCategories];\n }\n}\n\n// attention-enhanced-elm.ts — Attention-Enhanced ELM with attention mechanisms\n// Query-key-value attention and self-attention for sequences\n/**\n * Attention-Enhanced ELM with attention mechanisms\n * Features:\n * - Query-key-value attention in hidden layer\n * - Self-attention for sequences\n * - Multi-head attention support\n * - Context-aware classification\n */\nclass AttentionEnhancedELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.attentionWeights = []; // [head][sequence][weight]\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n attentionHeads: (_b = options.attentionHeads) !== null && _b !== void 0 ? _b : 4,\n attentionDim: (_c = options.attentionDim) !== null && _c !== void 0 ? _c : 64,\n useSelfAttention: (_d = options.useSelfAttention) !== null && _d !== void 0 ? _d : true,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
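/* Editor's usage sketch: note that the per-head Q/K/V projections below are
   hash-seeded rather than learned (see _project), so this augments features with a
   fixed attention transform before the ELM. Data is hypothetical.
   const att = new AttentionEnhancedELM({ categories: ['question', 'statement'], attentionHeads: 4, attentionDim: 64 });
   att.train(X, y);
   const preds = att.predict(xNew, 3, true); // returnAttention=true attaches attentionWeights
*/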
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with attention-enhanced features\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract features with attention\n const attentionFeatures = this._extractAttentionFeatures(X);\n // Train base ELM on attention-enhanced features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, attentionFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Predict with attention\n */\n predict(X, topK = 3, returnAttention = false) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Extract attention features\n const attentionFeatures = this._extractAttentionFeatures([x])[0];\n // Predict\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [attentionFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnAttention && this.attentionWeights.length > 0) {\n result.attentionWeights = this.attentionWeights[this.attentionWeights.length - 1];\n }\n results.push(result);\n }\n }\n return results;\n }\n /**\n * Extract features with attention mechanism\n */\n _extractAttentionFeatures(X) {\n const features = [];\n for (const x of X) {\n // Compute attention for each head\n const headFeatures = [];\n for (let head = 0; head < this.options.attentionHeads; head++) {\n const attentionOutput = this._computeAttention(x, head);\n headFeatures.push(attentionOutput);\n }\n // Concatenate all heads\n const concatenated = headFeatures.flat();\n // Project to hidden units size\n const projected = this._projectToHiddenSize(concatenated);\n features.push(projected);\n }\n return features;\n }\n /**\n * Compute attention for a sequence\n */\n _computeAttention(x, headIndex) {\n // Simple attention mechanism: Q, K, V projection\n const seqLen = x.length;\n const dim = this.options.attentionDim;\n // Generate Q, K, V (simplified - using random projections)\n const Q = this._project(x, dim, `Q_${headIndex}`);\n const K = this._project(x, dim, `K_${headIndex}`);\n const V = this._project(x, dim, `V_${headIndex}`);\n // Compute attention scores: Q * K^T\n const scores = [];\n for (let i = 0; i < seqLen; i++) {\n let score = 0;\n for (let j = 0; j < dim; j++) {\n score += Q[j] * K[j];\n }\n scores.push(score / Math.sqrt(dim)); // Scaled dot-product\n }\n // Softmax attention weights\n const weights = this._softmax(scores);\n // Apply attention to values\n const output = new Array(dim).fill(0);\n for (let i = 0; i < seqLen; i++) {\n for (let j = 0; j < dim; j++) {\n output[j] += weights[i] * V[j];\n }\n }\n // Store attention weights for this head\n if (!this.attentionWeights[headIndex]) {\n this.attentionWeights[headIndex] = [];\n }\n this.attentionWeights[headIndex].push(weights);\n return output;\n }\n /**\n * Project input to attention dimension\n */\n _project(x, dim, key) {\n // Simple linear projection (in 
practice, you'd use learned weights)\n const projected = new Array(dim).fill(0);\n const scale = Math.sqrt(2.0 / (x.length + dim));\n for (let i = 0; i < dim; i++) {\n for (let j = 0; j < x.length; j++) {\n // Simple hash-based projection for determinism\n const hash = this._hash(`${key}_${i}_${j}`);\n projected[i] += x[j] * (hash * scale);\n }\n }\n return projected;\n }\n /**\n * Project attention output to hidden units size\n */\n _projectToHiddenSize(attentionOutput) {\n const hiddenSize = this.options.hiddenUnits;\n const output = new Array(hiddenSize).fill(0);\n const scale = Math.sqrt(2.0 / (attentionOutput.length + hiddenSize));\n for (let i = 0; i < hiddenSize; i++) {\n for (let j = 0; j < attentionOutput.length; j++) {\n const hash = this._hash(`proj_${i}_${j}`);\n output[i] += attentionOutput[j] * (hash * scale);\n }\n // Apply activation\n if (this.options.activation === 'relu') {\n output[i] = Math.max(0, output[i]);\n }\n else if (this.options.activation === 'tanh') {\n output[i] = Math.tanh(output[i]);\n }\n else if (this.options.activation === 'sigmoid') {\n output[i] = 1 / (1 + Math.exp(-output[i]));\n }\n }\n return output;\n }\n _softmax(logits) {\n const max = Math.max(...logits);\n const exp = logits.map(x => Math.exp(x - max));\n const sum = exp.reduce((a, b) => a + b, 0);\n return exp.map(x => x / sum);\n }\n _hash(str) {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = ((hash << 5) - hash) + char;\n hash = hash & hash; // Convert to 32-bit integer\n }\n return (hash / 2147483647); // Normalize to [-1, 1]\n }\n /**\n * Get attention weights for last prediction\n */\n getAttentionWeights() {\n return this.attentionWeights.map(head => [...head]);\n }\n}\n\n// variational-elm.ts — Variational ELM with uncertainty estimation\n// Probabilistic ELM with Bayesian inference and confidence intervals\n/**\n * Variational ELM with uncertainty estimation\n * Features:\n * - Probabilistic predictions with uncertainty\n * - Bayesian inference\n * - Confidence intervals\n * - Robust predictions with uncertainty quantification\n */\nclass VariationalELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.weightSamples = []; // Sampled weight matrices\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n priorVariance: (_b = options.priorVariance) !== null && _b !== void 0 ? _b : 1.0,\n posteriorSamples: (_c = options.posteriorSamples) !== null && _c !== void 0 ? _c : 10,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train variational ELM\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
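/* Editor's usage sketch: predictions carry an uncertainty estimate derived from
   posterior weight samples, plus a ~95% interval (prob +/- 1.96 * stdDev, as computed
   in predict()). Data is hypothetical.
   const v = new VariationalELM({ categories: ['yes', 'no'], posteriorSamples: 10 });
   v.train(X, y);
   const [top] = v.predict(xNew, 1);
   console.log(top.prob, top.uncertainty, top.confidenceInterval);
*/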
void 0 : _d.call(_c, X, labelIndices);\n // Sample weights for uncertainty estimation\n this._sampleWeights();\n this.trained = true;\n }\n /**\n * Predict with uncertainty estimation\n */\n predict(X, topK = 3, includeUncertainty = true) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Get base prediction\n const basePreds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n // Estimate uncertainty\n const uncertainty = includeUncertainty ? this._estimateUncertainty(x) : 0.5;\n for (const pred of basePreds.slice(0, topK)) {\n const prob = pred.prob || 0;\n const confidence = Math.max(0, Math.min(1, 1 - uncertainty));\n // Compute confidence interval\n const stdDev = Math.sqrt(uncertainty * prob * (1 - prob));\n const confidenceInterval = [\n Math.max(0, prob - 1.96 * stdDev),\n Math.min(1, prob + 1.96 * stdDev)\n ];\n allResults.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob,\n confidence,\n uncertainty,\n confidenceInterval,\n });\n }\n }\n return allResults;\n }\n /**\n * Estimate uncertainty using weight sampling\n */\n _estimateUncertainty(x) {\n if (this.weightSamples.length === 0) {\n return 0.5; // Default uncertainty\n }\n // Get predictions from multiple weight samples\n const predictions = [];\n for (const weights of this.weightSamples) {\n // Simplified: use variance in predictions as uncertainty measure\n // In practice, you'd compute actual predictions with sampled weights\n const pred = this._predictWithWeights(x, weights);\n predictions.push(pred);\n }\n // Compute variance as uncertainty measure\n const mean = predictions.reduce((a, b) => a + b, 0) / predictions.length;\n const variance = predictions.reduce((sum, p) => sum + Math.pow(p - mean, 2), 0) / predictions.length;\n // Normalize to [0, 1]\n return Math.min(1, variance);\n }\n /**\n * Predict with specific weight matrix (simplified)\n */\n _predictWithWeights(x, weights) {\n // Simplified prediction - in practice, you'd use the actual ELM forward pass\n // This is a placeholder for uncertainty estimation\n return 0.5;\n }\n /**\n * Sample weight matrices for uncertainty estimation\n */\n _sampleWeights() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const baseWeights = model.W;\n this.weightSamples = [];\n // Sample weights by adding Gaussian noise\n for (let s = 0; s < this.options.posteriorSamples; s++) {\n const sampled = [];\n for (let i = 0; i < baseWeights.length; i++) {\n sampled[i] = [];\n for (let j = 0; j < baseWeights[i].length; j++) {\n // Sample from posterior (Gaussian around base weight)\n const noise = this._gaussianRandom(0, this.options.priorVariance);\n sampled[i][j] = baseWeights[i][j] + noise;\n }\n }\n this.weightSamples.push(sampled);\n }\n }\n _gaussianRandom(mean, variance) {\n // Box-Muller transform\n const u1 = Math.random();\n const u2 = Math.random();\n const z0 = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);\n return mean + z0 * Math.sqrt(variance);\n }\n}\n\n// time-series-elm.ts — Time-Series ELM for temporal pattern recognition\n// Sequence-to-sequence ELM with temporal dependencies\n/**\n * Time-Series ELM for temporal pattern recognition\n * Features:\n * - Temporal pattern recognition\n * - Optional recurrent connections\n * - Sequence-to-sequence prediction\n * - Forecasting capabilities\n 
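 * @example <caption>Editor's sketch; sequence data is hypothetical</caption>
 * const ts = new TimeSeriesELM({ categories: ['up', 'down'], lookbackWindow: 5 });
 * ts.train(sequences, labels);        // sequences: number[][][] (samples x steps x features)
 * const preds = ts.predict(seqNew, 3);
 * const next = ts.forecast(seqNew, 2); // feeds each prediction back in (simplified)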
*/\nclass TimeSeriesELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.history = []; // Store recent history for recurrent mode\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n sequenceLength: (_b = options.sequenceLength) !== null && _b !== void 0 ? _b : 10,\n lookbackWindow: (_c = options.lookbackWindow) !== null && _c !== void 0 ? _c : 5,\n useRecurrent: (_d = options.useRecurrent) !== null && _d !== void 0 ? _d : false,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on time-series data\n * @param X Sequences of features (each element is a time step)\n * @param y Labels for each sequence\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Flatten sequences to features\n const flattenedFeatures = this._flattenSequences(X);\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, flattenedFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Train on single sequences (convenience method)\n */\n trainSequences(sequences, labels) {\n this.train(sequences, labels);\n }\n /**\n * Predict from time-series sequence\n */\n predict(sequence, topK = 3) {\n var _a, _b, _c, _d;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const sequences = Array.isArray(sequence[0][0])\n ? sequence\n : [sequence];\n const allResults = [];\n for (const seq of sequences) {\n // Flatten sequence to features\n const features = this._flattenSequence(seq);\n // Update history if using recurrent mode\n if (this.options.useRecurrent) {\n this._updateHistory(features);\n // Use history-enhanced features\n const enhancedFeatures = this._enhanceWithHistory(features);\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [enhancedFeatures], topK)) || [];\n allResults.push(...preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n else {\n const preds = ((_d = (_c = this.elm).predictFromVector) === null || _d === void 0 ? 
void 0 : _d.call(_c, [features], topK)) || [];\n allResults.push(...preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n }\n return allResults;\n }\n /**\n * Forecast future values (for regression/forecasting tasks)\n */\n forecast(sequence, steps = 1) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before forecasting');\n }\n const forecasts = [];\n let currentSeq = [...sequence];\n for (let step = 0; step < steps; step++) {\n const features = this._flattenSequence(currentSeq);\n const pred = ((_b = (_a = this.elm).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, features)) || [];\n // Use prediction as next step (simplified - in practice, you'd have a regression head)\n forecasts.push([...pred]);\n // Update sequence for next step\n currentSeq.push(pred);\n if (currentSeq.length > this.options.sequenceLength) {\n currentSeq.shift();\n }\n }\n return forecasts;\n }\n /**\n * Flatten sequences to feature vectors\n */\n _flattenSequences(sequences) {\n return sequences.map(seq => this._flattenSequence(seq));\n }\n /**\n * Flatten a single sequence\n */\n _flattenSequence(sequence) {\n var _a;\n // Concatenate all time steps\n const flattened = [];\n // Take last lookbackWindow steps\n const relevantSteps = sequence.slice(-this.options.lookbackWindow);\n for (const step of relevantSteps) {\n flattened.push(...step);\n }\n // Pad if necessary\n while (flattened.length < this.options.lookbackWindow * (((_a = sequence[0]) === null || _a === void 0 ? void 0 : _a.length) || 1)) {\n flattened.push(0);\n }\n return flattened;\n }\n /**\n * Update history for recurrent mode\n */\n _updateHistory(features) {\n this.history.push([...features]);\n // Keep only recent history\n if (this.history.length > this.options.lookbackWindow) {\n this.history.shift();\n }\n }\n /**\n * Enhance features with history (recurrent mode)\n */\n _enhanceWithHistory(currentFeatures) {\n if (this.history.length === 0) {\n return currentFeatures;\n }\n // Concatenate history with current features\n const historyFeatures = this.history.flat();\n return [...historyFeatures, ...currentFeatures];\n }\n /**\n * Clear history (useful for new sequences)\n */\n clearHistory() {\n this.history = [];\n }\n}\n\n// transfer-learning-elm.ts — Transfer Learning ELM\n// Pre-trained ELM adaptation, domain adaptation, and few-shot learning\n/**\n * Transfer Learning ELM\n * Features:\n * - Pre-trained model adaptation\n * - Domain adaptation\n * - Few-shot learning\n * - Fine-tuning capabilities\n */\nclass TransferLearningELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.sourceModel = null;\n this.trained = false;\n this.categories = options.categories;\n this.sourceModel = options.sourceModel || null;\n this.options = {\n categories: options.categories,\n sourceModel: this.sourceModel,\n freezeBase: (_a = options.freezeBase) !== null && _a !== void 0 ? _a : false,\n fineTuneLayers: (_b = options.fineTuneLayers) !== null && _b !== void 0 ? _b : 1,\n hiddenUnits: (_c = options.hiddenUnits) !== null && _c !== void 0 ? _c : 256,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Transfer weights from source model if available\n if (this.sourceModel) {\n this._transferWeights();\n }\n }\n /**\n * Transfer weights from source model\n */\n _transferWeights() {\n var _a, _b;\n if (!this.sourceModel)\n return;\n const sourceModelData = this.sourceModel.model;\n const targetModel = this.elm.model;\n if (!sourceModelData || !targetModel)\n return;\n // Transfer hidden layer weights if dimensions match\n if (sourceModelData.W && targetModel.W) {\n const sourceW = sourceModelData.W;\n const targetW = targetModel.W;\n // Copy matching dimensions\n for (let i = 0; i < Math.min(sourceW.length, targetW.length); i++) {\n for (let j = 0; j < Math.min(((_a = sourceW[i]) === null || _a === void 0 ? void 0 : _a.length) || 0, ((_b = targetW[i]) === null || _b === void 0 ? void 0 : _b.length) || 0); j++) {\n if (!this.options.freezeBase) {\n targetW[i][j] = sourceW[i][j];\n }\n }\n }\n }\n // Transfer biases if available\n if (sourceModelData.b && targetModel.b) {\n const sourceB = sourceModelData.b;\n const targetB = targetModel.b;\n for (let i = 0; i < Math.min(sourceB.length, targetB.length); i++) {\n if (!this.options.freezeBase) {\n targetB[i] = sourceB[i];\n }\n }\n }\n }\n /**\n * Train with transfer learning\n * @param X Target domain features\n * @param y Target domain labels\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // If source model exists and we're not freezing, fine-tune\n if (this.sourceModel && !this.options.freezeBase) {\n // Fine-tune: train on new data with transferred weights\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices, {\n reuseWeights: true, // Reuse transferred weights\n });\n }\n else {\n // Standard training\n (_f = (_e = this.elm).setCategories) === null || _f === void 0 ? void 0 : _f.call(_e, this.options.categories);\n (_h = (_g = this.elm).trainFromData) === null || _h === void 0 ? void 0 : _h.call(_g, X, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Few-shot learning: train with very few examples\n */\n fewShotTrain(X, y, shots = 5) {\n if (!this.sourceModel) {\n throw new Error('Few-shot learning requires a pre-trained source model');\n }\n // Use only a few examples\n const limitedX = X.slice(0, shots);\n const limitedY = y.slice(0, shots);\n // Fine-tune on limited data\n this.train(limitedX, limitedY);\n }\n /**\n * Predict with transferred model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? 
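/* Editor's usage sketch: warm-start from a previously exported model, then fine-tune
   or few-shot adapt on the target domain. srcELM and the data are hypothetical.
   const target = new TransferLearningELM({ categories: ['bug', 'feature'], sourceModel: srcELM });
   target.fewShotTrain(Xtarget, yTarget, 5); // fine-tunes on only 5 examples
   const preds = target.predict(xQuery, 3);
   const exported = target.exportModel();    // reusable as sourceModel elsewhere
*/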
void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n /**\n * Load pre-trained model\n */\n loadSourceModel(model) {\n this.sourceModel = model;\n this._transferWeights();\n }\n /**\n * Export current model for use as source in other transfers\n */\n exportModel() {\n return {\n model: this.elm.model,\n categories: this.options.categories,\n config: {\n hiddenUnits: this.options.hiddenUnits,\n activation: this.options.activation,\n },\n };\n }\n}\n\n// graph-elm.ts — Graph ELM for graph-structured data\n// Graph neural network + ELM for node/edge classification\n/**\n * Graph ELM for graph-structured data\n * Features:\n * - Node feature learning\n * - Graph structure encoding\n * - Edge-aware classification\n * - Graph convolution operations\n */\nclass GraphELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.trained = false;\n this.nodeFeatureMap = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n aggregationType: (_b = options.aggregationType) !== null && _b !== void 0 ? _b : 'mean',\n numLayers: (_c = options.numLayers) !== null && _c !== void 0 ? _c : 2,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on graph data\n * @param graphs Array of graphs\n * @param y Labels for each graph (or node labels)\n */\n train(graphs, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract graph features\n const graphFeatures = graphs.map(graph => this._extractGraphFeatures(graph));\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
void 0 : _d.call(_c, graphFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features from graph structure\n */\n _extractGraphFeatures(graph) {\n // Build adjacency map\n const adjacencyMap = new Map();\n for (const edge of graph.edges) {\n if (!adjacencyMap.has(edge.source)) {\n adjacencyMap.set(edge.source, []);\n }\n if (!adjacencyMap.has(edge.target)) {\n adjacencyMap.set(edge.target, []);\n }\n adjacencyMap.get(edge.source).push(String(edge.target));\n adjacencyMap.get(edge.target).push(String(edge.source));\n }\n // Compute node features through graph convolution\n const nodeFeatures = new Map();\n // Initialize with node features\n for (const node of graph.nodes) {\n nodeFeatures.set(node.id, [...node.features]);\n }\n // Graph convolution layers\n for (let layer = 0; layer < this.options.numLayers; layer++) {\n const newFeatures = new Map();\n for (const node of graph.nodes) {\n const neighbors = adjacencyMap.get(node.id) || [];\n const neighborFeatures = neighbors\n .map(nid => {\n const node = graph.nodes.find(n => String(n.id) === String(nid));\n return node ? nodeFeatures.get(node.id) : null;\n })\n .filter(f => f !== null);\n // Aggregate neighbor features\n const aggregated = this._aggregateNeighbors(neighborFeatures);\n // Combine with self features\n const selfFeatures = nodeFeatures.get(node.id) || [];\n const combined = this._combineFeatures(selfFeatures, aggregated);\n newFeatures.set(node.id, combined);\n }\n // Update features\n for (const [id, features] of newFeatures) {\n nodeFeatures.set(id, features);\n }\n }\n // Aggregate all node features to graph-level features\n const allNodeFeatures = Array.from(nodeFeatures.values());\n const graphFeatures = this._aggregateNodes(allNodeFeatures);\n return graphFeatures;\n }\n /**\n * Aggregate neighbor features\n */\n _aggregateNeighbors(neighborFeatures) {\n if (neighborFeatures.length === 0) {\n return [];\n }\n const dim = neighborFeatures[0].length;\n const aggregated = new Array(dim).fill(0);\n for (const features of neighborFeatures) {\n for (let i = 0; i < dim; i++) {\n if (this.options.aggregationType === 'mean') {\n aggregated[i] += features[i] / neighborFeatures.length;\n }\n else if (this.options.aggregationType === 'sum') {\n aggregated[i] += features[i];\n }\n else if (this.options.aggregationType === 'max') {\n aggregated[i] = Math.max(aggregated[i], features[i]);\n }\n }\n }\n return aggregated;\n }\n /**\n * Combine self and neighbor features\n */\n _combineFeatures(self, neighbors) {\n const dim = Math.max(self.length, neighbors.length);\n const combined = new Array(dim).fill(0);\n for (let i = 0; i < dim; i++) {\n const selfVal = i < self.length ? self[i] : 0;\n const neighborVal = i < neighbors.length ? 
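/* Editor's usage sketch: the expected graph shape. Nodes carry feature vectors, edges
   are source/target pairs, and node features are pooled per aggregationType after
   numLayers rounds of neighbor aggregation. The graph is hypothetical.
   const g = {
     nodes: [{ id: 'a', features: [1, 0] }, { id: 'b', features: [0, 1] }],
     edges: [{ source: 'a', target: 'b' }],
   };
   const gelm = new GraphELM({ categories: ['cyclic', 'acyclic'], numLayers: 2, aggregationType: 'mean' });
   gelm.train([g], ['acyclic']);
   const preds = gelm.predict(g, 1);
*/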
neighbors[i] : 0;\n combined[i] = selfVal + neighborVal; // Simple addition\n }\n // Apply activation\n if (this.options.activation === 'relu') {\n return combined.map(x => Math.max(0, x));\n }\n else if (this.options.activation === 'tanh') {\n return combined.map(x => Math.tanh(x));\n }\n else if (this.options.activation === 'sigmoid') {\n return combined.map(x => 1 / (1 + Math.exp(-x)));\n }\n return combined;\n }\n /**\n * Aggregate all node features to graph level\n */\n _aggregateNodes(nodeFeatures) {\n if (nodeFeatures.length === 0) {\n return [];\n }\n const dim = nodeFeatures[0].length;\n const graphFeatures = new Array(dim).fill(0);\n for (const features of nodeFeatures) {\n for (let i = 0; i < dim; i++) {\n if (this.options.aggregationType === 'mean') {\n graphFeatures[i] += features[i] / nodeFeatures.length;\n }\n else if (this.options.aggregationType === 'sum') {\n graphFeatures[i] += features[i];\n }\n else if (this.options.aggregationType === 'max') {\n graphFeatures[i] = Math.max(graphFeatures[i], features[i]);\n }\n }\n }\n return graphFeatures;\n }\n /**\n * Predict on graph\n */\n predict(graph, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const graphs = Array.isArray(graph) ? graph : [graph];\n const results = [];\n for (const g of graphs) {\n const graphFeatures = this._extractGraphFeatures(g);\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [graphFeatures], topK)) || [];\n // Store node features for first node (for interpretability)\n const firstNodeFeatures = g.nodes.length > 0\n ? this.nodeFeatureMap.get(g.nodes[0].id) || g.nodes[0].features\n : undefined;\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n nodeFeatures: firstNodeFeatures,\n });\n }\n }\n return results;\n }\n}\n\n// adaptive-kernel-elm.ts — Adaptive Kernel ELM\n// Data-dependent kernel parameters with local kernel adaptation\n/**\n * Adaptive Kernel ELM with data-dependent kernel parameters\n * Features:\n * - Local kernel adaptation\n * - Sample-specific kernels\n * - Adaptive gamma/degree parameters\n * - Improved performance on non-stationary data\n */\nclass AdaptiveKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.trained = false;\n this.adaptiveKernels = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'rbf',\n adaptiveGamma: (_b = options.adaptiveGamma) !== null && _b !== void 0 ? _b : true,\n adaptiveDegree: (_c = options.adaptiveDegree) !== null && _c !== void 0 ? _c : false,\n baseGamma: (_d = options.baseGamma) !== null && _d !== void 0 ? _d : 1.0,\n baseDegree: (_e = options.baseDegree) !== null && _e !== void 0 ? _e : 2,\n baseCoef0: (_f = options.baseCoef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? 
true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.baseGamma,\n degree: this.options.baseDegree,\n coef0: this.options.baseCoef0,\n });\n }\n /**\n * Train with adaptive kernels\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Compute adaptive kernel parameters for each sample\n if (this.options.adaptiveGamma || this.options.adaptiveDegree) {\n this._computeAdaptiveKernels(X);\n }\n // Train base KernelELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices);\n this.trained = true;\n }\n /**\n * Compute adaptive kernel parameters\n */\n _computeAdaptiveKernels(X) {\n // Compute local statistics for each sample\n for (let i = 0; i < X.length; i++) {\n const x = X[i];\n const neighbors = this._findNeighbors(x, X, 5); // Find 5 nearest neighbors\n const params = {};\n if (this.options.adaptiveGamma) {\n // Adapt gamma based on local density\n const localDensity = this._computeLocalDensity(x, neighbors);\n params.gamma = this.options.baseGamma / (1 + localDensity);\n }\n if (this.options.adaptiveDegree) {\n // Adapt degree based on local complexity\n const localComplexity = this._computeLocalComplexity(neighbors);\n params.degree = Math.max(1, Math.round(this.options.baseDegree * localComplexity));\n }\n this.adaptiveKernels.set(i, params);\n }\n }\n /**\n * Find nearest neighbors\n */\n _findNeighbors(x, X, k) {\n const distances = X.map((xi, i) => ({\n index: i,\n dist: this._euclideanDistance(x, xi),\n }));\n distances.sort((a, b) => a.dist - b.dist);\n return distances.slice(1, k + 1).map(d => X[d.index]);\n }\n /**\n * Compute local density\n */\n _computeLocalDensity(x, neighbors) {\n if (neighbors.length === 0)\n return 1;\n const avgDist = neighbors.reduce((sum, n) => sum + this._euclideanDistance(x, n), 0) / neighbors.length;\n return avgDist;\n }\n /**\n * Compute local complexity\n */\n _computeLocalComplexity(neighbors) {\n if (neighbors.length < 2)\n return 1;\n // Compute variance in neighbors as complexity measure\n const variances = [];\n for (let i = 0; i < neighbors[0].length; i++) {\n const values = neighbors.map(n => n[i]);\n const mean = values.reduce((a, b) => a + b, 0) / values.length;\n const variance = values.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0) / values.length;\n variances.push(variance);\n }\n const avgVariance = variances.reduce((a, b) => a + b, 0) / variances.length;\n return Math.sqrt(avgVariance);\n }\n _euclideanDistance(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += Math.pow(a[i] - b[i], 2);\n }\n return Math.sqrt(sum);\n }\n /**\n * Predict with adaptive kernels\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Get base prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? 
void 0 : _b.call(_a, [x], topK)) || [];\n // Get adaptive kernel params for this sample (if available)\n const sampleIndex = XArray.indexOf(x);\n const kernelParams = this.adaptiveKernels.get(sampleIndex);\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n kernelParams,\n });\n }\n }\n return results;\n }\n}\n\n// sparse-kernel-elm.ts — Sparse Kernel ELM\n// Sparse kernel matrix approximation with landmark selection\n/**\n * Sparse Kernel ELM with landmark-based approximation\n * Features:\n * - Sparse kernel matrix approximation\n * - Landmark selection strategies\n * - Reduced computational complexity\n * - Scalable to large datasets\n */\nclass SparseKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.landmarks = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'rbf',\n numLandmarks: (_b = options.numLandmarks) !== null && _b !== void 0 ? _b : 100,\n landmarkSelection: (_c = options.landmarkSelection) !== null && _c !== void 0 ? _c : 'kmeans',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train with sparse kernel approximation\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Select landmarks\n this._selectLandmarks(X);\n // Train on landmarks (reduced dataset)\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
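// --- Usage sketch: AdaptiveKernelELM (editor's addition, not part of the
// published bundle). A minimal sketch of the class defined above; it assumes
// these classes are reachable exports of the bundled
// @astermind/astermind-community code, and the vectors/labels are toy data.
// Signatures follow train()/predict() as shipped.
const akelm = new AdaptiveKernelELM({
    categories: ['low', 'high'],  // required label set
    kernelType: 'rbf',            // default kernel
    adaptiveGamma: true,          // per-sample gamma = baseGamma / (1 + localDensity)
    useTokenizer: false,          // numeric feature vectors, not raw text
});
akelm.train([[0.1, 0.2], [0.2, 0.1], [0.9, 0.8]], ['low', 'low', 'high']);
// Accepts a single vector or an array of vectors; each result may carry the
// adaptive kernelParams computed at training time.
const [bestAdaptive] = akelm.predict([0.15, 0.18], 1);
// ---------------------------------------------------------------------------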
void 0 : _d.call(_c, this.landmarks, this._getLandmarkLabels(X, y, labelIndices));\n this.trained = true;\n }\n /**\n * Select landmark points\n */\n _selectLandmarks(X) {\n const numLandmarks = Math.min(this.options.numLandmarks, X.length);\n if (this.options.landmarkSelection === 'random') {\n // Random selection\n const indices = new Set();\n while (indices.size < numLandmarks) {\n indices.add(Math.floor(Math.random() * X.length));\n }\n this.landmarks = Array.from(indices).map(i => [...X[i]]);\n }\n else if (this.options.landmarkSelection === 'kmeans') {\n // K-means centroids as landmarks\n this.landmarks = this._kmeansLandmarks(X, numLandmarks);\n }\n else if (this.options.landmarkSelection === 'diverse') {\n // Diverse selection (maximize distance)\n this.landmarks = this._diverseLandmarks(X, numLandmarks);\n }\n else {\n // Default: first N points\n this.landmarks = X.slice(0, numLandmarks).map(x => [...x]);\n }\n }\n /**\n * K-means landmark selection\n */\n _kmeansLandmarks(X, k) {\n // Simplified k-means (in practice, use proper k-means)\n const centroids = [];\n const dim = X[0].length;\n // Initialize centroids randomly\n for (let i = 0; i < k; i++) {\n const idx = Math.floor(Math.random() * X.length);\n centroids.push([...X[idx]]);\n }\n // Simple iteration (simplified)\n for (let iter = 0; iter < 10; iter++) {\n const clusters = Array(k).fill(null).map(() => []);\n // Assign points to nearest centroid\n for (const x of X) {\n let minDist = Infinity;\n let nearest = 0;\n for (let i = 0; i < k; i++) {\n const dist = this._euclideanDistance(x, centroids[i]);\n if (dist < minDist) {\n minDist = dist;\n nearest = i;\n }\n }\n clusters[nearest].push(x);\n }\n // Update centroids\n for (let i = 0; i < k; i++) {\n if (clusters[i].length > 0) {\n const newCentroid = new Array(dim).fill(0);\n for (const point of clusters[i]) {\n for (let j = 0; j < dim; j++) {\n newCentroid[j] += point[j];\n }\n }\n for (let j = 0; j < dim; j++) {\n newCentroid[j] /= clusters[i].length;\n }\n centroids[i] = newCentroid;\n }\n }\n }\n return centroids;\n }\n /**\n * Diverse landmark selection\n */\n _diverseLandmarks(X, k) {\n const landmarks = [];\n // Start with random point\n let firstIdx = Math.floor(Math.random() * X.length);\n landmarks.push([...X[firstIdx]]);\n // Greedily select points that maximize minimum distance\n while (landmarks.length < k) {\n let maxMinDist = -1;\n let bestIdx = -1;\n for (let i = 0; i < X.length; i++) {\n const minDist = Math.min(...landmarks.map(l => this._euclideanDistance(X[i], l)));\n if (minDist > maxMinDist) {\n maxMinDist = minDist;\n bestIdx = i;\n }\n }\n if (bestIdx >= 0) {\n landmarks.push([...X[bestIdx]]);\n }\n else {\n break;\n }\n }\n return landmarks;\n }\n /**\n * Get labels for landmarks\n */\n _getLandmarkLabels(X, y, labelIndices) {\n const landmarkLabels = [];\n for (const landmark of this.landmarks) {\n // Find nearest point in X\n let minDist = Infinity;\n let nearestIdx = 0;\n for (let i = 0; i < X.length; i++) {\n const dist = this._euclideanDistance(landmark, X[i]);\n if (dist < minDist) {\n minDist = dist;\n nearestIdx = i;\n }\n }\n landmarkLabels.push(labelIndices[nearestIdx]);\n }\n return landmarkLabels;\n }\n _euclideanDistance(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += Math.pow(a[i] - b[i], 2);\n }\n return Math.sqrt(sum);\n }\n /**\n * Predict using sparse kernel\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before 
prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Use landmarks for prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n /**\n * Get selected landmarks\n */\n getLandmarks() {\n return this.landmarks.map(l => [...l]);\n }\n}\n\n// ensemble-kernel-elm.ts — Ensemble Kernel ELM\n// Multiple KELM models with different kernels, voting/weighted combination\n/**\n * Ensemble Kernel ELM\n * Features:\n * - Multiple KELM models with different kernels\n * - Voting/weighted combination\n * - Diversity promotion\n * - Robust predictions\n */\nclass EnsembleKernelELM {\n constructor(options) {\n var _a, _b, _c, _d;\n this.models = [];\n this.trained = false;\n this.categories = options.categories;\n // Default kernels if not provided\n const defaultKernels = options.kernels || [\n { type: 'rbf', gamma: 1.0, weight: 1.0 },\n { type: 'polynomial', degree: 2, coef0: 0, weight: 1.0 },\n { type: 'linear', weight: 1.0 },\n ];\n this.options = {\n categories: options.categories,\n kernels: defaultKernels.map(k => { var _a; return (Object.assign(Object.assign({}, k), { weight: (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 })); }),\n votingType: (_a = options.votingType) !== null && _a !== void 0 ? _a : 'weighted',\n activation: (_b = options.activation) !== null && _b !== void 0 ? _b : 'relu',\n maxLen: (_c = options.maxLen) !== null && _c !== void 0 ? _c : 100,\n useTokenizer: (_d = options.useTokenizer) !== null && _d !== void 0 ? _d : true,\n };\n // Initialize models for each kernel\n for (const kernel of this.options.kernels) {\n const kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: kernel.type,\n gamma: kernel.gamma,\n degree: kernel.degree,\n coef0: kernel.coef0,\n });\n this.models.push(kelm);\n }\n }\n /**\n * Train ensemble\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Train each model\n for (const model of this.models) {\n (_b = (_a = model).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = model).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, X, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Predict with ensemble voting\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Get predictions from all models\n const modelPredictions = [];\n for (const model of this.models) {\n const preds = ((_b = (_a = model).predictFromVector) === null || _b === void 0 ? 
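// --- Usage sketch: SparseKernelELM (editor's addition; same caveats as the
// AdaptiveKernelELM sketch above: toy data, assumed export). Shows landmark
// selection capping the effective training set.
const skelm = new SparseKernelELM({
    categories: ['a', 'b'],
    numLandmarks: 4,               // clamped to the dataset size
    landmarkSelection: 'diverse',  // greedy max-min-distance selection
    useTokenizer: false,
});
const sparseX = [[0, 0], [0, 1], [1, 0], [1, 1], [0.9, 0.9], [0.1, 0.1]];
skelm.train(sparseX, ['a', 'a', 'b', 'b', 'b', 'a']); // inner KernelELM sees landmarks only
console.log(skelm.getLandmarks().length); // at most 4 copied landmark vectors
// ---------------------------------------------------------------------------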
void 0 : _b.call(_a, [x], topK)) || [];\n modelPredictions.push(preds.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n index: p.index || 0,\n })));\n }\n // Combine predictions\n const combined = this._combinePredictions(modelPredictions, topK);\n allResults.push(...combined);\n }\n return allResults;\n }\n /**\n * Combine predictions from multiple models\n */\n _combinePredictions(modelPredictions, topK) {\n // Aggregate predictions by label\n const labelScores = new Map();\n for (let modelIdx = 0; modelIdx < modelPredictions.length; modelIdx++) {\n const kernel = this.options.kernels[modelIdx];\n const weight = kernel.weight;\n for (const pred of modelPredictions[modelIdx]) {\n if (!labelScores.has(pred.label)) {\n labelScores.set(pred.label, { prob: 0, votes: 0, weight: 0 });\n }\n const score = labelScores.get(pred.label);\n if (this.options.votingType === 'majority') {\n score.votes += 1;\n }\n else if (this.options.votingType === 'weighted') {\n score.prob += pred.prob * weight;\n score.weight += weight;\n score.votes += 1;\n }\n else if (this.options.votingType === 'average') {\n score.prob += pred.prob;\n score.votes += 1;\n }\n }\n }\n // Normalize and sort\n const results = [];\n for (const [label, score] of labelScores) {\n let finalProb;\n if (this.options.votingType === 'majority') {\n finalProb = score.votes / this.models.length;\n }\n else if (this.options.votingType === 'weighted') {\n finalProb = score.weight > 0 ? score.prob / score.weight : 0;\n }\n else {\n finalProb = score.votes > 0 ? score.prob / score.votes : 0;\n }\n results.push({\n label,\n prob: finalProb,\n votes: score.votes,\n confidence: finalProb * (score.votes / this.models.length),\n });\n }\n // Sort by probability and return top K\n results.sort((a, b) => b.prob - a.prob);\n return results.slice(0, topK);\n }\n}\n\n// deep-kernel-elm.ts — Deep Kernel ELM\n// Multi-layer kernel transformations with hierarchical kernel learning\n/**\n * Deep Kernel ELM with multi-layer kernel transformations\n * Features:\n * - Hierarchical kernel learning\n * - Deep feature extraction\n * - Multi-layer kernel transformations\n * - Complex non-linear pattern learning\n */\nclass DeepKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.layers = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n numLayers: (_a = options.numLayers) !== null && _a !== void 0 ? _a : 3,\n kernelType: (_b = options.kernelType) !== null && _b !== void 0 ? _b : 'rbf',\n hiddenUnitsPerLayer: (_c = options.hiddenUnitsPerLayer) !== null && _c !== void 0 ? _c : 256,\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n // Initialize layers\n for (let i = 0; i < this.options.numLayers; i++) {\n const kelm = new KernelELM({\n useTokenizer: i === 0 && this.options.useTokenizer ? true : undefined,\n categories: i === this.options.numLayers - 1 ? 
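// --- Usage sketch: EnsembleKernelELM (editor's addition; toy data, assumed
// export). Each configured kernel trains its own KernelELM; predictions are
// combined per label according to votingType.
const ensKelm = new EnsembleKernelELM({
    categories: ['pos', 'neg'],
    kernels: [
        { type: 'rbf', gamma: 0.5, weight: 2 },  // counted twice as heavily
        { type: 'linear', weight: 1 },
    ],
    votingType: 'weighted',  // 'majority' | 'weighted' | 'average'
    useTokenizer: false,
});
ensKelm.train([[1, 0], [0.9, 0.1], [0, 1]], ['pos', 'pos', 'neg']);
// Results carry votes and confidence = prob * (votes / numModels).
const rankedEns = ensKelm.predict([0.8, 0.2], 2);
// ---------------------------------------------------------------------------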
this.options.categories : [],\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n this.layers.push(kelm);\n }\n }\n /**\n * Train deep kernel ELM\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Forward pass through layers\n let currentFeatures = X;\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n if (i === this.layers.length - 1) {\n // Final layer: train with labels\n (_b = (_a = layer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = layer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, currentFeatures, labelIndices);\n }\n else {\n // Intermediate layers: train autoencoder-style\n (_f = (_e = layer).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, currentFeatures, currentFeatures.map((_, idx) => idx));\n }\n // Extract features from this layer\n currentFeatures = this._extractLayerFeatures(currentFeatures, layer);\n }\n this.trained = true;\n }\n /**\n * Extract features from a layer\n */\n _extractLayerFeatures(X, layer) {\n var _a, _b;\n const features = [];\n for (const x of X) {\n // Get kernel features (simplified - in practice, you'd extract actual kernel features)\n const pred = ((_b = (_a = layer).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || [];\n features.push(pred.length > 0 ? pred : x); // Use prediction as features or fallback to input\n }\n return features;\n }\n /**\n * Predict with deep kernel\n */\n predict(X, topK = 3, returnLayerFeatures = false) {\n var _a, _b, _c, _d;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allResults = [];\n for (const x of XArray) {\n // Forward pass through layers\n let currentFeatures = x;\n const layerFeatures = [];\n for (let i = 0; i < this.layers.length - 1; i++) {\n const layer = this.layers[i];\n const features = ((_b = (_a = layer).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, currentFeatures)) || currentFeatures;\n layerFeatures.push(features);\n currentFeatures = features;\n }\n // Final layer prediction\n const finalLayer = this.layers[this.layers.length - 1];\n const preds = ((_d = (_c = finalLayer).predictFromVector) === null || _d === void 0 ? void 0 : _d.call(_c, [currentFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnLayerFeatures) {\n result.layerFeatures = layerFeatures.map(f => [...f]);\n }\n allResults.push(result);\n }\n }\n return allResults;\n }\n}\n\n// robust-kernel-elm.ts — Robust Kernel ELM\n// Outlier-resistant kernels with robust loss functions\n/**\n * Robust Kernel ELM with outlier resistance\n * Features:\n * - Outlier-resistant kernels\n * - Robust loss functions\n * - Noise-tolerant learning\n * - Outlier detection\n */\nclass RobustKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n this.outlierIndices = new Set();\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? 
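// --- Usage sketch: DeepKernelELM (editor's addition; toy data, assumed
// export). Intermediate layers fit autoencoder-style; only the final layer
// sees the labels.
const dkelm = new DeepKernelELM({
    categories: ['x', 'y'],
    numLayers: 2,
    kernelType: 'rbf',
    useTokenizer: false,
});
dkelm.train([[0.2, 0.8], [0.7, 0.3]], ['x', 'y']);
// Pass returnLayerFeatures = true to attach each layer's features per result.
const [deepPred] = dkelm.predict([0.25, 0.75], 1, true);
// ---------------------------------------------------------------------------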
_a : 'rbf',\n robustLoss: (_b = options.robustLoss) !== null && _b !== void 0 ? _b : 'huber',\n outlierThreshold: (_c = options.outlierThreshold) !== null && _c !== void 0 ? _c : 2.0,\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n maxLen: (_h = options.maxLen) !== null && _h !== void 0 ? _h : 100,\n useTokenizer: (_j = options.useTokenizer) !== null && _j !== void 0 ? _j : true,\n };\n this.kelm = new KernelELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n kernel: this.options.kernelType,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train with robust loss\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Detect outliers\n this._detectOutliers(X);\n // Filter outliers for training (or use robust weighting)\n const filteredX = [];\n const filteredY = [];\n for (let i = 0; i < X.length; i++) {\n if (!this.outlierIndices.has(i)) {\n filteredX.push(X[i]);\n filteredY.push(labelIndices[i]);\n }\n }\n // Train on filtered data\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, filteredX.length > 0 ? filteredX : X, filteredY.length > 0 ? filteredY : labelIndices);\n this.trained = true;\n }\n /**\n * Detect outliers using statistical methods\n */\n _detectOutliers(X) {\n this.outlierIndices.clear();\n if (X.length === 0)\n return;\n // Compute mean and std for each dimension\n const dim = X[0].length;\n const means = new Array(dim).fill(0);\n const stds = new Array(dim).fill(0);\n // Compute means\n for (const x of X) {\n for (let i = 0; i < dim; i++) {\n means[i] += x[i] || 0;\n }\n }\n for (let i = 0; i < dim; i++) {\n means[i] /= X.length;\n }\n // Compute standard deviations\n for (const x of X) {\n for (let i = 0; i < dim; i++) {\n stds[i] += Math.pow((x[i] || 0) - means[i], 2);\n }\n }\n for (let i = 0; i < dim; i++) {\n stds[i] = Math.sqrt(stds[i] / X.length);\n }\n // Detect outliers (points far from mean)\n for (let i = 0; i < X.length; i++) {\n const x = X[i];\n let maxZScore = 0;\n for (let j = 0; j < dim; j++) {\n if (stds[j] > 0) {\n const zScore = Math.abs((x[j] || 0) - means[j]) / stds[j];\n maxZScore = Math.max(maxZScore, zScore);\n }\n }\n if (maxZScore > this.options.outlierThreshold) {\n this.outlierIndices.add(i);\n }\n }\n }\n /**\n * Apply robust loss function\n */\n _robustLoss(error) {\n if (this.options.robustLoss === 'huber') {\n const delta = 1.0;\n if (Math.abs(error) <= delta) {\n return 0.5 * error * error;\n }\n else {\n return delta * (Math.abs(error) - 0.5 * delta);\n }\n }\n else if (this.options.robustLoss === 'hinge') {\n return Math.max(0, 1 - error);\n }\n else if (this.options.robustLoss === 'epsilon-insensitive') {\n const epsilon = 0.1;\n return Math.max(0, Math.abs(error) - epsilon);\n }\n return error * error; // Default: squared loss\n }\n /**\n * Predict with outlier detection\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained 
before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Check if input is outlier\n const isOutlier = this._isOutlier(x);\n // Get prediction\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [x], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const prob = pred.prob || 0;\n const robustness = isOutlier ? 0.5 : 1.0; // Lower robustness for outliers\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob,\n isOutlier,\n robustness,\n });\n }\n }\n return results;\n }\n /**\n * Check if a point is an outlier\n */\n _isOutlier(x) {\n // Simplified outlier check (in practice, use trained model statistics)\n const mean = x.reduce((a, b) => a + b, 0) / x.length;\n const std = Math.sqrt(x.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0) / x.length);\n if (std === 0)\n return false;\n const maxZScore = Math.max(...x.map(v => Math.abs((v - mean) / std)));\n return maxZScore > this.options.outlierThreshold;\n }\n}\n\n// elm-kelm-cascade.ts — ELM-KELM Cascade\n// ELM feature extraction → KELM classification\n/**\n * ELM-KELM Cascade\n * Features:\n * - ELM for feature extraction\n * - KELM for classification\n * - Hierarchical learning\n * - Efficiency + accuracy\n */\nclass ELMKELMCascade {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n elmHiddenUnits: (_a = options.elmHiddenUnits) !== null && _a !== void 0 ? _a : 256,\n kelmKernel: (_b = options.kelmKernel) !== null && _b !== void 0 ? _b : 'rbf',\n kelmGamma: (_c = options.kelmGamma) !== null && _c !== void 0 ? _c : 1.0,\n kelmDegree: (_d = options.kelmDegree) !== null && _d !== void 0 ? _d : 2,\n kelmCoef0: (_e = options.kelmCoef0) !== null && _e !== void 0 ? _e : 0,\n activation: (_f = options.activation) !== null && _f !== void 0 ? _f : 'relu',\n maxLen: (_g = options.maxLen) !== null && _g !== void 0 ? _g : 100,\n useTokenizer: (_h = options.useTokenizer) !== null && _h !== void 0 ? _h : true,\n };\n // Initialize ELM for feature extraction\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.elmHiddenUnits,\n categories: [], // No categories for feature extraction\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize KELM for classification\n this.kelm = new KernelELM({\n useTokenizer: false, // Already tokenized by ELM\n categories: this.options.categories,\n maxLen: undefined,\n kernel: this.options.kelmKernel,\n gamma: this.options.kelmGamma,\n degree: this.options.kelmDegree,\n coef0: this.options.kelmCoef0,\n });\n }\n /**\n * Train cascade\n */\n train(X, y) {\n var _a, _b, _c, _d, _e, _f;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Step 1: Train ELM for feature extraction (autoencoder-style)\n (_b = (_a = this.elm).trainFromData) === null || _b === void 0 ? void 0 : _b.call(_a, X, X.map((_, i) => i)); // Self-supervised\n // Step 2: Extract features using ELM\n const extractedFeatures = this._extractFeatures(X);\n // Step 3: Train KELM on extracted features\n (_d = (_c = this.kelm).setCategories) === null || _d === void 0 ? 
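// --- Usage sketch: RobustKernelELM (editor's addition; toy data, assumed
// export). Training drops samples whose per-dimension z-score exceeds
// outlierThreshold; prediction flags outlier inputs.
const rkelm = new RobustKernelELM({
    categories: ['ok', 'bad'],
    robustLoss: 'huber',    // 'huber' | 'hinge' | 'epsilon-insensitive'
    outlierThreshold: 2.0,  // max z-score before a training point is excluded
    useTokenizer: false,
});
rkelm.train([[0.1, 0.1], [0.2, 0.2], [9, 9], [0.8, 0.9]], ['ok', 'ok', 'ok', 'bad']);
// Each result carries isOutlier and robustness (0.5 for flagged inputs).
const [robustPred] = rkelm.predict([0.15, 0.12], 1);
// ---------------------------------------------------------------------------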
void 0 : _d.call(_c, this.options.categories);\n (_f = (_e = this.kelm).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, extractedFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features using ELM\n */\n _extractFeatures(X) {\n var _a, _b;\n const features = [];\n for (const x of X) {\n // Get hidden layer activations as features\n const logits = ((_b = (_a = this.elm).predictLogitsFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || [];\n features.push(logits.length > 0 ? logits : x); // Fallback to input if no logits\n }\n return features;\n }\n /**\n * Predict with cascade\n */\n predict(X, topK = 3, returnFeatures = false) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Step 1: Extract features\n const extractedFeatures = this._extractFeatures([x])[0];\n // Step 2: Classify with KELM\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [extractedFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const result = {\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n };\n if (returnFeatures) {\n result.extractedFeatures = [...extractedFeatures];\n }\n results.push(result);\n }\n }\n return results;\n }\n}\n\n// string-kernel-elm.ts — String Kernel ELM\n// String kernels for text/DNA/protein sequences\n/**\n * String Kernel ELM for sequence data\n * Features:\n * - N-gram kernels\n * - Subsequence kernels\n * - Spectrum kernels\n * - Text/DNA/protein sequence analysis\n */\nclass StringKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.trained = false;\n this.vocabulary = new Set();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'ngram',\n n: (_b = options.n) !== null && _b !== void 0 ? _b : 3,\n lambda: (_c = options.lambda) !== null && _c !== void 0 ? _c : 0.5,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n };\n // Use polynomial kernel as base (will be adapted for strings)\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: 'polynomial',\n degree: this.options.n,\n });\n }\n /**\n * Train on string sequences\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Convert strings to feature vectors\n const stringX = X;\n const featureVectors = this._stringsToFeatures(stringX);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
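// --- Usage sketch: ELMKELMCascade (editor's addition; toy data, assumed
// export). The ELM front end fits self-supervised, then the KELM classifier
// trains on the features the ELM extracts.
const cascade = new ELMKELMCascade({
    categories: ['spam', 'ham'],
    elmHiddenUnits: 32,
    kelmKernel: 'rbf',
    useTokenizer: false,
});
cascade.train([[1, 0, 0], [0, 1, 1], [0, 0, 1]], ['spam', 'ham', 'ham']);
// returnFeatures = true attaches the ELM-extracted feature vector per result.
const [cascadePred] = cascade.predict([1, 0, 0.1], 1, true);
// ---------------------------------------------------------------------------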
void 0 : _d.call(_c, featureVectors, labelIndices);\n this.trained = true;\n }\n /**\n * Convert strings to feature vectors using string kernels\n */\n _stringsToFeatures(strings) {\n // Build vocabulary\n this.vocabulary.clear();\n for (const s of strings) {\n const ngrams = this._extractNgrams(s);\n for (const ngram of ngrams) {\n this.vocabulary.add(ngram);\n }\n }\n const vocabArray = Array.from(this.vocabulary);\n const features = [];\n for (const s of strings) {\n const feature = new Array(vocabArray.length).fill(0);\n const ngrams = this._extractNgrams(s);\n for (const ngram of ngrams) {\n const idx = vocabArray.indexOf(ngram);\n if (idx >= 0) {\n feature[idx] += 1;\n }\n }\n // Normalize\n const sum = feature.reduce((a, b) => a + b, 0);\n if (sum > 0) {\n for (let i = 0; i < feature.length; i++) {\n feature[i] /= sum;\n }\n }\n features.push(feature);\n }\n return features;\n }\n /**\n * Extract n-grams from string\n */\n _extractNgrams(s) {\n const ngrams = [];\n if (this.options.kernelType === 'ngram' || this.options.kernelType === 'spectrum') {\n // N-gram extraction\n for (let i = 0; i <= s.length - this.options.n; i++) {\n ngrams.push(s.substring(i, i + this.options.n));\n }\n }\n else if (this.options.kernelType === 'subsequence') {\n // Subsequence extraction (simplified)\n for (let i = 0; i <= s.length - this.options.n; i++) {\n ngrams.push(s.substring(i, i + this.options.n));\n }\n }\n return ngrams;\n }\n /**\n * Predict on strings\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const stringX = X;\n const featureVectors = this._stringsToFeatures(stringX);\n const results = [];\n for (const features of featureVectors) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [features], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// convolutional-elm.ts — Convolutional ELM (C-ELM)\n// Convolutional layers + ELM for image/sequence processing\n/**\n * Convolutional ELM\n * Features:\n * - Convolutional layers for feature extraction\n * - ELM for classification\n * - Translation invariance\n * - Image/sequence processing\n */\nclass ConvolutionalELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n inputShape: (_a = options.inputShape) !== null && _a !== void 0 ? _a : [28, 28, 1],\n filters: (_b = options.filters) !== null && _b !== void 0 ? _b : [32, 64],\n kernelSizes: (_c = options.kernelSizes) !== null && _c !== void 0 ? _c : [3, 3],\n poolSizes: (_d = options.poolSizes) !== null && _d !== void 0 ? _d : [2, 2],\n hiddenUnits: (_e = options.hiddenUnits) !== null && _e !== void 0 ? _e : 256,\n activation: (_f = options.activation) !== null && _f !== void 0 ? _f : 'relu',\n maxLen: (_g = options.maxLen) !== null && _g !== void 0 ? _g : 100,\n useTokenizer: (_h = options.useTokenizer) !== null && _h !== void 0 ? _h : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? 
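// --- Usage sketch: StringKernelELM (editor's addition; toy sequences,
// assumed export). Strings become normalized n-gram count vectors. Note
// that, as shipped, _stringsToFeatures() clears and rebuilds the vocabulary
// on every call, so predict-time vectors are indexed against the query's own
// n-grams rather than the training vocabulary.
const strKelm = new StringKernelELM({
    categories: ['dna', 'protein'],
    kernelType: 'ngram',  // 'ngram' | 'subsequence' | 'spectrum'
    n: 3,                 // n-gram width, also reused as the polynomial degree
});
strKelm.train(['ACGTACGT', 'GATTACA', 'MKVLWAAL'], ['dna', 'dna', 'protein']);
const strPreds = strKelm.predict(['ACGTTTAC'], 1);
// ---------------------------------------------------------------------------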
true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train on image/sequence data\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Extract convolutional features\n const images = Array.isArray(X[0][0]) ? X : X.map(x => [x]);\n const features = this._extractConvolutionalFeatures(images);\n // Train ELM on features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Extract features using convolutional layers\n */\n _extractConvolutionalFeatures(images) {\n const features = [];\n for (const image of images) {\n let current = image;\n // Apply convolutional layers\n for (let layer = 0; layer < this.options.filters.length; layer++) {\n const convInput = Array.isArray(current[0][0])\n ? current[0]\n : current;\n const convOutput = this._convLayer(convInput, this.options.filters[layer], this.options.kernelSizes[layer] || 3);\n current = this._poolLayer(convOutput, this.options.poolSizes[layer] || 2);\n }\n // Flatten\n const flattened = this._flatten(current);\n features.push(flattened);\n }\n return features;\n }\n /**\n * Convolutional layer (simplified)\n */\n _convLayer(input, numFilters, kernelSize) {\n // Simplified convolution (in practice, use proper convolution)\n const output = [];\n for (let f = 0; f < numFilters; f++) {\n const featureMap = [];\n for (let i = 0; i < input.length; i++) {\n featureMap[i] = [];\n for (let j = 0; j < input[i].length; j++) {\n // Simple convolution (simplified)\n let sum = 0;\n for (let ki = 0; ki < kernelSize; ki++) {\n for (let kj = 0; kj < kernelSize; kj++) {\n const row = i + ki - Math.floor(kernelSize / 2);\n const col = j + kj - Math.floor(kernelSize / 2);\n if (row >= 0 && row < input.length && col >= 0 && col < input[i].length) {\n sum += input[row][col] || 0;\n }\n }\n }\n featureMap[i][j] = Math.max(0, sum / (kernelSize * kernelSize)); // ReLU\n }\n }\n output.push(featureMap);\n }\n return output;\n }\n /**\n * Pooling layer\n */\n _poolLayer(input, poolSize) {\n // Simplified pooling\n const images = Array.isArray(input[0][0])\n ? 
input\n : [input];\n const pooled = [];\n for (const img of images) {\n const pooledImg = [];\n for (let i = 0; i < img.length; i += poolSize) {\n pooledImg[i / poolSize] = [];\n for (let j = 0; j < img[i].length; j += poolSize) {\n // Max pooling\n let max = -Infinity;\n for (let pi = 0; pi < poolSize && i + pi < img.length; pi++) {\n for (let pj = 0; pj < poolSize && j + pj < img[i].length; pj++) {\n max = Math.max(max, img[i + pi][j + pj] || 0);\n }\n }\n pooledImg[i / poolSize][j / poolSize] = max;\n }\n }\n pooled.push(pooledImg);\n }\n return pooled;\n }\n /**\n * Flatten feature maps\n */\n _flatten(featureMaps) {\n if (Array.isArray(featureMaps[0][0])) {\n const maps = featureMaps;\n const flattened = [];\n for (const map of maps) {\n for (const row of map) {\n flattened.push(...row);\n }\n }\n return flattened;\n }\n else {\n const map = featureMaps;\n const flattened = [];\n for (const row of map) {\n flattened.push(...row);\n }\n return flattened;\n }\n }\n /**\n * Predict\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const images = Array.isArray(X[0][0]) ? X : X.map(x => [x]);\n const features = this._extractConvolutionalFeatures(images);\n const results = [];\n for (const feature of features) {\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [feature], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// recurrent-elm.ts — Recurrent ELM (R-ELM)\n// Recurrent connections in ELM for sequence modeling\n/**\n * Recurrent ELM for sequence modeling\n * Features:\n * - Recurrent connections\n * - Sequence modeling\n * - Temporal dependencies\n * - Memory of past inputs\n */\nclass RecurrentELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.hiddenState = [];\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n recurrentUnits: (_b = options.recurrentUnits) !== null && _b !== void 0 ? _b : 128,\n sequenceLength: (_c = options.sequenceLength) !== null && _c !== void 0 ? _c : 10,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'tanh',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize hidden state\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n }\n /**\n * Train on sequences\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Process sequences with recurrent connections\n const features = this._processSequences(X);\n // Train ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
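// --- Usage sketch: ConvolutionalELM (editor's addition; toy 4x4 "images",
// assumed export). Fixed (untrained, simplified) convolution and max-pooling
// stages produce a flattened feature vector for the ELM classifier.
const convImgs = [
    [[1, 1, 0, 0], [1, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
    [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],
];
const convElm = new ConvolutionalELM({
    categories: ['corner', 'center'],
    filters: [4],      // one conv stage; the simplified filters are identical
    kernelSizes: [3],
    poolSizes: [2],
    useTokenizer: false,
});
convElm.train(convImgs, ['corner', 'center']);
const convPreds = convElm.predict([convImgs[0]], 1);
// ---------------------------------------------------------------------------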
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Process sequences with recurrent connections\n */\n _processSequences(sequences) {\n const features = [];\n for (const sequence of sequences) {\n // Reset hidden state for each sequence\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n // Process sequence step by step\n for (const step of sequence) {\n // Update hidden state with this step's input (simplified recurrent update)\n this._updateHiddenState(step);\n }\n // Use final hidden state + last input as features\n const finalFeatures = [...sequence[sequence.length - 1] || [], ...this.hiddenState];\n features.push(finalFeatures);\n }\n return features;\n }\n /**\n * Update hidden state (recurrent connection)\n */\n _updateHiddenState(input) {\n // Simplified recurrent update: h_t = tanh(W * [x_t; h_{t-1}])\n const combined = [...input, ...this.hiddenState];\n const newState = new Array(this.options.recurrentUnits).fill(0);\n // Simple linear transformation (in practice, use learned weights)\n for (let i = 0; i < this.options.recurrentUnits; i++) {\n let sum = 0;\n for (let j = 0; j < combined.length; j++) {\n // Simple hash-based weight (in practice, use learned weights)\n const hash = this._hash(`recurrent_${i}_${j}`);\n sum += combined[j] * hash;\n }\n // Apply activation\n if (this.options.activation === 'tanh') {\n newState[i] = Math.tanh(sum);\n }\n else if (this.options.activation === 'relu') {\n newState[i] = Math.max(0, sum);\n }\n else if (this.options.activation === 'sigmoid') {\n newState[i] = 1 / (1 + Math.exp(-sum));\n }\n else {\n newState[i] = sum;\n }\n }\n this.hiddenState = newState;\n }\n _hash(str) {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = ((hash << 5) - hash) + char;\n hash = hash & hash;\n }\n return (hash / 2147483647) * 0.1; // Small weights\n }\n /**\n * Predict on sequence\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const sequences = Array.isArray(X[0][0])\n ? X\n : [X];\n const results = [];\n for (const sequence of sequences) {\n // Process sequence\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n for (const step of sequence) {\n this._updateHiddenState(step);\n }\n const finalFeatures = [...sequence[sequence.length - 1] || [], ...this.hiddenState];\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [finalFeatures], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n hiddenState: [...this.hiddenState],\n });\n }\n }\n return results;\n }\n /**\n * Reset hidden state\n */\n resetState() {\n this.hiddenState = new Array(this.options.recurrentUnits).fill(0);\n }\n}\n\n// fuzzy-elm.ts — Fuzzy ELM\n// Fuzzy logic + ELM for uncertainty handling and soft classification\n/**\n * Fuzzy ELM\n * Features:\n * - Fuzzy logic integration\n * - Uncertainty handling\n * - Soft classification\n * - Membership functions\n */\nclass FuzzyELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f;\n this.trained = false;\n this.membershipParams = new Map();\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? 
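// --- Usage sketch: RecurrentELM (editor's addition; toy sequences, assumed
// export). Each sequence is folded step by step into a hash-weighted hidden
// state; the last input plus the final state form the ELM features.
const recElm = new RecurrentELM({
    categories: ['rising', 'falling'],
    recurrentUnits: 16,
    activation: 'tanh',
    useTokenizer: false,
});
recElm.train([[[0.1], [0.2], [0.3]], [[0.9], [0.5], [0.1]]], ['rising', 'falling']);
const [recPred] = recElm.predict([[0.2], [0.4], [0.6]], 1); // carries hiddenState
recElm.resetState(); // clear carried state between unrelated calls
// ---------------------------------------------------------------------------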
_a : 256,\n fuzzyMembership: (_b = options.fuzzyMembership) !== null && _b !== void 0 ? _b : 'gaussian',\n fuzzificationLevel: (_c = options.fuzzificationLevel) !== null && _c !== void 0 ? _c : 0.5,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n maxLen: (_e = options.maxLen) !== null && _e !== void 0 ? _e : 100,\n useTokenizer: (_f = options.useTokenizer) !== null && _f !== void 0 ? _f : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with fuzzy logic\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Fuzzify input features\n const fuzzifiedX = this._fuzzifyFeatures(X);\n // Compute membership parameters\n this._computeMembershipParams(X, labelIndices);\n // Train ELM on fuzzified features\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, fuzzifiedX, labelIndices);\n this.trained = true;\n }\n /**\n * Fuzzify input features\n */\n _fuzzifyFeatures(X) {\n const fuzzified = [];\n for (const x of X) {\n const fuzzy = x.map(val => this._fuzzifyValue(val));\n fuzzified.push(fuzzy);\n }\n return fuzzified;\n }\n /**\n * Fuzzify a single value\n */\n _fuzzifyValue(value) {\n // Apply fuzzification based on membership function\n if (this.options.fuzzyMembership === 'triangular') {\n // Triangular membership\n const center = 0;\n const width = this.options.fuzzificationLevel;\n if (Math.abs(value - center) <= width) {\n return 1 - Math.abs(value - center) / width;\n }\n return 0;\n }\n else if (this.options.fuzzyMembership === 'gaussian') {\n // Gaussian membership\n const center = 0;\n const sigma = this.options.fuzzificationLevel;\n return Math.exp(-Math.pow(value - center, 2) / (2 * sigma * sigma));\n }\n else if (this.options.fuzzyMembership === 'trapezoidal') {\n // Trapezoidal membership\n const center = 0;\n const width = this.options.fuzzificationLevel;\n const dist = Math.abs(value - center);\n if (dist <= width * 0.5) {\n return 1;\n }\n else if (dist <= width) {\n return 1 - (dist - width * 0.5) / (width * 0.5);\n }\n return 0;\n }\n return value; // Default: no fuzzification\n }\n /**\n * Compute membership parameters for each category\n */\n _computeMembershipParams(X, y) {\n // Compute mean and std for each category\n const categoryData = new Map();\n for (let i = 0; i < X.length; i++) {\n const label = y[i];\n if (!categoryData.has(label)) {\n categoryData.set(label, []);\n }\n categoryData.get(label).push(X[i]);\n }\n for (const [label, data] of categoryData) {\n const mean = this._computeMean(data);\n const std = this._computeStd(data, mean);\n this.membershipParams.set(this.options.categories[label], {\n center: mean,\n width: std * 2, // 2 standard deviations\n });\n }\n }\n _computeMean(data) {\n var _a;\n if (data.length === 0)\n return 0;\n const sum = data.reduce((s, x) => s + x.reduce((a, b) => a + b, 0), 0);\n const count = data.length * (((_a = data[0]) === null || _a === void 0 ? 
void 0 : _a.length) || 1);\n return sum / count;\n }\n _computeStd(data, mean) {\n var _a;\n if (data.length === 0)\n return 1;\n const variance = data.reduce((s, x) => s + x.reduce((sum, v) => sum + Math.pow(v - mean, 2), 0), 0) / (data.length * (((_a = data[0]) === null || _a === void 0 ? void 0 : _a.length) || 1));\n return Math.sqrt(variance);\n }\n /**\n * Compute fuzzy membership for a prediction\n */\n _computeMembership(label, features) {\n const params = this.membershipParams.get(label);\n if (!params)\n return 0.5; // Default membership\n const mean = features.reduce((a, b) => a + b, 0) / features.length;\n const dist = Math.abs(mean - params.center);\n if (this.options.fuzzyMembership === 'gaussian') {\n return Math.exp(-Math.pow(dist, 2) / (2 * params.width * params.width));\n }\n else {\n // Triangular\n if (dist <= params.width) {\n return 1 - dist / params.width;\n }\n return 0;\n }\n }\n /**\n * Predict with fuzzy logic\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = [];\n for (const x of XArray) {\n // Fuzzify input\n const fuzzified = this._fuzzifyFeatures([x])[0];\n // Get base prediction\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [fuzzified], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n const label = pred.label || this.options.categories[pred.index || 0];\n const prob = pred.prob || 0;\n // Compute fuzzy membership\n const membership = this._computeMembership(label, x);\n // Combine probability with membership\n const confidence = prob * membership;\n results.push({\n label,\n prob,\n membership,\n confidence,\n });\n }\n }\n return results;\n }\n}\n\n// quantum-inspired-elm.ts — Quantum-Inspired ELM\n// Quantum computing principles for feature maps and optimization\n/**\n * Quantum-Inspired ELM\n * Features:\n * - Quantum feature maps\n * - Quantum superposition\n * - Quantum entanglement\n * - Quantum kernel methods\n */\nclass QuantumInspiredELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.quantumStates = [];\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n quantumLayers: (_b = options.quantumLayers) !== null && _b !== void 0 ? _b : 2,\n entanglement: (_c = options.entanglement) !== null && _c !== void 0 ? _c : true,\n superposition: (_d = options.superposition) !== null && _d !== void 0 ? _d : true,\n activation: (_e = options.activation) !== null && _e !== void 0 ? _e : 'relu',\n maxLen: (_f = options.maxLen) !== null && _f !== void 0 ? _f : 100,\n useTokenizer: (_g = options.useTokenizer) !== null && _g !== void 0 ? _g : true,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train with quantum-inspired features\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Apply quantum feature maps\n const quantumFeatures = this._applyQuantumFeatureMap(X);\n // Train ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? 
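// --- Usage sketch: FuzzyELM (editor's addition; toy data, assumed export).
// Inputs pass through the chosen membership function, and each prediction's
// confidence is prob * membership, discounting predictions far from a
// category's learned center.
const fzElm = new FuzzyELM({
    categories: ['cool', 'warm'],
    fuzzyMembership: 'gaussian',  // 'gaussian' | 'triangular' | 'trapezoidal'
    fuzzificationLevel: 0.5,
    useTokenizer: false,
});
fzElm.train([[-0.4], [-0.3], [0.3], [0.5]], ['cool', 'cool', 'warm', 'warm']);
const [fzPred] = fzElm.predict([0.35], 1); // { label, prob, membership, confidence }
// ---------------------------------------------------------------------------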
void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, quantumFeatures, labelIndices);\n this.trained = true;\n }\n /**\n * Apply quantum feature map\n */\n _applyQuantumFeatureMap(X) {\n const features = [];\n for (const x of X) {\n let quantumState = this._encodeToQuantumState(x);\n // Apply quantum layers\n for (let layer = 0; layer < this.options.quantumLayers; layer++) {\n quantumState = this._applyQuantumLayer(quantumState, layer);\n }\n // Measure quantum state (convert to classical features)\n const measured = this._measureQuantumState(quantumState);\n features.push(measured);\n }\n return features;\n }\n /**\n * Encode classical data to quantum state\n */\n _encodeToQuantumState(x) {\n // Quantum state encoding (amplitude encoding)\n const state = new Array(Math.pow(2, Math.ceil(Math.log2(x.length)))).fill(0);\n // Normalize input\n const norm = Math.sqrt(x.reduce((sum, v) => sum + v * v, 0));\n if (norm > 0) {\n for (let i = 0; i < x.length; i++) {\n state[i] = x[i] / norm;\n }\n }\n return state;\n }\n /**\n * Apply quantum layer (quantum gates simulation)\n */\n _applyQuantumLayer(state, layer) {\n let newState = [...state];\n // Apply quantum gates (simplified simulation)\n if (this.options.superposition) {\n // Hadamard-like transformation (superposition)\n newState = this._applySuperposition(newState);\n }\n if (this.options.entanglement) {\n // Entanglement (CNOT-like)\n newState = this._applyEntanglement(newState);\n }\n // Rotation gates\n newState = this._applyRotation(newState, layer);\n return newState;\n }\n /**\n * Apply superposition (Hadamard-like)\n */\n _applySuperposition(state) {\n const newState = new Array(state.length).fill(0);\n const factor = 1 / Math.sqrt(2);\n for (let i = 0; i < state.length; i++) {\n for (let j = 0; j < state.length; j++) {\n // Simplified Hadamard transformation\n const phase = (i === j) ? factor : factor * Math.cos(Math.PI * i * j / state.length);\n newState[i] += state[j] * phase;\n }\n }\n return newState;\n }\n /**\n * Apply entanglement (CNOT-like)\n */\n _applyEntanglement(state) {\n const newState = [...state];\n // Entangle pairs of qubits\n for (let i = 0; i < state.length - 1; i += 2) {\n const temp = newState[i];\n newState[i] = newState[i + 1];\n newState[i + 1] = temp;\n }\n return newState;\n }\n /**\n * Apply rotation gates\n */\n _applyRotation(state, layer) {\n const newState = new Array(state.length).fill(0);\n const angle = Math.PI / (2 * (layer + 1));\n for (let i = 0; i < state.length; i++) {\n const cos = Math.cos(angle);\n const sin = Math.sin(angle);\n newState[i] = state[i] * cos - state[(i + 1) % state.length] * sin;\n }\n return newState;\n }\n /**\n * Measure quantum state (convert to classical)\n */\n _measureQuantumState(state) {\n // Measure by computing probabilities (amplitudes squared)\n const probabilities = state.map(amp => amp * amp);\n // Project to hidden units dimension\n const hiddenDim = this.options.hiddenUnits;\n const features = new Array(hiddenDim).fill(0);\n for (let i = 0; i < hiddenDim; i++) {\n const idx = i % probabilities.length;\n features[i] = probabilities[idx];\n }\n return features;\n }\n /**\n * Predict with quantum-inspired model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? 
X : [X];\n const results = [];\n for (const x of XArray) {\n // Apply quantum feature map\n const quantumFeatures = this._applyQuantumFeatureMap([x])[0];\n // Predict\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [quantumFeatures], topK)) || [];\n // Get quantum state for this input\n let quantumState = this._encodeToQuantumState(x);\n for (let layer = 0; layer < this.options.quantumLayers; layer++) {\n quantumState = this._applyQuantumLayer(quantumState, layer);\n }\n const amplitude = Math.sqrt(quantumState.reduce((sum, v) => sum + v * v, 0));\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n quantumState: [...quantumState],\n amplitude,\n });\n }\n }\n return results;\n }\n}\n\n// graph-kernel-elm.ts — Graph Kernel ELM\n// Graph kernels (Weisfeiler-Lehman, etc.) for graph structure encoding\n/**\n * Graph Kernel ELM\n * Features:\n * - Graph kernels (Weisfeiler-Lehman, shortest-path, random-walk)\n * - Graph structure encoding\n * - Node classification/regression\n */\nclass GraphKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n kernelType: (_a = options.kernelType) !== null && _a !== void 0 ? _a : 'weisfeiler-lehman',\n wlIterations: (_b = options.wlIterations) !== null && _b !== void 0 ? _b : 3,\n kernel: (_c = options.kernel) !== null && _c !== void 0 ? _c : 'rbf',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n };\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: this.options.kernel,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train on graphs\n */\n train(graphs, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Compute graph kernel features\n const features = this._computeGraphKernelFeatures(graphs);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
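// --- Usage sketch: QuantumInspiredELM (editor's addition; toy data, assumed
// export). Inputs are amplitude-encoded, passed through the simulated
// superposition/entanglement/rotation layers, then "measured" into classical
// features for the ELM.
const qElm = new QuantumInspiredELM({
    categories: ['zero', 'one'],
    quantumLayers: 2,
    superposition: true,
    entanglement: true,
    useTokenizer: false,
});
qElm.train([[0.1, 0.9], [0.8, 0.2]], ['zero', 'one']);
const [qPred] = qElm.predict([0.2, 0.7], 1); // carries quantumState and amplitude
// ---------------------------------------------------------------------------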
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Compute graph kernel features\n */\n _computeGraphKernelFeatures(graphs) {\n const features = [];\n for (const graph of graphs) {\n let graphFeatures;\n if (this.options.kernelType === 'weisfeiler-lehman') {\n graphFeatures = this._weisfeilerLehmanKernel(graph);\n }\n else if (this.options.kernelType === 'shortest-path') {\n graphFeatures = this._shortestPathKernel(graph);\n }\n else {\n graphFeatures = this._randomWalkKernel(graph);\n }\n features.push(graphFeatures);\n }\n return features;\n }\n /**\n * Weisfeiler-Lehman kernel\n */\n _weisfeilerLehmanKernel(graph) {\n const features = [];\n const nodeLabels = new Map();\n // Initialize labels with node features\n for (const node of graph.nodes) {\n const label = node.features.join(',');\n nodeLabels.set(node.id, label);\n }\n // WL iterations\n for (let iter = 0; iter < this.options.wlIterations; iter++) {\n const newLabels = new Map();\n for (const node of graph.nodes) {\n // Get neighbor labels\n const neighbors = graph.edges\n .filter(e => e.source === node.id || e.target === node.id)\n .map(e => e.source === node.id ? e.target : e.source);\n const neighborLabels = neighbors\n .map(nid => nodeLabels.get(nid) || '')\n .sort()\n .join(',');\n // New label = current label + sorted neighbor labels\n const newLabel = `${nodeLabels.get(node.id)}|${neighborLabels}`;\n newLabels.set(node.id, newLabel);\n }\n // Count label frequencies\n const labelCounts = new Map();\n for (const label of newLabels.values()) {\n labelCounts.set(label, (labelCounts.get(label) || 0) + 1);\n }\n // Add to features\n for (const [label, count] of labelCounts) {\n features.push(count);\n }\n nodeLabels.clear();\n for (const [id, label] of newLabels) {\n nodeLabels.set(id, label);\n }\n }\n return features.length > 0 ? features : new Array(10).fill(0);\n }\n /**\n * Shortest-path kernel\n */\n _shortestPathKernel(graph) {\n // Compute shortest paths between all pairs\n const distances = this._computeShortestPaths(graph);\n // Create histogram of distances\n const maxDist = Math.max(...distances.flat().filter(d => d < Infinity));\n const bins = Math.min(10, maxDist + 1);\n const histogram = new Array(bins).fill(0);\n for (const row of distances) {\n for (const dist of row) {\n if (dist < Infinity) {\n const bin = Math.min(Math.floor(dist), bins - 1);\n histogram[bin]++;\n }\n }\n }\n return histogram;\n }\n /**\n * Random-walk kernel\n */\n _randomWalkKernel(graph) {\n // Simplified random-walk kernel\n const features = [];\n // Node degree distribution\n const degrees = new Map();\n for (const edge of graph.edges) {\n degrees.set(edge.source, (degrees.get(edge.source) || 0) + 1);\n degrees.set(edge.target, (degrees.get(edge.target) || 0) + 1);\n }\n const degreeHist = new Array(10).fill(0);\n for (const degree of degrees.values()) {\n const bin = Math.min(degree, 9);\n degreeHist[bin]++;\n }\n features.push(...degreeHist);\n // Graph statistics\n features.push(graph.nodes.length);\n features.push(graph.edges.length);\n features.push(graph.nodes.length > 0 ? 
graph.edges.length / graph.nodes.length : 0);\n return features;\n }\n /**\n * Compute shortest paths (Floyd-Warshall simplified)\n */\n _computeShortestPaths(graph) {\n const n = graph.nodes.length;\n const dist = Array(n).fill(null).map(() => Array(n).fill(Infinity));\n // Initialize\n for (let i = 0; i < n; i++) {\n dist[i][i] = 0;\n }\n // Add edges\n for (const edge of graph.edges) {\n const srcIdx = graph.nodes.findIndex(n => n.id === edge.source);\n const tgtIdx = graph.nodes.findIndex(n => n.id === edge.target);\n if (srcIdx >= 0 && tgtIdx >= 0) {\n dist[srcIdx][tgtIdx] = 1;\n dist[tgtIdx][srcIdx] = 1;\n }\n }\n // Floyd-Warshall\n for (let k = 0; k < n; k++) {\n for (let i = 0; i < n; i++) {\n for (let j = 0; j < n; j++) {\n if (dist[i][k] + dist[k][j] < dist[i][j]) {\n dist[i][j] = dist[i][k] + dist[k][j];\n }\n }\n }\n }\n return dist;\n }\n /**\n * Predict on graphs\n */\n predict(graphs, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const graphArray = Array.isArray(graphs) ? graphs : [graphs];\n const features = this._computeGraphKernelFeatures(graphArray);\n const results = [];\n for (const feature of features) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [feature], topK)) || [];\n for (const pred of preds.slice(0, topK)) {\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n });\n }\n }\n return results;\n }\n}\n\n// tensor-kernel-elm.ts — Tensor Kernel ELM\n// Multi-dimensional kernel learning with tensor factorization\n/**\n * Tensor Kernel ELM\n * Features:\n * - Multi-dimensional kernel learning\n * - Tensor factorization\n * - Multi-modal data fusion\n * - Complex relationship modeling\n */\nclass TensorKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.tensorFactors = []; // [sample][mode][dim][rank] - each sample has multiple modes, each mode is a 2D matrix\n this.categories = options.categories;\n this.options = {\n categories: options.categories,\n tensorRank: (_a = options.tensorRank) !== null && _a !== void 0 ? _a : 10,\n modes: (_b = options.modes) !== null && _b !== void 0 ? _b : [10, 10, 10],\n kernel: (_c = options.kernel) !== null && _c !== void 0 ? _c : 'rbf',\n gamma: (_d = options.gamma) !== null && _d !== void 0 ? _d : 1.0,\n degree: (_e = options.degree) !== null && _e !== void 0 ? _e : 2,\n coef0: (_f = options.coef0) !== null && _f !== void 0 ? _f : 0,\n activation: (_g = options.activation) !== null && _g !== void 0 ? _g : 'relu',\n };\n this.kelm = new KernelELM({\n categories: this.options.categories,\n kernel: this.options.kernel,\n gamma: this.options.gamma,\n degree: this.options.degree,\n coef0: this.options.coef0,\n });\n }\n /**\n * Train on tensor data\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Factorize tensors\n const tensorX = Array.isArray(X[0][0]) ? X : this._reshapeToTensors(X);\n this._factorizeTensors(tensorX);\n // Extract features from tensor factorization\n const features = this._extractTensorFeatures(tensorX);\n // Train KELM\n (_b = (_a = this.kelm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.kelm).trainFromData) === null || _d === void 0 ? 
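// _computeShortestPaths above is plain Floyd-Warshall over unit edge weights:
// relax dist[i][j] = min(dist[i][j], dist[i][k] + dist[k][j]) for every pivot
// k. A self-contained check on a 3-node path graph a - b - c (illustrative
// only, not part of the bundle):
function floydWarshallDemo() {
    const INF = Infinity;
    const dist = [
        [0, 1, INF],
        [1, 0, 1],
        [INF, 1, 0],
    ];
    const n = dist.length;
    for (let k = 0; k < n; k++)
        for (let i = 0; i < n; i++)
            for (let j = 0; j < n; j++)
                if (dist[i][k] + dist[k][j] < dist[i][j])
                    dist[i][j] = dist[i][k] + dist[k][j];
    return dist; // [[0,1,2],[1,0,1],[2,1,0]]: a reaches c in 2 hops via b
}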
void 0 : _d.call(_c, features, labelIndices);\n this.trained = true;\n }\n /**\n * Reshape 2D data to 3D tensors\n */\n _reshapeToTensors(X) {\n const [h, w, c] = this.options.modes;\n const result = [];\n for (const x of X) {\n const tensor = [];\n let idx = 0;\n for (let k = 0; k < c; k++) {\n const matrix = [];\n for (let i = 0; i < h; i++) {\n const row = [];\n for (let j = 0; j < w; j++) {\n row.push(x[idx % x.length] || 0);\n idx++;\n }\n matrix.push(row);\n }\n tensor.push(matrix);\n }\n result.push(tensor);\n }\n return result;\n }\n /**\n * Factorize tensors using CP decomposition\n */\n _factorizeTensors(tensors) {\n // Simplified CP (CANDECOMP/PARAFAC) decomposition\n this.tensorFactors = [];\n for (const tensor of tensors) {\n const factors = [];\n // Factorize each mode\n for (let mode = 0; mode < this.options.modes.length; mode++) {\n const factor = new Array(this.options.modes[mode]).fill(0).map(() => new Array(this.options.tensorRank).fill(0).map(() => Math.random() * 0.1));\n factors.push(factor);\n }\n this.tensorFactors.push(factors);\n }\n }\n /**\n * Extract features from tensor factorization\n */\n _extractTensorFeatures(tensors) {\n const features = [];\n for (let i = 0; i < tensors.length; i++) {\n const factors = this.tensorFactors[i] || [];\n // Flatten factors\n const feature = [];\n for (const factor of factors) {\n for (const row of factor) {\n for (const val of row) {\n feature.push(val);\n }\n }\n }\n // Add tensor statistics\n const tensor = tensors[i];\n if (tensor && tensor.length > 0) {\n feature.push(tensor.length); // Height\n if (Array.isArray(tensor[0])) {\n feature.push(tensor[0].length); // Width\n if (Array.isArray(tensor[0][0])) {\n feature.push(tensor[0][0].length); // Channels\n }\n else {\n feature.push(1);\n }\n }\n else {\n feature.push(1);\n feature.push(1);\n }\n // Add tensor norm\n let norm = 0;\n for (const matrix of tensor) {\n if (Array.isArray(matrix)) {\n for (const row of matrix) {\n if (Array.isArray(row)) {\n for (const val of row) {\n norm += val * val;\n }\n }\n else {\n norm += row * row;\n }\n }\n }\n }\n feature.push(Math.sqrt(norm));\n }\n else {\n feature.push(0, 0, 0, 0);\n }\n features.push(feature);\n }\n return features;\n }\n /**\n * Predict on tensor data\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const tensorX = Array.isArray(X[0][0]) ? X : this._reshapeToTensors(X);\n const features = this._extractTensorFeatures(tensorX);\n const results = [];\n for (let i = 0; i < features.length; i++) {\n const preds = ((_b = (_a = this.kelm).predictFromVector) === null || _b === void 0 ? 
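// One caveat worth flagging: _factorizeTensors above only *initializes*
// random factor matrices and never runs the alternating-least-squares fit
// that a full CP (CANDECOMP/PARAFAC) decomposition performs, so the factor
// features are effectively random projections plus the appended tensor
// statistics. The model it gestures at reconstructs a 3-way tensor from
// per-mode factors as X[i][j][k] ≈ Σ_r A[i][r] * B[j][r] * C[k][r]. A rank-1
// instance of that form (illustrative only):
function cpRank1(a, b, c) {
    // outer product: tensor[i][j][k] = a[i] * b[j] * c[k]
    return a.map(ai => b.map(bj => c.map(ck => ai * bj * ck)));
}
const cpDemoTensor = cpRank1([1, 2], [3, 4], [5, 6]);
console.log(cpDemoTensor[1][0][1]); // 2 * 3 * 6 = 36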
void 0 : _b.call(_a, [features[i]], topK)) || [];\n const factors = this.tensorFactors[i] || [];\n for (const pred of preds.slice(0, topK)) {\n const factorCopy = factors.map((f) => f.map((row) => row.slice()));\n results.push({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n tensorFactors: factorCopy,\n });\n }\n }\n return results;\n }\n}\n\nfunction buildRFF(d, D, sigma = 1.0, rng = Math.random) {\n const W = new Float64Array(D * d);\n const b = new Float64Array(D);\n const s = 1 / Math.max(1e-12, sigma); // N(0, 1/sigma^2)\n for (let i = 0; i < D * d; i++)\n W[i] = gauss$1(rng) * s;\n for (let i = 0; i < D; i++)\n b[i] = rng() * 2 * Math.PI;\n return { W, b, D, d, sigma };\n}\nfunction mapRFF(rff, x) {\n const { W, b, D, d } = rff;\n const z = new Float64Array(2 * D);\n for (let k = 0; k < D; k++) {\n let dot = b[k];\n const off = k * d;\n for (let j = 0; j < d; j++)\n dot += W[off + j] * (x[j] || 0);\n z[k] = Math.cos(dot);\n z[D + k] = Math.sin(dot);\n }\n // L2 normalize block to keep ridge well-conditioned\n let s = 0;\n for (let i = 0; i < z.length; i++)\n s += z[i] * z[i];\n const inv = 1 / Math.sqrt(Math.max(s, 1e-12));\n for (let i = 0; i < z.length; i++)\n z[i] *= inv;\n return z;\n}\n// Box-Muller\nfunction gauss$1(rng) {\n let u = 0, v = 0;\n while (u === 0)\n u = rng();\n while (v === 0)\n v = rng();\n return Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);\n}\n\n// online_ridge.ts — maintain (Φ^T Φ + λI)^{-1} and β for linear ridge\nclass OnlineRidge {\n constructor(p, m, lambda = 1e-4) {\n this.p = p;\n this.m = m;\n this.lambda = lambda;\n this.Ainv = new Float64Array(p * p);\n this.Beta = new Float64Array(p * m);\n // Ainv = (λ I)^-1 = (1/λ) I\n const inv = 1 / Math.max(1e-12, lambda);\n for (let i = 0; i < p; i++)\n this.Ainv[i * p + i] = inv;\n }\n // rank-1 update with a single sample (φ, y)\n update(phi, y) {\n const { p, m, Ainv, Beta } = this;\n // u = Ainv * phi\n const u = new Float64Array(p);\n for (let i = 0; i < p; i++) {\n let s = 0, row = i * p;\n for (let j = 0; j < p; j++)\n s += Ainv[row + j] * phi[j];\n u[i] = s;\n }\n // denom = 1 + phi^T u\n let denom = 1;\n for (let j = 0; j < p; j++)\n denom += phi[j] * u[j];\n denom = Math.max(denom, 1e-12);\n const scale = 1 / denom;\n // Ainv <- Ainv - (u u^T) * scale\n for (let i = 0; i < p; i++) {\n const ui = u[i] * scale;\n for (let j = 0; j < p; j++)\n Ainv[i * p + j] -= ui * u[j];\n }\n // Beta <- Beta + Ainv * (phi * y^T)\n // compute t = Ainv * phi (reuse u after Ainv update)\n for (let i = 0; i < p; i++) {\n let s = 0, row = i * p;\n for (let j = 0; j < p; j++)\n s += Ainv[row + j] * phi[j];\n u[i] = s; // reuse u as t\n }\n // Beta += outer(u, y)\n for (let i = 0; i < p; i++) {\n const ui = u[i];\n for (let c = 0; c < m; c++)\n Beta[i * m + c] += ui * y[c];\n }\n }\n // yhat = φ^T Beta\n predict(phi) {\n const { p, m, Beta } = this;\n const out = new Float64Array(m);\n for (let c = 0; c < m; c++) {\n let s = 0;\n for (let i = 0; i < p; i++)\n s += phi[i] * Beta[i * m + c];\n out[c] = s;\n }\n return out;\n }\n}\n\nfunction isFiniteMatrix(M) {\n for (let i = 0; i < M.length; i++) {\n const row = M[i];\n if (!row || row.length !== M[0].length)\n return false;\n for (let j = 0; j < row.length; j++) {\n const v = row[j];\n if (!Number.isFinite(v))\n return false;\n }\n }\n return true;\n}\nfunction symmetrize(A) {\n const n = A.length;\n for (let i = 0; i < n; i++) {\n for (let j = i + 1; j < n; j++) {\n const v = 0.5 * (A[i][j] + A[j][i]);\n A[i][j] = v;\n 
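// Random Fourier features: with W drawn from N(0, 1/sigma^2) and the paired
// cos/sin blocks built by mapRFF, the feature dot product approximates the
// Gaussian kernel, z(x)·z(y) ≈ exp(-||x - y||^2 / (2 sigma^2)) (Rahimi &
// Recht, 2007). The phase b cancels because cos(u+b)cos(v+b) +
// sin(u+b)sin(v+b) = cos(u - v), and the L2 normalization is harmless since
// ||z||^2 = D exactly before scaling. A quick numeric sanity check, assuming
// only the buildRFF/mapRFF defined above (illustrative, not part of the
// bundle):
function rffSanityCheck() {
    const d = 8, D = 2048, sigma = 1.0;
    const rff = buildRFF(d, D, sigma);
    const x = new Float64Array(d).fill(0.1);
    const y = new Float64Array(d).fill(0.2);
    const zx = mapRFF(rff, x), zy = mapRFF(rff, y);
    let approx = 0;
    for (let i = 0; i < zx.length; i++)
        approx += zx[i] * zy[i];
    let sq = 0;
    for (let i = 0; i < d; i++) {
        const diff = x[i] - y[i];
        sq += diff * diff;
    }
    const exact = Math.exp(-sq / (2 * sigma * sigma)); // ≈ 0.9608
    return { approx, exact }; // should agree within a few percent at D = 2048
}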
A[j][i] = v;\n }\n }\n}\nfunction choleskySolve(A, Y) {\n const n = A.length, m = Y[0].length;\n // L\n const L = Array.from({ length: n }, () => Array(n).fill(0));\n for (let i = 0; i < n; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n if (i === j) {\n if (!(sum > 0) || !Number.isFinite(sum))\n return null; // not PD\n L[i][j] = Math.sqrt(sum);\n }\n else {\n L[i][j] = sum / L[j][j];\n }\n }\n }\n // forward solve: L Z = Y\n const Z = Array.from({ length: n }, () => Array(m).fill(0));\n for (let c = 0; c < m; c++) {\n for (let i = 0; i < n; i++) {\n let s = Y[i][c];\n for (let k = 0; k < i; k++)\n s -= L[i][k] * Z[k][c];\n Z[i][c] = s / L[i][i];\n }\n }\n // back solve: L^T Θ = Z\n const Theta = Array.from({ length: n }, () => Array(m).fill(0));\n for (let c = 0; c < m; c++) {\n for (let i = n - 1; i >= 0; i--) {\n let s = Z[i][c];\n for (let k = i + 1; k < n; k++)\n s -= L[k][i] * Theta[k][c];\n Theta[i][c] = s / L[i][i];\n }\n }\n return { Theta, L };\n}\n// CG fallback for SPD system A x = b, where A is given as matrix\nfunction cgSolve(A, b, tol, maxIter) {\n const n = A.length;\n const x = new Array(n).fill(0);\n const r = b.slice(); // r = b - A x = b initially\n const p = r.slice();\n let rsold = dot$1(r, r);\n let it = 0;\n for (; it < maxIter; it++) {\n const Ap = matvec(A, p);\n const alpha = rsold / Math.max(1e-300, dot$1(p, Ap));\n for (let i = 0; i < n; i++)\n x[i] += alpha * p[i];\n for (let i = 0; i < n; i++)\n r[i] -= alpha * Ap[i];\n const rsnew = dot$1(r, r);\n if (Math.sqrt(rsnew) <= tol)\n break;\n const beta = rsnew / Math.max(1e-300, rsold);\n for (let i = 0; i < n; i++)\n p[i] = r[i] + beta * p[i];\n rsold = rsnew;\n }\n return { x, iters: it + 1 };\n}\nfunction dot$1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction matvec(A, x) {\n const n = A.length, out = new Array(n).fill(0);\n for (let i = 0; i < n; i++) {\n const Ai = A[i];\n let s = 0;\n for (let j = 0; j < n; j++)\n s += Ai[j] * x[j];\n out[i] = s;\n }\n return out;\n}\n/**\n * Production-grade ridge regression solver:\n * Solves (K + λ I) Θ = Y, with symmetry enforcement, adaptive jitter, and CG fallback.\n */\nfunction ridgeSolvePro(K, Y, opts = {}) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n const info = [];\n const n = K.length;\n if (n === 0)\n return { Theta: [], usedLambda: (_a = opts.lambda) !== null && _a !== void 0 ? _a : 1e-4, method: \"cholesky\", info: [\"empty system\"] };\n if (!isFiniteMatrix(K))\n throw new Error(\"K contains NaN/Inf or ragged rows\");\n if (!Array.isArray(Y) || Y.length !== n || Y[0].length === undefined)\n throw new Error(\"Y shape mismatch\");\n if (!isFiniteMatrix(Y))\n throw new Error(\"Y contains NaN/Inf\");\n const m = Y[0].length;\n const baseLambda = Math.max(0, (_b = opts.lambda) !== null && _b !== void 0 ? _b : 1e-4);\n const ensureSym = (_c = opts.ensureSymmetry) !== null && _c !== void 0 ? _c : true;\n let jitter = (_d = opts.jitterInit) !== null && _d !== void 0 ? _d : 1e-10;\n const jitterMax = (_e = opts.jitterMax) !== null && _e !== void 0 ? _e : 1e-1;\n const jitterFactor = (_f = opts.jitterFactor) !== null && _f !== void 0 ? _f : 10;\n // Build A = (symmetrized K) + (lambda + jitter) I\n const A = Array.from({ length: n }, (_, i) => K[i].slice());\n if (ensureSym)\n symmetrize(A);\n // Try Cholesky with increasing jitter\n let usedLambda = baseLambda;\n while (true) {\n if ((_g = opts.abortSignal) === null || _g === void 0 ? 
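// OnlineRidge above maintains Ainv = (λI + Σ φφᵀ)⁻¹ through the
// Sherman-Morrison identity, (A + φφᵀ)⁻¹ = A⁻¹ - (A⁻¹φ)(A⁻¹φ)ᵀ / (1 + φᵀA⁻¹φ),
// which is exactly what the rank-1 `update` computes. One caveat: textbook
// recursive least squares updates the weights with the residual,
// β ← β + A⁻¹φ(y - φᵀβ), whereas `update` adds A⁻¹φ·yᵀ directly, so β drifts
// upward when the same sample is seen repeatedly; in omegaComposeAnswer
// further below the model appears to be only updated, never queried, so the
// drift is inert there. Minimal usage (illustrative only):
const onlineRidgeDemo = new OnlineRidge(2, 1, 1e-4); // p = 2 features, m = 1 output
onlineRidgeDemo.update(new Float64Array([1, 0]), [2]); // sample φ = (1, 0), y = 2
console.log(onlineRidgeDemo.predict(new Float64Array([1, 0]))[0]); // ≈ 2 (exact after one update)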
void 0 : _g.aborted)\n throw new Error(\"ridgeSolvePro aborted\");\n // add diag\n for (let i = 0; i < n; i++)\n A[i][i] = (ensureSym ? A[i][i] : (A[i][i] + A[i][i]) * 0.5) + usedLambda;\n const chol = choleskySolve(A, Y);\n if (chol) {\n info.push(`Cholesky ok with lambda=${usedLambda.toExponential(2)}`);\n return { Theta: chol.Theta, usedLambda, method: \"cholesky\", info };\n }\n else {\n // remove the just-added lambda before next try\n for (let i = 0; i < n; i++)\n A[i][i] -= usedLambda;\n if (jitter > jitterMax) {\n info.push(`Cholesky failed up to jitter=${jitterMax}; falling back to CG`);\n break;\n }\n usedLambda = baseLambda + jitter;\n info.push(`Cholesky failed; retry with lambda=${usedLambda.toExponential(2)}`);\n jitter *= jitterFactor;\n }\n }\n // CG fallback: solve A Θ = Y column-wise\n // Rebuild A once with final usedLambda\n for (let i = 0; i < n; i++)\n A[i][i] = (ensureSym ? A[i][i] : (A[i][i] + A[i][i]) * 0.5) + usedLambda;\n const tol = (_h = opts.cgTol) !== null && _h !== void 0 ? _h : 1e-6;\n const maxIter = (_j = opts.cgMaxIter) !== null && _j !== void 0 ? _j : Math.min(1000, n * 3);\n const Theta = Array.from({ length: n }, () => Array(m).fill(0));\n let maxIters = 0;\n for (let c = 0; c < m; c++) {\n if ((_k = opts.abortSignal) === null || _k === void 0 ? void 0 : _k.aborted)\n throw new Error(\"ridgeSolvePro aborted\");\n const b = new Array(n);\n for (let i = 0; i < n; i++)\n b[i] = Y[i][c];\n const { x, iters } = cgSolve(A, b, tol, maxIter);\n maxIters = Math.max(maxIters, iters);\n for (let i = 0; i < n; i++)\n Theta[i][c] = x[i];\n }\n info.push(`CG solved columns with tol=${tol}, maxIter=${maxIter}, max iters used=${maxIters}`);\n return { Theta, usedLambda, method: \"cg\", iters: maxIters, info };\n}\n\n// src/math/index.ts — production-grade numerics for Ω\n// Backward compatible with previous exports; adds robust, stable helpers.\n// ---------- Constants\nconst EPS = 1e-12; // general epsilon for divides/sqrt\nconst DISK_EPS = 0.95; // strict radius for Poincaré-like ops\nconst MAX_EXP = 709; // ~ ln(Number.MAX_VALUE)\nconst MIN_EXP = -745; // ~ ln(Number.MIN_VALUE)\n// ---------- Constructors / guards\nfunction zeros(n) { return new Float64Array(n); }\nfunction isFiniteVec(a) {\n const n = a.length;\n for (let i = 0; i < n; i++)\n if (!Number.isFinite(a[i]))\n return false;\n return true;\n}\nfunction asVec(a) {\n // Copy into Float64Array for consistent math perf\n return a instanceof Float64Array ? 
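// ridgeSolvePro above solves (K + λI)Θ = Y: symmetrize K, attempt a Cholesky
// factorization, retry with geometrically growing diagonal jitter when the
// matrix is not positive definite, and finally fall back to conjugate
// gradients column by column. Minimal usage on a 2x2 SPD system (illustrative
// only):
const rsDemo = ridgeSolvePro(
    [[2, 1], [1, 2]], // K
    [[1], [0]],       // Y, one output column
    { lambda: 0 });
// K⁻¹ = (1/3)[[2,-1],[-1,2]], so Θ = [[2/3], [-1/3]]
console.log(rsDemo.method, rsDemo.Theta); // "cholesky", [[0.666...], [-0.333...]]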
a : new Float64Array(Array.from(a));\n}\n// ---------- Basic algebra (pure, allocation)\nfunction dot(a, b) {\n const n = Math.min(a.length, b.length);\n let s = 0;\n for (let i = 0; i < n; i++)\n s += a[i] * b[i];\n return s;\n}\nfunction add(a, b) {\n const n = Math.min(a.length, b.length);\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] + b[i];\n return o;\n}\nfunction scal(a, k) {\n const n = a.length;\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] * k;\n return o;\n}\nfunction hadamard(a, b) {\n const n = Math.min(a.length, b.length);\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = a[i] * b[i];\n return o;\n}\nfunction tanhVec(a) {\n const n = a.length;\n const o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = Math.tanh(a[i]);\n return o;\n}\n// ---------- In-place variants (underscore suffix) to reduce GC\nfunction add_(out, a, b) {\n const n = Math.min(out.length, a.length, b.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] + b[i];\n return out;\n}\nfunction scal_(out, a, k) {\n const n = Math.min(out.length, a.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] * k;\n return out;\n}\nfunction hadamard_(out, a, b) {\n const n = Math.min(out.length, a.length, b.length);\n for (let i = 0; i < n; i++)\n out[i] = a[i] * b[i];\n return out;\n}\nfunction tanhVec_(out, a) {\n const n = Math.min(out.length, a.length);\n for (let i = 0; i < n; i++)\n out[i] = Math.tanh(a[i]);\n return out;\n}\n// ---------- Norms / normalization\nfunction l2$1(a) {\n // robust L2 (avoids NaN on weird input)\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * a[i];\n return Math.sqrt(Math.max(0, s));\n}\nfunction normalizeL2(a, eps = EPS) {\n const nrm = l2$1(a);\n if (!(nrm > eps) || !Number.isFinite(nrm))\n return new Float64Array(a.length); // zero vec\n const o = new Float64Array(a.length);\n const inv = 1 / nrm;\n for (let i = 0; i < a.length; i++)\n o[i] = a[i] * inv;\n return o;\n}\nfunction clampVec(a, lo = -Infinity, hi = Infinity) {\n const n = a.length, o = new Float64Array(n);\n for (let i = 0; i < n; i++)\n o[i] = Math.min(hi, Math.max(lo, a[i]));\n return o;\n}\n// ---------- Stats\nfunction mean(a) {\n if (a.length === 0)\n return 0;\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i];\n return s / a.length;\n}\nfunction variance(a, mu = mean(a)) {\n if (a.length === 0)\n return 0;\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - mu;\n s += d * d;\n }\n return s / a.length;\n}\nfunction standardize(a) {\n const mu = mean(a);\n const v = variance(a, mu);\n const sd = Math.sqrt(Math.max(v, 0));\n if (!(sd > EPS)) {\n // zero-variance edge: return zeros to avoid blowing up downstream\n return new Float64Array(a.length);\n }\n const o = new Float64Array(a.length);\n const inv = 1 / sd;\n for (let i = 0; i < a.length; i++)\n o[i] = (a[i] - mu) * inv;\n return o;\n}\n// ---------- Cosine (robust)\nfunction cosine$2(a, b) {\n var _a, _b;\n const n = Math.min(a.length, b.length);\n if (n === 0)\n return 0;\n let dotv = 0, na = 0, nb = 0;\n for (let i = 0; i < n; i++) {\n const ai = ((_a = a[i]) !== null && _a !== void 0 ? _a : 0), bi = ((_b = b[i]) !== null && _b !== void 0 ? _b : 0);\n dotv += ai * bi;\n na += ai * ai;\n nb += bi * bi;\n }\n const denom = Math.sqrt(Math.max(na * nb, EPS));\n const v = dotv / denom;\n return Number.isFinite(v) ? 
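// The logSumExp/softmax pair defined just below uses the standard max-shift
// trick: logSumExp(a) = m + log Σ exp(a_i - m) with m = max(a), so no
// exponential can overflow, and softmax(a)_i = exp(a_i - logSumExp(a)).
// A quick demonstration with logits that would overflow naively (illustrative
// only; softmax is a hoisted declaration, so the call is valid here):
const bigLogits = [1000, 1001, 1002]; // Math.exp(1002) === Infinity
// logSumExp = 1002 + log(e^-2 + e^-1 + e^0) ≈ 1002.4076
console.log(softmax(bigLogits)); // ≈ [0.0900, 0.2447, 0.6652], same as softmax([0, 1, 2])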
v : 0;\n}\n// ---------- Stable softmax / log-sum-exp\nfunction logSumExp(a) {\n let m = -Infinity;\n for (let i = 0; i < a.length; i++)\n if (a[i] > m)\n m = a[i];\n if (!Number.isFinite(m))\n m = 0;\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, a[i] - m)));\n return m + Math.log(Math.max(s, EPS));\n}\nfunction softmax(a) {\n const out = new Float64Array(a.length);\n const lse = logSumExp(a);\n for (let i = 0; i < a.length; i++)\n out[i] = Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, a[i] - lse)));\n // tiny renorm to remove drift\n let s = 0;\n for (let i = 0; i < out.length; i++)\n s += out[i];\n const inv = 1 / Math.max(s, EPS);\n for (let i = 0; i < out.length; i++)\n out[i] *= inv;\n return out;\n}\n// ---------- Argmax / Top-K\nfunction argmax(a) {\n var _a, _b;\n if (a.length === 0)\n return -1;\n let idx = 0;\n let m = ((_a = a[0]) !== null && _a !== void 0 ? _a : -Infinity);\n for (let i = 1; i < a.length; i++) {\n const v = ((_b = a[i]) !== null && _b !== void 0 ? _b : -Infinity);\n if (v > m) {\n m = v;\n idx = i;\n }\n }\n return idx;\n}\nfunction topK(a, k) {\n var _a;\n const n = a.length;\n if (k <= 0 || n === 0)\n return [];\n const K = Math.min(k, n);\n // simple partial selection (O(nk)); fine for small k in UI\n const res = [];\n for (let i = 0; i < n; i++) {\n const v = ((_a = a[i]) !== null && _a !== void 0 ? _a : -Infinity);\n if (res.length < K) {\n res.push({ index: i, value: v });\n if (res.length === K)\n res.sort((x, y) => y.value - x.value);\n }\n else if (v > res[K - 1].value) {\n res[K - 1] = { index: i, value: v };\n res.sort((x, y) => y.value - x.value);\n }\n }\n return res;\n}\n// ---------- Safe exp/log/sigmoid\nfunction expSafe(x) {\n return Math.exp(Math.max(MIN_EXP, Math.min(MAX_EXP, x)));\n}\nfunction log1pSafe(x) {\n // log(1+x) with guard (x>-1)\n const y = Math.max(x, -1 + EPS);\n return Math.log(1 + y);\n}\nfunction sigmoid$1(x) {\n if (x >= 0) {\n const z = Math.exp(-Math.min(x, MAX_EXP));\n return 1 / (1 + z);\n }\n else {\n const z = Math.exp(Math.max(x, MIN_EXP));\n return z / (1 + z);\n }\n}\n// ---------- Hyperbolic (proxy) distance with strict disk clamp\n// Assumes inputs are already bounded; still clamps defensively.\nfunction hDistProxy(a, b) {\n // clamp radii to avoid denom blow-ups\n let na = 0, nb = 0, sum = 0;\n for (let i = 0; i < a.length; i++) {\n const ai = Math.max(-DISK_EPS, Math.min(DISK_EPS, a[i]));\n const bi = Math.max(-DISK_EPS, Math.min(DISK_EPS, b[i]));\n na += ai * ai;\n nb += bi * bi;\n const d = ai - bi;\n sum += d * d;\n }\n const num = 2 * Math.sqrt(Math.max(0, sum));\n const den = Math.max(EPS, (1 - na) * (1 - nb));\n // smooth, monotone proxy; bounded growth; stable near boundary\n return Math.log1p(Math.min(2 * num / den, 1e12));\n}\n// ---------- Small utilities for UI formatting\nfunction fmtHead(a, n = 4, digits = 3) {\n return Array.from(a).slice(0, n).map(v => v.toFixed(digits)).join(\", \");\n}\n\n/******************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise, SuppressedError, Symbol, Iterator */\r\n\r\n\r\nfunction __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\ntypeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\r\n var e = new Error(message);\r\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\r\n};\n\n// Omega.ts v2 — improved local reasoning + summarization\n// uses your math.ts, rff.ts, online_ridge.ts\n// -------- sentence + text helpers ----------\nfunction splitSentences$1(text) {\n return text\n .replace(/\\s+/g, \" \")\n .split(/(?<=[.?!])\\s+/)\n .map((s) => s.trim())\n .filter((s) => s.length > 8 && /\\w/.test(s));\n}\nfunction clean(text) {\n return text\n .replace(/```[\\s\\S]*?```/g, \" \")\n .replace(/`[^`]+`/g, \" \")\n .replace(/\\[[^\\]]*\\]\\([^)]*\\)/g, \"\") // strip markdown links\n .replace(/[-–>•→]/g, \" \")\n .replace(/\\s+/g, \" \")\n .trim();\n}\nfunction isMetaSentence(s) {\n // simple heuristics for table-of-contents or chapter headings\n return (/^(\\*|#)/.test(s) || // markdown markers\n /chapter/i.test(s) || // \"Chapter 11\", \"Chapters 11–15\"\n /part\\s*\\d+/i.test(s) || // \"Part 3\"\n /section/i.test(s) || // \"Section 2.3\"\n /^\\s*[A-Z]\\)\\s*$/.test(s) || // single-letter outlines\n s.length < 15 // very short stray lines\n );\n}\nfunction rewrite(summary) {\n return summary\n .replace(/\\s+[-–>•→]\\s+/g, \" \")\n .replace(/\\s+\\.\\s+/g, \". \")\n .replace(/([a-z]) - ([a-z])/gi, \"$1-$2\")\n .replace(/\\s{2,}/g, \" \")\n .trim();\n}\n// ------------------------------------------------------------\nfunction omegaComposeAnswer(question_1, items_1) {\n return __awaiter(this, arguments, void 0, function* (question, items, opts = {}) {\n // License check removed // Premium feature - requires valid license\n if (!(items === null || items === void 0 ? void 0 : items.length))\n return \"No results found.\";\n const { dim = 64, features = 32, sigma = 1.0, rounds = 3, topSentences = 8, personality = \"neutral\", } = opts;\n // ---------- 1. Clean + collect sentences ----------\n const allText = items.map((i) => clean(i.content)).join(\" \");\n let sentences = splitSentences$1(allText)\n .filter(s => !isMetaSentence(s))\n .slice(0, 120);\n if (sentences.length === 0)\n return clean(items[0].content).slice(0, 400);\n // ---------- 2. 
Build encoder + ridge ----------\n const rff = buildRFF(dim, features, sigma);\n const ridge = new OnlineRidge(2 * features, 1, 1e-3);\n const encode = (s) => {\n const vec = new Float64Array(dim);\n const len = Math.min(s.length, dim);\n for (let i = 0; i < len; i++)\n vec[i] = s.charCodeAt(i) / 255;\n return mapRFF(rff, normalizeL2(vec));\n };\n const qVec = encode(question);\n const qTokens = question.toLowerCase().split(/\\W+/).filter((t) => t.length > 2);\n // ---------- 3. Score + select top sentences ----------\n const scored = sentences.map((s) => {\n const v = encode(s);\n let w = cosine$2(v, qVec);\n // small lexical bonus for overlapping words\n const lower = s.toLowerCase();\n for (const t of qTokens)\n if (lower.includes(t))\n w += 0.02;\n return { s, v, w };\n });\n scored.sort((a, b) => b.w - a.w);\n let top = scored.slice(0, topSentences);\n // ---------- 4. Recursive compression ----------\n let summary = top.map((t) => t.s).join(\" \");\n let meanVec = new Float64Array(2 * features);\n for (let r = 0; r < rounds; r++) {\n const subs = splitSentences$1(summary).slice(0, topSentences);\n const embeds = subs.map((s) => encode(s));\n const weights = embeds.map((v) => cosine$2(v, qVec));\n for (let i = 0; i < embeds.length; i++) {\n ridge.update(embeds[i], new Float64Array([weights[i]]));\n }\n // weighted mean vector\n meanVec.fill(0);\n for (let i = 0; i < embeds.length; i++) {\n const v = embeds[i], w = weights[i];\n for (let j = 0; j < v.length; j++)\n meanVec[j] += v[j] * w;\n }\n const norm = l2$1(meanVec) || 1;\n for (let j = 0; j < meanVec.length; j++)\n meanVec[j] /= norm;\n const rescored = subs.map((s) => ({\n s,\n w: cosine$2(encode(s), meanVec),\n }));\n rescored.sort((a, b) => b.w - a.w);\n summary = rescored\n .slice(0, Math.max(3, Math.floor(topSentences / 2)))\n .map((r) => r.s)\n .join(\" \");\n }\n // ---------- 5. Compose readable answer ----------\n summary = rewrite(summary);\n const firstChar = summary.charAt(0).toUpperCase() + summary.slice(1);\n const title = items[0].heading || \"Answer\";\n const prefix = personality === \"teacher\"\n ? \"Here’s a simple way to think about it:\\n\\n\"\n : personality === \"scientist\"\n ? \"From the retrieved material, we can infer:\\n\\n\"\n : \"\";\n return `${prefix}${firstChar}\\n\\n(${title}, Ω-synthesized)`;\n });\n}\n\n// Vectorization utilities for sparse and dense vectors\n// Extracted from workers for reuse\n/**\n * Compute TF-IDF vector from tokens\n */\nfunction toTfidf(tokens, idf, vmap, headingW = 1) {\n const counts = new Map();\n // crude heuristic: first 8 tokens considered heading-weighted\n for (let i = 0; i < tokens.length; i++) {\n const t = tokens[i];\n const id = vmap.get(t);\n if (id === undefined)\n continue;\n const w = (i < 8) ? 
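// omegaComposeAnswer above is fully local: it encodes each sentence by
// character codes into an RFF feature vector, scores sentences against the
// question by cosine plus a small lexical-overlap bonus, then runs a few
// rounds of re-scoring against a relevance-weighted mean vector to compress
// the summary. Minimal usage with hypothetical retrieval results
// (illustrative only):
async function omegaDemo() {
    const items = [{
            heading: 'Goroutines',
            content: 'A goroutine is a lightweight thread managed by the Go runtime. Goroutines are started with the go keyword.',
        }];
    const answer = await omegaComposeAnswer('what is a goroutine', items, {
        personality: 'teacher',
        topSentences: 4,
    });
    console.log(answer);
}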
headingW : 1;\n counts.set(id, (counts.get(id) || 0) + w);\n }\n const maxTf = Math.max(1, ...counts.values());\n const v = new Map();\n for (const [i, c] of counts) {\n const tf = 0.5 + 0.5 * (c / maxTf);\n v.set(i, tf * (idf[i] || 0));\n }\n return v;\n}\n/**\n * Cosine similarity between two sparse vectors\n */\nfunction cosineSparse(a, b) {\n let dot = 0, na = 0, nb = 0;\n for (const [i, av] of a) {\n na += av * av;\n const bv = b.get(i);\n if (bv)\n dot += av * bv;\n }\n for (const [, bv] of b)\n nb += bv * bv;\n if (!na || !nb)\n return 0;\n return dot / (Math.sqrt(na) * Math.sqrt(nb));\n}\n/**\n * Convert sparse vector to dense Float64Array\n */\nfunction sparseToDense(v, dim) {\n const x = new Float64Array(dim);\n for (const [i, val] of v)\n x[i] = val;\n return x;\n}\n/**\n * Dot product of two dense vectors\n */\nfunction dotProd$1(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/**\n * Base kernel function (RBF, cosine, or poly2)\n */\nfunction baseKernel$1(a, b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd$1(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd$1(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\n/**\n * Kernel similarity between two dense vectors\n */\nfunction kernelSim(a, b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd$1(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd$1(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\n/**\n * Project sparse vector to dense using Nyström landmarks\n */\nfunction projectToDense(v, vocabSize, landmarkMat, kernel, sigma) {\n const x = sparseToDense(v, vocabSize);\n const feats = new Float64Array(landmarkMat.length);\n for (let j = 0; j < landmarkMat.length; j++) {\n const l = landmarkMat[j];\n feats[j] = baseKernel$1(x, l, kernel, sigma);\n }\n const n = Math.hypot(...feats);\n if (n > 0)\n for (let i = 0; i < feats.length; i++)\n feats[i] /= n;\n return feats;\n}\n\n// Tokenization and stemming utilities\n// Extracted from workers for reuse\n// Memo for speed\nconst STEM_CACHE = new Map();\nfunction normalizeWord(raw) {\n const k = raw;\n const cached = STEM_CACHE.get(k);\n if (cached)\n return cached;\n let w = raw.toLowerCase();\n w = w.replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, '');\n if (w.length <= 2) {\n STEM_CACHE.set(k, w);\n return w;\n }\n // plural → singular\n if (w.endsWith('ies') && w.length > 4) {\n w = w.slice(0, -3) + 'y';\n }\n else if (/(xes|ches|shes|zes|sses)$/.test(w) && w.length > 4) {\n w = w.replace(/(xes|ches|shes|zes|sses)$/, (m) => (m === 'sses' ? 
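// toTfidf above returns a sparse Map<termId, weight> built from augmented
// term frequency, tf = 0.5 + 0.5 * (count / maxCount), scaled by idf, and
// cosineSparse compares two such maps without densifying them. A toy example
// over a hypothetical 3-term vocabulary (illustrative only):
const vmapDemo = new Map([['go', 0], ['func', 1], ['chan', 2]]);
const idfDemo = [1.0, 1.5, 2.0];
const tfidfA = toTfidf(['go', 'func', 'func'], idfDemo, vmapDemo);
// tfidfA: go -> (0.5 + 0.5 * (1/2)) * 1.0 = 0.75, func -> (0.5 + 0.5) * 1.5 = 1.5
const tfidfB = toTfidf(['func', 'chan'], idfDemo, vmapDemo);
console.log(cosineSparse(tfidfA, tfidfB)); // ≈ 0.54, driven by the shared 'func' term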
'ss' : m.replace(/es$/, '')));\n }\n else if (w.endsWith('s') && !/(ss|us)$/.test(w) && w.length > 3) {\n w = w.slice(0, -1);\n }\n // conservative suffix trimming\n const rules = [\n [/ization$|isation$/, 'ize'],\n [/ational$/, 'ate'],\n [/fulness$/, 'ful'],\n [/ousness$/, 'ous'],\n [/iveness$/, 'ive'],\n [/ability$/, 'able'],\n [/ness$/, ''],\n [/ment$/, ''],\n [/ations?$/, 'ate'],\n [/izer$|iser$/, 'ize'],\n [/ally$/, 'al'],\n [/ically$/, 'ic'],\n [/ingly$|edly$/, ''],\n [/ing$|ed$/, ''],\n ];\n for (const [re, rep] of rules) {\n if (re.test(w) && w.length - rep.length >= 4) {\n w = w.replace(re, rep);\n break;\n }\n }\n STEM_CACHE.set(k, w);\n return w;\n}\nfunction tokenize$1(text, doStem) {\n const base = text.toLowerCase()\n .replace(/[`*_>~]/g, ' ')\n .replace(/[^a-z0-9]+/g, ' ')\n .split(/\\s+/)\n .filter(Boolean);\n if (!doStem)\n return base;\n const out = [];\n for (const t of base) {\n const n = normalizeWord(t);\n if (n && n.length > 1)\n out.push(n);\n }\n return out;\n}\nfunction expandQuery(q) {\n const adds = [];\n if (/\\bmap\\b/.test(q))\n adds.push('dict key value make');\n if (/\\bchan|channel\\b/.test(q))\n adds.push('goroutine concurrency select buffer');\n if (/\\berror\\b/.test(q))\n adds.push('fmt wrap unwrap sentinel try catch');\n if (/\\bstruct\\b/.test(q))\n adds.push('field method receiver init zero value');\n return q + ' ' + adds.join(' ');\n}\n\n// Index building utilities\n// Extracted from workers for reuse\n// License removed - all features are now free!\n/**\n * Build vocabulary and IDF from chunks\n */\nfunction buildVocabAndIdf(chunks, vocabSize, useStem) {\n const docsTokens = chunks.map(ch => tokenize$1((ch.heading + ' \\n' + ch.content), useStem));\n const df = new Map();\n for (const toks of docsTokens) {\n const unique = new Set(toks);\n for (const t of unique)\n df.set(t, (df.get(t) || 0) + 1);\n }\n const sorted = [...df.entries()].sort((a, b) => b[1] - a[1]).slice(0, vocabSize);\n const vocabMap = new Map(sorted.map(([tok], i) => [tok, i]));\n const idf = new Array(vocabMap.size).fill(0);\n const N = docsTokens.length;\n for (const [tok, i] of vocabMap.entries()) {\n const dfi = df.get(tok) || 1;\n idf[i] = Math.log((N + 1) / (dfi + 1)) + 1;\n }\n return { vocabMap, idf };\n}\n/**\n * Build TF-IDF vectors for all chunks\n */\nfunction buildTfidfDocs(chunks, vocabMap, idf, headingW, useStem) {\n return chunks.map(ch => {\n const toks = tokenize$1((ch.heading + ' \\n' + ch.content), useStem);\n return toTfidf(toks, idf, vocabMap, headingW);\n });\n}\n/**\n * Build Nyström landmarks from TF-IDF documents\n */\nfunction buildLandmarks(tfidfDocs, vocabSize, numLandmarks) {\n const L = Math.max(32, numLandmarks);\n const step = Math.max(1, Math.floor(Math.max(1, tfidfDocs.length) / L));\n const landmarksIdx = Array.from({ length: L }, (_, k) => Math.min(tfidfDocs.length - 1, k * step));\n const landmarkMat = landmarksIdx.map(i => sparseToDense(tfidfDocs[i], vocabSize));\n return { landmarksIdx, landmarkMat };\n}\n/**\n * Build dense projections for all TF-IDF documents\n */\nfunction buildDenseDocs(tfidfDocs, vocabSize, landmarkMat, kernel, sigma) {\n return tfidfDocs.map(v => {\n const x = sparseToDense(v, vocabSize);\n const feats = new Float64Array(landmarkMat.length);\n for (let j = 0; j < landmarkMat.length; j++) {\n const l = landmarkMat[j];\n feats[j] = baseKernel(x, l, kernel, sigma);\n }\n const n = Math.hypot(...feats);\n if (n > 0)\n for (let i = 0; i < feats.length; i++)\n feats[i] /= n;\n return feats;\n });\n}\nfunction baseKernel(a, 
b, k, sigma) {\n if (k === 'cosine') {\n const dot = dotProd(a, b), na = Math.hypot(...a), nb = Math.hypot(...b);\n return (na && nb) ? (dot / (na * nb)) : 0;\n }\n else if (k === 'poly2') {\n const dot = dotProd(a, b);\n return Math.pow((dot + 1), 2);\n }\n else {\n let s = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n s += d * d;\n }\n return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));\n }\n}\nfunction dotProd(a, b) {\n let s = 0;\n for (let i = 0; i < a.length; i++)\n s += a[i] * b[i];\n return s;\n}\n/**\n * Build complete index from chunks\n */\nfunction buildIndex(opts) {\n // License check removed // Premium feature - requires valid license\n const { chunks, vocab, landmarks, headingW, useStem, kernel, sigma } = opts;\n // Build vocab and IDF\n const { vocabMap, idf } = buildVocabAndIdf(chunks, vocab, useStem);\n // Build TF-IDF vectors\n const tfidfDocs = buildTfidfDocs(chunks, vocabMap, idf, headingW, useStem);\n // Build landmarks\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabMap.size, landmarks);\n // Build dense projections\n const denseDocs = buildDenseDocs(tfidfDocs, vocabMap.size, landmarkMat, kernel, sigma);\n return {\n vocabMap,\n idf,\n tfidfDocs,\n landmarksIdx,\n landmarkMat,\n denseDocs,\n };\n}\n\n// Hybrid retrieval system (sparse + dense + keyword bonus)\n// Extracted from workers for reuse\n// License removed - all features are now free!\n/**\n * Compute keyword bonus scores for chunks\n */\nfunction keywordBonus(chunks, query) {\n const kws = Array.from(new Set(query.toLowerCase().split(/\\W+/).filter(t => t.length > 2)));\n const syntaxBoost = /\\b(define|declare|syntax|example|function|struct|map|interface)\\b/i.test(query);\n return chunks.map(c => {\n const text = c.rich || c.content || '';\n const lc = text.toLowerCase();\n let hit = 0;\n for (const k of kws)\n if (lc.includes(k))\n hit++;\n if (syntaxBoost && /```/.test(text))\n hit += 5; // strong bonus for code presence\n return Math.min(1.0, hit * 0.03);\n });\n}\n/**\n * Get top K indices from scores\n */\nfunction topKIndices(arr, k) {\n const idx = Array.from(arr, (_, i) => i);\n idx.sort((i, j) => (arr[j] - arr[i]));\n return idx.slice(0, k);\n}\n/**\n * Clamp value between min and max\n */\nfunction clamp$1(x, a, b) {\n return Math.max(a, Math.min(b, x));\n}\n/**\n * Perform hybrid retrieval (sparse + dense + keyword bonus)\n */\nfunction hybridRetrieve(opts) {\n // License check removed // Premium feature - requires valid license\n const { query, chunks, vocabMap, idf, tfidfDocs, denseDocs, landmarksIdx, landmarkMat, vocabSize, kernel, sigma, alpha, beta, ridge, headingW, useStem, expandQuery: shouldExpand, topK: k, prefilter, } = opts;\n // Expand query if needed\n const qexp = shouldExpand ? expandQuery(query) : query;\n const toks = tokenize$1(qexp, useStem);\n const qvec = toTfidf(toks, idf, vocabMap, headingW);\n const qdense = projectToDense(qvec, vocabSize, landmarkMat, kernel, sigma);\n // Compute sparse (TF-IDF) scores\n const tfidfScores = tfidfDocs.map(v => cosineSparse(v, qvec));\n // Compute dense (kernel) scores\n const denseScores = denseDocs.map((v) => kernelSim(v, qdense, kernel, sigma));\n // Compute keyword bonus\n const bonus = keywordBonus(chunks, query);\n // Hybrid scoring with ridge regularization\n const alphaClamped = clamp$1(alpha, 0, 1);\n const lambda = ridge !== null && ridge !== void 0 ? 
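// buildIndex above wires the pieces together: vocabulary and idf from the
// chunks, a sparse TF-IDF vector per chunk, evenly strided landmark
// documents, and a Nystrom-style dense projection in which each chunk is
// represented by its kernel similarity to every landmark and then
// L2-normalized. Minimal usage with hypothetical chunks (illustrative only):
const idxDemo = buildIndex({
    chunks: [
        { heading: 'Maps', content: 'Use make to create a map.' },
        { heading: 'Channels', content: 'Channels connect goroutines.' },
    ],
    vocab: 64,     // vocabulary size cap
    landmarks: 32, // requested landmark count (a floor of 32 applies)
    headingW: 2,
    useStem: true,
    kernel: 'rbf',
    sigma: 1.0,
});
console.log(idxDemo.denseDocs[0].length); // one feature per landmark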
ridge : 0.08;\n const scores = denseScores.map((d, i) => {\n const t = tfidfScores[i];\n const b = beta * bonus[i];\n // Ridge damping on ALL components (dense, tfidf, and keyword bonus)\n const reg = 1 / (1 + lambda * (d * d + t * t + 0.5 * b * b));\n const s = reg * (alphaClamped * d + (1 - alphaClamped) * t + b);\n // soft clip extremes; helps prevent a single noisy dimension from dominating\n return Math.tanh(s);\n });\n // Pre-filter then final topK (retrieval stage)\n const pre = Math.max(k, prefilter !== null && prefilter !== void 0 ? prefilter : 0);\n const idxs = topKIndices(scores, pre);\n const finalIdxs = topKIndices(idxs.map(i => scores[i]), k).map(k => idxs[k]);\n // Build result items\n const items = finalIdxs.map(i => {\n const c = chunks[i];\n const body = (c.rich && c.rich.trim()) || (c.content && c.content.trim()) || '(see subsections)';\n return {\n score: scores[i],\n heading: c.heading,\n content: body,\n index: i,\n };\n });\n return {\n items,\n scores: finalIdxs.map(i => scores[i]),\n indices: finalIdxs,\n tfidfScores: finalIdxs.map(i => tfidfScores[i]),\n denseScores: finalIdxs.map(i => denseScores[i]),\n };\n}\n\n// OmegaRR.ts\n// Reranker + Reducer for AsterMind docs\n// - Extracts rich query–chunk features (sparse text + structural signals)\n// - Trains a tiny ridge model on-the-fly with weak supervision (per query)\n// - Produces score_rr and p_relevant\n// - Filters with threshold + MMR coverage under a character budget\n// - (v2) Optionally exposes engineered features (values + names) for TE/diagnostics\n/* ====================== Tokenization ======================= */\nconst STOP$1 = new Set([\n \"a\", \"an\", \"the\", \"and\", \"or\", \"but\", \"if\", \"then\", \"else\", \"for\", \"to\", \"of\", \"in\", \"on\", \"at\", \"by\", \"with\",\n \"is\", \"are\", \"was\", \"were\", \"be\", \"been\", \"being\", \"as\", \"from\", \"that\", \"this\", \"it\", \"its\", \"you\", \"your\",\n \"i\", \"we\", \"they\", \"he\", \"she\", \"them\", \"his\", \"her\", \"our\", \"us\", \"do\", \"does\", \"did\", \"done\", \"not\", \"no\",\n \"yes\", \"can\", \"could\", \"should\", \"would\", \"may\", \"might\", \"into\", \"about\", \"over\", \"under\", \"between\"\n]);\nfunction tokenize(s) {\n return s\n .toLowerCase()\n .replace(/[`*_#>~=\\[\\]{}()!?.:,;'\"<>|/\\\\+-]+/g, \" \")\n .split(/\\s+/)\n .filter(t => t && !STOP$1.has(t));\n}\nfunction unique(arr) { return Array.from(new Set(arr)); }\nfunction buildCorpusStats(docs) {\n const vocab = new Map();\n const tfs = [];\n const docLens = [];\n let nextId = 0;\n for (const d of docs) {\n const toks = tokenize(d);\n docLens.push(toks.length);\n const tf = new Map();\n for (const w of toks) {\n let id = vocab.get(w);\n if (id === undefined) {\n id = nextId++;\n vocab.set(w, id);\n }\n tf.set(id, (tf.get(id) || 0) + 1);\n }\n tfs.push(tf);\n }\n const N = docs.length;\n const df = Array(nextId).fill(0);\n for (const tf of tfs)\n for (const id of tf.keys())\n df[id] += 1;\n const idf = df.map(df_i => Math.log((N + 1) / (df_i + 1)) + 1);\n const avgLen = docLens.reduce((a, b) => a + b, 0) / Math.max(1, N);\n return { stats: { vocab, idf, avgLen, df }, tf: tfs, docLens };\n}\nfunction tfidfVector(tf, idf) {\n const out = new Map();\n let norm2 = 0;\n for (const [i, f] of tf) {\n const val = (f) * (idf[i] || 0);\n out.set(i, val);\n norm2 += val * val;\n }\n const norm = Math.sqrt(norm2) || 1e-12;\n for (const [i, v] of out)\n out.set(i, v / norm);\n return out;\n}\nfunction cosine$1(a, b) {\n const [small, large] = a.size < b.size ? 
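// The hybrid score above blends the dense kernel score d, the sparse TF-IDF
// score t, and the beta-scaled keyword bonus b with a ridge-style damping
// factor before a tanh soft clip:
//   reg = 1 / (1 + λ(d² + t² + 0.5·b²)),  s = tanh(reg · (α·d + (1-α)·t + b))
// Worked numbers with hypothetical scores (illustrative only):
const hybridDemo = (() => {
    const d = 0.8, t = 0.6, b = 0.05, alpha = 0.5, lambda = 0.08;
    const reg = 1 / (1 + lambda * (d * d + t * t + 0.5 * b * b)); // ≈ 0.926
    return Math.tanh(reg * (alpha * d + (1 - alpha) * t + b));
})(); // ≈ 0.60: damping and the clip shave the raw combination of 0.75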
[a, b] : [b, a];\n let dot = 0;\n for (const [i, v] of small) {\n const u = large.get(i);\n if (u !== undefined)\n dot += v * u;\n }\n return dot;\n}\nfunction bm25Score(qTf, dTf, stats, dLen, k1 = 1.5, b = 0.75) {\n let score = 0;\n for (const [i] of qTf) {\n const f = dTf.get(i) || 0;\n if (f <= 0)\n continue;\n const idf = Math.log(((stats.df[i] || 0) + 0.5) / ((stats.idf.length - (stats.df[i] || 0)) + 0.5) + 1);\n const denom = f + k1 * (1 - b + b * (dLen / (stats.avgLen || 1)));\n score += idf * ((f * (k1 + 1)) / (denom || 1e-12));\n }\n return score;\n}\n/* ========== Light Random Projection from TF-IDF (dense hint) ========== */\nfunction projectSparse(vec, dim, seed = 1337) {\n // deterministic per (feature, j) hash: simple LCG/xorshift mix\n const out = new Float64Array(dim);\n for (const [i, v] of vec) {\n let s = (i * 2654435761) >>> 0;\n for (let j = 0; j < dim; j++) {\n s ^= s << 13;\n s ^= s >>> 17;\n s ^= s << 5;\n const r = ((s >>> 0) / 4294967296) * 2 - 1; // [-1,1]\n out[j] += v * r;\n }\n }\n let n2 = 0;\n for (let j = 0; j < dim; j++)\n n2 += out[j] * out[j];\n const n = Math.sqrt(n2) || 1e-12;\n for (let j = 0; j < dim; j++)\n out[j] /= n;\n return out;\n}\n/* ===================== Structural Signals ===================== */\nfunction containsGoCodeBlock(s) {\n return /```+\\s*go([\\s\\S]*?)```/i.test(s) || /\\bfunc\\s+\\w+\\s*\\(.*\\)\\s*\\w*\\s*{/.test(s);\n}\nfunction containsCodeBlock(s) {\n return /```+/.test(s) || /{[^}]*}/.test(s);\n}\nfunction headingQueryMatch(head, q) {\n const ht = unique(tokenize(head));\n const qt = new Set(tokenize(q));\n if (ht.length === 0 || qt.size === 0)\n return 0;\n let hit = 0;\n for (const t of ht)\n if (qt.has(t))\n hit++;\n return hit / ht.length;\n}\nfunction jaccard$1(a, b) {\n const A = new Set(tokenize(a));\n const B = new Set(tokenize(b));\n let inter = 0;\n for (const t of A)\n if (B.has(t))\n inter++;\n const uni = A.size + B.size - inter;\n return uni === 0 ? 0 : inter / uni;\n}\nfunction golangSpecFlag(s) {\n return /(golang\\.org|go\\.dev|pkg\\.go\\.dev)/i.test(s) ? 1 : 0;\n}\nfunction buildFeatures$1(q, chunk, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, cLen, projQ, projC) {\n var _a;\n const f = [];\n const names = [];\n // 1) Sparse sims\n const cos = cosine$1(qTfIdf, cTfIdf);\n f.push(cos);\n names.push(\"cosine_tfidf\");\n const bm25 = bm25Score(qTfRaw, cTfRaw, stats, cLen);\n f.push(bm25);\n names.push(\"bm25\");\n // 2) Heading & lexical overlaps\n const hMatch = headingQueryMatch(chunk.heading || \"\", q);\n f.push(hMatch);\n names.push(\"heading_match_frac\");\n const jac = jaccard$1(q, chunk.content || \"\");\n f.push(jac);\n names.push(\"jaccard_tokens\");\n // 3) Structural flags\n const hasGo = containsGoCodeBlock(chunk.rich || chunk.content || \"\");\n const hasCode = containsCodeBlock(chunk.rich || chunk.content || \"\");\n f.push(hasGo ? 1 : 0);\n names.push(\"flag_go_code\");\n f.push(hasCode ? 1 : 0);\n names.push(\"flag_any_code\");\n // 4) Source cues\n f.push(golangSpecFlag(chunk.content || \"\") ? 1 : 0);\n names.push(\"flag_go_spec_link\");\n // 5) Prior score (baseline)\n f.push(((_a = chunk.score_base) !== null && _a !== void 0 ? 
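// bm25Score above keeps the usual BM25 shape, idf · f(k1 + 1) / (f + k1(1 - b
// + b · dLen/avgLen)), but with two local quirks worth noting: its idf uses
// the vocabulary size (stats.idf.length) where a document count would
// normally appear, and the ratio is df/(V - df) rather than the textbook
// (N - df)/df, so frequent terms are weighted *up* here. That may be a
// deliberate choice for the weak-supervision features, but it differs from
// standard BM25, whose idf looks like this (illustrative only):
function bm25IdfTextbook(N, df) {
    // N = number of documents, df = documents containing the term
    return Math.log((N - df + 0.5) / (df + 0.5) + 1);
}
console.log(bm25IdfTextbook(100, 5));  // rare term   -> ≈ 2.91 (high weight)
console.log(bm25IdfTextbook(100, 90)); // common term -> ≈ 0.11 (low weight)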
_a : 0));\n names.push(\"prior_score_base\");\n // 6) Length heuristics (prefer concise answers)\n const lenChars = (chunk.content || \"\").length;\n f.push(1 / Math.sqrt(1 + lenChars));\n names.push(\"len_inv_sqrt\");\n // 7) Dense hint from projection\n if (projQ && projC) {\n let dot = 0, l1 = 0;\n for (let i = 0; i < projQ.length; i++) {\n dot += projQ[i] * projC[i];\n l1 += Math.abs(projQ[i] - projC[i]);\n }\n f.push(dot);\n names.push(\"proj_dot\");\n f.push(l1 / projQ.length);\n names.push(\"proj_l1mean\");\n }\n return { names, values: f };\n}\n/* ======================== Ridge Model ======================== */\nclass Ridge {\n constructor() {\n this.w = null;\n this.mu = null;\n this.sigma = null;\n }\n fit(X, y, lambda = 1e-2) {\n var _a;\n const n = X.length;\n const d = ((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n if (n === 0 || d === 0) {\n this.w = new Float64Array(d);\n return;\n }\n // standardize\n const mu = new Float64Array(d);\n const sig = new Float64Array(d);\n for (let j = 0; j < d; j++) {\n let m = 0;\n for (let i = 0; i < n; i++)\n m += X[i][j];\n m /= n;\n mu[j] = m;\n let v = 0;\n for (let i = 0; i < n; i++) {\n const z = X[i][j] - m;\n v += z * z;\n }\n sig[j] = Math.sqrt(v / n) || 1;\n }\n const Z = Array.from({ length: n }, (_, i) => new Float64Array(d));\n for (let i = 0; i < n; i++)\n for (let j = 0; j < d; j++)\n Z[i][j] = (X[i][j] - mu[j]) / sig[j];\n // A = Z^T Z + λI, Zy = Z^T y\n const A = Array.from({ length: d }, () => new Float64Array(d));\n const Zy = new Float64Array(d);\n for (let i = 0; i < n; i++) {\n const zi = Z[i];\n const yi = y[i];\n for (let j = 0; j < d; j++) {\n Zy[j] += zi[j] * yi;\n const zij = zi[j];\n for (let k = 0; k <= j; k++)\n A[j][k] += zij * zi[k];\n }\n }\n for (let j = 0; j < d; j++) {\n for (let k = 0; k < j; k++)\n A[k][j] = A[j][k];\n A[j][j] += lambda;\n }\n // Cholesky solve\n const L = Array.from({ length: d }, () => new Float64Array(d));\n for (let i = 0; i < d; i++) {\n for (let j = 0; j <= i; j++) {\n let sum = A[i][j];\n for (let k = 0; k < j; k++)\n sum -= L[i][k] * L[j][k];\n L[i][j] = (i === j) ? Math.sqrt(Math.max(sum, 1e-12)) : (sum / (L[j][j] || 1e-12));\n }\n }\n const z = new Float64Array(d);\n for (let i = 0; i < d; i++) {\n let s = Zy[i];\n for (let k = 0; k < i; k++)\n s -= L[i][k] * z[k];\n z[i] = s / (L[i][i] || 1e-12);\n }\n const w = new Float64Array(d);\n for (let i = d - 1; i >= 0; i--) {\n let s = z[i];\n for (let k = i + 1; k < d; k++)\n s -= L[k][i] * w[k];\n w[i] = s / (L[i][i] || 1e-12);\n }\n this.w = w;\n this.mu = mu;\n this.sigma = sig;\n }\n predict(x) {\n if (!this.w || !this.mu || !this.sigma)\n return 0;\n let s = 0;\n for (let j = 0; j < this.w.length; j++) {\n const z = (x[j] - this.mu[j]) / this.sigma[j];\n s += this.w[j] * z;\n }\n return s;\n }\n}\n/* ===================== Weak Supervision ===================== */\nfunction generateWeakLabel(q, chunk, feats) {\n var _a;\n const txt = (chunk.rich || chunk.content || \"\");\n let y = 0;\n const qIsGoFunc = /\\bgo\\b/.test(q.toLowerCase()) && /(define|declare|function|func)/i.test(q);\n if (qIsGoFunc && containsGoCodeBlock(txt))\n y = Math.max(y, 1.0);\n const headHit = headingQueryMatch(chunk.heading || \"\", q);\n if (headHit >= 0.34 && containsCodeBlock(txt))\n y = Math.max(y, 0.8);\n const cosIdx = feats.names.indexOf(\"cosine_tfidf\");\n const bm25Idx = feats.names.indexOf(\"bm25\");\n const cos = cosIdx >= 0 ? feats.values[cosIdx] : 0;\n const bm = bm25Idx >= 0 ? 
feats.values[bm25Idx] : 0;\n if (cos > 0.25)\n y = Math.max(y, 0.6);\n if (bm > 1.0)\n y = Math.max(y, 0.6);\n const priorIdx = feats.names.indexOf(\"prior_score_base\");\n const prior = priorIdx >= 0 ? feats.values[priorIdx] : 0;\n if (((_a = chunk.score_base) !== null && _a !== void 0 ? _a : 0) > 0)\n y = Math.max(y, Math.min(0.6, 0.2 + 0.5 * prior));\n return y;\n}\nfunction sigmoid(x) {\n if (x >= 0) {\n const z = Math.exp(-x);\n return 1 / (1 + z);\n }\n else {\n const z = Math.exp(x);\n return z / (1 + z);\n }\n}\n/* ========================= MMR Filter ========================= */\nfunction mmrFilter(scored, lambda = 0.7, budgetChars = 1200) {\n const sel = [];\n const docs = scored.map(s => s.content || \"\");\n const { stats, tf: tfList } = buildCorpusStats(docs);\n const tfidf = tfList.map(tf => tfidfVector(tf, stats.idf));\n const selectedIdx = new Set();\n let used = 0;\n while (selectedIdx.size < scored.length) {\n let bestIdx = -1, bestVal = -Infinity;\n for (let i = 0; i < scored.length; i++) {\n if (selectedIdx.has(i))\n continue;\n const cand = scored[i];\n let red = 0;\n for (const j of selectedIdx) {\n const sim = cosine$1(tfidf[i], tfidf[j]);\n if (sim > red)\n red = sim;\n }\n const val = lambda * cand.score_rr - (1 - lambda) * red;\n if (val > bestVal) {\n bestVal = val;\n bestIdx = i;\n }\n }\n if (bestIdx < 0)\n break;\n const chosen = scored[bestIdx];\n const addLen = (chosen.content || \"\").length;\n if (used + addLen > budgetChars && sel.length > 0)\n break;\n sel.push(chosen);\n used += addLen;\n selectedIdx.add(bestIdx);\n }\n return sel;\n}\n/* ========================= Public API ========================= */\n/** Train per-query ridge model and score chunks. */\nfunction rerank(query, chunks, opts = {}) {\n var _a, _b;\n // License check removed // Premium feature - requires valid license\n const { lambdaRidge = 1e-2, randomProjDim = 32, exposeFeatures = true, attachFeatureNames = false, } = opts;\n const docs = [query, ...chunks.map(c => c.content || \"\")];\n const { stats, tf: tfRaw, docLens } = buildCorpusStats(docs);\n const tfidfAll = tfRaw.map(tf => tfidfVector(tf, stats.idf));\n const qTfRaw = tfRaw[0];\n const qTfIdf = tfidfAll[0];\n const projQ = randomProjDim > 0 ? projectSparse(qTfIdf, randomProjDim) : undefined;\n const X = [];\n const y = [];\n const featPacks = [];\n for (let i = 0; i < chunks.length; i++) {\n const c = chunks[i];\n const cTfRaw = tfRaw[i + 1];\n const cTfIdf = tfidfAll[i + 1];\n const projC = randomProjDim > 0 ? projectSparse(cTfIdf, randomProjDim, 1337 + i) : undefined;\n const feats = buildFeatures$1(query, c, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, docLens[i + 1] || 1, projQ, projC);\n featPacks.push(feats);\n X.push(feats.values);\n const label = generateWeakLabel(query, c, feats);\n y.push(label);\n }\n const allSame = y.every(v => Math.abs(v - y[0]) < 1e-9);\n if (allSame) {\n const cosIdx = featPacks[0].names.indexOf(\"cosine_tfidf\");\n if (cosIdx >= 0) {\n for (let i = 0; i < y.length; i++)\n y[i] = Math.max(0, Math.min(1, 0.2 + 0.6 * X[i][cosIdx]));\n }\n }\n const rr = new Ridge();\n rr.fit(X, y, lambdaRidge);\n let minS = Infinity, maxS = -Infinity;\n const rawScores = X.map(x => rr.predict(x));\n for (const s of rawScores) {\n if (s < minS)\n minS = s;\n if (s > maxS)\n maxS = s;\n }\n const range = Math.max(1e-9, maxS - minS);\n const featureNames = attachFeatureNames ? (_b = (_a = featPacks[0]) === null || _a === void 0 ? void 0 : _a.names) !== null && _b !== void 0 ? 
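// mmrFilter above is plain maximal marginal relevance: each step selects the
// candidate maximizing λ·relevance - (1 - λ)·(max similarity to the already
// selected set), trading relevance against redundancy under a character
// budget. With λ = 0.7, a candidate scoring 0.9 that is 0.8-similar to a
// chosen chunk values at 0.7·0.9 - 0.3·0.8 = 0.39 and loses to a fresh
// candidate scoring 0.6 (0.7·0.6 = 0.42). Minimal usage with hypothetical
// reranked chunks (illustrative only):
const mmrDemo = mmrFilter([
    { heading: 'Maps', content: 'Create a map with make.', score_rr: 0.9 },
    { heading: 'Maps again', content: 'Create a map with make.', score_rr: 0.85 },
    { heading: 'Channels', content: 'Channels connect goroutines.', score_rr: 0.6 },
], 0.7, 1200);
// The duplicate second chunk is penalized by its similarity to the first,
// so 'Channels' is selected ahead of it despite the lower score_rr.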
_b : [] : undefined;\n const scored = chunks.map((c, i) => {\n const s01 = (rawScores[i] - minS) / range;\n const p = sigmoid((rawScores[i] - 0.5 * (minS + maxS)) / (0.2 * range + 1e-6));\n const base = Object.assign(Object.assign({}, c), { score_rr: s01, p_relevant: p });\n if (exposeFeatures)\n base._features = X[i];\n if (featureNames)\n base._feature_names = featureNames;\n return base;\n });\n scored.sort((a, b) => b.score_rr - a.score_rr);\n return scored;\n}\n/** Filter scored chunks using probability/near-top thresholds and MMR coverage. */\nfunction filterMMR(scored, opts = {}) {\n // License check removed // Premium feature - requires valid license\n const { probThresh = 0.45, epsilonTop = 0.05, useMMR = true, mmrLambda = 0.7, budgetChars = 1200 } = opts;\n if (scored.length === 0)\n return [];\n const top = scored[0].score_rr;\n const bandKept = scored.filter(s => s.p_relevant >= probThresh && s.score_rr >= (top - epsilonTop));\n const seed = bandKept.length > 0 ? bandKept : [scored[0]];\n if (!useMMR) {\n const out = [];\n let used = 0;\n for (const s of seed) {\n const add = (s.content || \"\").length;\n if (used + add > budgetChars && out.length > 0)\n break;\n out.push(s);\n used += add;\n }\n return out;\n }\n const boosted = scored.map(s => (Object.assign(Object.assign({}, s), { score_rr: seed.includes(s) ? s.score_rr + 0.01 : s.score_rr })));\n return mmrFilter(boosted, mmrLambda, budgetChars);\n}\n/** Convenience: run rerank then filter. */\nfunction rerankAndFilter(query, chunks, opts = {}) {\n // License check removed // Premium feature - requires valid license\n const scored = rerank(query, chunks, opts);\n return filterMMR(scored, opts);\n}\n/* ========================= Debug Utilities ========================= */\nfunction explainFeatures(query, chunks, opts = {}) {\n var _a;\n const rpd = (_a = opts.randomProjDim) !== null && _a !== void 0 ? _a : 32;\n const docs = [query, ...chunks.map(c => c.content || \"\")];\n const { stats, tf: tfRaw } = buildCorpusStats(docs);\n const tfidfAll = tfRaw.map(tf => tfidfVector(tf, stats.idf));\n const projQ = rpd > 0 ? projectSparse(tfidfAll[0], rpd) : undefined;\n const namesRef = [];\n const rows = [];\n for (let i = 0; i < chunks.length; i++) {\n const feats = buildFeatures$1(query, chunks[i], tfidfAll[0], tfidfAll[i + 1], tfRaw[0], tfRaw[i + 1], stats, 1, projQ, rpd > 0 ? 
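// rerankAndFilter above is the intended entry point: rerank fits a tiny
// per-query ridge model on weakly labelled features (TF-IDF cosine, BM25,
// heading match, code flags, prior score, projection similarities), min-max
// normalizes the raw scores into score_rr with a sigmoid p_relevant, and
// filterMMR then keeps a probability / near-top band and applies MMR under
// the character budget. Minimal usage with hypothetical retrieval output
// (illustrative only):
const rrDemo = rerankAndFilter(
    'how do I define a function in Go',
    [
        { heading: 'Functions', content: 'Declare a function with func name(args) ret { body }', score_base: 0.7 },
        { heading: 'History', content: 'Go was announced in 2009.', score_base: 0.2 },
    ],
    { probThresh: 0.45, budgetChars: 800 });
console.log(rrDemo.map(c => c.heading)); // relevant, non-redundant chunks first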
projectSparse(tfidfAll[i + 1], rpd, 1337 + i) : undefined);\n if (namesRef.length === 0)\n namesRef.push(...feats.names);\n rows.push({ heading: chunks[i].heading, features: feats.values });\n }\n return { names: namesRef, rows };\n}\n\n// OmegaSumDet.ts — Deterministic, context-locked summarizer (v2.2)\n// -----------------------------------------------------------------------------\n// Goals\n// - ONLY summarize from the already-kept, top-ranked chunks (no leakage).\n// - Deterministic ordering, scoring, and composition.\n// - Stable weighting with explicit, normalized features.\n// - Code is treated as atomic and only included when query-aligned.\n// - Section diversity is capped to keep answers focused.\n// - Scored, stemmed, stopword-aware heading alignment (Dice) + small intent & RR boosts.\n// - Intent-aware code gating (e.g., require `func` for \"define function\" queries).\n// -----------------------------------------------------------------------------\nconst DEFAULTS = {\n maxAnswerChars: 900,\n maxBullets: 6,\n preferCode: true,\n includeCitations: true,\n addFooter: true,\n teWeight: 0.25,\n queryWeight: 0.45,\n evidenceWeight: 0.20,\n rrWeight: 0.10,\n codeBonus: 0.05,\n headingBonus: 0.04,\n jaccardDedupThreshold: 0.6,\n allowOffTopic: false,\n minQuerySimForCode: 0.40,\n maxSectionsInAnswer: 1,\n focusTopAlignedHeadings: 2,\n};\nfunction summarizeDeterministic(query, kept, opts) {\n var _a, _b, _c;\n // License check removed // Premium feature - requires valid license\n const O = Object.assign(Object.assign({}, DEFAULTS), (opts || {}));\n // 0) Normalize kept list with stable rrRank/rrScore defaults\n const K = kept.map((c, i) => (Object.assign(Object.assign({}, c), { rrRank: (typeof c.rrRank === \"number\" ? c.rrRank : i), rrScore: (typeof c.rrScore === \"number\" ? c.rrScore : (kept.length - i) / Math.max(1, kept.length)) })));\n if (K.length === 0) {\n return { text: \"No answer could be composed from the provided context.\", cites: [] };\n }\n // 1) Scored, stemmed, stopword-aware heading alignment + RR + intent bumps\n const intent = detectIntent(query);\n // normalize rrScore across kept for a small deterministic boost\n let rrMin = Infinity, rrMax = -Infinity;\n for (const c of K) {\n rrMin = Math.min(rrMin, (_a = c.rrScore) !== null && _a !== void 0 ? _a : 0);\n rrMax = Math.max(rrMax, (_b = c.rrScore) !== null && _b !== void 0 ? _b : 0);\n }\n const rrSpan = (rrMax - rrMin) || 1;\n function intentHit(c) {\n const hay = (c.heading + ' ' + (c.content || '') + ' ' + (c.rich || '')).toLowerCase();\n let hit = 0;\n if (intent.function && /\\bfunc\\b|\\bfunction\\b/.test(hay))\n hit += 1;\n if (intent.variable && /\\bvar\\b|\\bvariable\\b|\\b:=\\b/.test(hay))\n hit += 1;\n if (intent.constant && /\\bconst\\b|\\bconstant\\b/.test(hay))\n hit += 1;\n if (intent.concurrency && /\\bgoroutine\\b|\\bgo\\s+func\\b|\\bchan(nel)?\\b|\\bselect\\b/.test(hay))\n hit += 1;\n if (intent.loop && /\\bfor\\b/.test(hay))\n hit += 1;\n return Math.min(1, hit / 2); // 0..1\n }\n const alignScores = K.map(ch => diceStemmed(query, ch.heading)); // 0..1\n const composite = K.map((c, i) => {\n var _a;\n const align = alignScores[i] || 0;\n const rrNorm = (((_a = c.rrScore) !== null && _a !== void 0 ? 
_a : 0) - rrMin) / rrSpan; // 0..1\n const ih = intentHit(c); // 0..1\n // alignment dominates; rr+intent provide gentle nudges\n return align + 0.15 * rrNorm + 0.20 * ih;\n });\n // rank by composite desc, break ties by rrRank asc\n const allByComposite = K.map((_, i) => i).sort((i, j) => {\n if (composite[j] !== composite[i])\n return composite[j] - composite[i];\n return (K[i].rrRank - K[j].rrRank);\n });\n // choose top-N aligned headings; ensure at least one is chosen\n const alignedIdxs = allByComposite.slice(0, Math.max(1, O.focusTopAlignedHeadings));\n const allowedChunkIdx = new Set(alignedIdxs);\n // 2) Candidate extraction: sentences + fenced code blocks; stable order\n const queryTok = tokens(query);\n const candidates = [];\n for (let i = 0; i < K.length; i++) {\n if (!allowedChunkIdx.has(i))\n continue; // HARD mask to top aligned headings\n const ch = K[i];\n const base = (_c = ch.rich) !== null && _c !== void 0 ? _c : ch.content;\n const parts = splitCodeAware(base); // preserves order; code blocks are atomic\n let localSentIdx = 0;\n for (const part of parts) {\n const hasCode = part.kind === \"code\";\n const sentList = hasCode ? [part.text] : splitSentences(part.text);\n for (const s of sentList) {\n const trimmed = s.trim();\n if (!trimmed)\n continue;\n const f = buildFeatures(trimmed, queryTok, ch, O, hasCode);\n candidates.push({\n sent: trimmed,\n chunkIdx: i,\n sentIdx: localSentIdx++,\n heading: ch.heading,\n hasCode,\n features: f,\n score: 0,\n });\n }\n }\n }\n if (candidates.length === 0) {\n return { text: \"No answer could be composed from the aligned context.\", cites: [] };\n }\n // 3) Normalize numeric features across candidates → [0,1]\n normalizeFeature(candidates, \"querySim\");\n normalizeFeature(candidates, \"teGain\");\n normalizeFeature(candidates, \"evidence\");\n normalizeFeature(candidates, \"rr\");\n // 4) Combine with explicit weights + strict, intent-aware gates (deterministic)\n for (const c of candidates) {\n const f = c.features;\n let s = O.queryWeight * f.querySim +\n O.teWeight * f.teGain +\n O.evidenceWeight * f.evidence +\n O.rrWeight * f.rr;\n // Intent-aware code gating\n if (c.hasCode) {\n const align = alignScores[c.chunkIdx] || 0;\n const txt = c.sent.toLowerCase();\n let intentOK = true;\n if (intent.function)\n intentOK = /\\bfunc\\b/.test(txt);\n if (intent.variable)\n intentOK = intentOK && (/\\bvar\\b/.test(txt) || /\\b:=\\b/.test(txt));\n if (intent.constant)\n intentOK = intentOK && /\\bconst\\b/.test(txt);\n if (intent.concurrency)\n intentOK = intentOK && (/\\bgoroutine\\b|\\bgo\\s+func\\b|\\bchan(nel)?\\b|\\bselect\\b/.test(txt));\n if (!intentOK || align < 0.25 || f.querySim < O.minQuerySimForCode || f.codeRelevance <= 0.2) {\n s *= 0.5; // neuter misaligned code\n }\n else if (O.preferCode) {\n s += O.codeBonus * Math.min(1, f.codeRelevance * 1.25) * align;\n }\n }\n // Heading bonus scaled by composite alignment\n const hb = Math.min(1, composite[c.chunkIdx] || 0);\n if (hb > 0)\n s += O.headingBonus * hb;\n // Off-topic heading handling (shouldn’t happen due to hard mask, but keep as fail-safe)\n if (hb === 0 && !O.allowOffTopic) {\n s *= 0.1; // near-zero\n }\n c.score = clamp01p5(s);\n }\n // 5) TOTAL order sort with explicit tie-breakers (stable)\n candidates.sort((a, b) => {\n if (b.score !== a.score)\n return b.score - a.score;\n const ar = K[a.chunkIdx].rrRank, br = K[b.chunkIdx].rrRank;\n if (ar !== br)\n return ar - br; // better reranker rank first\n if (a.chunkIdx !== b.chunkIdx)\n return a.chunkIdx - 
b.chunkIdx; // earlier chunk first\n if (a.sentIdx !== b.sentIdx)\n return a.sentIdx - b.sentIdx; // earlier sentence first\n return a.sent.localeCompare(b.sent); // final deterministic tie-breaker\n });\n // 6) Deterministic dedup (Jaccard) — keep first occurrence only\n const picked = [];\n const seen = [];\n for (const c of candidates) {\n const t = c.sent.toLowerCase();\n let dup = false;\n for (const s of seen) {\n if (jaccardText(t, s) >= O.jaccardDedupThreshold) {\n dup = true;\n break;\n }\n }\n if (!dup) {\n picked.push(c);\n seen.push(t);\n }\n }\n // 7) Compose answer under budget with section cap\n const out = [];\n const citesSet = new Set();\n let budget = O.maxAnswerChars;\n const usedHeadings = new Set();\n for (const c of picked) {\n const h = K[c.chunkIdx].heading;\n const alreadyUsed = usedHeadings.has(h);\n // Enforce max distinct headings\n if (!alreadyUsed && usedHeadings.size >= O.maxSectionsInAnswer)\n continue;\n const unit = (picked.length > 1 ? `- ${c.sent}` : c.sent);\n const cost = unit.length + (out.length ? 1 : 0);\n if (cost > budget)\n continue;\n out.push(unit);\n budget -= cost;\n usedHeadings.add(h);\n if (O.includeCitations)\n citesSet.add(h);\n if (out.length >= O.maxBullets)\n break;\n }\n // Fallback if nothing fits budget\n if (out.length === 0 && picked.length > 0) {\n const c = picked[0];\n out.push(c.sent);\n citesSet.add(K[c.chunkIdx].heading);\n }\n let text = picked.length > 1 ? out.join(\"\\n\") : out.join(\"\");\n const cites = [...citesSet].map(h => ({ heading: h }));\n if (O.addFooter && cites.length > 0) {\n text += `\\n\\n---\\n**Sources used:**\\n` + cites.map(c => `- ${c.heading}`).join(\"\\n\");\n }\n return { text, cites };\n}\n/* -------------------- helpers (deterministic) -------------------- */\nfunction clamp01p5(x) {\n if (!Number.isFinite(x))\n return 0;\n return Math.max(0, Math.min(1.5, x));\n}\nfunction tokens(s) {\n var _a;\n return (_a = s.toLowerCase().match(/[a-z0-9_]+/g)) !== null && _a !== void 0 ? _a : [];\n}\n// code-aware split: returns a sequence of {kind: \"code\"|\"text\", text}\nfunction splitCodeAware(raw) {\n const out = [];\n const re = /```([\\s\\S]*?)```/g;\n let last = 0, m;\n while ((m = re.exec(raw)) !== null) {\n const before = raw.slice(last, m.index);\n if (before.trim())\n out.push({ kind: \"text\", text: normalizeWS(before) });\n const code = m[1];\n if (code.trim())\n out.push({ kind: \"code\", text: \"```\" + normalizeWS(code) + \"```\" });\n last = m.index + m[0].length;\n }\n const tail = raw.slice(last);\n if (tail.trim())\n out.push({ kind: \"text\", text: normalizeWS(tail) });\n return out;\n}\n// conservative sentence splitter (period, question, exclamation)\nfunction splitSentences(text) {\n // split on sentence boundaries; also split on blank lines to avoid giant paragraphs\n const parts = text.split(/(?<=[\\.\\?\\!])\\s+(?=[A-Z0-9[`])/g);\n return parts.flatMap(p => p.split(/\\n{2,}/g)).map(s => s.trim()).filter(Boolean);\n}\nfunction normalizeWS(s) {\n return s.replace(/\\r/g, \"\").replace(/[ \\t]+/g, \" \").replace(/\\n{3,}/g, \"\\n\\n\").trim();\n}\nfunction bow(ts) {\n var _a;\n const m = new Map();\n for (const t of ts)\n m.set(t, ((_a = m.get(t)) !== null && _a !== void 0 ? _a : 0) + 1);\n return m;\n}\nfunction cosine(a, b) {\n let dot = 0, na = 0, nb = 0;\n for (const [, v] of a)\n na += v * v;\n for (const [, v] of b)\n nb += v * v;\n const n = Math.sqrt(na || 1e-9) * Math.sqrt(nb || 1e-9);\n if (n === 0)\n return 0;\n const smaller = a.size < b.size ? 
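// Editor's usage sketch (hypothetical data): compose a grounded answer from already-kept,\n// top-ranked chunks. rrRank/rrScore are optional; stable defaults are derived from array\n// order when they are absent.\nconst summary = summarizeDeterministic('define a function in Go', [\n    { heading: 'Functions', content: 'Use the func keyword to define a function.', rrRank: 0, rrScore: 0.9 },\n    { heading: 'Variables', content: 'Use var or := to declare variables.', rrRank: 1, rrScore: 0.4 },\n], { maxBullets: 4, maxAnswerChars: 600 });\n// summary.text: composed answer (plus optional 'Sources used' footer); summary.cites: [{ heading }].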
a : b;\n const larger = a.size < b.size ? b : a;\n for (const [k, v] of smaller) {\n const w = larger.get(k);\n if (w)\n dot += v * w;\n }\n const val = dot / n;\n return Number.isFinite(val) ? Math.max(0, Math.min(1, val)) : 0;\n}\n// normalize each named feature across candidates → [0,1] deterministically\nfunction normalizeFeature(cands, key) {\n var _a, _b;\n let min = Infinity, max = -Infinity;\n for (const c of cands) {\n const v = (_a = c.features[key]) !== null && _a !== void 0 ? _a : 0;\n const vv = Number.isFinite(v) ? v : 0;\n if (vv < min)\n min = vv;\n if (vv > max)\n max = vv;\n }\n const span = (max - min) || 1;\n for (const c of cands) {\n const v = (_b = c.features[key]) !== null && _b !== void 0 ? _b : 0;\n const vv = Number.isFinite(v) ? v : 0;\n c.features[key] = (vv - min) / span;\n }\n}\nfunction jaccardText(a, b) {\n const A = new Set(a.split(/\\W+/).filter(Boolean));\n const B = new Set(b.split(/\\W+/).filter(Boolean));\n let inter = 0;\n for (const x of A)\n if (B.has(x))\n inter++;\n return inter / Math.max(1, A.size + B.size - inter);\n}\n/* ---------- stopwords + intent ---------- */\nconst STOP = new Set([\n 'a', 'an', 'the', 'and', 'or', 'but', 'if', 'then', 'else', 'of', 'in', 'on', 'for', 'to', 'from', 'by',\n 'with', 'without', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'as', 'at', 'it', 'this', 'that',\n 'these', 'those', 'i', 'you', 'he', 'she', 'we', 'they', 'do', 'does', 'did', 'how', 'what', 'when',\n 'where', 'why', 'which', 'can', 'could', 'should', 'would'\n]);\nfunction filterStops(ts) {\n return ts.filter(t => !STOP.has(t));\n}\nfunction detectIntent(q) {\n const s = q.toLowerCase();\n return {\n function: /\\bfunc(tion|)\\b|\\bdefine\\b|\\bdeclar(e|ation)\\b|\\bprototype\\b/.test(s),\n variable: /\\bvar(iable)?\\b|\\bdeclare\\b/.test(s),\n constant: /\\bconst(ant)?\\b/.test(s),\n concurrency: /\\bconcurrency\\b|\\bgoroutine\\b|\\bchannel\\b|\\bselect\\b/.test(s),\n loop: /\\bfor\\s+loop\\b|\\bloop\\b|\\bfor\\b/.test(s),\n };\n}\n/* ---------- light stemming + stemmed Dice alignment (0..1) ---------- */\nfunction stemToken(w) {\n let s = w.toLowerCase().replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, '');\n if (s.length <= 2)\n return s;\n if (s.endsWith('ies') && s.length > 4)\n s = s.slice(0, -3) + 'y';\n else if (/(xes|ches|shes|zes|sses)$/.test(s) && s.length > 4)\n s = s.replace(/(xes|ches|shes|zes|sses)$/, (m) => (m === 'sses' ? 'ss' : m.replace(/es$/, '')));\n else if (s.endsWith('s') && !/(ss|us)$/.test(s) && s.length > 3)\n s = s.slice(0, -1);\n const rules = [\n [/ization$|isation$/, 'ize'],\n [/ational$/, 'ate'],\n [/fulness$/, 'ful'],\n [/ousness$/, 'ous'],\n [/iveness$/, 'ive'],\n [/ability$/, 'able'],\n [/ness$/, ''],\n [/ment$/, ''],\n [/ations?$/, 'ate'],\n [/izer$|iser$/, 'ize'],\n [/ally$/, 'al'],\n [/ically$/, 'ic'],\n [/ingly$|edly$/, ''],\n [/ing$|ed$/, ''],\n ];\n for (const [re, rep] of rules) {\n if (re.test(s) && s.length - rep.length >= 4) {\n s = s.replace(re, rep);\n break;\n }\n }\n return s;\n}\nfunction stemTokens(str) {\n var _a;\n const raw = ((_a = str.toLowerCase().match(/[a-z0-9_]+/g)) !== null && _a !== void 0 ? _a : []);\n const stemmed = raw.map(stemToken).filter(Boolean);\n return filterStops(stemmed);\n}\n// Dice coefficient over stemmed tokens (0..1). 
Robust for short strings.\nfunction diceStemmed(a, b) {\n const A = new Set(stemTokens(a));\n const B = new Set(stemTokens(b));\n if (A.size === 0 || B.size === 0)\n return 0;\n let inter = 0;\n for (const t of A)\n if (B.has(t))\n inter++;\n return (2 * inter) / (A.size + B.size);\n}\n// Overlap between code tokens and query tokens (fraction of code tokens in query)\nfunction cCodeRelevance(sentence, queryTokens) {\n if (!sentence.includes(\"```\"))\n return 0;\n const codeTokens = tokens(sentence.replace(/```/g, \"\"));\n if (codeTokens.length === 0)\n return 0;\n const Q = new Set(queryTokens);\n let overlap = 0;\n for (const t of codeTokens) {\n if (Q.has(t))\n overlap++;\n }\n return overlap / codeTokens.length;\n}\n// Feature builder (deterministic). If you have TE per chunk/sentence, inject it here.\nfunction buildFeatures(sentence, queryTokens, ch, _O, hasCode) {\n // querySim (raw) via cosine on hashed BoW; normalized later\n const qvec = bow(queryTokens);\n const svec = bow(tokens(sentence));\n const querySimRaw = cosine(qvec, svec); // 0..1\n // sentence↔heading local alignment (stemmed); treat ≥0.15 as aligned\n const localAlignScore = diceStemmed(sentence, ch.heading);\n const headingAligned = localAlignScore >= 0.15;\n // teGain: placeholder (replace with your TE if you have it)\n const teGainRaw = headingAligned ? 1 : 0;\n // evidence: proxy for coverage/utility (bounded length effect)\n const evRaw = Math.min(1, tokens(sentence).length / 40);\n const rrRaw = (typeof ch.rrScore === \"number\") ? ch.rrScore : 0;\n const codeRel = hasCode ? cCodeRelevance(sentence, queryTokens) : 0;\n return {\n querySim: querySimRaw,\n teGain: teGainRaw,\n evidence: evRaw,\n rr: rrRaw,\n headingAligned,\n codeRelevance: codeRel,\n };\n}\n\n// infoflow/TransferEntropy.ts\n// Phase-1: streaming Transfer Entropy (TE) with linear-Gaussian approximation.\n// TE(X→Y) ≈ 1/2 * log( Var[e | Y_past] / Var[e | Y_past, X_past] ), in nats (set bits=true for /ln2)\nfunction zscore(v) {\n const n = v.length || 1;\n let m = 0;\n for (const x of v)\n m += x;\n m /= n;\n let s2 = 0;\n for (const x of v) {\n const d = x - m;\n s2 += d * d;\n }\n const inv = 1 / Math.sqrt(s2 / Math.max(1, n - 1) || 1e-12);\n return v.map(x => (x - m) * inv);\n}\nfunction ridgeSolve(X, y, l2) {\n var _a;\n // Solve (X^T X + l2 I) beta = X^T y via Cholesky (d is small here).\n const n = X.length, d = ((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n if (!n || !d)\n return new Array(d).fill(0);\n const XtX = new Float64Array(d * d);\n const Xty = new Float64Array(d);\n for (let i = 0; i < n; i++) {\n const row = X[i];\n const yi = y[i];\n for (let j = 0; j < d; j++) {\n Xty[j] += row[j] * yi;\n for (let k = 0; k <= j; k++)\n XtX[j * d + k] += row[j] * row[k];\n }\n }\n for (let j = 0; j < d; j++) {\n for (let k = 0; k < j; k++)\n XtX[k * d + j] = XtX[j * d + k];\n XtX[j * d + j] += l2;\n }\n // Cholesky\n const L = new Float64Array(d * d);\n for (let i = 0; i < d; i++) {\n for (let j = 0; j <= i; j++) {\n let s = XtX[i * d + j];\n for (let k = 0; k < j; k++)\n s -= L[i * d + k] * L[j * d + k];\n L[i * d + j] = (i === j) ? 
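// Editor's worked example for the alignment scorer above: stemTokens('define a function')\n// → {'define','function'} (stopword 'a' dropped), while stemTokens('Defining Functions')\n// → {'defin','function'} ('defining' loses 'ing', 'functions' loses the plural 's'), so\n// diceStemmed('define a function', 'Defining Functions') = 2·1 / (2 + 2) = 0.5.\nconst headingAlign = diceStemmed('define a function', 'Defining Functions'); // 0.5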
Math.sqrt(Math.max(s, 1e-12)) : s / (L[j * d + j] || 1e-12);\n }\n }\n // Solve L z = Xty\n const z = new Float64Array(d);\n for (let i = 0; i < d; i++) {\n let s = Xty[i];\n for (let k = 0; k < i; k++)\n s -= L[i * d + k] * z[k];\n z[i] = s / (L[i * d + i] || 1e-12);\n }\n // Solve L^T beta = z\n const beta = new Float64Array(d);\n for (let i = d - 1; i >= 0; i--) {\n let s = z[i];\n for (let k = i + 1; k < d; k++)\n s -= L[k * d + i] * beta[k];\n beta[i] = s / (L[i * d + i] || 1e-12);\n }\n return Array.from(beta);\n}\nfunction mseResidual(X, y, beta) {\n const n = X.length || 1;\n let s = 0;\n for (let i = 0; i < n; i++) {\n const row = X[i];\n let p = 0;\n for (let j = 0; j < row.length; j++)\n p += row[j] * beta[j];\n const e = y[i] - p;\n s += e * e;\n }\n return s / n;\n}\n// Build supervised datasets for Y_t and regressors made of past Y/X lags.\nfunction makeDesign(ySeq, xSeq, L, LX) {\n // ySeq[i] and xSeq[i] are vectors at time i (we’ll average to 1D to keep it cheap)\n const y1d = ySeq.map(v => v.reduce((a, b) => a + b, 0) / Math.max(1, v.length));\n const x1d = xSeq.map(v => v.reduce((a, b) => a + b, 0) / Math.max(1, v.length));\n const N = y1d.length;\n const rowsY = [];\n const rowsYX = [];\n const target = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n // target: current Y (scalar)\n target.push([y1d[t]]);\n // past Y\n const ylags = [];\n for (let k = 1; k <= L; k++)\n ylags.push(y1d[t - k]);\n // past X\n const xlags = [];\n for (let k = 1; k <= LX; k++)\n xlags.push(x1d[t - k]);\n rowsY.push(ylags);\n rowsYX.push([...ylags, ...xlags]);\n }\n // standardize columns for stability\n const colZ = (M) => {\n var _a;\n const n = M.length, d = ((_a = M[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n const out = Array.from({ length: n }, () => new Array(d).fill(0));\n for (let j = 0; j < d; j++) {\n const col = new Array(n);\n for (let i = 0; i < n; i++)\n col[i] = M[i][j];\n const zs = zscore(col);\n for (let i = 0; i < n; i++)\n out[i][j] = zs[i];\n }\n return out;\n };\n return { XY: colZ(rowsY), XYX: colZ(rowsYX), y: target.map(v => v[0]) };\n}\nclass TransferEntropy {\n constructor(opts = {}) {\n this.xBuf = [];\n this.yBuf = [];\n this.opts = Object.assign({ window: 256, condLags: 1, xLags: 1, ridge: 1e-3, bits: true }, opts);\n }\n /** Push a synchronized sample pair (vectors OK). */\n push(x, y) {\n const X = Array.isArray(x) ? x : [x];\n const Y = Array.isArray(y) ? y : [y];\n this.xBuf.push(X);\n this.yBuf.push(Y);\n const W = this.opts.window;\n if (this.xBuf.length > W) {\n this.xBuf.shift();\n this.yBuf.shift();\n }\n }\n /** Estimate TE(X→Y) over the current window. */\n estimate() {\n const n = this.xBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (n < Math.max(L, LX) + 5)\n return 0;\n const { XY, XYX, y } = makeDesign(this.yBuf, this.xBuf, L, LX);\n if (!XY.length || !XYX.length)\n return 0;\n // H1: regress Y_t on Y_{t-1..t-L}\n const b1 = ridgeSolve(XY, y, this.opts.ridge);\n const v1 = mseResidual(XY, y, b1);\n // H2: regress Y_t on [Y_{t-1..t-L}, X_{t-1..t-L}]\n const b2 = ridgeSolve(XYX, y, this.opts.ridge);\n const v2 = mseResidual(XYX, y, b2);\n // TE ≈ 0.5 * log( v1 / v2 )\n const teNats = 0.5 * Math.log(Math.max(1e-12, v1) / Math.max(1e-12, v2));\n const te = Math.max(0, teNats); // no negatives (numerical guard)\n return this.opts.bits ? 
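// Editor's sanity sketch for the closed-form ridge above: with orthonormal features,\n// XᵀX = I, so each coefficient is simply shrunk by 1/(1 + l2).\nconst betaDemo = ridgeSolve([[1, 0], [0, 1]], [2, 3], 0.001); // ≈ [1.998, 2.997]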
(te / Math.LN2) : te;\n }\n}\nclass InfoFlowGraph {\n constructor(defaultOpts = {}) {\n this.defaultOpts = defaultOpts;\n this.monitors = new Map();\n // License check removed // Premium feature - requires valid license\n }\n get(name) {\n if (!this.monitors.has(name))\n this.monitors.set(name, new TransferEntropy(this.defaultOpts));\n return this.monitors.get(name);\n }\n snapshot() {\n const out = {};\n for (const [k, mon] of this.monitors)\n out[k] = Number(mon.estimate().toFixed(4));\n return out;\n }\n}\n\n// src/infoflow/TransferEntropyPWS.ts\n// Phase-2 TE-PWS: importance sampling for rare events + path-weight sampling (PWS)\n// API mirrors Phase-1 so it plugs in with minimal edits.\n// --- small helpers ---\nfunction meanStd(arr) {\n if (arr.length === 0)\n return { m: 0, s: 0 };\n let m = 0;\n for (const v of arr)\n m += v;\n m /= arr.length;\n let v = 0;\n for (const x of arr) {\n const d = x - m;\n v += d * d;\n }\n return { m, s: Math.sqrt(v / Math.max(1, arr.length)) || 1e-12 };\n}\nfunction l2(a) { let s = 0; for (let i = 0; i < a.length; i++)\n s += a[i] * a[i]; return Math.sqrt(s); }\nfunction sub(a, b) { const n = Math.min(a.length, b.length); const o = new Array(n); for (let i = 0; i < n; i++)\n o[i] = a[i] - b[i]; return o; }\nfunction concat(a, b) { const o = new Array(a.length + b.length); let k = 0; for (const v of a)\n o[k++] = v; for (const v of b)\n o[k++] = v; return o; }\nfunction gaussianVec(a, b, s) {\n // product kernel with shared bandwidth\n const n = Math.min(a.length, b.length);\n let q = 0;\n for (let i = 0; i < n; i++) {\n const d = a[i] - b[i];\n q += d * d;\n }\n const ss = s * s || 1e-12;\n return Math.exp(-0.5 * q / ss) / Math.pow(Math.sqrt(2 * Math.PI * ss), n);\n}\nclass TransferEntropyPWS {\n constructor(opts = {}) {\n this.xBuf = [];\n this.yBuf = [];\n this.yDiffBuf = []; // ||ΔY|| magnitude for rarity\n this.wBuf = []; // per-sample weights (importance * decay)\n this.opts = Object.assign({ window: 256, condLags: 1, xLags: 1, normalize: true, tailQuantile: 0.9, tailBoost: 4, decay: 1.0, usePWS: false, jitterSigma: 0.15, pwsIters: 8, bandwidth: 0, ridge: 1e-6, bits: true }, opts);\n }\n /** Push one synchronized sample (vectors OK). */\n push(x, y) {\n const X = Array.isArray(x) ? x.slice() : [x];\n const Y = Array.isArray(y) ? y.slice() : [y];\n // Δ||Y|| for rarity\n const prev = this.yBuf.length ? this.yBuf[this.yBuf.length - 1] : Y;\n const d = l2(sub(Y, prev));\n this.xBuf.push(X);\n this.yBuf.push(Y);\n this.yDiffBuf.push(d);\n // time decay (most recent → weight 1)\n const tDecay = this.opts.decay;\n const wDecay = tDecay < 1 && this.xBuf.length > 1\n ? Math.pow(tDecay, this.xBuf.length - 1)\n : 1;\n // placeholder weight now; we’ll update after we know tail threshold\n this.wBuf.push(wDecay);\n // maintain window\n while (this.xBuf.length > this.opts.window) {\n this.xBuf.shift();\n this.yBuf.shift();\n this.yDiffBuf.shift();\n this.wBuf.shift();\n }\n }\n /** Basic Phase-2 call: choose PWS or vanilla IS+KDE based on opts.usePWS */\n estimate() {\n return this.opts.usePWS ? this.estimatePWS() : this.estimateIS();\n }\n /** Vanilla importance-weighted TE via KDE (no path jitter). 
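// Editor's usage sketch (synthetic streams): TE(X→Y) should come out positive when Y is\n// driven by X's past. For intuition with the estimator above: if conditioning on X's lags\n// shrinks the residual variance from v1 = 1.0 to v2 = 0.8, then\n// TE ≈ 0.5·ln(1.0/0.8) ≈ 0.112 nats ≈ 0.161 bits.\nconst flow = new InfoFlowGraph({ window: 256, condLags: 1, xLags: 1, bits: true });\nlet prevX = 0;\nfor (let t = 0; t < 300; t++) {\n    const x = Math.sin(t / 7) + 0.05 * Math.random();\n    const y = 0.8 * prevX + 0.1 * Math.random(); // Y follows X with a one-step lag\n    flow.get('demo:X->Y').push(x, y);\n    prevX = x;\n}\nconsole.log(flow.snapshot()); // e.g. { 'demo:X->Y': <bits, rounded to 4 dp> }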
*/\n estimateIS() {\n const N = this.yBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (N <= Math.max(L, LX) + 2)\n return 0;\n // compute tail threshold on recent Δ||Y||\n const diffs = this.yDiffBuf.slice();\n const thr = quantile(diffs, this.opts.tailQuantile);\n // update importance weights\n for (let i = 0; i < this.wBuf.length; i++) {\n const tail = diffs[i] >= thr ? this.opts.tailBoost : 1;\n this.wBuf[i] = Math.max(1e-8, this.wBuf[i] * tail);\n }\n // Build contexts\n const samples = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n const y = this.yBuf[t];\n const yPast = stackPast(this.yBuf, t, L);\n const xPast = stackPast(this.xBuf, t, LX);\n samples.push({ y, yPast, xPast, w: this.wBuf[t] });\n }\n if (samples.length < 4)\n return 0;\n // bandwidth selection\n const ySc = flatten(samples.map(s => s.y));\n const b = this.opts.bandwidth > 0 ? this.opts.bandwidth\n : silverman(ySc);\n // H(Y|Ypast) and H(Y|Ypast,Xpast) via KDE density ratio\n const HY_Y = condEntropyKDE(samples, 'yPast', b, this.opts.ridge);\n const HY_YX = condEntropyKDE(samples, 'yPast+xPast', b, this.opts.ridge);\n const te = Math.max(0, HY_Y - HY_YX); // >= 0 numerically clipped\n return this.opts.bits ? te / Math.log(2) : te;\n }\n /** Path-Weight Sampling: jitter past contexts, average conditional entropies. */\n estimatePWS() {\n const N = this.yBuf.length;\n const L = Math.max(1, this.opts.condLags | 0);\n const LX = Math.max(1, this.opts.xLags | 0);\n if (N <= Math.max(L, LX) + 2)\n return 0;\n // tail-aware importance weights\n const diffs = this.yDiffBuf.slice();\n const thr = quantile(diffs, this.opts.tailQuantile);\n for (let i = 0; i < this.wBuf.length; i++) {\n const tail = diffs[i] >= thr ? this.opts.tailBoost : 1;\n this.wBuf[i] = Math.max(1e-8, this.wBuf[i] * tail);\n }\n const samples = [];\n for (let t = Math.max(L, LX); t < N; t++) {\n const y = this.yBuf[t];\n const yPast = stackPast(this.yBuf, t, L);\n const xPast = stackPast(this.xBuf, t, LX);\n samples.push({ y, yPast, xPast, w: this.wBuf[t] });\n }\n if (samples.length < 4)\n return 0;\n const ySc = flatten(samples.map(s => s.y));\n const b = this.opts.bandwidth > 0 ? this.opts.bandwidth : silverman(ySc);\n const J = Math.max(1, this.opts.pwsIters | 0);\n const jSig = this.opts.jitterSigma;\n // baseline entropies\n const baseHY_Y = condEntropyKDE(samples, 'yPast', b, this.opts.ridge);\n const baseHY_YX = condEntropyKDE(samples, 'yPast+xPast', b, this.opts.ridge);\n // jittered contexts\n let accY = 0, accYX = 0;\n for (let j = 0; j < J; j++) {\n const jittered = jitterSamples(samples, jSig);\n accY += condEntropyKDE(jittered, 'yPast', b, this.opts.ridge);\n accYX += condEntropyKDE(jittered, 'yPast+xPast', b, this.opts.ridge);\n }\n const HY_Y = 0.5 * baseHY_Y + 0.5 * (accY / J);\n const HY_YX = 0.5 * baseHY_YX + 0.5 * (accYX / J);\n const te = Math.max(0, HY_Y - HY_YX);\n return this.opts.bits ? te / Math.log(2) : te;\n }\n}\n/** Manage many labeled links, PWS-enabled. Same API as Phase-1. 
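// Editor's usage sketch: the Phase-2 estimator upweights rare, large ||ΔY|| samples via\n// tailQuantile/tailBoost and, with usePWS, averages KDE entropies over jittered past\n// contexts instead of a single pass.\nconst tePws = new TransferEntropyPWS({\n    window: 128, tailQuantile: 0.9, tailBoost: 4,\n    usePWS: true, pwsIters: 8, jitterSigma: 0.15, bits: true,\n});\ntePws.push([0.1, 0.2], [0.3]); // vector-valued samples are accepted\ntePws.push([0.2, 0.1], [0.9]); // a large ΔY jump lands in the boosted tail\n// ...after more than max(condLags, xLags) + 2 pushes:\nconst teBits = tePws.estimate();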
*/\nclass InfoFlowGraphPWS {\n constructor(defaultOpts = {}) {\n this.defaultOpts = defaultOpts;\n this.monitors = new Map();\n // License check removed // Premium feature - requires valid license\n }\n get(name) {\n if (!this.monitors.has(name))\n this.monitors.set(name, new TransferEntropyPWS(this.defaultOpts));\n return this.monitors.get(name);\n }\n snapshot() {\n const out = {};\n for (const [k, mon] of this.monitors)\n out[k] = mon.estimate();\n return out;\n }\n}\n// ========================= internals =========================\nfunction stackPast(buf, t, L) {\n var _a;\n const out = [];\n for (let l = 1; l <= L; l++) {\n const v = (_a = buf[t - l]) !== null && _a !== void 0 ? _a : buf[0];\n for (let i = 0; i < v.length; i++)\n out.push(v[i]);\n }\n return out;\n}\nfunction flatten(mats) {\n const out = [];\n for (const v of mats)\n for (const x of v)\n out.push(x);\n return out;\n}\nfunction silverman(vals) {\n // Silverman's rule-of-thumb for Gaussian KDE (per-dim averaged)\n if (vals.length < 2)\n return 1;\n const { s } = meanStd(vals);\n const n = vals.length;\n return 1.06 * s * Math.pow(n, -1 / 5); // scalar, used for product kernel\n}\nfunction quantile(arr, q) {\n if (arr.length === 0)\n return 0;\n const a = arr.slice().sort((x, y) => x - y);\n const idx = Math.min(a.length - 1, Math.max(0, Math.floor(q * (a.length - 1))));\n return a[idx];\n}\nfunction condEntropyKDE(samples, mode, bw, ridge) {\n // H(Y|C) ≈ E[-log p(y|c)] with KDE ratio: p(y,c)/p(c)\n // Use importance weights w and product Gaussian kernels with shared bw.\n const useXY = mode === 'yPast+xPast';\n let totalW = 0, acc = 0;\n // Pre-extract contexts\n const C = samples.map(s => useXY ? concat(s.yPast, s.xPast) : s.yPast);\n const Y = samples.map(s => s.y);\n const W = samples.map(s => s.w);\n for (let i = 0; i < samples.length; i++) {\n const ci = C[i], yi = Y[i], wi = W[i];\n // joint density p(y,c) ~ sum_j w_j K_c(ci,cj) K_y(yi,yj)\n // context density p(c) ~ sum_j w_j K_c(ci,cj)\n let num = 0, den = 0;\n for (let j = 0; j < samples.length; j++) {\n const kc = gaussianVec(ci, C[j], bw);\n den += W[j] * kc;\n num += W[j] * kc * gaussianVec(yi, Y[j], bw);\n }\n const p = Math.max(ridge, num / Math.max(ridge, den));\n acc += -Math.log(p) * wi;\n totalW += wi;\n }\n return (totalW > 0) ? acc / totalW : 0;\n}\nfunction jitterSamples(samples, sigmaFrac) {\n var _a, _b;\n if (sigmaFrac <= 0)\n return samples;\n // Estimate per-dim std of yPast across buffer to scale jitter\n const allYp = samples.map(s => s.yPast);\n const dims = ((_a = allYp[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n const perDim = new Array(dims).fill(0);\n // compute std per dim\n for (let d = 0; d < dims; d++) {\n const vals = [];\n for (const v of allYp)\n vals.push((_b = v[d]) !== null && _b !== void 0 ? 
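// Editor's note: with opts.bandwidth = 0 the KDE falls back to Silverman's rule above,\n// h = 1.06·s·n^(−1/5); e.g. s = 1, n = 100 gives h ≈ 1.06 × 100^(−0.2) ≈ 0.42.\nconst h = silverman(pooledValues); // pooledValues: flattened sample values (hypothetical)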
_b : 0);\n perDim[d] = meanStd(vals).s || 1e-3;\n }\n // jitter\n const out = new Array(samples.length);\n for (let i = 0; i < samples.length; i++) {\n const s = samples[i];\n const yp = s.yPast.slice();\n for (let d = 0; d < yp.length; d++) {\n const z = gauss() * sigmaFrac * perDim[d];\n yp[d] += z;\n }\n out[i] = { y: s.y, yPast: yp, xPast: s.xPast, w: s.w };\n }\n return out;\n}\nfunction gauss() {\n // Box-Muller\n let u = 0, v = 0;\n while (u === 0)\n u = Math.random();\n while (v === 0)\n v = Math.random();\n return Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);\n}\n\n// TEController.ts — TE-PWS closed-loop tuner for Ω\n/* ------------------------ utils ------------------------ */\nfunction clampNumber(x, lo, hi) {\n return Math.max(lo, Math.min(hi, x));\n}\nfunction withinBand(v, band) {\n return v >= band[0] && v <= band[1];\n}\n/* ------------------------ controller ------------------------ */\nclass TEController {\n constructor(params = {}) {\n this.qCount = 0;\n this.emaBeta = 0.2; // EMA smoothing for TE\n // License check removed // Premium feature - requires valid license\n const defaultLimits = {\n alpha: [0.4, 0.98],\n sigma: [0.12, 1.0],\n ridge: [0.01, 0.2],\n probThresh: [0.3, 0.7],\n mmrLambda: [0.4, 0.9],\n budgetChars: [600, 2400],\n };\n const defaultStep = {\n alpha: 0.03,\n sigma: 0.04,\n ridge: 0.01,\n probThresh: 0.03,\n mmrLambda: 0.05,\n budgetChars: 120,\n };\n const defaults = {\n targets: {\n q2score: [0.01, 0.10],\n feat2score: [0.01, 0.10],\n kept2sum: [0.01, 0.10],\n loopMax: 0.25,\n },\n limits: defaultLimits,\n step: defaultStep,\n cooldown: 2,\n maxPerSessionAdjusts: 24,\n trustMinSamples: 8,\n };\n this.p = Object.assign(Object.assign(Object.assign({}, defaults), params), { targets: Object.assign(Object.assign({}, defaults.targets), (params.targets || {})), limits: Object.assign(Object.assign({}, defaultLimits), (params.limits || {})), step: Object.assign(Object.assign({}, defaultStep), (params.step || {})) });\n this.s = { lastAdjustAt: -999, totalAdjusts: 0, ema: {}, history: [] };\n }\n /** Update EMA from a TE snapshot. */\n pushTE(teSnap) {\n var _a;\n this.qCount++;\n for (const [k, v] of Object.entries(teSnap || {})) {\n const prev = (_a = this.s.ema[k]) !== null && _a !== void 0 ? _a : v;\n this.s.ema[k] = prev + this.emaBeta * (v - prev);\n }\n }\n /** Try one adjustment; returns {knobs?, note?}. Only adjusts if safe. */\n maybeAdjust(current) {\n var _a, _b, _c, _d;\n if (this.qCount < this.p.trustMinSamples)\n return {};\n if (this.s.totalAdjusts >= this.p.maxPerSessionAdjusts)\n return {};\n if (this.qCount - this.s.lastAdjustAt < this.p.cooldown)\n return {};\n const te = this.s.ema;\n const { q2score, feat2score, kept2sum, loopMax } = this.p.targets;\n const out = Object.assign({}, current);\n let changed = null;\n const pick = (cand) => {\n if (!changed)\n changed = cand; // single-knob change per step\n };\n const tQS = (_a = te['Retriever:Q->Score']) !== null && _a !== void 0 ? _a : 0;\n const tFS = (_b = te['OmegaRR:Feat->Score']) !== null && _b !== void 0 ? _b : 0;\n const tKS = (_c = te['Omega:Kept->Summary']) !== null && _c !== void 0 ? _c : 0;\n const tLoop = (_d = te['Reservoir:Loop']) !== null && _d !== void 0 ? 
_d : 0; // optional if you wire it\n // 1) Retrieval signal shaping\n if (!withinBand(tQS, q2score)) {\n if (tQS < q2score[0]) {\n pick({ param: 'alpha', delta: +this.p.step.alpha, why: `Q→Score low (${tQS.toFixed(3)} < ${q2score[0]})` });\n if (!changed)\n pick({ param: 'sigma', delta: -this.p.step.sigma, why: `Q→Score low, sharpen σ` });\n }\n else {\n pick({ param: 'sigma', delta: +this.p.step.sigma, why: `Q→Score high (${tQS.toFixed(3)} > ${q2score[1]})` });\n if (!changed)\n pick({ param: 'alpha', delta: -this.p.step.alpha, why: `Q→Score high, blend TF-IDF more` });\n }\n }\n // 2) Reranker feature effectiveness via ridge\n if (!changed && !withinBand(tFS, feat2score)) {\n if (tFS < feat2score[0]) {\n pick({ param: 'ridge', delta: -this.p.step.ridge, why: `Feat→Score low (${tFS.toFixed(3)}): loosen λ` });\n }\n else {\n pick({ param: 'ridge', delta: +this.p.step.ridge, why: `Feat→Score high (${tFS.toFixed(3)}): stabilize λ` });\n }\n }\n // 3) Grounding strength into summary via kept set\n if (!changed && !withinBand(tKS, kept2sum)) {\n if (tKS < kept2sum[0]) {\n pick({ param: 'probThresh', delta: -this.p.step.probThresh, why: `Kept→Summary low (${tKS.toFixed(3)}): expand kept` });\n if (!changed)\n pick({ param: 'budgetChars', delta: +this.p.step.budgetChars, why: `Kept→Summary low: widen budget` });\n }\n else {\n pick({ param: 'probThresh', delta: +this.p.step.probThresh, why: `Kept→Summary high: tighten kept` });\n }\n }\n // 4) Optional loop stability guard\n if (!changed && loopMax != null && tLoop > loopMax) {\n pick({ param: 'ridge', delta: +this.p.step.ridge, why: `Loop TE ${tLoop.toFixed(3)} > ${loopMax}: damp` });\n if (!changed)\n pick({ param: 'alpha', delta: -this.p.step.alpha, why: `Loop TE high: reduce dense gain` });\n }\n if (!changed)\n return {}; // nothing to do\n // ---- APPLY CHANGE (narrowed & typed) ----\n const change = changed; // non-null\n const limitsTuple = this.p.limits[change.param];\n const lo = limitsTuple[0];\n const hi = limitsTuple[1];\n const cur = out[change.param];\n const next = clampNumber(cur + change.delta, lo, hi);\n out[change.param] = next;\n // commit\n this.s.lastAdjustAt = this.qCount;\n this.s.totalAdjusts++;\n this.s.history.push({ param: change.param, oldVal: current[change.param], newVal: next, why: change.why });\n const note = `auto-adjust ${String(change.param)}: ${current[change.param]} → ${next} (${change.why})`;\n return { knobs: out, note };\n }\n getHistory() { return this.s.history.slice(-8); } // recent changes\n reset() {\n this.s = { lastAdjustAt: -999, totalAdjusts: 0, ema: {}, history: [] };\n this.qCount = 0;\n }\n}\n\n// Markdown parsing utilities\n// Extracted from workers for reuse\nconst FENCE_RE = /```[\\s\\S]*?```/g;\nconst LINK_RE = /\\[([^\\]]+)\\]\\(([^)]+)\\)/g;\nfunction stripForIndex(md, opts) {\n let s = md;\n if (opts.stripCode) {\n // Preserve a 1-line signature from the first non-empty line inside each fenced block.\n s = s.replace(FENCE_RE, m => {\n const lines = m.split('\\n').slice(1, -1);\n const sig = (lines.find(l => l.trim()) || '').trim();\n return sig ? 
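// Editor's closed-loop sketch: feed EMA'd TE snapshots per query, then request at most one\n// safe knob change per cooldown window. The snapshot keys are the link labels the controller\n// reads; the knob names mirror the default limits above.\nconst ctl = new TEController({ cooldown: 2, trustMinSamples: 8 });\nlet knobs = { alpha: 0.8, sigma: 0.4, ridge: 0.05, probThresh: 0.45, mmrLambda: 0.7, budgetChars: 1200 };\nfor (let q = 0; q < 10; q++) {\n    ctl.pushTE({ 'Retriever:Q->Score': 0.004, 'OmegaRR:Feat->Score': 0.05, 'Omega:Kept->Summary': 0.03 });\n    const { knobs: next, note } = ctl.maybeAdjust(knobs);\n    if (next) knobs = next; // e.g. 'auto-adjust alpha: 0.8 → 0.83 (Q→Score low ...)'\n}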
`\\n${sig}\\n` : '\\n<code omitted>\\n';\n });\n }\n if (opts.stripLinks) {\n // Keep anchor text, drop target\n s = s.replace(LINK_RE, '$1');\n }\n // Light cleanup\n s = s.replace(/[ \\t]+/g, ' ')\n .replace(/\\n{3,}/g, '\\n\\n')\n .trim();\n return s;\n}\nfunction parseMarkdownToSections(md, opts = { stripCode: true, stripLinks: true }) {\n const lines = md.split(/\\r?\\n/);\n const root = { id: 0, level: 1, heading: '(root)', content: '', rich: '', children: [] };\n let current = null;\n const stack = [root];\n let nextId = 1;\n let buf = [];\n const flush = (buf, target) => {\n if (!target)\n return;\n const rich = buf.join('\\n').trim();\n target.rich = rich;\n target.content = stripForIndex(rich, opts);\n };\n for (const line of lines) {\n const mH = /^(#{2,6})\\s+(.*)$/.exec(line);\n if (mH) {\n // heading line\n flush(buf, current);\n buf = [];\n const level = mH[1].length;\n const heading = mH[2].trim();\n const sec = { id: nextId++, level, heading, content: '', rich: '', children: [] };\n // Find proper parent\n while (stack.length && stack[stack.length - 1].level >= level)\n stack.pop();\n const parent = stack[stack.length - 1] || root;\n parent.children.push(sec);\n sec.parent = parent.id;\n stack.push(sec);\n current = sec;\n }\n else {\n buf.push(line);\n }\n }\n flush(buf, current);\n return root;\n}\nfunction backfillEmptyParents(root) {\n const visit = (s) => {\n var _a;\n s.children.forEach(visit);\n // Backfill typical chapter parents (##) only; adjust as needed\n if (s.level === 2) {\n const isEmpty = !s.content || !s.content.trim();\n if (isEmpty) {\n const childSummaries = s.children\n .filter(c => (c.content || c.rich).trim())\n .slice(0, 2)\n .map(c => {\n const body = (c.content || c.rich).split('\\n').slice(0, 3).join('\\n');\n return `### ${c.heading}\\n${body}`;\n });\n if (childSummaries.length) {\n s.content = childSummaries.join('\\n\\n');\n if (!((_a = s.rich) === null || _a === void 0 ? void 0 : _a.trim())) {\n s.rich = `> Summary of subsections:\\n\\n${childSummaries.join('\\n\\n')}`;\n }\n }\n }\n }\n };\n visit(root);\n}\nfunction flattenSections(root) {\n const out = [];\n const walk = (s) => {\n if (s.id !== 0 && s.heading) {\n out.push({ heading: s.heading, content: s.content, rich: s.rich, secId: s.id, level: s.level });\n }\n s.children.forEach(walk);\n };\n walk(root);\n return out;\n}\n\n// Auto-tuning utilities for hyperparameter optimization\n// Extracted from dev-worker for reuse\n/**\n * Sample queries from corpus\n */\nfunction sampleQueriesFromCorpus(chunks, n, useStem) {\n const out = [];\n for (let i = 0; i < n; i++) {\n const s = chunks[Math.floor(Math.random() * chunks.length)];\n // short synthetic queries from headings + nouns-ish tokens\n const toks = tokenize$1((s.heading + ' ' + s.content).slice(0, 400), useStem)\n .filter(t => t.length > 3)\n .slice(0, 40);\n const uniq = Array.from(new Set(toks));\n out.push(uniq.slice(0, 6).join(' '));\n }\n return out;\n}\n/**\n * Compute penalty for configuration complexity\n */\nfunction penalty(cfg) {\n const lmCost = (cfg.landmarks - 128) / 512;\n const vocabCost = (cfg.vocab - 8000) / 24000;\n const preCost = (cfg.prefilter - 200) / 1200;\n return 0.02 * (lmCost + vocabCost + preCost);\n}\n/**\n * Jaccard similarity between two index arrays\n */\nfunction jaccard(a, b) {\n const A = new Set(a);\n const B = new Set(b);\n let inter = 0;\n for (const x of A)\n if (B.has(x))\n inter++;\n const uni = new Set([...A, ...B]).size;\n return uni ? 
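// Editor's usage sketch: the parser keys on headings of level 2–6 (## through ######).\n// Hypothetical two-section document:\nconst tree = parseMarkdownToSections('## Intro\nSome text.\n\n### Detail\nMore text.');\nbackfillEmptyParents(tree); // synthesize content for empty ## parents from their children\nconst flat = flattenSections(tree); // → [{ heading, content, rich, secId, level }, ...]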
inter / uni : 0;\n}\n/**\n * Clamp value between min and max\n */\nfunction clamp(x, a, b) {\n return Math.max(a, Math.min(b, x));\n}\n/**\n * Pick random element from array\n */\nfunction pick(arr) {\n return arr[Math.floor(Math.random() * arr.length)];\n}\n/**\n * Random number in range\n */\nfunction randRange(a, b) {\n return a + Math.random() * (b - a);\n}\n/**\n * Mutate object with patch\n */\nfunction mutate(base, patch) {\n return Object.assign({}, base, patch);\n}\n/**\n * Auto-tune hyperparameters\n */\nfunction autoTune(opts, onProgress) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b;\n // License check removed // Premium feature - requires valid license\n const { chunks, vocabMap, idf, tfidfDocs, vocabSize, budget = 40, sampleQueries: Qn = 24, currentSettings, } = opts;\n const budgetClamped = Math.max(10, Math.min(200, budget));\n const QnClamped = Math.max(8, Math.min(60, Qn));\n const useStem = ((_a = currentSettings.useStem) !== null && _a !== void 0 ? _a : true);\n const queries = sampleQueriesFromCorpus(chunks, QnClamped, useStem);\n // Pre-compute TF-IDF top-K for each query (baseline)\n const tfidfTops = queries.map(q => {\n var _a;\n const qv = toTfidf(tokenize$1(q, useStem), idf, vocabMap, 1);\n const scores = tfidfDocs.map(v => cosineSparse(v, qv));\n return topKIndices(scores, ((_a = currentSettings.topK) !== null && _a !== void 0 ? _a : 8));\n });\n let best = { score: -Infinity, cfg: Object.assign({}, currentSettings) };\n // Cache for dense docs (keyed by kernel params)\n const denseCache = new Map();\n const denseDocsFor = (cfg) => {\n // ridge doesn't affect projection; key on kernel params only\n const key = `${cfg.kernel}:${cfg.landmarks}:${cfg.sigma}`;\n let dd = denseCache.get(key);\n if (!dd) {\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabSize, cfg.landmarks);\n dd = buildDenseDocs(tfidfDocs, vocabSize, landmarkMat, cfg.kernel, cfg.sigma);\n denseCache.set(key, dd);\n }\n return dd;\n };\n let trial = 0;\n const tryCfg = (cfg, note) => {\n var _a;\n const jScores = [];\n const dd = denseDocsFor(cfg);\n const alpha = clamp(cfg.alpha, 0, 1);\n const lambda = ((_a = cfg.ridge) !== null && _a !== void 0 ? 
_a : 0.05);\n for (let qi = 0; qi < queries.length; qi++) {\n const q = queries[qi];\n const qv = toTfidf(tokenize$1(q, cfg.useStem), idf, vocabMap, 1);\n const { landmarksIdx, landmarkMat } = buildLandmarks(tfidfDocs, vocabSize, cfg.landmarks);\n const qd = projectToDense(qv, vocabSize, landmarkMat, cfg.kernel, cfg.sigma);\n const tfidfScores = tfidfDocs.map(v => cosineSparse(v, qv));\n // Compute dense scores using kernel similarity\n const denseScoresSimple = dd.map((v) => kernelSim(v, qd, cfg.kernel, cfg.sigma));\n // ridge-regularized hybrid (bonus off during tuning)\n const hybrid = denseScoresSimple.map((d, i) => {\n const t = tfidfScores[i];\n const reg = 1 / (1 + lambda * (d * d + t * t));\n return reg * (alpha * d + (1 - alpha) * t);\n });\n const idxs = topKIndices(hybrid, cfg.topK);\n jScores.push(jaccard(tfidfTops[qi], idxs));\n }\n const score = (jScores.reduce((a, b) => a + b, 0) / jScores.length) - penalty(cfg);\n if (score > best.score)\n best = { score, cfg: Object.assign({}, cfg) };\n if (onProgress)\n onProgress(++trial, best.score, note);\n };\n // random warmup\n for (let i = 0; i < Math.floor(budgetClamped * 0.6); i++) {\n const cfg = mutate(currentSettings, {\n alpha: randRange(0.55, 0.95),\n beta: randRange(0.0, 0.35),\n sigma: randRange(0.18, 0.75),\n kernel: pick(['rbf', 'cosine', 'poly2']),\n vocab: pick([8000, 10000, 12000, 15000]),\n landmarks: pick([128, 192, 256, 320, 384]),\n prefilter: pick([200, 300, 400, 600]),\n topK: pick([4, 6, 8]),\n headingW: randRange(1.5, 4.5),\n chunk: pick([450, 550, 650]),\n overlap: pick([50, 75, 100]),\n penalizeLinks: true,\n stripCode: true,\n expandQuery: true,\n useStem: true,\n ridge: randRange(0.02, 0.18),\n });\n tryCfg(cfg, 'random');\n }\n // refinement\n for (let i = trial; i < budgetClamped; i++) {\n const b = best.cfg;\n const cfg = mutate(b, {\n alpha: clamp(b.alpha + randRange(-0.1, 0.1), 0.4, 0.98),\n beta: clamp(b.beta + randRange(-0.1, 0.1), 0, 0.4),\n sigma: clamp(b.sigma + randRange(-0.08, 0.08), 0.12, 1.0),\n kernel: b.kernel,\n vocab: b.vocab,\n landmarks: b.landmarks,\n prefilter: b.prefilter,\n topK: b.topK,\n headingW: clamp(b.headingW + randRange(-0.4, 0.4), 1.0, 6.0),\n chunk: b.chunk,\n overlap: b.overlap,\n penalizeLinks: b.penalizeLinks,\n stripCode: b.stripCode,\n expandQuery: b.expandQuery,\n useStem: b.useStem,\n ridge: clamp(((_b = b.ridge) !== null && _b !== void 0 ? 
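// Editor's usage sketch: the index-side inputs (chunks, vocabMap, idf, tfidfDocs and the\n// seed currentSettings) are assumed to come from this bundle's existing build pipeline.\nconst tuned = await autoTune({\n    chunks, vocabMap, idf, tfidfDocs, vocabSize: vocabMap.size,\n    budget: 40, // clamped to [10, 200]; ~60% random warmup, then local refinement\n    sampleQueries: 24, // synthetic queries sampled from headings + content tokens\n    currentSettings, // also fixes the TF-IDF top-K baseline the Jaccard objective compares against\n}, (trial, bestScore, note) => console.log(trial, bestScore.toFixed(3), note));\n// tuned = { bestSettings, bestScore, trials }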
_b : 0.05) + randRange(-0.02, 0.02), 0.0, 0.2),\n });\n tryCfg(cfg, 'refine');\n }\n return {\n bestSettings: best.cfg,\n bestScore: best.score,\n trials: trial,\n };\n });\n}\n\n// Model serialization utilities\n// Extracted from workers for reuse\n/**\n * Small, deterministic hash (not cryptographic)\n */\nfunction quickHash(s) {\n let h1 = 0x9e3779b1, h2 = 0x85ebca6b;\n for (let i = 0; i < s.length; i++) {\n const c = s.charCodeAt(i);\n h1 = Math.imul(h1 ^ c, 0x85ebca6b);\n h2 = Math.imul(h2 ^ c, 0xc2b2ae35);\n }\n h1 = (h1 ^ (h2 >>> 15)) >>> 0;\n return ('00000000' + h1.toString(16)).slice(-8);\n}\n/**\n * Export model to serialized format\n */\nfunction exportModel(opts) {\n // License check removed // Premium feature - requires valid license\n const { settings, vocabMap, idf, chunks, tfidfDocs, landmarksIdx, landmarkMat, denseDocs, includeRich = true, includeDense = false, } = opts;\n // 1) settings snapshot (clone to avoid accidental mutation)\n const settingsSnap = JSON.parse(JSON.stringify(settings || {}));\n // 2) vocab\n const vocab = Array.from(vocabMap.entries());\n // 3) chunks (minimal text)\n const chunksSnap = chunks.map(c => ({\n heading: c.heading,\n content: c.content || '',\n rich: includeRich ? (c.rich || undefined) : undefined,\n level: c.level,\n secId: c.secId,\n }));\n // 4) tfidfDocs → array of pairs\n const tfidfPairs = tfidfDocs.map((m) => {\n const row = [];\n for (const [i, v] of m)\n row.push([i, v]);\n // sort indices for determinism\n row.sort((a, b) => a[0] - b[0]);\n return row;\n });\n // 5) Nyström landmarks and (optional) denseDocs\n const landmarkMatArr = landmarkMat.map(v => Array.from(v));\n const denseDocsArr = includeDense ?\n ((denseDocs === null || denseDocs === void 0 ? void 0 : denseDocs.map(v => Array.from(v))) || undefined) : undefined;\n const payload = {\n version: 'astermind-pro-v1',\n savedAt: new Date().toISOString(),\n settings: settingsSnap,\n vocab,\n idf: Array.from(idf),\n chunks: chunksSnap,\n tfidfDocs: tfidfPairs,\n landmarksIdx: Array.from(landmarksIdx),\n landmarkMat: landmarkMatArr,\n denseDocs: denseDocsArr,\n };\n // (Optional) quick content hash for sanity (small & deterministic)\n payload.hash = quickHash(JSON.stringify({\n idf: payload.idf.slice(0, 64),\n vi: payload.vocab.length,\n ci: payload.chunks.length,\n lm: payload.landmarksIdx.length\n }));\n return payload;\n}\nfunction importModel(model, opts) {\n // License check removed // Premium feature - requires valid license\n if (model.version !== 'astermind-pro-v1' && model.version !== 'astermind-elm-v1') {\n throw new Error(`Unsupported model version: ${model.version}. Expected 'astermind-pro-v1' or 'astermind-elm-v1'`);\n }\n // 1) restore settings\n const settings = JSON.parse(JSON.stringify(model.settings || {}));\n // 2) vocab & idf\n const vocabMap = new Map(model.vocab);\n const idf = Float64Array.from(model.idf); // keep as number[] for compatibility\n // 3) chunks\n const chunks = model.chunks.map(c => ({\n heading: c.heading,\n content: c.content || '',\n rich: c.rich,\n level: c.level,\n secId: c.secId\n }));\n // 4) tfidfDocs from pairs\n const tfidfDocs = model.tfidfDocs.map(row => {\n const m = new Map();\n for (const [i, v] of row)\n m.set(i, v);\n return m;\n });\n // 5) Nyström landmarks\n const landmarksIdx = Array.from(model.landmarksIdx);\n const landmarkMat = model.landmarkMat.map(a => Float64Array.from(a));\n // 6) denseDocs: use stored or recompute\n const needRecompute = ((opts === null || opts === void 0 ? 
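// Editor's round-trip sketch: exportModel snapshots settings/vocab/idf/chunks/tfidf pairs\n// plus the Nyström landmarks; importModel restores them and, when dense vectors were not\n// embedded, rebuilds them via the supplied builder (buildDenseDocs from this bundle).\nconst payload = exportModel({ settings, vocabMap, idf, chunks, tfidfDocs, landmarksIdx, landmarkMat, denseDocs, includeDense: false });\nconst restored = importModel(payload, { recomputeDense: true, buildDense: buildDenseDocs });\n// payload.hash is a small deterministic sanity hash, not a cryptographic checksum.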
void 0 : opts.recomputeDense) === true) || !model.denseDocs || model.denseDocs.length !== tfidfDocs.length;\n let denseDocs;\n if (needRecompute && (opts === null || opts === void 0 ? void 0 : opts.buildDense)) {\n denseDocs = opts.buildDense(tfidfDocs, vocabMap.size, landmarkMat, settings.kernel || 'rbf', settings.sigma || 1.0);\n }\n else if (needRecompute) {\n throw new Error('recomputeDense=true but buildDense function not provided');\n }\n else {\n denseDocs = model.denseDocs.map(a => Float64Array.from(a));\n }\n return {\n settings,\n vocabMap,\n idf,\n chunks,\n tfidfDocs,\n landmarksIdx,\n landmarkMat,\n denseDocs,\n };\n}\n\n// elm_scorer.ts — tiny, self-contained ELM scorer for (query, chunk) relevance\n// Uses a random single hidden layer + ridge (closed form via OnlineRidge).\n// \n// NOTE: You can also use astermind's ELM or OnlineELM classes from the local build:\n// import { ELM, OnlineELM, defaultNumericConfig } from '@astermind/astermind-elm';\n// License removed - all features are now free!\nfunction rngFactory(seed = 1337) {\n // xorshift32\n let x = (seed >>> 0) || 1;\n return () => {\n x ^= x << 13;\n x ^= x >> 17;\n x ^= x << 5;\n return ((x >>> 0) / 0xFFFFFFFF);\n };\n}\nclass ELMScorer {\n constructor(p, cfg) {\n var _a;\n // License check removed // License check - ELMScorer uses premium OnlineRidge\n this.p = p;\n this.dim = Math.max(8, cfg.dim | 0);\n this.lambda = Math.max(1e-6, cfg.lambda);\n const rng = rngFactory((_a = cfg.seed) !== null && _a !== void 0 ? _a : 1337);\n this.W = new Float64Array(this.dim * p);\n for (let i = 0; i < this.W.length; i++)\n this.W[i] = (rng() * 2 - 1) * Math.sqrt(2 / p);\n this.b = new Float64Array(this.dim);\n for (let i = 0; i < this.b.length; i++)\n this.b[i] = (rng() * 2 - 1);\n this.ridge = new OnlineRidge(this.dim, 1, this.lambda);\n this.ready = false;\n }\n hidden(x) {\n const h = new Float64Array(this.dim);\n for (let j = 0; j < this.dim; j++) {\n let s = this.b[j];\n const row = j * this.p;\n for (let i = 0; i < this.p; i++)\n s += this.W[row + i] * x[i];\n // GELU-ish smooth nonlinearity (fast approximate)\n const t = s;\n h[j] = 0.5 * t * (1 + Math.tanh(Math.sqrt(2 / Math.PI) * (t + 0.044715 * Math.pow(t, 3))));\n }\n return h;\n }\n partialFit(batchX, batchY) {\n if (!this.ridge)\n this.ridge = new OnlineRidge(this.dim, 1, this.lambda);\n for (let k = 0; k < batchX.length; k++) {\n const h = this.hidden(batchX[k]); // Float64Array\n const y = new Float64Array([batchY[k]]); // <-- make it Float64Array\n this.ridge.update(h, y);\n }\n this.ready = true;\n }\n fit(X, y, iters = 1, batch = 256) {\n const n = X.length;\n for (let t = 0; t < iters; t++) {\n for (let i = 0; i < n; i += batch) {\n const xb = X.slice(i, i + batch);\n const yb = y.slice(i, i + batch);\n this.partialFit(xb, yb);\n }\n }\n this.ready = true;\n }\n score(x) {\n if (!this.ready || !this.ridge)\n return 0;\n const h = this.hidden(x);\n // y = h^T Beta (single output)\n const Beta = this.ridge.Beta;\n let s = 0;\n for (let j = 0; j < this.dim; j++)\n s += h[j] * Beta[j];\n return s;\n }\n}\n\n// multi-kernel-elm.ts — Multi-Kernel ELM combining multiple kernel types\n// Combines RBF, polynomial, and linear kernels for improved accuracy\n/**\n * Multi-Kernel ELM that combines multiple kernel types\n * Uses weighted combination of kernels for improved accuracy\n */\nclass MultiKernelELM {\n constructor(categories, options) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n this.kelms = [];\n this.kernelWeights = [];\n this.categories = [];\n this.trained = 
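// Editor's sketch for the ELMScorer above: a fixed, seeded random hidden layer with an\n// online ridge readout. `featureRows`/`relevanceLabels` are hypothetical (query, chunk)\n// feature vectors of length p = 8 with targets in [0, 1].\nconst scorer = new ELMScorer(8, { dim: 64, lambda: 1e-3, seed: 1337 });\nscorer.fit(featureRows, relevanceLabels, 1, 256); // mini-batched closed-form updates\nconst relevance = scorer.score(featureRows[0]); // higher ⇒ more relevant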
false;\n // License check removed // Premium feature - requires valid license\n this.categories = categories;\n this.options = {\n kernels: options.kernels,\n ridgeLambda: (_a = options.ridgeLambda) !== null && _a !== void 0 ? _a : 0.001,\n learnWeights: (_b = options.learnWeights) !== null && _b !== void 0 ? _b : true,\n nystrom: {\n m: (_d = (_c = options.nystrom) === null || _c === void 0 ? void 0 : _c.m) !== null && _d !== void 0 ? _d : 100,\n strategy: (_f = (_e = options.nystrom) === null || _e === void 0 ? void 0 : _e.strategy) !== null && _f !== void 0 ? _f : 'uniform',\n },\n };\n // Initialize kernel ELMs\n for (const kernelConfig of this.options.kernels) {\n const kelm = new KernelELM({\n outputDim: categories.length,\n kernel: {\n type: kernelConfig.type === 'polynomial' ? 'rbf' : kernelConfig.type, // Map polynomial to rbf for now\n gamma: (_h = (_g = kernelConfig.params) === null || _g === void 0 ? void 0 : _g.gamma) !== null && _h !== void 0 ? _h : 0.01,\n },\n ridgeLambda: this.options.ridgeLambda,\n task: 'classification',\n mode: 'nystrom',\n nystrom: {\n m: this.options.nystrom.m,\n strategy: this.options.nystrom.strategy === 'random' ? 'uniform' : this.options.nystrom.strategy,\n },\n });\n this.kelms.push(kelm);\n }\n // Initialize kernel weights\n if (this.options.learnWeights) {\n this.kernelWeights = this.options.kernels.map((k, i) => { var _a; return (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 / this.options.kernels.length; });\n }\n else {\n this.kernelWeights = this.options.kernels.map((k) => { var _a; return (_a = k.weight) !== null && _a !== void 0 ? _a : 1.0 / this.options.kernels.length; });\n }\n }\n /**\n * Train the multi-kernel ELM\n */\n fit(X, y) {\n // Convert y to one-hot if needed\n const oneHotY = this._toOneHot(y);\n // Train each kernel ELM\n for (const kelm of this.kelms) {\n kelm.fit(X, oneHotY);\n }\n // Learn optimal kernel weights if enabled\n if (this.options.learnWeights && this.kelms.length > 1) {\n this._learnKernelWeights(X, oneHotY);\n }\n this.trained = true;\n }\n /**\n * Predict with multi-kernel combination\n */\n predict(X, topK = 3) {\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const allPredictions = [];\n for (const x of XArray) {\n const predictions = [];\n // Get predictions from each kernel\n const kernelPredictions = this.kelms.map((kelm) => {\n var _a, _b, _c, _d;\n const pred = ((_b = (_a = kelm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [x])) || ((_d = (_c = kelm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [x]));\n return (Array.isArray(pred) ? 
pred[0] : pred) || new Float64Array(this.categories.length);\n });\n // Weighted combination\n const combined = new Float64Array(this.categories.length);\n for (let i = 0; i < this.kelms.length; i++) {\n const weight = this.kernelWeights[i];\n for (let j = 0; j < this.categories.length; j++) {\n combined[j] += kernelPredictions[i][j] * weight;\n }\n }\n // Convert to probabilities\n const probs = this._softmax(combined);\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n const topResults = [];\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n topResults.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n predictions.push(...topResults);\n allPredictions.push(...predictions);\n }\n return allPredictions;\n }\n /**\n * Learn optimal kernel weights using validation performance\n */\n _learnKernelWeights(X, y) {\n var _a, _b, _c, _d;\n // Simple approach: weight by validation accuracy\n // In practice, you might use cross-validation\n const weights = new Float64Array(this.kelms.length);\n for (let i = 0; i < this.kelms.length; i++) {\n const kelm = this.kelms[i];\n let correct = 0;\n let total = 0;\n // Evaluate on training data (in production, use validation set)\n for (let j = 0; j < Math.min(100, X.length); j++) {\n const pred = ((_b = (_a = kelm).transform) === null || _b === void 0 ? void 0 : _b.call(_a, [X[j]])) || ((_d = (_c = kelm).predict) === null || _d === void 0 ? void 0 : _d.call(_c, [X[j]]));\n const predVec = (Array.isArray(pred) ? pred[0] : pred) || new Float64Array(0);\n const predIdx = this._argmax(predVec);\n const trueIdx = this._argmax(y[j]);\n if (predIdx === trueIdx)\n correct++;\n total++;\n }\n weights[i] = total > 0 ? 
correct / total : 1.0 / this.kelms.length;\n }\n // Normalize weights\n const sum = Array.from(weights).reduce((a, b) => a + b, 0);\n if (sum > 0) {\n for (let i = 0; i < weights.length; i++) {\n this.kernelWeights[i] = weights[i] / sum;\n }\n }\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...Array.from(logits));\n const exp = new Float64Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n for (let i = 0; i < exp.length; i++) {\n exp[i] /= sum;\n }\n return exp;\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n /**\n * Get current kernel weights\n */\n getKernelWeights() {\n return [...this.kernelWeights];\n }\n}\n\n// deep-elm-pro.ts — Improved Deep ELM with advanced features\n// Enhanced version of DeepELM with better training strategies and regularization\n/**\n * Improved Deep ELM with advanced training strategies\n * Features:\n * - Layer-wise training with autoencoder pretraining\n * - Dropout and batch normalization\n * - L1/L2/Elastic net regularization\n * - Better initialization strategies\n */\nclass DeepELMPro {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;\n this.layers = [];\n this.trained = false;\n this.featureExtractors = []; // For pretraining\n // License check removed // Premium feature - requires valid license\n this.options = {\n layers: options.layers,\n activation: (_a = options.activation) !== null && _a !== void 0 ? _a : 'relu',\n useDropout: (_b = options.useDropout) !== null && _b !== void 0 ? _b : false,\n dropoutRate: (_c = options.dropoutRate) !== null && _c !== void 0 ? _c : 0.2,\n useBatchNorm: (_d = options.useBatchNorm) !== null && _d !== void 0 ? _d : false,\n regularization: {\n type: (_f = (_e = options.regularization) === null || _e === void 0 ? void 0 : _e.type) !== null && _f !== void 0 ? _f : 'l2',\n lambda: (_h = (_g = options.regularization) === null || _g === void 0 ? void 0 : _g.lambda) !== null && _h !== void 0 ? _h : 0.0001,\n alpha: (_k = (_j = options.regularization) === null || _j === void 0 ? void 0 : _j.alpha) !== null && _k !== void 0 ? _k : 0.5,\n },\n layerWiseTraining: (_l = options.layerWiseTraining) !== null && _l !== void 0 ? _l : true,\n pretraining: (_m = options.pretraining) !== null && _m !== void 0 ? _m : true,\n categories: options.categories,\n maxLen: (_o = options.maxLen) !== null && _o !== void 0 ? _o : 100,\n };\n // Initialize layers\n for (let i = 0; i < this.options.layers.length; i++) {\n const deepELM = new DeepELM({\n layers: [{ hiddenUnits: this.options.layers[i], activation: this.options.activation }],\n maxLen: this.options.maxLen,\n useTokenizer: i === 0, // Only first layer uses tokenizer\n });\n // Set categories for last layer after construction\n if (i === this.options.layers.length - 1) {\n (_q = (_p = deepELM).setCategories) === null || _q === void 0 ? 
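// Editor's sketch for the MultiKernelELM above. Note the constructor currently maps\n// 'polynomial' onto 'rbf', so differing gamma values are what actually distinguish the\n// ensemble members. `trainX`/`trainY`/`testX` are hypothetical numeric data.\nconst mk = new MultiKernelELM(['intro', 'api', 'faq'], {\n    kernels: [{ type: 'rbf', params: { gamma: 0.01 } }, { type: 'rbf', params: { gamma: 0.1 } }],\n    ridgeLambda: 0.001, learnWeights: true, nystrom: { m: 100, strategy: 'uniform' },\n});\nmk.fit(trainX, trainY); // trainY: label indices (one-hot is built internally)\nconst top2 = mk.predict(testX[0], 2); // → [{ label, prob }, ...] after softmax\nconsole.log(mk.getKernelWeights()); // weights re-normalized by per-kernel accuracy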
void 0 : _q.call(_p, this.options.categories);\n }\n this.layers.push(deepELM);\n }\n // Initialize feature extractors for pretraining\n if (this.options.pretraining) {\n for (let i = 0; i < this.options.layers.length - 1; i++) {\n const extractor = new ELM({\n useTokenizer: i === 0 ? true : undefined,\n hiddenUnits: this.options.layers[i],\n categories: [],\n maxLen: this.options.maxLen,\n });\n this.featureExtractors.push(extractor);\n }\n }\n }\n /**\n * Train the deep ELM with improved strategies\n */\n train(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n // Step 1: Pretraining (if enabled)\n if (this.options.pretraining) {\n yield this._pretrain(X);\n }\n // Step 2: Layer-wise or joint training\n if (this.options.layerWiseTraining) {\n yield this._trainLayerWise(X, y);\n }\n else {\n yield this._trainJoint(X, y);\n }\n this.trained = true;\n });\n }\n /**\n * Predict with deep ELM\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const predictions = [];\n for (const x of XArray) {\n // Forward pass through layers\n let features = x;\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n // Apply batch normalization if enabled\n if (this.options.useBatchNorm && i > 0) {\n features = this._batchNormalize(features);\n }\n // Apply dropout if enabled (only during training, but we're in predict mode)\n // In practice, dropout is disabled during inference\n // Forward through layer\n if (i === this.layers.length - 1) {\n // Last layer: get predictions\n const pred = ((_b = (_a = layer).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [features], topK)) || [];\n predictions.push(...pred.map((p) => ({\n label: p.label || this.options.categories[p.index || 0],\n prob: p.prob || 0,\n })));\n }\n else {\n // Hidden layers: extract features\n features = this._extractFeatures(layer, features);\n }\n }\n }\n return predictions;\n }\n /**\n * Pretrain layers as autoencoders\n */\n _pretrain(X) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b;\n let currentFeatures = X;\n for (let i = 0; i < this.featureExtractors.length; i++) {\n const extractor = this.featureExtractors[i];\n // Train as autoencoder (reconstruct input)\n const encoded = currentFeatures.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = extractor.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = extractor.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n });\n // Use encoded features as both input and target (autoencoder)\n (_b = (_a = extractor).trainFromData) === null || _b === void 0 ? void 0 : _b.call(_a, encoded, encoded.map((_, idx) => idx));\n // Extract features for next layer\n currentFeatures = encoded.map(x => {\n const hidden = this._extractFeaturesFromELM(extractor, x);\n return Array.from(hidden);\n });\n }\n });\n }\n /**\n * Train layers sequentially\n */\n _trainLayerWise(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b, _c, _d, _e, _f;\n let currentFeatures = X;\n const labelIndices = y.map(label => typeof label === 'number' ? 
label : this.options.categories.indexOf(label));\n for (let i = 0; i < this.layers.length; i++) {\n const layer = this.layers[i];\n // Prepare features\n const features = currentFeatures.map(x => {\n if (i === 0) {\n // First layer: use raw input\n return x;\n }\n else {\n // Subsequent layers: use previous layer output\n return this._extractFeatures(this.layers[i - 1], x);\n }\n });\n // Train layer\n if (i === this.layers.length - 1) {\n // Last layer: train with labels\n (_b = (_a = layer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = layer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, features, labelIndices);\n }\n else {\n // Hidden layers: train to extract features\n // Use next layer's input as target (unsupervised)\n const nextLayerFeatures = i < this.layers.length - 1\n ? features.map(f => this._extractFeatures(this.layers[i + 1], f))\n : features;\n (_f = (_e = layer).trainFromData) === null || _f === void 0 ? void 0 : _f.call(_e, features, nextLayerFeatures.map((_, idx) => idx));\n }\n // Update features for next layer\n currentFeatures = features.map(f => this._extractFeatures(layer, f));\n }\n });\n }\n /**\n * Train all layers jointly\n */\n _trainJoint(X, y) {\n return __awaiter(this, void 0, void 0, function* () {\n var _a, _b, _c, _d;\n const labelIndices = y.map(label => typeof label === 'number' ? label : this.options.categories.indexOf(label));\n // Train the last layer with final features\n const lastLayer = this.layers[this.layers.length - 1];\n const finalFeatures = X.map(x => {\n let features = x;\n for (let i = 0; i < this.layers.length - 1; i++) {\n features = this._extractFeatures(this.layers[i], features);\n }\n return features;\n });\n (_b = (_a = lastLayer).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = lastLayer).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, finalFeatures, labelIndices);\n });\n }\n _extractFeatures(layer, input) {\n var _a, _b, _c, _d;\n // Extract hidden layer representation\n const hidden = (_b = (_a = layer).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [input], (_c = layer.model) === null || _c === void 0 ? void 0 : _c.W, (_d = layer.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? void 0 : hidden[0]) ? Array.from(hidden[0]) : input;\n }\n _extractFeaturesFromELM(elm, input) {\n var _a, _b, _c, _d;\n const hidden = (_b = (_a = elm).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [input], (_c = elm.model) === null || _c === void 0 ? void 0 : _c.W, (_d = elm.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? 
void 0 : hidden[0]) || new Float64Array(input.length);\n }\n _batchNormalize(features) {\n const mean = features.reduce((a, b) => a + b, 0) / features.length;\n const variance = features.reduce((sum, x) => sum + Math.pow((x - mean), 2), 0) / features.length;\n const std = Math.sqrt(variance + 1e-8);\n return features.map(x => (x - mean) / std);\n }\n}\n\n// online-kernel-elm.ts — Online Kernel ELM for streaming data\n// Incremental kernel learning with forgetting mechanisms\n/**\n * Online Kernel ELM for real-time learning from streaming data\n * Features:\n * - Incremental kernel matrix updates\n * - Sliding window with forgetting\n * - Adaptive landmark selection\n * - Real-time prediction\n */\nclass OnlineKernelELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n // Storage for streaming data\n this.landmarks = [];\n this.landmarkIndices = [];\n this.samples = [];\n this.labels = [];\n this.sampleWeights = [];\n // Online ridge for incremental updates\n this.onlineRidge = null;\n this.kernelMatrix = [];\n this.kernelMatrixInv = [];\n this.trained = false;\n // License check removed // Premium feature - requires valid license\n this.kernelType = options.kernel.type;\n this.kernelParams = {\n gamma: (_a = options.kernel.gamma) !== null && _a !== void 0 ? _a : 0.01,\n degree: (_b = options.kernel.degree) !== null && _b !== void 0 ? _b : 2,\n coef0: (_c = options.kernel.coef0) !== null && _c !== void 0 ? _c : 0,\n };\n this.categories = options.categories;\n this.ridgeLambda = (_d = options.ridgeLambda) !== null && _d !== void 0 ? _d : 0.001;\n this.windowSize = (_e = options.windowSize) !== null && _e !== void 0 ? _e : 1000;\n this.decayFactor = (_f = options.decayFactor) !== null && _f !== void 0 ? _f : 0.99;\n this.maxLandmarks = (_g = options.maxLandmarks) !== null && _g !== void 0 ? _g : 100;\n }\n /**\n * Initial training with batch data\n */\n fit(X, y) {\n const oneHotY = this._toOneHot(y);\n // Select landmarks\n this._selectLandmarks(X);\n // Compute initial kernel matrix\n this._computeKernelMatrix(X);\n // Initialize online ridge\n this.onlineRidge = new OnlineRidge(this.landmarks.length, this.categories.length, this.ridgeLambda);\n // Train on initial batch\n for (let i = 0; i < X.length; i++) {\n const phi = this._computeKernelFeatures(X[i]);\n const yVec = new Float64Array(oneHotY[i]);\n this.onlineRidge.update(phi, yVec);\n }\n // Store samples\n this.samples = X.map(x => [...x]);\n this.labels = Array.isArray(y[0])\n ? y.map(yy => this._argmax(yy))\n : y;\n this.sampleWeights = new Array(X.length).fill(1.0);\n this.trained = true;\n }\n /**\n * Incremental update with new sample\n */\n update(x, y) {\n if (!this.trained) {\n throw new Error('Model must be initially trained with fit() before incremental updates');\n }\n const oneHotY = Array.isArray(y)\n ? y\n : (() => {\n const oh = new Array(this.categories.length).fill(0);\n oh[y] = 1;\n return oh;\n })();\n // Add to samples\n this.samples.push([...x]);\n this.labels.push(Array.isArray(y) ? 
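/* Streaming usage sketch for OnlineKernelELM (annotation; data below is\n * hypothetical):\n *   const ok = new OnlineKernelELM({\n *     kernel: { type: 'rbf', gamma: 0.05 },\n *     categories: ['spam', 'ham'],\n *     windowSize: 500,\n *   });\n *   ok.fit(batchX, batchY);            // initial batch picks the landmarks\n *   ok.update(nextX, nextLabelIndex);  // then one sample at a time\n *   const top1 = ok.predict(nextX, 1);\n */ 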
this._argmax(y) : y);\n this.sampleWeights.push(1.0);\n // Apply decay to old samples\n for (let i = 0; i < this.sampleWeights.length; i++) {\n this.sampleWeights[i] *= this.decayFactor;\n }\n // Remove old samples if window exceeded\n if (this.samples.length > this.windowSize) {\n const removeCount = this.samples.length - this.windowSize;\n this.samples.splice(0, removeCount);\n this.labels.splice(0, removeCount);\n this.sampleWeights.splice(0, removeCount);\n }\n // Update landmarks if needed (adaptive strategy)\n if (this.landmarkStrategy === 'adaptive') {\n this._updateLandmarksAdaptive();\n }\n // Compute kernel features\n const phi = this._computeKernelFeatures(x);\n const yVec = new Float64Array(oneHotY);\n // Update online ridge\n if (this.onlineRidge) {\n this.onlineRidge.update(phi, yVec);\n }\n }\n /**\n * Predict with online model\n */\n predict(x, topK = 3) {\n if (!this.trained || !this.onlineRidge) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const allPredictions = [];\n for (const xi of XArray) {\n const predictions = [];\n const phi = this._computeKernelFeatures(xi);\n const logits = this.onlineRidge.predict(phi);\n // Convert to probabilities\n const probs = this._softmax(logits);\n // Get top-K\n const indexed = [];\n for (let idx = 0; idx < probs.length; idx++) {\n indexed.push({\n label: this.categories[idx],\n prob: probs[idx],\n index: idx,\n });\n }\n indexed.sort((a, b) => b.prob - a.prob);\n const topResults = [];\n for (let i = 0; i < Math.min(topK, indexed.length); i++) {\n topResults.push({\n label: indexed[i].label,\n prob: indexed[i].prob,\n });\n }\n predictions.push(...topResults);\n allPredictions.push(...predictions);\n }\n return allPredictions;\n }\n /**\n * Select landmarks from data\n */\n _selectLandmarks(X) {\n const strategy = this.landmarkStrategy || 'uniform';\n const n = Math.min(this.maxLandmarks, X.length);\n if (strategy === 'uniform') {\n const step = Math.max(1, Math.floor(X.length / n));\n this.landmarkIndices = Array.from({ length: n }, (_, i) => Math.min(X.length - 1, i * step));\n }\n else if (strategy === 'random') {\n const indices = Array.from({ length: X.length }, (_, i) => i);\n for (let i = indices.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [indices[i], indices[j]] = [indices[j], indices[i]];\n }\n this.landmarkIndices = indices.slice(0, n);\n }\n else {\n // Adaptive: use first n samples initially\n this.landmarkIndices = Array.from({ length: n }, (_, i) => i);\n }\n this.landmarks = this.landmarkIndices.map(idx => [...X[idx]]);\n }\n /**\n * Compute kernel features for a sample\n */\n _computeKernelFeatures(x) {\n const features = new Float64Array(this.landmarks.length);\n for (let i = 0; i < this.landmarks.length; i++) {\n features[i] = this._kernel(x, this.landmarks[i]);\n }\n return features;\n }\n /**\n * Compute kernel between two vectors\n */\n _kernel(x1, x2) {\n if (this.kernelType === 'linear') {\n return this._dot(x1, x2);\n }\n else if (this.kernelType === 'rbf') {\n const dist = this._squaredDistance(x1, x2);\n return Math.exp(-this.kernelParams.gamma * dist);\n }\n else if (this.kernelType === 'polynomial') {\n const dot = this._dot(x1, x2);\n return Math.pow(dot + this.kernelParams.coef0, this.kernelParams.degree);\n }\n return 0;\n }\n _dot(a, b) {\n let sum = 0;\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n sum += a[i] * b[i];\n }\n return sum;\n }\n _squaredDistance(a, b) {\n let sum = 0;\n 
for (let i = 0; i < Math.min(a.length, b.length); i++) {\n const diff = a[i] - b[i];\n sum += diff * diff;\n }\n return sum;\n }\n _computeKernelMatrix(X) {\n // For online learning, we don't need full kernel matrix\n // This is kept for compatibility\n this.kernelMatrix = [];\n }\n _updateLandmarksAdaptive() {\n // Adaptive landmark selection based on prediction error\n // In practice, you might replace landmarks with high error\n // For now, keep existing landmarks\n }\n _toOneHot(y) {\n if (Array.isArray(y[0])) {\n return y;\n }\n const labels = y;\n return labels.map((label) => {\n const oneHot = new Array(this.categories.length).fill(0);\n oneHot[label] = 1;\n return oneHot;\n });\n }\n _softmax(logits) {\n const max = Math.max(...Array.from(logits));\n const exp = new Float64Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n for (let i = 0; i < exp.length; i++) {\n exp[i] /= sum;\n }\n return exp;\n }\n _argmax(arr) {\n let maxIdx = 0;\n let maxVal = arr[0] || 0;\n for (let i = 1; i < arr.length; i++) {\n if ((arr[i] || 0) > maxVal) {\n maxVal = arr[i] || 0;\n maxIdx = i;\n }\n }\n return maxIdx;\n }\n get landmarkStrategy() {\n return 'adaptive'; // Default for online learning\n }\n}\n\n// multi-task-elm.ts — Multi-Task ELM for joint learning across related tasks\n// Shared hidden layer with task-specific output layers\n/**\n * Multi-Task ELM for joint learning across related tasks\n * Features:\n * - Shared feature extraction layer\n * - Task-specific output layers\n * - Task weighting for importance\n * - Joint optimization\n */\nclass MultiTaskELM {\n constructor(options) {\n var _a, _b, _c, _d, _e;\n this.taskELMs = new Map();\n this.trained = false;\n // License check removed // Premium feature - requires valid license\n this.tasks = options.tasks.map((task) => {\n var _a;\n return ({\n name: task.name,\n categories: task.categories,\n weight: (_a = task.weight) !== null && _a !== void 0 ? _a : 1.0,\n });\n });\n this.options = {\n sharedHiddenUnits: (_a = options.sharedHiddenUnits) !== null && _a !== void 0 ? _a : 256,\n taskSpecificHiddenUnits: (_b = options.taskSpecificHiddenUnits) !== null && _b !== void 0 ? _b : options.tasks.map(() => 128),\n activation: (_c = options.activation) !== null && _c !== void 0 ? _c : 'relu',\n maxLen: (_d = options.maxLen) !== null && _d !== void 0 ? _d : 100,\n useTokenizer: (_e = options.useTokenizer) !== null && _e !== void 0 ? _e : true,\n };\n // Initialize shared ELM\n this.sharedELM = new ELM({\n useTokenizer: this.options.useTokenizer ? 
true : undefined,\n hiddenUnits: this.options.sharedHiddenUnits,\n categories: [], // No categories for shared layer\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n // Initialize task-specific ELMs\n for (let i = 0; i < this.tasks.length; i++) {\n const task = this.tasks[i];\n const taskELM = new ELM({\n hiddenUnits: this.options.taskSpecificHiddenUnits[i],\n categories: task.categories,\n maxLen: this.options.sharedHiddenUnits, // Input size is shared layer output\n activation: this.options.activation,\n });\n this.taskELMs.set(task.name, taskELM);\n }\n }\n /**\n * Train multi-task ELM\n * @param X Input features\n * @param yTaskData Map of task name to labels\n */\n train(X, yTaskData) {\n var _a, _b, _c, _d;\n // Step 1: Train shared layer (use all tasks)\n const allFeatures = this._extractSharedFeatures(X);\n // Step 2: Train each task-specific layer\n for (const task of this.tasks) {\n const taskLabels = yTaskData.get(task.name);\n if (!taskLabels)\n continue;\n const taskELM = this.taskELMs.get(task.name);\n const labelIndices = taskLabels.map(label => typeof label === 'number'\n ? label\n : task.categories.indexOf(label));\n // Train task-specific ELM on shared features\n (_b = (_a = taskELM).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, task.categories);\n (_d = (_c = taskELM).trainFromData) === null || _d === void 0 ? void 0 : _d.call(_c, allFeatures, labelIndices);\n }\n this.trained = true;\n }\n /**\n * Predict for all tasks\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const XArray = Array.isArray(X[0]) ? X : [X];\n const results = new Map();\n for (const x of XArray) {\n // Extract shared features\n const sharedFeatures = this._extractSharedFeatures([x])[0];\n // Predict for each task\n for (const task of this.tasks) {\n const taskELM = this.taskELMs.get(task.name);\n const taskPreds = ((_b = (_a = taskELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [sharedFeatures], topK)) || [];\n const taskResults = taskPreds.map((pred) => ({\n task: task.name,\n label: pred.label || task.categories[pred.index || 0],\n prob: pred.prob || 0,\n }));\n if (!results.has(task.name)) {\n results.set(task.name, []);\n }\n results.get(task.name).push(...taskResults);\n }\n }\n return results;\n }\n /**\n * Predict for a specific task\n */\n predictTask(x, taskName, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n const taskELM = this.taskELMs.get(taskName);\n if (!taskELM) {\n throw new Error(`Task ${taskName} not found`);\n }\n const XArray = Array.isArray(x[0]) ? x : [x];\n const results = [];\n for (const xi of XArray) {\n // Extract shared features\n const sharedFeatures = this._extractSharedFeatures([xi])[0];\n // Predict with task-specific ELM\n const taskPreds = ((_b = (_a = taskELM).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, [sharedFeatures], topK)) || [];\n results.push(...taskPreds.map((pred) => ({\n task: taskName,\n label: pred.label || this.tasks.find(t => t.name === taskName).categories[pred.index || 0],\n prob: pred.prob || 0,\n })));\n }\n return results;\n }\n /**\n * Extract features from shared layer\n */\n _extractSharedFeatures(X) {\n // Encode inputs if using tokenizer\n const encoded = this.options.useTokenizer\n ? 
X.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = this.sharedELM.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = this.sharedELM.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n })\n : X;\n // Extract hidden layer features\n return encoded.map(x => {\n var _a, _b, _c, _d;\n const hidden = (_b = (_a = this.sharedELM).buildHidden) === null || _b === void 0 ? void 0 : _b.call(_a, [x], (_c = this.sharedELM.model) === null || _c === void 0 ? void 0 : _c.W, (_d = this.sharedELM.model) === null || _d === void 0 ? void 0 : _d.b);\n return (hidden === null || hidden === void 0 ? void 0 : hidden[0]) ? Array.from(hidden[0]) : x;\n });\n }\n /**\n * Get task names\n */\n getTaskNames() {\n return this.tasks.map(t => t.name);\n }\n /**\n * Get task weights\n */\n getTaskWeights() {\n return new Map(this.tasks.map(t => [t.name, t.weight]));\n }\n}\n\n// sparse-elm.ts — Sparse ELM with L1/L2 regularization and feature selection\n// Efficient for high-dimensional data with interpretability\n/**\n * Sparse ELM with regularization and feature selection\n * Features:\n * - L1/L2/Elastic net regularization\n * - Weight pruning for sparsity\n * - Feature importance ranking\n * - Interpretable models\n */\nclass SparseELM {\n constructor(options) {\n var _a, _b, _c, _d, _e, _f, _g;\n this.trained = false;\n this.weightMask = []; // Track which weights are active\n this.featureImportance = [];\n // License check removed // Premium feature - requires valid license\n this.options = {\n categories: options.categories,\n hiddenUnits: (_a = options.hiddenUnits) !== null && _a !== void 0 ? _a : 256,\n maxLen: (_b = options.maxLen) !== null && _b !== void 0 ? _b : 100,\n useTokenizer: (_c = options.useTokenizer) !== null && _c !== void 0 ? _c : true,\n activation: (_d = options.activation) !== null && _d !== void 0 ? _d : 'relu',\n regularization: {\n type: options.regularization.type,\n lambda: options.regularization.lambda,\n alpha: (_e = options.regularization.alpha) !== null && _e !== void 0 ? _e : 0.5,\n },\n sparsityTarget: (_f = options.sparsityTarget) !== null && _f !== void 0 ? _f : 0.5,\n pruneThreshold: (_g = options.pruneThreshold) !== null && _g !== void 0 ? _g : 1e-6,\n };\n this.elm = new ELM({\n useTokenizer: this.options.useTokenizer ? true : undefined,\n hiddenUnits: this.options.hiddenUnits,\n categories: this.options.categories,\n maxLen: this.options.maxLen,\n activation: this.options.activation,\n });\n }\n /**\n * Train sparse ELM with regularization\n */\n train(X, y) {\n var _a, _b, _c, _d;\n // Prepare labels\n const labelIndices = y.map(label => typeof label === 'number'\n ? label\n : this.options.categories.indexOf(label));\n // Encode inputs\n const encoded = this.options.useTokenizer\n ? X.map(x => {\n var _a, _b, _c, _d;\n const enc = ((_b = (_a = this.elm.encoder) === null || _a === void 0 ? void 0 : _a.encode) === null || _b === void 0 ? void 0 : _b.call(_a, x)) || x;\n return ((_d = (_c = this.elm.encoder) === null || _c === void 0 ? void 0 : _c.normalize) === null || _d === void 0 ? void 0 : _d.call(_c, enc)) || enc;\n })\n : X;\n // Train base ELM\n (_b = (_a = this.elm).setCategories) === null || _b === void 0 ? void 0 : _b.call(_a, this.options.categories);\n (_d = (_c = this.elm).trainFromData) === null || _d === void 0 ? 
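/* Annotation on _applyRegularization below (formulas only, no change):\n *   l1       w <- sign(w) * max(0, |w| - lambda)   (soft threshold)\n *   l2       w <- w / (1 + lambda)                 (shrinkage)\n *   elastic  soft threshold with alpha * lambda, then shrink by\n *            (1 - alpha) * lambda\n * e.g. w = 0.3, lambda = 0.1: l1 gives 0.2, l2 gives about 0.27.\n */ 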
void 0 : _d.call(_c, encoded, labelIndices);\n // Apply regularization and sparsification\n this._applyRegularization();\n this._pruneWeights();\n this._computeFeatureImportance();\n this.trained = true;\n }\n /**\n * Predict with sparse model\n */\n predict(X, topK = 3) {\n var _a, _b;\n if (!this.trained) {\n throw new Error('Model must be trained before prediction');\n }\n // Use base ELM for prediction (sparsity is in weights)\n const XArray = Array.isArray(X[0]) ? X : [X];\n const preds = ((_b = (_a = this.elm).predictFromVector) === null || _b === void 0 ? void 0 : _b.call(_a, XArray, topK)) || [];\n return preds.map((pred) => ({\n label: pred.label || this.options.categories[pred.index || 0],\n prob: pred.prob || 0,\n }));\n }\n /**\n * Apply regularization to weights\n */\n _applyRegularization() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const lambda = this.options.regularization.lambda;\n const alpha = this.options.regularization.alpha || 0.5;\n // Apply regularization\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n const w = W[i][j];\n if (this.options.regularization.type === 'l1') {\n // L1: soft thresholding\n const sign = w >= 0 ? 1 : -1;\n W[i][j] = sign * Math.max(0, Math.abs(w) - lambda);\n }\n else if (this.options.regularization.type === 'l2') {\n // L2: shrinkage\n W[i][j] = w / (1 + lambda);\n }\n else if (this.options.regularization.type === 'elastic') {\n // Elastic net: combination\n const l1 = alpha * lambda;\n const l2 = (1 - alpha) * lambda;\n const sign = w >= 0 ? 1 : -1;\n const softThresh = sign * Math.max(0, Math.abs(w) - l1);\n W[i][j] = softThresh / (1 + l2);\n }\n }\n }\n }\n /**\n * Prune small weights for sparsity\n */\n _pruneWeights() {\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const threshold = this.options.pruneThreshold;\n this.weightMask = [];\n // Prune weights below threshold\n for (let i = 0; i < W.length; i++) {\n this.weightMask[i] = [];\n for (let j = 0; j < W[i].length; j++) {\n if (Math.abs(W[i][j]) < threshold) {\n W[i][j] = 0;\n this.weightMask[i][j] = false;\n }\n else {\n this.weightMask[i][j] = true;\n }\n }\n }\n // Enforce sparsity target\n const currentSparsity = this._computeSparsity();\n if (currentSparsity < this.options.sparsityTarget) {\n this._enforceSparsityTarget();\n }\n }\n /**\n * Compute current sparsity ratio\n */\n _computeSparsity() {\n if (this.weightMask.length === 0)\n return 0;\n let total = 0;\n let zeros = 0;\n for (const row of this.weightMask) {\n for (const active of row) {\n total++;\n if (!active)\n zeros++;\n }\n }\n return total > 0 ? zeros / total : 0;\n }\n /**\n * Enforce target sparsity by pruning more weights\n */\n _enforceSparsityTarget() {\n var _a;\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const target = this.options.sparsityTarget;\n // Collect all weights with their absolute values\n const weights = [];\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n if (Math.abs(W[i][j]) > 0) {\n weights.push({ i, j, abs: Math.abs(W[i][j]) });\n }\n }\n }\n // Sort by absolute value\n weights.sort((a, b) => a.abs - b.abs);\n // Prune smallest weights to reach target\n const totalWeights = W.length * (((_a = W[0]) === null || _a === void 0 ? 
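/* Usage sketch for the sparsity API (annotation; numbers are made up):\n *   const sp = new SparseELM({\n *     categories: ['a', 'b'],\n *     regularization: { type: 'l1', lambda: 0.001 },\n *     sparsityTarget: 0.7,\n *   });\n *   sp.train(X, y);\n *   sp.getSparsityStats();     // { sparsity, activeWeights, totalWeights }\n *   sp.getFeatureImportance(); // per-input scores normalised to [0, 1]\n */ 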
void 0 : _a.length) || 0);\n const targetZeros = Math.floor(totalWeights * target);\n const currentZeros = totalWeights - weights.length;\n const needToPrune = targetZeros - currentZeros;\n for (let k = 0; k < Math.min(needToPrune, weights.length); k++) {\n const { i, j } = weights[k];\n W[i][j] = 0;\n if (this.weightMask[i]) {\n this.weightMask[i][j] = false;\n }\n }\n }\n /**\n * Compute feature importance based on weight magnitudes\n */\n _computeFeatureImportance() {\n var _a;\n const model = this.elm.model;\n if (!model || !model.W)\n return;\n const W = model.W;\n const inputDim = ((_a = W[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;\n this.featureImportance = new Array(inputDim).fill(0);\n // Sum absolute weights for each input feature\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n this.featureImportance[j] += Math.abs(W[i][j]);\n }\n }\n // Normalize\n const max = Math.max(...this.featureImportance);\n if (max > 0) {\n for (let i = 0; i < this.featureImportance.length; i++) {\n this.featureImportance[i] /= max;\n }\n }\n }\n /**\n * Get feature importance scores\n */\n getFeatureImportance() {\n return [...this.featureImportance];\n }\n /**\n * Get sparsity statistics\n */\n getSparsityStats() {\n const model = this.elm.model;\n if (!model || !model.W) {\n return { sparsity: 0, activeWeights: 0, totalWeights: 0 };\n }\n const W = model.W;\n let total = 0;\n let active = 0;\n for (let i = 0; i < W.length; i++) {\n for (let j = 0; j < W[i].length; j++) {\n total++;\n if (Math.abs(W[i][j]) > this.options.pruneThreshold) {\n active++;\n }\n }\n }\n return {\n sparsity: total > 0 ? 1 - active / total : 0,\n activeWeights: active,\n totalWeights: total,\n };\n }\n}\n\n/**\n * SyntheticFieldStore - Storage for labeled samples\n * Supports insert, get, and sample operations\n */\nclass SyntheticFieldStore {\n constructor() {\n this.store = new Map();\n }\n /**\n * Insert a labeled sample into the store\n */\n insert(sample) {\n if (!this.store.has(sample.label)) {\n this.store.set(sample.label, []);\n }\n this.store.get(sample.label).push(sample.value);\n }\n /**\n * Insert multiple samples at once\n */\n insertMany(samples) {\n for (const sample of samples) {\n this.insert(sample);\n }\n }\n /**\n * Get all values for a given label\n */\n get(label) {\n return this.store.get(label) || [];\n }\n /**\n * Sample k values uniformly at random for a given label\n */\n sample(label, k = 1) {\n const values = this.get(label);\n if (values.length === 0) {\n return [];\n }\n const result = [];\n const indices = new Set();\n // Simple uniform random sampling without replacement\n while (result.length < k && indices.size < values.length) {\n const idx = Math.floor(Math.random() * values.length);\n if (!indices.has(idx)) {\n indices.add(idx);\n result.push(values[idx]);\n }\n }\n return result;\n }\n /**\n * Check if a label exists in the store\n */\n hasLabel(label) {\n return this.store.has(label);\n }\n /**\n * Get all labels in the store\n */\n getLabels() {\n return Array.from(this.store.keys());\n }\n /**\n * Get the count of samples for a label\n */\n count(label) {\n return this.get(label).length;\n }\n /**\n * Clear all data\n */\n clear() {\n this.store.clear();\n }\n}\n\n/**\n * RetrievalGenerator - Simple deterministic retrieval sampler\n * Uniform random sampling from stored labeled samples\n */\n/**\n * Seeded random number generator for deterministic testing\n */\nlet SeededRNG$1 = class SeededRNG {\n constructor(seed = Date.now()) {\n 
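/* Annotation: a plain linear congruential generator,\n *   seed <- (1664525 * seed + 1013904223) mod 2^32, next() = seed / 2^32,\n * so two RetrievalGenerator instances created with the same seed draw\n * identical sample sequences - useful for reproducible tests.\n */ 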
this.seed = seed;\n }\n next() {\n // Linear congruential generator\n this.seed = (this.seed * 1664525 + 1013904223) % Math.pow(2, 32);\n return this.seed / Math.pow(2, 32);\n }\n setSeed(seed) {\n this.seed = seed;\n }\n};\nclass RetrievalGenerator {\n constructor(seed) {\n // Initialize and require license before allowing generator use\n this.store = new SyntheticFieldStore();\n this.seed = seed;\n this.rng = new SeededRNG$1(seed);\n }\n /**\n * Ingest labeled samples into the store\n */\n ingest(samples) {\n this.store.insertMany(samples);\n }\n /**\n * Sample k values for a given label\n * Returns empty array if label doesn't exist or has no samples\n */\n sample(label, k = 1) {\n const values = this.store.get(label);\n if (values.length === 0) {\n return [];\n }\n const result = [];\n const availableIndices = Array.from({ length: values.length }, (_, i) => i);\n // Sample k values (or all if k > available)\n const sampleCount = Math.min(k, values.length);\n for (let i = 0; i < sampleCount; i++) {\n const randomIndex = Math.floor(this.rng.next() * availableIndices.length);\n const selectedIndex = availableIndices.splice(randomIndex, 1)[0];\n result.push(values[selectedIndex]);\n }\n return result;\n }\n /**\n * Get a single sample (convenience method)\n */\n sampleOne(label) {\n const samples = this.sample(label, 1);\n return samples.length > 0 ? samples[0] : null;\n }\n /**\n * Check if a label has samples\n */\n hasLabel(label) {\n return this.store.hasLabel(label) && this.store.count(label) > 0;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.store.getLabels();\n }\n /**\n * Reset the generator (clears store and optionally resets seed)\n */\n reset(seed) {\n this.store.clear();\n if (seed !== undefined) {\n this.seed = seed;\n this.rng.setSeed(seed);\n }\n }\n}\n\n/**\n * CharVocab - Character vocabulary builder\n * Builds a vocabulary from character sets and training data\n */\nclass CharVocab {\n constructor() {\n this.charToIndex = new Map();\n this.indexToChar = new Map();\n this.size = 0;\n }\n /**\n * Build vocabulary from a set of strings\n * @param samples Array of strings to build vocabulary from\n * @param charSet Optional predefined character set (e.g., alphanumeric + punctuation)\n */\n build(samples, charSet) {\n const chars = new Set();\n // Add padding character first (index 0) - use null character\n // This ensures index 0 is always padding\n chars.add('\\0');\n // Add predefined character set if provided\n if (charSet) {\n for (const char of charSet) {\n // Skip null character if it's in the charSet (we already added it)\n if (char !== '\\0') {\n chars.add(char);\n }\n }\n }\n // Add all characters from samples\n for (const sample of samples) {\n for (const char of sample) {\n // Skip null characters from samples (we use it for padding)\n if (char !== '\\0') {\n chars.add(char);\n }\n }\n }\n // Sort characters for consistent ordering, but keep null char at index 0\n const sortedChars = Array.from(chars).sort((a, b) => {\n // Ensure null char is always first\n if (a === '\\0')\n return -1;\n if (b === '\\0')\n return 1;\n return a.localeCompare(b);\n });\n // Build mappings\n this.charToIndex.clear();\n this.indexToChar.clear();\n this.size = sortedChars.length;\n sortedChars.forEach((char, index) => {\n this.charToIndex.set(char, index);\n this.indexToChar.set(index, char);\n });\n }\n /**\n * Get index for a character\n */\n getIndex(char) {\n const index = this.charToIndex.get(char);\n if (index === undefined) {\n throw new 
Error(`Character '${char}' not in vocabulary`);\n }\n return index;\n }\n /**\n * Get character for an index\n */\n getChar(index) {\n const char = this.indexToChar.get(index);\n if (char === undefined) {\n throw new Error(`Index ${index} not in vocabulary`);\n }\n return char;\n }\n /**\n * Check if character exists in vocabulary\n */\n hasChar(char) {\n return this.charToIndex.has(char);\n }\n /**\n * Get vocabulary size\n */\n getSize() {\n return this.size;\n }\n /**\n * Get all characters in vocabulary\n */\n getChars() {\n return Array.from(this.charToIndex.keys()).sort();\n }\n /**\n * Get default character set (alphanumeric + common punctuation)\n */\n static getDefaultCharSet() {\n return 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +\n ' !\"#$%&\\'()*+,-./:;<=>?@[\\\\]^_`{|}~';\n }\n}\n\n/**\n * FixedLength - Utilities for fixed-length padding and truncation\n */\nclass FixedLength {\n /**\n * Pad or truncate an array to a fixed length\n * @param arr Array to pad/truncate\n * @param length Target length\n * @param padValue Value to use for padding (default: 0)\n */\n static padOrTruncate(arr, length, padValue = 0) {\n if (arr.length === length) {\n return [...arr];\n }\n if (arr.length > length) {\n // Truncate\n return arr.slice(0, length);\n }\n // Pad\n const result = [...arr];\n while (result.length < length) {\n result.push(padValue);\n }\n return result;\n }\n /**\n * Pad or truncate a string to a fixed length\n * @param str String to pad/truncate\n * @param length Target length\n * @param padChar Character to use for padding (default: space)\n */\n static padOrTruncateString(str, length, padChar = ' ') {\n if (str.length === length) {\n return str;\n }\n if (str.length > length) {\n // Truncate\n return str.slice(0, length);\n }\n // Pad\n return str + padChar.repeat(length - str.length);\n }\n}\n\n/**\n * OneHot - One-hot encoding utilities\n */\nclass OneHot {\n /**\n * Encode an index as a one-hot vector\n * @param index Index to encode\n * @param size Size of the one-hot vector\n */\n static encode(index, size) {\n if (index < 0 || index >= size) {\n throw new Error(`Index ${index} out of range [0, ${size})`);\n }\n const vector = new Array(size).fill(0);\n vector[index] = 1;\n return vector;\n }\n /**\n * Decode a one-hot vector to an index\n * @param vector One-hot vector\n */\n static decode(vector) {\n const index = vector.indexOf(1);\n if (index === -1) {\n throw new Error('Invalid one-hot vector: no element equals 1');\n }\n return index;\n }\n /**\n * Encode multiple indices as one-hot vectors\n * @param indices Array of indices\n * @param size Size of each one-hot vector\n */\n static encodeBatch(indices, size) {\n return indices.map(idx => this.encode(idx, size));\n }\n /**\n * Decode multiple one-hot vectors to indices\n * @param vectors Array of one-hot vectors\n */\n static decodeBatch(vectors) {\n return vectors.map(vec => this.decode(vec));\n }\n}\n\n/**\n * StringEncoder - Encodes strings to vectors and decodes back\n * Compatible with ELM/KELM pipelines\n */\nclass StringEncoder {\n constructor(config) {\n this.config = Object.assign({ useOneHot: false }, config);\n this.vocab = new CharVocab();\n }\n /**\n * Build vocabulary from training samples\n */\n buildVocab(samples) {\n this.vocab.build(samples, this.config.charSet || CharVocab.getDefaultCharSet());\n }\n /**\n * Encode a string to a vector\n * @param str String to encode\n * @returns Encoded vector (either indices or one-hot)\n */\n encode(str) {\n if (this.vocab.getSize() 
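/* Round-trip sketch for StringEncoder (annotation; strings hypothetical):\n *   const enc = new StringEncoder({ maxLength: 8 });\n *   enc.buildVocab(['Alice', 'Bob']);\n *   const v = enc.encode('Bob'); // 8 char indices, zero-padded\n *   enc.decode(v);               // 'Bob' - decoding stops at padding index 0\n */ 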
=== 0) {\n throw new Error('Vocabulary not built. Call buildVocab() first.');\n }\n // Convert string to indices\n const indices = [];\n for (const char of str) {\n if (this.vocab.hasChar(char)) {\n indices.push(this.vocab.getIndex(char));\n }\n else {\n // For unknown characters, try to find a similar one or use space\n // If space is in vocab, use it; otherwise use 0 (which will be treated as padding)\n if (this.vocab.hasChar(' ')) {\n indices.push(this.vocab.getIndex(' '));\n }\n else {\n indices.push(0);\n }\n }\n }\n // Pad or truncate to fixed length\n const padded = FixedLength.padOrTruncate(indices, this.config.maxLength, 0);\n // Convert to one-hot if requested\n if (this.config.useOneHot) {\n const vocabSize = this.vocab.getSize();\n const oneHotVectors = [];\n for (const idx of padded) {\n oneHotVectors.push(...OneHot.encode(idx, vocabSize));\n }\n return oneHotVectors;\n }\n return padded;\n }\n /**\n * Decode a vector back to a string\n * @param vector Encoded vector\n * @returns Decoded string\n */\n decode(vector) {\n if (this.vocab.getSize() === 0) {\n throw new Error('Vocabulary not built. Call buildVocab() first.');\n }\n let indices;\n if (this.config.useOneHot) {\n // Decode one-hot vectors\n const vocabSize = this.vocab.getSize();\n indices = [];\n for (let i = 0; i < vector.length; i += vocabSize) {\n const oneHot = vector.slice(i, i + vocabSize);\n try {\n indices.push(OneHot.decode(oneHot));\n }\n catch (_a) {\n // If decoding fails, use argmax as fallback\n const maxIdx = oneHot.indexOf(Math.max(...oneHot));\n indices.push(maxIdx);\n }\n }\n // Truncate to maxLength\n indices = indices.slice(0, this.config.maxLength);\n }\n else {\n // Direct index-based decoding\n indices = vector.slice(0, this.config.maxLength);\n }\n // Convert indices to characters, stopping at first padding\n let result = '';\n const vocabSize = this.vocab.getSize();\n const paddingIdx = 0; // Padding is always index 0\n for (const idx of indices) {\n // Clamp index to valid range\n const clampedIdx = Math.max(0, Math.min(vocabSize - 1, Math.round(idx)));\n // Stop decoding at first padding index (0)\n if (clampedIdx === paddingIdx) {\n break;\n }\n // Try to get character for this index\n try {\n const char = this.vocab.getChar(clampedIdx);\n // Skip null characters and control characters (except space, tab, newline)\n if (char === '\\0' || (char.charCodeAt(0) < 32 && char !== ' ' && char !== '\\t' && char !== '\\n')) {\n break; // Stop at first invalid character\n }\n result += char;\n }\n catch (_b) {\n // Invalid index - stop decoding\n break;\n }\n }\n // Trim trailing whitespace but preserve internal spaces\n return result.trimEnd();\n }\n /**\n * Encode multiple strings\n */\n encodeBatch(strings) {\n return strings.map(str => this.encode(str));\n }\n /**\n * Decode multiple vectors\n */\n decodeBatch(vectors) {\n return vectors.map(vec => this.decode(vec));\n }\n /**\n * Get the output vector size\n */\n getVectorSize() {\n if (this.config.useOneHot) {\n return this.config.maxLength * this.vocab.getSize();\n }\n return this.config.maxLength;\n }\n /**\n * Get vocabulary size\n */\n getVocabSize() {\n return this.vocab.getSize();\n }\n /**\n * Get vocabulary\n */\n getVocab() {\n return this.vocab;\n }\n}\n\n/**\n * ELM utilities for OmegaSynth\n * Helper functions for working with ELM models\n */\n/**\n * Create one-hot vector for a label index\n */\nfunction oneHotLabel(labelIndex, numLabels) {\n const vector = new Array(numLabels).fill(0);\n if (labelIndex >= 0 && labelIndex < 
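/* Annotation (hypothetical values): oneHotLabel(1, 3) -> [0, 1, 0], and\n * generateNoiseVector(4, 42) -> four seed-deterministic values in [-1, 1];\n * ELMGenerator below conditions generation on concat(label one-hot, noise).\n */ 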
numLabels) {\n vector[labelIndex] = 1;\n }\n return vector;\n}\n/**\n * Generate random noise vector\n */\nfunction generateNoiseVector(size, seed) {\n const rng = seed !== undefined ? new SeededRNG(seed) : null;\n const noise = [];\n for (let i = 0; i < size; i++) {\n const value = rng ? rng.next() : Math.random();\n // Normalize to [-1, 1]\n noise.push(value * 2 - 1);\n }\n return noise;\n}\n/**\n * Seeded random number generator\n */\nclass SeededRNG {\n constructor(seed) {\n this.seed = seed;\n }\n next() {\n this.seed = (this.seed * 1664525 + 1013904223) % Math.pow(2, 32);\n return this.seed / Math.pow(2, 32);\n }\n}\n\n/**\n * Label-specific validation and cleaning utilities\n */\n/**\n * Validate and clean a generated string based on its label type\n */\nfunction validateForLabel(label, value) {\n if (!value || value.length === 0) {\n return { isValid: false, cleaned: '', reason: 'Empty value' };\n }\n // Get label-specific validator\n const validator = getValidatorForLabel(label);\n return validator(value);\n}\n/**\n * Get validator function for a specific label\n */\nfunction getValidatorForLabel(label) {\n switch (label) {\n case 'first_name':\n case 'last_name':\n return validateName;\n case 'phone_number':\n return validatePhoneNumber;\n case 'email':\n return validateEmail;\n case 'street_address':\n return validateStreetAddress;\n case 'city':\n case 'state':\n case 'country':\n return validateLocation;\n case 'company_name':\n case 'job_title':\n case 'product_name':\n return validateText;\n case 'color':\n return validateColor;\n case 'uuid':\n return validateUUID;\n case 'date':\n return validateDate;\n case 'credit_card_type':\n case 'device_type':\n return validateText;\n default:\n return validateGeneric;\n }\n}\n/**\n * Validate name (first_name, last_name)\n * Rules: Letters only, optional hyphens/apostrophes, no numbers\n */\nfunction validateName(value) {\n // First check for placeholder patterns in original value (before cleaning)\n value.toLowerCase();\n // Reject \"Name\" followed by numbers (e.g., \"Name97\", \"name123\")\n if (/^name\\d+$/i.test(value)) {\n return { isValid: false, cleaned: '', reason: 'Placeholder name with numbers' };\n }\n // Remove all non-letter characters except hyphens and apostrophes\n let cleaned = value.replace(/[^a-zA-Z\\-\\'\\s]/g, '');\n // Remove numbers completely\n cleaned = cleaned.replace(/[0-9]/g, '');\n // Remove excessive special characters\n cleaned = cleaned.replace(/[-']{2,}/g, '-'); // Multiple hyphens/apostrophes -> single\n cleaned = cleaned.replace(/^[-']+|[-']+$/g, ''); // Remove leading/trailing\n // Trim and normalize whitespace\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must be at least 2 characters and contain at least one letter\n if (cleaned.length < 2 || !/[a-zA-Z]/.test(cleaned)) {\n return { isValid: false, cleaned: '', reason: 'Too short or no letters' };\n }\n // Reject common placeholder names (case-insensitive) after cleaning\n const lowerCleaned = cleaned.toLowerCase();\n // Check for exact matches\n if (lowerCleaned === 'name' || lowerCleaned === 'firstname' || lowerCleaned === 'lastname' ||\n lowerCleaned === 'surname') {\n return { isValid: false, cleaned: '', reason: 'Placeholder name' };\n }\n // Check for \"name\" followed by very short variations\n if (lowerCleaned.startsWith('name') && lowerCleaned.length <= 6) {\n return { isValid: false, cleaned: '', reason: 'Placeholder name' };\n }\n // Max length check\n if (cleaned.length > 30) {\n cleaned = cleaned.substring(0, 30).trim();\n 
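/* Usage sketch for this validator family (annotation; inputs hypothetical):\n *   validateForLabel('first_name', 'Name97')     // invalid: placeholder name\n *   validateForLabel('first_name', 'Mary Jane3') // { isValid: true, cleaned: 'Mary Jane' }\n *   validateForLabel('email', 'a@b.co')          // { isValid: true, cleaned: 'a@b.co' }\n */ 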
}\n return { isValid: true, cleaned };\n}\n/**\n * Validate phone number\n * Rules: Digits, dashes, parentheses, dots, plus, spaces\n */\nfunction validatePhoneNumber(value) {\n // Keep only valid phone characters\n let cleaned = value.replace(/[^0-9\\-\\+\\(\\)\\.\\s]/g, '');\n // Remove excessive special characters\n cleaned = cleaned.replace(/[-\\.]{2,}/g, '-');\n cleaned = cleaned.replace(/\\s+/g, ' ');\n cleaned = cleaned.trim();\n // Count digits\n const digitCount = (cleaned.match(/\\d/g) || []).length;\n // Must have at least 7 digits (minimum phone number)\n if (digitCount < 7) {\n return { isValid: false, cleaned: '', reason: 'Too few digits' };\n }\n // Max length check\n if (cleaned.length > 25) {\n cleaned = cleaned.substring(0, 25).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate email\n * Rules: Must contain @, valid characters before and after\n */\nfunction validateEmail(value) {\n // Keep valid email characters\n let cleaned = value.replace(/[^a-zA-Z0-9@\\.\\-\\_]/g, '');\n // Must contain @\n if (!cleaned.includes('@')) {\n return { isValid: false, cleaned: '', reason: 'Missing @ symbol' };\n }\n const parts = cleaned.split('@');\n if (parts.length !== 2) {\n return { isValid: false, cleaned: '', reason: 'Invalid @ usage' };\n }\n const [local, domain] = parts;\n // Local part must have at least 1 character\n if (!local || local.length === 0) {\n return { isValid: false, cleaned: '', reason: 'Empty local part' };\n }\n // Domain must have at least 3 characters (x.y)\n if (!domain || domain.length < 3) {\n return { isValid: false, cleaned: '', reason: 'Invalid domain' };\n }\n // Domain must contain at least one dot\n if (!domain.includes('.')) {\n return { isValid: false, cleaned: '', reason: 'Domain missing dot' };\n }\n // Remove leading/trailing dots and hyphens\n const cleanLocal = local.replace(/^[\\.\\-]+|[\\.\\-]+$/g, '');\n const cleanDomain = domain.replace(/^[\\.\\-]+|[\\.\\-]+$/g, '');\n if (!cleanLocal || !cleanDomain) {\n return { isValid: false, cleaned: '', reason: 'Invalid format after cleaning' };\n }\n cleaned = `${cleanLocal}@${cleanDomain}`;\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50);\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate street address\n * Rules: Numbers, letters, spaces, common address characters\n */\nfunction validateStreetAddress(value) {\n // Keep valid address characters\n let cleaned = value.replace(/[^a-zA-Z0-9\\s\\-\\#\\.\\,]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 5 characters\n if (cleaned.length < 5) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate location (city, state, country)\n * Rules: Mostly letters, optional spaces/hyphens\n */\nfunction validateLocation(value) {\n // Keep letters, spaces, hyphens, apostrophes\n let cleaned = value.replace(/[^a-zA-Z\\s\\-\\']/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 2 characters and contain letters\n if (cleaned.length < 2 || !/[a-zA-Z]/.test(cleaned)) {\n return { isValid: false, cleaned: '', reason: 'Too short or no letters' };\n }\n // Max length check\n if (cleaned.length > 30) {\n cleaned = cleaned.substring(0, 30).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate text (company_name, job_title, product_name)\n * Rules: Letters, 
numbers, spaces, common punctuation\n */\nfunction validateText(value) {\n // Keep alphanumeric and common punctuation\n let cleaned = value.replace(/[^a-zA-Z0-9\\s\\-\\'\\.\\,]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 2 characters\n if (cleaned.length < 2) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 50) {\n cleaned = cleaned.substring(0, 50).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate color\n * Rules: Letters only, maybe spaces\n */\nfunction validateColor(value) {\n // Keep letters and spaces only\n let cleaned = value.replace(/[^a-zA-Z\\s]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n // Must have at least 3 characters\n if (cleaned.length < 3) {\n return { isValid: false, cleaned: '', reason: 'Too short' };\n }\n // Max length check\n if (cleaned.length > 20) {\n cleaned = cleaned.substring(0, 20).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate UUID\n * Rules: Should follow UUID format (8-4-4-4-12 hex digits with dashes)\n */\nfunction validateUUID(value) {\n // Keep hex characters and dashes\n let cleaned = value.replace(/[^0-9a-fA-F\\-]/g, '');\n // Try to format as UUID if it has enough characters\n const hexOnly = cleaned.replace(/-/g, '');\n if (hexOnly.length >= 32) {\n // Format as UUID: 8-4-4-4-12\n const formatted = [\n hexOnly.substring(0, 8),\n hexOnly.substring(8, 12),\n hexOnly.substring(12, 16),\n hexOnly.substring(16, 20),\n hexOnly.substring(20, 32)\n ].join('-');\n cleaned = formatted;\n }\n // Must have at least 32 hex characters\n const hexCount = cleaned.replace(/-/g, '').length;\n if (hexCount < 32) {\n return { isValid: false, cleaned: '', reason: 'Too few hex characters' };\n }\n return { isValid: true, cleaned };\n}\n/**\n * Validate date\n * Rules: Should follow date format (YYYY-MM-DD or similar)\n */\nfunction validateDate(value) {\n // Keep digits, dashes, slashes\n let cleaned = value.replace(/[^0-9\\-\\/]/g, '');\n // Must have at least 8 digits (YYYYMMDD)\n const digitCount = (cleaned.match(/\\d/g) || []).length;\n if (digitCount < 8) {\n return { isValid: false, cleaned: '', reason: 'Too few digits' };\n }\n // Max length check\n if (cleaned.length > 20) {\n cleaned = cleaned.substring(0, 20).trim();\n }\n return { isValid: true, cleaned };\n}\n/**\n * Generic validator for unknown labels\n */\nfunction validateGeneric(value) {\n // Remove control characters\n let cleaned = value.replace(/[\\x00-\\x1F\\x7F]/g, '');\n cleaned = cleaned.trim().replace(/\\s+/g, ' ');\n if (cleaned.length < 1) {\n return { isValid: false, cleaned: '', reason: 'Empty after cleaning' };\n }\n return { isValid: true, cleaned };\n}\n\n/**\n * PatternCorrector - Post-processing pattern matching and correction\n * Learns patterns from training data and applies them to generated samples\n */\nclass PatternCorrector {\n constructor() {\n this.patterns = new Map();\n }\n /**\n * Learn patterns from training data\n */\n learnPatterns(samples) {\n const byLabel = new Map();\n // Group samples by label\n for (const sample of samples) {\n if (!byLabel.has(sample.label)) {\n byLabel.set(sample.label, []);\n }\n byLabel.get(sample.label).push(sample.value);\n }\n // Learn patterns for each label\n for (const [label, values] of byLabel.entries()) {\n this.learnPattern(label, values);\n }\n }\n /**\n * Learn pattern for a specific label\n */\n learnPattern(label, examples) {\n if (examples.length === 0)\n return;\n // 
Extract common prefixes (first 1-3 characters)\n const prefixCounts = new Map();\n const suffixCounts = new Map();\n const charFreq = new Map();\n const lengths = [];\n for (const example of examples) {\n lengths.push(example.length);\n // Prefixes\n for (let len = 1; len <= Math.min(3, example.length); len++) {\n const prefix = example.substring(0, len);\n prefixCounts.set(prefix, (prefixCounts.get(prefix) || 0) + 1);\n }\n // Suffixes\n for (let len = 1; len <= Math.min(3, example.length); len++) {\n const suffix = example.substring(example.length - len);\n suffixCounts.set(suffix, (suffixCounts.get(suffix) || 0) + 1);\n }\n // Character frequency\n for (const char of example) {\n charFreq.set(char, (charFreq.get(char) || 0) + 1);\n }\n }\n // Get common prefixes (appear in >10% of examples - lowered from 20% for better pattern matching)\n const commonPrefixes = Array.from(prefixCounts.entries())\n .filter(([_, count]) => count / examples.length > 0.1)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 15) // Increased from 10 to 15\n .map(([prefix]) => prefix);\n // Get common suffixes (appear in >10% of examples - lowered from 20% for better pattern matching)\n const commonSuffixes = Array.from(suffixCounts.entries())\n .filter(([_, count]) => count / examples.length > 0.1)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 15) // Increased from 10 to 15\n .map(([suffix]) => suffix);\n // Normalize character frequencies\n const totalChars = Array.from(charFreq.values()).reduce((a, b) => a + b, 0);\n for (const [char, count] of charFreq.entries()) {\n charFreq.set(char, count / totalChars);\n }\n this.patterns.set(label, {\n label,\n examples,\n commonPrefixes,\n commonSuffixes,\n charFrequency: charFreq,\n lengthDistribution: lengths,\n });\n }\n /**\n * Correct a generated string using learned patterns\n */\n correct(generated, label) {\n const pattern = this.patterns.get(label);\n if (!pattern) {\n return generated; // No pattern learned, return as-is\n }\n let corrected = generated;\n // 1. Check if it matches a known example (exact match)\n if (pattern.examples.includes(generated)) {\n return generated; // Already perfect\n }\n // 2. Check prefix/suffix patterns\n const hasValidPrefix = pattern.commonPrefixes.some(prefix => corrected.toLowerCase().startsWith(prefix.toLowerCase()));\n pattern.commonSuffixes.some(suffix => corrected.toLowerCase().endsWith(suffix.toLowerCase()));\n // 3. If no valid prefix, try to fix it\n if (!hasValidPrefix && pattern.commonPrefixes.length > 0) {\n const mostCommonPrefix = pattern.commonPrefixes[0];\n // Only fix if the generated string is very different\n if (corrected.length > 0 && !corrected.toLowerCase().startsWith(mostCommonPrefix[0].toLowerCase())) ;\n }\n // 4. Check character frequency (remove unlikely characters)\n const charFreq = pattern.charFrequency;\n let cleaned = '';\n for (const char of corrected) {\n const freq = charFreq.get(char) || 0;\n // Keep character if it appears in >0.5% of training data (lowered from 1%), or if it's common (space, etc.)\n if (freq > 0.005 || /[a-zA-Z0-9\\s]/.test(char)) {\n cleaned += char;\n }\n }\n if (cleaned.length > 0) {\n corrected = cleaned;\n }\n // 5. 
Check length distribution\n pattern.lengthDistribution.reduce((a, b) => a + b, 0) / pattern.lengthDistribution.length;\n Math.min(...pattern.lengthDistribution);\n const maxLength = Math.max(...pattern.lengthDistribution);\n // Truncate if too long\n if (corrected.length > maxLength * 1.5) {\n corrected = corrected.substring(0, Math.floor(maxLength * 1.2));\n }\n return corrected;\n }\n /**\n * Score how well a generated string matches the pattern\n */\n score(generated, label) {\n const pattern = this.patterns.get(label);\n if (!pattern) {\n return 0.5; // Unknown pattern, neutral score\n }\n let score = 0;\n let factors = 0;\n // 1. Exact match bonus\n if (pattern.examples.includes(generated)) {\n return 1.0; // Perfect match\n }\n // 2. Prefix match (30% weight)\n const prefixMatch = pattern.commonPrefixes.some(prefix => generated.toLowerCase().startsWith(prefix.toLowerCase()));\n score += prefixMatch ? 0.3 : 0;\n factors++;\n // 3. Suffix match (20% weight)\n const suffixMatch = pattern.commonSuffixes.some(suffix => generated.toLowerCase().endsWith(suffix.toLowerCase()));\n score += suffixMatch ? 0.2 : 0;\n factors++;\n // 4. Character frequency match (30% weight)\n const charFreq = pattern.charFrequency;\n let charScore = 0;\n let charCount = 0;\n for (const char of generated) {\n const freq = charFreq.get(char) || 0;\n charScore += freq;\n charCount++;\n }\n score += (charCount > 0 ? charScore / charCount : 0) * 0.3;\n factors++;\n // 5. Length match (20% weight)\n const avgLength = pattern.lengthDistribution.reduce((a, b) => a + b, 0) / pattern.lengthDistribution.length;\n const lengthDiff = Math.abs(generated.length - avgLength) / avgLength;\n const lengthScore = Math.max(0, 1 - lengthDiff);\n score += lengthScore * 0.2;\n factors++;\n return factors > 0 ? score / factors : 0;\n }\n /**\n * Get pattern for a label\n */\n getPattern(label) {\n return this.patterns.get(label);\n }\n}\n\n/**\n * SequenceContext - Add sequence context to generation\n * Uses previous characters to inform next character prediction\n */\nclass SequenceContext {\n constructor(n = 3) {\n this.ngramPatterns = new Map();\n this.n = n;\n }\n /**\n * Learn n-gram patterns from training data\n */\n learnPatterns(samples) {\n this.ngramPatterns.clear();\n for (const sample of samples) {\n // Extract n-grams\n for (let i = 0; i <= sample.length - this.n; i++) {\n const ngram = sample.substring(i, i + this.n - 1); // Context (n-1 chars)\n const nextChar = sample[i + this.n - 1]; // Next character\n if (!this.ngramPatterns.has(ngram)) {\n this.ngramPatterns.set(ngram, new Map());\n }\n const charMap = this.ngramPatterns.get(ngram);\n charMap.set(nextChar, (charMap.get(nextChar) || 0) + 1);\n }\n }\n }\n /**\n * Get next character probabilities given context\n */\n getNextCharProbs(context) {\n // Use last n-1 characters as context\n const ctx = context.length >= this.n - 1\n ? 
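/* Annotation (hypothetical data): with n = 3, learnPatterns(['cat', 'car'])\n * records context 'ca' -> { t: 1, r: 1 }, so getNextCharProbs('ca') returns\n * 0.5 for each and scoreChar('ca', 't') is 0.5.\n */ 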
context.substring(context.length - (this.n - 1))\n : context;\n const charCounts = this.ngramPatterns.get(ctx);\n if (!charCounts || charCounts.size === 0) {\n return new Map();\n }\n // Convert counts to probabilities\n const total = Array.from(charCounts.values()).reduce((a, b) => a + b, 0);\n const probs = new Map();\n for (const [char, count] of charCounts.entries()) {\n probs.set(char, count / total);\n }\n return probs;\n }\n /**\n * Suggest next character based on context\n */\n suggestNextChar(context) {\n const probs = this.getNextCharProbs(context);\n if (probs.size === 0) {\n return null;\n }\n // Return most likely character\n let bestChar = '';\n let bestProb = 0;\n for (const [char, prob] of probs.entries()) {\n if (prob > bestProb) {\n bestProb = prob;\n bestChar = char;\n }\n }\n return bestChar;\n }\n /**\n * Score how well a character fits the context\n */\n scoreChar(context, char) {\n const probs = this.getNextCharProbs(context);\n return probs.get(char) || 0;\n }\n}\n\n/**\n * ELMGenerator - Label-conditioned string generator using ELM\n * Trains an ELM to generate encoded strings based on labels + noise\n */\nclass ELMGenerator {\n constructor(config) {\n var _a;\n this.elm = null;\n this.labels = [];\n this.patternCorrector = null;\n this.sequenceContext = null;\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ hiddenUnits: 128, activation: 'relu', ridgeLambda: 0.01, noiseSize: 32, useOneHot: false, useClassification: false, usePatternCorrection: true }, config);\n this.noiseSize = this.config.noiseSize;\n this.useClassification = this.config.useClassification;\n this.encoder = new StringEncoder({\n maxLength: config.maxLength,\n useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? 
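/* End-to-end usage sketch for ELMGenerator (annotation; samples below are\n * hypothetical):\n *   const gen = new ELMGenerator({ maxLength: 12, seed: 7 });\n *   gen.train([\n *     { label: 'city', value: 'Springfield' },\n *     { label: 'city', value: 'Shelbyville' },\n *   ]);\n *   gen.generate('city'); // validated string, or '' if all retries fail\n */ 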
_a : false, // Default to false for memory efficiency\n });\n if (this.config.usePatternCorrection) {\n this.patternCorrector = new PatternCorrector();\n }\n // Always use sequence context for better generation\n this.sequenceContext = new SequenceContext(3); // 3-grams\n }\n /**\n * Train the ELM generator on labeled samples\n */\n train(samples) {\n if (samples.length === 0) {\n throw new Error('Cannot train on empty dataset');\n }\n // Extract unique labels\n const uniqueLabels = Array.from(new Set(samples.map(s => s.label)));\n this.labels = uniqueLabels;\n // Extract all values for vocabulary building\n const allValues = samples.map(s => s.value);\n this.encoder.buildVocab(allValues);\n // Learn patterns if pattern correction is enabled\n if (this.patternCorrector) {\n this.patternCorrector.learnPatterns(samples);\n }\n // Learn sequence context\n if (this.sequenceContext) {\n this.sequenceContext.learnPatterns(allValues);\n }\n // Build training data\n const X = [];\n const Y = [];\n for (const sample of samples) {\n const labelIndex = this.labels.indexOf(sample.label);\n if (labelIndex === -1) {\n continue;\n }\n // Input: concat(oneHot(label), noiseVector)\n const labelOneHot = oneHotLabel(labelIndex, this.labels.length);\n const noise = generateNoiseVector(this.noiseSize, this.config.seed);\n const inputVector = [...labelOneHot, ...noise];\n X.push(inputVector);\n // Target: encoded(value)\n const encodedValue = this.encoder.encode(sample.value);\n Y.push(encodedValue);\n }\n if (X.length === 0) {\n throw new Error('No valid training samples after processing');\n }\n // Create ELM config\n const inputSize = this.labels.length + this.noiseSize;\n this.encoder.getVectorSize();\n const elmConfig = {\n useTokenizer: false, // Numeric mode\n inputSize: inputSize,\n categories: this.useClassification ? [] : [], // For classification, we'll handle it differently\n hiddenUnits: this.config.hiddenUnits,\n activation: this.config.activation,\n // Use lower regularization for better pattern learning\n ridgeLambda: this.config.ridgeLambda * 0.1, // Reduce regularization\n task: this.useClassification ? 'classification' : 'regression',\n };\n // Create and train ELM - resolve constructor robustly across CJS/ESM shapes\n // Replace dynamic require with direct constructor\n this.elm = new ELM(elmConfig);\n this.elm.trainFromData(X, Y);\n }\n /**\n * Generate a string for a given label\n * @param label Label to generate for\n * @param noiseSeed Optional seed for noise generation (for deterministic output)\n */\n generate(label, noiseSeed) {\n var _a;\n if (!this.elm) {\n throw new Error('Model not trained. Call train() first.');\n }\n const labelIndex = this.labels.indexOf(label);\n if (labelIndex === -1) {\n throw new Error(`Label '${label}' not found in training data`);\n }\n // Create input: concat(oneHot(label), noiseVector)\n const labelOneHot = oneHotLabel(labelIndex, this.labels.length);\n const noise = generateNoiseVector(this.noiseSize, noiseSeed !== undefined ? 
noiseSeed : this.config.seed);\n const inputVector = [...labelOneHot, ...noise];\n // Predict based on mode\n let decoded;\n if (this.useClassification && this.config.useOneHot && typeof this.elm.predictProbaFromVector === 'function') {\n // Classification mode with one-hot: use probabilities\n const vocabSize = this.encoder.getVocabSize();\n const maxLength = this.config.maxLength;\n // Get probabilities for each position\n const probs = this.elm.predictProbaFromVector(inputVector);\n // Reshape to [maxLength, vocabSize] and use argmax\n const indices = [];\n for (let pos = 0; pos < maxLength; pos++) {\n const posProbs = probs.slice(pos * vocabSize, (pos + 1) * vocabSize);\n const maxIdx = posProbs.indexOf(Math.max(...posProbs));\n indices.push(maxIdx);\n }\n decoded = this.encoder.decode(indices);\n }\n else {\n // Regression mode: use logits and round\n const prediction = this.elm.predictLogitsFromVector(inputVector);\n // Convert logits to indices with proper quantization\n const vocabSize = this.encoder.getVocabSize();\n const indices = prediction.map(val => {\n // Clamp value to reasonable range first (prevent extreme values)\n const clamped = Math.max(-vocabSize, Math.min(vocabSize * 2, val));\n // Round to nearest integer\n const rounded = Math.round(clamped);\n // Clamp to valid vocabulary range [0, vocabSize-1]\n const idx = Math.max(0, Math.min(vocabSize - 1, rounded));\n return idx;\n });\n decoded = this.encoder.decode(indices);\n }\n // Apply pattern correction if enabled\n let corrected = decoded;\n if (this.patternCorrector) {\n corrected = this.patternCorrector.correct(decoded, label);\n }\n // Apply sequence context refinement\n if (this.sequenceContext && corrected.length > 0) {\n corrected = this.refineWithSequenceContext(corrected, label);\n }\n // Validate and clean the decoded string using label-specific rules\n const validation = validateForLabel(label, corrected);\n // If validation fails, try to generate again with different noise (up to 3 attempts)\n if (!validation.isValid) {\n for (let attempt = 0; attempt < 3; attempt++) {\n const baseSeed = noiseSeed !== undefined ? noiseSeed : ((_a = this.config.seed) !== null && _a !== void 0 ? 
_a : Date.now());\n const newNoise = generateNoiseVector(this.noiseSize, baseSeed + attempt + 1000);\n const newInputVector = [...labelOneHot, ...newNoise];\n let newDecoded;\n if (this.useClassification && this.config.useOneHot && typeof this.elm.predictProbaFromVector === 'function') {\n const vocabSize = this.encoder.getVocabSize();\n const maxLength = this.config.maxLength;\n const probs = this.elm.predictProbaFromVector(newInputVector);\n const newIndices = [];\n for (let pos = 0; pos < maxLength; pos++) {\n const posProbs = probs.slice(pos * vocabSize, (pos + 1) * vocabSize);\n const maxIdx = posProbs.indexOf(Math.max(...posProbs));\n newIndices.push(maxIdx);\n }\n newDecoded = this.encoder.decode(newIndices);\n }\n else {\n const newPrediction = this.elm.predictLogitsFromVector(newInputVector);\n const vocabSize = this.encoder.getVocabSize();\n const newIndices = newPrediction.map(val => {\n const clamped = Math.max(-vocabSize, Math.min(vocabSize * 2, val));\n const rounded = Math.round(clamped);\n return Math.max(0, Math.min(vocabSize - 1, rounded));\n });\n newDecoded = this.encoder.decode(newIndices);\n }\n // Apply pattern correction\n if (this.patternCorrector) {\n newDecoded = this.patternCorrector.correct(newDecoded, label);\n }\n const newValidation = validateForLabel(label, newDecoded);\n if (newValidation.isValid) {\n return newValidation.cleaned;\n }\n }\n // If all attempts fail, return empty string\n return '';\n }\n return validation.cleaned;\n }\n /**\n * Generate multiple strings for a label with confidence-based selection\n */\n generateBatch(label, count) {\n const candidates = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 10; // Allow up to 10x attempts to get valid unique samples\n // Generate candidates with scoring\n while (attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n try {\n const generated = this.generate(label, seed);\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n // Score the candidate\n let score = 1.0;\n // Pattern match score\n if (this.patternCorrector) {\n score = this.patternCorrector.score(generated, label);\n }\n // Validation score (valid = 1.0, invalid = 0.0)\n const validation = validateForLabel(label, generated);\n if (!validation.isValid) {\n score = 0;\n }\n candidates.push({ value: generated, score });\n seen.add(generated.toLowerCase());\n }\n }\n catch (error) {\n // Skip errors\n }\n attempts++;\n }\n // Sort by score and return top candidates\n candidates.sort((a, b) => b.score - a.score);\n return candidates.slice(0, count).map(c => c.value);\n }\n /**\n * Refine generated string using sequence context\n */\n refineWithSequenceContext(generated, label) {\n if (!this.sequenceContext || generated.length === 0) {\n return generated;\n }\n // Try to improve the string by checking sequence context\n let refined = '';\n for (let i = 0; i < generated.length; i++) {\n const context = refined; // Use what we've built so far\n const currentChar = generated[i];\n // Check if current char fits the context\n const contextScore = this.sequenceContext.scoreChar(context, currentChar);\n // If score is very low, try to suggest better character\n if (contextScore < 0.1 && context.length > 0) {\n const suggested = this.sequenceContext.suggestNextChar(context);\n if (suggested && suggested !== currentChar) {\n // Only replace if it's a significant improvement\n refined += suggested;\n }\n else {\n refined += currentChar;\n }\n }\n else {\n refined += currentChar;\n }\n // Stop if we hit padding or invalid character\n if (currentChar === '\\0' || currentChar.charCodeAt(0) === 0) {\n break;\n }\n }\n return refined;\n }\n /**\n * Get all trained labels\n */\n getLabels() {\n return [...this.labels];\n }\n /**\n * Check if model is trained\n */\n isTrained() {\n return this.elm !== null;\n }\n}\n\n/**\n * HybridGenerator - Blends Retrieval + ELM jitter for realism + variation\n * 1. Retrieve real sample\n * 2. Encode\n * 3. Apply ELM noise\n * 4. Decode\n */\nclass HybridGenerator {\n constructor(config) {\n var _a;\n this.patternCorrector = null;\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.01, noiseSize: 32, jitterStrength: 0.05, exactMode: false, useOneHot: false, useClassification: false, usePatternCorrection: true }, config);\n // If exact mode, set jitter to 0\n if (this.config.exactMode) {\n this.jitterStrength = 0;\n }\n else {\n this.jitterStrength = this.config.jitterStrength;\n }\n this.retrieval = new RetrievalGenerator(config.seed);\n this.elm = new ELMGenerator({\n maxLength: config.maxLength,\n hiddenUnits: this.config.elmHiddenUnits,\n activation: this.config.elmActivation,\n ridgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n useOneHot: this.config.useOneHot,\n useClassification: this.config.useClassification,\n usePatternCorrection: this.config.usePatternCorrection,\n seed: config.seed,\n });\n this.encoder = new StringEncoder({\n maxLength: config.maxLength,\n useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? 
_a : false, // Default to false for memory efficiency\n });\n if (this.config.usePatternCorrection) {\n this.patternCorrector = new PatternCorrector();\n }\n }\n /**\n * Train the hybrid generator on labeled samples\n */\n train(samples) {\n // Train retrieval\n this.retrieval.ingest(samples);\n // Build encoder vocabulary\n const allValues = samples.map(s => s.value);\n this.encoder.buildVocab(allValues);\n // Train ELM for jittering\n this.elm.train(samples);\n // Learn patterns if pattern correction is enabled\n if (this.patternCorrector) {\n this.patternCorrector.learnPatterns(samples);\n }\n }\n /**\n * Generate a hybrid sample (retrieval + jitter)\n * @param label Label to generate for\n * @param noiseSeed Optional seed for deterministic output\n */\n generate(label, noiseSeed) {\n // Step 1: Retrieve real sample\n const retrieved = this.retrieval.sampleOne(label);\n if (!retrieved) {\n // Fallback to pure ELM if no retrieval available\n return this.elm.generate(label, noiseSeed);\n }\n // Step 2: Encode\n const encoded = this.encoder.encode(retrieved);\n // Step 3: Apply ELM noise/jitter\n // Generate a jittered version using ELM\n const jittered = this.applyJitter(encoded, label, noiseSeed);\n // Step 4: Decode\n const decoded = this.encoder.decode(jittered);\n // Step 5: Apply pattern correction if enabled\n let corrected = decoded;\n if (this.patternCorrector) {\n corrected = this.patternCorrector.correct(decoded, label);\n }\n // Step 6: Validate and clean using label-specific rules\n const validation = validateForLabel(label, corrected);\n // If validation fails, try jittering again with different noise (up to 2 attempts)\n if (!validation.isValid) {\n for (let attempt = 0; attempt < 2; attempt++) {\n const newSeed = noiseSeed !== undefined ? 
noiseSeed + attempt + 1000 : undefined;\n const newJittered = this.applyJitter(encoded, label, newSeed);\n const newDecoded = this.encoder.decode(newJittered);\n let newCorrected = newDecoded;\n if (this.patternCorrector) {\n newCorrected = this.patternCorrector.correct(newDecoded, label);\n }\n const newValidation = validateForLabel(label, newCorrected);\n if (newValidation.isValid) {\n return newValidation.cleaned;\n }\n }\n // If all attempts fail, return original (retrieved is always valid)\n return retrieved;\n }\n return validation.cleaned;\n }\n /**\n * Apply jitter to an encoded vector\n */\n applyJitter(encoded, label, noiseSeed) {\n // Generate ELM output for the label\n const elmOutput = this.generateELMVector(label, noiseSeed);\n // If ELM output is empty or invalid, return original (no jitter)\n if (!elmOutput || elmOutput.length === 0 || elmOutput.every(v => v === 0)) {\n return encoded;\n }\n // Blend: (1 - jitterStrength) * original + jitterStrength * elmOutput\n // Use smaller jitter to preserve more of the original\n const effectiveJitter = Math.min(this.jitterStrength, 0.05); // Cap at 5% jitter\n const jittered = encoded.map((val, idx) => {\n const elmVal = elmOutput[idx] || 0;\n return (1 - effectiveJitter) * val + effectiveJitter * elmVal;\n });\n // Convert blended continuous values to integer indices\n // Round and clamp to valid vocabulary range\n const vocabSize = this.encoder.getVocabSize();\n const indices = jittered.map(val => {\n // Clamp value first\n const clamped = Math.max(0, Math.min(vocabSize - 1, val));\n const idx = Math.round(clamped);\n return Math.max(0, Math.min(vocabSize - 1, idx));\n });\n return indices;\n }\n /**\n * Generate an ELM vector for jittering\n */\n generateELMVector(label, noiseSeed) {\n try {\n // Try to get ELM prediction\n const elmGenerated = this.elm.generate(label, noiseSeed);\n // Only encode if we got a non-empty string\n if (elmGenerated && elmGenerated.length > 0) {\n return this.encoder.encode(elmGenerated);\n }\n // If empty, return zero vector (no jitter)\n return new Array(this.encoder.getVectorSize()).fill(0);\n }\n catch (_a) {\n // If ELM fails, return zero vector (no jitter)\n return new Array(this.encoder.getVectorSize()).fill(0);\n }\n }\n /**\n * Generate multiple hybrid samples\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 5; // Allow up to 5x attempts to get valid unique samples\n while (results.length < count && attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n const generated = this.generate(label, seed);\n // Only add if valid, non-empty, and unique\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n attempts++;\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.retrieval.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n return this.retrieval.hasLabel(this.getLabels()[0] || '') && this.elm.isTrained();\n }\n}\n\n/**\n * ExactGenerator - Perfect retrieval with pattern-based variations\n * Provides 100% realistic data by using exact training samples + pattern matching\n */\nclass ExactGenerator {\n constructor(config = {}) {\n this.trainingSamples = [];\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ usePatternMatching: true, maxVariations: 10 }, config);\n this.retrieval = new RetrievalGenerator(config.seed);\n this.patternCorrector = new PatternCorrector();\n }\n /**\n * Train the exact generator\n */\n train(samples) {\n this.trainingSamples = samples;\n this.retrieval.ingest(samples);\n if (this.config.usePatternMatching) {\n this.patternCorrector.learnPatterns(samples);\n }\n }\n /**\n * Generate an exact sample (100% realistic)\n */\n generate(label, seed) {\n // 1. Try exact retrieval first (100% realistic)\n const exact = this.retrieval.sampleOne(label);\n if (exact) {\n return exact; // ✅ 100% realistic\n }\n // 2. If pattern matching enabled, try pattern-based generation\n if (this.config.usePatternMatching) {\n const pattern = this.patternCorrector.getPattern(label);\n if (pattern && pattern.examples.length > 0) {\n // Return a random example from the pattern\n const randomIndex = seed !== undefined\n ? seed % pattern.examples.length\n : Math.floor(Math.random() * pattern.examples.length);\n return pattern.examples[randomIndex];\n }\n }\n throw new Error(`No samples found for label: ${label}`);\n }\n /**\n * Generate with pattern-based variations\n */\n generateWithVariation(label, seed) {\n // Get base sample\n const base = this.generate(label, seed);\n if (!this.config.usePatternMatching) {\n return base;\n }\n // Try to create variations using pattern matching\n const pattern = this.patternCorrector.getPattern(label);\n if (!pattern) {\n return base;\n }\n // Simple variation: combine prefix from one example with suffix from another\n if (pattern.examples.length >= 2) {\n const seed1 = seed !== undefined ? 
seed : Date.now();\n const seed2 = seed1 + 1000;\n const idx1 = seed1 % pattern.examples.length;\n const idx2 = seed2 % pattern.examples.length;\n if (idx1 !== idx2) {\n const ex1 = pattern.examples[idx1];\n const ex2 = pattern.examples[idx2];\n // Try combining if they're similar length\n if (Math.abs(ex1.length - ex2.length) <= 2) {\n const mid = Math.floor(ex1.length / 2);\n const variation = ex1.substring(0, mid) + ex2.substring(mid);\n // Validate the variation\n const validation = validateForLabel(label, variation);\n if (validation.isValid) {\n // Score the variation\n const score = this.patternCorrector.score(variation, label);\n if (score > 0.6) { // Only use if reasonably good\n return validation.cleaned;\n }\n }\n }\n }\n }\n return base;\n }\n /**\n * Generate multiple exact samples\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n // Try to get unique exact samples\n for (let i = 0; i < count * 2 && results.length < count; i++) {\n const seed = this.config.seed !== undefined\n ? this.config.seed + i\n : Date.now() + i;\n let generated;\n if (i < count && this.config.usePatternMatching) {\n // First half: exact matches\n generated = this.generate(label, seed);\n }\n else {\n // Second half: try variations\n generated = this.generateWithVariation(label, seed);\n }\n if (generated && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.retrieval.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n return this.retrieval.getLabels().length > 0;\n }\n}\n\n/**\n * PerfectGenerator - Best of all worlds\n * Combines exact retrieval, pattern matching, and improved ELM generation\n * Provides highest realism with good variation\n */\nclass PerfectGenerator {\n constructor(config) {\n this.elm = null;\n this.trainingSamples = [];\n // Initialize and require license before allowing generator use\n this.config = Object.assign({ preferExact: true, usePatternMatching: true, useImprovedELM: false, elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.001, noiseSize: 32 }, config);\n this.exact = new ExactGenerator({\n seed: config.seed,\n usePatternMatching: this.config.usePatternMatching,\n });\n this.hybrid = new HybridGenerator({\n maxLength: config.maxLength,\n seed: config.seed,\n exactMode: false, // Allow some jitter for variation\n jitterStrength: 0.02, // Very low jitter (2%)\n useOneHot: false, // Disable one-hot to reduce memory (was: this.config.useImprovedELM)\n useClassification: false, // Disable classification to reduce memory (was: this.config.useImprovedELM)\n usePatternCorrection: true,\n elmHiddenUnits: this.config.elmHiddenUnits, // Now uses reduced 128 instead of 256\n elmActivation: this.config.elmActivation,\n elmRidgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n });\n // Only create standalone ELM if explicitly requested AND useImprovedELM is true\n // This avoids duplicate ELM training (HybridGenerator already has one)\n if (this.config.useImprovedELM && config.useImprovedELM === true) {\n this.elm = new ELMGenerator({\n maxLength: config.maxLength,\n seed: config.seed,\n hiddenUnits: this.config.elmHiddenUnits,\n activation: this.config.elmActivation,\n ridgeLambda: this.config.elmRidgeLambda,\n noiseSize: this.config.noiseSize,\n useOneHot: false, // Disable one-hot to reduce memory\n useClassification: false, // Disable 
classification to reduce memory\n usePatternCorrection: true,\n });\n }\n this.patternCorrector = new PatternCorrector();\n }\n /**\n * Train the perfect generator\n */\n train(samples) {\n this.trainingSamples = samples;\n // Train generators in order of priority (exact is fastest)\n this.exact.train(samples);\n // Only train hybrid if we need it (lazy training)\n // We'll train it on first use if needed\n // Learn patterns (lightweight)\n this.patternCorrector.learnPatterns(samples);\n }\n /**\n * Lazy train hybrid generator\n */\n ensureHybridTrained() {\n if (!this.hybrid.isTrained() && this.trainingSamples.length > 0) {\n this.hybrid.train(this.trainingSamples);\n }\n }\n /**\n * Lazy train ELM generator\n */\n ensureELMTrained() {\n if (this.elm && !this.elm.isTrained() && this.trainingSamples.length > 0) {\n this.elm.train(this.trainingSamples);\n }\n }\n /**\n * Generate with best strategy\n */\n generate(label, seed) {\n var _a;\n const candidates = [];\n // 1. Try exact retrieval first (100% realistic)\n try {\n const exact = this.exact.generate(label, seed);\n if (exact) {\n candidates.push({ value: exact, score: 1.0, source: 'exact' });\n }\n }\n catch (error) {\n // No exact match available\n }\n // 2. Try exact with variation (95-100% realistic)\n try {\n const exactVar = this.exact.generateWithVariation(label, seed);\n if (exactVar && exactVar !== ((_a = candidates[0]) === null || _a === void 0 ? void 0 : _a.value)) {\n const score = this.patternCorrector.score(exactVar, label);\n candidates.push({ value: exactVar, score: score * 0.95, source: 'exact-variation' });\n }\n }\n catch (error) {\n // Skip\n }\n // 3. Try hybrid (80-90% realistic) - lazy train if needed\n try {\n this.ensureHybridTrained();\n const hybrid = this.hybrid.generate(label, seed);\n if (hybrid && !candidates.some(c => c.value === hybrid)) {\n const score = this.patternCorrector.score(hybrid, label);\n const validation = validateForLabel(label, hybrid);\n const finalScore = validation.isValid ? score * 0.85 : score * 0.5;\n candidates.push({ value: hybrid, score: finalScore, source: 'hybrid' });\n }\n }\n catch (error) {\n // Skip\n }\n // 4. Try improved ELM if available (75-85% realistic) - lazy train if needed\n if (this.elm) {\n try {\n this.ensureELMTrained();\n const elmGen = this.elm.generate(label, seed);\n if (elmGen && !candidates.some(c => c.value === elmGen)) {\n const score = this.patternCorrector.score(elmGen, label);\n const validation = validateForLabel(label, elmGen);\n const finalScore = validation.isValid ? score * 0.8 : score * 0.4;\n candidates.push({ value: elmGen, score: finalScore, source: 'elm' });\n }\n }\n catch (error) {\n // Skip\n }\n }\n // 5. Select best candidate\n if (candidates.length === 0) {\n throw new Error(`No samples found for label: ${label}`);\n }\n // Sort by score (highest first)\n candidates.sort((a, b) => b.score - a.score);\n // If preferExact and we have exact match, use it\n if (this.config.preferExact) {\n const exactCandidate = candidates.find(c => c.source === 'exact');\n if (exactCandidate && exactCandidate.score >= 0.9) {\n return exactCandidate.value;\n }\n }\n // Return highest scoring candidate\n return candidates[0].value;\n }\n /**\n * Generate multiple samples with best strategy\n */\n generateBatch(label, count) {\n const results = [];\n const seen = new Set();\n let attempts = 0;\n const maxAttempts = count * 5;\n while (results.length < count && attempts < maxAttempts) {\n const seed = this.config.seed !== undefined\n ? 
this.config.seed + attempts\n : Date.now() + attempts;\n try {\n const generated = this.generate(label, seed);\n if (generated && generated.length > 0 && !seen.has(generated.toLowerCase())) {\n results.push(generated);\n seen.add(generated.toLowerCase());\n }\n }\n catch (error) {\n // Skip errors\n }\n attempts++;\n }\n return results;\n }\n /**\n * Get all available labels\n */\n getLabels() {\n return this.exact.getLabels();\n }\n /**\n * Check if generator is trained\n */\n isTrained() {\n // At minimum, exact generator should be trained\n return this.exact.isTrained();\n }\n}\n\n/**\n * OmegaSynth - Main class\n * Unified interface for synthetic data generation\n */\nclass OmegaSynth {\n constructor(config) {\n this.generator = null;\n this.config = Object.assign({ maxLength: 32 }, config);\n this.seed = config.seed;\n // Initialize generator based on mode\n this.initializeGenerator();\n }\n initializeGenerator() {\n var _a, _b, _c, _d, _e, _f, _g;\n const commonConfig = {\n maxLength: this.config.maxLength || 32,\n seed: this.seed,\n };\n switch (this.config.mode) {\n case 'retrieval':\n this.generator = new RetrievalGenerator(this.seed);\n break;\n case 'elm':\n this.generator = new ELMGenerator(Object.assign(Object.assign({}, commonConfig), { hiddenUnits: 128, activation: 'relu', ridgeLambda: 0.01, noiseSize: 32, useOneHot: (_a = this.config.useOneHot) !== null && _a !== void 0 ? _a : false, useClassification: (_b = this.config.useClassification) !== null && _b !== void 0 ? _b : false, usePatternCorrection: (_c = this.config.usePatternCorrection) !== null && _c !== void 0 ? _c : true }));\n break;\n case 'hybrid':\n this.generator = new HybridGenerator(Object.assign(Object.assign({}, commonConfig), { elmHiddenUnits: 128, elmActivation: 'relu', elmRidgeLambda: 0.01, noiseSize: 32, jitterStrength: this.config.exactMode ? 0 : 0.05, exactMode: (_d = this.config.exactMode) !== null && _d !== void 0 ? _d : false, useOneHot: (_e = this.config.useOneHot) !== null && _e !== void 0 ? _e : false, useClassification: (_f = this.config.useClassification) !== null && _f !== void 0 ? _f : false, usePatternCorrection: (_g = this.config.usePatternCorrection) !== null && _g !== void 0 ? 
_g : true }));\n break;\n case 'exact':\n this.generator = new ExactGenerator({\n seed: this.seed,\n usePatternMatching: true,\n });\n break;\n case 'perfect':\n this.generator = new PerfectGenerator(Object.assign(Object.assign({}, commonConfig), { preferExact: true, usePatternMatching: true, useImprovedELM: true, elmHiddenUnits: 256, elmActivation: 'relu', elmRidgeLambda: 0.001, noiseSize: 32 }));\n break;\n default:\n throw new Error(`Unknown mode: ${this.config.mode}`);\n }\n }\n /**\n * Train the generator on a dataset\n * @param dataset Array of labeled samples\n */\n train(dataset) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized');\n }\n if (this.config.mode === 'retrieval') {\n this.generator.ingest(dataset);\n }\n else if (this.config.mode === 'elm') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'hybrid') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'exact') {\n this.generator.train(dataset);\n }\n else if (this.config.mode === 'perfect') {\n this.generator.train(dataset);\n }\n });\n }\n /**\n * Generate a synthetic value for a given label\n * @param label Label to generate for\n * @param seed Optional seed for deterministic generation\n */\n generate(label, seed) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized. Call train() first.');\n }\n if (this.config.mode === 'retrieval') {\n const result = this.generator.sampleOne(label);\n if (!result) {\n throw new Error(`No samples found for label: ${label}`);\n }\n return result;\n }\n else if (this.config.mode === 'elm') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'exact') {\n return this.generator.generate(label, seed);\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.generate(label, seed);\n }\n throw new Error(`Unknown mode: ${this.config.mode}`);\n });\n }\n /**\n * Generate multiple synthetic values for a label\n * @param label Label to generate for\n * @param count Number of samples to generate\n */\n generateBatch(label, count) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!this.generator) {\n throw new Error('Generator not initialized. 
Call train() first.');\n }\n if (this.config.mode === 'retrieval') {\n return this.generator.sample(label, count);\n }\n else if (this.config.mode === 'elm') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'exact') {\n return this.generator.generateBatch(label, count);\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.generateBatch(label, count);\n }\n throw new Error(`Unknown mode: ${this.config.mode}`);\n });\n }\n /**\n * Get all available labels\n */\n getLabels() {\n if (!this.generator) {\n return [];\n }\n if (this.config.mode === 'retrieval') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'elm') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'exact') {\n return this.generator.getLabels();\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.getLabels();\n }\n return [];\n }\n /**\n * Check if the generator is trained\n */\n isTrained() {\n if (!this.generator) {\n return false;\n }\n if (this.config.mode === 'retrieval') {\n const labels = this.generator.getLabels();\n return labels.length > 0;\n }\n else if (this.config.mode === 'elm') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'hybrid') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'exact') {\n return this.generator.isTrained();\n }\n else if (this.config.mode === 'perfect') {\n return this.generator.isTrained();\n }\n return false;\n }\n /**\n * Set seed for deterministic generation\n */\n setSeed(seed) {\n this.seed = seed;\n // Reinitialize generator with new seed\n this.initializeGenerator();\n }\n}\n\n/**\n * loadPretrained - Load pretrained synthetic data generator\n * Instantiates OmegaSynth with pretrained data for common labels\n */\n/**\n * Load pretrained OmegaSynth instance\n * @param mode Generation mode ('retrieval', 'elm', or 'hybrid')\n * @param config Optional configuration overrides\n */\nfunction loadPretrained(mode = 'retrieval', config) {\n // Initialize license before creating instance\n const synth = new OmegaSynth({\n mode,\n maxLength: (config === null || config === void 0 ? void 0 : config.maxLength) || 32,\n seed: config === null || config === void 0 ? 
void 0 : config.seed,\n });\n // Load default data\n // Try multiple possible locations for the model file\n let modelPath = null;\n // Helper to find package root by looking for package.json\n function findPackageRoot(startDir) {\n let current = startDir;\n while (current !== path.dirname(current)) {\n const pkgPath = path.join(current, 'package.json');\n if (fs.existsSync(pkgPath)) {\n try {\n const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));\n if (pkg.name === '@astermind/astermind-synth') {\n return current;\n }\n }\n catch (_a) {\n // Continue searching\n }\n }\n current = path.dirname(current);\n }\n return null;\n }\n // Find package root first - this is more reliable than using __dirname\n // since we're looking for files relative to package root, not the current file\n const packageRoot = findPackageRoot(process.cwd());\n const possiblePaths = [];\n // Add paths relative to package root if found\n if (packageRoot) {\n possiblePaths.push(path.join(packageRoot, 'dist/omegasynth/models/default_synth.json'), // Bundled location (npm package)\n path.join(packageRoot, 'src/omegasynth/models/default_synth.json') // Source location (development)\n );\n }\n // Also try common npm package locations (when installed as dependency)\n possiblePaths.push(path.join(process.cwd(), 'node_modules/@astermind/astermind-synth/dist/omegasynth/models/default_synth.json'));\n // Try relative to current working directory (for development)\n possiblePaths.push(path.join(process.cwd(), 'dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'src/omegasynth/models/default_synth.json'));\n for (const possiblePath of possiblePaths) {\n if (fs.existsSync(possiblePath)) {\n modelPath = possiblePath;\n break;\n }\n }\n if (!modelPath) {\n throw new Error('default_synth.json not found. 
Tried paths: ' + possiblePaths.join(', '));\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n // Convert pretrained data to LabeledSample format\n const samples = [];\n for (const [label, values] of Object.entries(modelData.labels)) {\n for (const value of values) {\n samples.push({ label, value });\n }\n }\n // Train the generator synchronously for immediate use\n // Note: This is a simplified approach - in production you might want async\n (() => __awaiter(this, void 0, void 0, function* () {\n try {\n yield synth.train(samples);\n }\n catch (err) {\n console.error('Error training pretrained model:', err);\n }\n }))();\n return synth;\n}\n/**\n * Load a fully versioned OmegaSynth model from dist/models/vX.Y.Z\n *\n * This function:\n * - Reads model.json, training_data.json, and elm_model.json from the version directory\n * - Rebuilds the retrieval store from training_data.json\n * - Hydrates the internal ELM from elm_model.json (for elm/hybrid modes) if possible\n *\n * NOTE:\n * - We avoid calling synth.train() here to prevent re-training; instead we:\n * - Directly ingest training samples into the retrieval generator\n * - Attempt to load ELM weights via loadModelFromJSON if available\n */\nfunction loadPretrainedFromVersion(versionDir) {\n var _a;\n // Initialize license before creating instance\n const manifestPath = path.join(versionDir, 'manifest.json');\n const modelPath = path.join(versionDir, 'model.json');\n const trainingDataPath = path.join(versionDir, 'training_data.json');\n const elmModelPath = path.join(versionDir, 'elm_model.json');\n let manifest = null;\n if (fs.existsSync(manifestPath)) {\n manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n const configFromModel = (_a = manifest === null || manifest === void 0 ? void 0 : manifest.config) !== null && _a !== void 0 ? _a : modelData.config;\n // Load training samples\n if (!fs.existsSync(trainingDataPath)) {\n throw new Error(`training_data.json not found in version directory: ${trainingDataPath}`);\n }\n const trainingSamples = JSON.parse(fs.readFileSync(trainingDataPath, 'utf-8'));\n // Create OmegaSynth.\n // IMPORTANT: For pretrained loading we prefer 'retrieval' mode here:\n // - We only need high-quality samples for downstream ELM/KELM training.\n // - Retrieval over the saved training_data.json gives 100% realistic data\n // without requiring vocab building or ELM retraining.\n //\n // If you ever need to use the original mode (e.g. 'hybrid' or 'elm'),\n // you can swap this back to configFromModel.mode.\n const mode = 'retrieval';\n const synth = new OmegaSynth({\n mode,\n maxLength: configFromModel.maxLength || 50,\n seed: configFromModel.seed,\n });\n // Ingest training samples directly into the retrieval generator\n // For hybrid/elm modes, this ensures retrieval works without retraining\n try {\n const generator = synth.generator;\n if (generator) {\n if (generator.ingest) {\n // RetrievalGenerator\n generator.ingest(trainingSamples);\n }\n else if (generator.retrieval && typeof generator.retrieval.ingest === 'function') {\n // HybridGenerator (has .retrieval)\n generator.retrieval.ingest(trainingSamples);\n }\n }\n }\n catch (err) {\n console.warn('Could not ingest training samples into OmegaSynth generator:', err);\n }\n // Hydrate ELM weights if available and applicable (elm/hybrid modes).\n // NOTE: Since we currently force mode = 'retrieval' above for stability,\n // this block will not run. 
It is left here for future use if you decide\n // to re-enable elm/hybrid loading via configFromModel.mode.\n if (fs.existsSync(elmModelPath) && (configFromModel.mode === 'elm' || configFromModel.mode === 'hybrid')) {\n try {\n const elmModelJSON = fs.readFileSync(elmModelPath, 'utf-8');\n const generator = synth.generator;\n if (generator) {\n let elmInstance = null;\n if (configFromModel.mode === 'hybrid' && generator.elm && generator.elm.elm) {\n // HybridGenerator -> ELMGenerator -> elm\n elmInstance = generator.elm.elm;\n }\n else if (configFromModel.mode === 'elm' && generator.elm) {\n // ELMGenerator -> elm\n elmInstance = generator.elm;\n }\n if (elmInstance && typeof elmInstance.loadModelFromJSON === 'function') {\n elmInstance.loadModelFromJSON(elmModelJSON);\n console.log('✅ ELM weights loaded from elm_model.json into OmegaSynth');\n }\n else {\n console.warn('Could not load ELM weights: loadModelFromJSON not available on ELM instance');\n }\n }\n }\n catch (err) {\n console.warn('Could not hydrate ELM from elm_model.json:', err);\n }\n }\n return synth;\n}\n/**\n * Load pretrained model from custom JSON data\n * @param modelData Custom model data\n * @param mode Generation mode\n * @param config Optional configuration\n */\nfunction loadPretrainedFromData(modelData, mode = 'retrieval', config) {\n // Initialize license before creating instance\n const synth = new OmegaSynth({\n mode,\n maxLength: (config === null || config === void 0 ? void 0 : config.maxLength) || 32,\n seed: config === null || config === void 0 ? void 0 : config.seed,\n });\n const samples = [];\n for (const [label, values] of Object.entries(modelData.labels)) {\n for (const value of values) {\n samples.push({ label, value });\n }\n }\n (() => __awaiter(this, void 0, void 0, function* () {\n try {\n yield synth.train(samples);\n }\n catch (err) {\n console.error('Error training custom model:', err);\n }\n }))();\n return synth;\n}\n/**\n * Get available pretrained labels\n */\nfunction getPretrainedLabels() {\n try {\n // Helper to find package root\n function findPackageRoot(startDir) {\n let current = startDir;\n while (current !== path.dirname(current)) {\n const pkgPath = path.join(current, 'package.json');\n if (fs.existsSync(pkgPath)) {\n try {\n const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));\n if (pkg.name === '@astermind/astermind-synth') {\n return current;\n }\n }\n catch (_a) {\n // Continue searching\n }\n }\n current = path.dirname(current);\n }\n return null;\n }\n // Try multiple possible locations for the model file\n const packageRoot = findPackageRoot(process.cwd());\n const possiblePaths = [];\n if (packageRoot) {\n possiblePaths.push(path.join(packageRoot, 'dist/omegasynth/models/default_synth.json'), path.join(packageRoot, 'src/omegasynth/models/default_synth.json'));\n }\n possiblePaths.push(path.join(process.cwd(), 'node_modules/@astermind/astermind-synth/dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'dist/omegasynth/models/default_synth.json'), path.join(process.cwd(), 'src/omegasynth/models/default_synth.json'));\n let modelPath = null;\n for (const possiblePath of possiblePaths) {\n if (fs.existsSync(possiblePath)) {\n modelPath = possiblePath;\n break;\n }\n }\n if (!modelPath) {\n throw new Error('Model file not found');\n }\n const modelData = JSON.parse(fs.readFileSync(modelPath, 'utf-8'));\n return Object.keys(modelData.labels);\n }\n catch (_a) {\n // Fallback if file not found\n return [\n 'first_name', 'last_name', 'phone_number', 'email', 
'street_address',\n 'city', 'state', 'country', 'company_name', 'job_title', 'product_name',\n 'color', 'uuid', 'date', 'credit_card_type', 'device_type'\n ];\n }\n}\n\n/**\n * Utilities for saving trained OmegaSynth models\n */\n/**\n * Save a trained OmegaSynth model to disk\n *\n * @param synth The trained OmegaSynth instance\n * @param trainingData The training data used to train the model (required for saving)\n * @param outputDir Directory where the model will be saved\n * @param version Optional version string (default: '1.0.0')\n * @returns Path to the saved model directory\n */\nfunction saveTrainedModel(synth_1, trainingData_1, outputDir_1) {\n return __awaiter(this, arguments, void 0, function* (synth, trainingData, outputDir, version = '1.0.0') {\n if (!synth.isTrained()) {\n throw new Error('Model must be trained before saving. Call train() first.');\n }\n if (trainingData.length === 0) {\n throw new Error('Training data is required to save the model.');\n }\n // Create version directory\n const versionDir = path.join(outputDir, `v${version}`);\n if (!fs.existsSync(versionDir)) {\n fs.mkdirSync(versionDir, { recursive: true });\n }\n // Calculate training stats\n const labels = Array.from(new Set(trainingData.map(s => s.label)));\n const samplesPerLabel = {};\n for (const label of labels) {\n samplesPerLabel[label] = trainingData.filter(s => s.label === label).length;\n }\n // Get config from synth (we need to access private config)\n const config = synth.config || {};\n // Save model metadata\n const modelData = {\n config: {\n mode: config.mode || 'retrieval',\n maxLength: config.maxLength,\n seed: config.seed,\n exactMode: config.exactMode,\n useOneHot: config.useOneHot,\n useClassification: config.useClassification,\n usePatternCorrection: config.usePatternCorrection,\n },\n trainingStats: {\n totalSamples: trainingData.length,\n labels,\n samplesPerLabel,\n },\n timestamp: new Date().toISOString(),\n };\n const modelPath = path.join(versionDir, 'model.json');\n fs.writeFileSync(modelPath, JSON.stringify(modelData, null, 2));\n // Save training data (required for loading later)\n const trainingDataPath = path.join(versionDir, 'training_data.json');\n fs.writeFileSync(trainingDataPath, JSON.stringify(trainingData, null, 2));\n // Try to save ELM model weights if available (for elm/hybrid modes)\n try {\n const generator = synth.generator;\n if (generator) {\n let elmInstance = null;\n // Get ELM instance based on mode\n if (config.mode === 'hybrid' && generator.elm) {\n elmInstance = generator.elm.elm; // HybridGenerator -> ELMGenerator -> elm\n }\n else if (config.mode === 'elm' && generator.elm) {\n elmInstance = generator.elm; // ELMGenerator -> elm\n }\n if (elmInstance) {\n let elmModelJSON;\n // Try to get serialized model\n if (elmInstance.savedModelJSON) {\n elmModelJSON = elmInstance.savedModelJSON;\n }\n else if (elmInstance.model) {\n // Manually serialize\n const serialized = {\n config: elmInstance.config,\n W: elmInstance.model.W,\n b: elmInstance.model.b,\n B: elmInstance.model.beta,\n categories: elmInstance.categories || [],\n };\n elmModelJSON = JSON.stringify(serialized);\n }\n if (elmModelJSON) {\n const elmModelPath = path.join(versionDir, 'elm_model.json');\n fs.writeFileSync(elmModelPath, elmModelJSON);\n console.log(`✅ ELM model weights saved to: ${elmModelPath}`);\n }\n }\n }\n }\n catch (error) {\n console.warn('⚠️ Could not save ELM model weights:', error);\n // Continue - ELM weights are optional\n }\n console.log(`\\n✅ Model saved to: 
${versionDir}`);\n console.log(` Version: ${version}`);\n console.log(` Training samples: ${trainingData.length}`);\n console.log(` Labels: ${labels.length} (${labels.join(', ')})`);\n console.log(`\\n To load this model later, use:`);\n console.log(` loadPretrainedFromVersion('${versionDir}')`);\n return versionDir;\n });\n}\n\nexport { Activations, AdaptiveKernelELM, AdaptiveOnlineELM, AttentionEnhancedELM, Augment, AutoComplete, CharacterLangEncoderELM, ConfidenceClassifierELM, ConvolutionalELM, DISK_EPS, DeepELM, DeepELMPro, DeepKernelELM, DimError, ELM, ELMAdapter, ELMChain, ELMGenerator, ELMKELMCascade, ELMScorer, ELMWorkerClient, EPS, EmbeddingStore, EncoderELM, EnsembleKernelELM, FeatureCombinerELM, ForgettingOnlineELM, FuzzyELM, GraphELM, GraphKernelELM, HierarchicalELM, HybridGenerator, IO, InfoFlowGraph, InfoFlowGraphPWS, IntentClassifier, KNN, KernelELM, LanguageClassifier, MAX_EXP, MIN_EXP, Matrix, MultiKernelELM, MultiTaskELM, OmegaSynth, OnlineELM, OnlineKernelELM, OnlineRidge, QuantumInspiredELM, RecurrentELM, RefinerELM, RetrievalGenerator, RobustKernelELM, SparseELM, SparseKernelELM, StringEncoder, StringKernelELM, SyntheticFieldStore, TEController, TFIDF, TFIDFVectorizer, TensorKernelELM, TextEncoder, TimeSeriesELM, Tokenizer, TransferEntropy, TransferEntropyPWS, TransferLearningELM, UniversalEncoder, VariationalELM, VotingClassifierELM, add, add_, argmax, asVec, assertRect, autoTune, backfillEmptyParents, baseKernel$1 as baseKernel, binaryPR, binaryROC, bindAutocompleteUI, buildDenseDocs, buildIndex, buildLandmarks, buildRFF, buildTfidfDocs, buildVocabAndIdf, clampVec, confusionMatrixFromIndices, cosine$2 as cosine, cosineSparse, defaultNumericConfig, defaultTextConfig, deserializeTextBits, dot, dotProd$1 as dotProd, ensureRectNumber2D, evaluateClassification, evaluateEnsembleRetrieval, evaluateRegression, expSafe, expandQuery, explainFeatures, exportModel, filterMMR, flattenSections, fmtHead, formatClassificationReport, getPretrainedLabels, hDistProxy, hadamard, hadamard_, hybridRetrieve, importModel, isFiniteVec, isNumericConfig, isTextConfig, jaccard, kernelSim, keywordBonus, l2$1 as l2, loadPretrained, loadPretrainedFromData, loadPretrainedFromVersion, log1pSafe, logLoss, logSumExp, mapRFF, mean, normalizeConfig, normalizeL2, normalizeWord, omegaComposeAnswer, parseMarkdownToSections, penalty, projectToDense, quickHash, rerank, rerankAndFilter, ridgeSolvePro, sampleQueriesFromCorpus, saveTrainedModel, scal, scal_, sigmoid$1 as sigmoid, softmax, sparseToDense, standardize, summarizeDeterministic, tanhVec, tanhVec_, toTfidf, tokenize$1 as tokenize, topK, topKAccuracy, topKIndices, variance, wrapELM, zeros };\n//# 
sourceMappingURL=astermind.esm.js.map\n",null,null],"names":[],"mappings":";;;[… base64 VLQ source-map mappings (machine-generated, not human-readable) elided …]
B,QAAA,IAAI,CAAC,GAAG,GAAG,IAAI,GAAG,EAAE;;AAGpB,QAAA,MAAM,OAAO,GAAG,IAAI,GAAG,EAAkB;AAEzC,QAAA,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;YACzB,MAAM,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;AACzC,YAAA,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC;;AAGpC,YAAA,KAAK,MAAM,KAAK,IAAI,YAAY,EAAE;AAC9B,gBAAA,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACrD;;AAGA,YAAA,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBAChB,EAAE,EAAE,GAAG,CAAC,EAAE;gBACV,KAAK,EAAE,GAAG,CAAC,KAAK;gBAChB,OAAO,EAAE,GAAG,CAAC,OAAO;gBACpB,MAAM;AACN,gBAAA,KAAK,EAAE,IAAI,GAAG,EAAE;AACnB,aAAA,CAAC;QACN;;AAGA,QAAA,MAAM,CAAC,GAAG,SAAS,CAAC,MAAM;QAC1B,KAAK,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,OAAO,EAAE;YAC9B,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QACxD;;AAGA,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,SAAS,EAAE;YAC9B,MAAM,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,MAAM,CAAC;YACtD,KAAK,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,QAAQ,EAAE;AAC/B,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;gBACnC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,GAAG,GAAG,CAAC;YACjC;QACJ;AAEA,QAAA,IAAI,CAAC,OAAO,GAAG,IAAI;IACvB;AAEA;;AAEG;IACH,MAAM,GAAG,CAAC,KAAa,EAAA;AACnB,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC9C,OAAO;AACH,gBAAA,MAAM,EAAE,kDAAkD;AAC1D,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,QAAQ,EAAE;aACb;QACL;;QAGA,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC;QACxC,MAAM,OAAO,GAAG,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC;AACtD,QAAA,MAAM,UAAU,GAAG,IAAI,GAAG,EAAkB;QAE5C,KAAK,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,OAAO,EAAE;AAC9B,YAAA,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;YACnC,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,GAAG,GAAG,CAAC;QAClC;;AAGA,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,KAAK;YACtC,GAAG;YACH,KAAK,EAAE,IAAI,CAAC,gBAAgB,CAAC,UAAU,EAAE,GAAG,CAAC,KAAK;AACrD,SAAA,CAAC,CAAC;;AAGH,QAAA,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;;QAGxC,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,GAAG,CAAC;AAEhE,QAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YACzB,OAAO;AACH,gBAAA,MAAM,EAAE,6EAA6E;AACrF,gBAAA,OAAO,EAAE,EAAE;gBACX,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI;aACjC;QACL;;QAGA,MAAM,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG;AAChC,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,OAAO,EAAE,WAAW,CAAC;QAExE,OAAO;AACH,YAAA,MAAM,EAAE,OAAO;YACf,OAAO,EAAE,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK;AAC1B,gBAAA,KAAK,EAAE,CAAC,CAAC,GAAG,CAAC,KAAK;AAClB,gBAAA,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBAChD,KAAK,EAAE,CAAC,CAAC;AACZ,aAAA,CAAC,CAAC;AACH,YAAA,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;SAC3B;IACL;AAEA;;AAEG;IACH,KAAK,GAAA;AACD,QAAA,IAAI,CAAC,SAAS,GAAG,EAAE;AACnB,QAAA,IAAI,CAAC,GAAG,GAAG,IAAI,GAAG,EAAE;AACpB,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAA,IAAI,CAAC,OAAO,GAAG,KAAK;AACpB,QAAA,IAAI,CAAC,gBAAgB,GAAG,KAAK;AAC7B,QAAA,IAAI,CAAC,aAAa,GAAG,SAAS;AAC9B,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS;IAC/B;AAEA;;;;;AAKG;IACH,MAAM,cAAc,CAAC,UAA+B,EAAA;QAChD,IAAI,CAAC,KAAK,EAAE;;AAGZ,QAAA,IAAI,CAAC,GAAG,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;;QAG7D,IAAI,UAAU,GAAG,CAAC;QAClB,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE;YAChC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,UAAU,EAAE,CAAC;QAC3C;;AAGA,QAAA,KAAK,MAAM,GAAG,IAAI,UAAU,CAAC,SAAS,EAAE;;AAEpC,YAAA,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAkB;YAC1C,KAAK,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,IAAI,GAAG,CAAC,WAAW,EAAE;;gBAE/C,KAAK
,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,IAAI,CAAC,UAAU,EAAE;AACvC,oBAAA,IAAI,GAAG,KAAK,SAAS,EAAE;AACnB,wBAAA,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC;wBAC1B;oBACJ;gBACJ;YACJ;AAEA,YAAA,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBAChB,EAAE,EAAE,GAAG,CAAC,EAAE;gBACV,KAAK,EAAE,GAAG,CAAC,KAAK;gBAChB,OAAO,EAAE,GAAG,CAAC,OAAO;gBACpB,MAAM,EAAE,EAAE;AACV,gBAAA,KAAK,EAAE;AACV,aAAA,CAAC;QACN;AAEA,QAAA,IAAI,CAAC,OAAO,GAAG,IAAI;AACnB,QAAA,IAAI,CAAC,gBAAgB,GAAG,IAAI;AAC5B,QAAA,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,OAAO;QACvC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,UAAU;AAE5C,QAAA,OAAO,CAAC,GAAG,CAAC,CAAA,4BAAA,EAA+B,IAAI,CAAC,SAAS,CAAC,MAAM,4BAA4B,UAAU,CAAC,OAAO,CAAA,CAAA,CAAG,CAAC;IACtH;AAEA;;AAEG;IACH,eAAe,GAAA;QACX,OAAO;YACH,MAAM,EAAE,IAAI,CAAC,OAAO;YACpB,gBAAgB,EAAE,IAAI,CAAC,gBAAgB;AACvC,YAAA,aAAa,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM;AACpC,YAAA,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM;YACjC,aAAa,EAAE,IAAI,CAAC,aAAa;YACjC,UAAU,EAAE,IAAI,CAAC;SACpB;IACL;;AAIA;;AAEG;AACK,IAAA,QAAQ,CAAC,IAAY,EAAA;AACzB,QAAA,OAAO;AACF,aAAA,WAAW;AACX,aAAA,OAAO,CAAC,UAAU,EAAE,GAAG;aACvB,KAAK,CAAC,KAAK;AACX,aAAA,MAAM,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IACrE;AAEA;;AAEG;AACK,IAAA,oBAAoB,CAAC,MAAgB,EAAA;AACzC,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,EAAkB;AACtC,QAAA,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;AACxB,YAAA,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/C;;AAGA,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;AAC1C,QAAA,IAAI,OAAO,GAAG,CAAC,EAAE;YACb,KAAK,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC;YAC/B;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;IACK,gBAAgB,CACpB,IAAyB,EACzB,IAAyB,EAAA;QAEzB,IAAI,UAAU,GAAG,CAAC;QAClB,IAAI,KAAK,GAAG,CAAC;QACb,IAAI,KAAK,GAAG,CAAC;QAEb,KAAK,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,IAAI,EAAE;YAC7B,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AAChC,YAAA,UAAU,IAAI,IAAI,GAAG,IAAI;AACzB,YAAA,KAAK,IAAI,IAAI,GAAG,IAAI;QACxB;QAEA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;AAC7B,YAAA,KAAK,IAAI,GAAG,GAAG,GAAG;QACtB;AAEA,QAAA,IAAI,KAAK,KAAK,CAAC,IAAI,KAAK,KAAK,CAAC;AAAE,YAAA,OAAO,CAAC;AACxC,QAAA,OAAO,UAAU,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAC7D;AAEA;;AAEG;IACK,sBAAsB,CAAC,OAAe,EAAE,WAAqB,EAAA;QACjE,MAAM,SAAS,GAAG,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;;QAG1E,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,QAAQ,IAAG;AACpC,YAAA,MAAM,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;YACvD,IAAI,KAAK,GAAG,CAAC;AACb,YAAA,KAAK,MAAM,EAAE,IAAI,WAAW,EAAE;AAC1B,gBAAA,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAC;AAAE,oBAAA,KAAK,EAAE;YACvC;YACA,OAAO,EAAE,QAAQ,EAAE,QAAQ,CAAC,IAAI,EAAE,EAAE,KAAK,EAAE;AAC/C,QAAA,CAAC,CAAC;AAEF,QAAA,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;;QAGxC,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC;AAEhE,QAAA,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,OAAO,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;QAC5C;AAEA,QAAA,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG;IAC7D;AAEA;;AAEG;AACK,IAAA,UAAU,CAAC,IAAY,EAAA;AAC3B,QAAA,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC;AACtB,YAAA,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK;AACnE,YAAA,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ;AACtE,YAAA,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,
EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvE,YAAA,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO;AACvE,YAAA,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO;AACtE,YAAA,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAC7D,YAAA,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;AACtE,YAAA,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE;AAC7D,SAAA,CAAC;AACF,QAAA,OAAO,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC;IAC9B;AACH;;AChVD;AACA;AA0BA;;;AAGG;MACU,wBAAwB,CAAA;AAArC,IAAA,WAAA,GAAA;QACY,IAAA,CAAA,EAAE,GAA0C,IAAI;QAChD,IAAA,CAAA,SAAS,GAAmD,IAAI;QACvD,IAAA,CAAA,MAAM,GAAG,4BAA4B;QACrC,IAAA,CAAA,SAAS,GAAG,CAAC;IAqIlC;AAnII;;AAEG;AACK,IAAA,MAAM,MAAM,GAAA;AAChB,QAAA,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,OAAO,IAAI,CAAC,EAAE;QAClB;AAEA,QAAA,IAAI,IAAI,CAAC,SAAS,EAAE;YAChB,OAAO,IAAI,CAAC,SAAS;QACzB;AAEA,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,CAAmB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,SAAS,EAAE;AACnE,YAAA,OAAO,CAAC,EAAE,EAAA;gBACN,IAAI,CAAC,EAAE,CAAC,gBAAgB,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;oBAC1C,EAAE,CAAC,iBAAiB,CAAC,SAAS,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;gBACtD;YACJ;AACH,SAAA,CAAC;AAEF,QAAA,IAAI,CAAC,EAAE,GAAG,MAAM,IAAI,CAAC,SAAS;QAC9B,OAAO,IAAI,CAAC,EAAE;IAClB;AAEA;;AAEG;AACH,IAAA,MAAM,KAAK,CAAC,EAAU,EAAE,UAA+B,EAAA;AACnD,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;AAC7B,QAAA,MAAM,EAAE,CAAC,GAAG,CAAC,SAAS,EAAE;YACpB,EAAE;YACF,aAAa,EAAE,UAAU,CAAC,OAAO;AACjC,YAAA,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,UAAU;YACtC,IAAI,EAAE,UAAU,CAAC,IAAI;AACrB,YAAA,IAAI,EAAE;AACT,SAAA,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,CAAA,kCAAA,EAAqC,UAAU,CAAC,IAAI,CAAC,UAAU,CAAA,OAAA,CAAS,CAAC;IACzF;AAEA;;AAEG;IACH,MAAM,QAAQ,CAAC,EAAU,EAAA;AACrB,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;QAC7B,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,SAAS,EAAE,EAAE,CAAC;AAC1C,QAAA,OAAO,MAAM,EAAE,IAAI,IAAI,IAAI;IAC/B;AAEA;;AAEG;AACH,IAAA,MAAM,OAAO,CAAC,EAAU,EAAE,MAAc,EAAA;AACpC,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;QAC7B,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,SAAS,EAAE,EAAE,CAAC;AAC1C,QAAA,IAAI,CAAC,MAAM;AAAE,YAAA,OAAO,KAAK;AAEzB,QAAA,MAAM,UAAU,GAAG,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,OAAO,EAAE;QACxD,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,UAAU,GAAG,MAAM;IAC3C;AAEA;;AAEG;IACH,MAAM,OAAO,CAAC,EAAU,EAAA;AACpB,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;QAC7B,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,SAAS,EAAE,EAAE,CAAC;AAC1C,QAAA,IAAI,CAAC,MAAM;AAAE,YAAA,OAAO,IAAI;QAExB,OAAO;YACH,OAAO,EAAE,MAAM,CAAC,aAAa;YAC7B,UAAU,EAAE,MAAM,CAAC,UAAU;AAC7B,YAAA,aAAa,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa;AAC7C,YAAA,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU;YACvC,IAAI,EAAE,MAAM,CAAC;SAChB;IACL;AAEA;;AAEG;AACH,IAAA,MAAM,OAAO,GAAA;AACT,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC;QACvC,OAAO,KAAK,GAAG,CAAC;IACpB;AAEA;;AAEG;AACH,IAAA,MAAM,YAAY,GAAA;AACd,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;AAC7B,QAAA,OAAO,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC;IACnC;AAEA;;AAEG;IACH,MAAM,MAAM,CAAC,EAAU,EAAA;AACnB,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;QAC7B,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,EAAE,CAAC;IAClC;AAEA;;AAEG;AACH,IAAA,MAAM,KAAK,GAAA;AACP,QAAA,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,EAAE;AAC7B,QAAA,MAAM,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC;AACzB,QAAA,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC;IAC3D;AAEA;;AAEG;AACK,IAAA,MAAM,KAAK,GAAA;AACf,QAAA,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,OAAO,IAAI,CAAC,EAAE;QAClB;AACA,QAAA,OAAO,IAAI,CAAC,MAAM,EAAE;IACxB;AAEA;;AAEG;IACH,KAAK,GAAA;AACD,QAAA,IAAI,IAAI,CAAC,EAAE,EAAE;AACT,YAAA,IAAI,CAAC,EAAE,CAAC,KAAK,EAA
E;AACf,YAAA,IAAI,CAAC,EAAE,GAAG,IAAI;AACd,YAAA,IAAI,CAAC,SAAS,GAAG,IAAI;QACzB;IACJ;AACH;;ACxKD;AACA;AAEA;;;AAGG;AACG,SAAU,eAAe,CAAC,KAAa,EAAA;;IAEzC,IAAI,MAAM,GAAG;AACR,SAAA,OAAO,CAAC,IAAI,EAAE,GAAG;AACjB,SAAA,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC;;AAGvB,IAAA,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC;IACjC,IAAI,OAAO,EAAE;QACT,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,GAAG,OAAO,CAAC;IACrC;;AAGA,IAAA,MAAM,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC;IACjC,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,YAAY,CAAC,MAAM,CAAC;AAEjD,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACzC;AAEA,IAAA,OAAO,KAAK;AAChB;AAkBA;;AAEG;AACG,SAAU,qBAAqB,CAAC,KAAa,EAAA;AAC/C,IAAA,MAAM,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;IACpC,OAAO,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC;AAC1C;AAEA;;;AAGG;AACG,SAAU,QAAQ,CAAC,KAAa,EAAA;AAMlC,IAAA,IAAI;QACA,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC;AAC9B,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AACpB,YAAA,OAAO,IAAI;QACf;QAEA,MAAM,CAAC,SAAS,EAAE,UAAU,EAAE,YAAY,CAAC,GAAG,KAAK;QAEnD,OAAO;YACH,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,SAAS,CAAC,CAAC;YACpD,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,UAAU,CAAC,CAAC;AACtD,YAAA,SAAS,EAAE,eAAe,CAAC,YAAY,CAAC;AACxC,YAAA,aAAa,EAAE,CAAA,EAAG,SAAS,CAAA,CAAA,EAAI,UAAU,CAAA;SAC5C;IACL;AAAE,IAAA,MAAM;AACJ,QAAA,OAAO,IAAI;IACf;AACJ;;ACjFA;AACA;AAKA;;;AAGG;AACH,MAAM,oBAAoB,GAAG,CAAA;;;yBAGJ;AAEzB;;AAEG;AACH,eAAe,eAAe,CAAC,MAAc,EAAA;;IAEzC,MAAM,WAAW,GAAG;AACf,SAAA,OAAO,CAAC,4BAA4B,EAAE,EAAE;AACxC,SAAA,OAAO,CAAC,0BAA0B,EAAE,EAAE;AACtC,SAAA,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC;;AAGvB,IAAA,MAAM,YAAY,GAAG,IAAI,CAAC,WAAW,CAAC;IACtC,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,YAAY,CAAC,MAAM,CAAC;AACjD,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACzC;;IAGA,OAAO,MAAM,CAAC,MAAM,CAAC,SAAS,CAC1B,MAAM,EACN,KAAK,CAAC,MAAM,EACZ;AACI,QAAA,IAAI,EAAE,OAAO;AACb,QAAA,UAAU,EAAE;AACf,KAAA,EACD,KAAK,EACL,CAAC,QAAQ,CAAC,CACb;AACL;AAEA;;;;AAIG;AACH,SAAS,mBAAmB,CAAC,SAAqB,EAAA;;;AAG9C,IAAA,OAAO,SAAS;AACpB;AAEA;;AAEG;AACH,eAAe,eAAe,CAC1B,aAAqB,EACrB,SAAqB,EACrB,SAAiB,EAAA;AAEjB,IAAA,IAAI;AACA,QAAA,MAAM,GAAG,GAAG,MAAM,eAAe,CAAC,SAAS,CAAC;AAC5C,QAAA,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE;QACjC,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC;AAC1C,QAAA,MAAM,GAAG,GAAG,mBAAmB,CAAC,SAAS,CAAC;;QAG1C,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,MAAqB;AAE3D,QAAA,OAAO,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAC7B;AACI,YAAA,IAAI,EAAE,OAAO;AACb,YAAA,IAAI,EAAE;AACT,SAAA,EACD,GAAG,EACH,SAAS,EACT,IAAI,CACP;IACL;IAAE,OAAO,KAAK,EAAE;AACZ,QAAA,OAAO,CAAC,IAAI,CAAC,0CAA0C,EAAE,KAAK,CAAC;AAC/D,QAAA,OAAO,KAAK;IAChB;AACJ;AAEA;;AAEG;AACH,SAAS,eAAe,CAAC,OAAgC,EAAA;;AAErD,IAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AAChD,IAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AAChD,IAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AAChD,IAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AAChD,IAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AAChD,IAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;AACjD,IAAA,IAAI,OAAO,OAAO,CAAC,KAAK,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;IAClD,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC;AAAE,QAAA,OAAO,IAAI;AACjD,IAAA,IAAI,OAAO,OAAO,CAAC,cAAc,KAAK,QAAQ;AAAE,QAAA,OAAO,IAAI;IAE3D,OAAO;QACH,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,GAAG,EAAE,OAAO,CAAC,GAAG;QAChB,IAAI,EAAE,OAAO,CAAC,IAA8B;AAC5C,QAAA,GAAG,EAAE,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,GAAG,OAAO,
CAAC,GAAG,GAAG,SAAS;QAC9D,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,QAAQ,EAAE,OAAO,CAAC,QAAoB;AACtC,QAAA,UAAU,EAAE,OAAO,OAAO,CAAC,UAAU,KAAK,QAAQ,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS;QACnF,cAAc,EAAE,OAAO,CAAC;KAC3B;AACL;AAEA;;AAEG;AACH,SAAS,sBAAsB,CAAC,YAAoB,EAAA;AAChD,IAAA,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC;AACzC,IAAA,MAAM,gBAAgB,GAAG,YAAY,GAAG,GAAG;IAC3C,OAAO,IAAI,CAAC,KAAK,CAAC,gBAAgB,GAAG,KAAK,CAAC,CAAC;AAChD;AAEA;;;;;;AAMG;AACI,eAAe,kBAAkB,CACpC,KAAyB,EACzB,SAAkB,EAAA;;IAGlB,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;QAC/B,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;AACjB,YAAA,OAAO,EAAE,IAAI;AACb,YAAA,KAAK,EAAE,yBAAyB;AAChC,YAAA,aAAa,EAAE,KAAK;AACpB,YAAA,aAAa,EAAE;SAClB;IACL;;AAGA,IAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC;IAC9B,IAAI,CAAC,MAAM,EAAE;QACT,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;AACjB,YAAA,OAAO,EAAE,IAAI;AACb,YAAA,KAAK,EAAE,4BAA4B;AACnC,YAAA,aAAa,EAAE,KAAK;AACpB,YAAA,aAAa,EAAE;SAClB;IACL;;IAGA,MAAM,OAAO,GAAG,eAAe,CAAC,MAAM,CAAC,OAAO,CAAC;IAC/C,IAAI,CAAC,OAAO,EAAE;QACV,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;AACjB,YAAA,OAAO,EAAE,IAAI;AACb,YAAA,KAAK,EAAE,mCAAmC;AAC1C,YAAA,aAAa,EAAE,KAAK;AACpB,YAAA,aAAa,EAAE;SAClB;IACL;;AAGA,IAAA,IAAI,OAAO,CAAC,GAAG,KAAK,WAAW,EAAE;QAC7B,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;AACjB,YAAA,OAAO,EAAE,IAAI;AACb,YAAA,KAAK,EAAE,wBAAwB;AAC/B,YAAA,aAAa,EAAE,KAAK;AACpB,YAAA,aAAa,EAAE;SAClB;IACL;;AAGA,IAAA,IAAI,OAAO,CAAC,GAAG,KAAK,oBAAoB,IAAI,OAAO,CAAC,GAAG,KAAK,GAAG,EAAE;QAC7D,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;AACjB,YAAA,OAAO,EAAE,IAAI;AACb,YAAA,KAAK,EAAE,oCAAoC;AAC3C,YAAA,aAAa,EAAE,KAAK;AACpB,YAAA,aAAa,EAAE;SAClB;IACL;;IAGA,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,MAAM,EAAE;AAChD,QAAA,MAAM,QAAQ,GAAG,SAAS,IAAI,oBAAoB;AAClD,QAAA,MAAM,cAAc,GAAG,MAAM,eAAe,CACxC,MAAM,CAAC,aAAa,EACpB,MAAM,CAAC,SAAS,EAChB,QAAQ,CACX;QAED,IAAI,CAAC,cAAc,EAAE;YACjB,OAAO;AACH,gBAAA,MAAM,EAAE,SAAS;AACjB,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,KAAK,EAAE,uCAAuC;AAC9C,gBAAA,aAAa,EAAE,KAAK;AACpB,gBAAA,aAAa,EAAE;aAClB;QACL;IACJ;;AAGA,IAAA,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC;IACzC,MAAM,aAAa,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC;AAEzD,IAAA,IAAI,OAAO,CAAC,GAAG,GAAG,GAAG,EAAE;;QAEnB,IAAI,OAAO,CAAC,UAAU,IAAI,OAAO,CAAC,UAAU,GAAG,GAAG,EAAE;YAChD,OAAO;AACH,gBAAA,MAAM,EAAE,OAAO;gBACf,OAAO;AACP,gBAAA,aAAa,EAAE,IAAI;gBACnB;aACH;QACL;QAEA,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;YACjB,OAAO;AACP,YAAA,KAAK,EAAE,qBAAqB;AAC5B,YAAA,aAAa,EAAE,KAAK;YACpB;SACH;IACL;;AAGA,IAAA,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;QACzB,OAAO;AACH,YAAA,MAAM,EAAE,MAAM;YACd,OAAO;AACP,YAAA,aAAa,EAAE,KAAK;YACpB;SACH;IACL;;IAGA,OAAO;AACH,QAAA,MAAM,EAAE,OAAO;QACf,OAAO;AACP,QAAA,aAAa,EAAE,KAAK;QACpB;KACH;AACL;AAEA;;AAEG;AACG,SAAU,gBAAgB,CAAC,KAAa,EAAA;AAC1C,IAAA,OAAO,QAAQ,CAAC,KAAK,CAAC,KAAK,IAAI;AACnC;AAEA;;;AAGG;AACG,SAAU,kBAAkB,CAAC,KAAa,EAAA;AAC5C,IAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC;AAC9B,IAAA,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,CAAC,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;AACnD,QAAA,OAAO,IAAI;IACf;IACA,OAAO,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,GAAG,IAAI,CAAC;AAC9C;;ACnRA;AACA;AAUA;;AAEG;AACI,MAAM,iBAAiB,GAAG;;AAE7B,IAAA,MAAM,EAAE,2BAA2B;;AAEnC,IAAA,OAAO,EAAE;;AAGb;;AAEG;AACH,MAAM,uBAAuB,GACzB,8EAA8E;AAC9E,IAAA,kGAAkG;AAEtG;;AAEG;SACa,iBAAiB,GAAA;;AAE7B,IAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,QAAA,OAAO,OAAO,EAAE,GAAG,EAAE,QAAQ,KAAK,YAAY,GAAG,YAAY,GAAG,aAAa;IACjF;IAEA,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,QAAQ,IAAI,EAAE;;AAGhD,IAAA,MAAM,WAAW,GAAG;QAChB,WAAW;QACX,WAAW;QACX,SAAS;QACT,QAAQ;QACR,MAAM;QACN,OAAO;AACP,QAAA,OAAO;AACP,QAAA,OAAO;AACP,QAAA,OAAO;AACP,QAAA,OAAO;KACV;;AAGD,IAAA,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QAC/B,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAA
C,IAAI,MAAM,CAAC,QAAQ,EAAE,IAAI,KAAK,OAAO,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;AAClF,YAAA,OAAO,aAAa;QACxB;IACJ;;IAGA,IAAI,MAAM,CAAC,QAAQ,EAAE,QAAQ,KAAK,OAAO,EAAE;AACvC,QAAA,OAAO,aAAa;IACxB;;AAGA,IAAA,MAAM,eAAe,GAAG;QACpB,UAAU;QACV,UAAU;QACV,WAAW;QACX,aAAa;QACb,cAAc;QACd,YAAY;KACf;AAED,IAAA,KAAK,MAAM,OAAO,IAAI,eAAe,EAAE;AACnC,QAAA,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;;AAE5B,YAAA,OAAO,YAAY;QACvB;IACJ;;AAGA,IAAA,OAAO,YAAY;AACvB;AAEA;;AAEG;AACG,SAAU,kBAAkB,CAAC,WAAwB,EAAA;IACvD,OAAO,WAAW,KAAK,aAAa,GAAG,MAAM,GAAG,MAAM;AAC1D;AAEA;;;;;;AAMG;MACU,cAAc,CAAA;AASvB,IAAA,WAAA,CAAY,SAAwB,EAAE,EAAA;QAP9B,IAAA,CAAA,KAAK,GAAwB,IAAI;QAGjC,IAAA,CAAA,mBAAmB,GAAiC,IAAI;QACxD,IAAA,CAAA,gBAAgB,GAAG,KAAK;AACxB,QAAA,IAAA,CAAA,qBAAqB,GAAgB,IAAI,GAAG,EAAE;AAGlD,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;QACpB,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,iBAAiB,EAAE;QAC5D,IAAI,CAAC,eAAe,GAAG,kBAAkB,CAAC,IAAI,CAAC,WAAW,CAAC;IAC/D;AAEA;;AAEG;AACH,IAAA,MAAM,MAAM,GAAA;;AAER,QAAA,IAAI,IAAI,CAAC,KAAK,EAAE;YACZ,OAAO,IAAI,CAAC,KAAK;QACrB;;AAGA,QAAA,IAAI,IAAI,CAAC,mBAAmB,EAAE;YAC1B,OAAO,IAAI,CAAC,mBAAmB;QACnC;AAEA,QAAA,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,CACzC,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,SAAS,CACxB;AAED,QAAA,IAAI;AACA,YAAA,IAAI,CAAC,KAAK,GAAG,MAAM,IAAI,CAAC,mBAAmB;;YAG3C,IAAI,CAAC,MAAM,CAAC,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC;;YAGxC,IAAI,CAAC,gBAAgB,EAAE;YAEvB,OAAO,IAAI,CAAC,KAAK;QACrB;gBAAU;AACN,YAAA,IAAI,CAAC,mBAAmB,GAAG,IAAI;QACnC;IACJ;AAEA;;AAEG;IACH,QAAQ,GAAA;QACJ,OAAO,IAAI,CAAC,KAAK;IACrB;AAEA;;;AAGG;IACH,OAAO,GAAA;QACH,IAAI,CAAC,IAAI,CAAC,KAAK;AAAE,YAAA,OAAO,KAAK;AAC7B,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,OAAO,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,MAAM;IACxE;AAEA;;AAEG;IACH,cAAc,GAAA;QACV,IAAI,CAAC,IAAI,CAAC,KAAK;AAAE,YAAA,OAAO,IAAI;AAC5B,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;AAC/B,YAAA,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;AAC/B,YAAA,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,SAAS;IAC1C;AAEA;;AAEG;IACH,cAAc,GAAA;QACV,OAAO,IAAI,CAAC,WAAW;IAC3B;AAEA;;AAEG;IACH,kBAAkB,GAAA;QACd,OAAO,IAAI,CAAC,eAAe;IAC/B;AAEA;;;;;;;AAOG;AACH,IAAA,eAAe,CAAC,QAAgB,EAAA;;AAE5B,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACb,YAAA,OAAO,QAAQ;QACnB;;AAGA,QAAA,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;AAC7C,YAAA,OAAO,QAAQ;QACnB;;AAGA,QAAA,IAAI,IAAI,CAAC,eAAe,KAAK,MAAM,EAAE;;AAEjC,YAAA,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE;gBACxB,IAAI,CAAC,qBAAqB,EAAE;AAC5B,gBAAA,IAAI,CAAC,gBAAgB,GAAG,IAAI;YAChC;AACA,YAAA,OAAO,QAAQ;QACnB;;QAGA,OAAO,QAAQ,GAAG,uBAAuB;IAC7C;AAEA;;AAEG;IACH,gBAAgB,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACb,YAAA,OAAO,sBAAsB;QACjC;AAEA,QAAA,QAAQ,IAAI,CAAC,KAAK,CAAC,MAAM;AACrB,YAAA,KAAK,OAAO;AACR,gBAAA,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;AAC1B,oBAAA,OAAO,CAAA,qCAAA,EAAwC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC,gBAAgB;gBAC1G;AACA,gBAAA,OAAO,kBAAkB,IAAI,CAAC,KAAK,CAAC,aAAa,kBAAkB;AAEvE,YAAA,KAAK,MAAM;AACP,gBAAA,OAAO,uBAAuB,IAAI,CAAC,KAAK,CAAC,aAAa,kBAAkB;AAE5E,YAAA,KAAK,SAAS;AACV,gBAAA,OAAO,CAAA,gBAAA,EAAmB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC,WAAW;AAEhF,YAAA,KAAK,SAAS;AACV,gBAAA,OAAO,yBAAyB;AAEpC,YAAA,KAAK,SAAS;AACV,gBAAA,OAAO,oBAAoB,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE;AAEjD,YAAA;AACI,gBAAA,OAAO,wBAAwB;;IAE3C;AAEA;;AAEG;AACH,IAAA,MAAM,QAAQ,GAAA;AACV,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI;AACjB,QAAA,IAAI,CAAC,gBAAgB,GAAG,KAAK;AAC7B,QAAA,IAAI,CAAC,qBAAqB,CAAC,KAAK,EAAE;AAClC,QAAA,OAAO,IAAI,CAAC,MAAM,EAAE;IACxB;AAEA;;;;;AAKG;AACH,IAAA,UAAU,CAAC,OAAe,EAAA;AACtB,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO;AAAE,YAAA,OAAO,KAAK;AACtC,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC;IACxD;AAEA;;AAEG
;IACH,WAAW,GAAA;QACP,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,EAAE,QAAQ,IAAI,EAAE;IAC9C;AAEA;;;;;AAKG;IACH,kBAAkB,GAAA;QACd,OAAO,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,MAAM,EAAE,kBAAkB,CAAC;IACjF;AAEA;;;;;;AAMG;IACH,mBAAmB,GAAA;QACf,OAAO,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,OAAO,EAAE,sBAAsB,CAAC;IACtF;AAEA;;AAEG;IACK,mBAAmB,CAAC,OAAe,EAAE,WAAmB,EAAA;;AAE5D,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,EAAE;AACtB,YAAA,OAAO,IAAI;QACf;AAEA,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC;AAEhE,QAAA,IAAI,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;AACzD,YAAA,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,CAAC;AACvC,YAAA,IAAI,CAAC,iBAAiB,CAAC,OAAO,EAAE,WAAW,CAAC;QAChD;AAEA,QAAA,OAAO,UAAU;IACrB;AAEA;;AAEG;IACK,iBAAiB,CAAC,cAAsB,EAAE,WAAmB,EAAA;AACjE,QAAA,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE,OAAO,EAAE,IAAI,IAAI,SAAS;AAEnD,QAAA,OAAO,CAAC,IAAI,CACR,wBAAwB,WAAW,CAAA,eAAA,EAAkB,cAAc,CAAA,WAAA,CAAa;AAChF,YAAA,CAAA,sCAAA,CAAwC,CAC3C;AACD,QAAA,OAAO,CAAC,IAAI,CACR,8CAA8C,IAAI,CAAA,CAAE,CACvD;QACD,OAAO,CAAC,IAAI,CACR,CAAA,wCAAA,EAA2C,iBAAiB,CAAC,MAAM,GAAG;AAClE,cAAE,iBAAiB,CAAC,IAAI,CAAC,IAAI;AAC7B,cAAE,QAAQ,CAAA,CAAE,CACnB;AACD,QAAA,OAAO,CAAC,IAAI,CACR,CAAA,8FAAA,CAAgG,CACnG;IACL;AAEA;;AAEG;IACK,gBAAgB,GAAA;QACpB,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE;QAEjB,MAAM,MAAM,GAAG,sBAAsB;AAErC,QAAA,QAAQ,IAAI,CAAC,KAAK,CAAC,MAAM;AACrB,YAAA,KAAK,OAAO;AACR,gBAAA,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;AAC1B,oBAAA,OAAO,CAAC,IAAI,CACR,CAAA,EAAG,MAAM,CAAA,sCAAA,CAAwC;AACjD,wBAAA,CAAA,4CAAA,CAA8C,CACjD;gBACL;AAAO,qBAAA,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,KAAK,IAAI,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,EAAE,EAAE;AAC5E,oBAAA,OAAO,CAAC,IAAI,CACR,CAAA,EAAG,MAAM,CAAA,oBAAA,EAAuB,IAAI,CAAC,KAAK,CAAC,aAAa,CAAA,KAAA,CAAO,CAClE;gBACL;gBACA;AAEJ,YAAA,KAAK,MAAM;AACP,gBAAA,OAAO,CAAC,IAAI,CACR,CAAA,EAAG,MAAM,CAAA,sBAAA,EAAyB,IAAI,CAAC,KAAK,CAAC,aAAa,CAAA,eAAA,CAAiB,CAC9E;gBACD;AAEJ,YAAA,KAAK,SAAS;AACV,gBAAA,OAAO,CAAC,KAAK,CACT,CAAA,EAAG,MAAM,CAAA,kBAAA,CAAoB;oBAC7B,CAAA,kBAAA,EAAqB,IAAI,CAAC,eAAe,CAAA,EAAA,CAAI;AAC7C,oBAAA,CAAA,qCAAA,CAAuC,CAC1C;gBACD;AAEJ,YAAA,KAAK,SAAS;AACV,gBAAA,OAAO,CAAC,IAAI,CACR,CAAA,EAAG,MAAM,CAAA,0BAAA,CAA4B;oBACrC,CAAA,kBAAA,EAAqB,IAAI,CAAC,eAAe,CAAA,EAAA,CAAI;AAC7C,oBAAA,CAAA,6CAAA,CAA+C,CAClD;gBACD;AAEJ,YAAA,KAAK,SAAS;gBACV,OAAO,CAAC,KAAK,CACT,CAAA,EAAG,MAAM,CAAA,kBAAA,EAAqB,IAAI,CAAC,KAAK,CAAC,KAAK,CAAA,EAAA,CAAI;AAClD,oBAAA,CAAA,kBAAA,EAAqB,IAAI,CAAC,eAAe,CAAA,CAAE,CAC9C;gBACD;;IAEZ;AAEA;;AAEG;IACK,qBAAqB,GAAA;QACzB,OAAO,CAAC,IAAI,CACR,wDAAwD;AACxD,YAAA,CAAA,EAAG,IAAI,CAAC,KAAK,EAAE,MAAM,IAAI,SAAS,CAAA,UAAA,CAAY;YAC9C,kEAAkE;AAClE,YAAA,+CAA+C,CAClD;IACL;AACH;AAED;;AAEG;AACI,eAAe,oBAAoB,CACtC,MAAqB,EAAA;AAErB,IAAA,MAAM,OAAO,GAAG,IAAI,cAAc,CAAC,MAAM,CAAC;AAC1C,IAAA,MAAM,OAAO,CAAC,MAAM,EAAE;AACtB,IAAA,OAAO,OAAO;AAClB;;ACraA;AACA;AA+DA;;;;;AAKG;MACU,gBAAgB,CAAA;AA4BzB,IAAA,WAAA,CAAY,MAAwB,EAAA;QAvB5B,IAAA,CAAA,MAAM,GAAqB,YAAY;QACvC,IAAA,CAAA,SAAS,GAA2B,IAAI;;QAGxC,IAAA,CAAA,cAAc,GAA0B,IAAI;QAC5C,IAAA,CAAA,mBAAmB,GAAW,CAAC;AACtB,QAAA,IAAA,CAAA,uBAAuB,GAAG,MAAM,CAAC;;QAG1C,IAAA,CAAA,mBAAmB,GAA+B,IAAI;;QAMtD,IAAA,CAAA,cAAc,GAAoC,IAAI;;QAGtD,IAAA,CAAA,QAAQ,GAA2B,IAAI;;QAGvC,IAAA,CAAA,mBAAmB,GAAG,KAAK;;QAI/B,IAAI,CAAC,MAAM,GAAG;YACV,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,MAAM,EAAE,MAAM,CAAC,MAAM;AACrB,YAAA,QAAQ,EAAE;AACN,gBAAA,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,OAAO,IAAI,IAAI;gBACzC,WAAW,EAAE,MAAM,CAAC,QAAQ,EAAE,WAAW,IAAI,QAAQ;AACrD,gBAAA,cAAc,EAAE,MAAM,CAAC,QAAQ,EAAE,cAAc,IAAI,IAAI;AACvD,gBAAA,YAAY,EAAE,MAAM,CAAC,QAAQ,EAAE,YAAY,IAAI;AAClD,aAAA;AACD,YAAA,KAAK,EAAE;AACH,gBAAA,UAAU,EAAE,MAAM
,CAAC,KAAK,EAAE,UAAU,IAAI,CAAC;AACzC,gBAAA,YAAY,EAAE,MAAM,CAAC,KAAK,EAAE,YAAY,IAAI,IAAI;AAChD,gBAAA,kBAAkB,EAAE,MAAM,CAAC,KAAK,EAAE,kBAAkB,IAAI;AAC3D,aAAA;AACD,YAAA,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,IAAI;AAC/B,YAAA,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,IAAI;AAC/B,YAAA,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,IAAI;AAC/B,YAAA,OAAO,EAAE;AACL,gBAAA,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,OAAO,IAAI,IAAI;AACxC,gBAAA,WAAW,EAAE,MAAM,CAAC,OAAO,EAAE,WAAW,IAAI,IAAI;AAChD,gBAAA,YAAY,EAAE,MAAM,CAAC,OAAO,EAAE,YAAY,IAAI,IAAI;AAClD,gBAAA,kBAAkB,EAAE,MAAM,CAAC,OAAO,EAAE,kBAAkB,IAAI,KAAK;AAC/D,gBAAA,UAAU,EAAE,MAAM,CAAC,OAAO,EAAE,UAAU,IAAI;AAC7C,aAAA;YACD,cAAc,EAAE,MAAM,CAAC,cAAc,KAAK,MAAK,EAAE,CAAC,CAAC;YACnD,OAAO,EAAE,MAAM,CAAC,OAAO,KAAK,MAAK,EAAE,CAAC;SACvC;;AAGD,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,eAAe,CAAC;AAC7B,YAAA,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,YAAY;AAC1C,YAAA,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC;AAChC,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,QAAQ,GAAG,IAAI,kBAAkB,EAAE;;AAGxC,QAAA,IAAI,CAAC,cAAc,GAAG,IAAI,cAAc,CAAC;YACrC,UAAU,EAAE,MAAM,CAAC;AACtB,SAAA,CAAC;;QAGF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,MAAK;;AAEnC,YAAA,IAAI,CAAC,cAAc,CAAC,kBAAkB,EAAE;AAC5C,QAAA,CAAC,CAAC,CAAC,KAAK,CAAC,MAAK;;AAEd,QAAA,CAAC,CAAC;;QAGF,IAAI,CAAC,iBAAiB,EAAE;;AAGxB,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,EAAE;AACrE,YAAA,IAAI,CAAC,SAAS,EAAE,CAAC,KAAK,CAAC,MAAK;;AAE5B,YAAA,CAAC,CAAC;QACN;;QAGA,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE;AAC9B,YAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,KAAI;AACxD,gBAAA,OAAO,CAAC,IAAI,CAAC,oDAAoD,EAAE,KAAK,CAAC;AAC7E,YAAA,CAAC,CAAC;QACN;IACJ;;AAIA;;;;;;;;;;AAUG;AACH,IAAA,eAAe,CAAC,YAAiC,EAAA;AAC7C,QAAA,IAAI,CAAC,mBAAmB,GAAG,YAAY;AACvC,QAAA,OAAO,CAAC,GAAG,CAAC,8CAA8C,CAAC;IAC/D;AAEA;;AAEG;IACH,gBAAgB,GAAA;AACZ,QAAA,IAAI,IAAI,CAAC,mBAAmB,KAAK,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,EAAE;AAC5E,YAAA,OAAO,KAAK;QAChB;;AAGA,QAAA,IAAI,CAAC,cAAc,CAAC,mBAAmB,EAAE;AAEzC,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;IACH,sBAAsB,GAAA;QAClB,OAAO,IAAI,CAAC,mBAAmB;IACnC;AAEA;;AAEG;IACH,gBAAgB,GAAA;AACZ,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO;IAC9B;;AAIA;;;AAGG;IACH,gBAAgB,GAAA;QACZ,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;IACrC;AAEA;;;;;;AAMG;AACH,IAAA,wBAAwB,CAAC,OAA+B,EAAA;QACpD,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,EAAE;AAC9B,YAAA,OAAO,KAAK;QAChB;AAEA,QAAA,IAAI,OAAO,KAAK,SAAS,EAAE;AACvB,YAAA,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,WAAW;QAC1C;AAEA,QAAA,IAAI,OAAO,KAAK,UAAU,EAAE;;AAExB,YAAA,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,YAAY,IAAI,IAAI,CAAC,MAAM,KAAK,QAAQ;QACvE;AAEA,QAAA,OAAO,KAAK;IAChB;AAEA;;;;;;AAMG;AACH,IAAA,cAAc,CAAC,OAAe,EAAA;QAa1B,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;AACvD,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,OAAQ;AACxC,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,KAAK;AACjD,QAAA,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,WAAW,CAAC;AAEzC,QAAA,OAAO,KAAK,CAAC,eAAe,CAAC,OAAO,CAAC;IACzC;AAEA;;;;;;AAMG;IACH,MAAM,aAAa,CAAC,MAMnB,EAAA;QAKG,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;YACvD,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE;aACZ;QACL;AAEA,QAAA,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,OAAQ;AACxC,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,KAAK;AACjD,QAAA,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,WAAW,CAAC;AAEzC,QAAA,OAAO,KAAK,CAAC,aAAa,CAAC,MAAM,CAAC;IACtC;AAEA;;;;;;;AAOG;AACH,IAAA,MAAM,QAAQ,CAAC,OAAe,EAAE,OAAoB,EAAA;;QAiBhD,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC;QAEnD,IAAI,cAAc,IAAI,cAAc,
CAAC,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,EAAE;;YAE3E,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,KAAK,KAAK,EAAE;;AAEpD,gBAAA,OAAO,EAAE,MAAM,EAAE,cAAc,CAAC,MAAM,EAAE;YAC5C;;YAGA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC;YAC9D,OAAO;gBACH,MAAM,EAAE,cAAc,CAAC,MAAM;AAC7B,gBAAA,YAAY,EAAE;aACjB;QACL;;QAGA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,OAAO,CAAC;QACjD,OAAO,EAAE,QAAQ,EAAE;IACvB;;AAIA;;;AAGG;IACK,MAAM,iBAAiB,CAAC,MAAqB,EAAA;AACjD,QAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,QAAQ;QAClD,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,IAAI,SAAS,CAAC;;AAG/C,QAAA,IAAI,WAAW,KAAK,QAAQ,EAAE;AAC1B,YAAA,IAAI,CAAC,cAAc,GAAG,IAAI,wBAAwB,EAAE;QACxD;;QAGA,IAAI,IAAI,CAAC,cAAc,IAAI,WAAW,KAAK,QAAQ,EAAE;YACjD,MAAM,OAAO,GAAG,SAAS;AACzB,YAAA,IAAI,MAAM,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE;AACpD,gBAAA,OAAO,CAAC,GAAG,CAAC,mDAAmD,CAAC;gBAChE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC;gBAC1D,IAAI,MAAM,EAAE;oBACR,MAAM,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,MAAM,CAAC;AAC1C,oBAAA,OAAO,CAAC,GAAG,CAAC,kDAAkD,CAAC;oBAC/D;gBACJ;YACJ;QACJ;;AAGA,QAAA,IAAI,MAAM,CAAC,aAAa,EAAE;AACtB,YAAA,IAAI;gBACA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC;AAClD,gBAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;oBACd,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,QAAQ,CAAC,MAAM,CAAA,CAAE,CAAC;gBAClE;AAEA,gBAAA,MAAM,UAAU,GAAwB,MAAM,QAAQ,CAAC,IAAI,EAAE;;gBAG7D,IAAI,IAAI,CAAC,cAAc,IAAI,WAAW,KAAK,QAAQ,EAAE;oBACjD,MAAM,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,UAAU,CAAC;gBAC1D;;gBAGA,MAAM,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC;;AAG9C,gBAAA,IAAI,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE;AACvB,oBAAA,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC;gBACtC;gBAEA,OAAO,CAAC,GAAG,CAAC,uCAAuC,EAAE,MAAM,CAAC,aAAa,CAAC;gBAC1E;YACJ;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,KAAK,CAAC,uDAAuD,EAAE,KAAK,CAAC;YACjF;QACJ;;AAGA,QAAA,IAAI,MAAM,CAAC,UAAU,EAAE;YACnB,MAAM,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,MAAM,CAAC,UAAU,CAAC;;AAGrD,YAAA,IAAI,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE;AACvB,gBAAA,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC;YACtC;AAEA,YAAA,OAAO,CAAC,GAAG,CAAC,wCAAwC,CAAC;YACrD;QACJ;;AAGA,QAAA,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;YAC3B,OAAO,CAAC,IAAI,CACR,2EAA2E;gBAC3E,2EAA2E;gBAC3E,gFAAgF;AAChF,gBAAA,uBAAuB,CAC1B;AACD,YAAA,IAAI,CAAC,mBAAmB,GAAG,IAAI;QACnC;IACJ;AAEA;;AAEG;IACK,MAAM,eAAe,CAAC,MAAqB,EAAA;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,OAAO;YAAE;AAE5B,QAAA,IAAI;;YAEA,MAAM,EAAE,eAAe,EAAE,GAAG,MAAM,iEAAoC;AAEtE,YAAA,IAAI,CAAC,QAAQ,GAAG,IAAI,eAAe,CAAC;AAChC,gBAAA,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS;AACjC,gBAAA,IAAI,EAAE,CAAC;AACP,gBAAA,OAAO,EAAE;AACZ,aAAA,CAAC;;AAGF,YAAA,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE;AACvB,gBAAA,MAAM,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC;YAC/D;AAAO,iBAAA,IAAI,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE;;AAE/B,gBAAA,MAAM,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,KAAK,CAAC,SAAgB,CAAC;YAChE;AAEA,YAAA,OAAO,CAAC,GAAG,CAAC,oCAAoC,CAAC;QACrD;QAAE,OAAO,KAAK,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,8CAA8C,EAAE,KAAK,CAAC;AACnE,YAAA,IAAI,CAAC,QAAQ,GAAG,IAAI;QACxB;IACJ;AAEA;;;AAGG;AACH,IAAA,MAAM,oBAAoB,GAAA;QACtB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE;AAC/B,YAAA,OAAO,CAAC,IAAI,CAAC,0CAA0C,CAAC;YACxD;QACJ;;AAGA,QAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE;AACrB,QAAA,IAAI,IAAI,CAAC,cAAc,EAAE;AACrB,YAAA,MAAM,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE;QACrC;AACA,QAAA,IAAI,IAAI,CAAC,QAAQ,EAAE;AACf,YAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE;QACzB;;QAGA,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC;IACrD;AAEA;;AAEG;IACH,qBAAqB,GAAA;QACjB,OAAO,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,IAAI,KAAK;IAC5C;AAEA;;AAEG;IACH,mBAAmB,GAAA;QAMf,OAAO,IAAI,CAAC,QAAQ,EAAE,YAAY,EAAE,IAAI,IAAI;IAChD;AAEA;;AAEG;IACH,iBAAiB,GAAA;AAMb,QAAA
,OAAO,IAAI,CAAC,QAAQ,CAAC,eAAe,EAAE;IAC1C;AAEA;;AAEG;IACH,iBAAiB,GAAA;QACb,OAAO,IAAI,CAAC,cAAc;IAC9B;;AAIA;;;;;;AAMG;AACH,IAAA,MAAM,GAAG,CAAC,OAAe,EAAE,OAAoB,EAAA;;QAE3C,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YACzC,OAAO,IAAI,CAAC,mBAAmB,CAAC,qBAAqB,EAAE,MAAM,CAAC;QAClE;;AAGA,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,EAAE;QAE/C,IAAI,QAAQ,CAAC,eAAe,IAAI,QAAQ,CAAC,mBAAmB,EAAE;AAC1D,YAAA,OAAO,CAAC,GAAG,CAAC,yDAAyD,CAAC;;AAGtE,YAAA,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE;gBACtB,OAAO;oBACH,KAAK,EAAE,QAAQ,CAAC,kBAAkB;wBAC9B,qEAAqE;AACzE,oBAAA,UAAU,EAAE,MAAM;AAClB,oBAAA,OAAO,EAAE,EAAE;AACX,oBAAA,OAAO,EAAE,IAAI;AACb,oBAAA,cAAc,EAAE;iBACnB;YACL;;AAGA,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;QAC1C;;AAGA,QAAA,IAAI;YACA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC;AAC1D,YAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;;AAGxB,YAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,QAAQ,CAAC,KAAK,CAAC;YAE1E,OAAO;AACH,gBAAA,KAAK,EAAE,cAAc;AACrB,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,QAAQ,CAAC,OAAO,IAAI,EAAE;AAC/B,gBAAA,OAAO,EAAE,KAAK;gBACd,SAAS,EAAE,QAAQ,CAAC;aACvB;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,MAAM,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC;AAC7C,YAAA,IAAI,CAAC,SAAS,GAAG,UAAU;AAC3B,YAAA,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC;;AAG/B,YAAA,IAAI,UAAU,CAAC,IAAI,KAAK,YAAY,EAAE;gBAClC,OAAO;AACH,oBAAA,KAAK,EAAE,2EAA2E;AAClF,oBAAA,UAAU,EAAE,MAAM;AAClB,oBAAA,OAAO,EAAE,EAAE;AACX,oBAAA,OAAO,EAAE,KAAK;oBACd,UAAU,EAAE,UAAU,CAAC;iBAC1B;YACL;;AAGA,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,IAAI,CAAC,OAAO,EAAE,YAAY,EAAE;AACxD,gBAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;YAC1C;;AAGA,YAAA,OAAO,IAAI,CAAC,mBAAmB,CAC3B,mEAAmE,EACnE,MAAM,EACN,UAAU,CAAC,UAAU,CACxB;QACL;IACJ;AAEA;;;;;;AAMG;AACH,IAAA,MAAM,SAAS,CACX,OAAe,EACf,SAA0B,EAC1B,OAAoB,EAAA;QAEpB,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YACzC,SAAS,CAAC,OAAO,GAAG;AAChB,gBAAA,IAAI,EAAE,iBAAiB;AACvB,gBAAA,OAAO,EAAE;AACZ,aAAA,CAAC;YACF;QACJ;;AAGA,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,EAAE;QAE/C,IAAI,QAAQ,CAAC,eAAe,IAAI,QAAQ,CAAC,mBAAmB,EAAE;AAC1D,YAAA,OAAO,CAAC,GAAG,CAAC,+DAA+D,CAAC;YAC5E,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;AAChD,YAAA,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC;YAChC;QACJ;AAEA,QAAA,IAAI;AACA,YAAA,MAAM,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,OAAO,EAAE;gBACrC,SAAS,EAAE,OAAO,EAAE,SAAS;gBAC7B,OAAO,EAAE,OAAO,EAAE,OAAO;gBACzB,OAAO,EAAE,SAAS,CAAC,OAAO;gBAC1B,SAAS,EAAE,SAAS,CAAC,SAAS;AAC9B,gBAAA,UAAU,EAAE,CAAC,IAAI,KAAI;AACjB,oBAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;;AAGxB,oBAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC;oBAEzE,SAAS,CAAC,UAAU,GAAG;AACnB,wBAAA,KAAK,EAAE,cAAc;AACrB,wBAAA,UAAU,EAAE,MAAM;AAClB,wBAAA,OAAO,EAAE,IAAI,CAAC,OAAO,IAAI,EAAE;AAC3B,wBAAA,OAAO,EAAE,KAAK;wBACd,SAAS,EAAE,IAAI,CAAC;AACnB,qBAAA,CAAC;gBACN,CAAC;AACD,gBAAA,OAAO,EAAE,CAAC,KAAK,KAAI;oBACf,MAAM,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC;AAC7C,oBAAA,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC;AAC/B,oBAAA,SAAS,CAAC,OAAO,GAAG,UAAU,CAAC;gBACnC;AACH,aAAA,CAAC;QACN;QAAE,OAAO,KAAK,EAAE;YACZ,MAAM,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC;AAC7C,YAAA,IAAI,CAAC,SAAS,GAAG,UAAU;AAC3B,YAAA,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC;;AAG/B,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,IAAI,CAAC,OAAO,EAAE,YAAY,EAAE;gBACxD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;AAChD,gBAAA,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC;YACpC;iBAAO;AACH,gBAAA,SAAS,CAAC,OAAO,GAAG,UAAU,CAAC;YACnC;QACJ;IACJ;AAEA;;AAEG;AACH,IAAA,MAAM,SAAS,GAAA;AACX,QAAA,IAAI;YACA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;YAC/C,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,QAAQ,CAAC;AAE1D,YAAA,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;gBACjB
,MAAM,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC;gBAC5B,MAAM,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;gBAC/B,OAAO,CAAC,GAAG,CAAC,CAAA,2BAAA,EAA8B,IAAI,CAAC,MAAM,CAAA,UAAA,CAAY,CAAC;YACtE;AAEA,YAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;QAC5B;QAAE,OAAO,KAAK,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,iCAAiC,EAAE,KAAK,CAAC;;QAE1D;IACJ;AAEA;;AAEG;IACH,SAAS,GAAA;QAOL,OAAO;YACH,UAAU,EAAE,IAAI,CAAC,MAAM;AACvB,YAAA,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;YAC7B,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,cAAc,EAAE,IAAI,CAAC,cAAc;AACnC,YAAA,OAAO,EAAE,IAAI,CAAC,cAAc,CAAC,QAAQ;SACxC;IACL;AAEA;;AAEG;IACH,iBAAiB,GAAA;QACb,OAAO,IAAI,CAAC,cAAc;IAC9B;AAEA;;AAEG;AACH,IAAA,MAAM,UAAU,GAAA;AACZ,QAAA,MAAM,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE;AACxB,QAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE;IACzB;AAEA;;AAEG;AACH,IAAA,MAAM,eAAe,GAAA;AACjB,QAAA,IAAI;AACA,YAAA,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE;AAChC,YAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC;AACxB,YAAA,OAAO,IAAI;QACf;AAAE,QAAA,MAAM;AACJ,YAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;AACzB,YAAA,OAAO,KAAK;QAChB;IACJ;;AAIA;;;AAGG;AACH,IAAA,MAAM,iBAAiB,GAAA;;QAEnB,IAAI,IAAI,CAAC,cAAc;AACnB,YAAA,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,uBAAuB,EAAE;YACtE,OAAO,IAAI,CAAC,cAAc;QAC9B;AAEA,QAAA,IAAI;YACA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE;;YAG/C,IAAI,CAAC,cAAc,GAAG;AAClB,gBAAA,mBAAmB,EAAE,MAAM,CAAC,cAAc,EAAE,mBAAmB,IAAI,GAAG;AACtE,gBAAA,eAAe,EAAE,MAAM,CAAC,cAAc,EAAE,eAAe,IAAI,KAAK;AAChE,gBAAA,kBAAkB,EAAE,MAAM,CAAC,cAAc,EAAE,kBAAkB;AAC7D,gBAAA,mBAAmB,EAAE,MAAM,CAAC,cAAc,EAAE,mBAAmB,IAAI;aACtE;AACD,YAAA,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,GAAG,EAAE;YAErC,OAAO,IAAI,CAAC,cAAc;QAC9B;AAAE,QAAA,MAAM;;YAEJ,OAAO,IAAI,CAAC,cAAc,IAAI;AAC1B,gBAAA,mBAAmB,EAAE,GAAG;AACxB,gBAAA,eAAe,EAAE,KAAK;AACtB,gBAAA,mBAAmB,EAAE;aACxB;QACL;IACJ;AAEA;;AAEG;IACH,iBAAiB,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,cAAc,EAAE,eAAe,IAAI,KAAK;IACxD;AAEA;;AAEG;IACH,qBAAqB,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,cAAc,EAAE,kBAAkB;IAClD;AAEA;;AAEG;IACH,YAAY,GAAA;QACR,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QAC1C,IAAI,CAAC,WAAW,CAAC,UAAU;AAAE,YAAA,OAAO,KAAK;;QAGzC,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,EAAE,mBAAmB,IAAI,GAAG;QACtE,MAAM,WAAW,GAAG,cAAc,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI;AAEnD,QAAA,MAAM,YAAY,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,OAAO,EAAE;QAE/D,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,YAAY,GAAG,WAAW;IAClD;;AAIA;;AAEG;AACK,IAAA,MAAM,YAAY,CACtB,OAAe,EACf,OAAoB,EAAA;AAMpB,QAAA,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK;QAC1E,IAAI,SAAS,GAAiB,IAAI;AAElC,QAAA,KAAK,IAAI,OAAO,GAAG,CAAC,EAAE,OAAO,IAAI,UAAU,EAAE,OAAO,EAAE,EAAE;AACpD,YAAA,IAAI;gBACA,OAAO,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE;oBACtC,SAAS,EAAE,OAAO,EAAE,SAAS;oBAC7B,OAAO,EAAE,OAAO,EAAE;AACrB,iBAAA,CAAC;YACN;YAAE,OAAO,KAAK,EAAE;gBACZ,SAAS,GAAG,KAAc;;gBAG1B,MAAM,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC;AAC7C,gBAAA,IAAI,UAAU,CAAC,IAAI,KAAK,YAAY,IAAI,UAAU,CAAC,IAAI,KAAK,YAAY,EAAE;AACtE,oBAAA,MAAM,KAAK;gBACf;;AAGA,gBAAA,IAAI,OAAO,GAAG,UAAU,EAAE;oBACtB,MAAM,KAAK,GAAG;0BACR,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO;0BAClC,YAAY;AAClB,oBAAA,MAAM,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;gBAC3B;YACJ;QACJ;AAEA,QAAA,MAAM,SAAS;IACnB;AAEA;;;AAGG;IACK,MAAM,WAAW,CAAC,OAAe,EAAA;AACrC,QAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;;AAGzB,QAAA,IAAI,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE;AAC1B,YAAA,IAAI;gBACA,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;AAEpD,gBAAA,IAAI,CAAC,WAAW,CAAC,QAAQ,EAAE;;AAEvB,oBAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,WAAW,CAAC,MAAM,CAAC;oBAE9E,OAAO;AACH,wBAAA,KAAK,EAAE,cAAc;wBACrB,UAAU,EAAE,WAAW,CAAC,UAAU;wBAClC,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK;4BACnC,KAAK,EAAE,CAAC,CAAC
,OAAO;AAChB,4BAAA,OAAO,EAAE,CAAC,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;4BAC5C,SAAS,EAAE,CAAC,CAAC;AAChB,yBAAA,CAAC,CAAC;AACH,wBAAA,OAAO,EAAE,IAAI;AACb,wBAAA,cAAc,EAAE;qBACnB;gBACL;;YAEJ;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,IAAI,CAAC,wDAAwD,EAAE,KAAK,CAAC;;YAEjF;QACJ;;QAGA,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,CAAC,eAAe,EAAE;QACtD,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,aAAa,GAAG,CAAC,EAAE;AAC3D,YAAA,IAAI;gBACA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;;gBAG/C,IAAI,UAAU,GAA8B,QAAQ;AACpD,gBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;oBACvB,UAAU,GAAG,KAAK;gBACtB;AAAO,qBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;AAC9B,oBAAA,UAAU,GAAG,QAAQ,CAAC;gBAC1B;;AAGA,gBAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;gBAEzE,OAAO;AACH,oBAAA,KAAK,EAAE,cAAc;oBACrB,UAAU;oBACV,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK;wBAC9B,KAAK,EAAE,CAAC,CAAC,KAAK;wBACd,OAAO,EAAE,CAAC,CAAC,OAAO;wBAClB,SAAS,EAAE,CAAC,CAAC;AAChB,qBAAA,CAAC,CAAC;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,cAAc,EAAE,cAAc,CAAC;AAC3B,0BAAE;AACF,0BAAE;iBACT;YACL;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,KAAK,CAAC,+BAA+B,EAAE,KAAK,CAAC;;YAEzD;QACJ;;QAGA,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QAE1C,IAAI,WAAW,CAAC,aAAa,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;YAC3D,OAAO;AACH,gBAAA,KAAK,EAAE,4GAA4G;AACnH,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,cAAc,EAAE;aACnB;QACL;AAEA,QAAA,IAAI;;YAEA,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;;YAGxC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE,EAAE;gBAC5B,MAAM,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;YACnC;;YAGA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC;;YAG/C,IAAI,UAAU,GAA8B,QAAQ;AACpD,YAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;gBACvB,UAAU,GAAG,KAAK;YACtB;AAAO,iBAAA,IAAI,MAAM,CAAC,QAAQ,GAAG,GAAG,EAAE;AAC9B,gBAAA,UAAU,GAAG,QAAQ,CAAC;YAC1B;;AAGA,YAAA,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;YAEzE,OAAO;AACH,gBAAA,KAAK,EAAE,cAAc;gBACrB,UAAU;gBACV,OAAO,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK;oBAC9B,KAAK,EAAE,CAAC,CAAC,KAAK;oBACd,OAAO,EAAE,CAAC,CAAC,OAAO;oBAClB,SAAS,EAAE,CAAC,CAAC;AAChB,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,cAAc,EAAE,WAAW,CAAC;AACxB,sBAAE;AACF,sBAAE;aACT;QACL;QAAE,OAAO,KAAK,EAAE;AACZ,YAAA,OAAO,CAAC,KAAK,CAAC,+BAA+B,EAAE,KAAK,CAAC;YACrD,OAAO;AACH,gBAAA,KAAK,EAAE,iGAAiG;AACxG,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,cAAc,EAAE;aACnB;QACL;IACJ;AAEA;;AAEG;IACK,iBAAiB,GAAA;AACrB,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,MAAM,CAAC,gBAAgB,CAAC,QAAQ,EAAE,MAAK;gBACnC,IAAI,CAAC,SAAS,EAAE,CAAC,KAAK,CAAC,MAAK,EAAE,CAAC,CAAC;AACpC,YAAA,CAAC,CAAC;AAEF,YAAA,MAAM,CAAC,gBAAgB,CAAC,SAAS,EAAE,MAAK;AACpC,gBAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;AAC7B,YAAA,CAAC,CAAC;;AAGF,YAAA,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE;AACnB,gBAAA,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;YAC7B;QACJ;IACJ;AAEA;;AAEG;AACK,IAAA,SAAS,CAAC,MAAwB,EAAA;AACtC,QAAA,IAAI,IAAI,CAAC,MAAM,KAAK,MAAM,EAAE;AACxB,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC;QACtC;IACJ;AAEA;;AAEG;AACK,IAAA,cAAc,CAAC,KAAc,EAAA;AACjC,QAAA,IAAI,KAAK,YAAY,KAAK,EAAE;;AAExB,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;gBACzE,OAAO;AACH,oBAAA,IAAI,EAAE,YAAY;AAClB,oBAAA,OAAO,EAAE,4BAA4B;AACrC,oBAAA,aAAa,EAAE;iBAClB;YACL;AAEA,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE;gBACvE,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,oBAAoB,CAAC;gBACvD,OAAO;AACH,oBAAA,IAAI,EAAE,YAAY;AAClB,oBAAA,OAAO,EAAE,q
BAAqB;AAC9B,oBAAA,UAAU,EAAE,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE;AAC/C,oBAAA,aAAa,EAAE;iBAClB;YACL;AAEA,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACjE,OAAO;AACH,oBAAA,IAAI,EAAE,cAAc;AACpB,oBAAA,OAAO,EAAE,uBAAuB;AAChC,oBAAA,aAAa,EAAE;iBAClB;YACL;YAEA,OAAO;AACH,gBAAA,IAAI,EAAE,eAAe;AACrB,gBAAA,OAAO,EAAE,KAAK,CAAC,OAAO,IAAI,eAAe;AACzC,gBAAA,aAAa,EAAE;aAClB;QACL;QAEA,OAAO;AACH,YAAA,IAAI,EAAE,eAAe;AACrB,YAAA,OAAO,EAAE;SACZ;IACL;AAEA;;AAEG;AACK,IAAA,mBAAmB,CACvB,OAAe,EACf,UAA0B,EAC1B,UAAmB,EAAA;QAEnB,OAAO;AACH,YAAA,KAAK,EAAE,OAAO;YACd,UAAU;AACV,YAAA,OAAO,EAAE,EAAE;AACX,YAAA,OAAO,EAAE,KAAK;YACd;SACH;IACL;AAEA;;AAEG;AACK,IAAA,KAAK,CAAC,EAAU,EAAA;AACpB,QAAA,OAAO,IAAI,OAAO,CAAC,OAAO,IAAI,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAC1D;AACH;;ACxkCD;AACA;AAIA;AACA,MAAM,eAAe,GAAG,0BAA0B;AAElD;;AAEG;AACG,SAAU,cAAc,CAAC,MAAe,EAAA;IAC1C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AACvC,QAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC;IAC/C;IAEA,MAAM,CAAC,GAAG,MAAiC;AAE3C,IAAA,IAAI,CAAC,CAAC,CAAC,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,KAAK,QAAQ,EAAE;AAC3C,QAAA,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC;IAC9D;AAEA,IAAA,IAAI,CAAC,CAAC,CAAC,MAAM,IAAI,OAAO,CAAC,CAAC,MAAM,KAAK,QAAQ,EAAE;AAC3C,QAAA,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC;IAC9D;IAEA,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AAC7B,QAAA,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC;IACnD;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,kBAAkB,GAAA;;IAEvB,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;AAChD,QAAA,OAAO,IAAI;IACf;;AAGA,IAAA,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB;AAChD,IAAA,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,4BAA4B;IAEvD,IAAI,MAAM,EAAE;QACR,OAAO;YACH,MAAM;YACN,MAAM,EAAE,MAAM,IAAI,eAAe;AACjC,YAAA,OAAO,EAAE;SACZ;IACL;;AAGA,IAAA,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B;AAC7D,IAAA,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,sCAAsC;IAEpE,IAAI,SAAS,EAAE;QACX,OAAO;AACH,YAAA,MAAM,EAAE,SAAS;YACjB,MAAM,EAAE,SAAS,IAAI,eAAe;AACpC,YAAA,OAAO,EAAE;SACZ;IACL;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;;AAGG;AACH,SAAS,eAAe,GAAA;;AAEpB,IAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;;AAE/B,QAAA,MAAM,QAAQ,GAAI,MAAc,CAAC,oBAAoB;QACrD,IAAI,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;YAC7D,OAAO;AACH,gBAAA,GAAG,QAAQ;AACX,gBAAA,MAAM,EAAE,QAAQ,CAAC,MAAM,IAAI,eAAe;AAC1C,gBAAA,OAAO,EAAE;aACZ;QACL;IACJ;;;AAIA,IAAA,IAAI;;QAEA,MAAM,UAAU,GAAI,UAAkB,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG;QACxD,IAAI,UAAU,EAAE;AACZ,YAAA,MAAM,MAAM,GAAG,UAAU,CAAC,0BAA0B;AACpD,YAAA,MAAM,MAAM,GAAG,UAAU,CAAC,iCAAiC;YAC3D,IAAI,MAAM,EAAE;gBACR,OAAO;oBACH,MAAM;oBACN,MAAM,EAAE,MAAM,IAAI,eAAe;AACjC,oBAAA,OAAO,EAAE;iBACZ;YACL;QACJ;IACJ;AAAE,IAAA,MAAM;;IAER;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,oBAAoB,GAAA;AACzB,IAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,QAAA,OAAO,IAAI;IACf;;AAGA,IAAA,MAAM,YAAY,GAAI,MAAc,CAAC,eAAe;IACpD,IAAI,YAAY,IAAI,OAAO,YAAY,KAAK,QAAQ,IAAI,YAAY,CAAC,MAAM,EAAE;QACzE,OAAO;AACH,YAAA,GAAG,YAAY;AACf,YAAA,MAAM,EAAE,YAAY,CAAC,MAAM,IAAI,eAAe;AAC9C,YAAA,OAAO,EAAE;SACZ;IACL;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;AAEG;AACH,SAAS,wBAAwB,GAAA;IAC7B,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;AAClE,QAAA,OAAO,IAAI;IACf;IAEA,MAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,4BAA4B,CAAC;IACnE,IAAI,MAAM,EAAE;QACR,MAAM,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,oBAAoB,CAAC;QACxD,IAAI,MAAM,EAAE;YACR,OAAO;gBACH,MAAM;gBACN,MAAM,EAAE,MAAM,CAAC,YAAY,CAAC,oBAAoB,CAAC,IAAI,eAAe;AACpE,gBAAA,OAAO,EAAE;aACZ;QACL;IACJ;AAEA,IAAA,OAAO,IAAI;AACf;AAEA;;;;;;;;;;AAUG;AACG,SAAU,UAAU,CAAC,OAAA,GAAyB,EAAE,EAAA;AAClD,IAAA,MAAM,EAAE,iBAAiB,GAAG,IAAI,EAAE,GAAG,OAAO;;AAG5C,IAAA,MAAM,SAAS,GAAG,kBAAkB,EAAE;IACtC,IAAI,SAAS,EAAE
;AACX,QAAA,OAAO,SAAS;IACpB;;AAGA,IAAA,MAAM,UAAU,GAAG,eAAe,EAAE;IACpC,IAAI,UAAU,EAAE;AACZ,QAAA,OAAO,UAAU;IACrB;;AAGA,IAAA,MAAM,YAAY,GAAG,oBAAoB,EAAE;IAC3C,IAAI,YAAY,EAAE;QACd,cAAc,CAAC,YAAY,CAAC;AAC5B,QAAA,OAAO,YAAY;IACvB;;AAGA,IAAA,MAAM,YAAY,GAAG,wBAAwB,EAAE;IAC/C,IAAI,YAAY,EAAE;AACd,QAAA,OAAO,YAAY;IACvB;;IAGA,IAAI,iBAAiB,EAAE;QACnB,MAAM,IAAI,KAAK,CACX,wEAAwE;YACxE,mEAAmE;YACnE,uEAAuE;YACvE,yEAAyE;YACzE,4DAA4D;AAC5D,YAAA,6EAA6E,CAChF;IACL;SAAO;AACH,QAAA,OAAO,CAAC,IAAI,CAAC,2EAA2E,CAAC;AACzF,QAAA,OAAO,IAAI;IACf;AACJ;AAEA;;AAEG;AACI,MAAM,aAAa,GAAG;IACzB,kBAAkB;IAClB,eAAe;IACf,oBAAoB;IACpB;;;ACxNJ;AACA;AAgBA;;AAEG;AACH,MAAM,eAAe,GAAwE;AACzF,IAAA,QAAQ,EAAE;QACN,EAAE,OAAO,EAAE,+DAA+D,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;QAC3G,EAAE,OAAO,EAAE,qDAAqD,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;AAClG,QAAA,EAAE,OAAO,EAAE,qCAAqC,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;AACpF,KAAA;AACD,IAAA,QAAQ,EAAE;QACN,EAAE,OAAO,EAAE,2CAA2C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;QACxF,EAAE,OAAO,EAAE,6CAA6C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE;QAC1F,EAAE,OAAO,EAAE,8CAA8C,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE;AAC7F,KAAA;AACD,IAAA,YAAY,EAAE;QACV,EAAE,OAAO,EAAE,yDAAyD,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,EAAE;QACzG,EAAE,OAAO,EAAE,4CAA4C,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,IAAI,EAAE;AAChG,KAAA;AACD,IAAA,YAAY,EAAE;QACV,EAAE,OAAO,EAAE,qEAAqE,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,EAAE;QACrH,EAAE,OAAO,EAAE,qDAAqD,EAAE,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,IAAI,EAAE;AACzG,KAAA;AACD,IAAA,MAAM,EAAE;QACJ,EAAE,OAAO,EAAE,mEAAmE,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE;QAC9G,EAAE,OAAO,EAAE,8DAA8D,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE;AAC3G,KAAA;AACD,IAAA,SAAS,EAAE;QACP,EAAE,OAAO,EAAE,yDAAyD,EAAE,IAAI,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE;AACzG,KAAA;AACD,IAAA,MAAM,EAAE;QACJ,EAAE,OAAO,EAAE,yCAAyC,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE;AACvF;CACJ;AAED;;;;;AAKG;MACU,0BAA0B,CAAA;AAQnC,IAAA,WAAA,CAAY,MAAmB,EAAA;QAHvB,IAAA,CAAA,UAAU,GAAyB,EAAE;AACrC,QAAA,IAAA,CAAA,aAAa,GAA0B,IAAI,GAAG,EAAE,CAAC;AAGrD,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,GAAG,EAAE;AAC7B,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,GAAG,EAAE;AAC1B,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,GAAG,EAAE;;QAG3B,IAAI,CAAC,YAAY,EAAE;IACvB;AAEA;;AAEG;IACK,YAAY,GAAA;;AAEhB,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACrB,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;;AAErC,gBAAA,MAAM,IAAI,GAAG;AACT,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;oBACpB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;gBACrC;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;AACnB,YAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;AAC1D,gBAAA,MAAM,IAAI,GAAG;oBACT,GAAG,CAAC,WAAW,EAAE;AACjB,oBAAA,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE;AACxB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBAClB,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC;gBAChC;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AACpB,YAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;AAC3D,gBAAA,MAAM,IAAI,GAAG;oBACT,GAAG,CAAC,WAAW,EAAE;AACjB,oBAAA,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE;AACtB,oBAAA,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;iBACpD;AACD,gBAAA,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;oBAClB,IAAI,CAAC,UAAU
,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC;gBACjC;YACJ;QACJ;IACJ;AAEA;;;;;AAKG;AACH,IAAA,QAAQ,CAAC,OAAe,EAAA;AACpB,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACtB,OAAO;AACH,gBAAA,MAAM,EAAE,IAAI;AACZ,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,aAAa,EAAE;aAClB;QACL;QAEA,MAAM,iBAAiB,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE;;AAGtD,QAAA,KAAK,MAAM,CAAC,UAAU,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;YAClE,KAAK,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,QAAQ,EAAE;gBACxC,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;gBACpC,IAAI,KAAK,EAAE;AACP,oBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,CAAC;oBAC1D,IAAI,MAAM,EAAE;AACR,wBAAA,OAAO,MAAM;oBACjB;gBACJ;YACJ;QACJ;;QAGA,MAAM,aAAa,GAAG,IAAI,CAAC,iBAAiB,CAAC,iBAAiB,CAAC;QAC/D,IAAI,aAAa,EAAE;AACf,YAAA,OAAO,aAAa;QACxB;;QAGA,MAAM,YAAY,GAAG,IAAI,CAAC,iBAAiB,CAAC,iBAAiB,CAAC;QAC9D,IAAI,YAAY,EAAE;AACd,YAAA,OAAO,YAAY;QACvB;;QAGA,OAAO;AACH,YAAA,MAAM,EAAE,IAAI;AACZ,YAAA,cAAc,EAAE,IAAI;AACpB,YAAA,aAAa,EAAE,CAAC;AAChB,YAAA,eAAe,EAAE;SACpB;IACL;AAEA;;AAEG;AACK,IAAA,WAAW,CACf,UAAkB,EAClB,KAAuB,EACvB,UAAkB,EAAA;QAElB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;QAExD,QAAQ,UAAU;YACd,KAAK,UAAU,EAAE;gBACb,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;gBAE/C,IAAI,SAAS,EAAE;AACX,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,SAAS,CAAC,UAAU;oBACpD,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;AAChB,4BAAA,MAAM,EAAE,SAAS,CAAC,KAAK,CAAC,IAAI;4BAC5B,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,YAAA,EAAe,SAAS,CAAC,KAAK,CAAC,IAAI,CAAA;AACnD,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;wBACzB,eAAe,EAAE,CAAC,UAAU,EAAE,SAAS,CAAC,KAAK,CAAC,IAAI;qBACrD;gBACL;gBACA;YACJ;YAEA,KAAK,UAAU,EAAE;gBACb,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;AACxC,gBAAA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC;gBAEpD,IAAI,KAAK,EAAE;AACP,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;4BAChB,MAAM,EAAE,KAAK,CAAC,QAAQ;4BACtB,MAAM,EAAE,EAAE,KAAK,EAAE;4BACjB,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,MAAA,EAAS,KAAK,CAAC,IAAI,CAAA,QAAA,EAAW,KAAK,CAAA,CAAA;AACnD,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,UAAU,EAAE,KAAK,CAAC,IAAI;qBAC3C;gBACL;;gBAGA,IAAI,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE;AAC3B,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,GAAG;oBACnC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,UAAU;4BAChB,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ;AACzC,4BAAA,MAAM,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE;4BACzB,UAAU;4BACV,WAAW,EAAE,CAAA,YAAA,EAAe,MAAM,CAAA,CAAA;AACrC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,UAAU,EAAE,QAAQ;qBACzC;gBACL;gBACA;YACJ;YAEA,KAAK,cAAc,EAAE;gBACjB,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBAC5C,MAAM,QAAQ,GAAG,IAAI,CAAC,kBAAkB,CAAC,UAAU,CAAC;gBAEpD,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,cAAc;AACpB,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,OAAA,EAAU,UAAU,CAAA,QAAA;AACpC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,cAAc,EAAE,UAAU;qBAC/C;gBACL;gBACA;YACJ;YAEA,KAAK,cAAc,EAAE;gBACjB,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBAC3C,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,CAAC;gBAE7C,IAAI,KAA
K,EAAE;AACP,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,cAAc;4BACpB,MAAM,EAAE,KAAK,CAAC,OAAO;4BACrB,UAAU;AACV,4BAAA,WAAW,EAAE,CAAA,KAAA,EAAQ,KAAK,CAAC,EAAE,CAAA,MAAA;AAChC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE;qBAC7C;gBACL;gBACA;YACJ;YAEA,KAAK,QAAQ,EAAE;gBACX,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;gBAE9C,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,GAAG;oBACnC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,QAAQ;AACd,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,UAAA,EAAa,MAAM,CAAA;AACnC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,MAAM;qBACrC;gBACL;gBACA;YACJ;YAEA,KAAK,WAAW,EAAE;;gBAEd,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;gBACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,wBAAwB,CAAC,MAAM,CAAC;gBAEtD,IAAI,QAAQ,EAAE;AACV,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,MAAM,EAAE,QAAQ;4BAChB,UAAU;4BACV,WAAW,EAAE,CAAA,UAAA,EAAa,MAAM,CAAA;AACnC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,WAAW,EAAE,MAAM;qBACxC;gBACL;gBACA;YACJ;YAEA,KAAK,QAAQ,EAAE;gBACX,MAAM,UAAU,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE;gBAC1C,IAAI,IAAI,CAAC,MAAM,CAAC,aAAa,GAAG,UAAU,CAAC,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,UAAU,GAAG,IAAI;oBACpC,OAAO;AACH,wBAAA,MAAM,EAAE;AACJ,4BAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,4BAAA,IAAI,EAAE,QAAQ;AACd,4BAAA,MAAM,EAAE,UAAU;4BAClB,UAAU;4BACV,WAAW,EAAE,CAAA,QAAA,EAAW,UAAU,CAAA,OAAA;AACrC,yBAAA;wBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,wBAAA,aAAa,EAAE,UAAU;AACzB,wBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,UAAU;qBACzC;gBACL;gBACA;YACJ;;AAGJ,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,aAAa,CAAC,KAAuB,EAAA;;AAEzC,QAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,YAAA,IAAI,KAAK,CAAC,CAAC,CAAC,EAAE;AACV,gBAAA,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;YAC1B;QACJ;AACA,QAAA,OAAO,EAAE;IACb;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,MAAc,EAAA;AACnC,QAAA,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,cAAc,EAAE,EAAE,CAAC;;QAGzE,MAAM,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,gBAAgB,CAAC;QACrD,IAAI,KAAK,EAAE;YACP,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,GAAG,EAAE;QAC5C;;QAGA,IAAI,SAAS,GAAwB,IAAI;QACzC,IAAI,SAAS,GAAG,CAAC;QAEjB,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;YAC1C,MAAM,KAAK,GAAG,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,EAAE,GAAG,CAAC;YAC7D,IAAI,KAAK,GAAG,SAAS,IAAI,KAAK,GAAG,GAAG,EAAE;gBAClC,SAAS,GAAG,KAAK;gBACjB,SAAS,GAAG,KAAK;YACrB;QACJ;QAEA,IAAI,SAAS,EAAE;YACX,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE;QACtD;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,OAAe,EAAA;QACrC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;;QAGxD,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;AAC1C,YAAA,IAAI,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AACvB,gBAAA,MAAM,UAAU,GAAG,IAAI,CAAC;gBACxB,OAAO;AACH,oBAAA,MAAM,EAAE;AACJ,wBAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,wBAAA,IAAI,EAAE,UAAU;wBAChB,MAAM,EAAE,KAAK,CAAC,IAAI;wBAClB,UAAU;AACV,wBAAA,WAAW,EAAE,CAAA,YAAA,EAAe,KAAK,CAAC,IAAI,CAAA;AACzC,qBAAA;oBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,oBAAA,aAAa,EAAE,UAAU;AACzB,oBAAA,eAAe,EAAE,CAAC,eAAe,EAAE
,GAAG;iBACzC;YACL;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,OAAe,EAAA;AACrC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa;AAAE,YAAA,OAAO,IAAI;QAE3C,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,IAAI,GAAG;AAExD,QAAA,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;YAC7D,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,EAAE;gBAC5C,MAAM,UAAU,GAAG,IAAI;gBACvB,OAAO;AACH,oBAAA,MAAM,EAAE;AACJ,wBAAA,EAAE,EAAE,CAAA,OAAA,EAAU,IAAI,CAAC,GAAG,EAAE,CAAA,CAAE;AAC1B,wBAAA,IAAI,EAAE,QAAQ;AACd,wBAAA,MAAM,EAAE,UAAU;wBAClB,UAAU;wBACV,WAAW,EAAE,CAAA,QAAA,EAAW,UAAU,CAAA;AACrC,qBAAA;oBACD,cAAc,EAAE,UAAU,GAAG,SAAS;AACtC,oBAAA,aAAa,EAAE,UAAU;AACzB,oBAAA,eAAe,EAAE,CAAC,QAAQ,EAAE,UAAU;iBACzC;YACL;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,KAAuB,EAAA;AAI3C,QAAA,MAAM,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC;;QAGzB,KAAK,MAAM,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE;AACpC,YAAA,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;;AAE3D,gBAAA,MAAM,UAAU,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,CAAA,EAAG,KAAK,CAAC,IAAI,CAAA,WAAA,CAAa,EAAE,GAAG,CAAC,CAAC;gBAC9E,OAAO;oBACH,KAAK;oBACL,KAAK,EAAE,UAAU,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI;iBACrC;YACL;QACJ;AAEA,QAAA,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE;IAC5D;AAEA;;AAEG;AACK,IAAA,kBAAkB,CAAC,IAAY,EAAA;AACnC,QAAA,MAAM,cAAc,GAAG,IAAI,CAAC,WAAW,EAAE;;AAGzC,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;;YAEjC,OAAO,CAAA,iBAAA,EAAoB,IAAI,CAAA,EAAA,CAAI;QACvC;;AAGA,QAAA,MAAM,SAAS,GAAG;AACd,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,EAAA,CAAI;AACnC,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;AACrC,YAAA,CAAA,KAAA,EAAQ,cAAc,CAAA,CAAE;AACxB,YAAA,CAAA,CAAA,EAAI,cAAc,CAAA,OAAA,CAAS;AAC3B,YAAA,CAAA,OAAA,EAAU,cAAc,CAAA;SAC3B;;AAGD,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,IAAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;;YAER;QACJ;;QAGA,MAAM,OAAO,GAAG,QAAQ,CAAC,gBAAgB,CAAC,gCAAgC,CAAC;AAC3E,QAAA,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACvB,YAAA,IAAI,GAAG,CAAC,WAAW,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,EAAE;AACzD,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC;YAC3C;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,IAAY,EAAA;AAChC,QAAA,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,IAAI,IAAI;IAC1D;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,MAAc,EAAA;AACnC,QAAA,MAAM,gBAAgB,GAAG,MAAM,CAAC,WAAW,EAAE;;QAG7C,IAAI,gBAAgB,KAAK,KAAK,IAAI,gBAAgB,KAAK,WAAW,EAAE;AAChE,YAAA,OAAO,MAAM;QACjB;QACA,IAAI,gBAAgB,KAAK,QAAQ,IAAI,gBAAgB,KAAK,KAAK,EAAE;AAC7D,YAAA,OAAO,iBAAiB;QAC5B;;AAGA,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,CAAE;QACtD;;AAGA,QAAA,MAAM,SAAS,GAAG;YACd,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,CAAE;AAC3C,YAAA,CAAA,eAAA,EAAkB,gBAAgB,CAAA,EAAA,CAAI;YACtC,CAAA,CAAA,EAAI,gBAAgB,CAAC,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA,QAAA,CAAU;AACnD,YAAA,CAAA,qBAAA,EAAwB,gBAAgB,CAAA,IAAA;SAC3C;AAED,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,IAAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;gBACJ;YACJ;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,wBAAwB,CAAC,WAAmB,EAAA;AAChD,QAAA,MAAM,cAAc,GAAG,WAAW,CAAC,WAAW,EAAE;;AAGhD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;QAChD;;AAGA,QAAA,MAAM,SAAS,GAAG;AACd,YAAA,CAAA,cAAA,EAAiB,cAAc,CAAA,IAAA,CAAM;AACrC,YAAA,CAAA,eAAA,EAAkB,cAAc,CAAA,IAAA,CAAM;AACtC,YAAA,CAAA,SAAA,EAAY,cAAc,CAAA,IAAA;SAC7B;AAED,QAAA,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;AAC9B,YAAA,IAAI;AACA,gBAAA,I
AAI,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE;AAClC,oBAAA,OAAO,QAAQ;gBACnB;YACJ;AAAE,YAAA,MAAM;gBACJ;YACJ;QACJ;AAEA,QAAA,OAAO,IAAI;IACf;AAEA;;AAEG;AACK,IAAA,sBAAsB,CAAC,OAAgB,EAAA;;AAE3C,QAAA,IAAI,OAAO,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAA,CAAA,EAAI,OAAO,CAAC,EAAE,EAAE;QAC3B;;AAGA,QAAA,IAAI,OAAO,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;YACrC,OAAO,CAAA,cAAA,EAAiB,OAAO,CAAC,YAAY,CAAC,aAAa,CAAC,IAAI;QACnE;;QAGA,MAAM,IAAI,GAAa,EAAE;QACzB,IAAI,OAAO,GAAmB,OAAO;QAErC,OAAO,OAAO,IAAI,OAAO,KAAK,QAAQ,CAAC,IAAI,EAAE;YACzC,IAAI,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,EAAE;YAE5C,IAAI,OAAO,CAAC,SAAS,IAAI,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ,EAAE;gBAC5D,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AAC/E,gBAAA,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACpB,oBAAA,QAAQ,IAAI,CAAA,CAAA,EAAI,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACnD;YACJ;AAEA,YAAA,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC;AACtB,YAAA,OAAO,GAAG,OAAO,CAAC,aAAa;QACnC;AAEA,QAAA,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;IAC3B;AAEA;;AAEG;IACK,mBAAmB,CAAC,CAAS,EAAE,CAAS,EAAA;AAC5C,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACpC,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAEpC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAChE,QAAA,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC,CAAC;AAEzC,QAAA,OAAO,YAAY,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI;IACzC;AAEA;;AAEG;IACH,SAAS,GAAA;QACL,OAAO,IAAI,CAAC,MAAM;IACtB;;AAIA;;;;;AAKG;IACH,MAAM,eAAe,CAAC,UAA+B,EAAA;;AAEjD,QAAA,IAAI,UAAU,CAAC,UAAU,EAAE;AACvB,YAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,UAAU;AACvC,YAAA,KAAK,MAAM,QAAQ,IAAI,UAAU,CAAC,UAAU,EAAE;;AAE1C,gBAAA,KAAK,MAAM,OAAO,IAAI,QAAQ,CAAC,QAAQ,EAAE;AACrC,oBAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE;oBAC5D,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC;oBACpC,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC;gBACnD;YACJ;QACJ;;QAGA,MAAM,SAAS,GAAG,IAAI,CAAC,0BAA0B,CAAC,UAAU,CAAC,SAAS,CAAC;QACvE,KAAK,MAAM,CAAC,KAAK,EAAE,QAAQ,CAAC,IAAI,SAAS,EAAE;AACvC,YAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE;AACpD,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;AAC1B,YAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC1D;;AAGA,QAAA,IAAI,UAAU,CAAC,OAAO,EAAE;AACpB,YAAA,IAAI,CAAC,kBAAkB,CAAC,UAAU,CAAC,OAAO,CAAC;QAC/C;AAEA,QAAA,OAAO,CAAC,GAAG,CAAC,CAAA,0CAAA,EAA6C,IAAI,CAAC,UAAU,CAAC,MAAM,CAAA,aAAA,EAAgB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAA,OAAA,CAAS,CAAC;IACpI;AAEA;;;;AAIG;IACH,MAAM,WAAW,CAAC,MAAqB,EAAA;QACnC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE;;AAGrB,QAAA,IAAI,MAAM,CAAC,WAAW,EAAE;AACpB,YAAA,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,WAAW,CAAC;YAC3C;QACJ;;AAGA,QAAA,IAAI,MAAM,CAAC,UAAU,EAAE;AACnB,YAAA,IAAI;gBACA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC;AAC/C,gBAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;oBACd,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,QAAQ,CAAC,MAAM,CAAA,CAAE,CAAC;gBAClE;AACA,gBAAA,MAAM,IAAI,GAAyB,MAAM,QAAQ,CAAC,IAAI,EAAE;AACxD,gBAAA,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC;YACjC;YAAE,OAAO,KAAK,EAAE;AACZ,gBAAA,OAAO,CAAC,KAAK,CAAC,sDAAsD,EAAE,KAAK,CAAC;YAChF;QACJ;IACJ;AAEA;;AAEG;AACK,IAAA,kBAAkB,CAAC,OAA6B,EAAA;AACpD,QAAA,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;;AAEzB,YAAA,MAAM,YAAY,GAAiB;gBAC/B,IAAI,EAAE,KAAK,CAAC,GAAG;gBACf,IAAI,EAAE,KAAK,CAAC,KAAK;gBACjB,WAAW,EAAE,KAAK,CAAC,WAAW;gBAC9B,OAAO,EAAE,KAAK,CAAC;aAClB;;AAGD,YAAA,MAAM,IAAI,GAAG;AACT,gBAAA,KAAK,CAAC,KAAK,CAAC,WAAW,EAAE;AACzB,gBAAA,KAAK,CAAC,GAAG,CAAC,WAAW,EAAE;AACvB,g
BAAA,GAAG,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;aACrD;AAED,YAAA,IAAI,KAAK,CAAC,WAAW,EAAE;;AAEnB,gBAAA,MAAM,SAAS,GAAG,KAAK,CAAC;AACnB,qBAAA,WAAW;qBACX,KAAK,CAAC,KAAK;qBACX,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AAC9B,gBAAA,IAAI,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;YAC3B;AAEA,YAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;gBACpB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,YAAY,CAAC;YAC5C;QACJ;QAEA,OAAO,CAAC,GAAG,CAAC,CAAA,oCAAA,EAAuC,OAAO,CAAC,MAAM,CAAA,gBAAA,CAAkB,CAAC;IACxF;AAEA;;AAEG;AACK,IAAA,0BAA0B,CAC9B,IAAmF,EAAA;AAEnF,QAAA,MAAM,MAAM,GAAG,IAAI,GAAG,EAAoB;AAE1C,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;;YAEpB,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE;YACrC,MAAM,QAAQ,GAAa,EAAE;;AAG7B,YAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,QAAQ,EAAE;gBACxB,QAAQ,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;YACrE;;AAGA,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC;AAClB,iBAAA,WAAW;AACX,iBAAA,OAAO,CAAC,cAAc,EAAE,EAAE;iBAC1B,KAAK,CAAC,KAAK;AACX,iBAAA,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACrD,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC;;AAG5B,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC;AACpB,iBAAA,WAAW;AACX,iBAAA,OAAO,CAAC,cAAc,EAAE,EAAE;iBAC1B,KAAK,CAAC,KAAK;AACX,iBAAA,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AAErD,YAAA,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAkB;AAC1C,YAAA,KAAK,MAAM,IAAI,IAAI,YAAY,EAAE;AAC7B,gBAAA,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACrD;;YAGA,MAAM,QAAQ,GAAG,CAAC,GAAG,QAAQ,CAAC,OAAO,EAAE;AAClC,iBAAA,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC1B,iBAAA,KAAK,CAAC,CAAC,EAAE,CAAC;iBACV,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC;AAE1B,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;AAE1B,YAAA,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QAC9C;AAEA,QAAA,OAAO,MAAM;IACjB;AAEA;;AAEG;AACK,IAAA,UAAU,CAAC,IAAY,EAAA;AAC3B,QAAA,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC;AACtB,YAAA,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK;AACnE,YAAA,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ;AACtE,YAAA,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvE,YAAA,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO;AACvE,YAAA,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO;AACtE,YAAA,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAC7D,YAAA,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;AACtE,YAAA,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM;AAChE,YAAA,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE;AAClE,SAAA,CAAC;AACF,QAAA,OAAO,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC;IAC9B;AAEA;;AAEG;IACH,aAAa,GAAA;QACT,OAAO,IAAI,CAAC,UAAU;IAC1B;AAEA;;AAEG;IACH,gBAAgB,GAAA;QACZ,OAAO,IAAI,CAAC,aAAa;IAC7B;AAEA;;AAEG;IACH,SAAS,GAAA;AACL,QAAA,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,GAAG,CAAC;IACpE;AACH;;ACx1BD;AACA;AAUA;;;;;AAKG;MACU,eAAe,CAAA;AAOxB,IAAA,WAAA,CAAY,MAAmB,EAAA;QAJvB,IAAA,CAAA,gBAAgB,GAAuB,IAAI;QAC3C,IAAA,CAAA,WAAW,GAAG,CAAC;AACf,QAAA,IAAA,CAAA,eAAe,GAAG,IAAI,CAAC,GAAG,EAAE;AAGhC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;QACpB,IAAI,CAAC,UAAU,GAAG,IAAI,0BAA0B,CAAC,MAAM,CAAC;;AAGxD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,IAAI,CAAC,YAAY,EAAE;Q
ACvB;IACJ;AAEA;;;;;AAKG;AACH,IAAA,eAAe,CAAC,OAAe,EAAA;QAC3B,OAAO,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC;IAC5C;AAEA;;;;;AAKG;IACH,MAAM,aAAa,CAAC,MAAmB,EAAA;;AAEnC,QAAA,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE;QACtB,IAAI,GAAG,GAAG,IAAI,CAAC,eAAe,GAAG,KAAK,EAAE;AACpC,YAAA,IAAI,CAAC,WAAW,GAAG,CAAC;AACpB,YAAA,IAAI,CAAC,eAAe,GAAG,GAAG;QAC9B;;QAGA,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,iBAAiB,IAAI,CAAC;AACrD,QAAA,IAAI,IAAI,CAAC,WAAW,IAAI,UAAU,EAAE;YAChC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,6BAAA,EAAgC,UAAU,CAAA,oBAAA;aACtD;QACL;;QAGA,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,EAAE;YAC/B,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,CAAA,aAAA,EAAgB,MAAM,CAAC,IAAI,CAAA,gBAAA;aACvC;QACL;QAEA,IAAI,CAAC,WAAW,EAAE;AAElB,QAAA,QAAQ,MAAM,CAAC,IAAI;AACf,YAAA,KAAK,UAAU;AACX,gBAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;AAEtD,YAAA,KAAK,UAAU;AACX,gBAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,KAAe,CAAC;AAEvE,YAAA,KAAK,cAAc;gBACf,OAAO,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AAE3C,YAAA,KAAK,cAAc;gBACf,OAAO,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC;AAE3C,YAAA,KAAK,QAAQ;gBACT,OAAO,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,MAAM,CAAC;AAE9C,YAAA,KAAK,WAAW;gBACZ,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,MAAM,CAAC;AAE/C,YAAA,KAAK,QAAQ;AACT,gBAAA,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;AAEjE,YAAA;gBACI,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE,CAAA,qBAAA,EAAwB,MAAM,CAAC,IAAI,CAAA;iBAC/C;;IAEb;AAEA;;AAEG;AACK,IAAA,eAAe,CAAC,MAAmB,EAAA;AACvC,QAAA,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc;AACjD,QAAA,IAAI,CAAC,cAAc;YAAE,OAAO,IAAI,CAAC;AAEjC,QAAA,MAAM,aAAa,GAA2B;AAC1C,YAAA,UAAU,EAAE,UAAU;AACtB,YAAA,UAAU,EAAE,MAAM;AAClB,YAAA,cAAc,EAAE,OAAO;AACvB,YAAA,QAAQ,EAAE,QAAQ;YAClB,WAAW,EAAE,OAAO;AACpB,YAAA,cAAc,EAAE,OAAO;AACvB,YAAA,QAAQ,EAAE;SACb;QAED,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAkC;AAC9E,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,UAAU,CAAC;IAC9C;AAEA;;AAEG;AACK,IAAA,iBAAiB,CAAC,QAAgB,EAAA;;AAEtC,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,EAAE;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,EAAE;AAChD,gBAAA,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;AAC5B,oBAAA,OAAO,KAAK;gBAChB;YACJ;QACJ;;AAGA,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE;AACzE,YAAA,OAAO,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,OAAO,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACnF;AAEA,QAAA,OAAO,IAAI;IACf;;AAIA;;AAEG;AACK,IAAA,MAAM,QAAQ,CAClB,IAAY,EACZ,MAAgC,EAAA;AAEhC,QAAA,IAAI;;YAEA,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,EAAE;gBACnC,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;YAGA,IAAI,GAAG,GAAG,IAAI;YACd,IAAI,MAAM,EAAE;AACR,gBAAA,MAAM,YAAY,GAAG,IAAI,eAAe,EAAE;AAC1C,gBAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;oBAC/C,IAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,EAAE;wBACvC,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;oBACxC;gBACJ;AACA,gBAAA,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,EAAE;gBAC3C,IAAI,WAAW,EAAE;AACb,oBAAA,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,WAAW;gBACxD;YACJ;;AAGA,YAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;gBAC/B,OAAO;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,IAAI,CAAA,gCAAA;iBACjC;YACL;;AAGA,YAAA,IAAI,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,EAAE;;AAEzB,gBAAA,IAAI,IAAI,CAAC,uBAAuB,CAAC,GAAG,CAAC,EAAE;oBACnC,OAAO;AACH,wBAAA,OAAO,EAAE,IAAI;wBACb,OAAO,EAAE,CAAA,aAAA,EAAgB,IAAI,CAAA;qBAChC;gBACL;;AAGA,gBAAA,MAAM,CAAC,QAAQ,CAAC,IAAI,GAAG,GAAG;gBAC1B,OAAO;AACH,oBAAA,OAAO,EAAE,IAAI;oBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,IAAI,CAAA,GAAA;iBACjC;YACL;;YAGA,MAAM,CAAC,IAAI,CAA
C,GAAG,EAAE,QAAQ,EAAE,qBAAqB,CAAC;YACjD,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,OAAA,EAAU,GAAG,CAAA,WAAA;aACzB;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;AACK,IAAA,MAAM,QAAQ,CAAC,QAAgB,EAAE,KAAa,EAAA;;QAElD,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;QAGA,MAAM,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;;AAGzD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE,CAAA,qBAAA,EAAwB,iBAAiB,CAAA,MAAA,EAAS,KAAK,CAAA,eAAA;aACnE;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,iBAAiB,CAAC;YAEzD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,iBAAiB,CAAA;iBACnD;YACL;AAEA,YAAA,IAAI,EAAE,OAAO,YAAY,gBAAgB;AACjC,gBAAA,OAAO,YAAY,mBAAmB;AACtC,gBAAA,OAAO,YAAY,iBAAiB,CAAC,EAAE;gBAC3C,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;YAGA,OAAO,CAAC,KAAK,EAAE;;AAGf,YAAA,IAAI,OAAO,YAAY,iBAAiB,EAAE;;gBAEtC,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,IAAI,CAC3C,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE;oBAC7C,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAC1D;gBACD,IAAI,MAAM,EAAE;AACR,oBAAA,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK;gBAChC;qBAAO;oBACH,OAAO;AACH,wBAAA,OAAO,EAAE,KAAK;wBACd,OAAO,EAAE,CAAA,QAAA,EAAW,KAAK,CAAA,WAAA;qBAC5B;gBACL;YACJ;iBAAO;AACH,gBAAA,OAAO,CAAC,KAAK,GAAG,KAAK;YACzB;;AAGA,YAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;AAC5D,YAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,QAAQ,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;;AAG7D,YAAA,MAAM,sBAAsB,GAAG,MAAM,CAAC,wBAAwB,CAC1D,MAAM,CAAC,gBAAgB,CAAC,SAAS,EAAE,OAAO,CAC7C,EAAE,GAAG;AACN,YAAA,IAAI,sBAAsB,IAAI,OAAO,YAAY,gBAAgB,EAAE;AAC/D,gBAAA,sBAAsB,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC;AAC3C,gBAAA,OAAO,CAAC,aAAa,CAAC,IAAI,KAAK,CAAC,OAAO,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;YAChE;YAEA,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,mBAAA,EAAsB,KAAK,CAAA,CAAA;aACvC;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACK,MAAM,YAAY,CAAC,QAAgB,EAAA;;QAEvC,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;QAGA,MAAM,iBAAiB,GAAG,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;;AAGzD,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,UAAA,EAAa,iBAAiB,CAAA,iCAAA;aAC1C;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,iBAAiB,CAAC;YAEzD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,iBAAiB,CAAA;iBACnD;YACL;AAEA,YAAA,IAAI,EAAE,OAAO,YAAY,WAAW,CAAC,EAAE;gBACnC,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;AACd,oBAAA,OAAO,EAAE;iBACZ;YACL;;AAGA,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;;AAG/D,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC;;YAGrB,OAAO,CAAC,KAAK,EAAE;YAEf,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACK,MAAM,YAAY,CAAC,eAAuB,EAAA;AAC9C,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC;IAC7C;AAEA;;AAEG;IACK,MAAM,eAAe,CAAC,QAAgB,EAAA;;AAE1C,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YAC/B,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,
CAAA,WAAA,EAAc,QAAQ,CAAA,iCAAA;aAClC;QACL;AAEA,QAAA,IAAI;;AAEA,YAAA,IAAI,QAAQ,KAAK,MAAM,EAAE;AACrB,gBAAA,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC;gBAC/C,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,iBAAiB,EAAE;YACxD;AAEA,YAAA,IAAI,QAAQ,KAAK,iBAAiB,EAAE;AAChC,gBAAA,MAAM,CAAC,QAAQ,CAAC,EAAE,GAAG,EAAE,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC;gBACxE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,oBAAoB,EAAE;YAC3D;YAEA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC;YAEhD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,QAAQ,CAAA;iBAC1C;YACL;AAEA,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;YAE9D,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;IACH,MAAM,gBAAgB,CAAC,QAAgB,EAAA;;QAEnC,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,EAAE;YACnC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,UAAA,EAAa,QAAQ,CAAA,gBAAA;aACjC;QACL;;AAGA,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;gBACb,OAAO,EAAE,CAAA,cAAA,EAAiB,QAAQ,CAAA,8BAAA;aACrC;QACL;AAEA,QAAA,IAAI;YACA,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC;YAEhD,IAAI,CAAC,OAAO,EAAE;gBACV,OAAO;AACH,oBAAA,OAAO,EAAE,KAAK;oBACd,OAAO,EAAE,CAAA,mBAAA,EAAsB,QAAQ,CAAA;iBAC1C;YACL;;YAGA,IAAI,CAAC,eAAe,EAAE;;AAGtB,YAAA,OAAO,CAAC,cAAc,CAAC,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;;AAG/D,YAAA,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC;;AAGrB,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,qBAAqB,EAAE;YAC5C,MAAM,OAAO,GAAG,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7C,YAAA,OAAO,CAAC,SAAS,GAAG,6BAA6B;AACjD,YAAA,OAAO,CAAC,KAAK,CAAC,OAAO,GAAG;;uBAEb,IAAI,CAAC,GAAG,GAAG,CAAC,CAAA;wBACX,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;yBACZ,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;0BACb,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;;;;;;aAM5B;AAED,YAAA,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC;AAClC,YAAA,IAAI,CAAC,gBAAgB,GAAG,OAAO;;YAG/B,UAAU,CAAC,MAAM,IAAI,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC;YAE9C,OAAO;AACH,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,OAAO,EAAE;aACZ;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;AAEA;;AAEG;AACK,IAAA,MAAM,mBAAmB,CAC7B,UAAkB,EAClB,MAAgC,EAAA;;QAGhC,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,EAAE;YACzC,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,eAAA,EAAkB,UAAU,CAAA,WAAA;aACxC;QACL;QAEA,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,aAAc,CAAC,UAAU,CAAC;AAEvD,QAAA,IAAI;AACA,YAAA,OAAO,MAAM,QAAQ,CAAC,MAAM,CAAC;QACjC;QAAE,OAAO,KAAK,EAAE;YACZ,OAAO;AACH,gBAAA,OAAO,EAAE,KAAK;gBACd,OAAO,EAAE,CAAA,eAAA,EAAkB,UAAU,CAAA,QAAA,CAAU;AAC/C,gBAAA,KAAK,EAAE,MAAM,CAAC,KAAK;aACtB;QACL;IACJ;;AAIA;;AAEG;AACK,IAAA,aAAa,CAAC,GAAW,EAAA;;QAE7B,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;AACtE,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI;AACA,YAAA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;YACjD,OAAO,MAAM,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,CAAC,MAAM;QACnD;AAAE,QAAA,MAAM;YACJ,OAAO,IAAI,CAAC;QAChB;IACJ;AAEA;;AAEG;AACK,IAAA,uBAAuB,CAAC,GAAW,EAAA;AACvC,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,KAAK;QAChB;;AAGA,QAAA,IAAI;YACA,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,CAAC;YACrC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,UAAU,CAAC,CAAC;;AAGnD,YAAA,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;gBACnB,MAAM,CAAC,aAAa,CAAC,IAAI,eAAe,CAAC,YAAY,CAAC,CAA
C;YAC3D;AAEA,YAAA,OAAO,IAAI;QACf;AAAE,QAAA,MAAM;AACJ,YAAA,OAAO,KAAK;QAChB;IACJ;AAEA;;AAEG;AACK,IAAA,qBAAqB,CAAC,GAAW,EAAA;;AAErC,QAAA,IAAI,sBAAsB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;AAClC,YAAA,OAAO,KAAK;QAChB;;AAGA,QAAA,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;AAC/B,YAAA,OAAO,IAAI;QACf;;AAGA,QAAA,IAAI;AACA,YAAA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;AACjD,YAAA,OAAO,MAAM,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,CAAC,MAAM,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC;QAC1E;AAAE,QAAA,MAAM;AACJ,YAAA,OAAO,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC;QAC9B;IACJ;AAEA;;AAEG;AACK,IAAA,gBAAgB,CAAC,QAAgB,EAAA;;AAErC,QAAA,OAAO;AACF,aAAA,OAAO,CAAC,SAAS,EAAE,EAAE;AACrB,aAAA,OAAO,CAAC,eAAe,EAAE,EAAE;AAC3B,aAAA,OAAO,CAAC,SAAS,EAAE,EAAE;AACrB,aAAA,IAAI,EAAE;IACf;AAEA;;AAEG;AACK,IAAA,qBAAqB,CAAC,UAAkB,EAAA;AAC5C,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,aAAa,KAAK,SAAS;AACrC,YAAA,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,UAAU,CAAC;IACxF;AAEA;;AAEG;IACK,eAAe,GAAA;AACnB,QAAA,IAAI,IAAI,CAAC,gBAAgB,EAAE;AACvB,YAAA,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE;AAC9B,YAAA,IAAI,CAAC,gBAAgB,GAAG,IAAI;QAChC;IACJ;AAEA;;AAEG;IACK,YAAY,GAAA;AAChB,QAAA,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACjC;QACJ;AAEA,QAAA,IAAI,QAAQ,CAAC,cAAc,CAAC,wBAAwB,CAAC,EAAE;YACnD;QACJ;QAEA,MAAM,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,OAAO,CAAC;AAC7C,QAAA,KAAK,CAAC,EAAE,GAAG,wBAAwB;QACnC,KAAK,CAAC,WAAW,GAAG;;;;;;;;;;;;;SAanB;AAED,QAAA,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;IACpC;AAEA;;AAEG;AACK,IAAA,KAAK,CAAC,EAAU,EAAA;AACpB,QAAA,OAAO,IAAI,OAAO,CAAC,OAAO,IAAI,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAC1D;AAEA;;AAEG;IACH,SAAS,GAAA;QACL,OAAO,IAAI,CAAC,MAAM;IACtB;AAEA;;AAEG;IACH,aAAa,GAAA;QACT,OAAO,IAAI,CAAC,UAAU;IAC1B;AAEA;;AAEG;IACH,gBAAgB,GAAA;AACZ,QAAA,IAAI,CAAC,WAAW,GAAG,CAAC;AACpB,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,GAAG,EAAE;IACrC;AACH;;ACvrBD;AACA;AAMA;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4BG;AACG,SAAU,2BAA2B,CAAC,MAAwB,EAAA;AAChE,IAAA,MAAM,YAAY,GAAwB;AACtC,QAAA,KAAK,EAAE,eAAe;AACtB,QAAA,gBAAgB,EAAE;KACrB;AACD,IAAA,MAAM,CAAC,eAAe,CAAC,YAAY,CAAC;AACxC;;ACmkBA;AACA;AACA;AACA;AACA,MAAM,WAAW,GAAG;AACpB,IAAI,WAAW,EAAE,EAAE;AACnB,IAAI,UAAU,EAAE,MAAM;AACtB,IAAI,WAAW,EAAE,IAAI;AACrB,IAAI,UAAU,EAAE,QAAQ;AACxB,IAAI,IAAI,EAAE,IAAI;AACd,IAAI,OAAO,EAAE,CAAC;AACd,IAAI,GAAG,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE,mBAAmB,EAAE,KAAK,EAAE,MAAM,EAAE;AACxF,CAAC;AAC4B,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,WAAW,CAAC,EAAE,EAAE,YAAY,EAAE,KAAK,EAAE;AACxE,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,WAAW,CAAC,EAAE,EAAE,YAAY,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,OAAO,EAAE,4BAA4B,EAAE,kBAAkB,EAAE,KAAK,EAAE;;AA2nQ5K,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,GAAG,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC,MAAM,EAAE;AACxD,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACjC,IAAI,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AACzC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC;AAC/B,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC9B,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,EAAE,GAAG,CAAC,GAAG,IAAI,CAAC,EAAE;AAClC,IAAI,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AAChC;AACA,SAAS,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE;AACxB,IAAI,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG;AAC9B,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AAChC,QAAQ,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;AACtB,QAAQ,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC;AACzB,
QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,GAAG,IAAI,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC;AAC5B,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC;AAChC,IAAI;AACJ;AACA,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,MAAM,GAAG,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AACjD,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACnB,IAAI,OAAO,CAAC;AACZ;AACA;AACA,SAAS,OAAO,CAAC,GAAG,EAAE;AACtB,IAAI,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC;AACpB,IAAI,OAAO,CAAC,KAAK,CAAC;AAClB,QAAQ,CAAC,GAAG,GAAG,EAAE;AACjB,IAAI,OAAO,CAAC,KAAK,CAAC;AAClB,QAAQ,CAAC,GAAG,GAAG,EAAE;AACjB,IAAI,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC;AAClE;;AAEA;AACA,MAAM,WAAW,CAAC;AAClB,IAAI,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,EAAE;AACrC,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,MAAM,GAAG,MAAM;AAC5B,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AAC3C,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC;AAC3C;AACA,QAAQ,MAAM,GAAG,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC;AAC/C,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG;AACtC,IAAI;AACJ;AACA,IAAI,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE;AACnB,QAAQ,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,GAAG,IAAI;AACzC;AACA,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC3C,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACpB,QAAQ;AACR;AACA,QAAQ,IAAI,KAAK,GAAG,CAAC;AACrB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,KAAK,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;AACtC,QAAQ,MAAM,KAAK,GAAG,CAAC,GAAG,KAAK;AAC/B;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK;AACnC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC5C,QAAQ;AACR;AACA;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC3C,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACrB,QAAQ;AACR;AACA,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC5C,QAAQ;AACR,IAAI;AACJ;AACA,IAAI,OAAO,CAAC,GAAG,EAAE;AACjB,QAAQ,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,IAAI;AACnC,QAAQ,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,CAAC,CA
AC;AACvC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC7C,YAAY,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,GAAG;AAClB,IAAI;AACJ;;AAuLA;AACA;AACA;AACA,MAAM,GAAG,GAAG,KAAK,CAAC;AA8ElB;AACA,SAAS,IAAI,CAAC,CAAC,EAAE;AACjB;AACA,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACpC;AACA,SAAS,WAAW,CAAC,CAAC,EAAE,GAAG,GAAG,GAAG,EAAE;AACnC,IAAI,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC;AACvB,IAAI,IAAI,EAAE,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC7C,QAAQ,OAAO,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;AAC1C,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC;AACxC,IAAI,MAAM,GAAG,GAAG,CAAC,GAAG,GAAG;AACvB,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AACzB,IAAI,OAAO,CAAC;AACZ;AAwCA;AACA,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE;AACxB,IAAI,IAAI,EAAE,EAAE,EAAE;AACd,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC;AAC1C,IAAI,IAAI,CAAC,KAAK,CAAC;AACf,QAAQ,OAAO,CAAC;AAChB,IAAI,IAAI,IAAI,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAChC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AAChC,QAAQ,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,CAAC,EAAE,EAAE,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,CAAC;AAC1H,QAAQ,IAAI,IAAI,EAAE,GAAG,EAAE;AACvB,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,CAAC,CAAC;AACnD,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,KAAK;AAC1B,IAAI,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC;AACrC;;AA4GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,SAAS,EAAE;AACtD,IAAI,SAAS,KAAK,CAAC,KAAK,EAAE,EAAE,OAAO,KAAK,YAAY,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC,UAAU,OAAO,EAAE,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChH,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;AAC/D,QAAQ,SAAS,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnG,QAAQ,SAAS,QAAQ,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtG,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;AACtH,QAAQ,IAAI,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,KAAK,CAAC,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;AAC9E,IAAI,CAAC,CAAC,CAAC;AACP,CAAC;AACD;AACA,OAAO,eAAe,KAAK,UAAU,GAAG,eAAe,GAAG,UAAU,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE;AAChG,IAAI,IAAI,CAAC,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;AAC/B,IAAI,OAAO,CAAC,CAAC,IAAI,GAAG,iBAAiB,EAAE,CAAC,CAAC,KAAK,GAAG,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG,UAAU,EAAE,CAAC,CAAC;AACrF,CAAC;;AAED;AACA;AACA;AACA,SAAS,gBAAgB,CAAC,IAAI,EAAE;AAChC,IAAI,OAAO;AACX,SAAS,O
AAO,CAAC,MAAM,EAAE,GAAG;AAC5B,SAAS,KAAK,CAAC,eAAe;AAC9B,SAAS,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE;AAC5B,SAAS,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACpD;AACA,SAAS,KAAK,CAAC,IAAI,EAAE;AACrB,IAAI,OAAO;AACX,SAAS,OAAO,CAAC,iBAAiB,EAAE,GAAG;AACvC,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,sBAAsB,EAAE,EAAE,CAAC;AAC5C,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,MAAM,EAAE,GAAG;AAC5B,SAAS,IAAI,EAAE;AACf;AACA,SAAS,cAAc,CAAC,CAAC,EAAE;AAC3B;AACA,IAAI,QAAQ,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,QAAQ,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,QAAQ,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,QAAQ,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,QAAQ,iBAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;AACjC,QAAQ,CAAC,CAAC,MAAM,GAAG,EAAE;AACrB;AACA;AACA,SAAS,OAAO,CAAC,OAAO,EAAE;AAC1B,IAAI,OAAO;AACX,SAAS,OAAO,CAAC,gBAAgB,EAAE,GAAG;AACtC,SAAS,OAAO,CAAC,WAAW,EAAE,IAAI;AAClC,SAAS,OAAO,CAAC,qBAAqB,EAAE,OAAO;AAC/C,SAAS,OAAO,CAAC,SAAS,EAAE,GAAG;AAC/B,SAAS,IAAI,EAAE;AACf;AACA;AACA,SAAS,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE;AACjD,IAAI,OAAO,SAAS,CAAC,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,QAAQ,EAAE,KAAK,EAAE,IAAI,GAAG,EAAE,EAAE;AACrF;AACA,QAAQ,IAAI,EAAE,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;AACzE,YAAY,OAAO,mBAAmB;AACtC,QAAQ,MAAM,EAAE,GAAG,GAAG,EAAE,EAAE,QAAQ,GAAG,EAAE,EAAE,KAAK,GAAG,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE,YAAY,GAAG,CAAC,EAAE,WAAW,GAAG,SAAS,GAAG,GAAG,IAAI;AACrH;AACA,QAAQ,MAAM,OAAO,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACpE,QAAQ,IAAI,SAAS,GAAG,gBAAgB,CAAC,OAAO;AAChD,aAAa,MAAM,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC;AAC3C,aAAa,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;AAC1B,QAAQ,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC;AAClC,YAAY,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;AACxD;AACA,QAAQ,MAAM,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,KAAK,CAAC;AAClD,QAAQ,MAAM,KAAK,GAAG,IAAI,WAAW,CAAC,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC;AAC5D,QAAQ,MAAM,MAAM,GAAG,CAAC,CAAC,KAAK;AAC9B,YAAY,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AAC7C,YAAY,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,GAAG,CAAC;AAC/C,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE;AACxC,gBAAgB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,GAAG;AAC9C,YAAY,OAAO,MAAM,CAAC,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC;AAChD,QAAQ,CAAC;AACT,QAAQ,MAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC;AACrC,QAAQ,MAAM,OAAO,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AACvF;AACA,QAAQ,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC5C,YAAY,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC/B,YAAY,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC;AACrC;AACA,YAAY,MAAM,KAAK,GAAG,CAAC,CAAC,WAAW,EAAE;AACzC,YAAY,KAAK,MAAM,CAAC,IAAI,OAAO;AACnC,gBAAgB,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;AACrC,oBAAoB,CAAC,IAAI,IAAI;AAC7B,YAAY,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAC9B,QAAQ,CAAC,CAAC;AACV,QAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxC,QAAQ,IAAI,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC;AAC/C;AACA,QAAQ,IAAI,OAAO,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACnD,QAAQ,IAAI,OAAO,GAAG,IAAI,YAAY,CAAC,CAAC,GAAG,QAAQ,CAAC;AACpD,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;AACzC,YAAY,MAAM,IAAI,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC;AACzE,YAAY,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC,CAAC,CAAC,CAAC;AACrD,YAAY,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC
,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AAChE,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpD,gBAAgB,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,YAAY;AACZ;AACA,YAAY,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpD,gBAAgB,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;AACnD,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACjD,oBAAoB,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1C,YAAY;AACZ,YAAY,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC;AAC3C,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE;AACnD,gBAAgB,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI;AAClC,YAAY,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAC9C,gBAAgB,CAAC;AACjB,gBAAgB,CAAC,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC;AAC/C,aAAa,CAAC,CAAC;AACf,YAAY,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9C,YAAY,OAAO,GAAG;AACtB,iBAAiB,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;AACnE,iBAAiB,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC/B,iBAAiB,IAAI,CAAC,GAAG,CAAC;AAC1B,QAAQ;AACR;AACA,QAAQ,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AAClC,QAAQ,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;AAC5E,QAAQ,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,IAAI,QAAQ;AAClD,QAAQ,MAAM,MAAM,GAAG,WAAW,KAAK;AACvC,cAAc;AACd,cAAc,WAAW,KAAK;AAC9B,kBAAkB;AAClB,kBAAkB,EAAE;AACpB,QAAQ,OAAO,CAAC,EAAE,MAAM,CAAC,EAAE,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC,gBAAgB,CAAC;AACnE,IAAI,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACA;AACA,SAAS,OAAO,CAAC,MAAM,EAAE,GAAG,EAAE,IAAI,EAAE,QAAQ,GAAG,CAAC,EAAE;AAClD,IAAI,MAAM,MAAM,GAAG,IAAI,GAAG,EAAE;AAC5B;AACA,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC9B,QAAQ,IAAI,EAAE,KAAK,SAAS;AAC5B,YAAY;AACZ,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,QAAQ,GAAG,CAAC;AACxC,QAAQ,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjD,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;AACjD,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,EAAE;AACvB,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,EAAE;AACjC,QAAQ,MAAM,EAAE,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG,KAAK,CAAC;AAC1C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACpC,IAAI;AACJ,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE;AAC5B,IAAI,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAC/B,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE;AAC7B,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,QAAQ,MAAM,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3B,QAAQ,IAAI,EAAE;AACd,YAAY,GAAG,IAAI,EAAE,GAAG,EAAE;AAC1B,IAAI;AACJ,IAAI,KAAK,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC;AAC1B,QAAQ,EAAE,IAAI,EAAE,GAAG,EAAE;AACrB,IAAI,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE;AAClB,QAAQ,OAAO,CAAC;AAChB,IAAI,OAAO,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAChD;AACA;AACA;AACA;AACA,SAAS,aAAa,CAAC,CAAC,EAAE,GAAG,EAAE;AAC/B,IAAI,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AACnC,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC;AAC5B,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AAClB,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;AACzB,IAAI,IAAI,CAAC,G
AAG,CAAC;AACb,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AACrC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,IAAI,OAAO,CAAC;AACZ;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AACtC,IAAI,IAAI,CAAC,KAAK,QAAQ,EAAE;AACxB,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACjF,QAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC;AACjD,IAAI;AACJ,SAAS,IAAI,CAAC,KAAK,OAAO,EAAE;AAC5B,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;AACnC,QAAQ,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI;AACJ,SAAS;AACT,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC3C,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,GAAG,KAAK,CAAC,CAAC;AAC/D,IAAI;AACJ;AACA;AACA;AACA;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE;AACnC,IAAI,IAAI,CAAC,KAAK,QAAQ,EAAE;AACxB,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACjF,QAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,GAAG,IAAI,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC;AACjD,IAAI;AACJ,SAAS,IAAI,CAAC,KAAK,OAAO,EAAE;AAC5B,QAAQ,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;AACnC,QAAQ,OAAO,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACrC,IAAI;AACJ,SAAS;AACT,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC3C,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,QAAQ;AACR,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,GAAG,KAAK,GAAG,KAAK,CAAC,CAAC;AAC/D,IAAI;AACJ;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,CAAC,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE;AAClE,IAAI,MAAM,CAAC,GAAG,aAAa,CAAC,CAAC,EAAE,SAAS,CAAC;AACzC,IAAI,MAAM,KAAK,GAAG,IAAI,YAAY,CAAC,WAAW,CAAC,MAAM,CAAC;AACtD,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACjD,QAAQ,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;AAChC,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC;AACpD,IAAI;AACJ,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;AAClC,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;AAC7C,YAAY,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,IAAI,OAAO,KAAK;AAChB;;AAEA;AACA;AACA;AACA,MAAM,UAAU,GAAG,IAAI,GAAG,EAAE;AAC5B,SAAS,aAAa,CAAC,GAAG,EAAE;AAC5B,IAAI,MAAM,CAAC,GAAG,GAAG;AACjB,IAAI,MAAM,MAAM,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;AACpC,IAAI,IAAI,MAAM;AACd,QAAQ,OAAO,MAAM;AACrB,IAAI,IAAI,CAAC,GAAG,GAAG,CAAC,WAAW,EAAE;AAC7B,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,0BAA0B,EAAE,EAAE,CAAC;AACjD,IAAI,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,EAAE;AACvB,QAAQ,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5B,QAAQ,OAAO,CAAC;AAChB,IAAI;AACJ;AACA,IAAI,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3C,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG;AAChC,IAAI;AACJ,SAAS,IAAI,2BAA2B,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAClE,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,2BAA2B,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;AACvG,IAAI;AACJ,SAAS,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC
,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AACrE,QAAQ,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC;AAC1B,IAAI;AACJ;AACA,IAAI,MAAM,KAAK,GAAG;AAClB,QAAQ,CAAC,mBAAmB,EAAE,KAAK,CAAC;AACpC,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;AAC5B,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;AACrB,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;AACrB,QAAQ,CAAC,UAAU,EAAE,KAAK,CAAC;AAC3B,QAAQ,CAAC,aAAa,EAAE,KAAK,CAAC;AAC9B,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC;AACvB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;AACzB,QAAQ,CAAC,cAAc,EAAE,EAAE,CAAC;AAC5B,QAAQ,CAAC,UAAU,EAAE,EAAE,CAAC;AACxB,KAAK;AACL,IAAI,KAAK,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,IAAI,KAAK,EAAE;AACnC,QAAQ,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,EAAE;AACtD,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,GAAG,CAAC;AAClC,YAAY;AACZ,QAAQ;AACR,IAAI;AACJ,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACxB,IAAI,OAAO,CAAC;AACZ;AACA,SAAS,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE;AAClC,IAAI,MAAM,IAAI,GAAG,IAAI,CAAC,WAAW;AACjC,SAAS,OAAO,CAAC,UAAU,EAAE,GAAG;AAChC,SAAS,OAAO,CAAC,aAAa,EAAE,GAAG;AACnC,SAAS,KAAK,CAAC,KAAK;AACpB,SAAS,MAAM,CAAC,OAAO,CAAC;AACxB,IAAI,IAAI,CAAC,MAAM;AACf,QAAQ,OAAO,IAAI;AACnB,IAAI,MAAM,GAAG,GAAG,EAAE;AAClB,IAAI,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC1B,QAAQ,MAAM,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC;AAClC,QAAQ,IAAI,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC;AAC7B,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACvB,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA,SAAS,WAAW,CAAC,CAAC,EAAE;AACxB,IAAI,MAAM,IAAI,GAAG,EAAE;AACnB,IAAI,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AACzB,QAAQ,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC;AACxC,IAAI,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC;AAClC,QAAQ,IAAI,CAAC,IAAI,CAAC,qCAAqC,CAAC;AACxD,IAAI,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,QAAQ,IAAI,CAAC,IAAI,CAAC,oCAAoC,CAAC;AACvD,IAAI,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5B,QAAQ,IAAI,CAAC,IAAI,CAAC,uCAAuC,CAAC;AAC1D,IAAI,OAAO,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC;AACnC;;AA+GA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS,YAAY,CAAC,MAAM,EAAE,KAAK,EAAE;AACrC,IAAI,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/F,IAAI,MAAM,WAAW,GAAG,oEAAoE,CAAC,IAAI,CAAC,KAAK,CAAC;AACxG,IAAI,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI;AAC3B,QAAQ,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE;AAC9C,QAAQ,MAAM,EAAE,GAAG,IAAI,CAAC,WAAW,EAAE;AACrC,QAAQ,IAAI,GAAG,GAAG,CAAC;AACnB,QAAQ,KAAK,MAAM,CAAC,IAAI,GAAG;AAC3B,YAAY,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC9B,gBAAgB,GAAG,EAAE;AACrB,QAAQ,IAAI,WAAW,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC;AAC3C,YAAY,GAAG,IAAI,CAAC,CAAC;AACrB,QAAQ,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;AACxC,IAAI,CAAC,CAAC;AACN;AACA;AACA;AACA;AACA,SAAS,WAAW,CAAC,GAAG,EAAE,CAAC,EAAE;AAC7B,IAAI,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC;AAC5C,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,IAAI,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;AAC1B;AACA;AACA;AACA;AACA,SAAS,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAC1B,IAAI,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACtC;AACA;AACA;AACA;AACA,SAAS,cAAc,CAAC,IAAI,EAAE;AAC9B;AACA,IAAI,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,EAAE,SAAS,GA
AG,GAAG,IAAI;AACnN;AACA,IAAI,MAAM,IAAI,GAAG,YAAY,GAAG,WAAW,CAAC,KAAK,CAAC,GAAG,KAAK;AAC1D,IAAI,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;AAC1C,IAAI,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,EAAE,QAAQ,EAAE,QAAQ,CAAC;AACvD,IAAI,MAAM,MAAM,GAAG,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,CAAC;AAC9E;AACA,IAAI,MAAM,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACjE;AACA,IAAI,MAAM,WAAW,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;AACjF;AACA,IAAI,MAAM,KAAK,GAAG,YAAY,CAAC,MAAM,EAAE,KAAK,CAAC;AAC7C;AACA,IAAI,MAAM,YAAY,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;AAC7C,IAAI,MAAM,MAAM,GAAG,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,GAAG,KAAK,GAAG,IAAI;AACpE,IAAI,MAAM,MAAM,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AAC7C,QAAQ,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;AAChC,QAAQ,MAAM,CAAC,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC;AACjC;AACA,QAAQ,MAAM,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,QAAQ,MAAM,CAAC,GAAG,GAAG,IAAI,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,GAAG,CAAC,CAAC;AACvE;AACA,QAAQ,OAAO,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3B,IAAI,CAAC,CAAC;AACN;AACA,IAAI,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,KAAK,IAAI,IAAI,SAAS,KAAK,MAAM,GAAG,SAAS,GAAG,CAAC,CAAC;AACvF,IAAI,MAAM,IAAI,GAAG,WAAW,CAAC,MAAM,EAAE,GAAG,CAAC;AACzC,IAAI,MAAM,SAAS,GAAG,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;AAChF;AACA,IAAI,MAAM,KAAK,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI;AACrC,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,OAAO,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,IAAI,mBAAmB;AACxG,QAAQ,OAAO;AACf,YAAY,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5B,YAAY,OAAO,EAAE,CAAC,CAAC,OAAO;AAC9B,YAAY,OAAO,EAAE,IAAI;AACzB,YAAY,KAAK,EAAE,CAAC;AACpB,SAAS;AACT,IAAI,CAAC,CAAC;AACN,IAAI,OAAO;AACX,QAAQ,KAAK;AACb,QAAQ,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC;AAC7C,QAAQ,OAAO,EAAE,SAAS;AAC1B,QAAQ,WAAW,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,WAAW,EAAE,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC;AACvD,KAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC;AACvB,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM;AACjH,IAAI,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM;AAC/G,IAAI,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI;AAC/G,IAAI,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE;AAChG,CAAC,CAAC;AACF,SAAS,QAAQ,CAAC,CAAC,EAAE;AACrB,IAAI,OAAO;AACX,SAAS,WAAW;AACpB,SAAS,OAAO,CAAC,qCAAqC,EAAE,GAAG;AAC3D,SAAS,KAAK,CAAC,KAAK;AACpB,SAAS,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACzC;AACA,SAAS,MAAM,CAAC,GAAG,EAAE,EAAE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxD,SAAS,gBAAgB,CAAC,IAAI,EAAE;AAChC,IAAI,MAAM,KAAK,GAAG,IAAI,GAAG,EAAE;AAC3B,IAAI,MAAM,GAAG,GAAG,EAAE;AAClB,IAAI,MAAM,OAAO,GAAG,EAAE;AACtB,IAAI,IAAI,MAAM,GAAG,CAAC;AAClB,IAAI,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC1B,QAAQ,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAA
C;AAChC,QAAQ,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;AACjC,QAAQ,MAAM,EAAE,GAAG,IAAI,GAAG,EAAE;AAC5B,QAAQ,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC9B,YAAY,IAAI,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AACjC,YAAY,IAAI,EAAE,KAAK,SAAS,EAAE;AAClC,gBAAgB,EAAE,GAAG,MAAM,EAAE;AAC7B,gBAAgB,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;AAChC,YAAY;AACZ,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC7C,QAAQ;AACR,QAAQ,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;AACpB,IAAI;AACJ,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM;AACzB,IAAI,MAAM,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACpC,IAAI,KAAK,MAAM,EAAE,IAAI,GAAG;AACxB,QAAQ,KAAK,MAAM,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE;AAClC,YAAY,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC;AACvB,IAAI,MAAM,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAClE,IAAI,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACtE,IAAI,OAAO,EAAE,KAAK,EAAE,EAAE,KAAK,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,OAAO,EAAE;AAClE;AACA,SAAS,WAAW,CAAC,EAAE,EAAE,GAAG,EAAE;AAC9B,IAAI,MAAM,GAAG,GAAG,IAAI,GAAG,EAAE;AACzB,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,EAAE;AAC7B,QAAQ,MAAM,GAAG,GAAG,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACvC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AACvB,QAAQ,KAAK,IAAI,GAAG,GAAG,GAAG;AAC1B,IAAI;AACJ,IAAI,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,KAAK;AAC1C,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG;AAC5B,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC;AAC5B,IAAI,OAAO,GAAG;AACd;AACA,SAAS,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE;AACxB,IAAI,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5D,IAAI,IAAI,GAAG,GAAG,CAAC;AACf,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK,EAAE;AAChC,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AAC9B,QAAQ,IAAI,CAAC,KAAK,SAAS;AAC3B,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC;AACxB,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA,SAAS,SAAS,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE,GAAG,GAAG,EAAE,CAAC,GAAG,IAAI,EAAE;AAC9D,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE;AAC3B,QAAQ,MAAM,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC,QAAQ,IAAI,CAAC,IAAI,CAAC;AAClB,YAAY;AACZ,QAAQ,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,IAAI,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC;AAC9G,QAAQ,MAAM,KAAK,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,IAAI,KAAK,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;AACzE,QAAQ,KAAK,IAAI,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,KAAK,KAAK,IAAI,KAAK,CAAC,CAAC;AAC1D,IAAI;AACJ,IAAI,OAAO,KAAK;AAChB;AACA;AACA,SAAS,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,GAAG,IAAI,EAAE;AAC9C;AACA,IAAI,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC;AACrC,IAAI,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG,EAAE;AAC9B,QAAQ,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,MAAM,CAAC;AACtC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;AACtC,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE;AACxB,YAAY,CAAC,IAAI,CAAC,KAAK,EAAE;AACzB,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;AACvB,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,UAAU,IAAI,CAAC,GAAG,CAAC,CAAC;AACvD,YAAY,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AAC3B,QAAQ;AACR,IAAI;AACJ,IAAI,IAAI,EAAE,GAAG,CAAC;AACd,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,C
AAC,EAAE;AAChC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;AAC7B,IAAI,MAAM,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,KAAK;AACpC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE;AAChC,QAAQ,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AACnB,IAAI,OAAO,GAAG;AACd;AACA;AACA,SAAS,mBAAmB,CAAC,CAAC,EAAE;AAChC,IAAI,OAAO,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC;AACzF;AACA,SAAS,iBAAiB,CAAC,CAAC,EAAE;AAC9B,IAAI,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9C;AACA,SAAS,iBAAiB,CAAC,IAAI,EAAE,CAAC,EAAE;AACpC,IAAI,MAAM,EAAE,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;AACrC,IAAI,MAAM,EAAE,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AACnC,IAAI,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,CAAC;AACxC,QAAQ,OAAO,CAAC;AAChB,IAAI,IAAI,GAAG,GAAG,CAAC;AACf,IAAI,KAAK,MAAM,CAAC,IAAI,EAAE;AACtB,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AACrB,YAAY,GAAG,EAAE;AACjB,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM;AAC1B;AACA,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;AACzB,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAClC,IAAI,MAAM,CAAC,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAClC,IAAI,IAAI,KAAK,GAAG,CAAC;AACjB,IAAI,KAAK,MAAM,CAAC,IAAI,CAAC;AACrB,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACpB,YAAY,KAAK,EAAE;AACnB,IAAI,MAAM,GAAG,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,KAAK;AACvC,IAAI,OAAO,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,KAAK,GAAG,GAAG;AACtC;AACA,SAAS,cAAc,CAAC,CAAC,EAAE;AAC3B,IAAI,OAAO,qCAAqC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC;AAChE;AACA,SAAS,eAAe,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE;AAC9F,IAAI,IAAI,EAAE;AACV,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,KAAK,GAAG,EAAE;AACpB;AACA,IAAI,MAAM,GAAG,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;AACxC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACf,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B,IAAI,MAAM,IAAI,GAAG,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,CAAC;AACvD,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;AAChB,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;AACtB;AACA,IAAI,MAAM,MAAM,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,CAAC,CAAC;AAC5D,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AAClB,IAAI,KAAK,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACpC,IAAI,MAAM,GAAG,GAAG,SAAS,CAAC,CAAC,EAAE,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACjD,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACf,IAAI,KAAK,CAAC,IAAI,CAAC,gBAAgB,CAAC;AAChC;AACA,IAAI,MAAM,KAAK,GAAG,mBAAmB,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACxE,IAAI,MAAM,OAAO,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACxE,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;AACzB,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AAC3B,IAAI,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC;AAC/B;AACA,IAAI,CAAC,CAAC,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AACvD,IAAI,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC;AACnC;AACA,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,GAAG,KAAK,CAAC,UAAU,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,EAAE;AACxE,IAAI,KAAK,CAAC,IAAI,CAAC,kBAAkB,CAAC;AAClC;AACA,IAAI,MAAM,QAAQ,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AACjD,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC;AACvC,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC;AAC9B;AACA,IAAI,IAAI,KAAK,IAAI,KAAK,EAAE;AACxB,QAAQ,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC;AAC3B,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC/C,YAAY,GAAG,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,C
AAC,CAAC,CAAC;AAC/C,QAAQ;AACR,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACnB,QAAQ,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;AAC9B,QAAQ,CAAC,CAAC,IAAI,CAAC,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;AACjC,QAAQ,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC;AACjC,IAAI;AACJ,IAAI,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE;AAC/B;AACA;AACA,MAAM,KAAK,CAAC;AACZ,IAAI,WAAW,GAAG;AAClB,QAAQ,IAAI,CAAC,CAAC,GAAG,IAAI;AACrB,QAAQ,IAAI,CAAC,EAAE,GAAG,IAAI;AACtB,QAAQ,IAAI,CAAC,KAAK,GAAG,IAAI;AACzB,IAAI;AACJ,IAAI,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,EAAE;AAC7B,QAAQ,IAAI,EAAE;AACd,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAC1B,QAAQ,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,MAAM,KAAK,CAAC;AACnF,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;AAChC,YAAY,IAAI,CAAC,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACxC,YAAY;AACZ,QAAQ;AACR;AACA,QAAQ,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACvC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,YAAY,CAAC,IAAI,CAAC;AAClB,YAAY,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AACrB,YAAY,IAAI,CAAC,GAAG,CAAC;AACrB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACrC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC1B,YAAY;AACZ,YAAY,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC;AAC1C,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1E,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAClC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC;AACpD;AACA,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,MAAM,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AACtE,QAAQ,MAAM,EAAE,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACtC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACxC,gBAAgB,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE;AACnC,gBAAgB,MAAM,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AACjC,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE;AAC3C,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC;AAC1C,YAAY;AACZ,QAAQ;AACR,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,MAAM;AAC7B,QAAQ;AACR;AACA,QAAQ,MAAM,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,EAAE,MAAM,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;AACtE,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACzC,gBAAgB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,gBAAgB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC1C,oBAAoB,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAA
C,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC;AAClG,YAAY;AACZ,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AACpC,YAAY,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;AACzC,QAAQ;AACR,QAAQ,MAAM,CAAC,GAAG,IAAI,YAAY,CAAC,CAAC,CAAC;AACrC,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AACzC,YAAY,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACxB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE;AAC1C,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;AACzC,QAAQ;AACR,QAAQ,IAAI,CAAC,CAAC,GAAG,CAAC;AAClB,QAAQ,IAAI,CAAC,EAAE,GAAG,EAAE;AACpB,QAAQ,IAAI,CAAC,KAAK,GAAG,GAAG;AACxB,IAAI;AACJ,IAAI,OAAO,CAAC,CAAC,EAAE;AACf,QAAQ,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,KAAK;AAC9C,YAAY,OAAO,CAAC;AACpB,QAAQ,IAAI,CAAC,GAAG,CAAC;AACjB,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAChD,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC;AACzD,YAAY,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC9B,QAAQ;AACR,QAAQ,OAAO,CAAC;AAChB,IAAI;AACJ;AACA;AACA,SAAS,iBAAiB,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE;AAC5C,IAAI,IAAI,EAAE;AACV,IAAI,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;AACnD,IAAI,IAAI,CAAC,GAAG,CAAC;AACb,IAAI,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC;AACjG,IAAI,IAAI,SAAS,IAAI,mBAAmB,CAAC,GAAG,CAAC;AAC7C,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,OAAO,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE,CAAC,CAAC;AAC7D,IAAI,IAAI,OAAO,IAAI,IAAI,IAAI,iBAAiB,CAAC,GAAG,CAAC;AACjD,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC;AACtD,IAAI,MAAM,OAAO,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;AAC/C,IAAI,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC;AACtD,IAAI,MAAM,EAAE,GAAG,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC;AACvD,IAAI,IAAI,GAAG,GAAG,IAAI;AAClB,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,IAAI,EAAE,GAAG,GAAG;AAChB,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;AAC5B,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAC5D,IAAI,MAAM,KAAK,GAAG,QAAQ,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC5D,IAAI,IAAI,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,UAAU,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,CAAC,IAAI,CAAC;AACxE,QAAQ,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,GAAG,GAAG,KAAK,CAAC,CAAC;AACzD,IAAI,OAAO,CAAC;AACZ;AACA,SAAS,OAAO,CAAC,CAAC,EAAE;AACpB,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE;AAChB,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1B,IAAI;AACJ,SAAS;AACT,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7B,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1B,IAAI;AACJ;AACA;AACA,SAAS,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,GAAG,EAAE,WAAW,GAAG,IAAI,EAAE;AAC7D,IAAI,MAAM,GA
AG,GAAG,EAAE;AAClB,IAAI,MAAM,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC;AACjD,IAAI,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,MAAM,EAAE,GAAG,gBAAgB,CAAC,IAAI,CAAC;AACxD,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,EAAE,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;AAC9D,IAAI,MAAM,WAAW,GAAG,IAAI,GAAG,EAAE;AACjC,IAAI,IAAI,IAAI,GAAG,CAAC;AAChB,IAAI,OAAO,WAAW,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE;AAC7C,QAAQ,IAAI,OAAO,GAAG,EAAE,EAAE,OAAO,GAAG,CAAC,QAAQ;AAC7C,QAAQ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAChD,YAAY,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AAClC,gBAAgB;AAChB,YAAY,MAAM,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC;AAClC,YAAY,IAAI,GAAG,GAAG,CAAC;AACvB,YAAY,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;AACzC,gBAAgB,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC;AACxD,gBAAgB,IAAI,GAAG,GAAG,GAAG;AAC7B,oBAAoB,GAAG,GAAG,GAAG;AAC7B,YAAY;AACZ,YAAY,MAAM,GAAG,GAAG,MAAM,GAAG,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,GAAG,MAAM,IAAI,GAAG;AACnE,YAAY,IAAI,GAAG,GAAG,OAAO,EAAE;AAC/B,gBAAgB,OAAO,GAAG,GAAG;AAC7B,gBAAgB,OAAO,GAAG,CAAC;AAC3B,YAAY;AACZ,QAAQ;AACR,QAAQ,IAAI,OAAO,GAAG,CAAC;AACvB,YAAY;AACZ,QAAQ,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;AACtC,QAAQ,MAAM,MAAM,GAAG,CAAC,MAAM,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AACpD,QAAQ,IAAI,IAAI,GAAG,MAAM,GAAG,WAAW,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC;AACzD,YAAY;AACZ,QAAQ,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC;AACxB,QAAQ,IAAI,IAAI,MAAM;AACtB,QAAQ,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC;AAChC,IAAI;AACJ,IAAI,OAAO,GAAG;AACd;AACA;AACA;AACA,SAAS,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AAC1C,IAAI,IAAI,EAAE,EAAE,EAAE;AACd;AACA,IAAI,MAAM,EAAE,WAAW,GAAG,IAAI,EAAE,aAAa,GAAG,EAAE,EAAE,cAAc,GAAG,IAAI,EAAE,kBAAkB,GAAG,KAAK,GAAG,GAAG,IAAI;AAC/G,IAAI,MAAM,IAAI,GAAG,CAAC,KAAK,EAAE,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC;AAC7D,IAAI,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,gBAAgB,CAAC,IAAI,CAAC;AAChE,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,EAAE,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;AAChE,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC;AAC3B,IAAI,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC9B,IAAI,MAAM,KAAK,GAAG,aAAa,GAAG,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,aAAa,CAAC,GAAG,SAAS;AACtF,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,CAAC,GAAG,EAAE;AAChB,IAAI,MAAM,SAAS,GAAG,EAAE;AACxB,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,QAAQ,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC3B,QAAQ,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;AACnC,QAAQ,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;AACtC,QAAQ,MAAM,KAAK,GAAG,aAAa,GAAG,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,aAAa,EAAE,IAAI,GAAG,CAAC,CAAC,GAAG,SAAS;AACpG,QAAQ,MAAM,KAAK,GAAG,eAAe,CAAC,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,KAAK,CAAC;AACzH,QAAQ,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC;AAC5B,QAAQ,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,CAAC;AACxD,QAAQ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;AACrB,IAAI;AACJ,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;AAC3D,IAAI,IAAI,OAAO,EAAE;AACjB,QAAQ,MAAM,MAAM,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC;AACjE,QAAQ,IAAI,MAAM,IAAI,CAAC,EAAE;AACzB,YAAY,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE;AAC7C,gBAAgB,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACzE,QAAQ;AACR,IAAI;AACJ,IAAI,MAAM,EAAE,GAAG,IAAI,KAAK,EAAE;
AAC1B,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,CAAC;AAC7B,IAAI,IAAI,IAAI,GAAG,QAAQ,EAAE,IAAI,GAAG,CAAC,QAAQ;AACzC,IAAI,MAAM,SAAS,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC/C,IAAI,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE;AAC/B,QAAQ,IAAI,CAAC,GAAG,IAAI;AACpB,YAAY,IAAI,GAAG,CAAC;AACpB,QAAQ,IAAI,CAAC,GAAG,IAAI;AACpB,YAAY,IAAI,GAAG,CAAC;AACpB,IAAI;AACJ,IAAI,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,IAAI,CAAC;AAC7C,IAAI,MAAM,YAAY,GAAG,kBAAkB,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC,KAAK,MAAM,IAAI,IAAI,EAAE,KAAK,MAAM,GAAG,EAAE,GAAG,EAAE,GAAG,SAAS;AACtK,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACxC,QAAQ,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,IAAI,KAAK;AACjD,QAAQ,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC;AACtF,QAAQ,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,GAAG,EAAE,UAAU,EAAE,CAAC,EAAE,CAAC;AAC1F,QAAQ,IAAI,cAAc;AAC1B,YAAY,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,IAAI,YAAY;AACxB,YAAY,IAAI,CAAC,cAAc,GAAG,YAAY;AAC9C,QAAQ,OAAO,IAAI;AACnB,IAAI,CAAC,CAAC;AACN,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ,CAAC;AAClD,IAAI,OAAO,MAAM;AACjB;AACA;AACA,SAAS,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AACtC;AACA,IAAI,MAAM,EAAE,UAAU,GAAG,IAAI,EAAE,UAAU,GAAG,IAAI,EAAE,MAAM,GAAG,IAAI,EAAE,SAAS,GAAG,GAAG,EAAE,WAAW,GAAG,IAAI,EAAE,GAAG,IAAI;AAC7G,IAAI,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;AAC3B,QAAQ,OAAO,EAAE;AACjB,IAAI,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ;AAClC,IAAI,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,UAAU,IAAI,UAAU,IAAI,CAAC,CAAC,QAAQ,KAAK,GAAG,GAAG,UAAU,CAAC,CAAC;AACvG,IAAI,MAAM,IAAI,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,GAAG,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC7D,IAAI,IAAI,CAAC,MAAM,EAAE;AACjB,QAAQ,MAAM,GAAG,GAAG,EAAE;AACtB,QAAQ,IAAI,IAAI,GAAG,CAAC;AACpB,QAAQ,KAAK,MAAM,CAAC,IAAI,IAAI,EAAE;AAC9B,YAAY,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,OAAO,IAAI,EAAE,EAAE,MAAM;AAChD,YAAY,IAAI,IAAI,GAAG,GAAG,GAAG,WAAW,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC;AAC1D,gBAAgB;AAChB,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACvB,YAAY,IAAI,IAAI,GAAG;AACvB,QAAQ;AACR,QAAQ,OAAO,GAAG;AAClB,IAAI;AACJ,IAAI,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AAC3I,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,WAAW,CAAC;AACrD;AACA;AACA,SAAS,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAG,EAAE,EAAE;AACnD;AACA,IAAI,MAAM,MAAM,GAAG,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC;AAC9C,IAAI,OAAO,SAAS,CAAC,MAAM,EAAE,IAAI,CAAC;AAClC;AAk2CA,SAAS,WAAW,CAAC,KAAK,EAAE,IAAI,EAAE;AAClC;AACA,IAAI,IAAI,KAAK,CAAC,OAAO,KAAK,kBAAkB,IAAI,KAAK,CAAC,OAAO,KAAK,kBAAkB,EAAE;AACtF,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC,2BAA2B,EAAE,KAAK,CAAC,OAAO,CAAC,mDAAmD,CAAC,CAAC;AACzH,IAAI;AACJ;AACA,IAAI,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC,CAAC;AACrE;AACA,IAAI,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC;AACzC,IAAI,MAAM,GAAG,GAAG,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C;AACA,IAAI,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC1C,QAAQ,OAAO,EAAE,CAAC,CAAC,OAAO;AAC1B,QAAQ,OAAO,EAAE,CAAC,CAAC,OAAO,IAAI,EAAE;AAChC,QAAQ,IAAI,EAAE,CAAC,CAAC,IAAI;AACpB,QAAQ,KAAK,EAAE,CAAC,CAAC,KAAK;AACtB,QAAQ,KAAK,EAAE,CAAC,CAAC;AACjB,KAAK,CAAC,CAAC;AACP;AACA,IAAI,MAAM,SAAS,GAAG,
KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,IAAI;AACjD,QAAQ,MAAM,CAAC,GAAG,IAAI,GAAG,EAAE;AAC3B,QAAQ,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG;AAChC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;AACvB,QAAQ,OAAO,CAAC;AAChB,IAAI,CAAC,CAAC;AACN;AACA,IAAI,MAAM,YAAY,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC;AACvD,IAAI,MAAM,WAAW,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACxE;AACA,IAAI,MAAM,aAAa,GAAmF,CAAC,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,KAAK,SAAS,CAAC,MAAM;AACzK,IAAI,IAAI,SAAS;AACjB,IAGS,IAAI,aAAa,EAAE;AAC5B,QAAQ,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC;AACnF,IAAI;AACJ,SAAS;AACT,QAAQ,SAAS,GAAG,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClE,IAAI;AACJ,IAAI,OAAO;AACX,QAAQ,QAAQ;AAChB,QAAQ,QAAQ;AAChB,QAAQ,GAAG;AACX,QAAQ,MAAM;AACd,QAAQ,SAAS;AACjB,QAAQ,YAAY;AACpB,QAAQ,WAAW;AACnB,QAAQ,SAAS;AACjB,KAAK;AACL;;ACxoXA;AACA;AA8BA;;;;;AAKG;MACU,eAAe,CAAA;AAKxB,IAAA,WAAA,CAAY,MAA8B,EAAA;QAJlC,IAAA,CAAA,UAAU,GAA8B,IAAI;QAC5C,IAAA,CAAA,KAAK,GAA2B,IAAI;QAIxC,IAAI,CAAC,MAAM,GAAG;AACV,YAAA,SAAS,EAAE;AACP,gBAAA,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,IAAI,IAAI;AAC3C,gBAAA,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,IAAI,GAAG;AAClD,gBAAA,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,IAAI,IAAI;AACzC,gBAAA,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,IAAI,GAAG;AAC9C,gBAAA,UAAU,EAAE,MAAM,EAAE,SAAS,EAAE,UAAU,IAAI;AAChD,aAAA;AACD,YAAA,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,CAAC;AACvB,YAAA,OAAO,EAAE,MAAM,EAAE,OAAO,IAAI;SAC/B;IACL;AAEA;;AAEG;IACH,MAAM,SAAS,CAAC,KAAsB,EAAA;AAClC,QAAA,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE;AAEnC,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,UAAU,GAAG,WAAW,CAAC,KAAK,CAAC;AAEpC,QAAA,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;YACrB,MAAM,QAAQ,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;AAC9C,YAAA,OAAO,CAAC,GAAG,CAAC,CAAA,kCAAA,EAAqC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA,EAAA,CAAI,CAAC;YACzE,OAAO,CAAC,GAAG,CAAC,CAAA,kBAAA,EAAqB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAA,SAAA,EAAY,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAA,MAAA,CAAQ,CAAC;QACpH;IACJ;AAEA;;AAEG;IACH,MAAM,gBAAgB,CAAC,GAAW,EAAA;AAC9B,QAAA,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC;AACjC,QAAA,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,CAAA,2BAAA,EAA8B,GAAG,CAAA,EAAA,EAAK,QAAQ,CAAC,MAAM,CAAA,CAAE,CAAC;QAC5E;AACA,QAAA,MAAM,KAAK,GAAoB,MAAM,QAAQ,CAAC,IAAI,EAAE;AACpD,QAAA,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC/B;AAEA;;AAEG;IACH,OAAO,GAAA;AACH,QAAA,OAAO,IAAI,CAAC,UAAU,KAAK,IAAI;IACnC;AAEA;;AAEG;IACH,YAAY,GAAA;QACR,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI,CAAC,UAAU;AAAE,YAAA,OAAO,IAAI;QAEhD,OAAO;AACH,YAAA,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO;AAC3B,YAAA,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO;AAC3B,YAAA,aAAa,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM;AAC5C,YAAA,cAAc,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AAC7C,YAAA,aAAa,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,MAAM;YAClD,eAAe,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC;AACrD,YAAA,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;SACxB;IACL;AAEA;;AAEG;IACH,MAAM,GAAG,CAAC,KAAa,EAAA;AACnB,QAAA,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE;QAEnC,IAAI,CAAC,IAAI,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACjC,OAAO;AACH,gBAAA,MAAM,EAAE,oDAAoD;AAC5D,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,cAAc,EAAE,CAAC;AACjB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,cAAc,EAAE;aACnB;QACL;AAEA,QAAA,IAAI;;AAEA,YAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAmC;AAC/D,YAAA,MAAM,MAAM,GAAI,QAAQ,CAAC,MAAiB,IAAI,KAAK;AACnD,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,YAAA,MAAM,IAAI,GAAI,QAAQ,CAAC,IAAe,IAAI,GAAG;AAC7C,YAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,KAAK;AACjD,YAAA,MAAM,QAAQ,GAAI,QAAQ,CAA
C,QAAmB,IAAI,GAAG;AACrD,YAAA,MAAM,OAAO,GAAI,QAAQ,CAAC,OAAmB,IAAI,IAAI;AACrD,YAAA,MAAM,WAAW,GAAI,QAAQ,CAAC,WAAuB,IAAI,KAAK;AAC9D,YAAA,MAAM,SAAS,GAAI,QAAQ,CAAC,SAAoB,IAAI,GAAG;;YAGvD,MAAM,eAAe,GAAG,cAAc,CAAC;gBACnC,KAAK;AACL,gBAAA,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;AAC9B,gBAAA,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ;AAClC,gBAAA,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,GAAG;AACxB,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,gBAAA,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY;AAC1C,gBAAA,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,WAAW;AACxC,gBAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AACxC,gBAAA,MAAM,EAAE,MAAoC;gBAC5C,KAAK;gBACL,KAAK;gBACL,IAAI;gBACJ,KAAK;gBACL,QAAQ;gBACR,OAAO;gBACP,WAAW;gBACX,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,GAAG,CAAC;gBAC1B;AACH,aAAA,CAAC;AAEF,YAAA,IAAI,OAA2B;;AAG/B,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,IAAI,eAAe,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;AACnE,gBAAA,MAAM,eAAe,GAAG,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;oBAC3F,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,oBAAA,UAAU,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACzC,iBAAA,CAAC,CAAC;AAEH,gBAAA,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,eAAe,EAAE;AACrD,oBAAA,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW;AAC9C,oBAAA,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM;AACpC,oBAAA,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS;AAC1C,oBAAA,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC;AACrC,iBAAA,CAAC;gBAEF,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,KAAoB,MAAM;oBACzE,OAAO,EAAE,KAAK,CAAC,OAAO;oBACtB,OAAO,EAAE,KAAK,CAAC,OAAO;oBACtB,IAAI,EAAE,KAAK,CAAC,IAAI;AAChB,oBAAA,KAAK,EAAE,KAAK,CAAC,UAAU,IAAI,CAAC;oBAC5B,WAAW,EAAE,KAAK,CAAC,QAAQ;oBAC3B,WAAW,EAAE,KAAK,CAAC;AACtB,iBAAA,CAAC,CAAC;YACP;iBAAO;gBACH,OAAO,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;oBACxG,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,oBAAA,KAAK,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACpC,iBAAA,CAAC,CAAC;YACP;;AAGA,YAAA,MAAM,MAAM,GAAG,MAAM,kBAAkB,CACnC,KAAK,EACL,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CACjF;YAED,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;;YAGpD,MAAM,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI,CAAC;AACvC,YAAA,MAAM,UAAU,GAAG,IAAI,CAAC,mBAAmB,CAAC,QAAQ,EAAE,OAAO,CAAC,MAAM,CAAC;;YAGrE,MAAM,mBAAmB,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;YAErD,OAAO;gBACH,MAAM;gBACN,UAAU;gBACV,OAAO;gBACP,cAAc;AACd,gBAAA,QAAQ,EAAE,KAAK;gBACf;aACH;QACL;QAAE,OAAO,KAAK,EAAE;YACZ,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS;AACpD,YAAA,OAAO,CAAC,KAAK,CAAC,2CAA2C,EAAE,KAAK,CAAC;YAEjE,OAAO;AACH,gBAAA,MAAM,EAAE,2DAA2D;AACnE,gBAAA,UAAU,EAAE,MAAM;AAClB,gBAAA,OAAO,EAAE,EAAE;gBACX,cAAc;AACd,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,cAAc,EAAE,KAAK,YAAY,KAAK,GAAG,KAAK,CAAC,OAAO,GAAG;aAC5D;QACL;IACJ;AAEA;;AAEG;AACH,IAAA,MAAM,QAAQ,CAAC,KAAa,EAAE,IAAa,EAAA;QACvC,IAAI,CAAC,IAAI,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAA,OAAO,EAAE;QACb;QAEA,MAAM,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI;;AAGlC,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAmC;AAC/D,QAAA,MAAM,MAAM,GAAI,QAAQ,CAAC,MAAiB,IAAI,KAAK;AACnD,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,GAAG;AAC/C,QAAA,MAAM,IAAI,GAAI,QAAQ,CAAC,IAAe,IAAI,GAAG;AAC7C,QAAA,MAAM,KAAK,GAAI,QAAQ,CAAC,KAAgB,IAAI,KAAK;AACjD,QAAA,MAAM,QAAQ,GAAI,QAAQ,CAAC,QAAmB,IAAI,GAAG;AACrD,QAAA,MAAM,OAAO,GAAI,QAAQ,CAAC,OAAmB,IAAI
,IAAI;AACrD,QAAA,MAAM,WAAW,GAAI,QAAQ,CAAC,WAAuB,IAAI,KAAK;AAC9D,QAAA,MAAM,SAAS,GAAI,QAAQ,CAAC,SAAoB,IAAI,GAAG;QAEvD,MAAM,eAAe,GAAG,cAAc,CAAC;YACnC,KAAK;AACL,YAAA,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM;AAC9B,YAAA,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ;AAClC,YAAA,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,GAAG;AACxB,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,SAAS;AACpC,YAAA,YAAY,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY;AAC1C,YAAA,WAAW,EAAE,IAAI,CAAC,UAAU,CAAC,WAAW;AACxC,YAAA,SAAS,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI;AACxC,YAAA,MAAM,EAAE,MAAoC;YAC5C,KAAK;YACL,KAAK;YACL,IAAI;YACJ,KAAK;YACL,QAAQ;YACR,OAAO;YACP,WAAW;AACX,YAAA,IAAI,EAAE,CAAC;YACP;AACH,SAAA,CAAC;AAEF,QAAA,OAAO,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAyB,EAAE,GAAW,MAAM;YAC1E,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,YAAA,KAAK,EAAE,eAAe,CAAC,MAAM,CAAC,GAAG;AACpC,SAAA,CAAC,CAAC;IACP;AAEA;;AAEG;AACH,IAAA,aAAa,CAAC,KAAa,EAAA;AACvB,QAAA,MAAM,UAAU,GAAG,KAAK,CAAC,WAAW,EAAE;QACtC,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;;QAGrC,MAAM,eAAe,GAAG,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC;AAC7F,QAAA,MAAM,kBAAkB,GAAG,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,WAAW,CAAC;AACxF,QAAA,MAAM,kBAAkB,GAAG,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,SAAS,CAAC;AAC7F,QAAA,MAAM,kBAAkB,GAAG,CAAC,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;QAE9F,IAAI,IAAI,GAAqC,SAAS;QACtD,IAAI,UAAU,GAAG,GAAG;QACpB,MAAM,QAAQ,GAAa,EAAE;;AAG7B,QAAA,KAAK,MAAM,MAAM,IAAI,kBAAkB,EAAE;AACrC,YAAA,IAAI,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;gBAC7B,IAAI,GAAG,cAAc;gBACrB,UAAU,GAAG,GAAG;AAChB,gBAAA,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC;gBACrB;YACJ;QACJ;AAEA,QAAA,IAAI,IAAI,KAAK,SAAS,EAAE;;AAEpB,YAAA,MAAM,SAAS,GAAG,KAAK,CAAC,CAAC,CAAC;YAE1B,IAAI,kBAAkB,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;gBAChG,IAAI,GAAG,YAAY;gBACnB,UAAU,GAAG,GAAG;gBAChB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,MAAM,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5E;iBAAO,IAAI,kBAAkB,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;gBACvG,IAAI,GAAG,YAAY;gBACnB,UAAU,GAAG,GAAG;gBAChB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,MAAM,CAAC,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5E;AAAO,iBAAA,IAAI,eAAe,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;gBAC5C,IAAI,GAAG,SAAS;gBAChB,UAAU,GAAG,GAAG;AAChB,gBAAA,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC;YAC5B;QACJ;AAEA,QAAA,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE;IACzC;AAEA;;AAEG;IACK,mBAAmB,CAAC,QAAgB,EAAE,WAAmB,EAAA;QAC7D,IAAI,WAAW,KAAK,CAAC;AAAE,YAAA,OAAO,MAAM;QACpC,IAAI,QAAQ,IAAI,GAAG;YAAE,OAAO,QAAQ,CAAC;QACrC,IAAI,QAAQ,IAAI,GAAG;AAAE,YAAA,OAAO,QAAQ;QACpC,IAAI,QAAQ,IAAI,GAAG;AAAE,YAAA,OAAO,KAAK;AACjC,QAAA,OAAO,KAAK;IAChB;AAEA;;AAEG;IACH,KAAK,GAAA;AACD,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI;AACtB,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI;IACrB;AACH;;;;;;;ACjWD;AACA;AAEA;AA8FM,SAAU,YAAY,CAAC,MAAwB,EAAA;AACjD,IAAA,OAAO,IAAI,gBAAgB,CAAC,MAAM,CAAC;AACvC;;;;","x_google_ignoreList":[1,2,14]}