@astermind/cybernetic-chatbot-client 1.0.9 → 2.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. package/README.md +240 -0
  2. package/dist/CyberneticClient.d.ts +59 -1
  3. package/dist/CyberneticClient.d.ts.map +1 -1
  4. package/dist/CyberneticLocalRAG.d.ts +24 -1
  5. package/dist/CyberneticLocalRAG.d.ts.map +1 -1
  6. package/dist/CyberneticOfflineStorage.d.ts +63 -0
  7. package/dist/CyberneticOfflineStorage.d.ts.map +1 -0
  8. package/dist/agentic/CyberneticIntentClassifier.d.ts +40 -0
  9. package/dist/agentic/CyberneticIntentClassifier.d.ts.map +1 -1
  10. package/dist/cybernetic-chatbot-client-full.esm.js +2011 -10
  11. package/dist/cybernetic-chatbot-client-full.esm.js.map +1 -1
  12. package/dist/cybernetic-chatbot-client-full.min.js +1 -1
  13. package/dist/cybernetic-chatbot-client-full.min.js.map +1 -1
  14. package/dist/cybernetic-chatbot-client-full.umd.js +2011 -11
  15. package/dist/cybernetic-chatbot-client-full.umd.js.map +1 -1
  16. package/dist/cybernetic-chatbot-client.esm.js +2011 -10
  17. package/dist/cybernetic-chatbot-client.esm.js.map +1 -1
  18. package/dist/cybernetic-chatbot-client.min.js +1 -1
  19. package/dist/cybernetic-chatbot-client.min.js.map +1 -1
  20. package/dist/cybernetic-chatbot-client.umd.js +2011 -11
  21. package/dist/cybernetic-chatbot-client.umd.js.map +1 -1
  22. package/dist/full.d.ts +7 -2
  23. package/dist/full.d.ts.map +1 -1
  24. package/dist/index.d.ts +6 -1
  25. package/dist/index.d.ts.map +1 -1
  26. package/dist/omega/OmegaOfflineRAG.d.ts +50 -0
  27. package/dist/omega/OmegaOfflineRAG.d.ts.map +1 -0
  28. package/dist/omega/index.d.ts +3 -0
  29. package/dist/omega/index.d.ts.map +1 -0
  30. package/dist/omega/types.d.ts +118 -0
  31. package/dist/omega/types.d.ts.map +1 -0
  32. package/dist/types.d.ts +177 -0
  33. package/dist/types.d.ts.map +1 -1
  34. package/package.json +2 -1
@@ -1,6 +1,6 @@
1
1
  (function (global, factory) {
2
- typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
3
- typeof define === 'function' && define.amd ? define(['exports'], factory) :
2
+ typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('fs'), require('path')) :
3
+ typeof define === 'function' && define.amd ? define(['exports', 'fs', 'path'], factory) :
4
4
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.AsterMindCyberneticFull = {}));
5
5
  })(this, (function (exports) { 'use strict';
6
6
 
@@ -599,17 +599,13 @@
599
599
 
600
600
  // src/CyberneticLocalRAG.ts
601
601
  // Local RAG processing for offline fallback
602
- /**
603
- * Local RAG engine using TF-IDF similarity
604
- *
605
- * Provides offline fallback when backend is unavailable.
606
- * Uses simple TF-IDF for document matching (no vector embeddings).
607
- */
608
602
  class CyberneticLocalRAG {
609
603
  constructor() {
610
604
  this.documents = [];
611
605
  this.idf = new Map();
612
606
  this.indexed = false;
607
+ this.loadedFromExport = false;
608
+ this.vocabulary = new Map(); // term -> index for sparse vectors
613
609
  }
614
610
  /**
615
611
  * Check if documents are indexed
@@ -710,7 +706,66 @@
710
706
  reset() {
711
707
  this.documents = [];
712
708
  this.idf = new Map();
709
+ this.vocabulary = new Map();
713
710
  this.indexed = false;
711
+ this.loadedFromExport = false;
712
+ this.exportVersion = undefined;
713
+ this.exportedAt = undefined;
714
+ }
715
+ /**
716
+ * Load pre-computed vectors from an export file
717
+ * This skips client-side TF-IDF computation by using pre-computed values
718
+ *
719
+ * @param exportData - The offline vector export data
720
+ */
721
+ async loadFromExport(exportData) {
722
+ this.reset();
723
+ // Load pre-computed IDF from export
724
+ this.idf = new Map(Object.entries(exportData.tfidfIndex.idf));
725
+ // Build vocabulary mapping from IDF keys
726
+ let vocabIndex = 0;
727
+ for (const term of this.idf.keys()) {
728
+ this.vocabulary.set(term, vocabIndex++);
729
+ }
730
+ // Load documents with pre-computed TF-IDF vectors
731
+ for (const doc of exportData.documents) {
732
+ // Convert sparse vector to Map
733
+ const tfidfMap = new Map();
734
+ for (const [termIndex, weight] of doc.tfidfVector) {
735
+ // Find the term from vocabulary by index
736
+ for (const [term, idx] of this.vocabulary) {
737
+ if (idx === termIndex) {
738
+ tfidfMap.set(term, weight);
739
+ break;
740
+ }
741
+ }
742
+ }
743
+ this.documents.push({
744
+ id: doc.id,
745
+ title: doc.title,
746
+ content: doc.content,
747
+ tokens: [], // Not needed when loading from export
748
+ tfidf: tfidfMap
749
+ });
750
+ }
751
+ this.indexed = true;
752
+ this.loadedFromExport = true;
753
+ this.exportVersion = exportData.version;
754
+ this.exportedAt = exportData.meta.exportedAt;
755
+ console.log(`[CyberneticLocalRAG] Loaded ${this.documents.length} documents from export (v${exportData.version})`);
756
+ }
757
+ /**
758
+ * Get export status information
759
+ */
760
+ getExportStatus() {
761
+ return {
762
+ loaded: this.indexed,
763
+ loadedFromExport: this.loadedFromExport,
764
+ documentCount: this.documents.length,
765
+ chunkCount: this.documents.length, // Each document is a chunk
766
+ exportVersion: this.exportVersion,
767
+ exportedAt: this.exportedAt
768
+ };
714
769
  }
715
770
  // ==================== PRIVATE METHODS ====================
716
771
  /**
@@ -800,6 +855,139 @@
800
855
  }
801
856
  }
802
857
 
858
+ // src/CyberneticOfflineStorage.ts
859
+ // IndexedDB storage for offline vector exports
860
+ /**
861
+ * IndexedDB storage for offline vector exports
862
+ * Provides persistent caching of pre-computed TF-IDF vectors
863
+ */
864
+ class CyberneticOfflineStorage {
865
+ constructor() {
866
+ this.db = null;
867
+ this.dbPromise = null;
868
+ this.dbName = 'cybernetic-offline-vectors';
869
+ this.dbVersion = 1;
870
+ }
871
+ /**
872
+ * Initialize IndexedDB for vector storage
873
+ */
874
+ async initDB() {
875
+ if (this.db) {
876
+ return this.db;
877
+ }
878
+ if (this.dbPromise) {
879
+ return this.dbPromise;
880
+ }
881
+ this.dbPromise = openDB(this.dbName, this.dbVersion, {
882
+ upgrade(db) {
883
+ if (!db.objectStoreNames.contains('vectors')) {
884
+ db.createObjectStore('vectors', { keyPath: 'id' });
885
+ }
886
+ }
887
+ });
888
+ this.db = await this.dbPromise;
889
+ return this.db;
890
+ }
891
+ /**
892
+ * Store vector export in IndexedDB
893
+ */
894
+ async store(id, exportData) {
895
+ const db = await this.getDB();
896
+ await db.put('vectors', {
897
+ id,
898
+ exportVersion: exportData.version,
899
+ exportedAt: exportData.meta.exportedAt,
900
+ type: exportData.type,
901
+ data: exportData
902
+ });
903
+ console.log(`[CyberneticOfflineStorage] Stored ${exportData.meta.chunkCount} chunks`);
904
+ }
905
+ /**
906
+ * Retrieve vector export from IndexedDB
907
+ */
908
+ async retrieve(id) {
909
+ const db = await this.getDB();
910
+ const record = await db.get('vectors', id);
911
+ return record?.data ?? null;
912
+ }
913
+ /**
914
+ * Check if cached vectors are valid (not expired)
915
+ */
916
+ async isValid(id, maxAge) {
917
+ const db = await this.getDB();
918
+ const record = await db.get('vectors', id);
919
+ if (!record)
920
+ return false;
921
+ const exportTime = new Date(record.exportedAt).getTime();
922
+ return Date.now() - exportTime < maxAge;
923
+ }
924
+ /**
925
+ * Get cached export metadata without loading full data
926
+ */
927
+ async getMeta(id) {
928
+ const db = await this.getDB();
929
+ const record = await db.get('vectors', id);
930
+ if (!record)
931
+ return null;
932
+ return {
933
+ version: record.exportVersion,
934
+ exportedAt: record.exportedAt,
935
+ documentCount: record.data.meta.documentCount,
936
+ chunkCount: record.data.meta.chunkCount,
937
+ type: record.type
938
+ };
939
+ }
940
+ /**
941
+ * Check if any cached vectors exist
942
+ */
943
+ async hasData() {
944
+ const db = await this.getDB();
945
+ const count = await db.count('vectors');
946
+ return count > 0;
947
+ }
948
+ /**
949
+ * Get all cached export IDs
950
+ */
951
+ async getStoredIds() {
952
+ const db = await this.getDB();
953
+ return db.getAllKeys('vectors');
954
+ }
955
+ /**
956
+ * Clear specific cached vectors
957
+ */
958
+ async delete(id) {
959
+ const db = await this.getDB();
960
+ await db.delete('vectors', id);
961
+ }
962
+ /**
963
+ * Clear all cached vectors
964
+ */
965
+ async clear() {
966
+ const db = await this.getDB();
967
+ await db.clear('vectors');
968
+ console.log('[CyberneticOfflineStorage] Cache cleared');
969
+ }
970
+ /**
971
+ * Get the database connection
972
+ */
973
+ async getDB() {
974
+ if (this.db) {
975
+ return this.db;
976
+ }
977
+ return this.initDB();
978
+ }
979
+ /**
980
+ * Close database connection
981
+ */
982
+ close() {
983
+ if (this.db) {
984
+ this.db.close();
985
+ this.db = null;
986
+ this.dbPromise = null;
987
+ }
988
+ }
989
+ }
990
+
803
991
  // src/license/base64url.ts
804
992
  // Base64URL encoding/decoding utilities for JWT handling
805
993
  /**
@@ -1443,6 +1631,12 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1443
1631
  this.SETTINGS_CHECK_INTERVAL = 300000; // 5 minutes
1444
1632
  // Agentic capabilities (optional, registered separately via registerAgenticCapabilities)
1445
1633
  this.agenticCapabilities = null;
1634
+ // Offline vector storage
1635
+ this.offlineStorage = null;
1636
+ // Omega RAG (optional, loaded dynamically)
1637
+ this.omegaRAG = null;
1638
+ // Track if offline warning has been shown
1639
+ this.offlineWarningShown = false;
1446
1640
  // Apply defaults
1447
1641
  this.config = {
1448
1642
  apiUrl: config.apiUrl,
@@ -1459,6 +1653,15 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1459
1653
  exponentialBackoff: config.retry?.exponentialBackoff ?? true
1460
1654
  },
1461
1655
  agentic: config.agentic ?? null,
1656
+ offline: config.offline ?? null,
1657
+ sitemap: config.sitemap ?? null,
1658
+ sources: {
1659
+ enabled: config.sources?.enabled ?? true,
1660
+ showSummary: config.sources?.showSummary ?? true,
1661
+ showDownload: config.sources?.showDownload ?? true,
1662
+ includeFullContent: config.sources?.includeFullContent ?? false,
1663
+ maxSources: config.sources?.maxSources ?? 5
1664
+ },
1462
1665
  onStatusChange: config.onStatusChange || (() => { }),
1463
1666
  onError: config.onError || (() => { })
1464
1667
  };
@@ -1488,6 +1691,12 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1488
1691
  // Silent failure on init - cache may be stale but usable
1489
1692
  });
1490
1693
  }
1694
+ // Initialize offline vector support if configured
1695
+ if (this.config.offline?.enabled) {
1696
+ this.initializeOffline(this.config.offline).catch((error) => {
1697
+ console.warn('[Cybernetic] Failed to initialize offline vectors:', error);
1698
+ });
1699
+ }
1491
1700
  }
1492
1701
  // ==================== AGENTIC CAPABILITIES ====================
1493
1702
  /**
@@ -1528,6 +1737,34 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1528
1737
  getAgenticConfig() {
1529
1738
  return this.config.agentic;
1530
1739
  }
1740
+ // ==================== SOURCES CONFIGURATION ====================
1741
+ /**
1742
+ * Get sources configuration
1743
+ * Returns merged configuration with all defaults applied
1744
+ */
1745
+ getSourcesConfig() {
1746
+ return { ...this.config.sources };
1747
+ }
1748
+ /**
1749
+ * Check if a specific source feature is available
1750
+ * Takes into account configuration, connection status, and requirements
1751
+ *
1752
+ * @param feature - The feature to check ('summary' or 'download')
1753
+ * @returns Whether the feature is available
1754
+ */
1755
+ isSourceFeatureAvailable(feature) {
1756
+ if (!this.config.sources.enabled) {
1757
+ return false;
1758
+ }
1759
+ if (feature === 'summary') {
1760
+ return this.config.sources.showSummary;
1761
+ }
1762
+ if (feature === 'download') {
1763
+ // Download only works when online
1764
+ return this.config.sources.showDownload && this.status === 'online';
1765
+ }
1766
+ return false;
1767
+ }
1531
1768
  /**
1532
1769
  * Classify user message intent for agentic action
1533
1770
  * Only works if agentic capabilities are registered and enabled
@@ -1592,6 +1829,148 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1592
1829
  const response = await this.ask(message, options);
1593
1830
  return { response };
1594
1831
  }
1832
+ // ==================== OFFLINE VECTOR METHODS ====================
1833
+ /**
1834
+ * Initialize offline vector support
1835
+ * Loads pre-exported vectors from URL or inline data
1836
+ */
1837
+ async initializeOffline(config) {
1838
+ const storageMode = config.storageMode ?? 'hybrid';
1839
+ const maxAge = config.cacheMaxAge ?? 604800000; // 7 days default
1840
+ // Initialize IndexedDB storage if not memory-only
1841
+ if (storageMode !== 'memory') {
1842
+ this.offlineStorage = new CyberneticOfflineStorage();
1843
+ }
1844
+ // Check for cached vectors first
1845
+ if (this.offlineStorage && storageMode !== 'memory') {
1846
+ const cacheId = 'default';
1847
+ if (await this.offlineStorage.isValid(cacheId, maxAge)) {
1848
+ console.log('[Cybernetic] Loading vectors from IndexedDB cache');
1849
+ const cached = await this.offlineStorage.retrieve(cacheId);
1850
+ if (cached) {
1851
+ await this.localRAG.loadFromExport(cached);
1852
+ console.log('[Cybernetic] Loaded vectors from IndexedDB cache');
1853
+ return;
1854
+ }
1855
+ }
1856
+ }
1857
+ // Try to load from URL
1858
+ if (config.vectorFileUrl) {
1859
+ try {
1860
+ const response = await fetch(config.vectorFileUrl);
1861
+ if (!response.ok) {
1862
+ throw new Error(`Failed to fetch vectors: ${response.status}`);
1863
+ }
1864
+ const exportData = await response.json();
1865
+ // Store in IndexedDB for future use
1866
+ if (this.offlineStorage && storageMode !== 'memory') {
1867
+ await this.offlineStorage.store('default', exportData);
1868
+ }
1869
+ // Load into local RAG
1870
+ await this.localRAG.loadFromExport(exportData);
1871
+ // Initialize Omega if configured
1872
+ if (config.omega?.enabled) {
1873
+ await this.initializeOmega(config);
1874
+ }
1875
+ console.log('[Cybernetic] Loaded vectors from URL:', config.vectorFileUrl);
1876
+ return;
1877
+ }
1878
+ catch (error) {
1879
+ console.error('[Cybernetic] Failed to load offline vectors from URL:', error);
1880
+ }
1881
+ }
1882
+ // Try inline vector data
1883
+ if (config.vectorData) {
1884
+ await this.localRAG.loadFromExport(config.vectorData);
1885
+ // Initialize Omega if configured
1886
+ if (config.omega?.enabled) {
1887
+ await this.initializeOmega(config);
1888
+ }
1889
+ console.log('[Cybernetic] Loaded inline vector data');
1890
+ return;
1891
+ }
1892
+ // No vectors available - log warning (one-time)
1893
+ if (!this.offlineWarningShown) {
1894
+ console.warn('[Cybernetic] Offline functionality is enabled but no vectors are loaded. ' +
1895
+ 'Offline responses will be limited. To enable full offline functionality, ' +
1896
+ 'export vectors from the AsterMind admin panel and configure the vectorFileUrl ' +
1897
+ 'or vectorData option.');
1898
+ this.offlineWarningShown = true;
1899
+ }
1900
+ }
1901
+ /**
1902
+ * Initialize Omega RAG module
1903
+ */
1904
+ async initializeOmega(config) {
1905
+ if (!config.omega?.enabled)
1906
+ return;
1907
+ try {
1908
+ // Dynamic import to support tree-shaking
1909
+ const { OmegaOfflineRAG } = await Promise.resolve().then(function () { return OmegaOfflineRAG$1; });
1910
+ this.omegaRAG = new OmegaOfflineRAG({
1911
+ reranking: config.omega.reranking,
1912
+ topK: 5,
1913
+ verbose: false
1914
+ });
1915
+ // Load model from URL or inline data
1916
+ if (config.omega.modelUrl) {
1917
+ await this.omegaRAG.loadModelFromUrl(config.omega.modelUrl);
1918
+ }
1919
+ else if (config.omega.modelData) {
1920
+ // Convert our type to the astermind type
1921
+ await this.omegaRAG.loadModel(config.omega.modelData);
1922
+ }
1923
+ console.log('[Cybernetic] Omega RAG initialized');
1924
+ }
1925
+ catch (error) {
1926
+ console.warn('[Cybernetic] Failed to initialize Omega RAG:', error);
1927
+ this.omegaRAG = null;
1928
+ }
1929
+ }
1930
+ /**
1931
+ * Manually reload offline vectors
1932
+ * Clears cache and re-fetches from configured source
1933
+ */
1934
+ async reloadOfflineVectors() {
1935
+ if (!this.config.offline?.enabled) {
1936
+ console.warn('[Cybernetic] Offline mode is not enabled');
1937
+ return;
1938
+ }
1939
+ // Clear existing data
1940
+ this.localRAG.reset();
1941
+ if (this.offlineStorage) {
1942
+ await this.offlineStorage.clear();
1943
+ }
1944
+ if (this.omegaRAG) {
1945
+ this.omegaRAG.clear();
1946
+ }
1947
+ // Re-initialize
1948
+ await this.initializeOffline(this.config.offline);
1949
+ }
1950
+ /**
1951
+ * Check if Omega offline RAG is enabled and ready
1952
+ */
1953
+ isOmegaOfflineEnabled() {
1954
+ return this.omegaRAG?.isReady() ?? false;
1955
+ }
1956
+ /**
1957
+ * Get offline model info (if Omega is loaded)
1958
+ */
1959
+ getOfflineModelInfo() {
1960
+ return this.omegaRAG?.getModelInfo() ?? null;
1961
+ }
1962
+ /**
1963
+ * Get local RAG export status
1964
+ */
1965
+ getLocalRAGStatus() {
1966
+ return this.localRAG.getExportStatus();
1967
+ }
1968
+ /**
1969
+ * Get offline storage instance for advanced operations
1970
+ */
1971
+ getOfflineStorage() {
1972
+ return this.offlineStorage;
1973
+ }
1595
1974
  // ==================== CORE METHODS ====================
1596
1975
  /**
1597
1976
  * Send a message to the chatbot
@@ -1870,18 +2249,79 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
1870
2249
  }
1871
2250
  /**
1872
2251
  * Fallback to local RAG processing
2252
+ * Uses Omega RAG if available, falls back to simple TF-IDF
1873
2253
  */
1874
2254
  async fallbackAsk(message) {
1875
2255
  this.setStatus('offline');
1876
- // Check if we have cached documents
2256
+ // Try Omega RAG first if available
2257
+ if (this.omegaRAG?.isReady()) {
2258
+ try {
2259
+ const omegaResult = await this.omegaRAG.ask(message);
2260
+ if (!omegaResult.degraded) {
2261
+ // Process response through license manager
2262
+ const processedReply = this.licenseManager.processResponse(omegaResult.answer);
2263
+ return {
2264
+ reply: processedReply,
2265
+ confidence: omegaResult.confidence,
2266
+ sources: omegaResult.sources.map(s => ({
2267
+ title: s.heading,
2268
+ snippet: s.content.substring(0, 200) + '...',
2269
+ relevance: s.score
2270
+ })),
2271
+ offline: true,
2272
+ degradedReason: 'Omega hybrid retrieval (offline)'
2273
+ };
2274
+ }
2275
+ // If Omega degraded, fall through to simple RAG
2276
+ }
2277
+ catch (error) {
2278
+ console.warn('[Cybernetic] Omega RAG failed, falling back to TF-IDF:', error);
2279
+ // Fall through to simple RAG
2280
+ }
2281
+ }
2282
+ // Check if local RAG has pre-loaded vectors from export
2283
+ const localRAGStatus = this.localRAG.getExportStatus();
2284
+ if (localRAGStatus.loaded && localRAGStatus.documentCount > 0) {
2285
+ try {
2286
+ const result = await this.localRAG.ask(message);
2287
+ // Determine confidence based on match quality
2288
+ let confidence = 'medium';
2289
+ if (result.topScore < 0.3) {
2290
+ confidence = 'low';
2291
+ }
2292
+ else if (result.topScore > 0.7) {
2293
+ confidence = 'medium'; // Never 'high' for offline
2294
+ }
2295
+ // Process response through license manager
2296
+ const processedReply = this.licenseManager.processResponse(result.answer);
2297
+ return {
2298
+ reply: processedReply,
2299
+ confidence,
2300
+ sources: result.sources.map(s => ({
2301
+ title: s.title,
2302
+ snippet: s.snippet,
2303
+ relevance: s.score
2304
+ })),
2305
+ offline: true,
2306
+ degradedReason: localRAGStatus.loadedFromExport
2307
+ ? 'Using pre-exported vectors (offline)'
2308
+ : 'Processed locally with TF-IDF'
2309
+ };
2310
+ }
2311
+ catch (error) {
2312
+ console.error('[Cybernetic] Local RAG error:', error);
2313
+ // Fall through to cache-based fallback
2314
+ }
2315
+ }
2316
+ // Check if we have cached documents (original sync-based fallback)
1877
2317
  const cacheStatus = this.cache.getStatus();
1878
- if (cacheStatus.documentCount === 0) {
2318
+ if (cacheStatus.documentCount === 0 && !localRAGStatus.loaded) {
1879
2319
  return {
1880
2320
  reply: 'I\'m currently offline and don\'t have any cached information. Please check your connection and try again.',
1881
2321
  confidence: 'none',
1882
2322
  sources: [],
1883
2323
  offline: true,
1884
- degradedReason: 'No cached documents available'
2324
+ degradedReason: 'No cached documents or offline vectors available'
1885
2325
  };
1886
2326
  }
1887
2327
  try {
@@ -2236,6 +2676,8 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
2236
2676
  */
2237
2677
  class CyberneticIntentClassifier {
2238
2678
  constructor(config) {
2679
+ this.categories = [];
2680
+ this.topicKeywords = new Map(); // topic -> keywords
2239
2681
  this.config = config;
2240
2682
  this.siteMapIndex = new Map();
2241
2683
  this.formIndex = new Map();
@@ -2756,6 +3198,174 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
2756
3198
  getConfig() {
2757
3199
  return this.config;
2758
3200
  }
3201
+ // ==================== EXPORT/SITEMAP LOADING METHODS ====================
3202
+ /**
3203
+ * Train classifier from exported documents
3204
+ * Extracts topics and keywords for better intent classification
3205
+ *
3206
+ * @param exportData - The offline vector export data
3207
+ */
3208
+ async trainFromExport(exportData) {
3209
+ // Load explicit categories if provided
3210
+ if (exportData.categories) {
3211
+ this.categories = exportData.categories;
3212
+ for (const category of exportData.categories) {
3213
+ // Index category keywords for matching
3214
+ for (const keyword of category.keywords) {
3215
+ const existing = this.topicKeywords.get(category.name) || [];
3216
+ existing.push(keyword.toLowerCase());
3217
+ this.topicKeywords.set(category.name, existing);
3218
+ }
3219
+ }
3220
+ }
3221
+ // Extract topics from documents
3222
+ const docTopics = this.extractTopicsFromDocuments(exportData.documents);
3223
+ for (const [topic, keywords] of docTopics) {
3224
+ const existing = this.topicKeywords.get(topic) || [];
3225
+ existing.push(...keywords);
3226
+ this.topicKeywords.set(topic, [...new Set(existing)]); // Dedupe
3227
+ }
3228
+ // Load sitemap from export if available
3229
+ if (exportData.sitemap) {
3230
+ this.loadSitemapEntries(exportData.sitemap);
3231
+ }
3232
+ console.log(`[CyberneticIntentClassifier] Trained with ${this.categories.length} categories, ${this.topicKeywords.size} topics`);
3233
+ }
3234
+ /**
3235
+ * Load sitemap from configuration
3236
+ *
3237
+ * @param config - Sitemap configuration with URL or inline data
3238
+ */
3239
+ async loadSitemap(config) {
3240
+ if (!config.enabled)
3241
+ return;
3242
+ // Load from inline data
3243
+ if (config.sitemapData) {
3244
+ this.loadSitemapEntries(config.sitemapData);
3245
+ return;
3246
+ }
3247
+ // Load from URL
3248
+ if (config.sitemapUrl) {
3249
+ try {
3250
+ const response = await fetch(config.sitemapUrl);
3251
+ if (!response.ok) {
3252
+ throw new Error(`Failed to fetch sitemap: ${response.status}`);
3253
+ }
3254
+ const data = await response.json();
3255
+ this.loadSitemapEntries(data);
3256
+ }
3257
+ catch (error) {
3258
+ console.error('[CyberneticIntentClassifier] Failed to load sitemap:', error);
3259
+ }
3260
+ }
3261
+ }
3262
+ /**
3263
+ * Load sitemap entries into the index
3264
+ */
3265
+ loadSitemapEntries(entries) {
3266
+ for (const entry of entries) {
3267
+ // Convert GlobalSiteMapEntry to agentic SiteMapEntry format
3268
+ const siteMapEntry = {
3269
+ path: entry.url,
3270
+ name: entry.title,
3271
+ description: entry.description,
3272
+ aliases: entry.keywords
3273
+ };
3274
+ // Index by title, URL, description, and keywords
3275
+ const keys = [
3276
+ entry.title.toLowerCase(),
3277
+ entry.url.toLowerCase(),
3278
+ ...(entry.keywords || []).map(k => k.toLowerCase())
3279
+ ];
3280
+ if (entry.description) {
3281
+ // Also index significant words from description
3282
+ const descWords = entry.description
3283
+ .toLowerCase()
3284
+ .split(/\s+/)
3285
+ .filter(w => w.length > 4);
3286
+ keys.push(...descWords);
3287
+ }
3288
+ for (const key of keys) {
3289
+ this.siteMapIndex.set(key, siteMapEntry);
3290
+ }
3291
+ }
3292
+ console.log(`[CyberneticIntentClassifier] Loaded ${entries.length} sitemap entries`);
3293
+ }
3294
+ /**
3295
+ * Extract topics and keywords from document content
3296
+ */
3297
+ extractTopicsFromDocuments(docs) {
3298
+ const topics = new Map();
3299
+ for (const doc of docs) {
3300
+ // Use title as topic
3301
+ const topic = doc.title.toLowerCase();
3302
+ const keywords = [];
3303
+ // Extract keywords from metadata
3304
+ if (doc.metadata?.keywords) {
3305
+ keywords.push(...doc.metadata.keywords.map(k => k.toLowerCase()));
3306
+ }
3307
+ // Extract significant words from title
3308
+ const titleWords = doc.title
3309
+ .toLowerCase()
3310
+ .replace(/[^a-z0-9\s]/g, '')
3311
+ .split(/\s+/)
3312
+ .filter(w => w.length > 3 && !this.isStopWord(w));
3313
+ keywords.push(...titleWords);
3314
+ // Extract frequent significant words from content (simple TF)
3315
+ const contentWords = doc.content
3316
+ .toLowerCase()
3317
+ .replace(/[^a-z0-9\s]/g, '')
3318
+ .split(/\s+/)
3319
+ .filter(w => w.length > 4 && !this.isStopWord(w));
3320
+ const wordFreq = new Map();
3321
+ for (const word of contentWords) {
3322
+ wordFreq.set(word, (wordFreq.get(word) || 0) + 1);
3323
+ }
3324
+ // Get top 5 most frequent words
3325
+ const topWords = [...wordFreq.entries()]
3326
+ .sort((a, b) => b[1] - a[1])
3327
+ .slice(0, 5)
3328
+ .map(([word]) => word);
3329
+ keywords.push(...topWords);
3330
+ topics.set(topic, [...new Set(keywords)]); // Dedupe
3331
+ }
3332
+ return topics;
3333
+ }
3334
+ /**
3335
+ * Check if word is a stop word
3336
+ */
3337
+ isStopWord(word) {
3338
+ const stopWords = new Set([
3339
+ 'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on', 'at', 'to', 'for',
3340
+ 'of', 'with', 'by', 'from', 'up', 'about', 'into', 'through', 'during',
3341
+ 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
3342
+ 'do', 'does', 'did', 'will', 'would', 'could', 'should', 'may', 'might',
3343
+ 'this', 'that', 'these', 'those', 'it', 'its', 'they', 'them', 'their',
3344
+ 'what', 'which', 'who', 'whom', 'when', 'where', 'why', 'how',
3345
+ 'all', 'each', 'every', 'both', 'few', 'more', 'most', 'other', 'some',
3346
+ 'such', 'no', 'not', 'only', 'same', 'so', 'than', 'too', 'very',
3347
+ 'can', 'just', 'dont', 'now', 'here', 'there', 'also', 'your', 'you'
3348
+ ]);
3349
+ return stopWords.has(word);
3350
+ }
3351
+ /**
3352
+ * Get loaded categories
3353
+ */
3354
+ getCategories() {
3355
+ return this.categories;
3356
+ }
3357
+ /**
3358
+ * Get topic keywords
3359
+ */
3360
+ getTopicKeywords() {
3361
+ return this.topicKeywords;
3362
+ }
3363
+ /**
3364
+ * Check if classifier has been trained
3365
+ */
3366
+ isTrained() {
3367
+ return this.categories.length > 0 || this.topicKeywords.size > 0;
3368
+ }
2759
3369
  }
2760
3370
 
2761
3371
  // src/agentic/CyberneticAgent.ts
@@ -3392,6 +4002,1394 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
3392
4002
  client.registerAgentic(capabilities);
3393
4003
  }
3394
4004
 
4005
// © 2026 AsterMind AI Co. – All Rights Reserved.
// Patent Pending US 63/897,713
// ELMConfig.ts - Configuration interfaces, defaults, helpers for ELM-based models
/* =========== Defaults =========== */
// Shared baseline configuration for the ELM-style models in this bundle.
const defaultBase = {
    hiddenUnits: 50,
    activation: 'relu',
    ridgeLambda: 1e-2,
    weightInit: 'xavier',
    seed: 1337,
    dropout: 0,
    log: { verbose: true, toFile: false, modelName: 'Unnamed ELM Model', level: 'info' },
};
// Compiled remnants of the TS default-config objects: the merged objects are
// built and immediately discarded (their named bindings were tree-shaken).
({ ...defaultBase, useTokenizer: false });
({ ...defaultBase, useTokenizer: true, maxLen: 30, charSet: 'abcdefghijklmnopqrstuvwxyz', tokenizerDelimiter: /\s+/ });
4020
+
4021
/**
 * Build a Random Fourier Features (RFF) projection for an RBF kernel.
 * @param {number} d - input dimensionality
 * @param {number} D - number of random frequencies (features are 2*D wide)
 * @param {number} [sigma=1.0] - bandwidth; weights are drawn N(0, 1/sigma^2)
 * @param {() => number} [rng=Math.random] - uniform [0,1) source
 * @returns {{W: Float64Array, b: Float64Array, D: number, d: number, sigma: number}}
 */
function buildRFF(d, D, sigma = 1.0, rng = Math.random) {
    const invSigma = 1 / Math.max(1e-12, sigma); // N(0, 1/sigma^2)
    const W = Float64Array.from({ length: D * d }, () => gauss$1(rng) * invSigma);
    const b = Float64Array.from({ length: D }, () => rng() * 2 * Math.PI);
    return { W, b, D, d, sigma };
}
4031
/**
 * Map an input vector through the RFF feature space: [cos(Wx+b), sin(Wx+b)],
 * then L2-normalize the 2*D block so downstream ridge stays well-conditioned.
 * @param {{W: Float64Array, b: Float64Array, D: number, d: number}} rff
 * @param {ArrayLike<number>} x - input vector (missing entries treated as 0)
 * @returns {Float64Array} normalized feature vector of length 2*D
 */
function mapRFF(rff, x) {
    const { W, b, D, d } = rff;
    const z = new Float64Array(2 * D);
    for (let k = 0; k < D; k++) {
        const base = k * d;
        let phase = b[k];
        for (let j = 0; j < d; j++) {
            phase += W[base + j] * (x[j] || 0);
        }
        z[k] = Math.cos(phase);
        z[D + k] = Math.sin(phase);
    }
    // L2 normalize the block (guarded against zero norm)
    let sumSq = 0;
    for (let i = 0; i < z.length; i++) {
        sumSq += z[i] * z[i];
    }
    const scale = 1 / Math.sqrt(Math.max(sumSq, 1e-12));
    for (let i = 0; i < z.length; i++) {
        z[i] *= scale;
    }
    return z;
}
4051
// Box-Muller transform: turn two uniform (0,1] draws into one standard normal.
function gauss$1(rng) {
    let u = 0;
    let v = 0;
    do {
        u = rng();
    } while (u === 0);
    do {
        v = rng();
    } while (v === 0);
    return Math.sqrt(-2 * Math.log(u)) * Math.cos(2 * Math.PI * v);
}
4060
+
4061
// online_ridge.ts — maintain (Φ^T Φ + λI)^{-1} and β for linear ridge
// via the Sherman–Morrison / recursive-least-squares rank-1 update.
class OnlineRidge {
    /**
     * @param {number} p - feature dimension
     * @param {number} m - output dimension
     * @param {number} [lambda=1e-4] - ridge regularizer λ
     */
    constructor(p, m, lambda = 1e-4) {
        this.p = p;
        this.m = m;
        this.lambda = lambda;
        this.Ainv = new Float64Array(p * p);
        this.Beta = new Float64Array(p * m);
        // Ainv = (λ I)^-1 = (1/λ) I
        const inv = 1 / Math.max(1e-12, lambda);
        for (let i = 0; i < p; i++)
            this.Ainv[i * p + i] = inv;
    }
    /**
     * Rank-1 update with a single sample (φ, y).
     *
     * Sherman–Morrison keeps Ainv = (Φ^T Φ + λI)^{-1} exact, and the RLS
     * identity β' = β + A'^{-1} φ (y − φ^T β)^T keeps Beta equal to the
     * batch ridge solution. The residual term (y − φ^T β) is essential:
     * adding A'^{-1} φ y^T alone drifts away from the closed-form ridge
     * solution as soon as samples overlap in feature space.
     */
    update(phi, y) {
        const { p, m, Ainv, Beta } = this;
        // u = Ainv * phi
        const u = new Float64Array(p);
        for (let i = 0; i < p; i++) {
            let s = 0, row = i * p;
            for (let j = 0; j < p; j++)
                s += Ainv[row + j] * phi[j];
            u[i] = s;
        }
        // denom = 1 + phi^T u
        let denom = 1;
        for (let j = 0; j < p; j++)
            denom += phi[j] * u[j];
        denom = Math.max(denom, 1e-12);
        const scale = 1 / denom;
        // residual r = y − φ^T β (must be taken BEFORE Beta changes)
        const r = new Float64Array(m);
        for (let c = 0; c < m; c++) {
            let s = 0;
            for (let i = 0; i < p; i++)
                s += phi[i] * Beta[i * m + c];
            r[c] = y[c] - s;
        }
        // Ainv <- Ainv - (u u^T) * scale
        for (let i = 0; i < p; i++) {
            const ui = u[i] * scale;
            for (let j = 0; j < p; j++)
                Ainv[i * p + j] -= ui * u[j];
        }
        // t = A'^{-1} φ equals u * scale by Sherman–Morrison (no extra
        // matrix-vector product needed). Beta += outer(t, r).
        for (let i = 0; i < p; i++) {
            const ti = u[i] * scale;
            for (let c = 0; c < m; c++)
                Beta[i * m + c] += ti * r[c];
        }
    }
    // yhat = φ^T Beta
    predict(phi) {
        const { p, m, Beta } = this;
        const out = new Float64Array(m);
        for (let c = 0; c < m; c++) {
            let s = 0;
            for (let i = 0; i < p; i++)
                s += phi[i] * Beta[i * m + c];
            out[c] = s;
        }
        return out;
    }
}
4125
+
4126
// src/math/index.ts — production-grade numerics for Ω
// Backward compatible with previous exports; adds robust, stable helpers.
// ---------- Constants
// Shared floor used to guard divisions and square roots against 0 / NaN.
const EPS = 1e-12; // general epsilon for divides/sqrt
// ---------- Norms / normalization
4131
/**
 * Robust Euclidean (L2) norm; the Math.max(0, ·) guard keeps sqrt from
 * producing NaN if rounding ever pushes the accumulator negative.
 */
function l2$1(a) {
    let sumSq = 0;
    for (let i = 0; i < a.length; i++) {
        sumSq += a[i] * a[i];
    }
    return Math.sqrt(Math.max(0, sumSq));
}
4138
/**
 * Return a / ||a||2 as a fresh Float64Array; degenerate inputs (zero,
 * sub-epsilon, or non-finite norm) yield an all-zero vector instead of NaN.
 */
function normalizeL2(a, eps = EPS) {
    const out = new Float64Array(a.length);
    const norm = l2$1(a);
    if (!(norm > eps) || !Number.isFinite(norm)) {
        return out; // zero vector
    }
    const inv = 1 / norm;
    for (let i = 0; i < a.length; i++) {
        out[i] = a[i] * inv;
    }
    return out;
}
4148
// ---------- Cosine (robust)
/**
 * Cosine similarity over the overlapping prefix of a and b.
 * Null/undefined entries count as 0; degenerate inputs return 0
 * rather than NaN or Infinity.
 */
function cosine$2(a, b) {
    const n = Math.min(a.length, b.length);
    if (n === 0)
        return 0;
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < n; i++) {
        const x = a[i] == null ? 0 : a[i];
        const y = b[i] == null ? 0 : b[i];
        dot += x * y;
        normA += x * x;
        normB += y * y;
    }
    const denom = Math.sqrt(Math.max(normA * normB, EPS));
    const result = dot / denom;
    return Number.isFinite(result) ? result : 0;
}
4165
+
4166
+ /******************************************************************************
4167
+ Copyright (c) Microsoft Corporation.
4168
+
4169
+ Permission to use, copy, modify, and/or distribute this software for any
4170
+ purpose with or without fee is hereby granted.
4171
+
4172
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
4173
+ REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
4174
+ AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
4175
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
4176
+ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
4177
+ OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
4178
+ PERFORMANCE OF THIS SOFTWARE.
4179
+ ***************************************************************************** */
4180
+ /* global Reflect, Promise, SuppressedError, Symbol, Iterator */
4181
+
4182
+
4183
/**
 * tslib helper (vendored compiler output): run a generator-based coroutine
 * and surface it as a Promise — the pre-`async/await` desugaring emitted by
 * the TypeScript compiler for downlevel targets.
 * @param thisArg  `this` binding for the generator body
 * @param _arguments  arguments object forwarded to the generator
 * @param P  Promise constructor to use (defaults to global Promise)
 * @param generator  generator function implementing the async body
 */
function __awaiter(thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Drive the generator: settle on `done`, otherwise await the yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
}
4192
+
4193
// tslib helper (vendored compiler output): pick the native SuppressedError
// when the runtime provides it, otherwise build a minimal stand-in.
// NOTE(review): the bundler dropped the binding, so this expression's value
// is discarded here — it only matters in builds that assign it to a local.
typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
};
4197
+
4198
// Omega.ts v2 — improved local reasoning + summarization
// uses your math.ts, rff.ts, online_ridge.ts
// -------- sentence + text helpers ----------
/**
 * Split free text into trimmed sentences, dropping fragments that are too
 * short (<= 8 chars) or contain no word characters.
 */
function splitSentences$1(text) {
    const flattened = text.replace(/\s+/g, " ");
    const parts = flattened.split(/(?<=[.?!])\s+/);
    const sentences = [];
    for (const part of parts) {
        const candidate = part.trim();
        if (candidate.length > 8 && /\w/.test(candidate)) {
            sentences.push(candidate);
        }
    }
    return sentences;
}
4208
/**
 * Strip code fences, inline code, markdown links, and list/arrow
 * punctuation, then collapse runs of whitespace.
 */
function clean(text) {
    let out = text.replace(/```[\s\S]*?```/g, " ");
    out = out.replace(/`[^`]+`/g, " ");
    out = out.replace(/\[[^\]]*\]\([^)]*\)/g, ""); // strip markdown links
    out = out.replace(/[-–>•→]/g, " ");
    out = out.replace(/\s+/g, " ");
    return out.trim();
}
4217
/**
 * Heuristic filter for table-of-contents / heading-like lines that should
 * not appear in a synthesized answer.
 */
function isMetaSentence(s) {
    if (/^(\*|#)/.test(s)) return true;         // markdown markers
    if (/chapter/i.test(s)) return true;        // "Chapter 11", "Chapters 11–15"
    if (/part\s*\d+/i.test(s)) return true;     // "Part 3"
    if (/section/i.test(s)) return true;        // "Section 2.3"
    if (/^\s*[A-Z]\)\s*$/.test(s)) return true; // single-letter outlines
    return s.length < 15;                       // very short stray lines
}
4227
/**
 * Light post-processing of a synthesized summary: drop stray arrows and
 * bullets, tighten punctuation and hyphenation, collapse runs of spaces.
 */
function rewrite(summary) {
    const fixes = [
        [/\s+[-–>•→]\s+/g, " "],
        [/\s+\.\s+/g, ". "],
        [/([a-z]) - ([a-z])/gi, "$1-$2"],
        [/\s{2,}/g, " "],
    ];
    let out = summary;
    for (const [pattern, replacement] of fixes) {
        out = out.replace(pattern, replacement);
    }
    return out.trim();
}
4235
// ------------------------------------------------------------
/**
 * Compose a natural-language answer from retrieved items, fully locally:
 * sentences are scored against the question via a Random-Fourier-Features
 * embedding + cosine similarity, then recursively compressed toward a
 * query-weighted centroid. Compiled output of
 * `async omegaComposeAnswer(question, items, opts = {})`; returns a
 * Promise<string> via the __awaiter coroutine wrapper.
 */
function omegaComposeAnswer(question_1, items_1) {
    return __awaiter(this, arguments, void 0, function* (question, items, opts = {}) {
        // License check removed // Premium feature - requires valid license
        if (!(items === null || items === void 0 ? void 0 : items.length))
            return "No results found.";
        const { dim = 64, features = 32, sigma = 1.0, rounds = 3, topSentences = 8, personality = "neutral", } = opts;
        // ---------- 1. Clean + collect sentences ----------
        const allText = items.map((i) => clean(i.content)).join(" ");
        let sentences = splitSentences$1(allText)
            .filter(s => !isMetaSentence(s))
            .slice(0, 120);
        // Fallback: nothing sentence-like survived — return raw cleaned text.
        if (sentences.length === 0)
            return clean(items[0].content).slice(0, 400);
        // ---------- 2. Build encoder + ridge ----------
        const rff = buildRFF(dim, features, sigma);
        const ridge = new OnlineRidge(2 * features, 1, 1e-3);
        // Cheap character-code embedding: first `dim` chars scaled to [0,1],
        // L2-normalized, then lifted through the RFF map.
        const encode = (s) => {
            const vec = new Float64Array(dim);
            const len = Math.min(s.length, dim);
            for (let i = 0; i < len; i++)
                vec[i] = s.charCodeAt(i) / 255;
            return mapRFF(rff, normalizeL2(vec));
        };
        const qVec = encode(question);
        const qTokens = question.toLowerCase().split(/\W+/).filter((t) => t.length > 2);
        // ---------- 3. Score + select top sentences ----------
        const scored = sentences.map((s) => {
            const v = encode(s);
            let w = cosine$2(v, qVec);
            // small lexical bonus for overlapping words
            const lower = s.toLowerCase();
            for (const t of qTokens)
                if (lower.includes(t))
                    w += 0.02;
            return { s, v, w };
        });
        scored.sort((a, b) => b.w - a.w);
        let top = scored.slice(0, topSentences);
        // ---------- 4. Recursive compression ----------
        let summary = top.map((t) => t.s).join(" ");
        let meanVec = new Float64Array(2 * features);
        for (let r = 0; r < rounds; r++) {
            const subs = splitSentences$1(summary).slice(0, topSentences);
            const embeds = subs.map((s) => encode(s));
            const weights = embeds.map((v) => cosine$2(v, qVec));
            // NOTE(review): the online ridge is trained here but its
            // predictions are never consumed below — appears vestigial.
            for (let i = 0; i < embeds.length; i++) {
                ridge.update(embeds[i], new Float64Array([weights[i]]));
            }
            // weighted mean vector (query-weighted centroid of survivors)
            meanVec.fill(0);
            for (let i = 0; i < embeds.length; i++) {
                const v = embeds[i], w = weights[i];
                for (let j = 0; j < v.length; j++)
                    meanVec[j] += v[j] * w;
            }
            const norm = l2$1(meanVec) || 1;
            for (let j = 0; j < meanVec.length; j++)
                meanVec[j] /= norm;
            // Re-rank against the centroid and keep roughly half each round.
            const rescored = subs.map((s) => ({
                s,
                w: cosine$2(encode(s), meanVec),
            }));
            rescored.sort((a, b) => b.w - a.w);
            summary = rescored
                .slice(0, Math.max(3, Math.floor(topSentences / 2)))
                .map((r) => r.s)
                .join(" ");
        }
        // ---------- 5. Compose readable answer ----------
        summary = rewrite(summary);
        const firstChar = summary.charAt(0).toUpperCase() + summary.slice(1);
        const title = items[0].heading || "Answer";
        const prefix = personality === "teacher"
            ? "Here’s a simple way to think about it:\n\n"
            : personality === "scientist"
                ? "From the retrieved material, we can infer:\n\n"
                : "";
        return `${prefix}${firstChar}\n\n(${title}, Ω-synthesized)`;
    });
}
4316
+
4317
// Vectorization utilities for sparse and dense vectors
// Extracted from workers for reuse
/**
 * Compute a TF-IDF sparse vector (term id -> weight) from a token list.
 * Tokens outside the vocabulary are ignored; the first 8 token positions
 * get `headingW` weight as a crude "heading" boost. TF is the augmented
 * frequency 0.5 + 0.5 * (count / maxCount).
 * @param {string[]} tokens
 * @param {number[]} idf - idf weight per term id
 * @param {Map<string, number>} vmap - vocabulary term -> id
 * @param {number} [headingW=1] - weight multiplier for leading tokens
 * @returns {Map<number, number>}
 */
function toTfidf(tokens, idf, vmap, headingW = 1) {
    const counts = new Map();
    // crude heuristic: first 8 tokens considered heading-weighted
    tokens.forEach((token, position) => {
        const id = vmap.get(token);
        if (id === undefined) {
            return;
        }
        const weight = position < 8 ? headingW : 1;
        counts.set(id, (counts.get(id) || 0) + weight);
    });
    const maxTf = Math.max(1, ...counts.values());
    const vec = new Map();
    for (const [id, count] of counts) {
        const tf = 0.5 + 0.5 * (count / maxTf);
        vec.set(id, tf * (idf[id] || 0));
    }
    return vec;
}
4341
/**
 * Cosine similarity between two sparse vectors (Map index -> value).
 * Returns 0 when either vector has zero norm.
 */
function cosineSparse(a, b) {
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (const [idx, av] of a) {
        normA += av * av;
        const bv = b.get(idx);
        if (bv)
            dot += av * bv;
    }
    for (const bv of b.values())
        normB += bv * bv;
    if (!normA || !normB)
        return 0;
    return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}
4358
/**
 * Convert a sparse vector (Map index -> value) to a dense Float64Array of
 * the given dimensionality; unspecified entries stay 0.
 */
function sparseToDense(v, dim) {
    const dense = new Float64Array(dim);
    for (const [idx, value] of v.entries())
        dense[idx] = value;
    return dense;
}
4367
/**
 * Dot product of two dense vectors; iterates over a's length, so b is
 * assumed at least as long.
 */
function dotProd$1(a, b) {
    let acc = 0;
    for (let i = 0; i < a.length; i++) {
        acc += a[i] * b[i];
    }
    return acc;
}
4376
/**
 * Base kernel function between two dense vectors.
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b
 * @param {'cosine'|'poly2'|string} k - kernel name; anything else -> RBF
 * @param {number} sigma - RBF bandwidth
 * @returns {number}
 */
function baseKernel$1(a, b, k, sigma) {
    if (k === 'cosine') {
        // Norms are accumulated in a loop instead of Math.hypot(...a):
        // spreading a vocabulary-sized dense vector as an argument list can
        // exceed the engine's argument limit / call stack.
        const dot = dotProd$1(a, b);
        let na = 0, nb = 0;
        for (let i = 0; i < a.length; i++)
            na += a[i] * a[i];
        for (let i = 0; i < b.length; i++)
            nb += b[i] * b[i];
        na = Math.sqrt(na);
        nb = Math.sqrt(nb);
        return (na && nb) ? (dot / (na * nb)) : 0;
    }
    else if (k === 'poly2') {
        // degree-2 polynomial kernel (dot + 1)^2
        const dot = dotProd$1(a, b);
        return Math.pow((dot + 1), 2);
    }
    else {
        // RBF: exp(-||a-b||^2 / (2 sigma^2)), guarded against sigma = 0
        let s = 0;
        for (let i = 0; i < a.length; i++) {
            const d = a[i] - b[i];
            s += d * d;
        }
        return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));
    }
}
4397
/**
 * Kernel similarity between two dense vectors (same semantics as
 * baseKernel$1; duplicated to mirror the worker code layout).
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b
 * @param {'cosine'|'poly2'|string} k - kernel name; anything else -> RBF
 * @param {number} sigma - RBF bandwidth
 * @returns {number}
 */
function kernelSim(a, b, k, sigma) {
    if (k === 'cosine') {
        // Loop-based norms instead of Math.hypot(...a): spreading a large
        // dense vector into an argument list can exceed engine limits.
        const dot = dotProd$1(a, b);
        let na = 0, nb = 0;
        for (let i = 0; i < a.length; i++)
            na += a[i] * a[i];
        for (let i = 0; i < b.length; i++)
            nb += b[i] * b[i];
        na = Math.sqrt(na);
        nb = Math.sqrt(nb);
        return (na && nb) ? (dot / (na * nb)) : 0;
    }
    else if (k === 'poly2') {
        const dot = dotProd$1(a, b);
        return Math.pow((dot + 1), 2);
    }
    else {
        // RBF: exp(-||a-b||^2 / (2 sigma^2)), guarded against sigma = 0
        let s = 0;
        for (let i = 0; i < a.length; i++) {
            const d = a[i] - b[i];
            s += d * d;
        }
        return Math.exp(-s / Math.max(1e-9, 2 * sigma * sigma));
    }
}
4418
/**
 * Project a sparse TF-IDF vector into the dense Nyström feature space:
 * evaluate the base kernel against every landmark row, then L2-normalize.
 * @param {Map<number, number>} v - sparse input
 * @param {number} vocabSize - dense dimensionality of the input space
 * @param {Float64Array[]} landmarkMat - landmark vectors (dense rows)
 * @param {string} kernel - kernel name passed to baseKernel$1
 * @param {number} sigma - RBF bandwidth
 * @returns {Float64Array} normalized landmark-feature vector
 */
function projectToDense(v, vocabSize, landmarkMat, kernel, sigma) {
    const x = sparseToDense(v, vocabSize);
    const feats = new Float64Array(landmarkMat.length);
    for (let j = 0; j < landmarkMat.length; j++) {
        feats[j] = baseKernel$1(x, landmarkMat[j], kernel, sigma);
    }
    // Loop-based norm instead of Math.hypot(...feats): avoids spreading a
    // potentially large array into an argument list.
    let sumSq = 0;
    for (let i = 0; i < feats.length; i++)
        sumSq += feats[i] * feats[i];
    const n = Math.sqrt(sumSq);
    if (n > 0)
        for (let i = 0; i < feats.length; i++)
            feats[i] /= n;
    return feats;
}
4434
+
4435
// Tokenization and stemming utilities
// Extracted from workers for reuse
// Memo for speed: raw word -> stemmed form. Unbounded, but the key space is
// the document vocabulary, so growth is naturally capped in practice.
const STEM_CACHE = new Map();
/**
 * Normalize a raw token: lowercase, strip non-alphanumeric edges, reduce
 * plurals to singular, and conservatively trim common suffixes. Results
 * are memoized in STEM_CACHE keyed by the raw input.
 * @param {string} raw
 * @returns {string} normalized (possibly empty) token
 */
function normalizeWord(raw) {
    const k = raw;
    const cached = STEM_CACHE.get(k);
    // `!== undefined` (not truthiness): an empty-string result is a valid
    // cache entry and must not be recomputed on every lookup.
    if (cached !== undefined)
        return cached;
    let w = raw.toLowerCase();
    w = w.replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, '');
    if (w.length <= 2) {
        STEM_CACHE.set(k, w);
        return w;
    }
    // plural → singular
    if (w.endsWith('ies') && w.length > 4) {
        w = w.slice(0, -3) + 'y';
    }
    else if (/(xes|ches|shes|zes|sses)$/.test(w) && w.length > 4) {
        w = w.replace(/(xes|ches|shes|zes|sses)$/, (m) => (m === 'sses' ? 'ss' : m.replace(/es$/, '')));
    }
    else if (w.endsWith('s') && !/(ss|us)$/.test(w) && w.length > 3) {
        w = w.slice(0, -1);
    }
    // conservative suffix trimming: first matching rule wins, and only when
    // enough of the stem (>= 4 chars net) would remain
    const rules = [
        [/ization$|isation$/, 'ize'],
        [/ational$/, 'ate'],
        [/fulness$/, 'ful'],
        [/ousness$/, 'ous'],
        [/iveness$/, 'ive'],
        [/ability$/, 'able'],
        [/ness$/, ''],
        [/ment$/, ''],
        [/ations?$/, 'ate'],
        [/izer$|iser$/, 'ize'],
        [/ally$/, 'al'],
        [/ically$/, 'ic'],
        [/ingly$|edly$/, ''],
        [/ing$|ed$/, ''],
    ];
    for (const [re, rep] of rules) {
        if (re.test(w) && w.length - rep.length >= 4) {
            w = w.replace(re, rep);
            break;
        }
    }
    STEM_CACHE.set(k, w);
    return w;
}
4486
/**
 * Split text into lowercase alphanumeric tokens; when `doStem` is truthy,
 * each token is additionally run through normalizeWord and results of one
 * character or less are dropped.
 */
function tokenize$1(text, doStem) {
    const raw = text.toLowerCase()
        .replace(/[`*_>~]/g, ' ')
        .replace(/[^a-z0-9]+/g, ' ')
        .split(/\s+/)
        .filter(Boolean);
    if (!doStem)
        return raw;
    const stemmed = [];
    for (const token of raw) {
        const norm = normalizeWord(token);
        if (norm && norm.length > 1)
            stemmed.push(norm);
    }
    return stemmed;
}
4502
/**
 * Append domain-specific helper terms to a query based on simple keyword
 * triggers (Go-oriented: map / channel / error / struct).
 * Note: always appends a space separator, even when nothing matches.
 */
function expandQuery(q) {
    const triggers = [
        [/\bmap\b/, 'dict key value make'],
        [/\bchan|channel\b/, 'goroutine concurrency select buffer'],
        [/\berror\b/, 'fmt wrap unwrap sentinel try catch'],
        [/\bstruct\b/, 'field method receiver init zero value'],
    ];
    const extras = [];
    for (const [pattern, terms] of triggers) {
        if (pattern.test(q))
            extras.push(terms);
    }
    return q + ' ' + extras.join(' ');
}
4514
+
4515
// Hybrid retrieval system (sparse + dense + keyword bonus)
// Extracted from workers for reuse
// License removed - all features are now free!
/**
 * Compute a per-chunk keyword bonus in [0, 1]: 0.03 per distinct query
 * keyword (>2 chars) found in the chunk, plus a strong +5-hit boost for
 * chunks containing fenced code when the query looks syntax-oriented.
 */
function keywordBonus(chunks, query) {
    const keywords = Array.from(new Set(query.toLowerCase().split(/\W+/).filter(t => t.length > 2)));
    const syntaxBoost = /\b(define|declare|syntax|example|function|struct|map|interface)\b/i.test(query);
    return chunks.map(chunk => {
        const text = chunk.rich || chunk.content || '';
        const lc = text.toLowerCase();
        let hits = 0;
        for (const kw of keywords) {
            if (lc.includes(kw))
                hits++;
        }
        if (syntaxBoost && /```/.test(text))
            hits += 5; // strong bonus for code presence
        return Math.min(1.0, hits * 0.03);
    });
}
4536
/**
 * Indices of the k largest values in arr, in descending score order.
 */
function topKIndices(arr, k) {
    return Array.from(arr, (_, i) => i)
        .sort((i, j) => arr[j] - arr[i])
        .slice(0, k);
}
4544
/**
 * Clamp x into the inclusive range [a, b].
 */
function clamp$1(x, a, b) {
    const upper = Math.min(b, x);
    return Math.max(a, upper);
}
4550
/**
 * Perform hybrid retrieval (sparse + dense + keyword bonus)
 *
 * Scores every chunk as a ridge-damped, tanh-squashed blend of TF-IDF
 * cosine, Nyström kernel similarity, and a keyword bonus, then returns
 * the topK items together with their component scores.
 * NOTE(review): `landmarksIdx` is destructured but never used here.
 */
function hybridRetrieve(opts) {
    // License check removed // Premium feature - requires valid license
    const { query, chunks, vocabMap, idf, tfidfDocs, denseDocs, landmarksIdx, landmarkMat, vocabSize, kernel, sigma, alpha, beta, ridge, headingW, useStem, expandQuery: shouldExpand, topK: k, prefilter, } = opts;
    // Expand query if needed
    const qexp = shouldExpand ? expandQuery(query) : query;
    const toks = tokenize$1(qexp, useStem);
    const qvec = toTfidf(toks, idf, vocabMap, headingW);
    const qdense = projectToDense(qvec, vocabSize, landmarkMat, kernel, sigma);
    // Compute sparse (TF-IDF) scores
    const tfidfScores = tfidfDocs.map(v => cosineSparse(v, qvec));
    // Compute dense (kernel) scores
    const denseScores = denseDocs.map((v) => kernelSim(v, qdense, kernel, sigma));
    // Compute keyword bonus
    const bonus = keywordBonus(chunks, query);
    // Hybrid scoring with ridge regularization
    const alphaClamped = clamp$1(alpha, 0, 1);
    const lambda = ridge !== null && ridge !== void 0 ? ridge : 0.08;
    const scores = denseScores.map((d, i) => {
        const t = tfidfScores[i];
        const b = beta * bonus[i];
        // Ridge damping on ALL components (dense, tfidf, and keyword bonus)
        const reg = 1 / (1 + lambda * (d * d + t * t + 0.5 * b * b));
        const s = reg * (alphaClamped * d + (1 - alphaClamped) * t + b);
        // soft clip extremes; helps prevent a single noisy dimension from dominating
        return Math.tanh(s);
    });
    // Pre-filter then final topK (retrieval stage)
    // NOTE(review): topKIndices fully sorts, so the second pass over the
    // already-sorted prefix cannot reorder it; the inner arrow's `k`
    // parameter also shadows the outer topK binding. Kept as-is.
    const pre = Math.max(k, prefilter !== null && prefilter !== void 0 ? prefilter : 0);
    const idxs = topKIndices(scores, pre);
    const finalIdxs = topKIndices(idxs.map(i => scores[i]), k).map(k => idxs[k]);
    // Build result items
    const items = finalIdxs.map(i => {
        const c = chunks[i];
        const body = (c.rich && c.rich.trim()) || (c.content && c.content.trim()) || '(see subsections)';
        return {
            score: scores[i],
            heading: c.heading,
            content: body,
            index: i,
        };
    });
    return {
        items,
        scores: finalIdxs.map(i => scores[i]),
        indices: finalIdxs,
        tfidfScores: finalIdxs.map(i => tfidfScores[i]),
        denseScores: finalIdxs.map(i => denseScores[i]),
    };
}
4602
+
4603
// OmegaRR.ts
// Reranker + Reducer for AsterMind docs
// - Extracts rich query–chunk features (sparse text + structural signals)
// - Trains a tiny ridge model on-the-fly with weak supervision (per query)
// - Produces score_rr and p_relevant
// - Filters with threshold + MMR coverage under a character budget
// - (v2) Optionally exposes engineered features (values + names) for TE/diagnostics
/* ====================== Tokenization ======================= */
// English stop words stripped by tokenize() below before any scoring.
const STOP$1 = new Set([
    "a", "an", "the", "and", "or", "but", "if", "then", "else", "for", "to", "of", "in", "on", "at", "by", "with",
    "is", "are", "was", "were", "be", "been", "being", "as", "from", "that", "this", "it", "its", "you", "your",
    "i", "we", "they", "he", "she", "them", "his", "her", "our", "us", "do", "does", "did", "done", "not", "no",
    "yes", "can", "could", "should", "would", "may", "might", "into", "about", "over", "under", "between"
]);
4617
/**
 * Lowercase, replace punctuation/markdown characters with spaces, split on
 * whitespace, and drop empty tokens and stop words.
 */
function tokenize(s) {
    const cleaned = s
        .toLowerCase()
        .replace(/[`*_#>~=\[\]{}()!?.:,;'"<>|/\\+-]+/g, " ");
    const out = [];
    for (const tok of cleaned.split(/\s+/)) {
        if (tok && !STOP$1.has(tok))
            out.push(tok);
    }
    return out;
}
4624
/** Deduplicate an array, preserving first-seen order. */
function unique(arr) { return [...new Set(arr)]; }
4625
/**
 * One pass over the corpus: assign vocabulary ids, collect per-document
 * term frequencies and lengths, then derive document frequencies, smoothed
 * idf (log((N+1)/(df+1)) + 1), and average document length in tokens.
 */
function buildCorpusStats(docs) {
    const vocab = new Map();
    const tfs = [];
    const docLens = [];
    const idFor = (w) => {
        let id = vocab.get(w);
        if (id === undefined) {
            id = vocab.size; // ids are assigned densely in first-seen order
            vocab.set(w, id);
        }
        return id;
    };
    for (const doc of docs) {
        const toks = tokenize(doc);
        docLens.push(toks.length);
        const tf = new Map();
        for (const w of toks) {
            const id = idFor(w);
            tf.set(id, (tf.get(id) || 0) + 1);
        }
        tfs.push(tf);
    }
    const N = docs.length;
    const df = new Array(vocab.size).fill(0);
    for (const tf of tfs) {
        for (const id of tf.keys()) {
            df[id] += 1;
        }
    }
    const idf = df.map(count => Math.log((N + 1) / (count + 1)) + 1);
    const totalLen = docLens.reduce((acc, len) => acc + len, 0);
    const avgLen = totalLen / Math.max(1, N);
    return { stats: { vocab, idf, avgLen, df }, tf: tfs, docLens };
}
4653
/**
 * Build an L2-normalized TF-IDF vector (Map id -> weight) from raw term
 * frequencies; the 1e-12 floor prevents division by zero for empty docs.
 */
function tfidfVector(tf, idf) {
    const out = new Map();
    let sumSq = 0;
    for (const [id, freq] of tf) {
        const val = freq * (idf[id] || 0);
        out.set(id, val);
        sumSq += val * val;
    }
    const norm = Math.sqrt(sumSq) || 1e-12;
    for (const [id, val] of out) {
        out.set(id, val / norm);
    }
    return out;
}
4666
/**
 * Dot product of two sparse vectors, iterating the smaller map for speed.
 * Equals cosine similarity when both vectors are already L2-normalized
 * (as produced by tfidfVector).
 */
function cosine$1(a, b) {
    const small = a.size < b.size ? a : b;
    const large = small === a ? b : a;
    let dot = 0;
    for (const [id, v] of small) {
        const u = large.get(id);
        if (u !== undefined)
            dot += v * u;
    }
    return dot;
}
4676
/**
 * Okapi BM25 score of a document against the query's terms.
 * @param qTf - query term frequencies (only the keys are used)
 * @param dTf - document term frequencies
 * @param stats - corpus stats; df per term, idf.length stands in for N
 * @param dLen - document length in tokens
 * @param k1 - term-frequency saturation (default 1.5)
 * @param b - length-normalization strength (default 0.75)
 */
function bm25Score(qTf, dTf, stats, dLen, k1 = 1.5, b = 0.75) {
    let score = 0;
    // length normalization factor is identical for every term
    const lenNorm = 1 - b + b * (dLen / (stats.avgLen || 1));
    for (const termId of qTf.keys()) {
        const f = dTf.get(termId) || 0;
        if (f <= 0)
            continue;
        const df = stats.df[termId] || 0;
        // smoothed probabilistic idf (vocabulary size as N proxy)
        const idf = Math.log((df + 0.5) / ((stats.idf.length - df) + 0.5) + 1);
        const denom = f + k1 * lenNorm;
        score += idf * ((f * (k1 + 1)) / (denom || 1e-12));
    }
    return score;
}
4688
/* ========== Light Random Projection from TF-IDF (dense hint) ========== */
/**
 * Deterministic random projection of a sparse vector into `dim` dense
 * features using a per-feature xorshift stream, L2-normalized.
 * NOTE(review): the `seed` parameter is currently unused — the stream is
 * seeded from the feature id alone.
 */
function projectSparse(vec, dim, seed = 1337) {
    const out = new Float64Array(dim);
    for (const [featureId, value] of vec) {
        // Knuth multiplicative hash starts the per-feature xorshift stream
        let state = (featureId * 2654435761) >>> 0;
        for (let j = 0; j < dim; j++) {
            state ^= state << 13;
            state ^= state >>> 17;
            state ^= state << 5;
            const r = ((state >>> 0) / 4294967296) * 2 - 1; // [-1,1]
            out[j] += value * r;
        }
    }
    let sumSq = 0;
    for (let j = 0; j < dim; j++)
        sumSq += out[j] * out[j];
    const norm = Math.sqrt(sumSq) || 1e-12;
    for (let j = 0; j < dim; j++)
        out[j] /= norm;
    return out;
}
4710
/* ===================== Structural Signals ===================== */
/** True when the text has a fenced Go code block or a Go func signature. */
function containsGoCodeBlock(s) {
    if (/```+\s*go([\s\S]*?)```/i.test(s))
        return true;
    return /\bfunc\s+\w+\s*\(.*\)\s*\w*\s*{/.test(s);
}
4714
/** True when the text contains a code fence or any brace-delimited span. */
function containsCodeBlock(s) {
    return /```+/.test(s) ? true : /{[^}]*}/.test(s);
}
4717
/**
 * Fraction of distinct heading tokens that also appear in the query
 * (0 when either side tokenizes to nothing).
 */
function headingQueryMatch(head, q) {
    const headTokens = unique(tokenize(head));
    const queryTokens = new Set(tokenize(q));
    if (headTokens.length === 0 || queryTokens.size === 0)
        return 0;
    let matched = 0;
    for (const tok of headTokens) {
        if (queryTokens.has(tok))
            matched++;
    }
    return matched / headTokens.length;
}
4728
/** Jaccard similarity of the token sets of two strings. */
function jaccard$1(a, b) {
    const setA = new Set(tokenize(a));
    const setB = new Set(tokenize(b));
    let inter = 0;
    for (const tok of setA) {
        if (setB.has(tok))
            inter++;
    }
    const union = setA.size + setB.size - inter;
    return union === 0 ? 0 : inter / union;
}
4738
/** 1 when the text links to an official Go documentation domain, else 0. */
function golangSpecFlag(s) {
    const officialGoDocs = /(golang\.org|go\.dev|pkg\.go\.dev)/i;
    return officialGoDocs.test(s) ? 1 : 0;
}
4741
/**
 * Assemble the engineered feature vector for one (query, chunk) pair.
 * Returns parallel arrays — `values` (numbers) and `names` (labels) — so
 * downstream diagnostics can address features by name. The optional
 * projQ/projC dense projections contribute two extra similarity features
 * only when both are provided.
 */
function buildFeatures$1(q, chunk, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, cLen, projQ, projC) {
    var _a;
    const f = [];
    const names = [];
    // 1) Sparse sims
    const cos = cosine$1(qTfIdf, cTfIdf);
    f.push(cos);
    names.push("cosine_tfidf");
    const bm25 = bm25Score(qTfRaw, cTfRaw, stats, cLen);
    f.push(bm25);
    names.push("bm25");
    // 2) Heading & lexical overlaps
    const hMatch = headingQueryMatch(chunk.heading || "", q);
    f.push(hMatch);
    names.push("heading_match_frac");
    const jac = jaccard$1(q, chunk.content || "");
    f.push(jac);
    names.push("jaccard_tokens");
    // 3) Structural flags (prefer rich text when available)
    const hasGo = containsGoCodeBlock(chunk.rich || chunk.content || "");
    const hasCode = containsCodeBlock(chunk.rich || chunk.content || "");
    f.push(hasGo ? 1 : 0);
    names.push("flag_go_code");
    f.push(hasCode ? 1 : 0);
    names.push("flag_any_code");
    // 4) Source cues (links to official Go docs)
    f.push(golangSpecFlag(chunk.content || "") ? 1 : 0);
    names.push("flag_go_spec_link");
    // 5) Prior score (baseline retrieval score, 0 when absent)
    f.push(((_a = chunk.score_base) !== null && _a !== void 0 ? _a : 0));
    names.push("prior_score_base");
    // 6) Length heuristics (prefer concise answers)
    const lenChars = (chunk.content || "").length;
    f.push(1 / Math.sqrt(1 + lenChars));
    names.push("len_inv_sqrt");
    // 7) Dense hint from projection: dot product + mean L1 distance
    if (projQ && projC) {
        let dot = 0, l1 = 0;
        for (let i = 0; i < projQ.length; i++) {
            dot += projQ[i] * projC[i];
            l1 += Math.abs(projQ[i] - projC[i]);
        }
        f.push(dot);
        names.push("proj_dot");
        f.push(l1 / projQ.length);
        names.push("proj_l1mean");
    }
    return { names, values: f };
}
4790
/* ======================== Ridge Model ======================== */
/**
 * Small dense ridge regressor solved in closed form.
 * fit() standardizes the design matrix, forms the normal equations
 * A = Z^T Z + λI, and solves A w = Z^T y via Cholesky factorization;
 * predict() applies the stored standardization before the dot product.
 */
class Ridge {
    constructor() {
        this.w = null; // weights in the standardized feature space
        this.mu = null; // per-feature means from fit()
        this.sigma = null; // per-feature std devs (zero replaced by 1)
    }
    /**
     * Fit weights on rows X (n x d) against targets y (length n).
     * @param X - feature rows
     * @param y - numeric targets
     * @param lambda - ridge regularizer (default 1e-2)
     */
    fit(X, y, lambda = 1e-2) {
        var _a;
        const n = X.length;
        const d = ((_a = X[0]) === null || _a === void 0 ? void 0 : _a.length) || 0;
        if (n === 0 || d === 0) {
            this.w = new Float64Array(d);
            return;
        }
        // standardize
        const mu = new Float64Array(d);
        const sig = new Float64Array(d);
        for (let j = 0; j < d; j++) {
            let m = 0;
            for (let i = 0; i < n; i++)
                m += X[i][j];
            m /= n;
            mu[j] = m;
            let v = 0;
            for (let i = 0; i < n; i++) {
                const z = X[i][j] - m;
                v += z * z;
            }
            sig[j] = Math.sqrt(v / n) || 1; // constant feature -> unit scale
        }
        const Z = Array.from({ length: n }, (_, i) => new Float64Array(d));
        for (let i = 0; i < n; i++)
            for (let j = 0; j < d; j++)
                Z[i][j] = (X[i][j] - mu[j]) / sig[j];
        // A = Z^T Z + λI, Zy = Z^T y
        const A = Array.from({ length: d }, () => new Float64Array(d));
        const Zy = new Float64Array(d);
        for (let i = 0; i < n; i++) {
            const zi = Z[i];
            const yi = y[i];
            for (let j = 0; j < d; j++) {
                Zy[j] += zi[j] * yi;
                const zij = zi[j];
                // accumulate lower triangle only; mirrored just below
                for (let k = 0; k <= j; k++)
                    A[j][k] += zij * zi[k];
            }
        }
        for (let j = 0; j < d; j++) {
            for (let k = 0; k < j; k++)
                A[k][j] = A[j][k];
            A[j][j] += lambda;
        }
        // Cholesky solve: factor A = L L^T ...
        const L = Array.from({ length: d }, () => new Float64Array(d));
        for (let i = 0; i < d; i++) {
            for (let j = 0; j <= i; j++) {
                let sum = A[i][j];
                for (let k = 0; k < j; k++)
                    sum -= L[i][k] * L[j][k];
                // Math.max guards sqrt against tiny negatives from round-off
                L[i][j] = (i === j) ? Math.sqrt(Math.max(sum, 1e-12)) : (sum / (L[j][j] || 1e-12));
            }
        }
        // ... then forward-solve L z = Zy ...
        const z = new Float64Array(d);
        for (let i = 0; i < d; i++) {
            let s = Zy[i];
            for (let k = 0; k < i; k++)
                s -= L[i][k] * z[k];
            z[i] = s / (L[i][i] || 1e-12);
        }
        // ... and back-substitute L^T w = z
        const w = new Float64Array(d);
        for (let i = d - 1; i >= 0; i--) {
            let s = z[i];
            for (let k = i + 1; k < d; k++)
                s -= L[k][i] * w[k];
            w[i] = s / (L[i][i] || 1e-12);
        }
        this.w = w;
        this.mu = mu;
        this.sigma = sig;
    }
    /**
     * Predict a single row; returns 0 until fit() has been called.
     */
    predict(x) {
        if (!this.w || !this.mu || !this.sigma)
            return 0;
        let s = 0;
        for (let j = 0; j < this.w.length; j++) {
            const z = (x[j] - this.mu[j]) / this.sigma[j];
            s += this.w[j] * z;
        }
        return s;
    }
}
4882
/* ===================== Weak Supervision ===================== */
/**
 * Produce a heuristic relevance label in [0, 1] for a (query, chunk) pair,
 * as the maximum over several weak rules:
 *  - 1.0  query asks to define/declare a Go function and chunk has Go code
 *  - 0.8  heading overlaps the query (>= 0.34) and chunk has any code
 *  - 0.6  strong TF-IDF cosine (> 0.25) or BM25 (> 1.0) feature
 *  - up to 0.6 scaled from the baseline retrieval prior
 */
function generateWeakLabel(q, chunk, feats) {
    var _a;
    const txt = (chunk.rich || chunk.content || "");
    let y = 0;
    const qIsGoFunc = /\bgo\b/.test(q.toLowerCase()) && /(define|declare|function|func)/i.test(q);
    if (qIsGoFunc && containsGoCodeBlock(txt))
        y = Math.max(y, 1.0);
    const headHit = headingQueryMatch(chunk.heading || "", q);
    if (headHit >= 0.34 && containsCodeBlock(txt))
        y = Math.max(y, 0.8);
    // Feature values are looked up by name so ordering changes are safe.
    const cosIdx = feats.names.indexOf("cosine_tfidf");
    const bm25Idx = feats.names.indexOf("bm25");
    const cos = cosIdx >= 0 ? feats.values[cosIdx] : 0;
    const bm = bm25Idx >= 0 ? feats.values[bm25Idx] : 0;
    if (cos > 0.25)
        y = Math.max(y, 0.6);
    if (bm > 1.0)
        y = Math.max(y, 0.6);
    const priorIdx = feats.names.indexOf("prior_score_base");
    const prior = priorIdx >= 0 ? feats.values[priorIdx] : 0;
    if (((_a = chunk.score_base) !== null && _a !== void 0 ? _a : 0) > 0)
        y = Math.max(y, Math.min(0.6, 0.2 + 0.5 * prior));
    return y;
}
4907
/**
 * Numerically stable logistic function 1 / (1 + e^(-x)).
 * Math.exp is only ever called with a non-positive argument, so the
 * intermediate never overflows for large |x|.
 * @param {number} x
 * @returns {number} value in (0, 1)
 */
function sigmoid(x) {
    const e = Math.exp(-Math.abs(x));
    return x >= 0 ? 1 / (1 + e) : e / (1 + e);
}
4917
+ /* ========================= MMR Filter ========================= */
4918
/**
 * Greedy Maximal Marginal Relevance selection under a character budget.
 * Repeatedly picks the unselected candidate maximizing
 *   lambda * score_rr - (1 - lambda) * maxCosineToSelected,
 * where similarity is TF-IDF cosine over the candidates' own corpus stats.
 * Stops once the next pick would exceed budgetChars, but always admits at
 * least one chunk.
 * @param {Array<{content?: string, score_rr: number}>} scored reranked chunks
 * @param {number} [lambda=0.7] relevance vs. diversity trade-off
 * @param {number} [budgetChars=1200] total character budget
 * @returns selected chunks in pick order
 */
function mmrFilter(scored, lambda = 0.7, budgetChars = 1200) {
    const texts = scored.map(item => item.content || "");
    const { stats, tf: tfList } = buildCorpusStats(texts);
    const vectors = tfList.map(tf => tfidfVector(tf, stats.idf));
    const picked = [];
    const pickedIdx = new Set();
    let spent = 0;
    while (pickedIdx.size < scored.length) {
        // Find the best remaining candidate by MMR value.
        let winner = -1;
        let winnerVal = -Infinity;
        for (let i = 0; i < scored.length; i++) {
            if (pickedIdx.has(i))
                continue;
            // Redundancy = max similarity to anything already selected.
            let redundancy = 0;
            for (const j of pickedIdx) {
                const sim = cosine$1(vectors[i], vectors[j]);
                if (sim > redundancy)
                    redundancy = sim;
            }
            const mmr = lambda * scored[i].score_rr - (1 - lambda) * redundancy;
            if (mmr > winnerVal) {
                winnerVal = mmr;
                winner = i;
            }
        }
        if (winner < 0)
            break;
        const pick = scored[winner];
        const cost = (pick.content || "").length;
        // Enforce the budget, but never return an empty selection.
        if (spent + cost > budgetChars && picked.length > 0)
            break;
        picked.push(pick);
        spent += cost;
        pickedIdx.add(winner);
    }
    return picked;
}
4955
+ /* ========================= Public API ========================= */
4956
+ /** Train per-query ridge model and score chunks. */
4957
/**
 * Train a per-query ridge model on weak labels and score every chunk.
 *
 * Builds TF-IDF features over [query, ...chunks], derives weak labels via
 * generateWeakLabel (falling back to a cosine-derived label when all labels
 * collapse to a single value), fits a small ridge regressor, and returns
 * copies of the chunks annotated with min-max-normalized `score_rr` in
 * [0, 1] and a sigmoid-squashed `p_relevant`, sorted by score_rr descending.
 *
 * @param {string} query user query
 * @param {Array<object>} chunks candidate chunks ({content, heading, ...})
 * @param {object} [opts] { lambdaRidge=1e-2, randomProjDim=32,
 *   exposeFeatures=true, attachFeatureNames=false }
 * @returns {Array<object>} scored chunk copies (empty array for no chunks)
 */
function rerank(query, chunks, opts = {}) {
    // License check removed // Premium feature - requires valid license
    const { lambdaRidge = 1e-2, randomProjDim = 32, exposeFeatures = true, attachFeatureNames = false, } = opts;
    // Bug fix: with zero chunks, y.every() below is vacuously true and
    // featPacks[0] is undefined, so the fallback path crashed with a
    // TypeError. Return early instead.
    if (chunks.length === 0)
        return [];
    const docs = [query, ...chunks.map(c => c.content || "")];
    const { stats, tf: tfRaw, docLens } = buildCorpusStats(docs);
    const tfidfAll = tfRaw.map(tf => tfidfVector(tf, stats.idf));
    // Index 0 of the corpus is the query itself.
    const qTfRaw = tfRaw[0];
    const qTfIdf = tfidfAll[0];
    const projQ = randomProjDim > 0 ? projectSparse(qTfIdf, randomProjDim) : undefined;
    const X = [];
    const y = [];
    const featPacks = [];
    for (let i = 0; i < chunks.length; i++) {
        const c = chunks[i];
        const cTfRaw = tfRaw[i + 1];
        const cTfIdf = tfidfAll[i + 1];
        // Per-chunk random projection seed (1337 + i) keeps projections deterministic.
        const projC = randomProjDim > 0 ? projectSparse(cTfIdf, randomProjDim, 1337 + i) : undefined;
        const feats = buildFeatures$1(query, c, qTfIdf, cTfIdf, qTfRaw, cTfRaw, stats, docLens[i + 1] || 1, projQ, projC);
        featPacks.push(feats);
        X.push(feats.values);
        y.push(generateWeakLabel(query, c, feats));
    }
    // If every weak label is identical, ridge has no signal; fall back to
    // a label derived from the cosine feature so training stays informative.
    const allSame = y.every(v => Math.abs(v - y[0]) < 1e-9);
    if (allSame) {
        const cosIdx = featPacks[0].names.indexOf("cosine_tfidf");
        if (cosIdx >= 0) {
            for (let i = 0; i < y.length; i++)
                y[i] = Math.max(0, Math.min(1, 0.2 + 0.6 * X[i][cosIdx]));
        }
    }
    const rr = new Ridge();
    rr.fit(X, y, lambdaRidge);
    // Min-max range of raw predictions, used to normalize into [0, 1].
    const rawScores = X.map(x => rr.predict(x));
    let minS = Infinity, maxS = -Infinity;
    for (const s of rawScores) {
        if (s < minS)
            minS = s;
        if (s > maxS)
            maxS = s;
    }
    const range = Math.max(1e-9, maxS - minS);
    const featureNames = attachFeatureNames ? (featPacks[0]?.names ?? []) : undefined;
    const scored = chunks.map((c, i) => {
        const s01 = (rawScores[i] - minS) / range;
        // Probability via sigmoid centered on the score midpoint.
        const p = sigmoid((rawScores[i] - 0.5 * (minS + maxS)) / (0.2 * range + 1e-6));
        const base = Object.assign(Object.assign({}, c), { score_rr: s01, p_relevant: p });
        if (exposeFeatures)
            base._features = X[i];
        if (featureNames)
            base._feature_names = featureNames;
        return base;
    });
    scored.sort((a, b) => b.score_rr - a.score_rr);
    return scored;
}
5014
+ /** Filter scored chunks using probability/near-top thresholds and MMR coverage. */
5015
/**
 * Filter reranked chunks down to a final context set.
 * Keeps chunks passing both the probability threshold and a near-top score
 * band; when none qualify, falls back to the single best chunk. With MMR
 * enabled, band members receive a small score boost and final selection is
 * delegated to mmrFilter; otherwise chunks are taken in order under the
 * character budget (always keeping at least one).
 * @param scored chunks sorted by score_rr descending
 * @param opts { probThresh=0.45, epsilonTop=0.05, useMMR=true,
 *   mmrLambda=0.7, budgetChars=1200 }
 * @returns filtered subset of scored
 */
function filterMMR(scored, opts = {}) {
    // License check removed // Premium feature - requires valid license
    const { probThresh = 0.45, epsilonTop = 0.05, useMMR = true, mmrLambda = 0.7, budgetChars = 1200 } = opts;
    if (scored.length === 0)
        return [];
    const best = scored[0].score_rr;
    const band = scored.filter(item => item.p_relevant >= probThresh && item.score_rr >= (best - epsilonTop));
    // Never end up with an empty seed: fall back to the top chunk.
    const seed = band.length > 0 ? band : [scored[0]];
    if (!useMMR) {
        // Plain greedy fill under the character budget.
        const kept = [];
        let spent = 0;
        for (const item of seed) {
            const cost = (item.content || "").length;
            if (spent + cost > budgetChars && kept.length > 0)
                break;
            kept.push(item);
            spent += cost;
        }
        return kept;
    }
    // Nudge seed members up slightly so MMR prefers them, then diversify.
    const boosted = scored.map(item => (Object.assign(Object.assign({}, item), {
        score_rr: seed.includes(item) ? item.score_rr + 0.01 : item.score_rr
    })));
    return mmrFilter(boosted, mmrLambda, budgetChars);
}
5038
+ /** Convenience: run rerank then filter. */
5039
/**
 * Convenience wrapper: rerank the chunks for the query, then apply the
 * probability/MMR filter, sharing a single options object for both stages.
 * @param {string} query user query
 * @param {Array<object>} chunks candidate chunks
 * @param {object} [opts={}] options forwarded to rerank and filterMMR
 * @returns filtered, scored chunks
 */
function rerankAndFilter(query, chunks, opts = {}) {
    // License check removed // Premium feature - requires valid license
    return filterMMR(rerank(query, chunks, opts), opts);
}
5044
/**
 * Rehydrate a serialized retrieval model into in-memory runtime state.
 *
 * Accepts models with version 'astermind-pro-v1' or 'astermind-elm-v1'.
 * Dense document vectors must be present in the payload: this build has no
 * buildDense fallback, so a model without (matching) denseDocs is rejected.
 *
 * @param {object} model serialized model (version, settings, vocab, idf,
 *   chunks, tfidfDocs, landmarksIdx, landmarkMat, denseDocs)
 * @param {object} [opts] reserved for future use (currently ignored)
 * @returns {{settings: object, vocabMap: Map, idf: Float64Array,
 *   chunks: Array, tfidfDocs: Map[], landmarksIdx: number[],
 *   landmarkMat: Float64Array[], denseDocs: Float64Array[]}}
 * @throws {Error} on an unsupported version, or when denseDocs is missing
 *   or its length does not match tfidfDocs
 */
function importModel(model, opts) {
    // License check removed // Premium feature - requires valid license
    if (model.version !== 'astermind-pro-v1' && model.version !== 'astermind-elm-v1') {
        throw new Error(`Unsupported model version: ${model.version}. Expected 'astermind-pro-v1' or 'astermind-elm-v1'`);
    }
    // 1) restore settings (deep copy so callers can't mutate the source model)
    const settings = JSON.parse(JSON.stringify(model.settings || {}));
    // 2) vocab (array of [term, index] pairs) & idf weights
    const vocabMap = new Map(model.vocab);
    const idf = Float64Array.from(model.idf);
    // 3) chunks (normalize missing content to '')
    const chunks = model.chunks.map(c => ({
        heading: c.heading,
        content: c.content || '',
        rich: c.rich,
        level: c.level,
        secId: c.secId
    }));
    // 4) sparse TF-IDF vectors: each row is an array of [termIndex, weight] pairs
    const tfidfDocs = model.tfidfDocs.map(row => new Map(row));
    // 5) Nyström landmarks
    const landmarksIdx = Array.from(model.landmarksIdx);
    const landmarkMat = model.landmarkMat.map(a => Float64Array.from(a));
    // 6) dense document vectors must ship with the model; there is no
    //    recompute path in this build. (Previous error message incorrectly
    //    referenced a recomputeDense option.)
    if (!model.denseDocs || model.denseDocs.length !== tfidfDocs.length) {
        throw new Error('Serialized model is missing dense document vectors (denseDocs) or their count does not match tfidfDocs; cannot import without a buildDense function');
    }
    const denseDocs = model.denseDocs.map(a => Float64Array.from(a));
    return {
        settings,
        vocabMap,
        idf,
        chunks,
        tfidfDocs,
        landmarksIdx,
        landmarkMat,
        denseDocs,
    };
}
5092
+
5093
+ // src/omega/OmegaOfflineRAG.ts
5094
+ // Omega-powered offline RAG using @astermind/astermind-community
5095
+ /**
5096
+ * Omega-powered offline RAG class
5097
+ *
5098
+ * Uses @astermind/astermind-community for hybrid retrieval and reranking.
5099
+ * Provides enhanced offline capabilities over simple TF-IDF fallback.
5100
+ */
5101
/**
 * Omega-powered offline RAG engine.
 *
 * Wraps a pre-exported hybrid retrieval model (SerializedModel): loads it,
 * runs hybrid retrieval, optionally reranks with a per-query ridge model
 * plus MMR filtering, and composes an answer fully offline. Methods that
 * answer queries degrade gracefully (structured error responses) rather
 * than throwing when no model is loaded or retrieval fails.
 */
class OmegaOfflineRAG {
    /**
     * @param {object} [config] optional overrides:
     *   reranking.{enabled,lambdaRidge,useMMR,mmrLambda,probThresh},
     *   topK (default 5), verbose (default false)
     */
    constructor(config) {
        this.modelState = null; // rehydrated runtime state (importModel output)
        this.model = null; // raw SerializedModel as loaded
        this.config = {
            reranking: {
                enabled: config?.reranking?.enabled ?? true,
                lambdaRidge: config?.reranking?.lambdaRidge ?? 0.1,
                useMMR: config?.reranking?.useMMR ?? true,
                mmrLambda: config?.reranking?.mmrLambda ?? 0.5,
                probThresh: config?.reranking?.probThresh ?? 0.1
            },
            topK: config?.topK ?? 5,
            verbose: config?.verbose ?? false
        };
    }
    /**
     * Load a pre-exported model from a SerializedModel object.
     * @param {object} model serialized model payload
     */
    async loadModel(model) {
        const startTime = performance.now();
        this.model = model;
        this.modelState = importModel(model);
        if (this.config.verbose) {
            const loadTime = performance.now() - startTime;
            console.log(`[OmegaOfflineRAG] Model loaded in ${loadTime.toFixed(0)}ms`);
            console.log(`[OmegaOfflineRAG] ${this.modelState.chunks.length} chunks, ${this.modelState.vocabMap.size} vocab`);
        }
    }
    /**
     * Fetch a serialized model from a URL and load it.
     * @param {string} url
     * @throws {Error} when the HTTP request fails
     */
    async loadModelFromUrl(url) {
        const response = await fetch(url);
        if (!response.ok) {
            throw new Error(`Failed to fetch model from ${url}: ${response.status}`);
        }
        const model = await response.json();
        await this.loadModel(model);
    }
    /**
     * Check if a model is loaded and ready.
     * @returns {boolean}
     */
    isReady() {
        return this.modelState !== null;
    }
    /**
     * Summarize the loaded model, or null when none is loaded.
     */
    getModelInfo() {
        if (!this.model || !this.modelState)
            return null;
        return {
            version: this.model.version,
            savedAt: this.model.savedAt,
            documentCount: this.modelState.chunks.length,
            vocabularySize: this.modelState.vocabMap.size,
            landmarkCount: this.modelState.landmarksIdx.length,
            hasDenseVectors: this.modelState.denseDocs.length > 0,
            settings: this.model.settings
        };
    }
    /**
     * Internal: run hybrid retrieval for `query`, honoring the model's
     * stored settings (with defaults) and returning up to `topK` raw
     * results. Shared by ask() and retrieve() so the settings plumbing
     * lives in one place instead of being duplicated.
     */
    runHybridRetrieval(query, topK) {
        const settings = this.model.settings;
        return hybridRetrieve({
            query,
            chunks: this.modelState.chunks,
            vocabMap: this.modelState.vocabMap,
            idf: this.modelState.idf,
            tfidfDocs: this.modelState.tfidfDocs,
            denseDocs: this.modelState.denseDocs,
            landmarksIdx: this.modelState.landmarksIdx,
            landmarkMat: this.modelState.landmarkMat,
            vocabSize: this.modelState.vocabMap.size,
            kernel: settings.kernel ?? 'rbf',
            sigma: settings.sigma ?? 1.0,
            alpha: settings.alpha ?? 0.5,
            beta: settings.beta ?? 0.3,
            ridge: settings.ridge ?? 0.001,
            headingW: settings.headingW ?? 0.2,
            useStem: settings.useStem ?? true,
            expandQuery: settings.expandQuery ?? false,
            topK,
            prefilter: settings.prefilter ?? 100
        });
    }
    /**
     * Perform a RAG query and return the answer with sources.
     * Returns a degraded response (never throws) when the model is not
     * loaded or retrieval/composition fails.
     */
    async ask(query) {
        const startTime = performance.now();
        if (!this.modelState || !this.model) {
            return {
                answer: 'Omega model not loaded. Please load a model first.',
                confidence: 'none',
                sources: [],
                processingTime: 0,
                degraded: true,
                degradedReason: 'Model not loaded'
            };
        }
        try {
            // Over-fetch (2x topK) so the reranker has candidates to prune.
            const retrievalResult = this.runHybridRetrieval(query, this.config.topK * 2);
            let sources;
            if (this.config.reranking.enabled && retrievalResult.items.length > 0) {
                const chunksForRerank = retrievalResult.items.map((item, idx) => ({
                    heading: item.heading,
                    content: item.content,
                    rich: item.rich,
                    score_base: retrievalResult.scores[idx]
                }));
                const reranked = rerankAndFilter(query, chunksForRerank, {
                    lambdaRidge: this.config.reranking.lambdaRidge,
                    useMMR: this.config.reranking.useMMR,
                    mmrLambda: this.config.reranking.mmrLambda,
                    probThresh: this.config.reranking.probThresh
                });
                sources = reranked.slice(0, this.config.topK).map((chunk) => ({
                    heading: chunk.heading,
                    content: chunk.content,
                    rich: chunk.rich,
                    score: chunk.score_base ?? 0,
                    rerankScore: chunk.score_rr,
                    probability: chunk.p_relevant
                }));
            }
            else {
                sources = retrievalResult.items.slice(0, this.config.topK).map((item, idx) => ({
                    heading: item.heading,
                    content: item.content,
                    rich: item.rich,
                    score: retrievalResult.scores[idx]
                }));
            }
            // Compose the final answer from the selected sources.
            const answer = await omegaComposeAnswer(query, sources.map(s => ({ heading: s.heading, content: s.content, score: s.score })));
            const processingTime = performance.now() - startTime;
            const topScore = sources[0]?.score ?? 0;
            const confidence = this.determineConfidence(topScore, sources.length);
            const queryClassification = this.classifyQuery(query);
            return {
                answer,
                confidence,
                sources,
                processingTime,
                degraded: false,
                queryClassification
            };
        }
        catch (error) {
            const processingTime = performance.now() - startTime;
            console.error('[OmegaOfflineRAG] Error during retrieval:', error);
            return {
                answer: 'An error occurred while processing your question offline.',
                confidence: 'none',
                sources: [],
                processingTime,
                degraded: true,
                degradedReason: error instanceof Error ? error.message : 'Unknown error'
            };
        }
    }
    /**
     * Retrieve relevant chunks without generating an answer.
     * @param {string} query
     * @param {number} [topK] override for configured topK
     * @returns {Promise<Array>} raw retrieval hits ([] when no model loaded)
     */
    async retrieve(query, topK) {
        if (!this.modelState || !this.model) {
            return [];
        }
        const retrievalResult = this.runHybridRetrieval(query, topK ?? this.config.topK);
        return retrievalResult.items.map((item, idx) => ({
            heading: item.heading,
            content: item.content,
            rich: item.rich,
            score: retrievalResult.scores[idx]
        }));
    }
    /**
     * Classify the query type (navigational / procedural / conceptual /
     * factual / unknown) via simple keyword and phrase matching.
     * @param {string} query
     * @returns {{type: string, confidence: number, keywords: string[]}}
     */
    classifyQuery(query) {
        const lowerQuery = query.toLowerCase();
        const words = lowerQuery.split(/\s+/);
        const factualKeywords = ['what', 'who', 'when', 'where', 'which', 'is', 'are', 'was', 'were'];
        const proceduralKeywords = ['how', 'steps', 'guide', 'tutorial', 'process', 'procedure'];
        const conceptualKeywords = ['why', 'explain', 'describe', 'meaning', 'definition', 'concept'];
        const navigationKeywords = ['go to', 'navigate', 'find', 'page', 'link', 'take me', 'show me'];
        let type = 'unknown';
        let confidence = 0.5;
        const keywords = [];
        // Navigation is checked first because it matches whole phrases.
        for (const phrase of navigationKeywords) {
            if (lowerQuery.includes(phrase)) {
                type = 'navigational';
                confidence = 0.8;
                keywords.push(phrase);
                break;
            }
        }
        if (type === 'unknown') {
            const firstWord = words[0];
            if (proceduralKeywords.includes(firstWord) || proceduralKeywords.some(k => lowerQuery.includes(k))) {
                type = 'procedural';
                confidence = 0.7;
                keywords.push(...proceduralKeywords.filter(k => lowerQuery.includes(k)));
            }
            else if (conceptualKeywords.includes(firstWord) || conceptualKeywords.some(k => lowerQuery.includes(k))) {
                type = 'conceptual';
                confidence = 0.7;
                keywords.push(...conceptualKeywords.filter(k => lowerQuery.includes(k)));
            }
            else if (factualKeywords.includes(firstWord)) {
                type = 'factual';
                confidence = 0.7;
                keywords.push(firstWord);
            }
        }
        return { type, confidence, keywords };
    }
    /**
     * Map a top retrieval score to a confidence level.
     * Offline answers are capped at 'medium' by design — never 'high'.
     * @param {number} topScore best source score
     * @param {number} sourceCount number of sources found
     * @returns {'none'|'low'|'medium'}
     */
    determineConfidence(topScore, sourceCount) {
        if (sourceCount === 0)
            return 'none';
        return topScore >= 0.4 ? 'medium' : 'low';
    }
    /**
     * Drop the loaded model and its runtime state.
     */
    clear() {
        this.modelState = null;
        this.model = null;
    }
}
5387
+
5388
// Frozen namespace record re-exporting the OmegaOfflineRAG module under a
// single name (bundler-generated; __proto__: null keeps it prototype-free,
// and /*#__PURE__*/ lets minifiers tree-shake it when unused).
var OmegaOfflineRAG$1 = /*#__PURE__*/Object.freeze({
    __proto__: null,
    OmegaOfflineRAG: OmegaOfflineRAG
});
5392
+
3395
5393
  // src/full.ts
3396
5394
  // Full package exports (core + agentic)
3397
5395
  //
@@ -3411,7 +5409,9 @@ LJ5AZXvOhHaXdHzMuYKX5BpK4w7TqbPvJ6QPvKmLKvHh1VKcUJ6mJQgJJw==
3411
5409
  exports.CyberneticClient = CyberneticClient;
3412
5410
  exports.CyberneticIntentClassifier = CyberneticIntentClassifier;
3413
5411
  exports.CyberneticLocalRAG = CyberneticLocalRAG;
5412
+ exports.CyberneticOfflineStorage = CyberneticOfflineStorage;
3414
5413
  exports.LicenseManager = LicenseManager;
5414
+ exports.OmegaOfflineRAG = OmegaOfflineRAG;
3415
5415
  exports.REQUIRED_FEATURES = REQUIRED_FEATURES;
3416
5416
  exports.createClient = createClient;
3417
5417
  exports.createLicenseManager = createLicenseManager;