claude-flow 2.7.16 → 2.7.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/claude-flow +1 -1
- package/dist/src/cli/help-formatter.js.map +1 -1
- package/dist/src/cli/simple-cli.js +0 -104
- package/dist/src/cli/simple-cli.js.map +1 -1
- package/dist/src/cli/simple-commands/config.js +0 -124
- package/dist/src/cli/simple-commands/config.js.map +1 -1
- package/dist/src/cli/simple-commands/memory.js +3 -2
- package/dist/src/cli/simple-commands/memory.js.map +1 -1
- package/dist/src/core/version.js +1 -1
- package/dist/src/reasoningbank/reasoningbank-adapter.js +3 -2
- package/dist/src/reasoningbank/reasoningbank-adapter.js.map +1 -1
- package/dist/src/utils/key-redactor.js.map +1 -1
- package/dist/src/utils/metrics-reader.js +29 -41
- package/dist/src/utils/metrics-reader.js.map +1 -1
- package/package.json +1 -1
- package/src/cli/simple-commands/memory.js +6 -3
- package/src/reasoningbank/reasoningbank-adapter.js +6 -3
package/dist/src/reasoningbank/reasoningbank-adapter.js.map
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/reasoningbank/reasoningbank-adapter.js"],"sourcesContent":["/**\n * ReasoningBank Adapter for Claude-Flow (Node.js Backend)\n *\n * Uses agentic-flow@1.5.13 Node.js backend with SQLite for persistent storage\n * Provides semantic search via embeddings and MMR ranking\n *\n * Backend: SQLite with better-sqlite3\n * Features: Persistent storage, semantic search, memory consolidation\n */\n\nimport * as ReasoningBank from 'agentic-flow/reasoningbank';\nimport { v4 as uuidv4 } from 'uuid';\n\n// Backend instance (singleton)\nlet backendInitialized = false;\nlet initPromise = null;\n\n// Query result cache (LRU)\nconst queryCache = new Map();\nconst CACHE_SIZE = 100;\nconst CACHE_TTL = 60000; // 60 seconds\n\n/**\n * Initialize ReasoningBank Node.js backend\n * @returns {Promise<boolean>}\n */\nasync function ensureInitialized() {\n if (backendInitialized) {\n return true;\n }\n\n if (initPromise) {\n return initPromise;\n }\n\n initPromise = (async () => {\n try {\n // Initialize Node.js backend with SQLite database\n await ReasoningBank.initialize();\n backendInitialized = true;\n console.log('[ReasoningBank] Node.js backend initialized successfully');\n return true;\n } catch (error) {\n console.error('[ReasoningBank] Backend initialization failed:', error);\n\n // Check if this is the better-sqlite3 missing error (npx issue)\n if (error.message?.includes('BetterSqlite3 is not a constructor') ||\n error.message?.includes('better-sqlite3')) {\n const isNpx = process.env.npm_config_user_agent?.includes('npx') ||\n process.cwd().includes('_npx');\n\n if (isNpx) {\n console.error('\\n⚠️ NPX LIMITATION DETECTED\\n');\n console.error('ReasoningBank requires better-sqlite3, which is not available in npx temp directories.\\n');\n console.error('📚 Solutions:\\n');\n console.error(' 1. LOCAL INSTALL (Recommended):');\n console.error(' npm install && node_modules/.bin/claude-flow memory store \"key\" \"value\"\\n');\n console.error(' 2. USE MCP TOOLS instead:');\n console.error(' mcp__claude-flow__memory_usage({ action: \"store\", key: \"test\", value: \"data\" })\\n');\n console.error(' 3. 
USE JSON FALLBACK:');\n console.error(' npx claude-flow@alpha memory store \"key\" \"value\" --no-reasoningbank\\n');\n console.error('See: docs/MEMORY_COMMAND_FIX.md for details\\n');\n }\n }\n\n throw new Error(`Failed to initialize ReasoningBank: ${error.message}`);\n }\n })();\n\n return initPromise;\n}\n\n/**\n * Initialize ReasoningBank database (Node.js version)\n */\nexport async function initializeReasoningBank() {\n // Initialize the Node.js backend\n await ensureInitialized();\n return true;\n}\n\n/**\n * Store a memory in ReasoningBank (Node.js backend with SQLite)\n *\n * Maps claude-flow memory model to ReasoningBank pattern model:\n * - key -> title\n * - value -> content (searchable text)\n * - namespace -> domain\n * - confidence -> confidence score\n */\nexport async function storeMemory(key, value, options = {}) {\n await ensureInitialized();\n\n try {\n const memoryId = options.id || uuidv4();\n\n // Map our memory model to ReasoningBank pattern model\n const memory = {\n id: memoryId,\n type: 'reasoning_memory',\n pattern_data: {\n title: key,\n content: value,\n domain: options.namespace || 'default',\n agent: options.agent || 'memory-agent',\n task_type: options.type || 'fact',\n // Store original values for compatibility\n original_key: key,\n original_value: value,\n namespace: options.namespace || 'default'\n },\n confidence: options.confidence || 0.8,\n usage_count: 0\n };\n\n // Store memory using Node.js backend\n ReasoningBank.db.upsertMemory(memory);\n\n // Generate and store embedding for semantic search\n try {\n const embedding = await ReasoningBank.computeEmbedding(value);\n ReasoningBank.db.upsertEmbedding({\n id: memoryId,\n model: 'text-embedding-3-small', // Default model\n dims: embedding.length,\n vector: embedding\n });\n } catch (embeddingError) {\n console.warn('[ReasoningBank] Failed to generate embedding:', embeddingError.message);\n // Continue without embedding - memory is still stored\n }\n\n // Invalidate query cache when new memory is added\n queryCache.clear();\n\n return memoryId;\n } catch (error) {\n console.error('[ReasoningBank] storeMemory failed:', error);\n throw new Error(`Failed to store memory: ${error.message}`);\n }\n}\n\n/**\n * Query memories from ReasoningBank (Node.js backend with semantic search)\n *\n * Uses retrieveMemories for semantic search via embeddings and MMR ranking\n * Fallback to database query if semantic search fails\n */\nexport async function queryMemories(searchQuery, options = {}) {\n // Check cache first\n const cached = getCachedQuery(searchQuery, options);\n if (cached) {\n return cached;\n }\n\n await ensureInitialized();\n const limit = options.limit || 10;\n // Accept both 'namespace' and 'domain' for compatibility\n const namespace = options.namespace || options.domain || 'default';\n\n try {\n // Try semantic search first using retrieveMemories\n const results = await ReasoningBank.retrieveMemories(searchQuery, {\n domain: namespace,\n agent: options.agent || 'query-agent',\n k: limit,\n minConfidence: options.minConfidence || 0.3\n });\n\n // Map backend results to our memory format\n // retrieveMemories returns: { id, title, content, description, score, components }\n const memories = results.map(memory => ({\n id: memory.id,\n key: memory.title || 'unknown',\n value: memory.content || memory.description || '',\n namespace: namespace, // Use the namespace from our query\n confidence: memory.components?.reliability || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at 
|| new Date().toISOString(),\n score: memory.score || 0,\n // Include original pattern for debugging\n _pattern: memory\n }));\n\n // If no results, try direct database query as fallback\n if (memories.length === 0) {\n console.warn('[ReasoningBank] Semantic search returned 0 results, trying database fallback');\n const fallbackResults = ReasoningBank.db.fetchMemoryCandidates({\n domain: namespace,\n minConfidence: options.minConfidence || 0.3\n });\n\n const fallbackMemories = fallbackResults.slice(0, limit).map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || memory.pattern_data?.original_key || 'unknown',\n value: memory.pattern_data?.content || memory.pattern_data?.original_value || '',\n namespace: memory.pattern_data?.domain || memory.pattern_data?.namespace || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n\n // Cache and return fallback results\n setCachedQuery(searchQuery, options, fallbackMemories);\n return fallbackMemories;\n }\n\n // Cache successful results\n setCachedQuery(searchQuery, options, memories);\n return memories;\n } catch (error) {\n console.warn('[ReasoningBank] Query failed, trying database fallback:', error.message);\n\n try {\n // Final fallback: direct database query\n const fallbackResults = ReasoningBank.db.fetchMemoryCandidates({\n domain: namespace,\n minConfidence: options.minConfidence || 0.3\n });\n\n const fallbackMemories = fallbackResults.slice(0, limit).map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || 'unknown',\n value: memory.pattern_data?.content || '',\n namespace: memory.pattern_data?.domain || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n\n setCachedQuery(searchQuery, options, fallbackMemories);\n return fallbackMemories;\n } catch (fallbackError) {\n console.error('[ReasoningBank] All query methods failed:', fallbackError);\n return [];\n }\n }\n}\n\n/**\n * List all memories (using Node.js backend database query)\n */\nexport async function listMemories(options = {}) {\n await ensureInitialized();\n const limit = options.limit || 10;\n const namespace = options.namespace;\n\n try {\n let memories;\n\n if (namespace && namespace !== 'default') {\n // Filter by namespace/domain\n const allMemories = ReasoningBank.db.getAllActiveMemories();\n memories = allMemories\n .filter(m => m.pattern_data?.domain === namespace)\n .slice(0, limit);\n } else {\n // Get all active memories\n memories = ReasoningBank.db.getAllActiveMemories().slice(0, limit);\n }\n\n return memories.map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || memory.pattern_data?.original_key || 'unknown',\n value: memory.pattern_data?.content || memory.pattern_data?.original_value || '',\n namespace: memory.pattern_data?.domain || memory.pattern_data?.namespace || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n } catch (error) {\n console.error('[ReasoningBank] listMemories failed:', error);\n return [];\n }\n}\n\n/**\n * Get ReasoningBank statistics (Node.js backend)\n */\nexport async function getStatus() {\n await ensureInitialized();\n\n try {\n const db = ReasoningBank.db.getDb();\n\n // Count patterns\n const patterns = db.prepare(\"SELECT COUNT(*) as count FROM patterns WHERE type = 
'reasoning_memory'\").get();\n const embeddings = db.prepare(\"SELECT COUNT(*) as count FROM pattern_embeddings\").get();\n const trajectories = db.prepare(\"SELECT COUNT(*) as count FROM task_trajectories\").get();\n const links = db.prepare(\"SELECT COUNT(*) as count FROM pattern_links\").get();\n\n // Get average confidence\n const avgConf = db.prepare(\"SELECT AVG(confidence) as avg FROM patterns WHERE type = 'reasoning_memory'\").get();\n\n // Count unique domains\n const domains = db.prepare(\"SELECT COUNT(DISTINCT json_extract(pattern_data, '$.domain')) as count FROM patterns WHERE type = 'reasoning_memory'\").get();\n\n return {\n total_memories: patterns.count || 0,\n total_categories: domains.count || 0,\n storage_backend: 'SQLite (Node.js)',\n database_path: process.env.CLAUDE_FLOW_DB_PATH || '.swarm/memory.db',\n performance: 'SQLite with persistent storage',\n avg_confidence: avgConf.avg || 0.8,\n total_embeddings: embeddings.count || 0,\n total_trajectories: trajectories.count || 0,\n total_links: links.count || 0\n };\n } catch (error) {\n console.error('[ReasoningBank] getStatus failed:', error);\n return {\n total_memories: 0,\n error: error.message\n };\n }\n}\n\n/**\n * Check which ReasoningBank tables are present (Node.js backend)\n */\nexport async function checkReasoningBankTables() {\n try {\n await ensureInitialized();\n const db = ReasoningBank.db.getDb();\n\n const tables = db.prepare(\"SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'pattern%'\").all();\n const tableNames = tables.map(t => t.name);\n\n const requiredTables = ['patterns', 'pattern_embeddings', 'pattern_links', 'task_trajectories'];\n const missingTables = requiredTables.filter(t => !tableNames.includes(t));\n\n return {\n exists: true,\n existingTables: tableNames,\n missingTables: missingTables,\n requiredTables: requiredTables,\n backend: 'SQLite (Node.js)',\n note: missingTables.length > 0 ? 
'Some tables are missing - run migrations' : 'All tables present'\n };\n } catch (error) {\n return {\n exists: false,\n existingTables: [],\n missingTables: [],\n requiredTables: [],\n error: error.message\n };\n }\n}\n\n/**\n * Migrate existing database (Node.js backend - run migrations)\n */\nexport async function migrateReasoningBank() {\n try {\n await ReasoningBank.db.runMigrations();\n\n return {\n success: true,\n message: 'Database migrations completed successfully',\n migrated: true,\n database_path: process.env.CLAUDE_FLOW_DB_PATH || '.swarm/memory.db'\n };\n } catch (error) {\n return {\n success: false,\n message: `Migration failed: ${error.message}`,\n error: error.message\n };\n }\n}\n\n/**\n * Get cached query results\n */\nfunction getCachedQuery(searchQuery, options) {\n const cacheKey = JSON.stringify({ searchQuery, options });\n const cached = queryCache.get(cacheKey);\n\n if (cached && Date.now() - cached.timestamp < CACHE_TTL) {\n return cached.results;\n }\n\n return null;\n}\n\n/**\n * Set cached query results (LRU eviction)\n */\nfunction setCachedQuery(searchQuery, options, results) {\n const cacheKey = JSON.stringify({ searchQuery, options });\n\n // LRU eviction\n if (queryCache.size >= CACHE_SIZE) {\n const firstKey = queryCache.keys().next().value;\n queryCache.delete(firstKey);\n }\n\n queryCache.set(cacheKey, {\n results,\n timestamp: Date.now()\n });\n}\n\n/**\n * Close database connection and cleanup resources\n * Should be called when done with ReasoningBank operations\n */\nexport function cleanup() {\n try {\n if (backendInitialized) {\n // Clear embedding cache (prevents memory leaks)\n ReasoningBank.clearEmbeddingCache();\n\n // Close database connection\n ReasoningBank.db.closeDb();\n backendInitialized = false;\n initPromise = null;\n console.log('[ReasoningBank] Database connection closed');\n }\n } catch (error) {\n console.error('[ReasoningBank] Cleanup failed:', error.message);\n 
}\n}\n"],"names":["ReasoningBank","v4","uuidv4","backendInitialized","initPromise","queryCache","Map","CACHE_SIZE","CACHE_TTL","ensureInitialized","initialize","console","log","error","message","includes","isNpx","process","env","npm_config_user_agent","cwd","Error","initializeReasoningBank","storeMemory","key","value","options","memoryId","id","memory","type","pattern_data","title","content","domain","namespace","agent","task_type","original_key","original_value","confidence","usage_count","db","upsertMemory","embedding","computeEmbedding","upsertEmbedding","model","dims","length","vector","embeddingError","warn","clear","queryMemories","searchQuery","cached","getCachedQuery","limit","results","retrieveMemories","k","minConfidence","memories","map","description","components","reliability","created_at","Date","toISOString","score","_pattern","fallbackResults","fetchMemoryCandidates","fallbackMemories","slice","setCachedQuery","fallbackError","listMemories","allMemories","getAllActiveMemories","filter","m","getStatus","getDb","patterns","prepare","get","embeddings","trajectories","links","avgConf","domains","total_memories","count","total_categories","storage_backend","database_path","CLAUDE_FLOW_DB_PATH","performance","avg_confidence","avg","total_embeddings","total_trajectories","total_links","checkReasoningBankTables","tables","all","tableNames","t","name","requiredTables","missingTables","exists","existingTables","backend","note","migrateReasoningBank","runMigrations","success","migrated","cacheKey","JSON","stringify","now","timestamp","size","firstKey","keys","next","delete","set","cleanup","clearEmbeddingCache","closeDb"],"mappings":"AAUA,YAAYA,mBAAmB,6BAA6B;AAC5D,SAASC,MAAMC,MAAM,QAAQ,OAAO;AAGpC,IAAIC,qBAAqB;AACzB,IAAIC,cAAc;AAGlB,MAAMC,aAAa,IAAIC;AACvB,MAAMC,aAAa;AACnB,MAAMC,YAAY;AAMlB,eAAeC;IACb,IAAIN,oBAAoB;QACtB,OAAO;IACT;IAEA,IAAIC,aAAa;QACf,OAAOA;IACT;IAEAA,cAAc,AAAC,CAAA;QACb,IAAI;YAEF,MAAMJ,cAAcU,UAAU;YAC9BP,qBAAqB;YACrBQ,QAAQC,GAAG,CAAC;YACZ,OAAO;QACT,EAAE,OAAOC,OAAO;YACdF,QAAQE,KAAK,CAAC,kDAAkDA;YAGhE,IAAIA,MAAMC,OAAO,EAAEC,SAAS,yCACxBF,MAAMC,OAAO,EAAEC,SAAS,mBAAmB;gBAC7C,MAAMC,QAAQC,QAAQC,GAAG,CAACC,qBAAqB,EAAEJ,SAAS,UAC5CE,QAAQG,GAAG,GAAGL,QAAQ,CAAC;gBAErC,IAAIC,OAAO;oBACTL,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;gBAChB;YACF;YAEA,MAAM,IAAIQ,MAAM,CAAC,oCAAoC,EAAER,MAAMC,OAAO,EAAE;QACxE;IACF,CAAA;IAEA,OAAOV;AACT;AAKA,OAAO,eAAekB;IAEpB,MAAMb;IACN,OAAO;AACT;AAWA,OAAO,eAAec,YAAYC,GAAG,EAAEC,KAAK,EAAEC,UAAU,CAAC,CAAC;IACxD,MAAMjB;IAEN,IAAI;QACF,MAAMkB,WAAWD,QAAQE,EAAE,IAAI1B;QAG/B,MAAM2B,SAAS;YACbD,IAAID;YACJG,MAAM;YACNC,cAAc;gBACZC,OAAOR;gBACPS,SAASR;gBACTS,QAAQR,QAAQS,SAAS,IAAI;gBAC7BC,OAAOV,QAAQU,KAAK,IAAI;gBACxBC,WAAWX,QAAQI,IAAI,IAAI;gBAE3BQ,cAAcd;gBACde,gBAAgBd;gBAChBU,WAAWT,QAAQS,SAAS,IAAI;YAClC;YACAK,YAAYd,QAAQc,UAAU,IAAI;YAClCC,aAAa;QACf;QAGAzC,cAAc0C,EAAE,CAACC,YAAY,CAACd;QAG9B,IAAI;YACF,MAAMe,YAAY,MAAM5C,cAAc6C,gBAAgB,CAACpB;YACvDzB,cAAc0C,EAAE,CAACI,eAAe,CAAC;gBAC/BlB,IAAID;gBACJoB,OAAO;gBACPC,MAAMJ,UAAUK,MAAM;gBACtBC,QAAQN;YACV;QACF,EAAE,OAAOO,gBAAgB;YACvBxC,QAAQyC,IAAI,CAAC,iDAAiDD,eAAerC,OAAO;QAEtF;QAGAT,WAAWgD,KAAK;QAEhB,OAAO1B;IACT,EAAE,OAAOd,OAAO;QACdF,QAAQE,KAAK,CAAC,uCAAuCA;QACrD,MAAM,IAAIQ,MAAM,CAAC,wBAAwB,EAAER,MAAMC,OAAO,EAAE;IAC5D;AACF;AAQA,OAAO,eAAewC,cAAcC,WAAW,EAAE7B,UAAU,CAAC,CAAC;IAE3D,MAAM8B,SAASC,eAAeF,aAAa7B;IAC3C,IAAI8B,QAAQ;QACV,OAAOA;IACT;IAEA,MAAM/C;IACN,MAAMiD,QA
AQhC,QAAQgC,KAAK,IAAI;IAE/B,MAAMvB,YAAYT,QAAQS,SAAS,IAAIT,QAAQQ,MAAM,IAAI;IAEzD,IAAI;QAEF,MAAMyB,UAAU,MAAM3D,cAAc4D,gBAAgB,CAACL,aAAa;YAChErB,QAAQC;YACRC,OAAOV,QAAQU,KAAK,IAAI;YACxByB,GAAGH;YACHI,eAAepC,QAAQoC,aAAa,IAAI;QAC1C;QAIA,MAAMC,WAAWJ,QAAQK,GAAG,CAACnC,CAAAA,SAAW,CAAA;gBACtCD,IAAIC,OAAOD,EAAE;gBACbJ,KAAKK,OAAOG,KAAK,IAAI;gBACrBP,OAAOI,OAAOI,OAAO,IAAIJ,OAAOoC,WAAW,IAAI;gBAC/C9B,WAAWA;gBACXK,YAAYX,OAAOqC,UAAU,EAAEC,eAAe;gBAC9C1B,aAAaZ,OAAOY,WAAW,IAAI;gBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACvDC,OAAO1C,OAAO0C,KAAK,IAAI;gBAEvBC,UAAU3C;YACZ,CAAA;QAGA,IAAIkC,SAASd,MAAM,KAAK,GAAG;YACzBtC,QAAQyC,IAAI,CAAC;YACb,MAAMqB,kBAAkBzE,cAAc0C,EAAE,CAACgC,qBAAqB,CAAC;gBAC7DxC,QAAQC;gBACR2B,eAAepC,QAAQoC,aAAa,IAAI;YAC1C;YAEA,MAAMa,mBAAmBF,gBAAgBG,KAAK,CAAC,GAAGlB,OAAOM,GAAG,CAACnC,CAAAA,SAAW,CAAA;oBACtED,IAAIC,OAAOD,EAAE;oBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAASH,OAAOE,YAAY,EAAEO,gBAAgB;oBACxEb,OAAOI,OAAOE,YAAY,EAAEE,WAAWJ,OAAOE,YAAY,EAAEQ,kBAAkB;oBAC9EJ,WAAWN,OAAOE,YAAY,EAAEG,UAAUL,OAAOE,YAAY,EAAEI,aAAa;oBAC5EK,YAAYX,OAAOW,UAAU,IAAI;oBACjCC,aAAaZ,OAAOY,WAAW,IAAI;oBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACzD,CAAA;YAGAO,eAAetB,aAAa7B,SAASiD;YACrC,OAAOA;QACT;QAGAE,eAAetB,aAAa7B,SAASqC;QACrC,OAAOA;IACT,EAAE,OAAOlD,OAAO;QACdF,QAAQyC,IAAI,CAAC,2DAA2DvC,MAAMC,OAAO;QAErF,IAAI;YAEF,MAAM2D,kBAAkBzE,cAAc0C,EAAE,CAACgC,qBAAqB,CAAC;gBAC7DxC,QAAQC;gBACR2B,eAAepC,QAAQoC,aAAa,IAAI;YAC1C;YAEA,MAAMa,mBAAmBF,gBAAgBG,KAAK,CAAC,GAAGlB,OAAOM,GAAG,CAACnC,CAAAA,SAAW,CAAA;oBACtED,IAAIC,OAAOD,EAAE;oBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAAS;oBACnCP,OAAOI,OAAOE,YAAY,EAAEE,WAAW;oBACvCE,WAAWN,OAAOE,YAAY,EAAEG,UAAU;oBAC1CM,YAAYX,OAAOW,UAAU,IAAI;oBACjCC,aAAaZ,OAAOY,WAAW,IAAI;oBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACzD,CAAA;YAEAO,eAAetB,aAAa7B,SAASiD;YACrC,OAAOA;QACT,EAAE,OAAOG,eAAe;YACtBnE,QAAQE,KAAK,CAAC,6CAA6CiE;YAC3D,OAAO,EAAE;QACX;IACF;AACF;AAKA,OAAO,eAAeC,aAAarD,UAAU,CAAC,CAAC;IAC7C,MAAMjB;IACN,MAAMiD,QAAQhC,QAAQgC,KAAK,IAAI;IAC/B,MAAMvB,YAAYT,QAAQS,SAAS;IAEnC,IAAI;QACF,IAAI4B;QAEJ,IAAI5B,aAAaA,cAAc,WAAW;YAExC,MAAM6C,cAAchF,cAAc0C,EAAE,CAACuC,oBAAoB;YACzDlB,WAAWiB,YACRE,MAAM,CAACC,CAAAA,IAAKA,EAAEpD,YAAY,EAAEG,WAAWC,WACvCyC,KAAK,CAAC,GAAGlB;QACd,OAAO;YAELK,WAAW/D,cAAc0C,EAAE,CAACuC,oBAAoB,GAAGL,KAAK,CAAC,GAAGlB;QAC9D;QAEA,OAAOK,SAASC,GAAG,CAACnC,CAAAA,SAAW,CAAA;gBAC7BD,IAAIC,OAAOD,EAAE;gBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAASH,OAAOE,YAAY,EAAEO,gBAAgB;gBACxEb,OAAOI,OAAOE,YAAY,EAAEE,WAAWJ,OAAOE,YAAY,EAAEQ,kBAAkB;gBAC9EJ,WAAWN,OAAOE,YAAY,EAAEG,UAAUL,OAAOE,YAAY,EAAEI,aAAa;gBAC5EK,YAAYX,OAAOW,UAAU,IAAI;gBACjCC,aAAaZ,OAAOY,WAAW,IAAI;gBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;YACzD,CAAA;IACF,EAAE,OAAOzD,OAAO;QACdF,QAAQE,KAAK,CAAC,wCAAwCA;QACtD,OAAO,EAAE;IACX;AACF;AAKA,OAAO,eAAeuE;IACpB,MAAM3E;IAEN,IAAI;QACF,MAAMiC,KAAK1C,cAAc0C,EAAE,CAAC2C,KAAK;QAGjC,MAAMC,WAAW5C,GAAG6C,OAAO,CAAC,0EAA0EC,GAAG;QACzG,MAAMC,aAAa/C,GAAG6C,OAAO,CAAC,oDAAoDC,GAAG;QACrF,MAAME,eAAehD,GAAG6C,OAAO,CAAC,mDAAmDC,GAAG;QACtF,MAAMG,QAAQjD,GAAG6C,OAAO,CAAC,+CAA+CC,GAAG;QAG3E,MAAMI,UAAUlD,GAAG6C,OAAO,CAAC,+EAA+EC,GAAG;QAG7G,MAAMK,UAAUnD,GAAG6C,OAAO,CAAC,wHAAwHC,GAAG;QAEtJ,OAAO;YACLM,gBAAgBR,SAASS,KAAK,IAAI;YAClCC,kBAAkBH,QAAQE,KAAK,IAAI;YACnCE,iBAAiB;YACjBC,eAAejF,QAAQC,GAAG,CAACiF,mBAAmB,IAAI;YAClDC,aAAa;YACbC,gBAAgBT,QAAQU,GAAG,IAAI;YAC/BC,kBAAkBd,WAAWM,KAAK,IAAI;YACtCS,oBAAoBd,aAAaK,KAAK,IAAI;YAC1CU,aAAad,MAAMI,KAAK,IAAI;QAC9B;IACF,EAAE,OAAOlF,OAAO;QACdF,QAAQE,KAAK,CAAC,qCAAqCA;QACnD,OAAO;YACLiF,gBAAgB;YAChBjF,OAAOA,MAAMC,OAAO;QACtB;IACF;AACF;AAKA,OAAO,eAAe4F;IACpB,IAAI;QACF,MAAMjG;QACN,MAAMiC,KAAK1C,cAAc0C,EAAE,CAAC2C,KAAK;QAEjC,MAAMsB,SAASj
E,GAAG6C,OAAO,CAAC,8EAA8EqB,GAAG;QAC3G,MAAMC,aAAaF,OAAO3C,GAAG,CAAC8C,CAAAA,IAAKA,EAAEC,IAAI;QAEzC,MAAMC,iBAAiB;YAAC;YAAY;YAAsB;YAAiB;SAAoB;QAC/F,MAAMC,gBAAgBD,eAAe9B,MAAM,CAAC4B,CAAAA,IAAK,CAACD,WAAW9F,QAAQ,CAAC+F;QAEtE,OAAO;YACLI,QAAQ;YACRC,gBAAgBN;YAChBI,eAAeA;YACfD,gBAAgBA;YAChBI,SAAS;YACTC,MAAMJ,cAAchE,MAAM,GAAG,IAAI,6CAA6C;QAChF;IACF,EAAE,OAAOpC,OAAO;QACd,OAAO;YACLqG,QAAQ;YACRC,gBAAgB,EAAE;YAClBF,eAAe,EAAE;YACjBD,gBAAgB,EAAE;YAClBnG,OAAOA,MAAMC,OAAO;QACtB;IACF;AACF;AAKA,OAAO,eAAewG;IACpB,IAAI;QACF,MAAMtH,cAAc0C,EAAE,CAAC6E,aAAa;QAEpC,OAAO;YACLC,SAAS;YACT1G,SAAS;YACT2G,UAAU;YACVvB,eAAejF,QAAQC,GAAG,CAACiF,mBAAmB,IAAI;QACpD;IACF,EAAE,OAAOtF,OAAO;QACd,OAAO;YACL2G,SAAS;YACT1G,SAAS,CAAC,kBAAkB,EAAED,MAAMC,OAAO,EAAE;YAC7CD,OAAOA,MAAMC,OAAO;QACtB;IACF;AACF;AAKA,SAAS2C,eAAeF,WAAW,EAAE7B,OAAO;IAC1C,MAAMgG,WAAWC,KAAKC,SAAS,CAAC;QAAErE;QAAa7B;IAAQ;IACvD,MAAM8B,SAASnD,WAAWmF,GAAG,CAACkC;IAE9B,IAAIlE,UAAUa,KAAKwD,GAAG,KAAKrE,OAAOsE,SAAS,GAAGtH,WAAW;QACvD,OAAOgD,OAAOG,OAAO;IACvB;IAEA,OAAO;AACT;AAKA,SAASkB,eAAetB,WAAW,EAAE7B,OAAO,EAAEiC,OAAO;IACnD,MAAM+D,WAAWC,KAAKC,SAAS,CAAC;QAAErE;QAAa7B;IAAQ;IAGvD,IAAIrB,WAAW0H,IAAI,IAAIxH,YAAY;QACjC,MAAMyH,WAAW3H,WAAW4H,IAAI,GAAGC,IAAI,GAAGzG,KAAK;QAC/CpB,WAAW8H,MAAM,CAACH;IACpB;IAEA3H,WAAW+H,GAAG,CAACV,UAAU;QACvB/D;QACAmE,WAAWzD,KAAKwD,GAAG;IACrB;AACF;AAMA,OAAO,SAASQ;IACd,IAAI;QACF,IAAIlI,oBAAoB;YAEtBH,cAAcsI,mBAAmB;YAGjCtI,cAAc0C,EAAE,CAAC6F,OAAO;YACxBpI,qBAAqB;YACrBC,cAAc;YACdO,QAAQC,GAAG,CAAC;QACd;IACF,EAAE,OAAOC,OAAO;QACdF,QAAQE,KAAK,CAAC,mCAAmCA,MAAMC,OAAO;IAChE;AACF"}
+
{"version":3,"sources":["../../../src/reasoningbank/reasoningbank-adapter.js"],"sourcesContent":["/**\n * ReasoningBank Adapter for Claude-Flow (Node.js Backend)\n *\n * Uses agentic-flow@1.5.13 Node.js backend with SQLite for persistent storage\n * Provides semantic search via embeddings and MMR ranking\n *\n * Backend: SQLite with better-sqlite3\n * Features: Persistent storage, semantic search, memory consolidation\n */\n\nimport * as ReasoningBank from 'agentic-flow/reasoningbank';\nimport { v4 as uuidv4 } from 'uuid';\n\n// Backend instance (singleton)\nlet backendInitialized = false;\nlet initPromise = null;\n\n// Query result cache (LRU)\nconst queryCache = new Map();\nconst CACHE_SIZE = 100;\nconst CACHE_TTL = 60000; // 60 seconds\n\n/**\n * Initialize ReasoningBank Node.js backend\n * @returns {Promise<boolean>}\n */\nasync function ensureInitialized() {\n if (backendInitialized) {\n return true;\n }\n\n if (initPromise) {\n return initPromise;\n }\n\n initPromise = (async () => {\n try {\n // Initialize Node.js backend with SQLite database\n await ReasoningBank.initialize();\n backendInitialized = true;\n console.log('[ReasoningBank] Node.js backend initialized successfully');\n return true;\n } catch (error) {\n console.error('[ReasoningBank] Backend initialization failed:', error);\n\n // Check if this is the better-sqlite3 missing error (npx issue)\n const isSqliteError = error.message?.includes('BetterSqlite3 is not a constructor') ||\n error.message?.includes('better-sqlite3') ||\n error.message?.includes('could not run migrations');\n\n if (isSqliteError) {\n const isNpx = process.env.npm_config_user_agent?.includes('npx') ||\n process.cwd().includes('_npx');\n\n if (isNpx) {\n console.error('\\n⚠️ NPX LIMITATION DETECTED\\n');\n console.error('ReasoningBank requires better-sqlite3, which is not available in npx temp directories.\\n');\n console.error('📚 Solutions:\\n');\n console.error(' 1. LOCAL INSTALL (Recommended):');\n console.error(' npm install && node_modules/.bin/claude-flow memory store \"key\" \"value\"\\n');\n console.error(' 2. USE MCP TOOLS instead:');\n console.error(' mcp__claude-flow__memory_usage({ action: \"store\", key: \"test\", value: \"data\" })\\n');\n console.error(' 3. 
USE JSON FALLBACK:');\n console.error(' npx claude-flow@alpha memory store \"key\" \"value\" --basic\\n');\n console.error('See: docs/MEMORY_COMMAND_FIX.md for details\\n');\n }\n }\n\n throw new Error(`Failed to initialize ReasoningBank: ${error.message}`);\n }\n })();\n\n return initPromise;\n}\n\n/**\n * Initialize ReasoningBank database (Node.js version)\n */\nexport async function initializeReasoningBank() {\n // Initialize the Node.js backend\n await ensureInitialized();\n return true;\n}\n\n/**\n * Store a memory in ReasoningBank (Node.js backend with SQLite)\n *\n * Maps claude-flow memory model to ReasoningBank pattern model:\n * - key -> title\n * - value -> content (searchable text)\n * - namespace -> domain\n * - confidence -> confidence score\n */\nexport async function storeMemory(key, value, options = {}) {\n await ensureInitialized();\n\n try {\n const memoryId = options.id || uuidv4();\n\n // Map our memory model to ReasoningBank pattern model\n const memory = {\n id: memoryId,\n type: 'reasoning_memory',\n pattern_data: {\n title: key,\n content: value,\n domain: options.namespace || 'default',\n agent: options.agent || 'memory-agent',\n task_type: options.type || 'fact',\n // Store original values for compatibility\n original_key: key,\n original_value: value,\n namespace: options.namespace || 'default'\n },\n confidence: options.confidence || 0.8,\n usage_count: 0\n };\n\n // Store memory using Node.js backend\n ReasoningBank.db.upsertMemory(memory);\n\n // Generate and store embedding for semantic search\n try {\n const embedding = await ReasoningBank.computeEmbedding(value);\n ReasoningBank.db.upsertEmbedding({\n id: memoryId,\n model: 'text-embedding-3-small', // Default model\n dims: embedding.length,\n vector: embedding\n });\n } catch (embeddingError) {\n console.warn('[ReasoningBank] Failed to generate embedding:', embeddingError.message);\n // Continue without embedding - memory is still stored\n }\n\n // Invalidate query cache when new memory is added\n queryCache.clear();\n\n return memoryId;\n } catch (error) {\n console.error('[ReasoningBank] storeMemory failed:', error);\n throw new Error(`Failed to store memory: ${error.message}`);\n }\n}\n\n/**\n * Query memories from ReasoningBank (Node.js backend with semantic search)\n *\n * Uses retrieveMemories for semantic search via embeddings and MMR ranking\n * Fallback to database query if semantic search fails\n */\nexport async function queryMemories(searchQuery, options = {}) {\n // Check cache first\n const cached = getCachedQuery(searchQuery, options);\n if (cached) {\n return cached;\n }\n\n await ensureInitialized();\n const limit = options.limit || 10;\n // Accept both 'namespace' and 'domain' for compatibility\n const namespace = options.namespace || options.domain || 'default';\n\n try {\n // Try semantic search first using retrieveMemories\n const results = await ReasoningBank.retrieveMemories(searchQuery, {\n domain: namespace,\n agent: options.agent || 'query-agent',\n k: limit,\n minConfidence: options.minConfidence || 0.3\n });\n\n // Map backend results to our memory format\n // retrieveMemories returns: { id, title, content, description, score, components }\n const memories = results.map(memory => ({\n id: memory.id,\n key: memory.title || 'unknown',\n value: memory.content || memory.description || '',\n namespace: namespace, // Use the namespace from our query\n confidence: memory.components?.reliability || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new 
Date().toISOString(),\n score: memory.score || 0,\n // Include original pattern for debugging\n _pattern: memory\n }));\n\n // If no results, try direct database query as fallback\n if (memories.length === 0) {\n console.warn('[ReasoningBank] Semantic search returned 0 results, trying database fallback');\n const fallbackResults = ReasoningBank.db.fetchMemoryCandidates({\n domain: namespace,\n minConfidence: options.minConfidence || 0.3\n });\n\n const fallbackMemories = fallbackResults.slice(0, limit).map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || memory.pattern_data?.original_key || 'unknown',\n value: memory.pattern_data?.content || memory.pattern_data?.original_value || '',\n namespace: memory.pattern_data?.domain || memory.pattern_data?.namespace || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n\n // Cache and return fallback results\n setCachedQuery(searchQuery, options, fallbackMemories);\n return fallbackMemories;\n }\n\n // Cache successful results\n setCachedQuery(searchQuery, options, memories);\n return memories;\n } catch (error) {\n console.warn('[ReasoningBank] Query failed, trying database fallback:', error.message);\n\n try {\n // Final fallback: direct database query\n const fallbackResults = ReasoningBank.db.fetchMemoryCandidates({\n domain: namespace,\n minConfidence: options.minConfidence || 0.3\n });\n\n const fallbackMemories = fallbackResults.slice(0, limit).map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || 'unknown',\n value: memory.pattern_data?.content || '',\n namespace: memory.pattern_data?.domain || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n\n setCachedQuery(searchQuery, options, fallbackMemories);\n return fallbackMemories;\n } catch (fallbackError) {\n console.error('[ReasoningBank] All query methods failed:', fallbackError);\n return [];\n }\n }\n}\n\n/**\n * List all memories (using Node.js backend database query)\n */\nexport async function listMemories(options = {}) {\n await ensureInitialized();\n const limit = options.limit || 10;\n const namespace = options.namespace;\n\n try {\n let memories;\n\n if (namespace && namespace !== 'default') {\n // Filter by namespace/domain\n const allMemories = ReasoningBank.db.getAllActiveMemories();\n memories = allMemories\n .filter(m => m.pattern_data?.domain === namespace)\n .slice(0, limit);\n } else {\n // Get all active memories\n memories = ReasoningBank.db.getAllActiveMemories().slice(0, limit);\n }\n\n return memories.map(memory => ({\n id: memory.id,\n key: memory.pattern_data?.title || memory.pattern_data?.original_key || 'unknown',\n value: memory.pattern_data?.content || memory.pattern_data?.original_value || '',\n namespace: memory.pattern_data?.domain || memory.pattern_data?.namespace || 'default',\n confidence: memory.confidence || 0.8,\n usage_count: memory.usage_count || 0,\n created_at: memory.created_at || new Date().toISOString()\n }));\n } catch (error) {\n console.error('[ReasoningBank] listMemories failed:', error);\n return [];\n }\n}\n\n/**\n * Get ReasoningBank statistics (Node.js backend)\n */\nexport async function getStatus() {\n await ensureInitialized();\n\n try {\n const db = ReasoningBank.db.getDb();\n\n // Count patterns\n const patterns = db.prepare(\"SELECT COUNT(*) as count FROM patterns WHERE type = 
'reasoning_memory'\").get();\n const embeddings = db.prepare(\"SELECT COUNT(*) as count FROM pattern_embeddings\").get();\n const trajectories = db.prepare(\"SELECT COUNT(*) as count FROM task_trajectories\").get();\n const links = db.prepare(\"SELECT COUNT(*) as count FROM pattern_links\").get();\n\n // Get average confidence\n const avgConf = db.prepare(\"SELECT AVG(confidence) as avg FROM patterns WHERE type = 'reasoning_memory'\").get();\n\n // Count unique domains\n const domains = db.prepare(\"SELECT COUNT(DISTINCT json_extract(pattern_data, '$.domain')) as count FROM patterns WHERE type = 'reasoning_memory'\").get();\n\n return {\n total_memories: patterns.count || 0,\n total_categories: domains.count || 0,\n storage_backend: 'SQLite (Node.js)',\n database_path: process.env.CLAUDE_FLOW_DB_PATH || '.swarm/memory.db',\n performance: 'SQLite with persistent storage',\n avg_confidence: avgConf.avg || 0.8,\n total_embeddings: embeddings.count || 0,\n total_trajectories: trajectories.count || 0,\n total_links: links.count || 0\n };\n } catch (error) {\n console.error('[ReasoningBank] getStatus failed:', error);\n return {\n total_memories: 0,\n error: error.message\n };\n }\n}\n\n/**\n * Check which ReasoningBank tables are present (Node.js backend)\n */\nexport async function checkReasoningBankTables() {\n try {\n await ensureInitialized();\n const db = ReasoningBank.db.getDb();\n\n const tables = db.prepare(\"SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'pattern%'\").all();\n const tableNames = tables.map(t => t.name);\n\n const requiredTables = ['patterns', 'pattern_embeddings', 'pattern_links', 'task_trajectories'];\n const missingTables = requiredTables.filter(t => !tableNames.includes(t));\n\n return {\n exists: true,\n existingTables: tableNames,\n missingTables: missingTables,\n requiredTables: requiredTables,\n backend: 'SQLite (Node.js)',\n note: missingTables.length > 0 ? 
'Some tables are missing - run migrations' : 'All tables present'\n };\n } catch (error) {\n return {\n exists: false,\n existingTables: [],\n missingTables: [],\n requiredTables: [],\n error: error.message\n };\n }\n}\n\n/**\n * Migrate existing database (Node.js backend - run migrations)\n */\nexport async function migrateReasoningBank() {\n try {\n await ReasoningBank.db.runMigrations();\n\n return {\n success: true,\n message: 'Database migrations completed successfully',\n migrated: true,\n database_path: process.env.CLAUDE_FLOW_DB_PATH || '.swarm/memory.db'\n };\n } catch (error) {\n return {\n success: false,\n message: `Migration failed: ${error.message}`,\n error: error.message\n };\n }\n}\n\n/**\n * Get cached query results\n */\nfunction getCachedQuery(searchQuery, options) {\n const cacheKey = JSON.stringify({ searchQuery, options });\n const cached = queryCache.get(cacheKey);\n\n if (cached && Date.now() - cached.timestamp < CACHE_TTL) {\n return cached.results;\n }\n\n return null;\n}\n\n/**\n * Set cached query results (LRU eviction)\n */\nfunction setCachedQuery(searchQuery, options, results) {\n const cacheKey = JSON.stringify({ searchQuery, options });\n\n // LRU eviction\n if (queryCache.size >= CACHE_SIZE) {\n const firstKey = queryCache.keys().next().value;\n queryCache.delete(firstKey);\n }\n\n queryCache.set(cacheKey, {\n results,\n timestamp: Date.now()\n });\n}\n\n/**\n * Close database connection and cleanup resources\n * Should be called when done with ReasoningBank operations\n */\nexport function cleanup() {\n try {\n if (backendInitialized) {\n // Clear embedding cache (prevents memory leaks)\n ReasoningBank.clearEmbeddingCache();\n\n // Close database connection\n ReasoningBank.db.closeDb();\n backendInitialized = false;\n initPromise = null;\n console.log('[ReasoningBank] Database connection closed');\n }\n } catch (error) {\n console.error('[ReasoningBank] Cleanup failed:', error.message);\n 
}\n}\n"],"names":["ReasoningBank","v4","uuidv4","backendInitialized","initPromise","queryCache","Map","CACHE_SIZE","CACHE_TTL","ensureInitialized","initialize","console","log","error","isSqliteError","message","includes","isNpx","process","env","npm_config_user_agent","cwd","Error","initializeReasoningBank","storeMemory","key","value","options","memoryId","id","memory","type","pattern_data","title","content","domain","namespace","agent","task_type","original_key","original_value","confidence","usage_count","db","upsertMemory","embedding","computeEmbedding","upsertEmbedding","model","dims","length","vector","embeddingError","warn","clear","queryMemories","searchQuery","cached","getCachedQuery","limit","results","retrieveMemories","k","minConfidence","memories","map","description","components","reliability","created_at","Date","toISOString","score","_pattern","fallbackResults","fetchMemoryCandidates","fallbackMemories","slice","setCachedQuery","fallbackError","listMemories","allMemories","getAllActiveMemories","filter","m","getStatus","getDb","patterns","prepare","get","embeddings","trajectories","links","avgConf","domains","total_memories","count","total_categories","storage_backend","database_path","CLAUDE_FLOW_DB_PATH","performance","avg_confidence","avg","total_embeddings","total_trajectories","total_links","checkReasoningBankTables","tables","all","tableNames","t","name","requiredTables","missingTables","exists","existingTables","backend","note","migrateReasoningBank","runMigrations","success","migrated","cacheKey","JSON","stringify","now","timestamp","size","firstKey","keys","next","delete","set","cleanup","clearEmbeddingCache","closeDb"],"mappings":"AAUA,YAAYA,mBAAmB,6BAA6B;AAC5D,SAASC,MAAMC,MAAM,QAAQ,OAAO;AAGpC,IAAIC,qBAAqB;AACzB,IAAIC,cAAc;AAGlB,MAAMC,aAAa,IAAIC;AACvB,MAAMC,aAAa;AACnB,MAAMC,YAAY;AAMlB,eAAeC;IACb,IAAIN,oBAAoB;QACtB,OAAO;IACT;IAEA,IAAIC,aAAa;QACf,OAAOA;IACT;IAEAA,cAAc,AAAC,CAAA;QACb,IAAI;YAEF,MAAMJ,cAAcU,UAAU;YAC9BP,qBAAqB;YACrBQ,QAAQC,GAAG,CAAC;YACZ,OAAO;QACT,EAAE,OAAOC,OAAO;YACdF,QAAQE,KAAK,CAAC,kDAAkDA;YAGhE,MAAMC,gBAAgBD,MAAME,OAAO,EAAEC,SAAS,yCACzBH,MAAME,OAAO,EAAEC,SAAS,qBACxBH,MAAME,OAAO,EAAEC,SAAS;YAE7C,IAAIF,eAAe;gBACjB,MAAMG,QAAQC,QAAQC,GAAG,CAACC,qBAAqB,EAAEJ,SAAS,UAC5CE,QAAQG,GAAG,GAAGL,QAAQ,CAAC;gBAErC,IAAIC,OAAO;oBACTN,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;oBACdF,QAAQE,KAAK,CAAC;gBAChB;YACF;YAEA,MAAM,IAAIS,MAAM,CAAC,oCAAoC,EAAET,MAAME,OAAO,EAAE;QACxE;IACF,CAAA;IAEA,OAAOX;AACT;AAKA,OAAO,eAAemB;IAEpB,MAAMd;IACN,OAAO;AACT;AAWA,OAAO,eAAee,YAAYC,GAAG,EAAEC,KAAK,EAAEC,UAAU,CAAC,CAAC;IACxD,MAAMlB;IAEN,IAAI;QACF,MAAMmB,WAAWD,QAAQE,EAAE,IAAI3B;QAG/B,MAAM4B,SAAS;YACbD,IAAID;YACJG,MAAM;YACNC,cAAc;gBACZC,OAAOR;gBACPS,SAASR;gBACTS,QAAQR,QAAQS,SAAS,IAAI;gBAC7BC,OAAOV,QAAQU,KAAK,IAAI;gBACxBC,WAAWX,QAAQI,IAAI,IAAI;gBAE3BQ,cAAcd;gBACde,gBAAgBd;gBAChBU,WAAWT,QAAQS,SAAS,IAAI;YAClC;YACAK,YAAYd,QAAQc,UAAU,IAAI;YAClCC,aAAa;QACf;QAGA1C,cAAc2C,EAAE,CAACC,YAAY,CAACd;QAG9B,IAAI;YACF,MAAMe,YAAY,MAAM7C,cAAc8C,gBAAgB,CAACpB;YACvD1B,cAAc2C,EAAE,CAACI,eAAe,CAAC;gBAC/BlB,IAAID;gBACJoB,OAAO;gBACPC,MAAMJ,UAAUK,MAAM;gBACtBC,QAAQN;YACV;QACF,EAAE,OAAOO,gBAAgB;YACvBzC,QAAQ0C,IAAI,CAAC,iDAAiDD,eAAerC,OAAO;QAEtF;QAGAV,WAAWiD,KAAK;QAEhB,OAAO1B;IACT,EAAE,OAAOf,OAAO;QACdF,QAAQE,KAAK,CAAC,uCAAuCA;QACrD,MAAM,IAAIS,MAAM,CAAC,wBAAwB,EAAET,MAAME,OAAO,EAAE;IAC5D;AACF;AAQA,OAAO,eAAewC,cAAcC,WAAW,EAAE7B,UAAU,CAAC,CAAC;IAE3D,MAAM8B,SAASC,eAAeF,aAA
a7B;IAC3C,IAAI8B,QAAQ;QACV,OAAOA;IACT;IAEA,MAAMhD;IACN,MAAMkD,QAAQhC,QAAQgC,KAAK,IAAI;IAE/B,MAAMvB,YAAYT,QAAQS,SAAS,IAAIT,QAAQQ,MAAM,IAAI;IAEzD,IAAI;QAEF,MAAMyB,UAAU,MAAM5D,cAAc6D,gBAAgB,CAACL,aAAa;YAChErB,QAAQC;YACRC,OAAOV,QAAQU,KAAK,IAAI;YACxByB,GAAGH;YACHI,eAAepC,QAAQoC,aAAa,IAAI;QAC1C;QAIA,MAAMC,WAAWJ,QAAQK,GAAG,CAACnC,CAAAA,SAAW,CAAA;gBACtCD,IAAIC,OAAOD,EAAE;gBACbJ,KAAKK,OAAOG,KAAK,IAAI;gBACrBP,OAAOI,OAAOI,OAAO,IAAIJ,OAAOoC,WAAW,IAAI;gBAC/C9B,WAAWA;gBACXK,YAAYX,OAAOqC,UAAU,EAAEC,eAAe;gBAC9C1B,aAAaZ,OAAOY,WAAW,IAAI;gBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACvDC,OAAO1C,OAAO0C,KAAK,IAAI;gBAEvBC,UAAU3C;YACZ,CAAA;QAGA,IAAIkC,SAASd,MAAM,KAAK,GAAG;YACzBvC,QAAQ0C,IAAI,CAAC;YACb,MAAMqB,kBAAkB1E,cAAc2C,EAAE,CAACgC,qBAAqB,CAAC;gBAC7DxC,QAAQC;gBACR2B,eAAepC,QAAQoC,aAAa,IAAI;YAC1C;YAEA,MAAMa,mBAAmBF,gBAAgBG,KAAK,CAAC,GAAGlB,OAAOM,GAAG,CAACnC,CAAAA,SAAW,CAAA;oBACtED,IAAIC,OAAOD,EAAE;oBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAASH,OAAOE,YAAY,EAAEO,gBAAgB;oBACxEb,OAAOI,OAAOE,YAAY,EAAEE,WAAWJ,OAAOE,YAAY,EAAEQ,kBAAkB;oBAC9EJ,WAAWN,OAAOE,YAAY,EAAEG,UAAUL,OAAOE,YAAY,EAAEI,aAAa;oBAC5EK,YAAYX,OAAOW,UAAU,IAAI;oBACjCC,aAAaZ,OAAOY,WAAW,IAAI;oBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACzD,CAAA;YAGAO,eAAetB,aAAa7B,SAASiD;YACrC,OAAOA;QACT;QAGAE,eAAetB,aAAa7B,SAASqC;QACrC,OAAOA;IACT,EAAE,OAAOnD,OAAO;QACdF,QAAQ0C,IAAI,CAAC,2DAA2DxC,MAAME,OAAO;QAErF,IAAI;YAEF,MAAM2D,kBAAkB1E,cAAc2C,EAAE,CAACgC,qBAAqB,CAAC;gBAC7DxC,QAAQC;gBACR2B,eAAepC,QAAQoC,aAAa,IAAI;YAC1C;YAEA,MAAMa,mBAAmBF,gBAAgBG,KAAK,CAAC,GAAGlB,OAAOM,GAAG,CAACnC,CAAAA,SAAW,CAAA;oBACtED,IAAIC,OAAOD,EAAE;oBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAAS;oBACnCP,OAAOI,OAAOE,YAAY,EAAEE,WAAW;oBACvCE,WAAWN,OAAOE,YAAY,EAAEG,UAAU;oBAC1CM,YAAYX,OAAOW,UAAU,IAAI;oBACjCC,aAAaZ,OAAOY,WAAW,IAAI;oBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;gBACzD,CAAA;YAEAO,eAAetB,aAAa7B,SAASiD;YACrC,OAAOA;QACT,EAAE,OAAOG,eAAe;YACtBpE,QAAQE,KAAK,CAAC,6CAA6CkE;YAC3D,OAAO,EAAE;QACX;IACF;AACF;AAKA,OAAO,eAAeC,aAAarD,UAAU,CAAC,CAAC;IAC7C,MAAMlB;IACN,MAAMkD,QAAQhC,QAAQgC,KAAK,IAAI;IAC/B,MAAMvB,YAAYT,QAAQS,SAAS;IAEnC,IAAI;QACF,IAAI4B;QAEJ,IAAI5B,aAAaA,cAAc,WAAW;YAExC,MAAM6C,cAAcjF,cAAc2C,EAAE,CAACuC,oBAAoB;YACzDlB,WAAWiB,YACRE,MAAM,CAACC,CAAAA,IAAKA,EAAEpD,YAAY,EAAEG,WAAWC,WACvCyC,KAAK,CAAC,GAAGlB;QACd,OAAO;YAELK,WAAWhE,cAAc2C,EAAE,CAACuC,oBAAoB,GAAGL,KAAK,CAAC,GAAGlB;QAC9D;QAEA,OAAOK,SAASC,GAAG,CAACnC,CAAAA,SAAW,CAAA;gBAC7BD,IAAIC,OAAOD,EAAE;gBACbJ,KAAKK,OAAOE,YAAY,EAAEC,SAASH,OAAOE,YAAY,EAAEO,gBAAgB;gBACxEb,OAAOI,OAAOE,YAAY,EAAEE,WAAWJ,OAAOE,YAAY,EAAEQ,kBAAkB;gBAC9EJ,WAAWN,OAAOE,YAAY,EAAEG,UAAUL,OAAOE,YAAY,EAAEI,aAAa;gBAC5EK,YAAYX,OAAOW,UAAU,IAAI;gBACjCC,aAAaZ,OAAOY,WAAW,IAAI;gBACnC2B,YAAYvC,OAAOuC,UAAU,IAAI,IAAIC,OAAOC,WAAW;YACzD,CAAA;IACF,EAAE,OAAO1D,OAAO;QACdF,QAAQE,KAAK,CAAC,wCAAwCA;QACtD,OAAO,EAAE;IACX;AACF;AAKA,OAAO,eAAewE;IACpB,MAAM5E;IAEN,IAAI;QACF,MAAMkC,KAAK3C,cAAc2C,EAAE,CAAC2C,KAAK;QAGjC,MAAMC,WAAW5C,GAAG6C,OAAO,CAAC,0EAA0EC,GAAG;QACzG,MAAMC,aAAa/C,GAAG6C,OAAO,CAAC,oDAAoDC,GAAG;QACrF,MAAME,eAAehD,GAAG6C,OAAO,CAAC,mDAAmDC,GAAG;QACtF,MAAMG,QAAQjD,GAAG6C,OAAO,CAAC,+CAA+CC,GAAG;QAG3E,MAAMI,UAAUlD,GAAG6C,OAAO,CAAC,+EAA+EC,GAAG;QAG7G,MAAMK,UAAUnD,GAAG6C,OAAO,CAAC,wHAAwHC,GAAG;QAEtJ,OAAO;YACLM,gBAAgBR,SAASS,KAAK,IAAI;YAClCC,kBAAkBH,QAAQE,KAAK,IAAI;YACnCE,iBAAiB;YACjBC,eAAejF,QAAQC,GAAG,CAACiF,mBAAmB,IAAI;YAClDC,aAAa;YACbC,gBAAgBT,QAAQU,GAAG,IAAI;YAC/BC,kBAAkBd,WAAWM,KAAK,IAAI;YACtCS,oBAAoBd,aAAaK,KAAK,IAAI;YAC1CU,aAAad,MAAMI,KAAK,IAAI;QAC9B;IACF,EAAE,OAAOnF,OAAO;QACdF,QAAQE,KAAK,CAAC,qCAAqCA;QACnD,OAAO;YACLkF,gBAAgB;YAChBlF,OAAOA,MAAME,OAAO;QACtB;IACF;AACF;AAKA,OAAO,eAAe4F;IACpB,IAAI;QACF,MAAM
lG;QACN,MAAMkC,KAAK3C,cAAc2C,EAAE,CAAC2C,KAAK;QAEjC,MAAMsB,SAASjE,GAAG6C,OAAO,CAAC,8EAA8EqB,GAAG;QAC3G,MAAMC,aAAaF,OAAO3C,GAAG,CAAC8C,CAAAA,IAAKA,EAAEC,IAAI;QAEzC,MAAMC,iBAAiB;YAAC;YAAY;YAAsB;YAAiB;SAAoB;QAC/F,MAAMC,gBAAgBD,eAAe9B,MAAM,CAAC4B,CAAAA,IAAK,CAACD,WAAW9F,QAAQ,CAAC+F;QAEtE,OAAO;YACLI,QAAQ;YACRC,gBAAgBN;YAChBI,eAAeA;YACfD,gBAAgBA;YAChBI,SAAS;YACTC,MAAMJ,cAAchE,MAAM,GAAG,IAAI,6CAA6C;QAChF;IACF,EAAE,OAAOrC,OAAO;QACd,OAAO;YACLsG,QAAQ;YACRC,gBAAgB,EAAE;YAClBF,eAAe,EAAE;YACjBD,gBAAgB,EAAE;YAClBpG,OAAOA,MAAME,OAAO;QACtB;IACF;AACF;AAKA,OAAO,eAAewG;IACpB,IAAI;QACF,MAAMvH,cAAc2C,EAAE,CAAC6E,aAAa;QAEpC,OAAO;YACLC,SAAS;YACT1G,SAAS;YACT2G,UAAU;YACVvB,eAAejF,QAAQC,GAAG,CAACiF,mBAAmB,IAAI;QACpD;IACF,EAAE,OAAOvF,OAAO;QACd,OAAO;YACL4G,SAAS;YACT1G,SAAS,CAAC,kBAAkB,EAAEF,MAAME,OAAO,EAAE;YAC7CF,OAAOA,MAAME,OAAO;QACtB;IACF;AACF;AAKA,SAAS2C,eAAeF,WAAW,EAAE7B,OAAO;IAC1C,MAAMgG,WAAWC,KAAKC,SAAS,CAAC;QAAErE;QAAa7B;IAAQ;IACvD,MAAM8B,SAASpD,WAAWoF,GAAG,CAACkC;IAE9B,IAAIlE,UAAUa,KAAKwD,GAAG,KAAKrE,OAAOsE,SAAS,GAAGvH,WAAW;QACvD,OAAOiD,OAAOG,OAAO;IACvB;IAEA,OAAO;AACT;AAKA,SAASkB,eAAetB,WAAW,EAAE7B,OAAO,EAAEiC,OAAO;IACnD,MAAM+D,WAAWC,KAAKC,SAAS,CAAC;QAAErE;QAAa7B;IAAQ;IAGvD,IAAItB,WAAW2H,IAAI,IAAIzH,YAAY;QACjC,MAAM0H,WAAW5H,WAAW6H,IAAI,GAAGC,IAAI,GAAGzG,KAAK;QAC/CrB,WAAW+H,MAAM,CAACH;IACpB;IAEA5H,WAAWgI,GAAG,CAACV,UAAU;QACvB/D;QACAmE,WAAWzD,KAAKwD,GAAG;IACrB;AACF;AAMA,OAAO,SAASQ;IACd,IAAI;QACF,IAAInI,oBAAoB;YAEtBH,cAAcuI,mBAAmB;YAGjCvI,cAAc2C,EAAE,CAAC6F,OAAO;YACxBrI,qBAAqB;YACrBC,cAAc;YACdO,QAAQC,GAAG,CAAC;QACd;IACF,EAAE,OAAOC,OAAO;QACdF,QAAQE,KAAK,CAAC,mCAAmCA,MAAME,OAAO;IAChE;AACF"}
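For readability, the change buried in the sourcesContent above comes down to this excerpt from the new ensureInitialized() catch block: the better-sqlite3 failure check is widened to cover migration errors, and the JSON-fallback hint now uses --basic instead of --no-reasoningbank. The surrounding npx guidance is otherwise unchanged.

    const isSqliteError = error.message?.includes('BetterSqlite3 is not a constructor') ||
        error.message?.includes('better-sqlite3') ||
        error.message?.includes('could not run migrations');

    if (isSqliteError) {
        // ... same npx detection and advice as in 2.7.16, except the third option:
        console.error('     npx claude-flow@alpha memory store "key" "value" --basic\n');
    }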
package/dist/src/utils/key-redactor.js.map
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/utils/key-redactor.js"],"sourcesContent":["/**\n * API Key Redaction Utility\n * Prevents sensitive data from leaking into logs, memory, or git commits\n */\n\nexport class KeyRedactor {\n static API_KEY_PATTERNS = [\n // Anthropic API keys\n /sk-ant-[a-zA-Z0-9_-]{95,}/gi,\n\n // OpenRouter API keys\n /sk-or-[a-zA-Z0-9_-]{32,}/gi,\n\n // Google/Gemini API keys\n /AIza[a-zA-Z0-9_-]{35}/gi,\n\n // Generic API keys\n /[a-zA-Z0-9_-]{20,}API[a-zA-Z0-9_-]{20,}/gi,\n\n // Bearer tokens\n /Bearer\\s+[a-zA-Z0-9_\\-\\.]{20,}/gi,\n\n // Environment variable format\n /([A-Z_]+_API_KEY|[A-Z_]+_TOKEN|[A-Z_]+_SECRET)=[\"']?([^\"'\\s]+)[\"']?/gi,\n\n // Supabase keys\n /eyJ[a-zA-Z0-9_-]*\\.eyJ[a-zA-Z0-9_-]*\\.[a-zA-Z0-9_-]*/gi,\n ];\n\n static SENSITIVE_FIELDS = [\n 'apiKey',\n 'api_key',\n 'token',\n 'secret',\n 'password',\n 'private_key',\n 'privateKey',\n 'accessToken',\n 'access_token',\n 'refreshToken',\n 'refresh_token',\n ];\n\n /**\n * Redact API keys and sensitive data from text\n */\n static redact(text, showPrefix = true) {\n if (!text) return text;\n\n let redacted = text;\n\n // Redact using patterns\n this.API_KEY_PATTERNS.forEach(pattern => {\n redacted = redacted.replace(pattern, (match) => {\n if (showPrefix && match.length > 8) {\n const prefix = match.substring(0, 8);\n return `${prefix}...[REDACTED]`;\n }\n return '[REDACTED_API_KEY]';\n });\n });\n\n return redacted;\n }\n\n /**\n * Redact sensitive fields in objects\n */\n static redactObject(obj, deep = true) {\n if (!obj || typeof obj !== 'object') return obj;\n\n const redacted = { ...obj };\n\n Object.keys(redacted).forEach(key => {\n const lowerKey = key.toLowerCase();\n\n // Check if field name is sensitive\n const isSensitive = this.SENSITIVE_FIELDS.some(field =>\n lowerKey.includes(field)\n );\n\n if (isSensitive && typeof redacted[key] === 'string') {\n const value = redacted[key];\n if (value && value.length > 8) {\n redacted[key] = `${value.substring(0, 4)}...[REDACTED]`;\n } else {\n redacted[key] = '[REDACTED]';\n }\n } else if (deep && typeof redacted[key] === 'object' && redacted[key] !== null) {\n redacted[key] = this.redactObject(redacted[key], deep);\n } else if (typeof redacted[key] === 'string') {\n // Redact any API keys in string values\n redacted[key] = this.redact(redacted[key]);\n }\n });\n\n return redacted;\n }\n\n /**\n * Sanitize text for safe logging\n */\n static sanitize(text) {\n return this.redact(text, true);\n }\n\n /**\n * Sanitize command arguments\n */\n static sanitizeArgs(args) {\n return args.map(arg => {\n // Check if arg is a flag value pair\n if (arg.includes('key') || arg.includes('token') || arg.includes('secret')) {\n return this.redact(arg);\n }\n return arg;\n });\n }\n\n /**\n * Check if text contains unredacted sensitive data\n */\n static containsSensitiveData(text) {\n return this.API_KEY_PATTERNS.some(pattern => pattern.test(text));\n }\n\n /**\n * Validate that text is safe for logging/storage\n */\n static validate(text) {\n const warnings = [];\n\n this.API_KEY_PATTERNS.forEach((pattern, index) => {\n if (pattern.test(text)) {\n warnings.push(`Potential API key detected (pattern ${index + 1})`);\n }\n });\n\n return {\n safe: warnings.length === 0,\n warnings,\n };\n }\n\n /**\n * Redact environment variables\n */\n static redactEnv(env) {\n const redacted = {};\n\n Object.keys(env).forEach(key => {\n const value = env[key];\n if (!value) {\n redacted[key] = '';\n return;\n }\n\n const lowerKey = key.toLowerCase();\n const isSensitive = 
lowerKey.includes('key') ||\n lowerKey.includes('token') ||\n lowerKey.includes('secret') ||\n lowerKey.includes('password');\n\n if (isSensitive) {\n redacted[key] = value.length > 8\n ? `${value.substring(0, 4)}...[REDACTED]`\n : '[REDACTED]';\n } else {\n redacted[key] = value;\n }\n });\n\n return redacted;\n }\n}\n\n// Export singleton instance\nexport const redactor = KeyRedactor;\n"],"names":["KeyRedactor","API_KEY_PATTERNS","SENSITIVE_FIELDS","redact","text","showPrefix","redacted","forEach","pattern","replace","match","length","prefix","substring","redactObject","obj","deep","Object","keys","key","lowerKey","toLowerCase","isSensitive","some","field","includes","value","sanitize","sanitizeArgs","args","map","arg","containsSensitiveData","test","validate","warnings","index","push","safe","redactEnv","env","redactor"],"mappings":"AAKA,OAAO,MAAMA;IACX,OAAOC,mBAAmB;QAExB;QAGA;QAGA;QAGA;QAGA;QAGA;QAGA;KACD,CAAC;IAEF,OAAOC,mBAAmB;QACxB;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;KACD,CAAC;IAKF,OAAOC,OAAOC,IAAI,EAAEC,aAAa,IAAI,EAAE;QACrC,IAAI,CAACD,MAAM,OAAOA;QAElB,IAAIE,WAAWF;QAGf,IAAI,CAACH,gBAAgB,CAACM,OAAO,CAACC,CAAAA;YAC5BF,WAAWA,SAASG,OAAO,CAACD,SAAS,CAACE;gBACpC,IAAIL,cAAcK,MAAMC,MAAM,GAAG,GAAG;oBAClC,MAAMC,SAASF,MAAMG,SAAS,CAAC,GAAG;oBAClC,OAAO,GAAGD,OAAO,aAAa,CAAC;gBACjC;gBACA,OAAO;YACT;QACF;QAEA,OAAON;IACT;IAKA,OAAOQ,aAAaC,GAAG,EAAEC,OAAO,IAAI,EAAE;QACpC,IAAI,CAACD,OAAO,OAAOA,QAAQ,UAAU,OAAOA;QAE5C,MAAMT,WAAW;YAAE,GAAGS,GAAG;QAAC;QAE1BE,OAAOC,IAAI,CAACZ,UAAUC,OAAO,CAACY,CAAAA;YAC5B,MAAMC,WAAWD,IAAIE,WAAW;YAGhC,MAAMC,cAAc,IAAI,CAACpB,gBAAgB,CAACqB,IAAI,CAACC,CAAAA,QAC7CJ,SAASK,QAAQ,CAACD;YAGpB,IAAIF,eAAe,OAAOhB,QAAQ,CAACa,IAAI,KAAK,UAAU;gBACpD,MAAMO,QAAQpB,QAAQ,CAACa,IAAI;gBAC3B,IAAIO,SAASA,MAAMf,MAAM,GAAG,GAAG;oBAC7BL,QAAQ,CAACa,IAAI,GAAG,GAAGO,MAAMb,SAAS,CAAC,GAAG,GAAG,aAAa,CAAC;gBACzD,OAAO;oBACLP,QAAQ,CAACa,IAAI,GAAG;gBAClB;YACF,OAAO,IAAIH,QAAQ,OAAOV,QAAQ,CAACa,IAAI,KAAK,YAAYb,QAAQ,CAACa,IAAI,KAAK,MAAM;gBAC9Eb,QAAQ,CAACa,IAAI,GAAG,IAAI,CAACL,YAAY,CAACR,QAAQ,CAACa,IAAI,EAAEH;YACnD,OAAO,IAAI,OAAOV,QAAQ,CAACa,IAAI,KAAK,UAAU;gBAE5Cb,QAAQ,CAACa,IAAI,GAAG,IAAI,CAAChB,MAAM,CAACG,QAAQ,CAACa,IAAI;YAC3C;QACF;QAEA,OAAOb;IACT;IAKA,OAAOqB,SAASvB,IAAI,EAAE;QACpB,OAAO,IAAI,CAACD,MAAM,CAACC,MAAM;IAC3B;IAKA,OAAOwB,aAAaC,IAAI,EAAE;QACxB,OAAOA,KAAKC,GAAG,CAACC,CAAAA;YAEd,IAAIA,IAAIN,QAAQ,CAAC,UAAUM,IAAIN,QAAQ,CAAC,YAAYM,IAAIN,QAAQ,CAAC,WAAW;gBAC1E,OAAO,IAAI,CAACtB,MAAM,CAAC4B;YACrB;YACA,OAAOA;QACT;IACF;IAKA,OAAOC,sBAAsB5B,IAAI,EAAE;QACjC,OAAO,IAAI,CAACH,gBAAgB,CAACsB,IAAI,CAACf,CAAAA,UAAWA,QAAQyB,IAAI,CAAC7B;IAC5D;IAKA,OAAO8B,SAAS9B,IAAI,EAAE;QACpB,MAAM+B,WAAW,EAAE;QAEnB,IAAI,CAAClC,gBAAgB,CAACM,OAAO,CAAC,CAACC,SAAS4B;YACtC,IAAI5B,QAAQyB,IAAI,CAAC7B,OAAO;gBACtB+B,SAASE,IAAI,CAAC,CAAC,oCAAoC,EAAED,QAAQ,EAAE,CAAC,CAAC;YACnE;QACF;QAEA,OAAO;YACLE,MAAMH,SAASxB,MAAM,KAAK;YAC1BwB;QACF;IACF;IAKA,OAAOI,UAAUC,GAAG,EAAE;QACpB,MAAMlC,WAAW,CAAC;QAElBW,OAAOC,IAAI,CAACsB,KAAKjC,OAAO,CAACY,CAAAA;YACvB,MAAMO,QAAQc,GAAG,CAACrB,IAAI;YACtB,IAAI,CAACO,OAAO;gBACVpB,QAAQ,CAACa,IAAI,GAAG;gBAChB;YACF;YAEA,MAAMC,WAAWD,IAAIE,WAAW;YAChC,MAAMC,cAAcF,SAASK,QAAQ,CAAC,UACnBL,SAASK,QAAQ,CAAC,YAClBL,SAASK,QAAQ,CAAC,aAClBL,SAASK,QAAQ,CAAC;YAErC,IAAIH,aAAa;gBACfhB,QAAQ,CAACa,IAAI,GAAGO,MAAMf,MAAM,GAAG,IAC3B,GAAGe,MAAMb,SAAS,CAAC,GAAG,GAAG,aAAa,CAAC,GACvC;YACN,OAAO;gBACLP,QAAQ,CAACa,IAAI,GAAGO;YAClB;QACF;QAEA,OAAOpB;IACT;AACF;AAGA,OAAO,MAAMmC,WAAWzC,YAAY"}
+
{"version":3,"sources":["../../../src/utils/key-redactor.js"],"sourcesContent":["/**\n * API Key Redaction Utility\n * Prevents sensitive data from leaking into logs, memory, or git commits\n */\n\nexport class KeyRedactor {\n static API_KEY_PATTERNS = [\n // Anthropic API keys\n /sk-ant-[a-zA-Z0-9_-]{95,}/gi,\n\n // OpenRouter API keys\n /sk-or-[a-zA-Z0-9_-]{32,}/gi,\n\n // Google/Gemini API keys\n /AIza[a-zA-Z0-9_-]{35}/gi,\n\n // Generic API keys\n /[a-zA-Z0-9_-]{20,}API[a-zA-Z0-9_-]{20,}/gi,\n\n // Bearer tokens\n /Bearer\\s+[a-zA-Z0-9_\\-\\.]{20,}/gi,\n\n // Environment variable format\n /([A-Z_]+_API_KEY|[A-Z_]+_TOKEN|[A-Z_]+_SECRET)=[\"']?([^\"'\\s]+)[\"']?/gi,\n\n // Supabase keys\n /eyJ[a-zA-Z0-9_-]*\\.eyJ[a-zA-Z0-9_-]*\\.[a-zA-Z0-9_-]*/gi,\n ];\n\n static SENSITIVE_FIELDS = [\n 'apiKey',\n 'api_key',\n 'token',\n 'secret',\n 'password',\n 'private_key',\n 'privateKey',\n 'accessToken',\n 'access_token',\n 'refreshToken',\n 'refresh_token',\n ];\n\n /**\n * Redact API keys and sensitive data from text\n */\n static redact(text, showPrefix = true) {\n if (!text) return text;\n\n let redacted = text;\n\n // Redact using patterns\n this.API_KEY_PATTERNS.forEach(pattern => {\n redacted = redacted.replace(pattern, (match) => {\n if (showPrefix && match.length > 8) {\n const prefix = match.substring(0, 8);\n return `${prefix}...[REDACTED]`;\n }\n return '[REDACTED_API_KEY]';\n });\n });\n\n return redacted;\n }\n\n /**\n * Redact sensitive fields in objects\n */\n static redactObject(obj, deep = true) {\n if (!obj || typeof obj !== 'object') return obj;\n\n const redacted = { ...obj };\n\n Object.keys(redacted).forEach(key => {\n const lowerKey = key.toLowerCase();\n\n // Check if field name is sensitive\n const isSensitive = this.SENSITIVE_FIELDS.some(field =>\n lowerKey.includes(field)\n );\n\n if (isSensitive && typeof redacted[key] === 'string') {\n const value = redacted[key];\n if (value && value.length > 8) {\n redacted[key] = `${value.substring(0, 4)}...[REDACTED]`;\n } else {\n redacted[key] = '[REDACTED]';\n }\n } else if (deep && typeof redacted[key] === 'object' && redacted[key] !== null) {\n redacted[key] = this.redactObject(redacted[key], deep);\n } else if (typeof redacted[key] === 'string') {\n // Redact any API keys in string values\n redacted[key] = this.redact(redacted[key]);\n }\n });\n\n return redacted;\n }\n\n /**\n * Sanitize text for safe logging\n */\n static sanitize(text) {\n return this.redact(text, true);\n }\n\n /**\n * Sanitize command arguments\n */\n static sanitizeArgs(args) {\n return args.map(arg => {\n // Check if arg is a flag value pair\n if (arg.includes('key') || arg.includes('token') || arg.includes('secret')) {\n return this.redact(arg);\n }\n return arg;\n });\n }\n\n /**\n * Check if text contains unredacted sensitive data\n */\n static containsSensitiveData(text) {\n return this.API_KEY_PATTERNS.some(pattern => pattern.test(text));\n }\n\n /**\n * Validate that text is safe for logging/storage\n */\n static validate(text) {\n const warnings = [];\n\n this.API_KEY_PATTERNS.forEach((pattern, index) => {\n if (pattern.test(text)) {\n warnings.push(`Potential API key detected (pattern ${index + 1})`);\n }\n });\n\n return {\n safe: warnings.length === 0,\n warnings,\n };\n }\n\n /**\n * Redact environment variables\n */\n static redactEnv(env) {\n const redacted = {};\n\n Object.keys(env).forEach(key => {\n const value = env[key];\n if (!value) {\n redacted[key] = '';\n return;\n }\n\n const lowerKey = key.toLowerCase();\n const isSensitive = 
lowerKey.includes('key') ||\n lowerKey.includes('token') ||\n lowerKey.includes('secret') ||\n lowerKey.includes('password');\n\n if (isSensitive) {\n redacted[key] = value.length > 8\n ? `${value.substring(0, 4)}...[REDACTED]`\n : '[REDACTED]';\n } else {\n redacted[key] = value;\n }\n });\n\n return redacted;\n }\n}\n\n// Export singleton instance\nexport const redactor = KeyRedactor;\n"],"names":["KeyRedactor","API_KEY_PATTERNS","SENSITIVE_FIELDS","redact","text","showPrefix","redacted","forEach","pattern","replace","match","length","prefix","substring","redactObject","obj","deep","Object","keys","key","lowerKey","toLowerCase","isSensitive","some","field","includes","value","sanitize","sanitizeArgs","args","map","arg","containsSensitiveData","test","validate","warnings","index","push","safe","redactEnv","env","redactor"],"mappings":"AAKA,OAAO,MAAMA;IACX,OAAOC,mBAAmB;QAExB;QAGA;QAGA;QAGA;QAGA;QAGA;QAGA;KACD,CAAC;IAEF,OAAOC,mBAAmB;QACxB;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;KACD,CAAC;IAKF,OAAOC,OAAOC,IAAI,EAAEC,aAAa,IAAI,EAAE;QACrC,IAAI,CAACD,MAAM,OAAOA;QAElB,IAAIE,WAAWF;QAGf,IAAI,CAACH,gBAAgB,CAACM,OAAO,CAACC,CAAAA;YAC5BF,WAAWA,SAASG,OAAO,CAACD,SAAS,CAACE;gBACpC,IAAIL,cAAcK,MAAMC,MAAM,GAAG,GAAG;oBAClC,MAAMC,SAASF,MAAMG,SAAS,CAAC,GAAG;oBAClC,OAAO,GAAGD,OAAO,aAAa,CAAC;gBACjC;gBACA,OAAO;YACT;QACF;QAEA,OAAON;IACT;IAKA,OAAOQ,aAAaC,GAAG,EAAEC,OAAO,IAAI,EAAE;QACpC,IAAI,CAACD,OAAO,OAAOA,QAAQ,UAAU,OAAOA;QAE5C,MAAMT,WAAW;YAAE,GAAGS,GAAG;QAAC;QAE1BE,OAAOC,IAAI,CAACZ,UAAUC,OAAO,CAACY,CAAAA;YAC5B,MAAMC,WAAWD,IAAIE,WAAW;YAGhC,MAAMC,cAAc,IAAI,CAACpB,gBAAgB,CAACqB,IAAI,CAACC,CAAAA,QAC7CJ,SAASK,QAAQ,CAACD;YAGpB,IAAIF,eAAe,OAAOhB,QAAQ,CAACa,IAAI,KAAK,UAAU;gBACpD,MAAMO,QAAQpB,QAAQ,CAACa,IAAI;gBAC3B,IAAIO,SAASA,MAAMf,MAAM,GAAG,GAAG;oBAC7BL,QAAQ,CAACa,IAAI,GAAG,GAAGO,MAAMb,SAAS,CAAC,GAAG,GAAG,aAAa,CAAC;gBACzD,OAAO;oBACLP,QAAQ,CAACa,IAAI,GAAG;gBAClB;YACF,OAAO,IAAIH,QAAQ,OAAOV,QAAQ,CAACa,IAAI,KAAK,YAAYb,QAAQ,CAACa,IAAI,KAAK,MAAM;gBAC9Eb,QAAQ,CAACa,IAAI,GAAG,IAAI,CAACL,YAAY,CAACR,QAAQ,CAACa,IAAI,EAAEH;YACnD,OAAO,IAAI,OAAOV,QAAQ,CAACa,IAAI,KAAK,UAAU;gBAE5Cb,QAAQ,CAACa,IAAI,GAAG,IAAI,CAAChB,MAAM,CAACG,QAAQ,CAACa,IAAI;YAC3C;QACF;QAEA,OAAOb;IACT;IAKA,OAAOqB,SAASvB,IAAI,EAAE;QACpB,OAAO,IAAI,CAACD,MAAM,CAACC,MAAM;IAC3B;IAKA,OAAOwB,aAAaC,IAAI,EAAE;QACxB,OAAOA,KAAKC,GAAG,CAACC,CAAAA;YAEd,IAAIA,IAAIN,QAAQ,CAAC,UAAUM,IAAIN,QAAQ,CAAC,YAAYM,IAAIN,QAAQ,CAAC,WAAW;gBAC1E,OAAO,IAAI,CAACtB,MAAM,CAAC4B;YACrB;YACA,OAAOA;QACT;IACF;IAKA,OAAOC,sBAAsB5B,IAAI,EAAE;QACjC,OAAO,IAAI,CAACH,gBAAgB,CAACsB,IAAI,CAACf,CAAAA,UAAWA,QAAQyB,IAAI,CAAC7B;IAC5D;IAKA,OAAO8B,SAAS9B,IAAI,EAAE;QACpB,MAAM+B,WAAW,EAAE;QAEnB,IAAI,CAAClC,gBAAgB,CAACM,OAAO,CAAC,CAACC,SAAS4B;YACtC,IAAI5B,QAAQyB,IAAI,CAAC7B,OAAO;gBACtB+B,SAASE,IAAI,CAAC,CAAC,oCAAoC,EAAED,QAAQ,EAAE,CAAC,CAAC;YACnE;QACF;QAEA,OAAO;YACLE,MAAMH,SAASxB,MAAM,KAAK;YAC1BwB;QACF;IACF;IAKA,OAAOI,UAAUC,GAAG,EAAE;QACpB,MAAMlC,WAAW,CAAC;QAElBW,OAAOC,IAAI,CAACsB,KAAKjC,OAAO,CAACY,CAAAA;YACvB,MAAMO,QAAQc,GAAG,CAACrB,IAAI;YACtB,IAAI,CAACO,OAAO;gBACVpB,QAAQ,CAACa,IAAI,GAAG;gBAChB;YACF;YAEA,MAAMC,WAAWD,IAAIE,WAAW;YAChC,MAAMC,cAAcF,SAASK,QAAQ,CAAC,UACnBL,SAASK,QAAQ,CAAC,YAClBL,SAASK,QAAQ,CAAC,aAClBL,SAASK,QAAQ,CAAC;YAErC,IAAIH,aAAa;gBACfhB,QAAQ,CAACa,IAAI,GAAGO,MAAMf,MAAM,GAAG,IAC3B,GAAGe,MAAMb,SAAS,CAAC,GAAG,GAAG,aAAa,CAAC,GACvC;YACN,OAAO;gBACLP,QAAQ,CAACa,IAAI,GAAGO;YAClB;QACF;QAEA,OAAOpB;IACT;AACF;AAGA,OAAO,MAAMmC,WAAWzC,YAAY"}
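For orientation, a minimal usage sketch of the KeyRedactor API embedded in the sourcesContent above (import path is illustrative, not taken from the diff):

    import { KeyRedactor } from './key-redactor.js';

    // Replace matching API-key patterns, keeping an 8-character prefix by default
    const safeText = KeyRedactor.redact('Bearer sk-ant-aaaaaaaaaaaaaaaaaaaaaaaa');

    // Mask sensitive fields (apiKey, token, secret, ...) recursively in objects
    const safeConfig = KeyRedactor.redactObject({ apiKey: 'sk-or-1234567890abcdef', model: 'claude' });

    // Check whether a string still contains something that looks like a key
    const { safe, warnings } = KeyRedactor.validate(safeText);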
package/dist/src/utils/metrics-reader.js
CHANGED
@@ -1,13 +1,11 @@
-import { promises as fs } from 'fs';
-import path from 'path';
+import * as fs from 'fs/promises';
+import * as path from 'path';
 import { exec } from 'child_process';
 import { promisify } from 'util';
 const execAsync = promisify(exec);
-let MetricsReader = class MetricsReader {
-    constructor(){
-        this.metricsDir = '.claude-flow/metrics';
-        this.sessionsDir = '.claude-flow/sessions';
-    }
+export class MetricsReader {
+    metricsDir = '.claude-flow/metrics';
+    sessionsDir = '.claude-flow/sessions';
     async getSystemMetrics() {
         try {
             const filePath = path.join(this.metricsDir, 'system-metrics.json');
@@ -18,15 +16,6 @@ let MetricsReader = class MetricsReader {
             return null;
         }
     }
-    async getTaskQueue() {
-        try {
-            const queueFile = '.claude-flow/tasks/queue.json';
-            const content = await fs.readFile(queueFile, 'utf8');
-            return JSON.parse(content);
-        } catch (error) {
-            return [];
-        }
-    }
     async getTaskMetrics() {
         try {
             const filePath = path.join(this.metricsDir, 'task-metrics.json');
@@ -47,30 +36,30 @@ let MetricsReader = class MetricsReader {
     }
     async getActiveAgents() {
         try {
+            const perfMetrics = await this.getPerformanceMetrics();
+            const sessionFiles = await this.getSessionFiles();
             const agents = [];
-            const agentsDir = '.claude-flow/agents';
-            try {
-                const agentFiles = await fs.readdir(agentsDir);
-                for (const file of agentFiles){
-                    if (file.endsWith('.json')) {
-                        try {
-                            const content = await fs.readFile(path.join(agentsDir, file), 'utf8');
-                            const agent = JSON.parse(content);
-                            agents.push(agent);
-                        } catch {}
+            for (const file of sessionFiles){
+                try {
+                    const content = await fs.readFile(path.join(this.sessionsDir, 'pair', file), 'utf8');
+                    const sessionData = JSON.parse(content);
+                    if (sessionData.agents && Array.isArray(sessionData.agents)) {
+                        agents.push(...sessionData.agents);
                     }
-                }
-            } catch {}
-            if (agents.length === 0) {
-                const sessionFiles = await this.getSessionFiles();
-                for (const file of sessionFiles){
-                    try {
-                        const content = await fs.readFile(path.join(this.sessionsDir, 'pair', file), 'utf8');
-                        const sessionData = JSON.parse(content);
-                        if (sessionData.agents && Array.isArray(sessionData.agents)) {
-                            agents.push(...sessionData.agents);
-                        }
-                    } catch {}
+                } catch {}
+            }
+            if (agents.length === 0 && perfMetrics) {
+                const activeCount = perfMetrics.activeAgents || 0;
+                const totalCount = perfMetrics.totalAgents || 0;
+                for(let i = 0; i < totalCount; i++){
+                    agents.push({
+                        id: `agent-${i + 1}`,
+                        name: `Agent ${i + 1}`,
+                        type: i === 0 ? 'orchestrator' : 'worker',
+                        status: i < activeCount ? 'active' : 'idle',
+                        activeTasks: i < activeCount ? 1 : 0,
+                        lastActivity: Date.now() - i * 1000
+                    });
                 }
             }
             return agents;
@@ -146,7 +135,7 @@ let MetricsReader = class MetricsReader {
     }
     async getMCPServerStatus() {
         try {
-            const { stdout } = await execAsync('ps aux | grep -E "mcp" | grep -v grep | wc -l');
+            const { stdout } = await execAsync('ps aux | grep -E "mcp-server\\.js|claude-flow mcp start" | grep -v grep | wc -l');
             const processCount = parseInt(stdout.trim(), 10);
             const { stdout: orchestratorOut } = await execAsync('ps aux | grep -E "claude-flow start" | grep -v grep | wc -l');
             const orchestratorRunning = parseInt(orchestratorOut.trim(), 10) > 0;
@@ -175,7 +164,6 @@ let MetricsReader = class MetricsReader {
             };
         }
     }
-};
-export { MetricsReader };
+}

 //# sourceMappingURL=metrics-reader.js.map
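For reference, the refactored getActiveAgents() above now reads agents from pair-session files first and only synthesizes placeholder agents from performance.json counts when none are found. A minimal usage sketch follows; the import path and ESM top-level-await context are assumptions for illustration, not a documented entry point of this package:

import { MetricsReader } from 'claude-flow/dist/src/utils/metrics-reader.js';

// Prefers agents recorded in .claude-flow/sessions/pair/*.json;
// falls back to mock agents derived from .claude-flow/metrics/performance.json.
const reader = new MetricsReader();
const agents = await reader.getActiveAgents();
for (const agent of agents) {
  console.log(`${agent.id} (${agent.type}): ${agent.status}`);
}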
package/dist/src/utils/metrics-reader.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/utils/metrics-reader.js"],"sourcesContent":["import { promises as fs } from 'fs';\nimport path from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\n\nconst execAsync = promisify(exec);\n\nclass MetricsReader {\n constructor() {\n this.metricsDir = '.claude-flow/metrics';\n this.sessionsDir = '.claude-flow/sessions';\n }\n\n async getSystemMetrics() {\n try {\n const filePath = path.join(this.metricsDir, 'system-metrics.json');\n const content = await fs.readFile(filePath, 'utf8');\n const metrics = JSON.parse(content);\n \n // Return the most recent metric\n return metrics.length > 0 ? metrics[metrics.length - 1] : null;\n } catch (error) {\n return null;\n }\n }\n\n async getTaskQueue() {\n try {\n const queueFile = '.claude-flow/tasks/queue.json';\n const content = await fs.readFile(queueFile, 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return [];\n }\n }\n\n async getTaskMetrics() {\n try {\n const filePath = path.join(this.metricsDir, 'task-metrics.json');\n const content = await fs.readFile(filePath, 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return [];\n }\n }\n\n async getPerformanceMetrics() {\n try {\n const filePath = path.join(this.metricsDir, 'performance.json');\n const content = await fs.readFile(filePath, 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return null;\n }\n }\n\n async getActiveAgents() {\n try {\n const agents = [];\n \n // Check for agents in the .claude-flow/agents directory\n const agentsDir = '.claude-flow/agents';\n try {\n const agentFiles = await fs.readdir(agentsDir);\n for (const file of agentFiles) {\n if (file.endsWith('.json')) {\n try {\n const content = await fs.readFile(path.join(agentsDir, file), 'utf8');\n const agent = JSON.parse(content);\n agents.push(agent);\n } catch {\n // Skip invalid agent files\n }\n }\n }\n } catch {\n // Agents directory doesn't exist yet\n }\n \n // If no agents found in directory, check session files\n if (agents.length === 0) {\n const sessionFiles = await this.getSessionFiles();\n for (const file of sessionFiles) {\n try {\n const content = await fs.readFile(path.join(this.sessionsDir, 'pair', file), 'utf8');\n const sessionData = JSON.parse(content);\n \n if (sessionData.agents && Array.isArray(sessionData.agents)) {\n agents.push(...sessionData.agents);\n }\n } catch {\n // Skip invalid session files\n }\n }\n }\n \n return agents;\n } catch (error) {\n return [];\n }\n }\n\n async getSessionStatus() {\n try {\n const sessionFiles = await this.getSessionFiles();\n \n if (sessionFiles.length === 0) {\n return null;\n }\n \n // Get the most recent session\n const mostRecentFile = sessionFiles[sessionFiles.length - 1];\n const content = await fs.readFile(path.join(this.sessionsDir, 'pair', mostRecentFile), 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return null;\n }\n }\n\n async getRecentTasks(limit = 10) {\n try {\n const taskMetrics = await this.getTaskMetrics();\n \n // Sort by timestamp descending and take the limit\n return taskMetrics\n .sort((a, b) => b.timestamp - a.timestamp)\n .slice(0, limit)\n .map(task => ({\n id: task.id,\n type: task.type,\n status: task.success ? 
'completed' : 'failed',\n startTime: task.timestamp - task.duration,\n endTime: task.timestamp,\n duration: task.duration\n }));\n } catch (error) {\n return [];\n }\n }\n\n async getOverallHealth() {\n try {\n const systemMetrics = await this.getSystemMetrics();\n const perfMetrics = await this.getPerformanceMetrics();\n \n if (!systemMetrics && !perfMetrics) {\n return 'error';\n }\n \n // Check memory usage\n if (systemMetrics && systemMetrics.memoryUsagePercent > 90) {\n return 'error';\n }\n \n if (systemMetrics && systemMetrics.memoryUsagePercent > 75) {\n return 'warning';\n }\n \n // Check CPU load\n if (systemMetrics && systemMetrics.cpuLoad > 0.8) {\n return 'warning';\n }\n \n // Check task failure rate\n if (perfMetrics && perfMetrics.totalTasks > 0) {\n const failureRate = perfMetrics.failedTasks / perfMetrics.totalTasks;\n if (failureRate > 0.5) {\n return 'error';\n }\n if (failureRate > 0.2) {\n return 'warning';\n }\n }\n \n return 'healthy';\n } catch (error) {\n return 'error';\n }\n }\n\n async getSessionFiles() {\n try {\n const files = await fs.readdir(path.join(this.sessionsDir, 'pair'));\n return files.filter(f => f.endsWith('.json')).sort();\n } catch (error) {\n return [];\n }\n }\n\n async getMCPServerStatus() {\n try {\n // Check if MCP server process is running (including flow-nexus and other MCP variants)\n const { stdout } = await execAsync('ps aux | grep -E \"mcp\" | grep -v grep | wc -l');\n const processCount = parseInt(stdout.trim(), 10);\n \n // Check for orchestrator running\n const { stdout: orchestratorOut } = await execAsync('ps aux | grep -E \"claude-flow start\" | grep -v grep | wc -l');\n const orchestratorRunning = parseInt(orchestratorOut.trim(), 10) > 0;\n \n // Determine status\n const isRunning = processCount > 0;\n \n // Try to get port from process (default is 3000)\n let port = 3000;\n try {\n const { stdout: portOut } = await execAsync('lsof -i :3000 2>/dev/null | grep LISTEN | wc -l');\n if (parseInt(portOut.trim(), 10) === 0) {\n // If port 3000 not listening, check other common ports\n port = null;\n }\n } catch {\n // lsof might not be available or port not in use\n }\n \n return {\n running: isRunning,\n processCount,\n orchestratorRunning,\n port,\n connections: processCount > 0 ? 
Math.max(1, processCount - 1) : 0 // Estimate connections\n };\n } catch (error) {\n // Fallback if commands fail\n return {\n running: false,\n processCount: 0,\n orchestratorRunning: false,\n port: null,\n connections: 0\n };\n }\n }\n}\n\nexport { MetricsReader };"],"names":["promises","fs","path","exec","promisify","execAsync","MetricsReader","metricsDir","sessionsDir","getSystemMetrics","filePath","join","content","readFile","metrics","JSON","parse","length","error","getTaskQueue","queueFile","getTaskMetrics","getPerformanceMetrics","getActiveAgents","agents","agentsDir","agentFiles","readdir","file","endsWith","agent","push","sessionFiles","getSessionFiles","sessionData","Array","isArray","getSessionStatus","mostRecentFile","getRecentTasks","limit","taskMetrics","sort","a","b","timestamp","slice","map","task","id","type","status","success","startTime","duration","endTime","getOverallHealth","systemMetrics","perfMetrics","memoryUsagePercent","cpuLoad","totalTasks","failureRate","failedTasks","files","filter","f","getMCPServerStatus","stdout","processCount","parseInt","trim","orchestratorOut","orchestratorRunning","isRunning","port","portOut","running","connections","Math","max"],"mappings":"AAAA,SAASA,YAAYC,EAAE,QAAQ,KAAK;AACpC,OAAOC,UAAU,OAAO;AACxB,SAASC,IAAI,QAAQ,gBAAgB;AACrC,SAASC,SAAS,QAAQ,OAAO;AAEjC,MAAMC,YAAYD,UAAUD;AAE5B,IAAA,AAAMG,gBAAN,MAAMA;IACJ,aAAc;QACZ,IAAI,CAACC,UAAU,GAAG;QAClB,IAAI,CAACC,WAAW,GAAG;IACrB;IAEA,MAAMC,mBAAmB;QACvB,IAAI;YACF,MAAMC,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,MAAMI,UAAUC,KAAKC,KAAK,CAACJ;YAG3B,OAAOE,QAAQG,MAAM,GAAG,IAAIH,OAAO,CAACA,QAAQG,MAAM,GAAG,EAAE,GAAG;QAC5D,EAAE,OAAOC,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMC,eAAe;QACnB,IAAI;YACF,MAAMC,YAAY;YAClB,MAAMR,UAAU,MAAMX,GAAGY,QAAQ,CAACO,WAAW;YAC7C,OAAOL,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMG,iBAAiB;QACrB,IAAI;YACF,MAAMX,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,OAAOK,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMI,wBAAwB;QAC5B,IAAI;YACF,MAAMZ,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,OAAOK,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMK,kBAAkB;QACtB,IAAI;YACF,MAAMC,SAAS,EAAE;YAGjB,MAAMC,YAAY;YAClB,IAAI;gBACF,MAAMC,aAAa,MAAMzB,GAAG0B,OAAO,CAACF;gBACpC,KAAK,MAAMG,QAAQF,WAAY;oBAC7B,IAAIE,KAAKC,QAAQ,CAAC,UAAU;wBAC1B,IAAI;4BACF,MAAMjB,UAAU,MAAMX,GAAGY,QAAQ,CAACX,KAAKS,IAAI,CAACc,WAAWG,OAAO;4BAC9D,MAAME,QAAQf,KAAKC,KAAK,CAACJ;4BACzBY,OAAOO,IAAI,CAACD;wBACd,EAAE,OAAM,CAER;oBACF;gBACF;YACF,EAAE,OAAM,CAER;YAGA,IAAIN,OAAOP,MAAM,KAAK,GAAG;gBACvB,MAAMe,eAAe,MAAM,IAAI,CAACC,eAAe;gBAC/C,KAAK,MAAML,QAAQI,aAAc;oBAC/B,IAAI;wBACF,MAAMpB,UAAU,MAAMX,GAAGY,QAAQ,CAACX,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE,QAAQoB,OAAO;wBAC7E,MAAMM,cAAcnB,KAAKC,KAAK,CAACJ;wBAE/B,IAAIsB,YAAYV,MAAM,IAAIW,MAAMC,OAAO,CAACF,YAAYV,MAAM,GAAG;4BAC3DA,OAAOO,IAAI,IAAIG,YAAYV,MAAM;wBACnC;oBACF,EAAE,OAAM,CAER;gBACF;YACF;YAEA,OAAOA;QACT,EAAE,OAAON,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMmB,mBAAmB;QACvB,IAAI;YACF,MAAML,eAAe,MAAM,IAAI,CAACC,eAAe;YAE/C,IAAID,aAAaf,MAAM,KAAK,GAAG;gBAC7B,OAAO;YACT;YAGA,MAAMqB,iBAAiBN,YAAY,CAACA,aAAaf,MAAM,GAAG,EAAE;YAC5D,MAAML,UAAU,MAAMX,GAAGY,QAAQ,CAACX,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE,QAAQ8B,iBAAiB;YACvF,OAAOvB,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMqB,eAAeC,QAAQ,EAAE,EAAE;QAC/B,IAAI;YACF,MAAMC,cAAc,MAAM,IAAI,CAACpB,cAAc;YAG7C,OAAOoB,YACJC,IAAI,CAAC,CAACC,GAA
GC,IAAMA,EAAEC,SAAS,GAAGF,EAAEE,SAAS,EACxCC,KAAK,CAAC,GAAGN,OACTO,GAAG,CAACC,CAAAA,OAAS,CAAA;oBACZC,IAAID,KAAKC,EAAE;oBACXC,MAAMF,KAAKE,IAAI;oBACfC,QAAQH,KAAKI,OAAO,GAAG,cAAc;oBACrCC,WAAWL,KAAKH,SAAS,GAAGG,KAAKM,QAAQ;oBACzCC,SAASP,KAAKH,SAAS;oBACvBS,UAAUN,KAAKM,QAAQ;gBACzB,CAAA;QACJ,EAAE,OAAOpC,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMsC,mBAAmB;QACvB,IAAI;YACF,MAAMC,gBAAgB,MAAM,IAAI,CAAChD,gBAAgB;YACjD,MAAMiD,cAAc,MAAM,IAAI,CAACpC,qBAAqB;YAEpD,IAAI,CAACmC,iBAAiB,CAACC,aAAa;gBAClC,OAAO;YACT;YAGA,IAAID,iBAAiBA,cAAcE,kBAAkB,GAAG,IAAI;gBAC1D,OAAO;YACT;YAEA,IAAIF,iBAAiBA,cAAcE,kBAAkB,GAAG,IAAI;gBAC1D,OAAO;YACT;YAGA,IAAIF,iBAAiBA,cAAcG,OAAO,GAAG,KAAK;gBAChD,OAAO;YACT;YAGA,IAAIF,eAAeA,YAAYG,UAAU,GAAG,GAAG;gBAC7C,MAAMC,cAAcJ,YAAYK,WAAW,GAAGL,YAAYG,UAAU;gBACpE,IAAIC,cAAc,KAAK;oBACrB,OAAO;gBACT;gBACA,IAAIA,cAAc,KAAK;oBACrB,OAAO;gBACT;YACF;YAEA,OAAO;QACT,EAAE,OAAO5C,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMe,kBAAkB;QACtB,IAAI;YACF,MAAM+B,QAAQ,MAAM/D,GAAG0B,OAAO,CAACzB,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE;YAC3D,OAAOwD,MAAMC,MAAM,CAACC,CAAAA,IAAKA,EAAErC,QAAQ,CAAC,UAAUa,IAAI;QACpD,EAAE,OAAOxB,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMiD,qBAAqB;QACzB,IAAI;YAEF,MAAM,EAAEC,MAAM,EAAE,GAAG,MAAM/D,UAAU;YACnC,MAAMgE,eAAeC,SAASF,OAAOG,IAAI,IAAI;YAG7C,MAAM,EAAEH,QAAQI,eAAe,EAAE,GAAG,MAAMnE,UAAU;YACpD,MAAMoE,sBAAsBH,SAASE,gBAAgBD,IAAI,IAAI,MAAM;YAGnE,MAAMG,YAAYL,eAAe;YAGjC,IAAIM,OAAO;YACX,IAAI;gBACF,MAAM,EAAEP,QAAQQ,OAAO,EAAE,GAAG,MAAMvE,UAAU;gBAC5C,IAAIiE,SAASM,QAAQL,IAAI,IAAI,QAAQ,GAAG;oBAEtCI,OAAO;gBACT;YACF,EAAE,OAAM,CAER;YAEA,OAAO;gBACLE,SAASH;gBACTL;gBACAI;gBACAE;gBACAG,aAAaT,eAAe,IAAIU,KAAKC,GAAG,CAAC,GAAGX,eAAe,KAAK;YAClE;QACF,EAAE,OAAOnD,OAAO;YAEd,OAAO;gBACL2D,SAAS;gBACTR,cAAc;gBACdI,qBAAqB;gBACrBE,MAAM;gBACNG,aAAa;YACf;QACF;IACF;AACF;AAEA,SAASxE,aAAa,GAAG"}
+
{"version":3,"sources":["../../../src/utils/metrics-reader.ts"],"sourcesContent":["import * as fs from 'fs/promises';\nimport * as path from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\n\nconst execAsync = promisify(exec);\n\ninterface SystemMetrics {\n timestamp: number;\n memoryTotal: number;\n memoryUsed: number;\n memoryFree: number;\n memoryUsagePercent: number;\n memoryEfficiency: number;\n cpuCount: number;\n cpuLoad: number;\n platform: string;\n uptime: number;\n}\n\ninterface TaskMetric {\n id: string;\n type: string;\n success: boolean;\n duration: number;\n timestamp: number;\n metadata: Record<string, any>;\n}\n\ninterface PerformanceMetrics {\n startTime: number;\n totalTasks: number;\n successfulTasks: number;\n failedTasks: number;\n totalAgents: number;\n activeAgents: number;\n neuralEvents: number;\n}\n\ninterface Agent {\n id: string;\n name: string;\n type: string;\n status: 'active' | 'idle' | 'busy';\n activeTasks: number;\n lastActivity?: number;\n}\n\ninterface SessionData {\n id: string;\n startTime: number;\n endTime?: number;\n agents: Agent[];\n tasks: any[];\n status: 'active' | 'completed' | 'paused';\n}\n\ninterface MCPServerStatus {\n running: boolean;\n processCount: number;\n orchestratorRunning: boolean;\n port: number | null;\n connections: number;\n}\n\nexport class MetricsReader {\n private metricsDir = '.claude-flow/metrics';\n private sessionsDir = '.claude-flow/sessions';\n\n async getSystemMetrics(): Promise<SystemMetrics | null> {\n try {\n const filePath = path.join(this.metricsDir, 'system-metrics.json');\n const content = await fs.readFile(filePath, 'utf8');\n const metrics: SystemMetrics[] = JSON.parse(content);\n \n // Return the most recent metric\n return metrics.length > 0 ? metrics[metrics.length - 1] : null;\n } catch (error) {\n return null;\n }\n }\n\n async getTaskMetrics(): Promise<TaskMetric[]> {\n try {\n const filePath = path.join(this.metricsDir, 'task-metrics.json');\n const content = await fs.readFile(filePath, 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return [];\n }\n }\n\n async getPerformanceMetrics(): Promise<PerformanceMetrics | null> {\n try {\n const filePath = path.join(this.metricsDir, 'performance.json');\n const content = await fs.readFile(filePath, 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return null;\n }\n }\n\n async getActiveAgents(): Promise<Agent[]> {\n try {\n // First check performance metrics for agent count\n const perfMetrics = await this.getPerformanceMetrics();\n \n // Also check session files for more detailed agent info\n const sessionFiles = await this.getSessionFiles();\n const agents: Agent[] = [];\n \n for (const file of sessionFiles) {\n try {\n const content = await fs.readFile(path.join(this.sessionsDir, 'pair', file), 'utf8');\n const sessionData = JSON.parse(content);\n \n if (sessionData.agents && Array.isArray(sessionData.agents)) {\n agents.push(...sessionData.agents);\n }\n } catch {\n // Skip invalid session files\n }\n }\n \n // If no agents found in sessions, create mock agents based on performance metrics\n if (agents.length === 0 && perfMetrics) {\n const activeCount = perfMetrics.activeAgents || 0;\n const totalCount = perfMetrics.totalAgents || 0;\n \n for (let i = 0; i < totalCount; i++) {\n agents.push({\n id: `agent-${i + 1}`,\n name: `Agent ${i + 1}`,\n type: i === 0 ? 'orchestrator' : 'worker',\n status: i < activeCount ? 'active' : 'idle',\n activeTasks: i < activeCount ? 
1 : 0,\n lastActivity: Date.now() - (i * 1000)\n });\n }\n }\n \n return agents;\n } catch (error) {\n return [];\n }\n }\n\n async getSessionStatus(): Promise<SessionData | null> {\n try {\n const sessionFiles = await this.getSessionFiles();\n \n if (sessionFiles.length === 0) {\n return null;\n }\n \n // Get the most recent session\n const mostRecentFile = sessionFiles[sessionFiles.length - 1];\n const content = await fs.readFile(path.join(this.sessionsDir, 'pair', mostRecentFile), 'utf8');\n return JSON.parse(content);\n } catch (error) {\n return null;\n }\n }\n\n async getRecentTasks(limit: number = 10): Promise<any[]> {\n try {\n const taskMetrics = await this.getTaskMetrics();\n \n // Sort by timestamp descending and take the limit\n return taskMetrics\n .sort((a, b) => b.timestamp - a.timestamp)\n .slice(0, limit)\n .map(task => ({\n id: task.id,\n type: task.type,\n status: task.success ? 'completed' : 'failed',\n startTime: task.timestamp - task.duration,\n endTime: task.timestamp,\n duration: task.duration\n }));\n } catch (error) {\n return [];\n }\n }\n\n async getOverallHealth(): Promise<'healthy' | 'warning' | 'error'> {\n try {\n const systemMetrics = await this.getSystemMetrics();\n const perfMetrics = await this.getPerformanceMetrics();\n \n if (!systemMetrics && !perfMetrics) {\n return 'error';\n }\n \n // Check memory usage\n if (systemMetrics && systemMetrics.memoryUsagePercent > 90) {\n return 'error';\n }\n \n if (systemMetrics && systemMetrics.memoryUsagePercent > 75) {\n return 'warning';\n }\n \n // Check CPU load\n if (systemMetrics && systemMetrics.cpuLoad > 0.8) {\n return 'warning';\n }\n \n // Check task failure rate\n if (perfMetrics && perfMetrics.totalTasks > 0) {\n const failureRate = perfMetrics.failedTasks / perfMetrics.totalTasks;\n if (failureRate > 0.5) {\n return 'error';\n }\n if (failureRate > 0.2) {\n return 'warning';\n }\n }\n \n return 'healthy';\n } catch (error) {\n return 'error';\n }\n }\n\n private async getSessionFiles(): Promise<string[]> {\n try {\n const files = await fs.readdir(path.join(this.sessionsDir, 'pair'));\n return files.filter(f => f.endsWith('.json')).sort();\n } catch (error) {\n return [];\n }\n }\n\n async getMCPServerStatus(): Promise<MCPServerStatus> {\n try {\n // Check if MCP server process is running\n const { stdout } = await execAsync('ps aux | grep -E \"mcp-server\\\\.js|claude-flow mcp start\" | grep -v grep | wc -l');\n const processCount = parseInt(stdout.trim(), 10);\n \n // Check for orchestrator running\n const { stdout: orchestratorOut } = await execAsync('ps aux | grep -E \"claude-flow start\" | grep -v grep | wc -l');\n const orchestratorRunning = parseInt(orchestratorOut.trim(), 10) > 0;\n \n // Determine status\n const isRunning = processCount > 0;\n \n // Try to get port from process (default is 3000)\n let port: number | null = 3000;\n try {\n const { stdout: portOut } = await execAsync('lsof -i :3000 2>/dev/null | grep LISTEN | wc -l');\n if (parseInt(portOut.trim(), 10) === 0) {\n // If port 3000 not listening, check other common ports\n port = null;\n }\n } catch {\n // lsof might not be available or port not in use\n }\n \n return {\n running: isRunning,\n processCount,\n orchestratorRunning,\n port,\n connections: processCount > 0 ? 
Math.max(1, processCount - 1) : 0 // Estimate connections\n };\n } catch (error) {\n // Fallback if commands fail\n return {\n running: false,\n processCount: 0,\n orchestratorRunning: false,\n port: null,\n connections: 0\n };\n }\n }\n}"],"names":["fs","path","exec","promisify","execAsync","MetricsReader","metricsDir","sessionsDir","getSystemMetrics","filePath","join","content","readFile","metrics","JSON","parse","length","error","getTaskMetrics","getPerformanceMetrics","getActiveAgents","perfMetrics","sessionFiles","getSessionFiles","agents","file","sessionData","Array","isArray","push","activeCount","activeAgents","totalCount","totalAgents","i","id","name","type","status","activeTasks","lastActivity","Date","now","getSessionStatus","mostRecentFile","getRecentTasks","limit","taskMetrics","sort","a","b","timestamp","slice","map","task","success","startTime","duration","endTime","getOverallHealth","systemMetrics","memoryUsagePercent","cpuLoad","totalTasks","failureRate","failedTasks","files","readdir","filter","f","endsWith","getMCPServerStatus","stdout","processCount","parseInt","trim","orchestratorOut","orchestratorRunning","isRunning","port","portOut","running","connections","Math","max"],"mappings":"AAAA,YAAYA,QAAQ,cAAc;AAClC,YAAYC,UAAU,OAAO;AAC7B,SAASC,IAAI,QAAQ,gBAAgB;AACrC,SAASC,SAAS,QAAQ,OAAO;AAEjC,MAAMC,YAAYD,UAAUD;AA4D5B,OAAO,MAAMG;IACHC,aAAa,uBAAuB;IACpCC,cAAc,wBAAwB;IAE9C,MAAMC,mBAAkD;QACtD,IAAI;YACF,MAAMC,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,MAAMI,UAA2BC,KAAKC,KAAK,CAACJ;YAG5C,OAAOE,QAAQG,MAAM,GAAG,IAAIH,OAAO,CAACA,QAAQG,MAAM,GAAG,EAAE,GAAG;QAC5D,EAAE,OAAOC,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMC,iBAAwC;QAC5C,IAAI;YACF,MAAMT,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,OAAOK,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAME,wBAA4D;QAChE,IAAI;YACF,MAAMV,WAAWR,KAAKS,IAAI,CAAC,IAAI,CAACJ,UAAU,EAAE;YAC5C,MAAMK,UAAU,MAAMX,GAAGY,QAAQ,CAACH,UAAU;YAC5C,OAAOK,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAMG,kBAAoC;QACxC,IAAI;YAEF,MAAMC,cAAc,MAAM,IAAI,CAACF,qBAAqB;YAGpD,MAAMG,eAAe,MAAM,IAAI,CAACC,eAAe;YAC/C,MAAMC,SAAkB,EAAE;YAE1B,KAAK,MAAMC,QAAQH,aAAc;gBAC/B,IAAI;oBACF,MAAMX,UAAU,MAAMX,GAAGY,QAAQ,CAACX,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE,QAAQkB,OAAO;oBAC7E,MAAMC,cAAcZ,KAAKC,KAAK,CAACJ;oBAE/B,IAAIe,YAAYF,MAAM,IAAIG,MAAMC,OAAO,CAACF,YAAYF,MAAM,GAAG;wBAC3DA,OAAOK,IAAI,IAAIH,YAAYF,MAAM;oBACnC;gBACF,EAAE,OAAM,CAER;YACF;YAGA,IAAIA,OAAOR,MAAM,KAAK,KAAKK,aAAa;gBACtC,MAAMS,cAAcT,YAAYU,YAAY,IAAI;gBAChD,MAAMC,aAAaX,YAAYY,WAAW,IAAI;gBAE9C,IAAK,IAAIC,IAAI,GAAGA,IAAIF,YAAYE,IAAK;oBACnCV,OAAOK,IAAI,CAAC;wBACVM,IAAI,CAAC,MAAM,EAAED,IAAI,GAAG;wBACpBE,MAAM,CAAC,MAAM,EAAEF,IAAI,GAAG;wBACtBG,MAAMH,MAAM,IAAI,iBAAiB;wBACjCI,QAAQJ,IAAIJ,cAAc,WAAW;wBACrCS,aAAaL,IAAIJ,cAAc,IAAI;wBACnCU,cAAcC,KAAKC,GAAG,KAAMR,IAAI;oBAClC;gBACF;YACF;YAEA,OAAOV;QACT,EAAE,OAAOP,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAM0B,mBAAgD;QACpD,IAAI;YACF,MAAMrB,eAAe,MAAM,IAAI,CAACC,eAAe;YAE/C,IAAID,aAAaN,MAAM,KAAK,GAAG;gBAC7B,OAAO;YACT;YAGA,MAAM4B,iBAAiBtB,YAAY,CAACA,aAAaN,MAAM,GAAG,EAAE;YAC5D,MAAML,UAAU,MAAMX,GAAGY,QAAQ,CAACX,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE,QAAQqC,iBAAiB;YACvF,OAAO9B,KAAKC,KAAK,CAACJ;QACpB,EAAE,OAAOM,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAM4B,eAAeC,QAAgB,EAAE,EAAkB;QACvD,IAAI;YACF,MAAMC,cAAc,MAAM,IAAI,CAAC7B,cAAc;YAG7C,OAAO6B,YACJC,IAAI,CAAC,CAACC,GAAGC,IAAMA,EAAEC,SAAS,GAAGF,EAAEE,SAAS,EACxCC,KAAK,CAAC,GAAGN,OACTO,GAAG,CAACC,CAAAA,OAAS,CAAA;oBACZnB,IAAImB,KAAKnB,EAAE;oBACXE,MAAMiB
,KAAKjB,IAAI;oBACfC,QAAQgB,KAAKC,OAAO,GAAG,cAAc;oBACrCC,WAAWF,KAAKH,SAAS,GAAGG,KAAKG,QAAQ;oBACzCC,SAASJ,KAAKH,SAAS;oBACvBM,UAAUH,KAAKG,QAAQ;gBACzB,CAAA;QACJ,EAAE,OAAOxC,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAM0C,mBAA6D;QACjE,IAAI;YACF,MAAMC,gBAAgB,MAAM,IAAI,CAACpD,gBAAgB;YACjD,MAAMa,cAAc,MAAM,IAAI,CAACF,qBAAqB;YAEpD,IAAI,CAACyC,iBAAiB,CAACvC,aAAa;gBAClC,OAAO;YACT;YAGA,IAAIuC,iBAAiBA,cAAcC,kBAAkB,GAAG,IAAI;gBAC1D,OAAO;YACT;YAEA,IAAID,iBAAiBA,cAAcC,kBAAkB,GAAG,IAAI;gBAC1D,OAAO;YACT;YAGA,IAAID,iBAAiBA,cAAcE,OAAO,GAAG,KAAK;gBAChD,OAAO;YACT;YAGA,IAAIzC,eAAeA,YAAY0C,UAAU,GAAG,GAAG;gBAC7C,MAAMC,cAAc3C,YAAY4C,WAAW,GAAG5C,YAAY0C,UAAU;gBACpE,IAAIC,cAAc,KAAK;oBACrB,OAAO;gBACT;gBACA,IAAIA,cAAc,KAAK;oBACrB,OAAO;gBACT;YACF;YAEA,OAAO;QACT,EAAE,OAAO/C,OAAO;YACd,OAAO;QACT;IACF;IAEA,MAAcM,kBAAqC;QACjD,IAAI;YACF,MAAM2C,QAAQ,MAAMlE,GAAGmE,OAAO,CAAClE,KAAKS,IAAI,CAAC,IAAI,CAACH,WAAW,EAAE;YAC3D,OAAO2D,MAAME,MAAM,CAACC,CAAAA,IAAKA,EAAEC,QAAQ,CAAC,UAAUtB,IAAI;QACpD,EAAE,OAAO/B,OAAO;YACd,OAAO,EAAE;QACX;IACF;IAEA,MAAMsD,qBAA+C;QACnD,IAAI;YAEF,MAAM,EAAEC,MAAM,EAAE,GAAG,MAAMpE,UAAU;YACnC,MAAMqE,eAAeC,SAASF,OAAOG,IAAI,IAAI;YAG7C,MAAM,EAAEH,QAAQI,eAAe,EAAE,GAAG,MAAMxE,UAAU;YACpD,MAAMyE,sBAAsBH,SAASE,gBAAgBD,IAAI,IAAI,MAAM;YAGnE,MAAMG,YAAYL,eAAe;YAGjC,IAAIM,OAAsB;YAC1B,IAAI;gBACF,MAAM,EAAEP,QAAQQ,OAAO,EAAE,GAAG,MAAM5E,UAAU;gBAC5C,IAAIsE,SAASM,QAAQL,IAAI,IAAI,QAAQ,GAAG;oBAEtCI,OAAO;gBACT;YACF,EAAE,OAAM,CAER;YAEA,OAAO;gBACLE,SAASH;gBACTL;gBACAI;gBACAE;gBACAG,aAAaT,eAAe,IAAIU,KAAKC,GAAG,CAAC,GAAGX,eAAe,KAAK;YAClE;QACF,EAAE,OAAOxD,OAAO;YAEd,OAAO;gBACLgE,SAAS;gBACTR,cAAc;gBACdI,qBAAqB;gBACrBE,MAAM;gBACNG,aAAa;YACf;QACF;IACF;AACF"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "claude-flow",
-  "version": "2.7.16",
+  "version": "2.7.17",
   "description": "Enterprise-grade AI agent orchestration with WASM-powered ReasoningBank memory and AgentDB vector database (always uses latest agentic-flow)",
   "mcpName": "io.github.ruvnet/claude-flow",
   "main": "cli.mjs",
package/src/cli/simple-commands/memory.js
CHANGED
@@ -423,13 +423,16 @@ async function detectMemoryMode(flags, subArgs) {
       return 'reasoningbank';
     } catch (error) {
       // SQLite initialization failed - fall back to JSON
-      const
-
+      const isSqliteError = error.message?.includes('BetterSqlite3') ||
+                            error.message?.includes('better-sqlite3') ||
+                            error.message?.includes('could not run migrations') ||
+                            error.message?.includes('ReasoningBank initialization failed');
       const isNpx = process.env.npm_config_user_agent?.includes('npx') ||
                     process.cwd().includes('_npx');

-      if (
+      if (isSqliteError && isNpx) {
         // Silent fallback for npx - error already shown by adapter
+        console.log('\n✅ Automatically using JSON fallback for this command\n');
         return 'basic';
       } else {
         printWarning(`⚠️ SQLite unavailable, using JSON fallback`);
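In effect, detectMemoryMode() now falls back silently only when the failure looks like a missing better-sqlite3 binding and the CLI is running from an npx temp directory; any other error still goes through the printWarning path. A condensed sketch of that decision, with the helper name and surrounding plumbing invented purely for illustration:

// Hypothetical condensation of the logic added in the hunk above.
function shouldSilentlyFallBackToJson(error, cwd = process.cwd(), userAgent = process.env.npm_config_user_agent || '') {
  const message = error?.message || '';
  const isSqliteError =
    message.includes('BetterSqlite3') ||
    message.includes('better-sqlite3') ||
    message.includes('could not run migrations') ||
    message.includes('ReasoningBank initialization failed');
  const isNpx = userAgent.includes('npx') || cwd.includes('_npx');
  return isSqliteError && isNpx; // otherwise the caller prints the SQLite warning
}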
package/src/reasoningbank/reasoningbank-adapter.js
CHANGED
@@ -44,8 +44,11 @@ async function ensureInitialized() {
       console.error('[ReasoningBank] Backend initialization failed:', error);

       // Check if this is the better-sqlite3 missing error (npx issue)
-
-
+      const isSqliteError = error.message?.includes('BetterSqlite3 is not a constructor') ||
+                            error.message?.includes('better-sqlite3') ||
+                            error.message?.includes('could not run migrations');
+
+      if (isSqliteError) {
         const isNpx = process.env.npm_config_user_agent?.includes('npx') ||
                       process.cwd().includes('_npx');

@@ -58,7 +61,7 @@ async function ensureInitialized() {
           console.error(' 2. USE MCP TOOLS instead:');
           console.error(' mcp__claude-flow__memory_usage({ action: "store", key: "test", value: "data" })\n');
           console.error(' 3. USE JSON FALLBACK:');
-          console.error(' npx claude-flow@alpha memory store "key" "value" --
+          console.error(' npx claude-flow@alpha memory store "key" "value" --basic\n');
           console.error('See: docs/MEMORY_COMMAND_FIX.md for details\n');
         }
       }