@stackmemoryai/stackmemory 0.3.7 → 0.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/core/agent-task-manager.js +5 -5
- package/dist/agents/core/agent-task-manager.js.map +2 -2
- package/dist/agents/verifiers/base-verifier.js +2 -2
- package/dist/agents/verifiers/base-verifier.js.map +2 -2
- package/dist/cli/claude-sm.js +0 -11
- package/dist/cli/claude-sm.js.map +2 -2
- package/dist/cli/codex-sm.js +0 -11
- package/dist/cli/codex-sm.js.map +2 -2
- package/dist/cli/commands/chromadb.js +64 -34
- package/dist/cli/commands/chromadb.js.map +2 -2
- package/dist/cli/commands/clear.js +9 -13
- package/dist/cli/commands/clear.js.map +2 -2
- package/dist/cli/commands/config.js +43 -33
- package/dist/cli/commands/config.js.map +2 -2
- package/dist/cli/commands/context.js.map +2 -2
- package/dist/cli/commands/dashboard.js +41 -13
- package/dist/cli/commands/dashboard.js.map +2 -2
- package/dist/cli/commands/gc.js +69 -20
- package/dist/cli/commands/gc.js.map +2 -2
- package/dist/cli/commands/handoff.js.map +2 -2
- package/dist/cli/commands/infinite-storage.js +60 -19
- package/dist/cli/commands/infinite-storage.js.map +2 -2
- package/dist/cli/commands/linear-create.js +36 -8
- package/dist/cli/commands/linear-create.js.map +2 -2
- package/dist/cli/commands/linear-list.js +33 -10
- package/dist/cli/commands/linear-list.js.map +2 -2
- package/dist/cli/commands/linear-migrate.js +17 -4
- package/dist/cli/commands/linear-migrate.js.map +2 -2
- package/dist/cli/commands/linear-test.js +14 -6
- package/dist/cli/commands/linear-test.js.map +2 -2
- package/dist/cli/commands/linear-unified.js +123 -35
- package/dist/cli/commands/linear-unified.js.map +2 -2
- package/dist/cli/commands/linear.js.map +2 -2
- package/dist/cli/commands/monitor.js.map +2 -2
- package/dist/cli/commands/onboard.js +35 -8
- package/dist/cli/commands/onboard.js.map +2 -2
- package/dist/cli/commands/quality.js +2 -7
- package/dist/cli/commands/quality.js.map +2 -2
- package/dist/cli/commands/session.js +23 -6
- package/dist/cli/commands/session.js.map +2 -2
- package/dist/cli/commands/skills.js +72 -27
- package/dist/cli/commands/skills.js.map +2 -2
- package/dist/cli/commands/storage.js +108 -38
- package/dist/cli/commands/storage.js.map +2 -2
- package/dist/cli/commands/tui.js.map +2 -2
- package/dist/cli/commands/webhook.js +57 -18
- package/dist/cli/commands/webhook.js.map +2 -2
- package/dist/cli/commands/workflow.js +8 -15
- package/dist/cli/commands/workflow.js.map +2 -2
- package/dist/cli/commands/worktree.js +34 -13
- package/dist/cli/commands/worktree.js.map +2 -2
- package/dist/cli/index.js +0 -11
- package/dist/cli/index.js.map +2 -2
- package/dist/core/config/types.js.map +1 -1
- package/dist/core/context/auto-context.js +10 -6
- package/dist/core/context/auto-context.js.map +2 -2
- package/dist/core/context/context-bridge.js.map +2 -2
- package/dist/core/context/frame-database.js +13 -3
- package/dist/core/context/frame-database.js.map +2 -2
- package/dist/core/context/frame-digest.js +7 -5
- package/dist/core/context/frame-digest.js.map +2 -2
- package/dist/core/context/frame-manager.js.map +2 -2
- package/dist/core/context/frame-stack.js +16 -5
- package/dist/core/context/frame-stack.js.map +2 -2
- package/dist/core/context/incremental-gc.js +10 -3
- package/dist/core/context/incremental-gc.js.map +2 -2
- package/dist/core/context/index.js.map +1 -1
- package/dist/core/context/permission-manager.js.map +2 -2
- package/dist/core/context/recursive-context-manager.js +582 -0
- package/dist/core/context/recursive-context-manager.js.map +7 -0
- package/dist/core/context/refactored-frame-manager.js +12 -3
- package/dist/core/context/refactored-frame-manager.js.map +2 -2
- package/dist/core/context/shared-context-layer.js +4 -2
- package/dist/core/context/shared-context-layer.js.map +2 -2
- package/dist/core/database/batch-operations.js +112 -86
- package/dist/core/database/batch-operations.js.map +2 -2
- package/dist/core/database/query-cache.js +19 -9
- package/dist/core/database/query-cache.js.map +2 -2
- package/dist/core/database/sqlite-adapter.js +1 -1
- package/dist/core/database/sqlite-adapter.js.map +2 -2
- package/dist/core/digest/enhanced-hybrid-digest.js +8 -2
- package/dist/core/digest/enhanced-hybrid-digest.js.map +2 -2
- package/dist/core/errors/recovery.js +9 -2
- package/dist/core/errors/recovery.js.map +2 -2
- package/dist/core/execution/parallel-executor.js +254 -0
- package/dist/core/execution/parallel-executor.js.map +7 -0
- package/dist/core/frame/workflow-templates-stub.js.map +1 -1
- package/dist/core/frame/workflow-templates.js +40 -1
- package/dist/core/frame/workflow-templates.js.map +2 -2
- package/dist/core/monitoring/logger.js +6 -1
- package/dist/core/monitoring/logger.js.map +2 -2
- package/dist/core/monitoring/metrics.js.map +2 -2
- package/dist/core/monitoring/progress-tracker.js.map +2 -2
- package/dist/core/performance/context-cache.js.map +2 -2
- package/dist/core/performance/lazy-context-loader.js +24 -20
- package/dist/core/performance/lazy-context-loader.js.map +2 -2
- package/dist/core/performance/optimized-frame-context.js +27 -12
- package/dist/core/performance/optimized-frame-context.js.map +2 -2
- package/dist/core/performance/performance-benchmark.js +10 -6
- package/dist/core/performance/performance-benchmark.js.map +2 -2
- package/dist/core/performance/performance-profiler.js +51 -14
- package/dist/core/performance/performance-profiler.js.map +2 -2
- package/dist/core/performance/streaming-jsonl-parser.js +5 -1
- package/dist/core/performance/streaming-jsonl-parser.js.map +2 -2
- package/dist/core/projects/project-manager.js +14 -20
- package/dist/core/projects/project-manager.js.map +2 -2
- package/dist/core/retrieval/context-retriever.js.map +1 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +2 -2
- package/dist/core/session/clear-survival-stub.js +5 -1
- package/dist/core/session/clear-survival-stub.js.map +2 -2
- package/dist/core/session/clear-survival.js +35 -0
- package/dist/core/session/clear-survival.js.map +2 -2
- package/dist/core/session/index.js.map +1 -1
- package/dist/core/session/session-manager.js.map +2 -2
- package/dist/core/storage/chromadb-adapter.js +6 -2
- package/dist/core/storage/chromadb-adapter.js.map +2 -2
- package/dist/core/storage/chromadb-simple.js +17 -5
- package/dist/core/storage/chromadb-simple.js.map +2 -2
- package/dist/core/storage/infinite-storage.js +109 -46
- package/dist/core/storage/infinite-storage.js.map +2 -2
- package/dist/core/storage/railway-optimized-storage.js +48 -22
- package/dist/core/storage/railway-optimized-storage.js.map +2 -2
- package/dist/core/storage/remote-storage.js +41 -23
- package/dist/core/storage/remote-storage.js.map +2 -2
- package/dist/core/trace/cli-trace-wrapper.js +9 -2
- package/dist/core/trace/cli-trace-wrapper.js.map +2 -2
- package/dist/core/trace/db-trace-wrapper.js +96 -68
- package/dist/core/trace/db-trace-wrapper.js.map +2 -2
- package/dist/core/trace/debug-trace.js +25 -8
- package/dist/core/trace/debug-trace.js.map +2 -2
- package/dist/core/trace/index.js +6 -2
- package/dist/core/trace/index.js.map +2 -2
- package/dist/core/trace/linear-api-wrapper.js +10 -5
- package/dist/core/trace/linear-api-wrapper.js.map +2 -2
- package/dist/core/trace/trace-demo.js +14 -10
- package/dist/core/trace/trace-demo.js.map +2 -2
- package/dist/core/trace/trace-detector.js +9 -2
- package/dist/core/trace/trace-detector.js.map +2 -2
- package/dist/core/trace/types.js.map +1 -1
- package/dist/core/utils/compression.js.map +1 -1
- package/dist/core/utils/update-checker.js.map +1 -1
- package/dist/core/worktree/worktree-manager.js +18 -7
- package/dist/core/worktree/worktree-manager.js.map +2 -2
- package/dist/features/analytics/core/analytics-service.js.map +2 -2
- package/dist/features/analytics/queries/metrics-queries.js +1 -1
- package/dist/features/analytics/queries/metrics-queries.js.map +2 -2
- package/dist/features/tasks/pebbles-task-store.js.map +1 -1
- package/dist/features/tui/components/analytics-panel.js +36 -15
- package/dist/features/tui/components/analytics-panel.js.map +2 -2
- package/dist/features/tui/components/pr-tracker.js +19 -7
- package/dist/features/tui/components/pr-tracker.js.map +2 -2
- package/dist/features/tui/components/session-monitor.js +22 -9
- package/dist/features/tui/components/session-monitor.js.map +2 -2
- package/dist/features/tui/components/subagent-fleet.js +20 -13
- package/dist/features/tui/components/subagent-fleet.js.map +2 -2
- package/dist/features/tui/components/task-board.js +26 -10
- package/dist/features/tui/components/task-board.js.map +2 -2
- package/dist/features/tui/index.js.map +2 -2
- package/dist/features/tui/services/data-service.js +6 -2
- package/dist/features/tui/services/data-service.js.map +2 -2
- package/dist/features/tui/services/linear-task-reader.js +3 -1
- package/dist/features/tui/services/linear-task-reader.js.map +2 -2
- package/dist/features/tui/services/websocket-client.js +3 -1
- package/dist/features/tui/services/websocket-client.js.map +2 -2
- package/dist/features/tui/terminal-compat.js +6 -2
- package/dist/features/tui/terminal-compat.js.map +2 -2
- package/dist/features/web/client/stores/task-store.js.map +2 -2
- package/dist/features/web/server/index.js +18 -10
- package/dist/features/web/server/index.js.map +2 -2
- package/dist/integrations/anthropic/client.js +259 -0
- package/dist/integrations/anthropic/client.js.map +7 -0
- package/dist/integrations/claude-code/subagent-client.js +404 -0
- package/dist/integrations/claude-code/subagent-client.js.map +7 -0
- package/dist/integrations/linear/sync-service.js +12 -13
- package/dist/integrations/linear/sync-service.js.map +2 -2
- package/dist/integrations/linear/sync.js +174 -12
- package/dist/integrations/linear/sync.js.map +2 -2
- package/dist/integrations/linear/unified-sync.js +1 -1
- package/dist/integrations/linear/unified-sync.js.map +1 -1
- package/dist/integrations/linear/webhook-server.js +15 -16
- package/dist/integrations/linear/webhook-server.js.map +2 -2
- package/dist/mcp/stackmemory-mcp-server.js +0 -11
- package/dist/mcp/stackmemory-mcp-server.js.map +2 -2
- package/dist/servers/production/auth-middleware.js.map +2 -2
- package/dist/servers/railway/index.js.map +2 -2
- package/dist/services/config-service.js +6 -7
- package/dist/services/config-service.js.map +2 -2
- package/dist/services/context-service.js +11 -12
- package/dist/services/context-service.js.map +2 -2
- package/dist/skills/claude-skills.js +101 -2
- package/dist/skills/claude-skills.js.map +2 -2
- package/dist/skills/dashboard-launcher.js.map +2 -2
- package/dist/skills/recursive-agent-orchestrator.js +559 -0
- package/dist/skills/recursive-agent-orchestrator.js.map +7 -0
- package/dist/skills/repo-ingestion-skill.js.map +2 -2
- package/dist/skills/security-secrets-scanner.js +265 -0
- package/dist/skills/security-secrets-scanner.js.map +7 -0
- package/dist/utils/env.js +46 -0
- package/dist/utils/env.js.map +7 -0
- package/dist/utils/logger.js +0 -11
- package/dist/utils/logger.js.map +2 -2
- package/package.json +1 -1
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/skills/repo-ingestion-skill.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * Repository Ingestion Skill for ChromaDB\n *\n * Ingests and maintains code repositories in ChromaDB for enhanced code search and context\n */\n\nimport { ChromaDBAdapter } from '../core/storage/chromadb-adapter.js';\nimport { Logger } from '../core/monitoring/logger.js';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as crypto from 'crypto';\nimport { execSync } from 'child_process';\nimport ignore from 'ignore';\n\nexport interface RepoIngestionOptions {\n incremental?: boolean;\n forceUpdate?: boolean;\n includeTests?: boolean;\n includeDocs?: boolean;\n maxFileSize?: number;\n chunkSize?: number;\n extensions?: string[];\n excludePatterns?: string[];\n}\n\nexport interface RepoMetadata {\n repoId: string;\n repoName: string;\n branch: string;\n lastCommit: string;\n lastIngested: number;\n filesCount: number;\n totalSize: number;\n language: string;\n framework?: string;\n}\n\nexport interface FileChunk {\n id: string;\n filePath: string;\n content: string;\n startLine: number;\n endLine: number;\n hash: string;\n language: string;\n}\n\nexport class RepoIngestionSkill {\n private logger: Logger;\n private adapter: ChromaDBAdapter;\n private metadataCache: Map<string, RepoMetadata> = new Map();\n private fileHashCache: Map<string, string> = new Map();\n\n constructor(\n private config: {\n apiKey: string;\n tenant: string;\n database: string;\n collectionName?: string;\n },\n private userId: string,\n private teamId?: string\n ) {\n this.logger = new Logger('RepoIngestionSkill');\n this.adapter = new ChromaDBAdapter(\n {\n ...config,\n collectionName: config.collectionName || 'stackmemory_repos',\n },\n userId,\n teamId\n );\n }\n\n async initialize(): Promise<void> {\n await this.adapter.initialize();\n await this.loadMetadataCache();\n }\n\n /**\n * Ingest a repository into ChromaDB\n */\n async ingestRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n 
success: boolean;\n message: string;\n stats?: {\n filesProcessed: number;\n chunksCreated: number;\n timeElapsed: number;\n totalSize: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n this.logger.info(`Starting repository ingestion for ${repoName}`);\n\n // Validate repository path\n if (!fs.existsSync(repoPath)) {\n throw new Error(`Repository path not found: ${repoPath}`);\n }\n\n // Get repository metadata\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n\n // Check if incremental update is possible\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n if (options.incremental && existingMetadata && !options.forceUpdate) {\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected since last ingestion',\n };\n }\n\n this.logger.info(\n `Incremental update: ${changedFiles.length} files changed`\n );\n }\n\n // Get files to process\n const files = await this.getRepoFiles(repoPath, options);\n this.logger.info(`Found ${files.length} files to process`);\n\n // Process files and create chunks\n let filesProcessed = 0;\n let chunksCreated = 0;\n let totalSize = 0;\n\n for (const file of files) {\n try {\n const chunks = await this.processFile(\n file,\n repoPath,\n repoName,\n metadata,\n options\n );\n\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n chunksCreated++;\n }\n\n filesProcessed++;\n totalSize += fs.statSync(file).size;\n\n // Log progress every 100 files\n if (filesProcessed % 100 === 0) {\n this.logger.info(\n `Processed ${filesProcessed}/${files.length} files`\n );\n }\n } catch (error: unknown) {\n this.logger.warn(`Failed to process file ${file}:`, error);\n }\n }\n\n // Update metadata\n metadata.filesCount = filesProcessed;\n metadata.totalSize = totalSize;\n metadata.lastIngested = Date.now();\n await 
this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n this.logger.info(\n `Repository ingestion complete: ${filesProcessed} files, ${chunksCreated} chunks in ${timeElapsed}ms`\n );\n\n return {\n success: true,\n message: `Successfully ingested ${repoName}`,\n stats: {\n filesProcessed,\n chunksCreated,\n timeElapsed,\n totalSize,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository ingestion failed:', error);\n return {\n success: false,\n message: `Failed to ingest repository: ${error instanceof Error ? error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Update an existing repository in ChromaDB\n */\n async updateRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n success: boolean;\n message: string;\n stats?: {\n filesUpdated: number;\n filesAdded: number;\n filesRemoved: number;\n timeElapsed: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n\n if (!existingMetadata) {\n // No existing data, perform full ingestion\n return this.ingestRepository(repoPath, repoName, options);\n }\n\n // Get changed files since last ingestion\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected',\n stats: {\n filesUpdated: 0,\n filesAdded: 0,\n filesRemoved: 0,\n timeElapsed: Date.now() - startTime,\n },\n };\n }\n\n let filesUpdated = 0;\n let filesAdded = 0;\n let filesRemoved = 0;\n\n for (const change of changedFiles) {\n const filePath = path.join(repoPath, change.path);\n\n if (change.status === 'deleted') {\n await this.removeFileChunks(change.path, metadata.repoId);\n filesRemoved++;\n } else if (change.status === 'added') {\n const chunks = await 
this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesAdded++;\n } else if (change.status === 'modified') {\n // Remove old chunks and add new ones\n await this.removeFileChunks(change.path, metadata.repoId);\n const chunks = await this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesUpdated++;\n }\n }\n\n // Update metadata\n metadata.lastIngested = Date.now();\n await this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n return {\n success: true,\n message: `Successfully updated ${repoName}`,\n stats: {\n filesUpdated,\n filesAdded,\n filesRemoved,\n timeElapsed,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository update failed:', error);\n return {\n success: false,\n message: `Failed to update repository: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Search code in ingested repositories\n */\n async searchCode(\n query: string,\n options?: {\n repoName?: string;\n language?: string;\n limit?: number;\n includeContext?: boolean;\n }\n ): Promise<\n Array<{\n filePath: string;\n content: string;\n score: number;\n startLine: number;\n endLine: number;\n repoName: string;\n }>\n > {\n try {\n const filters: any = {\n type: ['code_chunk'],\n };\n\n if (options?.repoName) {\n filters.repo_name = options.repoName;\n }\n\n if (options?.language) {\n filters.language = options.language;\n }\n\n const results = await this.adapter.queryContexts(\n query,\n options?.limit || 20,\n filters\n );\n\n return results.map((result) => ({\n filePath: result.metadata.file_path,\n content: result.content,\n score: 1 - result.distance, // Convert distance to similarity score\n startLine: result.metadata.start_line,\n endLine: result.metadata.end_line,\n repoName: result.metadata.repo_name,\n }));\n } catch (error: unknown) {\n this.logger.error('Code search failed:', error);\n return [];\n }\n }\n\n /**\n * Get repository metadata\n */\n private async getRepoMetadata(\n repoPath: string,\n repoName: string\n ): Promise<RepoMetadata> {\n const branch = this.getCurrentBranch(repoPath);\n const lastCommit = this.getLastCommit(repoPath);\n const repoId = `${repoName}_${branch}`.replace(/[^a-zA-Z0-9_-]/g, '_');\n\n // Detect primary language and framework\n const { language, framework } =\n await this.detectLanguageAndFramework(repoPath);\n\n return {\n repoId,\n repoName,\n branch,\n lastCommit,\n lastIngested: Date.now(),\n filesCount: 0,\n totalSize: 0,\n language,\n framework,\n };\n }\n\n /**\n * Get current git branch\n */\n private getCurrentBranch(repoPath: string): string {\n try {\n return execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'main';\n }\n }\n\n /**\n * Get last commit hash\n */\n private 
getLastCommit(repoPath: string): string {\n try {\n return execSync('git rev-parse HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'unknown';\n }\n }\n\n /**\n * Get changed files between commits\n */\n private async getChangedFiles(\n repoPath: string,\n fromCommit: string,\n toCommit: string\n ): Promise<Array<{ path: string; status: string }>> {\n try {\n const diff = execSync(\n `git diff --name-status ${fromCommit}..${toCommit}`,\n {\n cwd: repoPath,\n encoding: 'utf8',\n }\n );\n\n return diff\n .split('\\n')\n .filter((line) => line.trim())\n .map((line) => {\n const [status, ...pathParts] = line.split('\\t');\n return {\n path: pathParts.join('\\t'),\n status:\n status === 'A'\n ? 'added'\n : status === 'D'\n ? 'deleted'\n : 'modified',\n };\n });\n } catch {\n return [];\n }\n }\n\n /**\n * Get repository files to process\n */\n private async getRepoFiles(\n repoPath: string,\n options: RepoIngestionOptions\n ): Promise<string[]> {\n const files: string[] = [];\n const ig = ignore();\n\n // Load .gitignore if it exists\n const gitignorePath = path.join(repoPath, '.gitignore');\n if (fs.existsSync(gitignorePath)) {\n ig.add(fs.readFileSync(gitignorePath, 'utf8'));\n }\n\n // Add default exclude patterns\n const defaultExcludes = [\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n '.env',\n '*.log',\n ...(options.excludePatterns || []),\n ];\n ig.add(defaultExcludes);\n\n // Default extensions to include\n const extensions = options.extensions || [\n '.ts',\n '.tsx',\n '.js',\n '.jsx',\n '.py',\n '.java',\n '.go',\n '.rs',\n '.c',\n '.cpp',\n '.h',\n '.hpp',\n '.cs',\n '.rb',\n '.php',\n '.swift',\n '.kt',\n '.scala',\n '.r',\n '.m',\n '.sql',\n '.yaml',\n '.yml',\n '.json',\n ];\n\n // Add documentation if requested\n if (options.includeDocs) {\n extensions.push('.md', '.rst', '.txt');\n }\n\n const maxFileSize = options.maxFileSize || 1024 * 1024; // 1MB default\n\n const walkDir = (dir: string, baseDir: 
string = repoPath) => {\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath);\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n walkDir(fullPath, baseDir);\n } else if (entry.isFile()) {\n const ext = path.extname(entry.name);\n\n // Check if file should be included\n if (!extensions.includes(ext)) {\n continue;\n }\n\n // Check if it's a test file\n if (\n !options.includeTests &&\n (entry.name.includes('.test.') ||\n entry.name.includes('.spec.') ||\n relativePath.includes('__tests__') ||\n relativePath.includes('test/') ||\n relativePath.includes('tests/'))\n ) {\n continue;\n }\n\n // Check file size\n const stats = fs.statSync(fullPath);\n if (stats.size > maxFileSize) {\n this.logger.debug(`Skipping large file: ${relativePath}`);\n continue;\n }\n\n files.push(fullPath);\n }\n }\n };\n\n walkDir(repoPath);\n return files;\n }\n\n /**\n * Process a file into chunks\n */\n private async processFile(\n filePath: string,\n repoPath: string,\n repoName: string,\n metadata: RepoMetadata,\n options: RepoIngestionOptions\n ): Promise<FileChunk[]> {\n const relativePath = path.relative(repoPath, filePath);\n const content = fs.readFileSync(filePath, 'utf8');\n const lines = content.split('\\n');\n const language = this.detectFileLanguage(filePath);\n\n const chunkSize = options.chunkSize || 100; // 100 lines per chunk\n const chunks: FileChunk[] = [];\n\n // Calculate file hash for caching\n const fileHash = crypto.createHash('md5').update(content).digest('hex');\n\n // Check if file has changed\n const cachedHash = this.fileHashCache.get(relativePath);\n if (cachedHash === fileHash && !options.forceUpdate) {\n return []; // File hasn't changed\n }\n\n this.fileHashCache.set(relativePath, fileHash);\n\n // Split into chunks\n for (let i = 0; i < lines.length; i += chunkSize) {\n const 
chunkLines = lines.slice(i, Math.min(i + chunkSize, lines.length));\n const chunkContent = chunkLines.join('\\n');\n\n if (chunkContent.trim().length === 0) {\n continue; // Skip empty chunks\n }\n\n const chunkId = `${metadata.repoId}_${relativePath}_${i}`;\n const chunkHash = crypto\n .createHash('md5')\n .update(chunkContent)\n .digest('hex');\n\n chunks.push({\n id: chunkId,\n filePath: relativePath,\n content: chunkContent,\n startLine: i + 1,\n endLine: Math.min(i + chunkSize, lines.length),\n hash: chunkHash,\n language,\n });\n }\n\n return chunks;\n }\n\n /**\n * Store a chunk in ChromaDB\n */\n private async storeChunk(\n chunk: FileChunk,\n metadata: RepoMetadata\n ): Promise<void> {\n const documentContent = `File: ${chunk.filePath} (Lines ${chunk.startLine}-${chunk.endLine})\nLanguage: ${chunk.language}\nRepository: ${metadata.repoName}/${metadata.branch}\n\n${chunk.content}`;\n\n await this.adapter.storeContext('observation', documentContent, {\n type: 'code_chunk',\n repo_id: metadata.repoId,\n repo_name: metadata.repoName,\n branch: metadata.branch,\n file_path: chunk.filePath,\n start_line: chunk.startLine,\n end_line: chunk.endLine,\n language: chunk.language,\n framework: metadata.framework,\n chunk_hash: chunk.hash,\n last_commit: metadata.lastCommit,\n });\n }\n\n /**\n * Remove file chunks from ChromaDB\n */\n private async removeFileChunks(\n filePath: string,\n repoId: string\n ): Promise<void> {\n // This would need to be implemented in ChromaDBAdapter\n // For now, we'll log it\n this.logger.debug(\n `Would remove chunks for file: ${filePath} from repo: ${repoId}`\n );\n }\n\n /**\n * Detect file language\n */\n private detectFileLanguage(filePath: string): string {\n const ext = path.extname(filePath).toLowerCase();\n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.py': 'python',\n '.java': 'java',\n '.go': 'go',\n '.rs': 'rust',\n '.c': 'c',\n 
'.cpp': 'cpp',\n '.cs': 'csharp',\n '.rb': 'ruby',\n '.php': 'php',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.scala': 'scala',\n '.r': 'r',\n '.sql': 'sql',\n '.yaml': 'yaml',\n '.yml': 'yaml',\n '.json': 'json',\n '.md': 'markdown',\n };\n\n return languageMap[ext] || 'unknown';\n }\n\n /**\n * Detect language and framework\n */\n private async detectLanguageAndFramework(repoPath: string): Promise<{\n language: string;\n framework?: string;\n }> {\n // Check for package.json (JavaScript/TypeScript)\n const packageJsonPath = path.join(repoPath, 'package.json');\n if (fs.existsSync(packageJsonPath)) {\n try {\n const packageJson = JSON.parse(\n fs.readFileSync(packageJsonPath, 'utf8')\n );\n const deps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n };\n\n let framework: string | undefined;\n if (deps.react) framework = 'react';\n else if (deps.vue) framework = 'vue';\n else if (deps.angular) framework = 'angular';\n else if (deps.express) framework = 'express';\n else if (deps.next) framework = 'nextjs';\n else if (deps.svelte) framework = 'svelte';\n\n return {\n language: deps.typescript ? 
'typescript' : 'javascript',\n framework,\n };\n } catch {}\n }\n\n // Check for requirements.txt or setup.py (Python)\n if (\n fs.existsSync(path.join(repoPath, 'requirements.txt')) ||\n fs.existsSync(path.join(repoPath, 'setup.py'))\n ) {\n return { language: 'python' };\n }\n\n // Check for go.mod (Go)\n if (fs.existsSync(path.join(repoPath, 'go.mod'))) {\n return { language: 'go' };\n }\n\n // Check for Cargo.toml (Rust)\n if (fs.existsSync(path.join(repoPath, 'Cargo.toml'))) {\n return { language: 'rust' };\n }\n\n // Check for pom.xml or build.gradle (Java)\n if (\n fs.existsSync(path.join(repoPath, 'pom.xml')) ||\n fs.existsSync(path.join(repoPath, 'build.gradle'))\n ) {\n return { language: 'java' };\n }\n\n // Default to unknown\n return { language: 'unknown' };\n }\n\n /**\n * Load metadata cache\n */\n private async loadMetadataCache(): Promise<void> {\n // In a real implementation, this would load from a persistent store\n // For now, we'll just initialize an empty cache\n this.metadataCache.clear();\n }\n\n /**\n * Save metadata\n */\n private async saveMetadata(metadata: RepoMetadata): Promise<void> {\n this.metadataCache.set(metadata.repoId, metadata);\n // In a real implementation, this would persist to a store\n }\n\n /**\n * Get repository statistics\n */\n async getRepoStats(repoName?: string): Promise<{\n totalRepos: number;\n totalFiles: number;\n totalChunks: number;\n languages: Record<string, number>;\n frameworks: Record<string, number>;\n }> {\n // This would query ChromaDB for statistics\n const stats = {\n totalRepos: this.metadataCache.size,\n totalFiles: 0,\n totalChunks: 0,\n languages: {} as Record<string, number>,\n frameworks: {} as Record<string, number>,\n };\n\n for (const metadata of this.metadataCache.values()) {\n if (!repoName || metadata.repoName === repoName) {\n stats.totalFiles += metadata.filesCount;\n\n if (metadata.language) {\n stats.languages[metadata.language] =\n (stats.languages[metadata.language] || 0) + 1;\n 
}\n\n if (metadata.framework) {\n stats.frameworks[metadata.framework] =\n (stats.frameworks[metadata.framework] || 0) + 1;\n }\n }\n }\n\n return stats;\n }\n}\n"],
|
|
5
|
-
"mappings": "AAMA,SAAS,uBAAuB;AAChC,SAAS,cAAc;AACvB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,YAAY,YAAY;AACxB,SAAS,gBAAgB;AACzB,OAAO,YAAY;AAmCZ,MAAM,mBAAmB;AAAA,EAM9B,YACU,QAMA,QACA,QACR;AARQ;AAMA;AACA;AAER,SAAK,SAAS,IAAI,OAAO,oBAAoB;AAC7C,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,QACE,GAAG;AAAA,QACH,gBAAgB,OAAO,kBAAkB;AAAA,MAC3C;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAxBQ;AAAA,EACA;AAAA,EACA,gBAA2C,oBAAI,IAAI;AAAA,EACnD,gBAAqC,oBAAI,IAAI;AAAA,EAuBrD,MAAM,aAA4B;AAChC,UAAM,KAAK,QAAQ,WAAW;AAC9B,UAAM,KAAK,kBAAkB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,WAAK,OAAO,KAAK,qCAAqC,QAAQ,EAAE;AAGhE,UAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,cAAM,IAAI,MAAM,8BAA8B,QAAQ,EAAE;AAAA,MAC1D;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAG9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAC/D,UAAI,QAAQ,eAAe,oBAAoB,CAAC,QAAQ,aAAa;AACnE,cAAM,eAAe,MAAM,KAAK;AAAA,UAC9B;AAAA,UACA,iBAAiB;AAAA,UACjB,SAAS;AAAA,QACX;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS;AAAA,UACX;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,uBAAuB,aAAa,MAAM;AAAA,QAC5C;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,OAAO;AACvD,WAAK,OAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAGzD,UAAI,iBAAiB;AACrB,UAAI,gBAAgB;AACpB,UAAI,YAAY;AAEhB,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AACrC;AAAA,UACF;AAEA;AACA,uBAAa,GAAG,SAAS,IAAI,EAAE;AAG/B,cAAI,iBAAiB,QAAQ,GAAG;AAC9B,iBAAK,OAAO;AAAA,cACV,aAAa,cAAc,IAAI,MAAM,MAAM;AAAA,YAC7C;AAAA,UACF;AAAA,QACF,SAAS,OAAgB;AACvB,eAAK,OAAO,KAAK,0BAA0B,IAAI,KAAK,KAAK;AAAA,QAC3D;AAAA,MACF;AAGA,eAAS,aAAa;AACtB,eAAS,YAAY;AACrB,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,WAAK,OAAO;AAAA,QACV,kCAAkC,cAAc,WAAW,aAAa,cAAc,WAAW;AAAA,MACnG;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,yBAAyB,QAAQ;AAAA,QAC1C,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,gCAAgC,KAA
K;AACvD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAC9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAE/D,UAAI,CAAC,kBAAkB;AAErB,eAAO,KAAK,iBAAiB,UAAU,UAAU,OAAO;AAAA,MAC1D;AAGA,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B;AAAA,QACA,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAEA,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS;AAAA,UACT,OAAO;AAAA,YACL,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,aAAa,KAAK,IAAI,IAAI;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,eAAe;AACnB,UAAI,aAAa;AACjB,UAAI,eAAe;AAEnB,iBAAW,UAAU,cAAc;AACjC,cAAM,WAAW,KAAK,KAAK,UAAU,OAAO,IAAI;AAEhD,YAAI,OAAO,WAAW,WAAW;AAC/B,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD;AAAA,QACF,WAAW,OAAO,WAAW,SAAS;AACpC,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF,WAAW,OAAO,WAAW,YAAY;AAEvC,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF;AAAA,MACF;AAGA,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,wBAAwB,QAAQ;AAAA,QACzC,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,6BAA6B,KAAK;AACpD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WACJ,OACA,SAeA;AACA,QAAI;AACF,YAAM,UAAe;AAAA,QACnB,MAAM,CAAC,YAAY;AAAA,MACrB;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,YAAY,QAAQ;AAAA,MAC9B;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,YAAM,UAAU,MAAM,KAAK,QAAQ;AAAA,QACjC;AAAA,QACA,SAAS,SAAS;AAAA,QAClB;AAAA,MACF;AAEA,aAAO,QAAQ,IAAI,CAAC,YAAY;AAAA,QAC9B,UAAU,OAAO,SAAS;AAAA,QAC1B,SAAS,OAAO;AAA
A,QAChB,OAAO,IAAI,OAAO;AAAA;AAAA,QAClB,WAAW,OAAO,SAAS;AAAA,QAC3B,SAAS,OAAO,SAAS;AAAA,QACzB,UAAU,OAAO,SAAS;AAAA,MAC5B,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,uBAAuB,KAAK;AAC9C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,UACuB;AACvB,UAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,UAAM,aAAa,KAAK,cAAc,QAAQ;AAC9C,UAAM,SAAS,GAAG,QAAQ,IAAI,MAAM,GAAG,QAAQ,mBAAmB,GAAG;AAGrE,UAAM,EAAE,UAAU,UAAU,IAC1B,MAAM,KAAK,2BAA2B,QAAQ;AAEhD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,KAAK,IAAI;AAAA,MACvB,YAAY;AAAA,MACZ,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAA0B;AACjD,QAAI;AACF,aAAO,SAAS,mCAAmC;AAAA,QACjD,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,UAA0B;AAC9C,QAAI;AACF,aAAO,SAAS,sBAAsB;AAAA,QACpC,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,YACA,UACkD;AAClD,QAAI;AACF,YAAM,OAAO;AAAA,QACX,0BAA0B,UAAU,KAAK,QAAQ;AAAA,QACjD;AAAA,UACE,KAAK;AAAA,UACL,UAAU;AAAA,QACZ;AAAA,MACF;AAEA,aAAO,KACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC,EAC5B,IAAI,CAAC,SAAS;AACb,cAAM,CAAC,QAAQ,GAAG,SAAS,IAAI,KAAK,MAAM,GAAI;AAC9C,eAAO;AAAA,UACL,MAAM,UAAU,KAAK,GAAI;AAAA,UACzB,QACE,WAAW,MACP,UACA,WAAW,MACT,YACA;AAAA,QACV;AAAA,MACF,CAAC;AAAA,IACL,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,UACA,SACmB;AACnB,UAAM,QAAkB,CAAC;AACzB,UAAM,KAAK,OAAO;AAGlB,UAAM,gBAAgB,KAAK,KAAK,UAAU,YAAY;AACtD,QAAI,GAAG,WAAW,aAAa,GAAG;AAChC,SAAG,IAAI,GAAG,aAAa,eAAe,MAAM,CAAC;AAAA,IAC/C;AAGA,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,mBAAmB,CAAC;AAAA,IAClC;AACA,OAAG,IAAI,eAAe;AAGtB,UAAM,aAAa,QAAQ,cAAc;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA
,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa;AACvB,iBAAW,KAAK,OAAO,QAAQ,MAAM;AAAA,IACvC;AAEA,UAAM,cAAc,QAAQ,eAAe,OAAO;AAElD,UAAM,UAAU,CAAC,KAAa,UAAkB,aAAa;AAC3D,YAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAE3D,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAM,eAAe,KAAK,SAAS,SAAS,QAAQ;AAEpD,YAAI,GAAG,QAAQ,YAAY,GAAG;AAC5B;AAAA,QACF;AAEA,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,UAAU,OAAO;AAAA,QAC3B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,KAAK,QAAQ,MAAM,IAAI;AAGnC,cAAI,CAAC,WAAW,SAAS,GAAG,GAAG;AAC7B;AAAA,UACF;AAGA,cACE,CAAC,QAAQ,iBACR,MAAM,KAAK,SAAS,QAAQ,KAC3B,MAAM,KAAK,SAAS,QAAQ,KAC5B,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,OAAO,KAC7B,aAAa,SAAS,QAAQ,IAChC;AACA;AAAA,UACF;AAGA,gBAAM,QAAQ,GAAG,SAAS,QAAQ;AAClC,cAAI,MAAM,OAAO,aAAa;AAC5B,iBAAK,OAAO,MAAM,wBAAwB,YAAY,EAAE;AACxD;AAAA,UACF;AAEA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,QAAQ;AAChB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACA,UACA,UACA,SACsB;AACtB,UAAM,eAAe,KAAK,SAAS,UAAU,QAAQ;AACrD,UAAM,UAAU,GAAG,aAAa,UAAU,MAAM;AAChD,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,WAAW,KAAK,mBAAmB,QAAQ;AAEjD,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,SAAsB,CAAC;AAG7B,UAAM,WAAW,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAGtE,UAAM,aAAa,KAAK,cAAc,IAAI,YAAY;AACtD,QAAI,eAAe,YAAY,CAAC,QAAQ,aAAa;AACnD,aAAO,CAAC;AAAA,IACV;AAEA,SAAK,cAAc,IAAI,cAAc,QAAQ;AAG7C,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,YAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM,CAAC;AACvE,YAAM,eAAe,WAAW,KAAK,IAAI;AAEzC,UAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,MACF;AAEA,YAAM,UAAU,GAAG,SAAS,MAAM,IAAI,YAAY,IAAI,CAAC;AACvD,YAAM,YAAY,OACf,WAAW,KAAK,EAChB,OAAO,YAAY,EACnB,OAAO,KAAK;AAEf,aAAO,KAAK;AAAA,QACV,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,SAAS;AAAA,QACT,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AAAA,QAC7C,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WACZ,OACA,UACe;AACf,UAAM,kBAAkB,SAAS,MAAM,QAAQ,WAAW,MAAM,SAAS,IAAI,MAAM,OAAO;AAAA,YAClF,MAAM,QAAQ;AAAA,cACZ,SAAS,QAAQ,IAAI,SAAS,MAAM;AAAA;AAAA,EAEhD,MAAM,OA
AO;AAEX,UAAM,KAAK,QAAQ,aAAa,eAAe,iBAAiB;AAAA,MAC9D,MAAM;AAAA,MACN,SAAS,SAAS;AAAA,MAClB,WAAW,SAAS;AAAA,MACpB,QAAQ,SAAS;AAAA,MACjB,WAAW,MAAM;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,WAAW,SAAS;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,aAAa,SAAS;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,UACA,QACe;AAGf,SAAK,OAAO;AAAA,MACV,iCAAiC,QAAQ,eAAe,MAAM;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAA0B;AACnD,UAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,UAAM,cAAsC;AAAA,MAC1C,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,OAAO;AAAA,MACP,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAEA,WAAO,YAAY,GAAG,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,UAGtC;AAED,UAAM,kBAAkB,KAAK,KAAK,UAAU,cAAc;AAC1D,QAAI,GAAG,WAAW,eAAe,GAAG;AAClC,UAAI;AACF,cAAM,cAAc,KAAK;AAAA,UACvB,GAAG,aAAa,iBAAiB,MAAM;AAAA,QACzC;AACA,cAAM,OAAO;AAAA,UACX,GAAG,YAAY;AAAA,UACf,GAAG,YAAY;AAAA,QACjB;AAEA,YAAI;AACJ,YAAI,KAAK,MAAO,aAAY;AAAA,iBACnB,KAAK,IAAK,aAAY;AAAA,iBACtB,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,KAAM,aAAY;AAAA,iBACvB,KAAK,OAAQ,aAAY;AAElC,eAAO;AAAA,UACL,UAAU,KAAK,aAAa,eAAe;AAAA,UAC3C;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,kBAAkB,CAAC,KACrD,GAAG,WAAW,KAAK,KAAK,UAAU,UAAU,CAAC,GAC7C;AACA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,QAAQ,CAAC,GAAG;AAChD,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,YAAY,CAAC,GAAG;AACpD,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,SAAS,CAAC,KAC5C,GAAG,WAAW,KAAK,KAAK,UAAU,cAAc,CAAC,GACjD;AACA,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAG/C,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,UAAuC;AAChE,SAAK,cAAc,IAAI,SAAS
,QAAQ,QAAQ;AAAA,EAElD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAMhB;AAED,UAAM,QAAQ;AAAA,MACZ,YAAY,KAAK,cAAc;AAAA,MAC/B,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,WAAW,CAAC;AAAA,MACZ,YAAY,CAAC;AAAA,IACf;AAEA,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AAClD,UAAI,CAAC,YAAY,SAAS,aAAa,UAAU;AAC/C,cAAM,cAAc,SAAS;AAE7B,YAAI,SAAS,UAAU;AACrB,gBAAM,UAAU,SAAS,QAAQ,KAC9B,MAAM,UAAU,SAAS,QAAQ,KAAK,KAAK;AAAA,QAChD;AAEA,YAAI,SAAS,WAAW;AACtB,gBAAM,WAAW,SAAS,SAAS,KAChC,MAAM,WAAW,SAAS,SAAS,KAAK,KAAK;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;",
|
|
4
|
+
"sourcesContent": ["/**\n * Repository Ingestion Skill for ChromaDB\n *\n * Ingests and maintains code repositories in ChromaDB for enhanced code search and context\n */\n\nimport { ChromaDBAdapter } from '../core/storage/chromadb-adapter.js';\nimport { Logger } from '../core/monitoring/logger.js';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as crypto from 'crypto';\nimport { execSync } from 'child_process';\nimport ignore from 'ignore';\n\nexport interface RepoIngestionOptions {\n incremental?: boolean;\n forceUpdate?: boolean;\n includeTests?: boolean;\n includeDocs?: boolean;\n maxFileSize?: number;\n chunkSize?: number;\n extensions?: string[];\n excludePatterns?: string[];\n}\n\nexport interface RepoMetadata {\n repoId: string;\n repoName: string;\n branch: string;\n lastCommit: string;\n lastIngested: number;\n filesCount: number;\n totalSize: number;\n language: string;\n framework?: string;\n}\n\nexport interface FileChunk {\n id: string;\n filePath: string;\n content: string;\n startLine: number;\n endLine: number;\n hash: string;\n language: string;\n}\n\nexport class RepoIngestionSkill {\n private logger: Logger;\n private adapter: ChromaDBAdapter;\n private metadataCache: Map<string, RepoMetadata> = new Map();\n private fileHashCache: Map<string, string> = new Map();\n\n constructor(\n private config: {\n apiKey: string;\n tenant: string;\n database: string;\n collectionName?: string;\n },\n private userId: string,\n private teamId?: string\n ) {\n this.logger = new Logger('RepoIngestionSkill');\n this.adapter = new ChromaDBAdapter(\n {\n ...config,\n collectionName: config.collectionName || 'stackmemory_repos',\n },\n userId,\n teamId\n );\n }\n\n async initialize(): Promise<void> {\n await this.adapter.initialize();\n await this.loadMetadataCache();\n }\n\n /**\n * Ingest a repository into ChromaDB\n */\n async ingestRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n 
success: boolean;\n message: string;\n stats?: {\n filesProcessed: number;\n chunksCreated: number;\n timeElapsed: number;\n totalSize: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n this.logger.info(`Starting repository ingestion for ${repoName}`);\n\n // Validate repository path\n if (!fs.existsSync(repoPath)) {\n throw new Error(`Repository path not found: ${repoPath}`);\n }\n\n // Get repository metadata\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n\n // Check if incremental update is possible\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n if (options.incremental && existingMetadata && !options.forceUpdate) {\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected since last ingestion',\n };\n }\n\n this.logger.info(\n `Incremental update: ${changedFiles.length} files changed`\n );\n }\n\n // Get files to process\n const files = await this.getRepoFiles(repoPath, options);\n this.logger.info(`Found ${files.length} files to process`);\n\n // Process files and create chunks\n let filesProcessed = 0;\n let chunksCreated = 0;\n let totalSize = 0;\n\n for (const file of files) {\n try {\n const chunks = await this.processFile(\n file,\n repoPath,\n repoName,\n metadata,\n options\n );\n\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n chunksCreated++;\n }\n\n filesProcessed++;\n totalSize += fs.statSync(file).size;\n\n // Log progress every 100 files\n if (filesProcessed % 100 === 0) {\n this.logger.info(\n `Processed ${filesProcessed}/${files.length} files`\n );\n }\n } catch (error: unknown) {\n this.logger.warn(`Failed to process file ${file}:`, error);\n }\n }\n\n // Update metadata\n metadata.filesCount = filesProcessed;\n metadata.totalSize = totalSize;\n metadata.lastIngested = Date.now();\n await 
this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n this.logger.info(\n `Repository ingestion complete: ${filesProcessed} files, ${chunksCreated} chunks in ${timeElapsed}ms`\n );\n\n return {\n success: true,\n message: `Successfully ingested ${repoName}`,\n stats: {\n filesProcessed,\n chunksCreated,\n timeElapsed,\n totalSize,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository ingestion failed:', error);\n return {\n success: false,\n message: `Failed to ingest repository: ${error instanceof Error ? error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Update an existing repository in ChromaDB\n */\n async updateRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n success: boolean;\n message: string;\n stats?: {\n filesUpdated: number;\n filesAdded: number;\n filesRemoved: number;\n timeElapsed: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n\n if (!existingMetadata) {\n // No existing data, perform full ingestion\n return this.ingestRepository(repoPath, repoName, options);\n }\n\n // Get changed files since last ingestion\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected',\n stats: {\n filesUpdated: 0,\n filesAdded: 0,\n filesRemoved: 0,\n timeElapsed: Date.now() - startTime,\n },\n };\n }\n\n let filesUpdated = 0;\n let filesAdded = 0;\n let filesRemoved = 0;\n\n for (const change of changedFiles) {\n const filePath = path.join(repoPath, change.path);\n\n if (change.status === 'deleted') {\n await this.removeFileChunks(change.path, metadata.repoId);\n filesRemoved++;\n } else if (change.status === 'added') {\n const chunks = await 
this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesAdded++;\n } else if (change.status === 'modified') {\n // Remove old chunks and add new ones\n await this.removeFileChunks(change.path, metadata.repoId);\n const chunks = await this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesUpdated++;\n }\n }\n\n // Update metadata\n metadata.lastIngested = Date.now();\n await this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n return {\n success: true,\n message: `Successfully updated ${repoName}`,\n stats: {\n filesUpdated,\n filesAdded,\n filesRemoved,\n timeElapsed,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository update failed:', error);\n return {\n success: false,\n message: `Failed to update repository: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Search code in ingested repositories\n */\n async searchCode(\n query: string,\n options?: {\n repoName?: string;\n language?: string;\n limit?: number;\n includeContext?: boolean;\n }\n ): Promise<\n Array<{\n filePath: string;\n content: string;\n score: number;\n startLine: number;\n endLine: number;\n repoName: string;\n }>\n > {\n try {\n const filters: Record<string, unknown> = {\n type: ['code_chunk'],\n };\n\n if (options?.repoName) {\n filters.repo_name = options.repoName;\n }\n\n if (options?.language) {\n filters.language = options.language;\n }\n\n const results = await this.adapter.queryContexts(\n query,\n options?.limit || 20,\n filters\n );\n\n return results.map((result) => ({\n filePath: result.metadata.file_path,\n content: result.content,\n score: 1 - result.distance, // Convert distance to similarity score\n startLine: result.metadata.start_line,\n endLine: result.metadata.end_line,\n repoName: result.metadata.repo_name,\n }));\n } catch (error: unknown) {\n this.logger.error('Code search failed:', error);\n return [];\n }\n }\n\n /**\n * Get repository metadata\n */\n private async getRepoMetadata(\n repoPath: string,\n repoName: string\n ): Promise<RepoMetadata> {\n const branch = this.getCurrentBranch(repoPath);\n const lastCommit = this.getLastCommit(repoPath);\n const repoId = `${repoName}_${branch}`.replace(/[^a-zA-Z0-9_-]/g, '_');\n\n // Detect primary language and framework\n const { language, framework } =\n await this.detectLanguageAndFramework(repoPath);\n\n return {\n repoId,\n repoName,\n branch,\n lastCommit,\n lastIngested: Date.now(),\n filesCount: 0,\n totalSize: 0,\n language,\n framework,\n };\n }\n\n /**\n * Get current git branch\n */\n private getCurrentBranch(repoPath: string): string {\n try {\n return execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'main';\n }\n }\n\n /**\n * Get last commit 
hash\n */\n private getLastCommit(repoPath: string): string {\n try {\n return execSync('git rev-parse HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'unknown';\n }\n }\n\n /**\n * Get changed files between commits\n */\n private async getChangedFiles(\n repoPath: string,\n fromCommit: string,\n toCommit: string\n ): Promise<Array<{ path: string; status: string }>> {\n try {\n const diff = execSync(\n `git diff --name-status ${fromCommit}..${toCommit}`,\n {\n cwd: repoPath,\n encoding: 'utf8',\n }\n );\n\n return diff\n .split('\\n')\n .filter((line) => line.trim())\n .map((line) => {\n const [status, ...pathParts] = line.split('\\t');\n return {\n path: pathParts.join('\\t'),\n status:\n status === 'A'\n ? 'added'\n : status === 'D'\n ? 'deleted'\n : 'modified',\n };\n });\n } catch {\n return [];\n }\n }\n\n /**\n * Get repository files to process\n */\n private async getRepoFiles(\n repoPath: string,\n options: RepoIngestionOptions\n ): Promise<string[]> {\n const files: string[] = [];\n const ig = ignore();\n\n // Load .gitignore if it exists\n const gitignorePath = path.join(repoPath, '.gitignore');\n if (fs.existsSync(gitignorePath)) {\n ig.add(fs.readFileSync(gitignorePath, 'utf8'));\n }\n\n // Add default exclude patterns\n const defaultExcludes = [\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n '.env',\n '*.log',\n ...(options.excludePatterns || []),\n ];\n ig.add(defaultExcludes);\n\n // Default extensions to include\n const extensions = options.extensions || [\n '.ts',\n '.tsx',\n '.js',\n '.jsx',\n '.py',\n '.java',\n '.go',\n '.rs',\n '.c',\n '.cpp',\n '.h',\n '.hpp',\n '.cs',\n '.rb',\n '.php',\n '.swift',\n '.kt',\n '.scala',\n '.r',\n '.m',\n '.sql',\n '.yaml',\n '.yml',\n '.json',\n ];\n\n // Add documentation if requested\n if (options.includeDocs) {\n extensions.push('.md', '.rst', '.txt');\n }\n\n const maxFileSize = options.maxFileSize || 1024 * 1024; // 1MB default\n\n const walkDir = (dir: 
string, baseDir: string = repoPath) => {\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath);\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n walkDir(fullPath, baseDir);\n } else if (entry.isFile()) {\n const ext = path.extname(entry.name);\n\n // Check if file should be included\n if (!extensions.includes(ext)) {\n continue;\n }\n\n // Check if it's a test file\n if (\n !options.includeTests &&\n (entry.name.includes('.test.') ||\n entry.name.includes('.spec.') ||\n relativePath.includes('__tests__') ||\n relativePath.includes('test/') ||\n relativePath.includes('tests/'))\n ) {\n continue;\n }\n\n // Check file size\n const stats = fs.statSync(fullPath);\n if (stats.size > maxFileSize) {\n this.logger.debug(`Skipping large file: ${relativePath}`);\n continue;\n }\n\n files.push(fullPath);\n }\n }\n };\n\n walkDir(repoPath);\n return files;\n }\n\n /**\n * Process a file into chunks\n */\n private async processFile(\n filePath: string,\n repoPath: string,\n repoName: string,\n metadata: RepoMetadata,\n options: RepoIngestionOptions\n ): Promise<FileChunk[]> {\n const relativePath = path.relative(repoPath, filePath);\n const content = fs.readFileSync(filePath, 'utf8');\n const lines = content.split('\\n');\n const language = this.detectFileLanguage(filePath);\n\n const chunkSize = options.chunkSize || 100; // 100 lines per chunk\n const chunks: FileChunk[] = [];\n\n // Calculate file hash for caching\n const fileHash = crypto.createHash('md5').update(content).digest('hex');\n\n // Check if file has changed\n const cachedHash = this.fileHashCache.get(relativePath);\n if (cachedHash === fileHash && !options.forceUpdate) {\n return []; // File hasn't changed\n }\n\n this.fileHashCache.set(relativePath, fileHash);\n\n // Split into chunks\n for (let i = 0; i < lines.length; i += 
chunkSize) {\n const chunkLines = lines.slice(i, Math.min(i + chunkSize, lines.length));\n const chunkContent = chunkLines.join('\\n');\n\n if (chunkContent.trim().length === 0) {\n continue; // Skip empty chunks\n }\n\n const chunkId = `${metadata.repoId}_${relativePath}_${i}`;\n const chunkHash = crypto\n .createHash('md5')\n .update(chunkContent)\n .digest('hex');\n\n chunks.push({\n id: chunkId,\n filePath: relativePath,\n content: chunkContent,\n startLine: i + 1,\n endLine: Math.min(i + chunkSize, lines.length),\n hash: chunkHash,\n language,\n });\n }\n\n return chunks;\n }\n\n /**\n * Store a chunk in ChromaDB\n */\n private async storeChunk(\n chunk: FileChunk,\n metadata: RepoMetadata\n ): Promise<void> {\n const documentContent = `File: ${chunk.filePath} (Lines ${chunk.startLine}-${chunk.endLine})\nLanguage: ${chunk.language}\nRepository: ${metadata.repoName}/${metadata.branch}\n\n${chunk.content}`;\n\n await this.adapter.storeContext('observation', documentContent, {\n type: 'code_chunk',\n repo_id: metadata.repoId,\n repo_name: metadata.repoName,\n branch: metadata.branch,\n file_path: chunk.filePath,\n start_line: chunk.startLine,\n end_line: chunk.endLine,\n language: chunk.language,\n framework: metadata.framework,\n chunk_hash: chunk.hash,\n last_commit: metadata.lastCommit,\n });\n }\n\n /**\n * Remove file chunks from ChromaDB\n */\n private async removeFileChunks(\n filePath: string,\n repoId: string\n ): Promise<void> {\n // This would need to be implemented in ChromaDBAdapter\n // For now, we'll log it\n this.logger.debug(\n `Would remove chunks for file: ${filePath} from repo: ${repoId}`\n );\n }\n\n /**\n * Detect file language\n */\n private detectFileLanguage(filePath: string): string {\n const ext = path.extname(filePath).toLowerCase();\n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.py': 'python',\n '.java': 'java',\n '.go': 'go',\n '.rs': 
'rust',\n '.c': 'c',\n '.cpp': 'cpp',\n '.cs': 'csharp',\n '.rb': 'ruby',\n '.php': 'php',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.scala': 'scala',\n '.r': 'r',\n '.sql': 'sql',\n '.yaml': 'yaml',\n '.yml': 'yaml',\n '.json': 'json',\n '.md': 'markdown',\n };\n\n return languageMap[ext] || 'unknown';\n }\n\n /**\n * Detect language and framework\n */\n private async detectLanguageAndFramework(repoPath: string): Promise<{\n language: string;\n framework?: string;\n }> {\n // Check for package.json (JavaScript/TypeScript)\n const packageJsonPath = path.join(repoPath, 'package.json');\n if (fs.existsSync(packageJsonPath)) {\n try {\n const packageJson = JSON.parse(\n fs.readFileSync(packageJsonPath, 'utf8')\n );\n const deps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n };\n\n let framework: string | undefined;\n if (deps.react) framework = 'react';\n else if (deps.vue) framework = 'vue';\n else if (deps.angular) framework = 'angular';\n else if (deps.express) framework = 'express';\n else if (deps.next) framework = 'nextjs';\n else if (deps.svelte) framework = 'svelte';\n\n return {\n language: deps.typescript ? 
'typescript' : 'javascript',\n framework,\n };\n } catch {}\n }\n\n // Check for requirements.txt or setup.py (Python)\n if (\n fs.existsSync(path.join(repoPath, 'requirements.txt')) ||\n fs.existsSync(path.join(repoPath, 'setup.py'))\n ) {\n return { language: 'python' };\n }\n\n // Check for go.mod (Go)\n if (fs.existsSync(path.join(repoPath, 'go.mod'))) {\n return { language: 'go' };\n }\n\n // Check for Cargo.toml (Rust)\n if (fs.existsSync(path.join(repoPath, 'Cargo.toml'))) {\n return { language: 'rust' };\n }\n\n // Check for pom.xml or build.gradle (Java)\n if (\n fs.existsSync(path.join(repoPath, 'pom.xml')) ||\n fs.existsSync(path.join(repoPath, 'build.gradle'))\n ) {\n return { language: 'java' };\n }\n\n // Default to unknown\n return { language: 'unknown' };\n }\n\n /**\n * Load metadata cache\n */\n private async loadMetadataCache(): Promise<void> {\n // In a real implementation, this would load from a persistent store\n // For now, we'll just initialize an empty cache\n this.metadataCache.clear();\n }\n\n /**\n * Save metadata\n */\n private async saveMetadata(metadata: RepoMetadata): Promise<void> {\n this.metadataCache.set(metadata.repoId, metadata);\n // In a real implementation, this would persist to a store\n }\n\n /**\n * Get repository statistics\n */\n async getRepoStats(repoName?: string): Promise<{\n totalRepos: number;\n totalFiles: number;\n totalChunks: number;\n languages: Record<string, number>;\n frameworks: Record<string, number>;\n }> {\n // This would query ChromaDB for statistics\n const stats = {\n totalRepos: this.metadataCache.size,\n totalFiles: 0,\n totalChunks: 0,\n languages: {} as Record<string, number>,\n frameworks: {} as Record<string, number>,\n };\n\n for (const metadata of this.metadataCache.values()) {\n if (!repoName || metadata.repoName === repoName) {\n stats.totalFiles += metadata.filesCount;\n\n if (metadata.language) {\n stats.languages[metadata.language] =\n (stats.languages[metadata.language] || 0) + 1;\n 
}\n\n if (metadata.framework) {\n stats.frameworks[metadata.framework] =\n (stats.frameworks[metadata.framework] || 0) + 1;\n }\n }\n }\n\n return stats;\n }\n}\n"],
|
|
5
|
+
"mappings": "AAMA,SAAS,uBAAuB;AAChC,SAAS,cAAc;AACvB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,YAAY,YAAY;AACxB,SAAS,gBAAgB;AACzB,OAAO,YAAY;AAmCZ,MAAM,mBAAmB;AAAA,EAM9B,YACU,QAMA,QACA,QACR;AARQ;AAMA;AACA;AAER,SAAK,SAAS,IAAI,OAAO,oBAAoB;AAC7C,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,QACE,GAAG;AAAA,QACH,gBAAgB,OAAO,kBAAkB;AAAA,MAC3C;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAxBQ;AAAA,EACA;AAAA,EACA,gBAA2C,oBAAI,IAAI;AAAA,EACnD,gBAAqC,oBAAI,IAAI;AAAA,EAuBrD,MAAM,aAA4B;AAChC,UAAM,KAAK,QAAQ,WAAW;AAC9B,UAAM,KAAK,kBAAkB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,WAAK,OAAO,KAAK,qCAAqC,QAAQ,EAAE;AAGhE,UAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,cAAM,IAAI,MAAM,8BAA8B,QAAQ,EAAE;AAAA,MAC1D;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAG9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAC/D,UAAI,QAAQ,eAAe,oBAAoB,CAAC,QAAQ,aAAa;AACnE,cAAM,eAAe,MAAM,KAAK;AAAA,UAC9B;AAAA,UACA,iBAAiB;AAAA,UACjB,SAAS;AAAA,QACX;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS;AAAA,UACX;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,uBAAuB,aAAa,MAAM;AAAA,QAC5C;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,OAAO;AACvD,WAAK,OAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAGzD,UAAI,iBAAiB;AACrB,UAAI,gBAAgB;AACpB,UAAI,YAAY;AAEhB,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AACrC;AAAA,UACF;AAEA;AACA,uBAAa,GAAG,SAAS,IAAI,EAAE;AAG/B,cAAI,iBAAiB,QAAQ,GAAG;AAC9B,iBAAK,OAAO;AAAA,cACV,aAAa,cAAc,IAAI,MAAM,MAAM;AAAA,YAC7C;AAAA,UACF;AAAA,QACF,SAAS,OAAgB;AACvB,eAAK,OAAO,KAAK,0BAA0B,IAAI,KAAK,KAAK;AAAA,QAC3D;AAAA,MACF;AAGA,eAAS,aAAa;AACtB,eAAS,YAAY;AACrB,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,WAAK,OAAO;AAAA,QACV,kCAAkC,cAAc,WAAW,aAAa,cAAc,WAAW;AAAA,MACnG;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,yBAAyB,QAAQ;AAAA,QAC1C,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,gCAAgC,KAA
K;AACvD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAC9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAE/D,UAAI,CAAC,kBAAkB;AAErB,eAAO,KAAK,iBAAiB,UAAU,UAAU,OAAO;AAAA,MAC1D;AAGA,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B;AAAA,QACA,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAEA,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS;AAAA,UACT,OAAO;AAAA,YACL,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,aAAa,KAAK,IAAI,IAAI;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,eAAe;AACnB,UAAI,aAAa;AACjB,UAAI,eAAe;AAEnB,iBAAW,UAAU,cAAc;AACjC,cAAM,WAAW,KAAK,KAAK,UAAU,OAAO,IAAI;AAEhD,YAAI,OAAO,WAAW,WAAW;AAC/B,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD;AAAA,QACF,WAAW,OAAO,WAAW,SAAS;AACpC,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF,WAAW,OAAO,WAAW,YAAY;AAEvC,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF;AAAA,MACF;AAGA,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,wBAAwB,QAAQ;AAAA,QACzC,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,6BAA6B,KAAK;AACpD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WACJ,OACA,SAeA;AACA,QAAI;AACF,YAAM,UAAmC;AAAA,QACvC,MAAM,CAAC,YAAY;AAAA,MACrB;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,YAAY,QAAQ;AAAA,MAC9B;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,YAAM,UAAU,MAAM,KAAK,QAAQ;AAAA,QACjC;AAAA,QACA,SAAS,SAAS;AAAA,QAClB;AAAA,MACF;AAEA,aAAO,QAAQ,IAAI,CAAC,YAAY;AAAA,QAC9B,UAAU,OAAO,SAAS;AAAA,QAC1B,SAAS,OAAO;AA
AA,QAChB,OAAO,IAAI,OAAO;AAAA;AAAA,QAClB,WAAW,OAAO,SAAS;AAAA,QAC3B,SAAS,OAAO,SAAS;AAAA,QACzB,UAAU,OAAO,SAAS;AAAA,MAC5B,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,uBAAuB,KAAK;AAC9C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,UACuB;AACvB,UAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,UAAM,aAAa,KAAK,cAAc,QAAQ;AAC9C,UAAM,SAAS,GAAG,QAAQ,IAAI,MAAM,GAAG,QAAQ,mBAAmB,GAAG;AAGrE,UAAM,EAAE,UAAU,UAAU,IAC1B,MAAM,KAAK,2BAA2B,QAAQ;AAEhD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,KAAK,IAAI;AAAA,MACvB,YAAY;AAAA,MACZ,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAA0B;AACjD,QAAI;AACF,aAAO,SAAS,mCAAmC;AAAA,QACjD,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,UAA0B;AAC9C,QAAI;AACF,aAAO,SAAS,sBAAsB;AAAA,QACpC,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,YACA,UACkD;AAClD,QAAI;AACF,YAAM,OAAO;AAAA,QACX,0BAA0B,UAAU,KAAK,QAAQ;AAAA,QACjD;AAAA,UACE,KAAK;AAAA,UACL,UAAU;AAAA,QACZ;AAAA,MACF;AAEA,aAAO,KACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC,EAC5B,IAAI,CAAC,SAAS;AACb,cAAM,CAAC,QAAQ,GAAG,SAAS,IAAI,KAAK,MAAM,GAAI;AAC9C,eAAO;AAAA,UACL,MAAM,UAAU,KAAK,GAAI;AAAA,UACzB,QACE,WAAW,MACP,UACA,WAAW,MACT,YACA;AAAA,QACV;AAAA,MACF,CAAC;AAAA,IACL,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,UACA,SACmB;AACnB,UAAM,QAAkB,CAAC;AACzB,UAAM,KAAK,OAAO;AAGlB,UAAM,gBAAgB,KAAK,KAAK,UAAU,YAAY;AACtD,QAAI,GAAG,WAAW,aAAa,GAAG;AAChC,SAAG,IAAI,GAAG,aAAa,eAAe,MAAM,CAAC;AAAA,IAC/C;AAGA,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,mBAAmB,CAAC;AAAA,IAClC;AACA,OAAG,IAAI,eAAe;AAGtB,UAAM,aAAa,QAAQ,cAAc;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAA
A,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa;AACvB,iBAAW,KAAK,OAAO,QAAQ,MAAM;AAAA,IACvC;AAEA,UAAM,cAAc,QAAQ,eAAe,OAAO;AAElD,UAAM,UAAU,CAAC,KAAa,UAAkB,aAAa;AAC3D,YAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAE3D,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAM,eAAe,KAAK,SAAS,SAAS,QAAQ;AAEpD,YAAI,GAAG,QAAQ,YAAY,GAAG;AAC5B;AAAA,QACF;AAEA,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,UAAU,OAAO;AAAA,QAC3B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,KAAK,QAAQ,MAAM,IAAI;AAGnC,cAAI,CAAC,WAAW,SAAS,GAAG,GAAG;AAC7B;AAAA,UACF;AAGA,cACE,CAAC,QAAQ,iBACR,MAAM,KAAK,SAAS,QAAQ,KAC3B,MAAM,KAAK,SAAS,QAAQ,KAC5B,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,OAAO,KAC7B,aAAa,SAAS,QAAQ,IAChC;AACA;AAAA,UACF;AAGA,gBAAM,QAAQ,GAAG,SAAS,QAAQ;AAClC,cAAI,MAAM,OAAO,aAAa;AAC5B,iBAAK,OAAO,MAAM,wBAAwB,YAAY,EAAE;AACxD;AAAA,UACF;AAEA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,QAAQ;AAChB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACA,UACA,UACA,SACsB;AACtB,UAAM,eAAe,KAAK,SAAS,UAAU,QAAQ;AACrD,UAAM,UAAU,GAAG,aAAa,UAAU,MAAM;AAChD,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,WAAW,KAAK,mBAAmB,QAAQ;AAEjD,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,SAAsB,CAAC;AAG7B,UAAM,WAAW,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAGtE,UAAM,aAAa,KAAK,cAAc,IAAI,YAAY;AACtD,QAAI,eAAe,YAAY,CAAC,QAAQ,aAAa;AACnD,aAAO,CAAC;AAAA,IACV;AAEA,SAAK,cAAc,IAAI,cAAc,QAAQ;AAG7C,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,YAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM,CAAC;AACvE,YAAM,eAAe,WAAW,KAAK,IAAI;AAEzC,UAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,MACF;AAEA,YAAM,UAAU,GAAG,SAAS,MAAM,IAAI,YAAY,IAAI,CAAC;AACvD,YAAM,YAAY,OACf,WAAW,KAAK,EAChB,OAAO,YAAY,EACnB,OAAO,KAAK;AAEf,aAAO,KAAK;AAAA,QACV,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,SAAS;AAAA,QACT,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AAAA,QAC7C,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WACZ,OACA,UACe;AACf,UAAM,kBAAkB,SAAS,MAAM,QAAQ,WAAW,MAAM,SAAS,IAAI,MAAM,OAAO;AAAA,YAClF,MAAM,QAAQ;AAAA,cACZ,SAAS,QAAQ,IAAI,SAAS,MAAM;AAAA;AAAA,EAEhD,MAAM,O
AAO;AAEX,UAAM,KAAK,QAAQ,aAAa,eAAe,iBAAiB;AAAA,MAC9D,MAAM;AAAA,MACN,SAAS,SAAS;AAAA,MAClB,WAAW,SAAS;AAAA,MACpB,QAAQ,SAAS;AAAA,MACjB,WAAW,MAAM;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,WAAW,SAAS;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,aAAa,SAAS;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,UACA,QACe;AAGf,SAAK,OAAO;AAAA,MACV,iCAAiC,QAAQ,eAAe,MAAM;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAA0B;AACnD,UAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,UAAM,cAAsC;AAAA,MAC1C,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,OAAO;AAAA,MACP,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAEA,WAAO,YAAY,GAAG,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,UAGtC;AAED,UAAM,kBAAkB,KAAK,KAAK,UAAU,cAAc;AAC1D,QAAI,GAAG,WAAW,eAAe,GAAG;AAClC,UAAI;AACF,cAAM,cAAc,KAAK;AAAA,UACvB,GAAG,aAAa,iBAAiB,MAAM;AAAA,QACzC;AACA,cAAM,OAAO;AAAA,UACX,GAAG,YAAY;AAAA,UACf,GAAG,YAAY;AAAA,QACjB;AAEA,YAAI;AACJ,YAAI,KAAK,MAAO,aAAY;AAAA,iBACnB,KAAK,IAAK,aAAY;AAAA,iBACtB,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,KAAM,aAAY;AAAA,iBACvB,KAAK,OAAQ,aAAY;AAElC,eAAO;AAAA,UACL,UAAU,KAAK,aAAa,eAAe;AAAA,UAC3C;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,kBAAkB,CAAC,KACrD,GAAG,WAAW,KAAK,KAAK,UAAU,UAAU,CAAC,GAC7C;AACA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,QAAQ,CAAC,GAAG;AAChD,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,YAAY,CAAC,GAAG;AACpD,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,SAAS,CAAC,KAC5C,GAAG,WAAW,KAAK,KAAK,UAAU,cAAc,CAAC,GACjD;AACA,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAG/C,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,UAAuC;AAChE,SAAK,cAAc,IAAI,SAA
S,QAAQ,QAAQ;AAAA,EAElD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAMhB;AAED,UAAM,QAAQ;AAAA,MACZ,YAAY,KAAK,cAAc;AAAA,MAC/B,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,WAAW,CAAC;AAAA,MACZ,YAAY,CAAC;AAAA,IACf;AAEA,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AAClD,UAAI,CAAC,YAAY,SAAS,aAAa,UAAU;AAC/C,cAAM,cAAc,SAAS;AAE7B,YAAI,SAAS,UAAU;AACrB,gBAAM,UAAU,SAAS,QAAQ,KAC9B,MAAM,UAAU,SAAS,QAAQ,KAAK,KAAK;AAAA,QAChD;AAEA,YAAI,SAAS,WAAW;AACtB,gBAAM,WAAW,SAAS,SAAS,KAChC,MAAM,WAAW,SAAS,SAAS,KAAK,KAAK;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { execSync } from "child_process";
|
|
3
|
+
import fs from "fs";
|
|
4
|
+
import { glob } from "glob";
|
|
5
|
+
const SECRET_PATTERNS = [
|
|
6
|
+
{
|
|
7
|
+
pattern: /lin_api_[a-zA-Z0-9]{40}/,
|
|
8
|
+
name: "Linear API Key",
|
|
9
|
+
envVar: "LINEAR_API_KEY"
|
|
10
|
+
},
|
|
11
|
+
{
|
|
12
|
+
pattern: /lin_oauth_[a-zA-Z0-9]{64}/,
|
|
13
|
+
name: "Linear OAuth Token",
|
|
14
|
+
envVar: "LINEAR_OAUTH_TOKEN"
|
|
15
|
+
},
|
|
16
|
+
{
|
|
17
|
+
pattern: /sk-[a-zA-Z0-9]{48}/,
|
|
18
|
+
name: "OpenAI API Key",
|
|
19
|
+
envVar: "OPENAI_API_KEY"
|
|
20
|
+
},
|
|
21
|
+
{ pattern: /npm_[a-zA-Z0-9]{36}/, name: "NPM Token", envVar: "NPM_TOKEN" },
|
|
22
|
+
{
|
|
23
|
+
pattern: /ghp_[a-zA-Z0-9]{36}/,
|
|
24
|
+
name: "GitHub Token",
|
|
25
|
+
envVar: "GITHUB_TOKEN"
|
|
26
|
+
},
|
|
27
|
+
{
|
|
28
|
+
pattern: /ghs_[a-zA-Z0-9]{36}/,
|
|
29
|
+
name: "GitHub Secret",
|
|
30
|
+
envVar: "GITHUB_SECRET"
|
|
31
|
+
},
|
|
32
|
+
{
|
|
33
|
+
pattern: /pk_live_[a-zA-Z0-9]{24,}/,
|
|
34
|
+
name: "Stripe Live Key",
|
|
35
|
+
envVar: "STRIPE_LIVE_KEY"
|
|
36
|
+
},
|
|
37
|
+
{
|
|
38
|
+
pattern: /sk_live_[a-zA-Z0-9]{24,}/,
|
|
39
|
+
name: "Stripe Secret Key",
|
|
40
|
+
envVar: "STRIPE_SECRET_KEY"
|
|
41
|
+
}
|
|
42
|
+
];
|
|
43
|
+
/**
 * Scans source files for hardcoded secrets (API keys, tokens), reports
 * findings, can rewrite files to read the values from environment variables
 * instead, and can install a git pre-commit hook that blocks future leaks.
 *
 * Depends on the module-level SECRET_PATTERNS table and the `glob`, `fs`
 * and `execSync` imports at the top of this file.
 */
class SecuritySecretsScanner {
  // filePath -> Set of human-readable finding descriptions ("Line N: ...")
  detectedSecrets = /* @__PURE__ */ new Map();
  /**
   * Scan all files matching the given glob patterns and print a report.
   *
   * @param {string[]} patterns - glob patterns, resolved relative to cwd
   */
  async scanForSecrets(patterns = [
    "**/*.js",
    "**/*.ts",
    "**/*.jsx",
    "**/*.tsx",
    "**/*.sh"
  ]) {
    console.log("\u{1F50D} Scanning for hardcoded secrets...\n");
    for (const pattern of patterns) {
      const files = await glob(pattern, {
        ignore: ["node_modules/**", "dist/**", "build/**", ".git/**"]
      });
      for (const file of files) {
        await this.scanFile(file);
      }
    }
    this.reportFindings();
  }
  /**
   * Scan a single file, recording any matches in detectedSecrets.
   * Unreadable entries (permission errors, directories, etc.) are reported
   * and skipped so one bad path cannot abort the whole scan.
   *
   * @param {string} filePath
   */
  async scanFile(filePath) {
    let content;
    try {
      content = fs.readFileSync(filePath, "utf-8");
    } catch (err) {
      // Fix: previously a single unreadable path crashed the entire scan.
      console.log(`Skipping unreadable file: ${filePath} (${err.message})`);
      return;
    }
    const lines = content.split("\n");
    lines.forEach((line, index) => {
      for (const secretPattern of SECRET_PATTERNS) {
        if (secretPattern.pattern.test(line)) {
          if (!this.detectedSecrets.has(filePath)) {
            this.detectedSecrets.set(filePath, /* @__PURE__ */ new Set());
          }
          const secrets = this.detectedSecrets.get(filePath);
          if (secrets) {
            secrets.add(
              `Line ${index + 1}: ${secretPattern.name} detected (use ${secretPattern.envVar})`
            );
          }
        }
      }
    });
  }
  /**
   * Print all collected findings plus remediation guidance.
   */
  reportFindings() {
    if (this.detectedSecrets.size === 0) {
      console.log("\u2705 No hardcoded secrets detected!\n");
      return;
    }
    console.log(
      `\u26A0\uFE0F Found hardcoded secrets in ${this.detectedSecrets.size} files:\n`
    );
    for (const [file, secrets] of this.detectedSecrets) {
      console.log(`\u{1F4C4} ${file}:`);
      for (const secret of secrets) {
        console.log(` ${secret}`);
      }
      console.log();
    }
    console.log("\u{1F4DD} How to fix:");
    console.log("1. Replace hardcoded values with process.env.VARIABLE_NAME");
    console.log(`2. Add "import 'dotenv/config'" at the top of the file`);
    console.log("3. Add the actual values to your .env file");
    console.log("4. Never commit .env files to git\n");
  }
  /**
   * Rewrite previously scanned files in place: quoted secret literals become
   * `process.env.<VAR>` references, a dotenv import is added to .js/.ts files
   * that lack one, and a fail-fast guard is inserted after
   * `const x = process.env.<VAR>` declarations.
   *
   * Must run after scanForSecrets() has populated detectedSecrets.
   */
  async autoFix() {
    console.log("\u{1F527} Auto-fixing hardcoded secrets...\n");
    for (const [filePath] of this.detectedSecrets) {
      let content = fs.readFileSync(filePath, "utf-8");
      let modified = false;
      // Ensure dotenv is loaded. Fix: also recognize double-quoted
      // require("dotenv") calls, which the previous check missed.
      if (
        (filePath.endsWith(".js") || filePath.endsWith(".ts")) &&
        !content.includes("dotenv/config") &&
        !content.includes("require('dotenv')") &&
        !content.includes('require("dotenv")')
      ) {
        if (content.startsWith("#!/")) {
          // Keep the shebang on line 1; insert the import just below it.
          const firstNewline = content.indexOf("\n");
          content =
            content.slice(0, firstNewline + 1) +
            "\nimport 'dotenv/config';\n" +
            content.slice(firstNewline + 1);
        } else {
          content = "import 'dotenv/config';\n\n" + content;
        }
        modified = true;
      }
      for (const pattern of SECRET_PATTERNS) {
        // Match the secret together with its surrounding quotes so the whole
        // quoted literal is swapped for the bare env-var expression.
        const regex = new RegExp(
          `(['"\`])(${pattern.pattern.source})(['"\`])`,
          "g"
        );
        const replacement = `process.env.${pattern.envVar}`;
        // Fix: replace-and-compare instead of test()-then-replace() —
        // .test() on a /g regex advances its stateful lastIndex.
        const replaced = content.replace(regex, replacement);
        if (replaced !== content) {
          content = replaced;
          modified = true;
          const varPattern = new RegExp(
            `const\\s+(\\w+)\\s*=\\s*process\\.env\\.${pattern.envVar}`
          );
          const match = content.match(varPattern);
          if (match) {
            const varName = match[1];
            const checkCode = `
if (!${varName}) {
  console.error('\u274C ${pattern.envVar} environment variable not set');
  console.log('Please set ${pattern.envVar} in your .env file or export it in your shell');
  process.exit(1);
}
`;
            // Insert the guard immediately after the declaration it protects.
            const insertPos = content.indexOf(match[0]) + match[0].length;
            content =
              content.slice(0, insertPos) + checkCode + content.slice(insertPos);
          }
        }
      }
      if (modified) {
        fs.writeFileSync(filePath, content);
        console.log(`\u2705 Fixed ${filePath}`);
      }
    }
    console.log("\n\u{1F4CB} Next steps:");
    console.log("1. Review the changes");
    console.log("2. Add actual values to .env file");
    console.log("3. Test that everything still works");
    console.log("4. Commit the fixes\n");
  }
  /**
   * Search the full git history for secret patterns via `git log -G` and
   * show the first few matching commits per pattern.
   * NOTE(review): assumes cwd is inside a git repository and a POSIX shell
   * (the command pipes through `head`); failures fall into the catch branch.
   */
  async checkGitHistory() {
    console.log("\u{1F50D} Checking git history for secrets...\n");
    try {
      for (const pattern of SECRET_PATTERNS) {
        const command = `git log -p --all -G"${pattern.pattern.source}" --format="%H %s" | head -20`;
        const result = execSync(command, {
          encoding: "utf-8",
          stdio: "pipe"
        }).trim();
        if (result) {
          console.log(`\u26A0\uFE0F Found ${pattern.name} in git history:`);
          console.log(result.split("\n").slice(0, 3).join("\n"));
          console.log("...\n");
        }
      }
      console.log("\u{1F4DD} To clean git history:");
      console.log("1. Use BFG Repo-Cleaner: bfg --replace-text passwords.txt");
      console.log("2. Or interactive rebase: git rebase -i <commit>");
      console.log("3. Or allow via GitHub: Check push error for allow URLs\n");
    } catch {
      console.log("Could not check git history");
    }
  }
  /**
   * Write an executable pre-commit hook that greps staged changes for the
   * secret patterns and aborts the commit on any match.
   * NOTE(review): writes to .git/hooks/pre-commit relative to cwd — assumes
   * the scanner is run from the repository root; verify before shipping.
   */
  generatePreCommitHook() {
    const hookContent = `#!/bin/sh
# Pre-commit hook to check for hardcoded secrets

echo "\u{1F50D} Checking for hardcoded secrets..."

# Patterns to check
patterns=(
  "lin_api_[a-zA-Z0-9]{40}"
  "lin_oauth_[a-zA-Z0-9]{64}"
  "sk-[a-zA-Z0-9]{48}"
  "npm_[a-zA-Z0-9]{36}"
  "ghp_[a-zA-Z0-9]{36}"
  "pk_live_[a-zA-Z0-9]{24,}"
  "sk_live_[a-zA-Z0-9]{24,}"
)

# Check staged files
for pattern in "\${patterns[@]}"; do
  if git diff --staged --no-color | grep -E "$pattern"; then
    echo "\u274C Found hardcoded secret matching: $pattern"
    echo "Please use environment variables instead!"
    exit 1
  fi
done

echo "\u2705 No hardcoded secrets detected"
exit 0
`;
    const hookPath = ".git/hooks/pre-commit";
    fs.writeFileSync(hookPath, hookContent);
    fs.chmodSync(hookPath, "755");
    console.log("\u2705 Generated pre-commit hook at .git/hooks/pre-commit");
    console.log(
      "This will prevent committing hardcoded secrets in the future.\n"
    );
  }
}
|
|
235
|
+
if (import.meta.url === `file://${process.argv[1]}`) {
|
|
236
|
+
const scanner = new SecuritySecretsScanner();
|
|
237
|
+
const command = process.argv[2];
|
|
238
|
+
(async () => {
|
|
239
|
+
switch (command) {
|
|
240
|
+
case "scan":
|
|
241
|
+
await scanner.scanForSecrets();
|
|
242
|
+
break;
|
|
243
|
+
case "fix":
|
|
244
|
+
await scanner.scanForSecrets();
|
|
245
|
+
await scanner.autoFix();
|
|
246
|
+
break;
|
|
247
|
+
case "history":
|
|
248
|
+
await scanner.checkGitHistory();
|
|
249
|
+
break;
|
|
250
|
+
case "hook":
|
|
251
|
+
scanner.generatePreCommitHook();
|
|
252
|
+
break;
|
|
253
|
+
default:
|
|
254
|
+
console.log("Usage: security-secrets-scanner [scan|fix|history|hook]");
|
|
255
|
+
console.log(" scan - Scan for hardcoded secrets");
|
|
256
|
+
console.log(" fix - Auto-fix hardcoded secrets");
|
|
257
|
+
console.log(" history - Check git history for secrets");
|
|
258
|
+
console.log(" hook - Generate pre-commit hook");
|
|
259
|
+
}
|
|
260
|
+
})();
|
|
261
|
+
}
|
|
262
|
+
export {
|
|
263
|
+
SecuritySecretsScanner
|
|
264
|
+
};
|
|
265
|
+
//# sourceMappingURL=security-secrets-scanner.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/skills/security-secrets-scanner.ts"],
|
|
4
|
+
"sourcesContent": ["#!/usr/bin/env node\n\n/**\n * Security Secrets Scanner Skill\n * Detects and fixes hardcoded secrets in code files\n */\n\nimport { execSync } from 'child_process';\nimport fs from 'fs';\nimport { glob } from 'glob';\n\ninterface SecretPattern {\n pattern: RegExp;\n name: string;\n envVar: string;\n}\n\nconst SECRET_PATTERNS: SecretPattern[] = [\n {\n pattern: /lin_api_[a-zA-Z0-9]{40}/,\n name: 'Linear API Key',\n envVar: 'LINEAR_API_KEY',\n },\n {\n pattern: /lin_oauth_[a-zA-Z0-9]{64}/,\n name: 'Linear OAuth Token',\n envVar: 'LINEAR_OAUTH_TOKEN',\n },\n {\n pattern: /sk-[a-zA-Z0-9]{48}/,\n name: 'OpenAI API Key',\n envVar: 'OPENAI_API_KEY',\n },\n { pattern: /npm_[a-zA-Z0-9]{36}/, name: 'NPM Token', envVar: 'NPM_TOKEN' },\n {\n pattern: /ghp_[a-zA-Z0-9]{36}/,\n name: 'GitHub Token',\n envVar: 'GITHUB_TOKEN',\n },\n {\n pattern: /ghs_[a-zA-Z0-9]{36}/,\n name: 'GitHub Secret',\n envVar: 'GITHUB_SECRET',\n },\n {\n pattern: /pk_live_[a-zA-Z0-9]{24,}/,\n name: 'Stripe Live Key',\n envVar: 'STRIPE_LIVE_KEY',\n },\n {\n pattern: /sk_live_[a-zA-Z0-9]{24,}/,\n name: 'Stripe Secret Key',\n envVar: 'STRIPE_SECRET_KEY',\n },\n];\n\nexport class SecuritySecretsScanner {\n private detectedSecrets: Map<string, Set<string>> = new Map();\n\n /**\n * Scan files for hardcoded secrets\n */\n async scanForSecrets(\n patterns: string[] = [\n '**/*.js',\n '**/*.ts',\n '**/*.jsx',\n '**/*.tsx',\n '**/*.sh',\n ]\n ): Promise<void> {\n console.log('\uD83D\uDD0D Scanning for hardcoded secrets...\\n');\n\n for (const pattern of patterns) {\n const files = await glob(pattern, {\n ignore: ['node_modules/**', 'dist/**', 'build/**', '.git/**'],\n });\n\n for (const file of files) {\n await this.scanFile(file);\n }\n }\n\n this.reportFindings();\n }\n\n /**\n * Scan a single file for secrets\n */\n private async scanFile(filePath: string): Promise<void> {\n const content = fs.readFileSync(filePath, 'utf-8');\n const lines = content.split('\\n');\n\n lines.forEach((line, 
index) => {\n for (const secretPattern of SECRET_PATTERNS) {\n if (secretPattern.pattern.test(line)) {\n if (!this.detectedSecrets.has(filePath)) {\n this.detectedSecrets.set(filePath, new Set());\n }\n const secrets = this.detectedSecrets.get(filePath);\n if (secrets) {\n secrets.add(\n `Line ${index + 1}: ${secretPattern.name} detected (use ${secretPattern.envVar})`\n );\n }\n }\n }\n });\n }\n\n /**\n * Report findings\n */\n private reportFindings(): void {\n if (this.detectedSecrets.size === 0) {\n console.log('\u2705 No hardcoded secrets detected!\\n');\n return;\n }\n\n console.log(\n `\u26A0\uFE0F Found hardcoded secrets in ${this.detectedSecrets.size} files:\\n`\n );\n\n for (const [file, secrets] of this.detectedSecrets) {\n console.log(`\uD83D\uDCC4 ${file}:`);\n for (const secret of secrets) {\n console.log(` ${secret}`);\n }\n console.log();\n }\n\n console.log('\uD83D\uDCDD How to fix:');\n console.log('1. Replace hardcoded values with process.env.VARIABLE_NAME');\n console.log('2. Add \"import \\'dotenv/config\\'\" at the top of the file');\n console.log('3. Add the actual values to your .env file');\n console.log('4. 
Never commit .env files to git\\n');\n }\n\n /**\n * Auto-fix secrets in files\n */\n async autoFix(): Promise<void> {\n console.log('\uD83D\uDD27 Auto-fixing hardcoded secrets...\\n');\n\n for (const [filePath] of this.detectedSecrets) {\n let content = fs.readFileSync(filePath, 'utf-8');\n let modified = false;\n\n // Add dotenv import if it's a JS/TS file and doesn't have it\n if (\n (filePath.endsWith('.js') || filePath.endsWith('.ts')) &&\n !content.includes('dotenv/config') &&\n !content.includes(\"require('dotenv')\")\n ) {\n // Add after shebang if present, otherwise at the top\n if (content.startsWith('#!/')) {\n const firstNewline = content.indexOf('\\n');\n content =\n content.slice(0, firstNewline + 1) +\n \"\\nimport 'dotenv/config';\\n\" +\n content.slice(firstNewline + 1);\n } else {\n content = \"import 'dotenv/config';\\n\\n\" + content;\n }\n modified = true;\n }\n\n // Replace secrets with environment variables\n for (const pattern of SECRET_PATTERNS) {\n const regex = new RegExp(\n `(['\"\\`])(${pattern.pattern.source})(['\"\\`])`,\n 'g'\n );\n const replacement = `process.env.${pattern.envVar}`;\n\n if (regex.test(content)) {\n content = content.replace(regex, replacement);\n modified = true;\n\n // Add error checking after the variable definition\n const varPattern = new RegExp(\n `const\\\\s+(\\\\w+)\\\\s*=\\\\s*process\\\\.env\\\\.${pattern.envVar}`\n );\n const match = content.match(varPattern);\n if (match) {\n const varName = match[1];\n const checkCode =\n `\\nif (!${varName}) {\\n` +\n ` console.error('\u274C ${pattern.envVar} environment variable not set');\\n` +\n ` console.log('Please set ${pattern.envVar} in your .env file or export it in your shell');\\n` +\n ` process.exit(1);\\n}\\n`;\n\n // Insert after the variable declaration\n const insertPos = content.indexOf(match[0]) + match[0].length;\n content =\n content.slice(0, insertPos) +\n checkCode +\n content.slice(insertPos);\n }\n }\n }\n\n if (modified) {\n 
fs.writeFileSync(filePath, content);\n console.log(`\u2705 Fixed ${filePath}`);\n }\n }\n\n console.log('\\n\uD83D\uDCCB Next steps:');\n console.log('1. Review the changes');\n console.log('2. Add actual values to .env file');\n console.log('3. Test that everything still works');\n console.log('4. Commit the fixes\\n');\n }\n\n /**\n * Check git history for secrets\n */\n async checkGitHistory(): Promise<void> {\n console.log('\uD83D\uDD0D Checking git history for secrets...\\n');\n\n try {\n for (const pattern of SECRET_PATTERNS) {\n const command = `git log -p --all -G\"${pattern.pattern.source}\" --format=\"%H %s\" | head -20`;\n const result = execSync(command, {\n encoding: 'utf-8',\n stdio: 'pipe',\n }).trim();\n\n if (result) {\n console.log(`\u26A0\uFE0F Found ${pattern.name} in git history:`);\n console.log(result.split('\\n').slice(0, 3).join('\\n'));\n console.log('...\\n');\n }\n }\n\n console.log('\uD83D\uDCDD To clean git history:');\n console.log('1. Use BFG Repo-Cleaner: bfg --replace-text passwords.txt');\n console.log('2. Or interactive rebase: git rebase -i <commit>');\n console.log('3. 
Or allow via GitHub: Check push error for allow URLs\\n');\n } catch {\n console.log('Could not check git history');\n }\n }\n\n /**\n * Generate pre-commit hook\n */\n generatePreCommitHook(): void {\n const hookContent = `#!/bin/sh\n# Pre-commit hook to check for hardcoded secrets\n\necho \"\uD83D\uDD0D Checking for hardcoded secrets...\"\n\n# Patterns to check\npatterns=(\n \"lin_api_[a-zA-Z0-9]{40}\"\n \"lin_oauth_[a-zA-Z0-9]{64}\"\n \"sk-[a-zA-Z0-9]{48}\"\n \"npm_[a-zA-Z0-9]{36}\"\n \"ghp_[a-zA-Z0-9]{36}\"\n \"pk_live_[a-zA-Z0-9]{24,}\"\n \"sk_live_[a-zA-Z0-9]{24,}\"\n)\n\n# Check staged files\nfor pattern in \"\\${patterns[@]}\"; do\n if git diff --staged --no-color | grep -E \"$pattern\"; then\n echo \"\u274C Found hardcoded secret matching: $pattern\"\n echo \"Please use environment variables instead!\"\n exit 1\n fi\ndone\n\necho \"\u2705 No hardcoded secrets detected\"\nexit 0\n`;\n\n const hookPath = '.git/hooks/pre-commit';\n fs.writeFileSync(hookPath, hookContent);\n fs.chmodSync(hookPath, '755');\n\n console.log('\u2705 Generated pre-commit hook at .git/hooks/pre-commit');\n console.log(\n 'This will prevent committing hardcoded secrets in the future.\\n'\n );\n }\n}\n\n// CLI usage\n\nif (import.meta.url === `file://${process.argv[1]}`) {\n const scanner = new SecuritySecretsScanner();\n const command = process.argv[2];\n\n (async () => {\n switch (command) {\n case 'scan':\n await scanner.scanForSecrets();\n break;\n case 'fix':\n await scanner.scanForSecrets();\n await scanner.autoFix();\n break;\n case 'history':\n await scanner.checkGitHistory();\n break;\n case 'hook':\n scanner.generatePreCommitHook();\n break;\n default:\n console.log('Usage: security-secrets-scanner [scan|fix|history|hook]');\n console.log(' scan - Scan for hardcoded secrets');\n console.log(' fix - Auto-fix hardcoded secrets');\n console.log(' history - Check git history for secrets');\n console.log(' hook - Generate pre-commit hook');\n }\n })();\n}\n"],
|
|
5
|
+
"mappings": ";AAOA,SAAS,gBAAgB;AACzB,OAAO,QAAQ;AACf,SAAS,YAAY;AAQrB,MAAM,kBAAmC;AAAA,EACvC;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA,EAAE,SAAS,uBAAuB,MAAM,aAAa,QAAQ,YAAY;AAAA,EACzE;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ;AAAA,EACV;AACF;AAEO,MAAM,uBAAuB;AAAA,EAC1B,kBAA4C,oBAAI,IAAI;AAAA;AAAA;AAAA;AAAA,EAK5D,MAAM,eACJ,WAAqB;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GACe;AACf,YAAQ,IAAI,+CAAwC;AAEpD,eAAW,WAAW,UAAU;AAC9B,YAAM,QAAQ,MAAM,KAAK,SAAS;AAAA,QAChC,QAAQ,CAAC,mBAAmB,WAAW,YAAY,SAAS;AAAA,MAC9D,CAAC;AAED,iBAAW,QAAQ,OAAO;AACxB,cAAM,KAAK,SAAS,IAAI;AAAA,MAC1B;AAAA,IACF;AAEA,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,SAAS,UAAiC;AACtD,UAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,UAAM,QAAQ,CAAC,MAAM,UAAU;AAC7B,iBAAW,iBAAiB,iBAAiB;AAC3C,YAAI,cAAc,QAAQ,KAAK,IAAI,GAAG;AACpC,cAAI,CAAC,KAAK,gBAAgB,IAAI,QAAQ,GAAG;AACvC,iBAAK,gBAAgB,IAAI,UAAU,oBAAI,IAAI,CAAC;AAAA,UAC9C;AACA,gBAAM,UAAU,KAAK,gBAAgB,IAAI,QAAQ;AACjD,cAAI,SAAS;AACX,oBAAQ;AAAA,cACN,QAAQ,QAAQ,CAAC,KAAK,cAAc,IAAI,kBAAkB,cAAc,MAAM;AAAA,YAChF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAuB;AAC7B,QAAI,KAAK,gBAAgB,SAAS,GAAG;AACnC,cAAQ,IAAI,yCAAoC;AAChD;AAAA,IACF;AAEA,YAAQ;AAAA,MACN,4CAAkC,KAAK,gBAAgB,IAAI;AAAA;AAAA,IAC7D;AAEA,eAAW,CAAC,MAAM,OAAO,KAAK,KAAK,iBAAiB;AAClD,cAAQ,IAAI,aAAM,IAAI,GAAG;AACzB,iBAAW,UAAU,SAAS;AAC5B,gBAAQ,IAAI,MAAM,MAAM,EAAE;AAAA,MAC5B;AACA,cAAQ,IAAI;AAAA,IACd;AAEA,YAAQ,IAAI,uBAAgB;AAC5B,YAAQ,IAAI,4DAA4D;AACxE,YAAQ,IAAI,wDAA0D;AACtE,YAAQ,IAAI,4CAA4C;AACxD,YAAQ,IAAI,qCAAqC;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,YAAQ,IAAI,8CAAuC;AAEnD,eAAW,CAAC,QAAQ,KAAK,KAAK,
iBAAiB;AAC7C,UAAI,UAAU,GAAG,aAAa,UAAU,OAAO;AAC/C,UAAI,WAAW;AAGf,WACG,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,KAAK,MACpD,CAAC,QAAQ,SAAS,eAAe,KACjC,CAAC,QAAQ,SAAS,mBAAmB,GACrC;AAEA,YAAI,QAAQ,WAAW,KAAK,GAAG;AAC7B,gBAAM,eAAe,QAAQ,QAAQ,IAAI;AACzC,oBACE,QAAQ,MAAM,GAAG,eAAe,CAAC,IACjC,gCACA,QAAQ,MAAM,eAAe,CAAC;AAAA,QAClC,OAAO;AACL,oBAAU,gCAAgC;AAAA,QAC5C;AACA,mBAAW;AAAA,MACb;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,QAAQ,IAAI;AAAA,UAChB,YAAY,QAAQ,QAAQ,MAAM;AAAA,UAClC;AAAA,QACF;AACA,cAAM,cAAc,eAAe,QAAQ,MAAM;AAEjD,YAAI,MAAM,KAAK,OAAO,GAAG;AACvB,oBAAU,QAAQ,QAAQ,OAAO,WAAW;AAC5C,qBAAW;AAGX,gBAAM,aAAa,IAAI;AAAA,YACrB,2CAA2C,QAAQ,MAAM;AAAA,UAC3D;AACA,gBAAM,QAAQ,QAAQ,MAAM,UAAU;AACtC,cAAI,OAAO;AACT,kBAAM,UAAU,MAAM,CAAC;AACvB,kBAAM,YACJ;AAAA,OAAU,OAAO;AAAA,0BACK,QAAQ,MAAM;AAAA,4BACP,QAAQ,MAAM;AAAA;AAAA;AAAA;AAI7C,kBAAM,YAAY,QAAQ,QAAQ,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC,EAAE;AACvD,sBACE,QAAQ,MAAM,GAAG,SAAS,IAC1B,YACA,QAAQ,MAAM,SAAS;AAAA,UAC3B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,UAAU;AACZ,WAAG,cAAc,UAAU,OAAO;AAClC,gBAAQ,IAAI,gBAAW,QAAQ,EAAE;AAAA,MACnC;AAAA,IACF;AAEA,YAAQ,IAAI,yBAAkB;AAC9B,YAAQ,IAAI,uBAAuB;AACnC,YAAQ,IAAI,mCAAmC;AAC/C,YAAQ,IAAI,qCAAqC;AACjD,YAAQ,IAAI,uBAAuB;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBAAiC;AACrC,YAAQ,IAAI,iDAA0C;AAEtD,QAAI;AACF,iBAAW,WAAW,iBAAiB;AACrC,cAAM,UAAU,uBAAuB,QAAQ,QAAQ,MAAM;AAC7D,cAAM,SAAS,SAAS,SAAS;AAAA,UAC/B,UAAU;AAAA,UACV,OAAO;AAAA,QACT,CAAC,EAAE,KAAK;AAER,YAAI,QAAQ;AACV,kBAAQ,IAAI,uBAAa,QAAQ,IAAI,kBAAkB;AACvD,kBAAQ,IAAI,OAAO,MAAM,IAAI,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,IAAI,CAAC;AACrD,kBAAQ,IAAI,OAAO;AAAA,QACrB;AAAA,MACF;AAEA,cAAQ,IAAI,iCAA0B;AACtC,cAAQ,IAAI,2DAA2D;AACvE,cAAQ,IAAI,kDAAkD;AAC9D,cAAQ,IAAI,2DAA2D;AAAA,IACzE,QAAQ;AACN,cAAQ,IAAI,6BAA6B;AAAA,IAC3C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,wBAA8B;AAC5B,UAAM,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6BpB,UAAM,WAAW;AACjB,OAAG,cAAc,UAAU,WAAW;AACtC,OAAG,UAAU,UAAU,KAAK;AAE5B,YAAQ,IAAI,2DAAsD;AAClE,YAAQ;AAAA,MACN;AAAA,IACF;AAAA,EA
CF;AACF;AAIA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,QAAM,UAAU,IAAI,uBAAuB;AAC3C,QAAM,UAAU,QAAQ,KAAK,CAAC;AAE9B,GAAC,YAAY;AACX,YAAQ,SAAS;AAAA,MACf,KAAK;AACH,cAAM,QAAQ,eAAe;AAC7B;AAAA,MACF,KAAK;AACH,cAAM,QAAQ,eAAe;AAC7B,cAAM,QAAQ,QAAQ;AACtB;AAAA,MACF,KAAK;AACH,cAAM,QAAQ,gBAAgB;AAC9B;AAAA,MACF,KAAK;AACH,gBAAQ,sBAAsB;AAC9B;AAAA,MACF;AACE,gBAAQ,IAAI,yDAAyD;AACrE,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI,2CAA2C;AACvD,gBAAQ,IAAI,sCAAsC;AAAA,IACtD;AAAA,EACF,GAAG;AACL;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
function getEnv(key, defaultValue) {
|
|
2
|
+
const value = process.env[key];
|
|
3
|
+
if (!value && !defaultValue) {
|
|
4
|
+
throw new Error(`Environment variable ${key} is required but not set`);
|
|
5
|
+
}
|
|
6
|
+
return value || defaultValue || "";
|
|
7
|
+
}
|
|
8
|
+
function getOptionalEnv(key, defaultValue) {
|
|
9
|
+
return process.env[key] || defaultValue;
|
|
10
|
+
}
|
|
11
|
+
function getRequiredEnv(key) {
|
|
12
|
+
const value = process.env[key];
|
|
13
|
+
if (!value) {
|
|
14
|
+
throw new Error(`Environment variable ${key} is required but not set`);
|
|
15
|
+
}
|
|
16
|
+
return value;
|
|
17
|
+
}
|
|
18
|
+
function getBooleanEnv(key, defaultValue = false) {
|
|
19
|
+
const value = process.env[key];
|
|
20
|
+
if (!value) return defaultValue;
|
|
21
|
+
return value.toLowerCase() === "true" || value === "1";
|
|
22
|
+
}
|
|
23
|
+
function getNumberEnv(key, defaultValue) {
|
|
24
|
+
const value = process.env[key];
|
|
25
|
+
if (!value) {
|
|
26
|
+
if (defaultValue === void 0) {
|
|
27
|
+
throw new Error(`Environment variable ${key} is required but not set`);
|
|
28
|
+
}
|
|
29
|
+
return defaultValue;
|
|
30
|
+
}
|
|
31
|
+
const num = parseInt(value, 10);
|
|
32
|
+
if (isNaN(num)) {
|
|
33
|
+
throw new Error(
|
|
34
|
+
`Environment variable ${key} must be a number, got: ${value}`
|
|
35
|
+
);
|
|
36
|
+
}
|
|
37
|
+
return num;
|
|
38
|
+
}
|
|
39
|
+
export {
|
|
40
|
+
getBooleanEnv,
|
|
41
|
+
getEnv,
|
|
42
|
+
getNumberEnv,
|
|
43
|
+
getOptionalEnv,
|
|
44
|
+
getRequiredEnv
|
|
45
|
+
};
|
|
46
|
+
//# sourceMappingURL=env.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/utils/env.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Shared environment variable utilities\n */\n\nexport function getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (!value && !defaultValue) {\n throw new Error(`Environment variable ${key} is required but not set`);\n }\n return value || defaultValue || '';\n}\n\nexport function getOptionalEnv(\n key: string,\n defaultValue?: string\n): string | undefined {\n return process.env[key] || defaultValue;\n}\n\nexport function getRequiredEnv(key: string): string {\n const value = process.env[key];\n if (!value) {\n throw new Error(`Environment variable ${key} is required but not set`);\n }\n return value;\n}\n\nexport function getBooleanEnv(\n key: string,\n defaultValue: boolean = false\n): boolean {\n const value = process.env[key];\n if (!value) return defaultValue;\n return value.toLowerCase() === 'true' || value === '1';\n}\n\nexport function getNumberEnv(key: string, defaultValue?: number): number {\n const value = process.env[key];\n if (!value) {\n if (defaultValue === undefined) {\n throw new Error(`Environment variable ${key} is required but not set`);\n }\n return defaultValue;\n }\n const num = parseInt(value, 10);\n if (isNaN(num)) {\n throw new Error(\n `Environment variable ${key} must be a number, got: ${value}`\n );\n }\n return num;\n}\n"],
|
|
5
|
+
"mappings": "AAIO,SAAS,OAAO,KAAa,cAA+B;AACjE,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,CAAC,SAAS,CAAC,cAAc;AAC3B,UAAM,IAAI,MAAM,wBAAwB,GAAG,0BAA0B;AAAA,EACvE;AACA,SAAO,SAAS,gBAAgB;AAClC;AAEO,SAAS,eACd,KACA,cACoB;AACpB,SAAO,QAAQ,IAAI,GAAG,KAAK;AAC7B;AAEO,SAAS,eAAe,KAAqB;AAClD,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,wBAAwB,GAAG,0BAA0B;AAAA,EACvE;AACA,SAAO;AACT;AAEO,SAAS,cACd,KACA,eAAwB,OACf;AACT,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,CAAC,MAAO,QAAO;AACnB,SAAO,MAAM,YAAY,MAAM,UAAU,UAAU;AACrD;AAEO,SAAS,aAAa,KAAa,cAA+B;AACvE,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,CAAC,OAAO;AACV,QAAI,iBAAiB,QAAW;AAC9B,YAAM,IAAI,MAAM,wBAAwB,GAAG,0BAA0B;AAAA,IACvE;AACA,WAAO;AAAA,EACT;AACA,QAAM,MAAM,SAAS,OAAO,EAAE;AAC9B,MAAI,MAAM,GAAG,GAAG;AACd,UAAM,IAAI;AAAA,MACR,wBAAwB,GAAG,2BAA2B,KAAK;AAAA,IAC7D;AAAA,EACF;AACA,SAAO;AACT;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
package/dist/utils/logger.js
CHANGED
|
@@ -1,15 +1,4 @@
|
|
|
1
1
|
import chalk from "chalk";
|
|
2
|
-
function getEnv(key, defaultValue) {
|
|
3
|
-
const value = process.env[key];
|
|
4
|
-
if (value === void 0) {
|
|
5
|
-
if (defaultValue !== void 0) return defaultValue;
|
|
6
|
-
throw new Error(`Environment variable ${key} is required`);
|
|
7
|
-
}
|
|
8
|
-
return value;
|
|
9
|
-
}
|
|
10
|
-
function getOptionalEnv(key) {
|
|
11
|
-
return process.env[key];
|
|
12
|
-
}
|
|
13
2
|
class Logger {
|
|
14
3
|
name;
|
|
15
4
|
logLevel;
|
package/dist/utils/logger.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/utils/logger.ts"],
|
|
4
|
-
"sourcesContent": ["import chalk from 'chalk';\n
|
|
5
|
-
"mappings": "AAAA,OAAO,WAAW;
|
|
4
|
+
"sourcesContent": ["import chalk from 'chalk';\n\nexport type LogLevel = 'debug' | 'info' | 'warn' | 'error';\n\nexport class Logger {\n private name: string;\n private logLevel: LogLevel;\n\n constructor(name: string, logLevel: LogLevel = 'info') {\n this.name = name;\n this.logLevel = (process.env['LOG_LEVEL'] as LogLevel) || logLevel;\n }\n\n private shouldLog(level: LogLevel): boolean {\n const levels: LogLevel[] = ['debug', 'info', 'warn', 'error'];\n const currentIndex = levels.indexOf(this.logLevel);\n const targetIndex = levels.indexOf(level);\n return targetIndex >= currentIndex;\n }\n\n private formatMessage(\n level: LogLevel,\n message: string,\n ...args: unknown[]\n ): string {\n const timestamp = new Date().toISOString();\n const prefix = `[${timestamp}] [${this.name}] [${level.toUpperCase()}]`;\n\n const formattedArgs =\n args.length > 0\n ? ' ' +\n args\n .map((arg) =>\n typeof arg === 'object' ? JSON.stringify(arg, null, 2) : arg\n )\n .join(' ')\n : '';\n\n return `${prefix} ${message}${formattedArgs}`;\n }\n\n public debug(message: string, ...args: unknown[]): void {\n if (this.shouldLog('debug')) {\n console.log(chalk.gray(this.formatMessage('debug', message, ...args)));\n }\n }\n\n public info(message: string, ...args: unknown[]): void {\n if (this.shouldLog('info')) {\n console.log(chalk.blue(this.formatMessage('info', message, ...args)));\n }\n }\n\n public warn(message: string, ...args: unknown[]): void {\n if (this.shouldLog('warn')) {\n console.warn(chalk.yellow(this.formatMessage('warn', message, ...args)));\n }\n }\n\n public error(message: string, error?: unknown): void {\n if (this.shouldLog('error')) {\n const errorDetails = error\n ? error instanceof Error\n ? `\\n${error.stack || error.message}`\n : `\\n${JSON.stringify(error, null, 2)}`\n : '';\n console.error(\n chalk.red(this.formatMessage('error', message) + errorDetails)\n );\n }\n }\n}\n"],
|
|
5
|
+
"mappings": "AAAA,OAAO,WAAW;AAIX,MAAM,OAAO;AAAA,EACV;AAAA,EACA;AAAA,EAER,YAAY,MAAc,WAAqB,QAAQ;AACrD,SAAK,OAAO;AACZ,SAAK,WAAY,QAAQ,IAAI,WAAW,KAAkB;AAAA,EAC5D;AAAA,EAEQ,UAAU,OAA0B;AAC1C,UAAM,SAAqB,CAAC,SAAS,QAAQ,QAAQ,OAAO;AAC5D,UAAM,eAAe,OAAO,QAAQ,KAAK,QAAQ;AACjD,UAAM,cAAc,OAAO,QAAQ,KAAK;AACxC,WAAO,eAAe;AAAA,EACxB;AAAA,EAEQ,cACN,OACA,YACG,MACK;AACR,UAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,UAAM,SAAS,IAAI,SAAS,MAAM,KAAK,IAAI,MAAM,MAAM,YAAY,CAAC;AAEpE,UAAM,gBACJ,KAAK,SAAS,IACV,MACA,KACG;AAAA,MAAI,CAAC,QACJ,OAAO,QAAQ,WAAW,KAAK,UAAU,KAAK,MAAM,CAAC,IAAI;AAAA,IAC3D,EACC,KAAK,GAAG,IACX;AAEN,WAAO,GAAG,MAAM,IAAI,OAAO,GAAG,aAAa;AAAA,EAC7C;AAAA,EAEO,MAAM,YAAoB,MAAuB;AACtD,QAAI,KAAK,UAAU,OAAO,GAAG;AAC3B,cAAQ,IAAI,MAAM,KAAK,KAAK,cAAc,SAAS,SAAS,GAAG,IAAI,CAAC,CAAC;AAAA,IACvE;AAAA,EACF;AAAA,EAEO,KAAK,YAAoB,MAAuB;AACrD,QAAI,KAAK,UAAU,MAAM,GAAG;AAC1B,cAAQ,IAAI,MAAM,KAAK,KAAK,cAAc,QAAQ,SAAS,GAAG,IAAI,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AAAA,EAEO,KAAK,YAAoB,MAAuB;AACrD,QAAI,KAAK,UAAU,MAAM,GAAG;AAC1B,cAAQ,KAAK,MAAM,OAAO,KAAK,cAAc,QAAQ,SAAS,GAAG,IAAI,CAAC,CAAC;AAAA,IACzE;AAAA,EACF;AAAA,EAEO,MAAM,SAAiB,OAAuB;AACnD,QAAI,KAAK,UAAU,OAAO,GAAG;AAC3B,YAAM,eAAe,QACjB,iBAAiB,QACf;AAAA,EAAK,MAAM,SAAS,MAAM,OAAO,KACjC;AAAA,EAAK,KAAK,UAAU,OAAO,MAAM,CAAC,CAAC,KACrC;AACJ,cAAQ;AAAA,QACN,MAAM,IAAI,KAAK,cAAc,SAAS,OAAO,IAAI,YAAY;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AACF;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@stackmemoryai/stackmemory",
|
|
3
|
-
"version": "0.3.
|
|
3
|
+
"version": "0.3.9",
|
|
4
4
|
"description": "Lossless memory runtime for AI coding tools - organizes context as a call stack instead of linear chat logs, with team collaboration and infinite retention",
|
|
5
5
|
"engines": {
|
|
6
6
|
"node": ">=20.0.0",
|