@stackmemoryai/stackmemory 0.5.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51):
  1. package/dist/cli/commands/config.js +81 -0
  2. package/dist/cli/commands/config.js.map +2 -2
  3. package/dist/cli/commands/decision.js +262 -0
  4. package/dist/cli/commands/decision.js.map +7 -0
  5. package/dist/cli/commands/handoff.js +87 -24
  6. package/dist/cli/commands/handoff.js.map +3 -3
  7. package/dist/cli/commands/service.js +684 -0
  8. package/dist/cli/commands/service.js.map +7 -0
  9. package/dist/cli/commands/sweep.js +311 -0
  10. package/dist/cli/commands/sweep.js.map +7 -0
  11. package/dist/cli/index.js +98 -4
  12. package/dist/cli/index.js.map +2 -2
  13. package/dist/cli/streamlined-cli.js +144 -0
  14. package/dist/cli/streamlined-cli.js.map +7 -0
  15. package/dist/core/config/storage-config.js +111 -0
  16. package/dist/core/config/storage-config.js.map +7 -0
  17. package/dist/core/events/event-bus.js +110 -0
  18. package/dist/core/events/event-bus.js.map +7 -0
  19. package/dist/core/plugins/plugin-interface.js +87 -0
  20. package/dist/core/plugins/plugin-interface.js.map +7 -0
  21. package/dist/core/session/enhanced-handoff.js +654 -0
  22. package/dist/core/session/enhanced-handoff.js.map +7 -0
  23. package/dist/core/storage/simplified-storage.js +328 -0
  24. package/dist/core/storage/simplified-storage.js.map +7 -0
  25. package/dist/daemon/session-daemon.js +308 -0
  26. package/dist/daemon/session-daemon.js.map +7 -0
  27. package/dist/plugins/linear/index.js +166 -0
  28. package/dist/plugins/linear/index.js.map +7 -0
  29. package/dist/plugins/loader.js +57 -0
  30. package/dist/plugins/loader.js.map +7 -0
  31. package/dist/plugins/plugin-interface.js +67 -0
  32. package/dist/plugins/plugin-interface.js.map +7 -0
  33. package/dist/plugins/ralph/simple-ralph-plugin.js +305 -0
  34. package/dist/plugins/ralph/simple-ralph-plugin.js.map +7 -0
  35. package/dist/plugins/ralph/use-cases/code-generator.js +151 -0
  36. package/dist/plugins/ralph/use-cases/code-generator.js.map +7 -0
  37. package/dist/plugins/ralph/use-cases/test-generator.js +201 -0
  38. package/dist/plugins/ralph/use-cases/test-generator.js.map +7 -0
  39. package/dist/skills/repo-ingestion-skill.js +54 -10
  40. package/dist/skills/repo-ingestion-skill.js.map +2 -2
  41. package/package.json +4 -8
  42. package/scripts/archive/check-all-duplicates.ts +2 -2
  43. package/scripts/archive/merge-linear-duplicates.ts +6 -4
  44. package/scripts/install-claude-hooks-auto.js +72 -15
  45. package/scripts/measure-handoff-impact.mjs +395 -0
  46. package/scripts/measure-handoff-impact.ts +450 -0
  47. package/templates/claude-hooks/on-startup.js +200 -19
  48. package/templates/services/com.stackmemory.guardian.plist +59 -0
  49. package/templates/services/stackmemory-guardian.service +41 -0
  50. package/scripts/testing/results/real-performance-results.json +0 -90
  51. package/scripts/testing/test-tier-migration.js +0 -100
@@ -0,0 +1,201 @@
1
+ import { SimpleRalphPlugin } from "../simple-ralph-plugin.js";
2
+ import * as fs from "fs/promises";
3
+ import * as path from "path";
4
+ import { execSync } from "child_process";
5
+ class TestGenerator {
6
+ ralphPlugin;
7
+ constructor(ralphPlugin) {
8
+ this.ralphPlugin = ralphPlugin;
9
+ }
10
+ /**
11
+ * Generate tests iteratively until coverage target is met
12
+ */
13
+ async generateTests(request) {
14
+ const coverageTarget = request.coverageTarget || 80;
15
+ const task = {
16
+ id: `testgen-${Date.now()}`,
17
+ description: `Generate ${request.framework} tests for ${request.targetFile}`,
18
+ acceptanceCriteria: [
19
+ "All tests pass",
20
+ `Test coverage >= ${coverageTarget}%`,
21
+ "No test duplication",
22
+ "Tests are maintainable and clear"
23
+ ],
24
+ maxIterations: 7
25
+ };
26
+ if (request.includeEdgeCases) {
27
+ task.acceptanceCriteria.push("Edge cases covered");
28
+ task.acceptanceCriteria.push("Error conditions tested");
29
+ }
30
+ if (request.includeIntegrationTests) {
31
+ task.acceptanceCriteria.push("Integration tests included");
32
+ }
33
+ const result = await this.ralphPlugin.runTask(task);
34
+ const analysis = await this.analyzeGeneratedTests(request.targetFile, request.framework);
35
+ return {
36
+ testFiles: analysis.files,
37
+ coverage: analysis.coverage,
38
+ testCount: analysis.testCount,
39
+ passRate: analysis.passRate,
40
+ iterations: result.iterations
41
+ };
42
+ }
43
+ /**
44
+ * Analyze generated tests
45
+ */
46
+ async analyzeGeneratedTests(targetFile, framework) {
47
+ const analysis = {
48
+ files: [],
49
+ coverage: 0,
50
+ testCount: 0,
51
+ passRate: 0
52
+ };
53
+ try {
54
+ const testDir = path.dirname(targetFile);
55
+ const baseName = path.basename(targetFile, path.extname(targetFile));
56
+ const testPatterns = [
57
+ `${baseName}.test.*`,
58
+ `${baseName}.spec.*`,
59
+ `test_${baseName}.*`,
60
+ `${baseName}_test.*`
61
+ ];
62
+ for (const pattern of testPatterns) {
63
+ const files = await this.findFiles(testDir, pattern);
64
+ analysis.files.push(...files);
65
+ }
66
+ analysis.coverage = await this.runCoverageAnalysis(framework);
67
+ const testResults = await this.runTests(framework);
68
+ analysis.testCount = testResults.total;
69
+ analysis.passRate = testResults.passed / testResults.total * 100;
70
+ } catch (error) {
71
+ console.error("Error analyzing tests:", error);
72
+ }
73
+ return analysis;
74
+ }
75
+ /**
76
+ * Find files matching pattern
77
+ */
78
+ async findFiles(dir, pattern) {
79
+ const files = [];
80
+ try {
81
+ const entries = await fs.readdir(dir);
82
+ for (const entry of entries) {
83
+ if (this.matchesPattern(entry, pattern)) {
84
+ files.push(path.join(dir, entry));
85
+ }
86
+ }
87
+ } catch (error) {
88
+ console.error("Error finding files:", error);
89
+ }
90
+ return files;
91
+ }
92
+ /**
93
+ * Simple pattern matching
94
+ */
95
+ matchesPattern(filename, pattern) {
96
+ const regex = new RegExp(pattern.replace(/\*/g, ".*"));
97
+ return regex.test(filename);
98
+ }
99
+ /**
100
+ * Run coverage analysis
101
+ */
102
+ async runCoverageAnalysis(framework) {
103
+ try {
104
+ let command = "";
105
+ switch (framework) {
106
+ case "jest":
107
+ command = "npx jest --coverage --silent";
108
+ break;
109
+ case "vitest":
110
+ command = "npx vitest run --coverage --silent";
111
+ break;
112
+ case "mocha":
113
+ command = "npx nyc mocha";
114
+ break;
115
+ case "pytest":
116
+ command = "pytest --cov --cov-report=json";
117
+ break;
118
+ }
119
+ if (command) {
120
+ const output = execSync(command, { encoding: "utf-8", stdio: "pipe" });
121
+ const match = output.match(/(\d+(?:\.\d+)?)\s*%/);
122
+ if (match) {
123
+ return parseFloat(match[1]);
124
+ }
125
+ }
126
+ } catch (error) {
127
+ console.error("Coverage analysis failed:", error);
128
+ }
129
+ return 0;
130
+ }
131
+ /**
132
+ * Run tests and get results
133
+ */
134
+ async runTests(framework) {
135
+ try {
136
+ let command = "";
137
+ switch (framework) {
138
+ case "jest":
139
+ command = "npx jest --json";
140
+ break;
141
+ case "vitest":
142
+ command = "npx vitest run --reporter=json";
143
+ break;
144
+ case "mocha":
145
+ command = "npx mocha --reporter json";
146
+ break;
147
+ case "pytest":
148
+ command = "pytest --json-report";
149
+ break;
150
+ }
151
+ if (command) {
152
+ const output = execSync(command, { encoding: "utf-8", stdio: "pipe" });
153
+ try {
154
+ const json = JSON.parse(output);
155
+ return {
156
+ total: json.numTotalTests || json.tests || 0,
157
+ passed: json.numPassedTests || json.passes || 0
158
+ };
159
+ } catch {
160
+ const totalMatch = output.match(/(\d+)\s+tests?/i);
161
+ const passedMatch = output.match(/(\d+)\s+pass/i);
162
+ return {
163
+ total: totalMatch ? parseInt(totalMatch[1]) : 0,
164
+ passed: passedMatch ? parseInt(passedMatch[1]) : 0
165
+ };
166
+ }
167
+ }
168
+ } catch (error) {
169
+ console.error("Test execution failed:", error);
170
+ }
171
+ return { total: 0, passed: 0 };
172
+ }
173
+ }
174
+ async function generateTestsForModule() {
175
+ const ralphPlugin = new SimpleRalphPlugin();
176
+ await ralphPlugin.initialize({
177
+ eventBus: {},
178
+ config: { name: "simple-ralph", version: "2.0.0", enabled: true },
179
+ dataDir: ".ralph",
180
+ getRepository: () => null,
181
+ registerRepository: () => {
182
+ }
183
+ });
184
+ const generator = new TestGenerator(ralphPlugin);
185
+ const result = await generator.generateTests({
186
+ targetFile: "./src/utils/validator.ts",
187
+ framework: "jest",
188
+ coverageTarget: 90,
189
+ includeEdgeCases: true,
190
+ includeIntegrationTests: false,
191
+ mockStrategy: "partial"
192
+ });
193
+ console.log(`Generated ${result.testCount} tests with ${result.coverage}% coverage`);
194
+ console.log(`Pass rate: ${result.passRate}%`);
195
+ console.log(`Completed in ${result.iterations} iterations`);
196
+ }
197
+ export {
198
+ TestGenerator,
199
+ generateTestsForModule
200
+ };
201
+ //# sourceMappingURL=test-generator.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../../../src/plugins/ralph/use-cases/test-generator.ts"],
4
+ "sourcesContent": ["/**\n * Test Generator - Iteratively generate comprehensive test suites\n * Focuses on achieving high coverage and catching edge cases\n */\n\nimport { SimpleRalphPlugin, SimpleTask } from '../simple-ralph-plugin.js';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport { execSync } from 'child_process';\n\nexport interface TestGenRequest {\n targetFile: string;\n framework: 'jest' | 'mocha' | 'vitest' | 'pytest';\n coverageTarget?: number;\n includeEdgeCases?: boolean;\n includeIntegrationTests?: boolean;\n mockStrategy?: 'full' | 'partial' | 'none';\n}\n\nexport interface GeneratedTests {\n testFiles: string[];\n coverage: number;\n testCount: number;\n passRate: number;\n iterations: number;\n}\n\nexport class TestGenerator {\n private ralphPlugin: SimpleRalphPlugin;\n\n constructor(ralphPlugin: SimpleRalphPlugin) {\n this.ralphPlugin = ralphPlugin;\n }\n\n /**\n * Generate tests iteratively until coverage target is met\n */\n async generateTests(request: TestGenRequest): Promise<GeneratedTests> {\n const coverageTarget = request.coverageTarget || 80;\n\n const task: SimpleTask = {\n id: `testgen-${Date.now()}`,\n description: `Generate ${request.framework} tests for ${request.targetFile}`,\n acceptanceCriteria: [\n 'All tests pass',\n `Test coverage >= ${coverageTarget}%`,\n 'No test duplication',\n 'Tests are maintainable and clear'\n ],\n maxIterations: 7\n };\n\n // Add edge case criteria if requested\n if (request.includeEdgeCases) {\n task.acceptanceCriteria.push('Edge cases covered');\n task.acceptanceCriteria.push('Error conditions tested');\n }\n\n if (request.includeIntegrationTests) {\n task.acceptanceCriteria.push('Integration tests included');\n }\n\n // Run iterative test generation\n const result = await this.ralphPlugin.runTask(task);\n\n // Analyze generated tests\n const analysis = await this.analyzeGeneratedTests(request.targetFile, request.framework);\n\n return {\n testFiles: analysis.files,\n 
coverage: analysis.coverage,\n testCount: analysis.testCount,\n passRate: analysis.passRate,\n iterations: result.iterations\n };\n }\n\n /**\n * Analyze generated tests\n */\n private async analyzeGeneratedTests(\n targetFile: string, \n framework: string\n ): Promise<{\n files: string[];\n coverage: number;\n testCount: number;\n passRate: number;\n }> {\n const analysis = {\n files: [] as string[],\n coverage: 0,\n testCount: 0,\n passRate: 0\n };\n\n try {\n // Find test files\n const testDir = path.dirname(targetFile);\n const baseName = path.basename(targetFile, path.extname(targetFile));\n \n const testPatterns = [\n `${baseName}.test.*`,\n `${baseName}.spec.*`,\n `test_${baseName}.*`,\n `${baseName}_test.*`\n ];\n\n for (const pattern of testPatterns) {\n const files = await this.findFiles(testDir, pattern);\n analysis.files.push(...files);\n }\n\n // Run coverage analysis\n analysis.coverage = await this.runCoverageAnalysis(framework);\n \n // Count tests and check pass rate\n const testResults = await this.runTests(framework);\n analysis.testCount = testResults.total;\n analysis.passRate = testResults.passed / testResults.total * 100;\n\n } catch (error) {\n console.error('Error analyzing tests:', error);\n }\n\n return analysis;\n }\n\n /**\n * Find files matching pattern\n */\n private async findFiles(dir: string, pattern: string): Promise<string[]> {\n const files: string[] = [];\n try {\n const entries = await fs.readdir(dir);\n for (const entry of entries) {\n if (this.matchesPattern(entry, pattern)) {\n files.push(path.join(dir, entry));\n }\n }\n } catch (error) {\n console.error('Error finding files:', error);\n }\n return files;\n }\n\n /**\n * Simple pattern matching\n */\n private matchesPattern(filename: string, pattern: string): boolean {\n const regex = new RegExp(pattern.replace(/\\*/g, '.*'));\n return regex.test(filename);\n }\n\n /**\n * Run coverage analysis\n */\n private async runCoverageAnalysis(framework: string): Promise<number> 
{\n try {\n let command = '';\n switch (framework) {\n case 'jest':\n command = 'npx jest --coverage --silent';\n break;\n case 'vitest':\n command = 'npx vitest run --coverage --silent';\n break;\n case 'mocha':\n command = 'npx nyc mocha';\n break;\n case 'pytest':\n command = 'pytest --cov --cov-report=json';\n break;\n }\n\n if (command) {\n const output = execSync(command, { encoding: 'utf-8', stdio: 'pipe' });\n \n // Parse coverage from output (simplified)\n const match = output.match(/(\\d+(?:\\.\\d+)?)\\s*%/);\n if (match) {\n return parseFloat(match[1]);\n }\n }\n } catch (error) {\n console.error('Coverage analysis failed:', error);\n }\n \n return 0;\n }\n\n /**\n * Run tests and get results\n */\n private async runTests(framework: string): Promise<{ total: number; passed: number }> {\n try {\n let command = '';\n switch (framework) {\n case 'jest':\n command = 'npx jest --json';\n break;\n case 'vitest':\n command = 'npx vitest run --reporter=json';\n break;\n case 'mocha':\n command = 'npx mocha --reporter json';\n break;\n case 'pytest':\n command = 'pytest --json-report';\n break;\n }\n\n if (command) {\n const output = execSync(command, { encoding: 'utf-8', stdio: 'pipe' });\n \n try {\n const json = JSON.parse(output);\n // Parse based on framework (simplified)\n return {\n total: json.numTotalTests || json.tests || 0,\n passed: json.numPassedTests || json.passes || 0\n };\n } catch {\n // Fallback to regex parsing\n const totalMatch = output.match(/(\\d+)\\s+tests?/i);\n const passedMatch = output.match(/(\\d+)\\s+pass/i);\n \n return {\n total: totalMatch ? parseInt(totalMatch[1]) : 0,\n passed: passedMatch ? 
parseInt(passedMatch[1]) : 0\n };\n }\n }\n } catch (error) {\n console.error('Test execution failed:', error);\n }\n \n return { total: 0, passed: 0 };\n }\n}\n\n// Example: Generate tests for a TypeScript module\nexport async function generateTestsForModule(): Promise<void> {\n const ralphPlugin = new SimpleRalphPlugin();\n await ralphPlugin.initialize({\n eventBus: {} as any,\n config: { name: 'simple-ralph', version: '2.0.0', enabled: true },\n dataDir: '.ralph',\n getRepository: () => null as any,\n registerRepository: () => {}\n });\n\n const generator = new TestGenerator(ralphPlugin);\n \n const result = await generator.generateTests({\n targetFile: './src/utils/validator.ts',\n framework: 'jest',\n coverageTarget: 90,\n includeEdgeCases: true,\n includeIntegrationTests: false,\n mockStrategy: 'partial'\n });\n\n console.log(`Generated ${result.testCount} tests with ${result.coverage}% coverage`);\n console.log(`Pass rate: ${result.passRate}%`);\n console.log(`Completed in ${result.iterations} iterations`);\n}"],
5
+ "mappings": "AAKA,SAAS,yBAAqC;AAC9C,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,SAAS,gBAAgB;AAmBlB,MAAM,cAAc;AAAA,EACjB;AAAA,EAER,YAAY,aAAgC;AAC1C,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,SAAkD;AACpE,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,UAAM,OAAmB;AAAA,MACvB,IAAI,WAAW,KAAK,IAAI,CAAC;AAAA,MACzB,aAAa,YAAY,QAAQ,SAAS,cAAc,QAAQ,UAAU;AAAA,MAC1E,oBAAoB;AAAA,QAClB;AAAA,QACA,oBAAoB,cAAc;AAAA,QAClC;AAAA,QACA;AAAA,MACF;AAAA,MACA,eAAe;AAAA,IACjB;AAGA,QAAI,QAAQ,kBAAkB;AAC5B,WAAK,mBAAmB,KAAK,oBAAoB;AACjD,WAAK,mBAAmB,KAAK,yBAAyB;AAAA,IACxD;AAEA,QAAI,QAAQ,yBAAyB;AACnC,WAAK,mBAAmB,KAAK,4BAA4B;AAAA,IAC3D;AAGA,UAAM,SAAS,MAAM,KAAK,YAAY,QAAQ,IAAI;AAGlD,UAAM,WAAW,MAAM,KAAK,sBAAsB,QAAQ,YAAY,QAAQ,SAAS;AAEvF,WAAO;AAAA,MACL,WAAW,SAAS;AAAA,MACpB,UAAU,SAAS;AAAA,MACnB,WAAW,SAAS;AAAA,MACpB,UAAU,SAAS;AAAA,MACnB,YAAY,OAAO;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,YACA,WAMC;AACD,UAAM,WAAW;AAAA,MACf,OAAO,CAAC;AAAA,MACR,UAAU;AAAA,MACV,WAAW;AAAA,MACX,UAAU;AAAA,IACZ;AAEA,QAAI;AAEF,YAAM,UAAU,KAAK,QAAQ,UAAU;AACvC,YAAM,WAAW,KAAK,SAAS,YAAY,KAAK,QAAQ,UAAU,CAAC;AAEnE,YAAM,eAAe;AAAA,QACnB,GAAG,QAAQ;AAAA,QACX,GAAG,QAAQ;AAAA,QACX,QAAQ,QAAQ;AAAA,QAChB,GAAG,QAAQ;AAAA,MACb;AAEA,iBAAW,WAAW,cAAc;AAClC,cAAM,QAAQ,MAAM,KAAK,UAAU,SAAS,OAAO;AACnD,iBAAS,MAAM,KAAK,GAAG,KAAK;AAAA,MAC9B;AAGA,eAAS,WAAW,MAAM,KAAK,oBAAoB,SAAS;AAG5D,YAAM,cAAc,MAAM,KAAK,SAAS,SAAS;AACjD,eAAS,YAAY,YAAY;AACjC,eAAS,WAAW,YAAY,SAAS,YAAY,QAAQ;AAAA,IAE/D,SAAS,OAAO;AACd,cAAQ,MAAM,0BAA0B,KAAK;AAAA,IAC/C;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,UAAU,KAAa,SAAoC;AACvE,UAAM,QAAkB,CAAC;AACzB,QAAI;AACF,YAAM,UAAU,MAAM,GAAG,QAAQ,GAAG;AACpC,iBAAW,SAAS,SAAS;AAC3B,YAAI,KAAK,eAAe,OAAO,OAAO,GAAG;AACvC,gBAAM,KAAK,KAAK,KAAK,KAAK,KAAK,CAAC;AAAA,QAClC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,wBAAwB,KAAK;AAAA,IAC7C;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,UAAkB,SAA0B;AACjE,UAAM,QAAQ,IAAI,OAAO,QAAQ,QAAQ,OAAO,IAAI,CAAC;AACrD,WAAO,MAAM,KAAK,QAAQ;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,WAAoC;AACpE,QAAI;AACF,UAAI,UAAU;AACd,cAAQ,WAAW;AAA
A,QACjB,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,MACJ;AAEA,UAAI,SAAS;AACX,cAAM,SAAS,SAAS,SAAS,EAAE,UAAU,SAAS,OAAO,OAAO,CAAC;AAGrE,cAAM,QAAQ,OAAO,MAAM,qBAAqB;AAChD,YAAI,OAAO;AACT,iBAAO,WAAW,MAAM,CAAC,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,6BAA6B,KAAK;AAAA,IAClD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,SAAS,WAA+D;AACpF,QAAI;AACF,UAAI,UAAU;AACd,cAAQ,WAAW;AAAA,QACjB,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,QACF,KAAK;AACH,oBAAU;AACV;AAAA,MACJ;AAEA,UAAI,SAAS;AACX,cAAM,SAAS,SAAS,SAAS,EAAE,UAAU,SAAS,OAAO,OAAO,CAAC;AAErE,YAAI;AACF,gBAAM,OAAO,KAAK,MAAM,MAAM;AAE9B,iBAAO;AAAA,YACL,OAAO,KAAK,iBAAiB,KAAK,SAAS;AAAA,YAC3C,QAAQ,KAAK,kBAAkB,KAAK,UAAU;AAAA,UAChD;AAAA,QACF,QAAQ;AAEN,gBAAM,aAAa,OAAO,MAAM,iBAAiB;AACjD,gBAAM,cAAc,OAAO,MAAM,eAAe;AAEhD,iBAAO;AAAA,YACL,OAAO,aAAa,SAAS,WAAW,CAAC,CAAC,IAAI;AAAA,YAC9C,QAAQ,cAAc,SAAS,YAAY,CAAC,CAAC,IAAI;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,0BAA0B,KAAK;AAAA,IAC/C;AAEA,WAAO,EAAE,OAAO,GAAG,QAAQ,EAAE;AAAA,EAC/B;AACF;AAGA,eAAsB,yBAAwC;AAC5D,QAAM,cAAc,IAAI,kBAAkB;AAC1C,QAAM,YAAY,WAAW;AAAA,IAC3B,UAAU,CAAC;AAAA,IACX,QAAQ,EAAE,MAAM,gBAAgB,SAAS,SAAS,SAAS,KAAK;AAAA,IAChE,SAAS;AAAA,IACT,eAAe,MAAM;AAAA,IACrB,oBAAoB,MAAM;AAAA,IAAC;AAAA,EAC7B,CAAC;AAED,QAAM,YAAY,IAAI,cAAc,WAAW;AAE/C,QAAM,SAAS,MAAM,UAAU,cAAc;AAAA,IAC3C,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,kBAAkB;AAAA,IAClB,yBAAyB;AAAA,IACzB,cAAc;AAAA,EAChB,CAAC;AAED,UAAQ,IAAI,aAAa,OAAO,SAAS,eAAe,OAAO,QAAQ,YAAY;AACnF,UAAQ,IAAI,cAAc,OAAO,QAAQ,GAAG;AAC5C,UAAQ,IAAI,gBAAgB,OAAO,UAAU,aAAa;AAC5D;",
6
+ "names": []
7
+ }
@@ -1,5 +1,9 @@
1
1
  import { ChromaDBAdapter } from "../core/storage/chromadb-adapter.js";
2
2
  import { Logger } from "../core/monitoring/logger.js";
3
+ import {
4
+ isChromaDBEnabled,
5
+ getChromaDBConfig
6
+ } from "../core/config/storage-config.js";
3
7
  import * as fs from "fs";
4
8
  import * as path from "path";
5
9
  import * as crypto from "crypto";
@@ -11,27 +15,57 @@ class RepoIngestionSkill {
11
15
  this.userId = userId;
12
16
  this.teamId = teamId;
13
17
  this.logger = new Logger("RepoIngestionSkill");
14
- this.adapter = new ChromaDBAdapter(
15
- {
16
- ...config,
17
- collectionName: config.collectionName || "stackmemory_repos"
18
- },
19
- userId,
20
- teamId
21
- );
18
+ this.chromaEnabled = isChromaDBEnabled();
19
+ if (this.chromaEnabled) {
20
+ const chromaConfig = getChromaDBConfig();
21
+ if (chromaConfig && chromaConfig.apiKey) {
22
+ this.adapter = new ChromaDBAdapter(
23
+ {
24
+ apiKey: config?.apiKey || chromaConfig.apiKey,
25
+ tenant: config?.tenant || chromaConfig.tenant || "default_tenant",
26
+ database: config?.database || chromaConfig.database || "default_database",
27
+ collectionName: config?.collectionName || "stackmemory_repos"
28
+ },
29
+ userId,
30
+ teamId
31
+ );
32
+ }
33
+ }
22
34
  }
23
35
  logger;
24
- adapter;
36
+ adapter = null;
25
37
  metadataCache = /* @__PURE__ */ new Map();
26
38
  fileHashCache = /* @__PURE__ */ new Map();
39
+ chromaEnabled = false;
40
+ /**
41
+ * Check if ChromaDB is available for use
42
+ */
43
+ isAvailable() {
44
+ return this.chromaEnabled && this.adapter !== null;
45
+ }
27
46
  async initialize() {
28
- await this.adapter.initialize();
47
+ if (!this.isAvailable()) {
48
+ this.logger.warn(
49
+ "ChromaDB not enabled. Repository ingestion features are unavailable."
50
+ );
51
+ this.logger.warn('Run "stackmemory init --chromadb" to enable ChromaDB.');
52
+ return;
53
+ }
54
+ if (this.adapter) {
55
+ await this.adapter.initialize();
56
+ }
29
57
  await this.loadMetadataCache();
30
58
  }
31
59
  /**
32
60
  * Ingest a repository into ChromaDB
33
61
  */
34
62
  async ingestRepository(repoPath, repoName, options = {}) {
63
+ if (!this.isAvailable()) {
64
+ return {
65
+ success: false,
66
+ message: 'ChromaDB not enabled. Run "stackmemory init --chromadb" to enable semantic search features.'
67
+ };
68
+ }
35
69
  const startTime = Date.now();
36
70
  try {
37
71
  this.logger.info(`Starting repository ingestion for ${repoName}`);
@@ -199,6 +233,10 @@ class RepoIngestionSkill {
199
233
  * Search code in ingested repositories
200
234
  */
201
235
  async searchCode(query, options) {
236
+ if (!this.isAvailable() || !this.adapter) {
237
+ this.logger.warn("ChromaDB not enabled. Code search unavailable.");
238
+ return [];
239
+ }
202
240
  try {
203
241
  const filters = {
204
242
  type: ["code_chunk"]
@@ -418,11 +456,17 @@ class RepoIngestionSkill {
418
456
  * Store a chunk in ChromaDB
419
457
  */
420
458
  async storeChunk(chunk, metadata) {
459
+ if (!this.adapter) {
460
+ throw new Error("ChromaDB adapter not available");
461
+ }
421
462
  const documentContent = `File: ${chunk.filePath} (Lines ${chunk.startLine}-${chunk.endLine})
422
463
  Language: ${chunk.language}
423
464
  Repository: ${metadata.repoName}/${metadata.branch}
424
465
 
425
466
  ${chunk.content}`;
467
+ if (!this.adapter) {
468
+ throw new Error("ChromaDB adapter not initialized");
469
+ }
426
470
  await this.adapter.storeContext("observation", documentContent, {
427
471
  type: "code_chunk",
428
472
  repo_id: metadata.repoId,
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/skills/repo-ingestion-skill.ts"],
4
- "sourcesContent": ["/**\n * Repository Ingestion Skill for ChromaDB\n *\n * Ingests and maintains code repositories in ChromaDB for enhanced code search and context\n */\n\nimport { ChromaDBAdapter } from '../core/storage/chromadb-adapter.js';\nimport { Logger } from '../core/monitoring/logger.js';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as crypto from 'crypto';\nimport { execSync } from 'child_process';\nimport ignore from 'ignore';\n\nexport interface RepoIngestionOptions {\n incremental?: boolean;\n forceUpdate?: boolean;\n includeTests?: boolean;\n includeDocs?: boolean;\n maxFileSize?: number;\n chunkSize?: number;\n extensions?: string[];\n excludePatterns?: string[];\n}\n\nexport interface RepoMetadata {\n repoId: string;\n repoName: string;\n branch: string;\n lastCommit: string;\n lastIngested: number;\n filesCount: number;\n totalSize: number;\n language: string;\n framework?: string;\n}\n\nexport interface FileChunk {\n id: string;\n filePath: string;\n content: string;\n startLine: number;\n endLine: number;\n hash: string;\n language: string;\n}\n\nexport class RepoIngestionSkill {\n private logger: Logger;\n private adapter: ChromaDBAdapter;\n private metadataCache: Map<string, RepoMetadata> = new Map();\n private fileHashCache: Map<string, string> = new Map();\n\n constructor(\n private config: {\n apiKey: string;\n tenant: string;\n database: string;\n collectionName?: string;\n },\n private userId: string,\n private teamId?: string\n ) {\n this.logger = new Logger('RepoIngestionSkill');\n this.adapter = new ChromaDBAdapter(\n {\n ...config,\n collectionName: config.collectionName || 'stackmemory_repos',\n },\n userId,\n teamId\n );\n }\n\n async initialize(): Promise<void> {\n await this.adapter.initialize();\n await this.loadMetadataCache();\n }\n\n /**\n * Ingest a repository into ChromaDB\n */\n async ingestRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n 
success: boolean;\n message: string;\n stats?: {\n filesProcessed: number;\n chunksCreated: number;\n timeElapsed: number;\n totalSize: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n this.logger.info(`Starting repository ingestion for ${repoName}`);\n\n // Validate repository path\n if (!fs.existsSync(repoPath)) {\n throw new Error(`Repository path not found: ${repoPath}`);\n }\n\n // Get repository metadata\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n\n // Check if incremental update is possible\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n if (options.incremental && existingMetadata && !options.forceUpdate) {\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected since last ingestion',\n };\n }\n\n this.logger.info(\n `Incremental update: ${changedFiles.length} files changed`\n );\n }\n\n // Get files to process\n const files = await this.getRepoFiles(repoPath, options);\n this.logger.info(`Found ${files.length} files to process`);\n\n // Process files and create chunks\n let filesProcessed = 0;\n let chunksCreated = 0;\n let totalSize = 0;\n\n for (const file of files) {\n try {\n const chunks = await this.processFile(\n file,\n repoPath,\n repoName,\n metadata,\n options\n );\n\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n chunksCreated++;\n }\n\n filesProcessed++;\n totalSize += fs.statSync(file).size;\n\n // Log progress every 100 files\n if (filesProcessed % 100 === 0) {\n this.logger.info(\n `Processed ${filesProcessed}/${files.length} files`\n );\n }\n } catch (error: unknown) {\n this.logger.warn(`Failed to process file ${file}:`, error);\n }\n }\n\n // Update metadata\n metadata.filesCount = filesProcessed;\n metadata.totalSize = totalSize;\n metadata.lastIngested = Date.now();\n await 
this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n this.logger.info(\n `Repository ingestion complete: ${filesProcessed} files, ${chunksCreated} chunks in ${timeElapsed}ms`\n );\n\n return {\n success: true,\n message: `Successfully ingested ${repoName}`,\n stats: {\n filesProcessed,\n chunksCreated,\n timeElapsed,\n totalSize,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository ingestion failed:', error);\n return {\n success: false,\n message: `Failed to ingest repository: ${error instanceof Error ? error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Update an existing repository in ChromaDB\n */\n async updateRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n success: boolean;\n message: string;\n stats?: {\n filesUpdated: number;\n filesAdded: number;\n filesRemoved: number;\n timeElapsed: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n\n if (!existingMetadata) {\n // No existing data, perform full ingestion\n return this.ingestRepository(repoPath, repoName, options);\n }\n\n // Get changed files since last ingestion\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected',\n stats: {\n filesUpdated: 0,\n filesAdded: 0,\n filesRemoved: 0,\n timeElapsed: Date.now() - startTime,\n },\n };\n }\n\n let filesUpdated = 0;\n let filesAdded = 0;\n let filesRemoved = 0;\n\n for (const change of changedFiles) {\n const filePath = path.join(repoPath, change.path);\n\n if (change.status === 'deleted') {\n await this.removeFileChunks(change.path, metadata.repoId);\n filesRemoved++;\n } else if (change.status === 'added') {\n const chunks = await 
this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesAdded++;\n } else if (change.status === 'modified') {\n // Remove old chunks and add new ones\n await this.removeFileChunks(change.path, metadata.repoId);\n const chunks = await this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesUpdated++;\n }\n }\n\n // Update metadata\n metadata.lastIngested = Date.now();\n await this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n return {\n success: true,\n message: `Successfully updated ${repoName}`,\n stats: {\n filesUpdated,\n filesAdded,\n filesRemoved,\n timeElapsed,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository update failed:', error);\n return {\n success: false,\n message: `Failed to update repository: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Search code in ingested repositories\n */\n async searchCode(\n query: string,\n options?: {\n repoName?: string;\n language?: string;\n limit?: number;\n includeContext?: boolean;\n }\n ): Promise<\n Array<{\n filePath: string;\n content: string;\n score: number;\n startLine: number;\n endLine: number;\n repoName: string;\n }>\n > {\n try {\n const filters: Record<string, unknown> = {\n type: ['code_chunk'],\n };\n\n if (options?.repoName) {\n filters.repo_name = options.repoName;\n }\n\n if (options?.language) {\n filters.language = options.language;\n }\n\n const results = await this.adapter.queryContexts(\n query,\n options?.limit || 20,\n filters\n );\n\n return results.map((result) => ({\n filePath: result.metadata.file_path,\n content: result.content,\n score: 1 - result.distance, // Convert distance to similarity score\n startLine: result.metadata.start_line,\n endLine: result.metadata.end_line,\n repoName: result.metadata.repo_name,\n }));\n } catch (error: unknown) {\n this.logger.error('Code search failed:', error);\n return [];\n }\n }\n\n /**\n * Get repository metadata\n */\n private async getRepoMetadata(\n repoPath: string,\n repoName: string\n ): Promise<RepoMetadata> {\n const branch = this.getCurrentBranch(repoPath);\n const lastCommit = this.getLastCommit(repoPath);\n const repoId = `${repoName}_${branch}`.replace(/[^a-zA-Z0-9_-]/g, '_');\n\n // Detect primary language and framework\n const { language, framework } =\n await this.detectLanguageAndFramework(repoPath);\n\n return {\n repoId,\n repoName,\n branch,\n lastCommit,\n lastIngested: Date.now(),\n filesCount: 0,\n totalSize: 0,\n language,\n framework,\n };\n }\n\n /**\n * Get current git branch\n */\n private getCurrentBranch(repoPath: string): string {\n try {\n return execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'main';\n }\n }\n\n /**\n * Get last commit 
hash\n */\n private getLastCommit(repoPath: string): string {\n try {\n return execSync('git rev-parse HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'unknown';\n }\n }\n\n /**\n * Get changed files between commits\n */\n private async getChangedFiles(\n repoPath: string,\n fromCommit: string,\n toCommit: string\n ): Promise<Array<{ path: string; status: string }>> {\n try {\n const diff = execSync(\n `git diff --name-status ${fromCommit}..${toCommit}`,\n {\n cwd: repoPath,\n encoding: 'utf8',\n }\n );\n\n return diff\n .split('\\n')\n .filter((line) => line.trim())\n .map((line) => {\n const [status, ...pathParts] = line.split('\\t');\n return {\n path: pathParts.join('\\t'),\n status:\n status === 'A'\n ? 'added'\n : status === 'D'\n ? 'deleted'\n : 'modified',\n };\n });\n } catch {\n return [];\n }\n }\n\n /**\n * Get repository files to process\n */\n private async getRepoFiles(\n repoPath: string,\n options: RepoIngestionOptions\n ): Promise<string[]> {\n const files: string[] = [];\n const ig = ignore();\n\n // Load .gitignore if it exists\n const gitignorePath = path.join(repoPath, '.gitignore');\n if (fs.existsSync(gitignorePath)) {\n ig.add(fs.readFileSync(gitignorePath, 'utf8'));\n }\n\n // Add default exclude patterns\n const defaultExcludes = [\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n '.env',\n '*.log',\n ...(options.excludePatterns || []),\n ];\n ig.add(defaultExcludes);\n\n // Default extensions to include\n const extensions = options.extensions || [\n '.ts',\n '.tsx',\n '.js',\n '.jsx',\n '.py',\n '.java',\n '.go',\n '.rs',\n '.c',\n '.cpp',\n '.h',\n '.hpp',\n '.cs',\n '.rb',\n '.php',\n '.swift',\n '.kt',\n '.scala',\n '.r',\n '.m',\n '.sql',\n '.yaml',\n '.yml',\n '.json',\n ];\n\n // Add documentation if requested\n if (options.includeDocs) {\n extensions.push('.md', '.rst', '.txt');\n }\n\n const maxFileSize = options.maxFileSize || 1024 * 1024; // 1MB default\n\n const walkDir = (dir: 
string, baseDir: string = repoPath) => {\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath);\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n walkDir(fullPath, baseDir);\n } else if (entry.isFile()) {\n const ext = path.extname(entry.name);\n\n // Check if file should be included\n if (!extensions.includes(ext)) {\n continue;\n }\n\n // Check if it's a test file\n if (\n !options.includeTests &&\n (entry.name.includes('.test.') ||\n entry.name.includes('.spec.') ||\n relativePath.includes('__tests__') ||\n relativePath.includes('test/') ||\n relativePath.includes('tests/'))\n ) {\n continue;\n }\n\n // Check file size\n const stats = fs.statSync(fullPath);\n if (stats.size > maxFileSize) {\n this.logger.debug(`Skipping large file: ${relativePath}`);\n continue;\n }\n\n files.push(fullPath);\n }\n }\n };\n\n walkDir(repoPath);\n return files;\n }\n\n /**\n * Process a file into chunks\n */\n private async processFile(\n filePath: string,\n repoPath: string,\n repoName: string,\n metadata: RepoMetadata,\n options: RepoIngestionOptions\n ): Promise<FileChunk[]> {\n const relativePath = path.relative(repoPath, filePath);\n const content = fs.readFileSync(filePath, 'utf8');\n const lines = content.split('\\n');\n const language = this.detectFileLanguage(filePath);\n\n const chunkSize = options.chunkSize || 100; // 100 lines per chunk\n const chunks: FileChunk[] = [];\n\n // Calculate file hash for caching\n const fileHash = crypto.createHash('md5').update(content).digest('hex');\n\n // Check if file has changed\n const cachedHash = this.fileHashCache.get(relativePath);\n if (cachedHash === fileHash && !options.forceUpdate) {\n return []; // File hasn't changed\n }\n\n this.fileHashCache.set(relativePath, fileHash);\n\n // Split into chunks\n for (let i = 0; i < lines.length; i += 
chunkSize) {\n const chunkLines = lines.slice(i, Math.min(i + chunkSize, lines.length));\n const chunkContent = chunkLines.join('\\n');\n\n if (chunkContent.trim().length === 0) {\n continue; // Skip empty chunks\n }\n\n const chunkId = `${metadata.repoId}_${relativePath}_${i}`;\n const chunkHash = crypto\n .createHash('md5')\n .update(chunkContent)\n .digest('hex');\n\n chunks.push({\n id: chunkId,\n filePath: relativePath,\n content: chunkContent,\n startLine: i + 1,\n endLine: Math.min(i + chunkSize, lines.length),\n hash: chunkHash,\n language,\n });\n }\n\n return chunks;\n }\n\n /**\n * Store a chunk in ChromaDB\n */\n private async storeChunk(\n chunk: FileChunk,\n metadata: RepoMetadata\n ): Promise<void> {\n const documentContent = `File: ${chunk.filePath} (Lines ${chunk.startLine}-${chunk.endLine})\nLanguage: ${chunk.language}\nRepository: ${metadata.repoName}/${metadata.branch}\n\n${chunk.content}`;\n\n await this.adapter.storeContext('observation', documentContent, {\n type: 'code_chunk',\n repo_id: metadata.repoId,\n repo_name: metadata.repoName,\n branch: metadata.branch,\n file_path: chunk.filePath,\n start_line: chunk.startLine,\n end_line: chunk.endLine,\n language: chunk.language,\n framework: metadata.framework,\n chunk_hash: chunk.hash,\n last_commit: metadata.lastCommit,\n });\n }\n\n /**\n * Remove file chunks from ChromaDB\n */\n private async removeFileChunks(\n filePath: string,\n repoId: string\n ): Promise<void> {\n // This would need to be implemented in ChromaDBAdapter\n // For now, we'll log it\n this.logger.debug(\n `Would remove chunks for file: ${filePath} from repo: ${repoId}`\n );\n }\n\n /**\n * Detect file language\n */\n private detectFileLanguage(filePath: string): string {\n const ext = path.extname(filePath).toLowerCase();\n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.py': 'python',\n '.java': 'java',\n '.go': 'go',\n '.rs': 
'rust',\n '.c': 'c',\n '.cpp': 'cpp',\n '.cs': 'csharp',\n '.rb': 'ruby',\n '.php': 'php',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.scala': 'scala',\n '.r': 'r',\n '.sql': 'sql',\n '.yaml': 'yaml',\n '.yml': 'yaml',\n '.json': 'json',\n '.md': 'markdown',\n };\n\n return languageMap[ext] || 'unknown';\n }\n\n /**\n * Detect language and framework\n */\n private async detectLanguageAndFramework(repoPath: string): Promise<{\n language: string;\n framework?: string;\n }> {\n // Check for package.json (JavaScript/TypeScript)\n const packageJsonPath = path.join(repoPath, 'package.json');\n if (fs.existsSync(packageJsonPath)) {\n try {\n const packageJson = JSON.parse(\n fs.readFileSync(packageJsonPath, 'utf8')\n );\n const deps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n };\n\n let framework: string | undefined;\n if (deps.react) framework = 'react';\n else if (deps.vue) framework = 'vue';\n else if (deps.angular) framework = 'angular';\n else if (deps.express) framework = 'express';\n else if (deps.next) framework = 'nextjs';\n else if (deps.svelte) framework = 'svelte';\n\n return {\n language: deps.typescript ? 
'typescript' : 'javascript',\n framework,\n };\n } catch {}\n }\n\n // Check for requirements.txt or setup.py (Python)\n if (\n fs.existsSync(path.join(repoPath, 'requirements.txt')) ||\n fs.existsSync(path.join(repoPath, 'setup.py'))\n ) {\n return { language: 'python' };\n }\n\n // Check for go.mod (Go)\n if (fs.existsSync(path.join(repoPath, 'go.mod'))) {\n return { language: 'go' };\n }\n\n // Check for Cargo.toml (Rust)\n if (fs.existsSync(path.join(repoPath, 'Cargo.toml'))) {\n return { language: 'rust' };\n }\n\n // Check for pom.xml or build.gradle (Java)\n if (\n fs.existsSync(path.join(repoPath, 'pom.xml')) ||\n fs.existsSync(path.join(repoPath, 'build.gradle'))\n ) {\n return { language: 'java' };\n }\n\n // Default to unknown\n return { language: 'unknown' };\n }\n\n /**\n * Load metadata cache\n */\n private async loadMetadataCache(): Promise<void> {\n // In a real implementation, this would load from a persistent store\n // For now, we'll just initialize an empty cache\n this.metadataCache.clear();\n }\n\n /**\n * Save metadata\n */\n private async saveMetadata(metadata: RepoMetadata): Promise<void> {\n this.metadataCache.set(metadata.repoId, metadata);\n // In a real implementation, this would persist to a store\n }\n\n /**\n * Get repository statistics\n */\n async getRepoStats(repoName?: string): Promise<{\n totalRepos: number;\n totalFiles: number;\n totalChunks: number;\n languages: Record<string, number>;\n frameworks: Record<string, number>;\n }> {\n // This would query ChromaDB for statistics\n const stats = {\n totalRepos: this.metadataCache.size,\n totalFiles: 0,\n totalChunks: 0,\n languages: {} as Record<string, number>,\n frameworks: {} as Record<string, number>,\n };\n\n for (const metadata of this.metadataCache.values()) {\n if (!repoName || metadata.repoName === repoName) {\n stats.totalFiles += metadata.filesCount;\n\n if (metadata.language) {\n stats.languages[metadata.language] =\n (stats.languages[metadata.language] || 0) + 1;\n 
}\n\n if (metadata.framework) {\n stats.frameworks[metadata.framework] =\n (stats.frameworks[metadata.framework] || 0) + 1;\n }\n }\n }\n\n return stats;\n }\n}\n"],
5
- "mappings": "AAMA,SAAS,uBAAuB;AAChC,SAAS,cAAc;AACvB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,YAAY,YAAY;AACxB,SAAS,gBAAgB;AACzB,OAAO,YAAY;AAmCZ,MAAM,mBAAmB;AAAA,EAM9B,YACU,QAMA,QACA,QACR;AARQ;AAMA;AACA;AAER,SAAK,SAAS,IAAI,OAAO,oBAAoB;AAC7C,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,QACE,GAAG;AAAA,QACH,gBAAgB,OAAO,kBAAkB;AAAA,MAC3C;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAxBQ;AAAA,EACA;AAAA,EACA,gBAA2C,oBAAI,IAAI;AAAA,EACnD,gBAAqC,oBAAI,IAAI;AAAA,EAuBrD,MAAM,aAA4B;AAChC,UAAM,KAAK,QAAQ,WAAW;AAC9B,UAAM,KAAK,kBAAkB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,WAAK,OAAO,KAAK,qCAAqC,QAAQ,EAAE;AAGhE,UAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,cAAM,IAAI,MAAM,8BAA8B,QAAQ,EAAE;AAAA,MAC1D;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAG9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAC/D,UAAI,QAAQ,eAAe,oBAAoB,CAAC,QAAQ,aAAa;AACnE,cAAM,eAAe,MAAM,KAAK;AAAA,UAC9B;AAAA,UACA,iBAAiB;AAAA,UACjB,SAAS;AAAA,QACX;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS;AAAA,UACX;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,uBAAuB,aAAa,MAAM;AAAA,QAC5C;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,OAAO;AACvD,WAAK,OAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAGzD,UAAI,iBAAiB;AACrB,UAAI,gBAAgB;AACpB,UAAI,YAAY;AAEhB,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AACrC;AAAA,UACF;AAEA;AACA,uBAAa,GAAG,SAAS,IAAI,EAAE;AAG/B,cAAI,iBAAiB,QAAQ,GAAG;AAC9B,iBAAK,OAAO;AAAA,cACV,aAAa,cAAc,IAAI,MAAM,MAAM;AAAA,YAC7C;AAAA,UACF;AAAA,QACF,SAAS,OAAgB;AACvB,eAAK,OAAO,KAAK,0BAA0B,IAAI,KAAK,KAAK;AAAA,QAC3D;AAAA,MACF;AAGA,eAAS,aAAa;AACtB,eAAS,YAAY;AACrB,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,WAAK,OAAO;AAAA,QACV,kCAAkC,cAAc,WAAW,aAAa,cAAc,WAAW;AAAA,MACnG;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,yBAAyB,QAAQ;AAAA,QAC1C,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,gCAAgC,K
AAK;AACvD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAC9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAE/D,UAAI,CAAC,kBAAkB;AAErB,eAAO,KAAK,iBAAiB,UAAU,UAAU,OAAO;AAAA,MAC1D;AAGA,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B;AAAA,QACA,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAEA,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS;AAAA,UACT,OAAO;AAAA,YACL,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,aAAa,KAAK,IAAI,IAAI;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,eAAe;AACnB,UAAI,aAAa;AACjB,UAAI,eAAe;AAEnB,iBAAW,UAAU,cAAc;AACjC,cAAM,WAAW,KAAK,KAAK,UAAU,OAAO,IAAI;AAEhD,YAAI,OAAO,WAAW,WAAW;AAC/B,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD;AAAA,QACF,WAAW,OAAO,WAAW,SAAS;AACpC,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF,WAAW,OAAO,WAAW,YAAY;AAEvC,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF;AAAA,MACF;AAGA,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,wBAAwB,QAAQ;AAAA,QACzC,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,6BAA6B,KAAK;AACpD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WACJ,OACA,SAeA;AACA,QAAI;AACF,YAAM,UAAmC;AAAA,QACvC,MAAM,CAAC,YAAY;AAAA,MACrB;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,YAAY,QAAQ;AAAA,MAC9B;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,YAAM,UAAU,MAAM,KAAK,QAAQ;AAAA,QACjC;AAAA,QACA,SAAS,SAAS;AAAA,QAClB;AAAA,MACF;AAEA,aAAO,QAAQ,IAAI,CAAC,YAAY;AAAA,QAC9B,UAAU,OAAO,SAAS;AAAA,QAC1B,SAAS,OAAO;
AAAA,QAChB,OAAO,IAAI,OAAO;AAAA;AAAA,QAClB,WAAW,OAAO,SAAS;AAAA,QAC3B,SAAS,OAAO,SAAS;AAAA,QACzB,UAAU,OAAO,SAAS;AAAA,MAC5B,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,uBAAuB,KAAK;AAC9C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,UACuB;AACvB,UAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,UAAM,aAAa,KAAK,cAAc,QAAQ;AAC9C,UAAM,SAAS,GAAG,QAAQ,IAAI,MAAM,GAAG,QAAQ,mBAAmB,GAAG;AAGrE,UAAM,EAAE,UAAU,UAAU,IAC1B,MAAM,KAAK,2BAA2B,QAAQ;AAEhD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,KAAK,IAAI;AAAA,MACvB,YAAY;AAAA,MACZ,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAA0B;AACjD,QAAI;AACF,aAAO,SAAS,mCAAmC;AAAA,QACjD,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,UAA0B;AAC9C,QAAI;AACF,aAAO,SAAS,sBAAsB;AAAA,QACpC,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,YACA,UACkD;AAClD,QAAI;AACF,YAAM,OAAO;AAAA,QACX,0BAA0B,UAAU,KAAK,QAAQ;AAAA,QACjD;AAAA,UACE,KAAK;AAAA,UACL,UAAU;AAAA,QACZ;AAAA,MACF;AAEA,aAAO,KACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC,EAC5B,IAAI,CAAC,SAAS;AACb,cAAM,CAAC,QAAQ,GAAG,SAAS,IAAI,KAAK,MAAM,GAAI;AAC9C,eAAO;AAAA,UACL,MAAM,UAAU,KAAK,GAAI;AAAA,UACzB,QACE,WAAW,MACP,UACA,WAAW,MACT,YACA;AAAA,QACV;AAAA,MACF,CAAC;AAAA,IACL,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,UACA,SACmB;AACnB,UAAM,QAAkB,CAAC;AACzB,UAAM,KAAK,OAAO;AAGlB,UAAM,gBAAgB,KAAK,KAAK,UAAU,YAAY;AACtD,QAAI,GAAG,WAAW,aAAa,GAAG;AAChC,SAAG,IAAI,GAAG,aAAa,eAAe,MAAM,CAAC;AAAA,IAC/C;AAGA,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,mBAAmB,CAAC;AAAA,IAClC;AACA,OAAG,IAAI,eAAe;AAGtB,UAAM,aAAa,QAAQ,cAAc;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;A
AAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa;AACvB,iBAAW,KAAK,OAAO,QAAQ,MAAM;AAAA,IACvC;AAEA,UAAM,cAAc,QAAQ,eAAe,OAAO;AAElD,UAAM,UAAU,CAAC,KAAa,UAAkB,aAAa;AAC3D,YAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAE3D,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAM,eAAe,KAAK,SAAS,SAAS,QAAQ;AAEpD,YAAI,GAAG,QAAQ,YAAY,GAAG;AAC5B;AAAA,QACF;AAEA,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,UAAU,OAAO;AAAA,QAC3B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,KAAK,QAAQ,MAAM,IAAI;AAGnC,cAAI,CAAC,WAAW,SAAS,GAAG,GAAG;AAC7B;AAAA,UACF;AAGA,cACE,CAAC,QAAQ,iBACR,MAAM,KAAK,SAAS,QAAQ,KAC3B,MAAM,KAAK,SAAS,QAAQ,KAC5B,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,OAAO,KAC7B,aAAa,SAAS,QAAQ,IAChC;AACA;AAAA,UACF;AAGA,gBAAM,QAAQ,GAAG,SAAS,QAAQ;AAClC,cAAI,MAAM,OAAO,aAAa;AAC5B,iBAAK,OAAO,MAAM,wBAAwB,YAAY,EAAE;AACxD;AAAA,UACF;AAEA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,QAAQ;AAChB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACA,UACA,UACA,SACsB;AACtB,UAAM,eAAe,KAAK,SAAS,UAAU,QAAQ;AACrD,UAAM,UAAU,GAAG,aAAa,UAAU,MAAM;AAChD,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,WAAW,KAAK,mBAAmB,QAAQ;AAEjD,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,SAAsB,CAAC;AAG7B,UAAM,WAAW,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAGtE,UAAM,aAAa,KAAK,cAAc,IAAI,YAAY;AACtD,QAAI,eAAe,YAAY,CAAC,QAAQ,aAAa;AACnD,aAAO,CAAC;AAAA,IACV;AAEA,SAAK,cAAc,IAAI,cAAc,QAAQ;AAG7C,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,YAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM,CAAC;AACvE,YAAM,eAAe,WAAW,KAAK,IAAI;AAEzC,UAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,MACF;AAEA,YAAM,UAAU,GAAG,SAAS,MAAM,IAAI,YAAY,IAAI,CAAC;AACvD,YAAM,YAAY,OACf,WAAW,KAAK,EAChB,OAAO,YAAY,EACnB,OAAO,KAAK;AAEf,aAAO,KAAK;AAAA,QACV,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,SAAS;AAAA,QACT,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AAAA,QAC7C,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WACZ,OACA,UACe;AACf,UAAM,kBAAkB,SAAS,MAAM,QAAQ,WAAW,MAAM,SAAS,IAAI,MAAM,OAAO;AAAA,YAClF,MAAM,QAAQ;AAAA,cACZ,SAAS,QAAQ,IAAI,SAAS,MAAM;AAAA;AAAA,EAEhD,MAAM
,OAAO;AAEX,UAAM,KAAK,QAAQ,aAAa,eAAe,iBAAiB;AAAA,MAC9D,MAAM;AAAA,MACN,SAAS,SAAS;AAAA,MAClB,WAAW,SAAS;AAAA,MACpB,QAAQ,SAAS;AAAA,MACjB,WAAW,MAAM;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,WAAW,SAAS;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,aAAa,SAAS;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,UACA,QACe;AAGf,SAAK,OAAO;AAAA,MACV,iCAAiC,QAAQ,eAAe,MAAM;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAA0B;AACnD,UAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,UAAM,cAAsC;AAAA,MAC1C,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,OAAO;AAAA,MACP,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAEA,WAAO,YAAY,GAAG,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,UAGtC;AAED,UAAM,kBAAkB,KAAK,KAAK,UAAU,cAAc;AAC1D,QAAI,GAAG,WAAW,eAAe,GAAG;AAClC,UAAI;AACF,cAAM,cAAc,KAAK;AAAA,UACvB,GAAG,aAAa,iBAAiB,MAAM;AAAA,QACzC;AACA,cAAM,OAAO;AAAA,UACX,GAAG,YAAY;AAAA,UACf,GAAG,YAAY;AAAA,QACjB;AAEA,YAAI;AACJ,YAAI,KAAK,MAAO,aAAY;AAAA,iBACnB,KAAK,IAAK,aAAY;AAAA,iBACtB,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,KAAM,aAAY;AAAA,iBACvB,KAAK,OAAQ,aAAY;AAElC,eAAO;AAAA,UACL,UAAU,KAAK,aAAa,eAAe;AAAA,UAC3C;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,kBAAkB,CAAC,KACrD,GAAG,WAAW,KAAK,KAAK,UAAU,UAAU,CAAC,GAC7C;AACA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,QAAQ,CAAC,GAAG;AAChD,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,YAAY,CAAC,GAAG;AACpD,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,SAAS,CAAC,KAC5C,GAAG,WAAW,KAAK,KAAK,UAAU,cAAc,CAAC,GACjD;AACA,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAG/C,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,UAAuC;AAChE,SAAK,cAAc,IAAI,S
AAS,QAAQ,QAAQ;AAAA,EAElD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAMhB;AAED,UAAM,QAAQ;AAAA,MACZ,YAAY,KAAK,cAAc;AAAA,MAC/B,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,WAAW,CAAC;AAAA,MACZ,YAAY,CAAC;AAAA,IACf;AAEA,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AAClD,UAAI,CAAC,YAAY,SAAS,aAAa,UAAU;AAC/C,cAAM,cAAc,SAAS;AAE7B,YAAI,SAAS,UAAU;AACrB,gBAAM,UAAU,SAAS,QAAQ,KAC9B,MAAM,UAAU,SAAS,QAAQ,KAAK,KAAK;AAAA,QAChD;AAEA,YAAI,SAAS,WAAW;AACtB,gBAAM,WAAW,SAAS,SAAS,KAChC,MAAM,WAAW,SAAS,SAAS,KAAK,KAAK;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;",
4
+ "sourcesContent": ["/**\n * Repository Ingestion Skill for ChromaDB\n *\n * Ingests and maintains code repositories in ChromaDB for enhanced code search and context\n */\n\nimport { ChromaDBAdapter } from '../core/storage/chromadb-adapter.js';\nimport { Logger } from '../core/monitoring/logger.js';\nimport {\n isChromaDBEnabled,\n getChromaDBConfig,\n} from '../core/config/storage-config.js';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as crypto from 'crypto';\nimport { execSync } from 'child_process';\nimport ignore from 'ignore';\n\nexport interface RepoIngestionOptions {\n incremental?: boolean;\n forceUpdate?: boolean;\n includeTests?: boolean;\n includeDocs?: boolean;\n maxFileSize?: number;\n chunkSize?: number;\n extensions?: string[];\n excludePatterns?: string[];\n}\n\nexport interface RepoMetadata {\n repoId: string;\n repoName: string;\n branch: string;\n lastCommit: string;\n lastIngested: number;\n filesCount: number;\n totalSize: number;\n language: string;\n framework?: string;\n}\n\nexport interface FileChunk {\n id: string;\n filePath: string;\n content: string;\n startLine: number;\n endLine: number;\n hash: string;\n language: string;\n}\n\nexport class RepoIngestionSkill {\n private logger: Logger;\n private adapter: ChromaDBAdapter | null = null;\n private metadataCache: Map<string, RepoMetadata> = new Map();\n private fileHashCache: Map<string, string> = new Map();\n private chromaEnabled: boolean = false;\n\n constructor(\n private config: {\n apiKey?: string;\n tenant?: string;\n database?: string;\n collectionName?: string;\n } | null,\n private userId: string,\n private teamId?: string\n ) {\n this.logger = new Logger('RepoIngestionSkill');\n\n // Check if ChromaDB is enabled via storage config\n this.chromaEnabled = isChromaDBEnabled();\n\n if (this.chromaEnabled) {\n const chromaConfig = getChromaDBConfig();\n if (chromaConfig && chromaConfig.apiKey) {\n this.adapter = new ChromaDBAdapter(\n {\n apiKey: 
config?.apiKey || chromaConfig.apiKey,\n tenant: config?.tenant || chromaConfig.tenant || 'default_tenant',\n database:\n config?.database || chromaConfig.database || 'default_database',\n collectionName: config?.collectionName || 'stackmemory_repos',\n },\n userId,\n teamId\n );\n }\n }\n }\n\n /**\n * Check if ChromaDB is available for use\n */\n isAvailable(): boolean {\n return this.chromaEnabled && this.adapter !== null;\n }\n\n async initialize(): Promise<void> {\n if (!this.isAvailable()) {\n this.logger.warn(\n 'ChromaDB not enabled. Repository ingestion features are unavailable.'\n );\n this.logger.warn('Run \"stackmemory init --chromadb\" to enable ChromaDB.');\n return;\n }\n\n if (this.adapter) {\n await this.adapter.initialize();\n }\n await this.loadMetadataCache();\n }\n\n /**\n * Ingest a repository into ChromaDB\n */\n async ingestRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n success: boolean;\n message: string;\n stats?: {\n filesProcessed: number;\n chunksCreated: number;\n timeElapsed: number;\n totalSize: number;\n };\n }> {\n if (!this.isAvailable()) {\n return {\n success: false,\n message:\n 'ChromaDB not enabled. 
Run \"stackmemory init --chromadb\" to enable semantic search features.',\n };\n }\n\n const startTime = Date.now();\n\n try {\n this.logger.info(`Starting repository ingestion for ${repoName}`);\n\n // Validate repository path\n if (!fs.existsSync(repoPath)) {\n throw new Error(`Repository path not found: ${repoPath}`);\n }\n\n // Get repository metadata\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n\n // Check if incremental update is possible\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n if (options.incremental && existingMetadata && !options.forceUpdate) {\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected since last ingestion',\n };\n }\n\n this.logger.info(\n `Incremental update: ${changedFiles.length} files changed`\n );\n }\n\n // Get files to process\n const files = await this.getRepoFiles(repoPath, options);\n this.logger.info(`Found ${files.length} files to process`);\n\n // Process files and create chunks\n let filesProcessed = 0;\n let chunksCreated = 0;\n let totalSize = 0;\n\n for (const file of files) {\n try {\n const chunks = await this.processFile(\n file,\n repoPath,\n repoName,\n metadata,\n options\n );\n\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n chunksCreated++;\n }\n\n filesProcessed++;\n totalSize += fs.statSync(file).size;\n\n // Log progress every 100 files\n if (filesProcessed % 100 === 0) {\n this.logger.info(\n `Processed ${filesProcessed}/${files.length} files`\n );\n }\n } catch (error: unknown) {\n this.logger.warn(`Failed to process file ${file}:`, error);\n }\n }\n\n // Update metadata\n metadata.filesCount = filesProcessed;\n metadata.totalSize = totalSize;\n metadata.lastIngested = Date.now();\n await this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n 
this.logger.info(\n `Repository ingestion complete: ${filesProcessed} files, ${chunksCreated} chunks in ${timeElapsed}ms`\n );\n\n return {\n success: true,\n message: `Successfully ingested ${repoName}`,\n stats: {\n filesProcessed,\n chunksCreated,\n timeElapsed,\n totalSize,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository ingestion failed:', error);\n return {\n success: false,\n message: `Failed to ingest repository: ${error instanceof Error ? error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Update an existing repository in ChromaDB\n */\n async updateRepository(\n repoPath: string,\n repoName: string,\n options: RepoIngestionOptions = {}\n ): Promise<{\n success: boolean;\n message: string;\n stats?: {\n filesUpdated: number;\n filesAdded: number;\n filesRemoved: number;\n timeElapsed: number;\n };\n }> {\n const startTime = Date.now();\n\n try {\n const metadata = await this.getRepoMetadata(repoPath, repoName);\n const existingMetadata = this.metadataCache.get(metadata.repoId);\n\n if (!existingMetadata) {\n // No existing data, perform full ingestion\n return this.ingestRepository(repoPath, repoName, options);\n }\n\n // Get changed files since last ingestion\n const changedFiles = await this.getChangedFiles(\n repoPath,\n existingMetadata.lastCommit,\n metadata.lastCommit\n );\n\n if (changedFiles.length === 0) {\n return {\n success: true,\n message: 'No changes detected',\n stats: {\n filesUpdated: 0,\n filesAdded: 0,\n filesRemoved: 0,\n timeElapsed: Date.now() - startTime,\n },\n };\n }\n\n let filesUpdated = 0;\n let filesAdded = 0;\n let filesRemoved = 0;\n\n for (const change of changedFiles) {\n const filePath = path.join(repoPath, change.path);\n\n if (change.status === 'deleted') {\n await this.removeFileChunks(change.path, metadata.repoId);\n filesRemoved++;\n } else if (change.status === 'added') {\n const chunks = await this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for 
(const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesAdded++;\n } else if (change.status === 'modified') {\n // Remove old chunks and add new ones\n await this.removeFileChunks(change.path, metadata.repoId);\n const chunks = await this.processFile(\n filePath,\n repoPath,\n repoName,\n metadata,\n options\n );\n for (const chunk of chunks) {\n await this.storeChunk(chunk, metadata);\n }\n filesUpdated++;\n }\n }\n\n // Update metadata\n metadata.lastIngested = Date.now();\n await this.saveMetadata(metadata);\n\n const timeElapsed = Date.now() - startTime;\n\n return {\n success: true,\n message: `Successfully updated ${repoName}`,\n stats: {\n filesUpdated,\n filesAdded,\n filesRemoved,\n timeElapsed,\n },\n };\n } catch (error: unknown) {\n this.logger.error('Repository update failed:', error);\n return {\n success: false,\n message: `Failed to update repository: ${error instanceof Error ? error.message : 'Unknown error'}`,\n };\n }\n }\n\n /**\n * Search code in ingested repositories\n */\n async searchCode(\n query: string,\n options?: {\n repoName?: string;\n language?: string;\n limit?: number;\n includeContext?: boolean;\n }\n ): Promise<\n Array<{\n filePath: string;\n content: string;\n score: number;\n startLine: number;\n endLine: number;\n repoName: string;\n }>\n > {\n if (!this.isAvailable() || !this.adapter) {\n this.logger.warn('ChromaDB not enabled. 
Code search unavailable.');\n return [];\n }\n\n try {\n const filters: Record<string, unknown> = {\n type: ['code_chunk'],\n };\n\n if (options?.repoName) {\n filters.repo_name = options.repoName;\n }\n\n if (options?.language) {\n filters.language = options.language;\n }\n\n const results = await this.adapter.queryContexts(\n query,\n options?.limit || 20,\n filters\n );\n\n return results.map((result) => ({\n filePath: result.metadata.file_path,\n content: result.content,\n score: 1 - result.distance, // Convert distance to similarity score\n startLine: result.metadata.start_line,\n endLine: result.metadata.end_line,\n repoName: result.metadata.repo_name,\n }));\n } catch (error: unknown) {\n this.logger.error('Code search failed:', error);\n return [];\n }\n }\n\n /**\n * Get repository metadata\n */\n private async getRepoMetadata(\n repoPath: string,\n repoName: string\n ): Promise<RepoMetadata> {\n const branch = this.getCurrentBranch(repoPath);\n const lastCommit = this.getLastCommit(repoPath);\n const repoId = `${repoName}_${branch}`.replace(/[^a-zA-Z0-9_-]/g, '_');\n\n // Detect primary language and framework\n const { language, framework } =\n await this.detectLanguageAndFramework(repoPath);\n\n return {\n repoId,\n repoName,\n branch,\n lastCommit,\n lastIngested: Date.now(),\n filesCount: 0,\n totalSize: 0,\n language,\n framework,\n };\n }\n\n /**\n * Get current git branch\n */\n private getCurrentBranch(repoPath: string): string {\n try {\n return execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'main';\n }\n }\n\n /**\n * Get last commit hash\n */\n private getLastCommit(repoPath: string): string {\n try {\n return execSync('git rev-parse HEAD', {\n cwd: repoPath,\n encoding: 'utf8',\n }).trim();\n } catch {\n return 'unknown';\n }\n }\n\n /**\n * Get changed files between commits\n */\n private async getChangedFiles(\n repoPath: string,\n fromCommit: string,\n toCommit: string\n 
): Promise<Array<{ path: string; status: string }>> {\n try {\n const diff = execSync(\n `git diff --name-status ${fromCommit}..${toCommit}`,\n {\n cwd: repoPath,\n encoding: 'utf8',\n }\n );\n\n return diff\n .split('\\n')\n .filter((line) => line.trim())\n .map((line) => {\n const [status, ...pathParts] = line.split('\\t');\n return {\n path: pathParts.join('\\t'),\n status:\n status === 'A'\n ? 'added'\n : status === 'D'\n ? 'deleted'\n : 'modified',\n };\n });\n } catch {\n return [];\n }\n }\n\n /**\n * Get repository files to process\n */\n private async getRepoFiles(\n repoPath: string,\n options: RepoIngestionOptions\n ): Promise<string[]> {\n const files: string[] = [];\n const ig = ignore();\n\n // Load .gitignore if it exists\n const gitignorePath = path.join(repoPath, '.gitignore');\n if (fs.existsSync(gitignorePath)) {\n ig.add(fs.readFileSync(gitignorePath, 'utf8'));\n }\n\n // Add default exclude patterns\n const defaultExcludes = [\n 'node_modules',\n '.git',\n 'dist',\n 'build',\n 'coverage',\n '.env',\n '*.log',\n ...(options.excludePatterns || []),\n ];\n ig.add(defaultExcludes);\n\n // Default extensions to include\n const extensions = options.extensions || [\n '.ts',\n '.tsx',\n '.js',\n '.jsx',\n '.py',\n '.java',\n '.go',\n '.rs',\n '.c',\n '.cpp',\n '.h',\n '.hpp',\n '.cs',\n '.rb',\n '.php',\n '.swift',\n '.kt',\n '.scala',\n '.r',\n '.m',\n '.sql',\n '.yaml',\n '.yml',\n '.json',\n ];\n\n // Add documentation if requested\n if (options.includeDocs) {\n extensions.push('.md', '.rst', '.txt');\n }\n\n const maxFileSize = options.maxFileSize || 1024 * 1024; // 1MB default\n\n const walkDir = (dir: string, baseDir: string = repoPath) => {\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath);\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n walkDir(fullPath, 
baseDir);\n } else if (entry.isFile()) {\n const ext = path.extname(entry.name);\n\n // Check if file should be included\n if (!extensions.includes(ext)) {\n continue;\n }\n\n // Check if it's a test file\n if (\n !options.includeTests &&\n (entry.name.includes('.test.') ||\n entry.name.includes('.spec.') ||\n relativePath.includes('__tests__') ||\n relativePath.includes('test/') ||\n relativePath.includes('tests/'))\n ) {\n continue;\n }\n\n // Check file size\n const stats = fs.statSync(fullPath);\n if (stats.size > maxFileSize) {\n this.logger.debug(`Skipping large file: ${relativePath}`);\n continue;\n }\n\n files.push(fullPath);\n }\n }\n };\n\n walkDir(repoPath);\n return files;\n }\n\n /**\n * Process a file into chunks\n */\n private async processFile(\n filePath: string,\n repoPath: string,\n repoName: string,\n metadata: RepoMetadata,\n options: RepoIngestionOptions\n ): Promise<FileChunk[]> {\n const relativePath = path.relative(repoPath, filePath);\n const content = fs.readFileSync(filePath, 'utf8');\n const lines = content.split('\\n');\n const language = this.detectFileLanguage(filePath);\n\n const chunkSize = options.chunkSize || 100; // 100 lines per chunk\n const chunks: FileChunk[] = [];\n\n // Calculate file hash for caching\n const fileHash = crypto.createHash('md5').update(content).digest('hex');\n\n // Check if file has changed\n const cachedHash = this.fileHashCache.get(relativePath);\n if (cachedHash === fileHash && !options.forceUpdate) {\n return []; // File hasn't changed\n }\n\n this.fileHashCache.set(relativePath, fileHash);\n\n // Split into chunks\n for (let i = 0; i < lines.length; i += chunkSize) {\n const chunkLines = lines.slice(i, Math.min(i + chunkSize, lines.length));\n const chunkContent = chunkLines.join('\\n');\n\n if (chunkContent.trim().length === 0) {\n continue; // Skip empty chunks\n }\n\n const chunkId = `${metadata.repoId}_${relativePath}_${i}`;\n const chunkHash = crypto\n .createHash('md5')\n .update(chunkContent)\n 
.digest('hex');\n\n chunks.push({\n id: chunkId,\n filePath: relativePath,\n content: chunkContent,\n startLine: i + 1,\n endLine: Math.min(i + chunkSize, lines.length),\n hash: chunkHash,\n language,\n });\n }\n\n return chunks;\n }\n\n /**\n * Store a chunk in ChromaDB\n */\n private async storeChunk(\n chunk: FileChunk,\n metadata: RepoMetadata\n ): Promise<void> {\n if (!this.adapter) {\n throw new Error('ChromaDB adapter not available');\n }\n\n const documentContent = `File: ${chunk.filePath} (Lines ${chunk.startLine}-${chunk.endLine})\nLanguage: ${chunk.language}\nRepository: ${metadata.repoName}/${metadata.branch}\n\n${chunk.content}`;\n\n if (!this.adapter) {\n throw new Error('ChromaDB adapter not initialized');\n }\n await this.adapter.storeContext('observation', documentContent, {\n type: 'code_chunk',\n repo_id: metadata.repoId,\n repo_name: metadata.repoName,\n branch: metadata.branch,\n file_path: chunk.filePath,\n start_line: chunk.startLine,\n end_line: chunk.endLine,\n language: chunk.language,\n framework: metadata.framework,\n chunk_hash: chunk.hash,\n last_commit: metadata.lastCommit,\n });\n }\n\n /**\n * Remove file chunks from ChromaDB\n */\n private async removeFileChunks(\n filePath: string,\n repoId: string\n ): Promise<void> {\n // This would need to be implemented in ChromaDBAdapter\n // For now, we'll log it\n this.logger.debug(\n `Would remove chunks for file: ${filePath} from repo: ${repoId}`\n );\n }\n\n /**\n * Detect file language\n */\n private detectFileLanguage(filePath: string): string {\n const ext = path.extname(filePath).toLowerCase();\n const languageMap: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.py': 'python',\n '.java': 'java',\n '.go': 'go',\n '.rs': 'rust',\n '.c': 'c',\n '.cpp': 'cpp',\n '.cs': 'csharp',\n '.rb': 'ruby',\n '.php': 'php',\n '.swift': 'swift',\n '.kt': 'kotlin',\n '.scala': 'scala',\n '.r': 'r',\n '.sql': 'sql',\n 
'.yaml': 'yaml',\n '.yml': 'yaml',\n '.json': 'json',\n '.md': 'markdown',\n };\n\n return languageMap[ext] || 'unknown';\n }\n\n /**\n * Detect language and framework\n */\n private async detectLanguageAndFramework(repoPath: string): Promise<{\n language: string;\n framework?: string;\n }> {\n // Check for package.json (JavaScript/TypeScript)\n const packageJsonPath = path.join(repoPath, 'package.json');\n if (fs.existsSync(packageJsonPath)) {\n try {\n const packageJson = JSON.parse(\n fs.readFileSync(packageJsonPath, 'utf8')\n );\n const deps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n };\n\n let framework: string | undefined;\n if (deps.react) framework = 'react';\n else if (deps.vue) framework = 'vue';\n else if (deps.angular) framework = 'angular';\n else if (deps.express) framework = 'express';\n else if (deps.next) framework = 'nextjs';\n else if (deps.svelte) framework = 'svelte';\n\n return {\n language: deps.typescript ? 'typescript' : 'javascript',\n framework,\n };\n } catch {}\n }\n\n // Check for requirements.txt or setup.py (Python)\n if (\n fs.existsSync(path.join(repoPath, 'requirements.txt')) ||\n fs.existsSync(path.join(repoPath, 'setup.py'))\n ) {\n return { language: 'python' };\n }\n\n // Check for go.mod (Go)\n if (fs.existsSync(path.join(repoPath, 'go.mod'))) {\n return { language: 'go' };\n }\n\n // Check for Cargo.toml (Rust)\n if (fs.existsSync(path.join(repoPath, 'Cargo.toml'))) {\n return { language: 'rust' };\n }\n\n // Check for pom.xml or build.gradle (Java)\n if (\n fs.existsSync(path.join(repoPath, 'pom.xml')) ||\n fs.existsSync(path.join(repoPath, 'build.gradle'))\n ) {\n return { language: 'java' };\n }\n\n // Default to unknown\n return { language: 'unknown' };\n }\n\n /**\n * Load metadata cache\n */\n private async loadMetadataCache(): Promise<void> {\n // In a real implementation, this would load from a persistent store\n // For now, we'll just initialize an empty cache\n 
this.metadataCache.clear();\n }\n\n /**\n * Save metadata\n */\n private async saveMetadata(metadata: RepoMetadata): Promise<void> {\n this.metadataCache.set(metadata.repoId, metadata);\n // In a real implementation, this would persist to a store\n }\n\n /**\n * Get repository statistics\n */\n async getRepoStats(repoName?: string): Promise<{\n totalRepos: number;\n totalFiles: number;\n totalChunks: number;\n languages: Record<string, number>;\n frameworks: Record<string, number>;\n }> {\n // This would query ChromaDB for statistics\n const stats = {\n totalRepos: this.metadataCache.size,\n totalFiles: 0,\n totalChunks: 0,\n languages: {} as Record<string, number>,\n frameworks: {} as Record<string, number>,\n };\n\n for (const metadata of this.metadataCache.values()) {\n if (!repoName || metadata.repoName === repoName) {\n stats.totalFiles += metadata.filesCount;\n\n if (metadata.language) {\n stats.languages[metadata.language] =\n (stats.languages[metadata.language] || 0) + 1;\n }\n\n if (metadata.framework) {\n stats.frameworks[metadata.framework] =\n (stats.frameworks[metadata.framework] || 0) + 1;\n }\n }\n }\n\n return stats;\n }\n}\n"],
5
+ "mappings": "AAMA,SAAS,uBAAuB;AAChC,SAAS,cAAc;AACvB;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,YAAY,YAAY;AACxB,SAAS,gBAAgB;AACzB,OAAO,YAAY;AAmCZ,MAAM,mBAAmB;AAAA,EAO9B,YACU,QAMA,QACA,QACR;AARQ;AAMA;AACA;AAER,SAAK,SAAS,IAAI,OAAO,oBAAoB;AAG7C,SAAK,gBAAgB,kBAAkB;AAEvC,QAAI,KAAK,eAAe;AACtB,YAAM,eAAe,kBAAkB;AACvC,UAAI,gBAAgB,aAAa,QAAQ;AACvC,aAAK,UAAU,IAAI;AAAA,UACjB;AAAA,YACE,QAAQ,QAAQ,UAAU,aAAa;AAAA,YACvC,QAAQ,QAAQ,UAAU,aAAa,UAAU;AAAA,YACjD,UACE,QAAQ,YAAY,aAAa,YAAY;AAAA,YAC/C,gBAAgB,QAAQ,kBAAkB;AAAA,UAC5C;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EArCQ;AAAA,EACA,UAAkC;AAAA,EAClC,gBAA2C,oBAAI,IAAI;AAAA,EACnD,gBAAqC,oBAAI,IAAI;AAAA,EAC7C,gBAAyB;AAAA;AAAA;AAAA;AAAA,EAsCjC,cAAuB;AACrB,WAAO,KAAK,iBAAiB,KAAK,YAAY;AAAA,EAChD;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,YAAY,GAAG;AACvB,WAAK,OAAO;AAAA,QACV;AAAA,MACF;AACA,WAAK,OAAO,KAAK,uDAAuD;AACxE;AAAA,IACF;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,WAAW;AAAA,IAChC;AACA,UAAM,KAAK,kBAAkB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,QAAI,CAAC,KAAK,YAAY,GAAG;AACvB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SACE;AAAA,MACJ;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,WAAK,OAAO,KAAK,qCAAqC,QAAQ,EAAE;AAGhE,UAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,cAAM,IAAI,MAAM,8BAA8B,QAAQ,EAAE;AAAA,MAC1D;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAG9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAC/D,UAAI,QAAQ,eAAe,oBAAoB,CAAC,QAAQ,aAAa;AACnE,cAAM,eAAe,MAAM,KAAK;AAAA,UAC9B;AAAA,UACA,iBAAiB;AAAA,UACjB,SAAS;AAAA,QACX;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS;AAAA,UACX;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,uBAAuB,aAAa,MAAM;AAAA,QAC5C;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,KAAK,aAAa,UAAU,OAAO;AACvD,WAAK,OAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAGzD,UAAI,iBAAiB;AACrB,UAAI,gBAAgB;AACpB,UAAI,YAAY;AAEhB,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AACrC;AAAA,UACF;AAEA;AACA,uB
AAa,GAAG,SAAS,IAAI,EAAE;AAG/B,cAAI,iBAAiB,QAAQ,GAAG;AAC9B,iBAAK,OAAO;AAAA,cACV,aAAa,cAAc,IAAI,MAAM,MAAM;AAAA,YAC7C;AAAA,UACF;AAAA,QACF,SAAS,OAAgB;AACvB,eAAK,OAAO,KAAK,0BAA0B,IAAI,KAAK,KAAK;AAAA,QAC3D;AAAA,MACF;AAGA,eAAS,aAAa;AACtB,eAAS,YAAY;AACrB,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,WAAK,OAAO;AAAA,QACV,kCAAkC,cAAc,WAAW,aAAa,cAAc,WAAW;AAAA,MACnG;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,yBAAyB,QAAQ;AAAA,QAC1C,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,gCAAgC,KAAK;AACvD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,UACA,UACA,UAAgC,CAAC,GAUhC;AACD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,gBAAgB,UAAU,QAAQ;AAC9D,YAAM,mBAAmB,KAAK,cAAc,IAAI,SAAS,MAAM;AAE/D,UAAI,CAAC,kBAAkB;AAErB,eAAO,KAAK,iBAAiB,UAAU,UAAU,OAAO;AAAA,MAC1D;AAGA,YAAM,eAAe,MAAM,KAAK;AAAA,QAC9B;AAAA,QACA,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAEA,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,SAAS;AAAA,UACT,OAAO;AAAA,YACL,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,aAAa,KAAK,IAAI,IAAI;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,eAAe;AACnB,UAAI,aAAa;AACjB,UAAI,eAAe;AAEnB,iBAAW,UAAU,cAAc;AACjC,cAAM,WAAW,KAAK,KAAK,UAAU,OAAO,IAAI;AAEhD,YAAI,OAAO,WAAW,WAAW;AAC/B,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD;AAAA,QACF,WAAW,OAAO,WAAW,SAAS;AACpC,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF,WAAW,OAAO,WAAW,YAAY;AAEvC,gBAAM,KAAK,iBAAiB,OAAO,MAAM,SAAS,MAAM;AACxD,gBAAM,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,KAAK,WAAW,OAAO,QAAQ;AAAA,UACvC;AACA;AAAA,QACF;AAAA,MACF;AAGA,eAAS,eAAe,KAAK,IAAI;AACjC,YAAM,KAAK,aAAa,QAAQ;AAEhC,YAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,wBAAwB,QAAQ;AAAA,QACzC,OAAO;AAAA,UACL;AAAA,UACA;AA
AA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,6BAA6B,KAAK;AACpD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,gCAAgC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WACJ,OACA,SAeA;AACA,QAAI,CAAC,KAAK,YAAY,KAAK,CAAC,KAAK,SAAS;AACxC,WAAK,OAAO,KAAK,gDAAgD;AACjE,aAAO,CAAC;AAAA,IACV;AAEA,QAAI;AACF,YAAM,UAAmC;AAAA,QACvC,MAAM,CAAC,YAAY;AAAA,MACrB;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,YAAY,QAAQ;AAAA,MAC9B;AAEA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,YAAM,UAAU,MAAM,KAAK,QAAQ;AAAA,QACjC;AAAA,QACA,SAAS,SAAS;AAAA,QAClB;AAAA,MACF;AAEA,aAAO,QAAQ,IAAI,CAAC,YAAY;AAAA,QAC9B,UAAU,OAAO,SAAS;AAAA,QAC1B,SAAS,OAAO;AAAA,QAChB,OAAO,IAAI,OAAO;AAAA;AAAA,QAClB,WAAW,OAAO,SAAS;AAAA,QAC3B,SAAS,OAAO,SAAS;AAAA,QACzB,UAAU,OAAO,SAAS;AAAA,MAC5B,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,WAAK,OAAO,MAAM,uBAAuB,KAAK;AAC9C,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,UACuB;AACvB,UAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,UAAM,aAAa,KAAK,cAAc,QAAQ;AAC9C,UAAM,SAAS,GAAG,QAAQ,IAAI,MAAM,GAAG,QAAQ,mBAAmB,GAAG;AAGrE,UAAM,EAAE,UAAU,UAAU,IAC1B,MAAM,KAAK,2BAA2B,QAAQ;AAEhD,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,cAAc,KAAK,IAAI;AAAA,MACvB,YAAY;AAAA,MACZ,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAA0B;AACjD,QAAI;AACF,aAAO,SAAS,mCAAmC;AAAA,QACjD,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,UAA0B;AAC9C,QAAI;AACF,aAAO,SAAS,sBAAsB;AAAA,QACpC,KAAK;AAAA,QACL,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AAAA,IACV,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,YACA,UACkD;AAClD,QAAI;AACF,YAAM,OAAO;AAAA,QACX,0BAA0B,UAAU,KAAK,QAAQ;AAAA,QACjD;AAAA,UACE,KAAK;AAAA,UACL,UAAU;AAAA,QACZ;AAAA,MACF;AAEA,aAAO,KACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC,EAC5B,IAAI,CAAC,SAAS;AACb,cAAM,CAAC,QAAQ,GAAG,SAAS,IAAI,KAAK,MAAM,GAAI;AAC9C,eAAO;AAAA,UACL,MAAM,UAAU,KAAK,GAAI;AAAA,UACzB,QACE,WAAW,MACP,UACA,WAAW,MACT,YACA;AAAA,QACV;AAAA
,MACF,CAAC;AAAA,IACL,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,UACA,SACmB;AACnB,UAAM,QAAkB,CAAC;AACzB,UAAM,KAAK,OAAO;AAGlB,UAAM,gBAAgB,KAAK,KAAK,UAAU,YAAY;AACtD,QAAI,GAAG,WAAW,aAAa,GAAG;AAChC,SAAG,IAAI,GAAG,aAAa,eAAe,MAAM,CAAC;AAAA,IAC/C;AAGA,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,mBAAmB,CAAC;AAAA,IAClC;AACA,OAAG,IAAI,eAAe;AAGtB,UAAM,aAAa,QAAQ,cAAc;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa;AACvB,iBAAW,KAAK,OAAO,QAAQ,MAAM;AAAA,IACvC;AAEA,UAAM,cAAc,QAAQ,eAAe,OAAO;AAElD,UAAM,UAAU,CAAC,KAAa,UAAkB,aAAa;AAC3D,YAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAE3D,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAM,eAAe,KAAK,SAAS,SAAS,QAAQ;AAEpD,YAAI,GAAG,QAAQ,YAAY,GAAG;AAC5B;AAAA,QACF;AAEA,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,UAAU,OAAO;AAAA,QAC3B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,KAAK,QAAQ,MAAM,IAAI;AAGnC,cAAI,CAAC,WAAW,SAAS,GAAG,GAAG;AAC7B;AAAA,UACF;AAGA,cACE,CAAC,QAAQ,iBACR,MAAM,KAAK,SAAS,QAAQ,KAC3B,MAAM,KAAK,SAAS,QAAQ,KAC5B,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,OAAO,KAC7B,aAAa,SAAS,QAAQ,IAChC;AACA;AAAA,UACF;AAGA,gBAAM,QAAQ,GAAG,SAAS,QAAQ;AAClC,cAAI,MAAM,OAAO,aAAa;AAC5B,iBAAK,OAAO,MAAM,wBAAwB,YAAY,EAAE;AACxD;AAAA,UACF;AAEA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,QAAQ;AAChB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACA,UACA,UACA,SACsB;AACtB,UAAM,eAAe,KAAK,SAAS,UAAU,QAAQ;AACrD,UAAM,UAAU,GAAG,aAAa,UAAU,MAAM;AAChD,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,WAAW,KAAK,mBAAmB,QAAQ;AAEjD,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,SAAsB,CAAC;AAG7B,UAAM,WAAW,OAAO,WAAW,KAAK,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAGtE,UAAM,aAAa,KAAK,cAAc,IAAI,YAAY;AACtD,QAAI,eAAe,YAAY,CAAC,QAAQ,aAAa;AACnD,aAAO,CAAC;AAAA,IACV;AAEA,SAAK,cAAc,IAAI,cAAc,QAAQ;AAG7C,aAAS,IAAI,G
AAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAChD,YAAM,aAAa,MAAM,MAAM,GAAG,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM,CAAC;AACvE,YAAM,eAAe,WAAW,KAAK,IAAI;AAEzC,UAAI,aAAa,KAAK,EAAE,WAAW,GAAG;AACpC;AAAA,MACF;AAEA,YAAM,UAAU,GAAG,SAAS,MAAM,IAAI,YAAY,IAAI,CAAC;AACvD,YAAM,YAAY,OACf,WAAW,KAAK,EAChB,OAAO,YAAY,EACnB,OAAO,KAAK;AAEf,aAAO,KAAK;AAAA,QACV,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,SAAS;AAAA,QACT,WAAW,IAAI;AAAA,QACf,SAAS,KAAK,IAAI,IAAI,WAAW,MAAM,MAAM;AAAA,QAC7C,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WACZ,OACA,UACe;AACf,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,gCAAgC;AAAA,IAClD;AAEA,UAAM,kBAAkB,SAAS,MAAM,QAAQ,WAAW,MAAM,SAAS,IAAI,MAAM,OAAO;AAAA,YAClF,MAAM,QAAQ;AAAA,cACZ,SAAS,QAAQ,IAAI,SAAS,MAAM;AAAA;AAAA,EAEhD,MAAM,OAAO;AAEX,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AACA,UAAM,KAAK,QAAQ,aAAa,eAAe,iBAAiB;AAAA,MAC9D,MAAM;AAAA,MACN,SAAS,SAAS;AAAA,MAClB,WAAW,SAAS;AAAA,MACpB,QAAQ,SAAS;AAAA,MACjB,WAAW,MAAM;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,WAAW,SAAS;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,aAAa,SAAS;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,UACA,QACe;AAGf,SAAK,OAAO;AAAA,MACV,iCAAiC,QAAQ,eAAe,MAAM;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,UAA0B;AACnD,UAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,UAAM,cAAsC;AAAA,MAC1C,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,OAAO;AAAA,MACP,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAEA,WAAO,YAAY,GAAG,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BAA2B,UAGtC;AAED,UAAM,kBAAkB,KAAK,KAAK,UAAU,cAAc;AAC1D,QAAI,GAAG,WAAW,eAAe,GAAG;AAClC,UAAI;AACF,cAAM,cAAc,KAAK;AAAA,UACvB,GAAG,aAAa,iBAAiB,MAAM;AAAA,QACzC;AACA,cAAM,OAAO;AAAA,UACX,GAAG,YAAY;AAAA,UACf,GAAG,YAAY;AAAA,QACjB;AAEA,YAAI;AACJ,YAAI,KAAK,MAAO,aAAY;AAAA,iBACnB,KAAK,IAAK,aAAY;AAAA,iB
ACtB,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,QAAS,aAAY;AAAA,iBAC1B,KAAK,KAAM,aAAY;AAAA,iBACvB,KAAK,OAAQ,aAAY;AAElC,eAAO;AAAA,UACL,UAAU,KAAK,aAAa,eAAe;AAAA,UAC3C;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,kBAAkB,CAAC,KACrD,GAAG,WAAW,KAAK,KAAK,UAAU,UAAU,CAAC,GAC7C;AACA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,QAAQ,CAAC,GAAG;AAChD,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,QAAI,GAAG,WAAW,KAAK,KAAK,UAAU,YAAY,CAAC,GAAG;AACpD,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,QACE,GAAG,WAAW,KAAK,KAAK,UAAU,SAAS,CAAC,KAC5C,GAAG,WAAW,KAAK,KAAK,UAAU,cAAc,CAAC,GACjD;AACA,aAAO,EAAE,UAAU,OAAO;AAAA,IAC5B;AAGA,WAAO,EAAE,UAAU,UAAU;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAmC;AAG/C,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,UAAuC;AAChE,SAAK,cAAc,IAAI,SAAS,QAAQ,QAAQ;AAAA,EAElD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAMhB;AAED,UAAM,QAAQ;AAAA,MACZ,YAAY,KAAK,cAAc;AAAA,MAC/B,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,WAAW,CAAC;AAAA,MACZ,YAAY,CAAC;AAAA,IACf;AAEA,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AAClD,UAAI,CAAC,YAAY,SAAS,aAAa,UAAU;AAC/C,cAAM,cAAc,SAAS;AAE7B,YAAI,SAAS,UAAU;AACrB,gBAAM,UAAU,SAAS,QAAQ,KAC9B,MAAM,UAAU,SAAS,QAAQ,KAAK,KAAK;AAAA,QAChD;AAEA,YAAI,SAAS,WAAW;AACtB,gBAAM,WAAW,SAAS,SAAS,KAChC,MAAM,WAAW,SAAS,SAAS,KAAK,KAAK;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;",
6
6
  "names": []
7
7
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stackmemoryai/stackmemory",
3
- "version": "0.5.0",
3
+ "version": "0.5.2",
4
4
  "description": "Lossless memory runtime for AI coding tools - organizes context as a call stack instead of linear chat logs, with team collaboration and infinite retention",
5
5
  "engines": {
6
6
  "node": ">=20.0.0",
@@ -74,23 +74,24 @@
74
74
  "daemons:start": "node scripts/claude-sm-autostart.js",
75
75
  "daemons:status": "node scripts/claude-sm-autostart.js status",
76
76
  "daemons:stop": "node scripts/claude-sm-autostart.js stop",
77
+ "daemon:session": "node dist/daemon/session-daemon.js",
78
+ "daemon:session:start": "node dist/daemon/session-daemon.js --session-id",
77
79
  "sync:start": "node scripts/background-sync-manager.js",
78
80
  "sync:setup": "./scripts/setup-background-sync.sh",
79
81
  "prepare": "echo 'Prepare step completed'"
80
82
  },
81
83
  "dependencies": {
82
84
  "@anthropic-ai/sdk": "^0.71.2",
85
+ "@anthropic-ai/tokenizer": "^0.0.4",
83
86
  "@aws-sdk/client-s3": "^3.958.0",
84
87
  "@browsermcp/mcp": "^0.1.3",
85
88
  "@google-cloud/storage": "^7.18.0",
86
89
  "@linear/sdk": "^68.1.0",
87
90
  "@modelcontextprotocol/sdk": "^0.5.0",
88
91
  "@stackmemoryai/stackmemory": "^0.3.19",
89
- "@types/bcryptjs": "^2.4.6",
90
92
  "@types/blessed": "^0.1.27",
91
93
  "@types/inquirer": "^9.0.9",
92
94
  "@types/pg": "^8.16.0",
93
- "bcryptjs": "^3.0.3",
94
95
  "better-sqlite3": "^9.2.2",
95
96
  "chalk": "^5.3.0",
96
97
  "chromadb": "^3.2.2",
@@ -105,17 +106,12 @@
105
106
  "helmet": "^8.1.0",
106
107
  "ignore": "^7.0.5",
107
108
  "inquirer": "^9.3.8",
108
- "ioredis": "^5.8.2",
109
- "jsonwebtoken": "^9.0.3",
110
- "jwks-rsa": "^3.2.0",
111
109
  "msgpackr": "^1.10.1",
112
110
  "ngrok": "^5.0.0-beta.2",
113
111
  "open": "^11.0.0",
114
112
  "ora": "^9.0.0",
115
113
  "pg": "^8.17.1",
116
- "puppeteer": "^24.34.0",
117
114
  "rate-limiter-flexible": "^9.0.1",
118
- "redis": "^5.10.0",
119
115
  "shell-escape": "^0.2.0",
120
116
  "socket.io": "^4.6.0",
121
117
  "socket.io-client": "^4.6.0",
@@ -161,9 +161,9 @@ class DuplicateChecker {
161
161
  },
162
162
  ],
163
163
  recommendation:
164
- duplicateCheck.similarity! > 0.95
164
+ (duplicateCheck.similarity ?? 0) > 0.95
165
165
  ? 'merge'
166
- : duplicateCheck.similarity! > 0.85
166
+ : (duplicateCheck.similarity ?? 0) > 0.85
167
167
  ? 'review'
168
168
  : 'skip',
169
169
  });
@@ -138,9 +138,9 @@ async function mergeDuplicateTasks() {
138
138
  console.log(
139
139
  ` ✅ Marked ${duplicateId} as duplicate of ${group.primaryId}`
140
140
  );
141
- } catch (error: any) {
141
+ } catch (error: unknown) {
142
142
  console.log(
143
- ` Failed to process ${duplicateId}: ${error.message}`
143
+ ` [ERROR] Failed to process ${duplicateId}: ${error instanceof Error ? error.message : String(error)}`
144
144
  );
145
145
  }
146
146
  }
@@ -156,8 +156,10 @@ async function mergeDuplicateTasks() {
156
156
  }
157
157
 
158
158
  console.log(` ✅ Group "${group.name}" processed successfully`);
159
- } catch (error: any) {
160
- console.error(` ❌ Error processing group: ${error.message}`);
159
+ } catch (error: unknown) {
160
+ console.error(
161
+ ` [ERROR] Error processing group: ${error instanceof Error ? error.message : String(error)}`
162
+ );
161
163
  }
162
164
  }
163
165