@juspay/yama 1.6.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.mcp-config.example.json +26 -0
- package/CHANGELOG.md +34 -0
- package/README.md +311 -685
- package/dist/cli/v2.cli.d.ts +13 -0
- package/dist/cli/v2.cli.js +290 -0
- package/dist/index.d.ts +12 -13
- package/dist/index.js +18 -19
- package/dist/v2/config/ConfigLoader.d.ts +50 -0
- package/dist/v2/config/ConfigLoader.js +205 -0
- package/dist/v2/config/DefaultConfig.d.ts +9 -0
- package/dist/v2/config/DefaultConfig.js +191 -0
- package/dist/v2/core/MCPServerManager.d.ts +22 -0
- package/dist/v2/core/MCPServerManager.js +92 -0
- package/dist/v2/core/SessionManager.d.ts +72 -0
- package/dist/v2/core/SessionManager.js +200 -0
- package/dist/v2/core/YamaV2Orchestrator.d.ts +112 -0
- package/dist/v2/core/YamaV2Orchestrator.js +549 -0
- package/dist/v2/prompts/EnhancementSystemPrompt.d.ts +8 -0
- package/dist/v2/prompts/EnhancementSystemPrompt.js +216 -0
- package/dist/v2/prompts/PromptBuilder.d.ts +38 -0
- package/dist/v2/prompts/PromptBuilder.js +228 -0
- package/dist/v2/prompts/ReviewSystemPrompt.d.ts +8 -0
- package/dist/v2/prompts/ReviewSystemPrompt.js +270 -0
- package/dist/v2/types/config.types.d.ts +120 -0
- package/dist/v2/types/config.types.js +5 -0
- package/dist/v2/types/mcp.types.d.ts +191 -0
- package/dist/v2/types/mcp.types.js +6 -0
- package/dist/v2/types/v2.types.d.ts +182 -0
- package/dist/v2/types/v2.types.js +42 -0
- package/dist/v2/utils/ObservabilityConfig.d.ts +22 -0
- package/dist/v2/utils/ObservabilityConfig.js +48 -0
- package/package.json +11 -9
- package/yama.config.example.yaml +214 -204
- package/dist/cli/index.d.ts +0 -12
- package/dist/cli/index.js +0 -538
- package/dist/core/ContextGatherer.d.ts +0 -110
- package/dist/core/ContextGatherer.js +0 -470
- package/dist/core/Guardian.d.ts +0 -81
- package/dist/core/Guardian.js +0 -480
- package/dist/core/providers/BitbucketProvider.d.ts +0 -105
- package/dist/core/providers/BitbucketProvider.js +0 -489
- package/dist/features/CodeReviewer.d.ts +0 -173
- package/dist/features/CodeReviewer.js +0 -1707
- package/dist/features/DescriptionEnhancer.d.ts +0 -70
- package/dist/features/DescriptionEnhancer.js +0 -511
- package/dist/features/MultiInstanceProcessor.d.ts +0 -74
- package/dist/features/MultiInstanceProcessor.js +0 -360
- package/dist/types/index.d.ts +0 -624
- package/dist/types/index.js +0 -104
- package/dist/utils/Cache.d.ts +0 -103
- package/dist/utils/Cache.js +0 -444
- package/dist/utils/ConfigManager.d.ts +0 -88
- package/dist/utils/ConfigManager.js +0 -602
- package/dist/utils/ContentSimilarityService.d.ts +0 -74
- package/dist/utils/ContentSimilarityService.js +0 -215
- package/dist/utils/ExactDuplicateRemover.d.ts +0 -77
- package/dist/utils/ExactDuplicateRemover.js +0 -361
- package/dist/utils/Logger.d.ts +0 -31
- package/dist/utils/Logger.js +0 -214
- package/dist/utils/MemoryBankManager.d.ts +0 -73
- package/dist/utils/MemoryBankManager.js +0 -310
- package/dist/utils/ParallelProcessing.d.ts +0 -140
- package/dist/utils/ParallelProcessing.js +0 -333
- package/dist/utils/ProviderLimits.d.ts +0 -58
- package/dist/utils/ProviderLimits.js +0 -143
- package/dist/utils/RetryManager.d.ts +0 -78
- package/dist/utils/RetryManager.js +0 -205
|
@@ -1,361 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Exact Duplicate Removal Utility for Multi-Instance Processing
|
|
3
|
-
* Handles deduplication of violations from multiple Neurolink SDK instances
|
|
4
|
-
*/
|
|
5
|
-
import { createHash } from "crypto";
|
|
6
|
-
import { logger } from "./Logger.js";
|
|
7
|
-
/**
 * Exact Duplicate Remover for Multi-Instance Results
 * Implements a multi-level deduplication strategy for violations produced by
 * multiple Neurolink SDK instances reviewing the same PR.
 */
export class ExactDuplicateRemover {
    /**
     * Remove exact duplicates from multiple instance results.
     *
     * Pipeline: flatten (with source tagging) -> exact-hash dedupe ->
     * normalized-hash dedupe -> same-file/line dedupe -> contribution
     * tracking and metrics.
     *
     * @param instanceResults Per-instance results ({ success, violations, instanceName }).
     * @returns { uniqueViolations, duplicatesRemoved, instanceContributions, processingMetrics }
     */
    removeDuplicates(instanceResults) {
        const startTime = Date.now();
        logger.debug("🔍 Starting exact duplicate removal process");
        // Step 1: Flatten all violations with source tracking
        const allViolations = this.flattenViolationsWithSource(instanceResults);
        logger.debug(`📊 Total violations from all instances: ${allViolations.length}`);
        // Step 2: Remove exact hash duplicates
        const exactDuplicates = this.removeExactHashDuplicates(allViolations);
        logger.debug(`🎯 Exact duplicates removed: ${exactDuplicates.removed}`);
        // Step 3: Remove normalized duplicates
        const normalizedDuplicates = this.removeNormalizedDuplicates(exactDuplicates.unique);
        logger.debug(`📝 Normalized duplicates removed: ${normalizedDuplicates.removed}`);
        // Step 4: Remove same file+line duplicates
        const finalResult = this.removeSameLineDuplicates(normalizedDuplicates.unique);
        logger.debug(`📍 Same-line duplicates removed: ${finalResult.removed}`);
        // Step 5: Track contributions and create metrics
        const instanceContributions = this.trackContributions(finalResult.unique);
        const processingTime = Date.now() - startTime;
        const metrics = {
            totalViolationsInput: allViolations.length,
            exactDuplicatesRemoved: exactDuplicates.removed,
            normalizedDuplicatesRemoved: normalizedDuplicates.removed,
            sameLineDuplicatesRemoved: finalResult.removed,
            finalUniqueViolations: finalResult.unique.length,
            // FIX: guard 0/0 -> NaN when no instance produced any violations
            // (previously logged "NaN% reduction").
            deduplicationRate: allViolations.length > 0
                ? ((allViolations.length - finalResult.unique.length) /
                    allViolations.length) *
                    100
                : 0,
            instanceContributions: Object.fromEntries(instanceContributions),
            processingTimeMs: processingTime,
        };
        logger.success(`✅ Deduplication completed: ${allViolations.length} → ${finalResult.unique.length} violations ` +
            `(${metrics.deduplicationRate.toFixed(1)}% reduction) in ${processingTime}ms`);
        return {
            uniqueViolations: finalResult.unique.map((v) => this.stripSourceInfo(v)),
            duplicatesRemoved: {
                exactDuplicates: exactDuplicates.removed,
                normalizedDuplicates: normalizedDuplicates.removed,
                sameLineDuplicates: finalResult.removed,
            },
            instanceContributions,
            processingMetrics: metrics,
        };
    }
    /**
     * Flatten violations from all instances, tagging each with its source
     * instance name and its per-instance index (used for stable unique keys).
     * Failed instances (or instances without violations) are skipped.
     */
    flattenViolationsWithSource(instanceResults) {
        const allViolations = [];
        for (const result of instanceResults) {
            if (!result.success || !result.violations) {
                logger.debug(`⚠️ Skipping failed instance: ${result.instanceName}`);
                continue;
            }
            result.violations.forEach((violation, index) => {
                allViolations.push({
                    ...violation,
                    source: result.instanceName,
                    originalIndex: index,
                });
            });
        }
        return allViolations;
    }
    /**
     * Level 1: remove byte-for-byte identical violations via SHA-256 hash.
     */
    removeExactHashDuplicates(violations) {
        const seenHashes = new Set();
        const unique = [];
        let removed = 0;
        for (const violation of violations) {
            const hash = this.createViolationHash(violation);
            if (!seenHashes.has(hash)) {
                seenHashes.add(hash);
                unique.push(violation);
            }
            else {
                removed++;
                logger.debug(`🔄 Exact duplicate removed: ${violation.issue} (${violation.source})`);
            }
        }
        return { unique, removed };
    }
    /**
     * Level 2: remove violations identical modulo whitespace, quoting,
     * punctuation and case differences.
     */
    removeNormalizedDuplicates(violations) {
        const seenNormalizedHashes = new Set();
        const unique = [];
        let removed = 0;
        for (const violation of violations) {
            const normalizedHash = this.createNormalizedViolationHash(violation);
            if (!seenNormalizedHashes.has(normalizedHash)) {
                seenNormalizedHashes.add(normalizedHash);
                unique.push(violation);
            }
            else {
                removed++;
                logger.debug(`📝 Normalized duplicate removed: ${violation.issue} (${violation.source})`);
            }
        }
        return { unique, removed };
    }
    /**
     * Level 3: remove duplicates where different instances flag the same
     * normalized code line in the same file; the higher-severity report wins.
     */
    removeSameLineDuplicates(violations) {
        const fileLineMap = new Map();
        const uniqueMap = new Map();
        let removed = 0;
        for (const violation of violations) {
            // FIX: the unique key now includes the source instance. The previous
            // `${file}_${originalIndex}` key collided when two instances reported
            // DIFFERENT violations with the same file and per-instance index,
            // silently overwriting (losing) one without counting it as removed.
            const uniqueKey = `${violation.source}_${violation.file}_${violation.originalIndex}`;
            if (!violation.file || !violation.code_snippet) {
                // Line-level matching needs both a file and a snippet; keep as-is.
                uniqueMap.set(uniqueKey, violation);
                continue;
            }
            const fileKey = violation.file;
            const lineKey = this.normalizeCodeSnippet(violation.code_snippet);
            if (!fileLineMap.has(fileKey)) {
                fileLineMap.set(fileKey, new Map());
            }
            const linesInFile = fileLineMap.get(fileKey);
            if (linesInFile.has(lineKey)) {
                // Duplicate found - resolve by severity and instance quality
                const existing = linesInFile.get(lineKey);
                const better = this.resolveDuplicateBySeverity([existing, violation]);
                if (better === violation) {
                    // Replace existing with current
                    linesInFile.set(lineKey, violation);
                    const existingKey = `${existing.source}_${existing.file}_${existing.originalIndex}`;
                    uniqueMap.delete(existingKey);
                    uniqueMap.set(uniqueKey, violation);
                }
                removed++;
                logger.debug(`📍 Same-line duplicate resolved: ${violation.issue} (${violation.source})`);
            }
            else {
                linesInFile.set(lineKey, violation);
                uniqueMap.set(uniqueKey, violation);
            }
        }
        return { unique: Array.from(uniqueMap.values()), removed };
    }
    /**
     * Create a SHA-256 hash for exact violation matching.
     * FIX: `issue`/`message` use optional chaining like `file`/`code_snippet`
     * already did, so a malformed violation no longer crashes hashing.
     */
    createViolationHash(violation) {
        const key = {
            file: violation.file?.trim(),
            code_snippet: violation.code_snippet?.trim(),
            severity: violation.severity,
            category: violation.category,
            issue: violation.issue?.trim(),
            message: violation.message?.trim(),
        };
        return createHash("sha256").update(JSON.stringify(key)).digest("hex");
    }
    /**
     * Create a SHA-256 hash for normalized (fuzzy) violation matching.
     * Missing text fields are treated as empty strings (identity for valid input).
     */
    createNormalizedViolationHash(violation) {
        const normalized = {
            file: violation.file?.toLowerCase().trim(),
            code_snippet: this.normalizeCodeSnippet(violation.code_snippet || ""),
            severity: violation.severity,
            category: violation.category,
            issue: this.normalizeText(violation.issue || ""),
            message: this.normalizeText(violation.message || ""),
        };
        return createHash("sha256")
            .update(JSON.stringify(normalized))
            .digest("hex");
    }
    /**
     * Normalize a code snippet for comparison: collapse whitespace, unify
     * quotes, strip trailing semicolons, drop braces, lowercase.
     */
    normalizeCodeSnippet(snippet) {
        return snippet
            .replace(/\s+/g, " ") // Normalize whitespace
            .replace(/['"]/g, '"') // Normalize quotes
            .replace(/;+$/, "") // Remove trailing semicolons
            .replace(/[{}]/g, "") // Remove braces for comparison
            .trim()
            .toLowerCase();
    }
    /**
     * Normalize free text for comparison: lowercase, strip punctuation,
     * collapse whitespace.
     */
    normalizeText(text) {
        return text
            .toLowerCase()
            .replace(/[^\w\s]/g, "") // Remove punctuation
            .replace(/\s+/g, " ") // Normalize whitespace
            .trim();
    }
    /**
     * Pick the highest-severity violation from the candidates.
     * Ties keep the earliest entry (could later factor in instance quality).
     * Unknown severities rank below SUGGESTION.
     */
    resolveDuplicateBySeverity(duplicates) {
        const severityOrder = {
            CRITICAL: 4,
            MAJOR: 3,
            MINOR: 2,
            SUGGESTION: 1,
        };
        return duplicates.reduce((best, current) => {
            const bestScore = severityOrder[best.severity] || 0;
            const currentScore = severityOrder[current.severity] || 0;
            // Strictly-greater keeps the earlier entry on ties (same behavior as
            // before; the redundant equal-score branch was removed).
            return currentScore > bestScore ? current : best;
        });
    }
    /**
     * Count how many surviving violations each instance contributed.
     * @returns Map of instance name -> violation count.
     */
    trackContributions(violations) {
        const contributions = new Map();
        for (const violation of violations) {
            const current = contributions.get(violation.source) || 0;
            contributions.set(violation.source, current + 1);
        }
        return contributions;
    }
    /**
     * Strip the internal source/originalIndex tracking fields before
     * returning violations to callers.
     */
    stripSourceInfo(violation) {
        const { source, originalIndex, ...cleanViolation } = violation;
        return cleanViolation;
    }
    /**
     * Remove violations that duplicate existing PR comments using AI-powered
     * semantic similarity (ContentSimilarityService). Falls back to returning
     * all violations unchanged if the AI analysis fails.
     *
     * @param similarityThreshold Percentage (0-100) at or above which a
     *   violation counts as a duplicate of an existing comment. Default 85.
     */
    async removeAgainstExistingComments(newViolations, existingComments, aiConfig, similarityThreshold = 85) {
        const startTime = Date.now();
        logger.debug("🔍 Starting semantic comment deduplication process");
        logger.debug(`📊 New violations: ${newViolations.length}, Existing comments: ${existingComments.length}`);
        logger.debug(`🎯 Similarity threshold: ${similarityThreshold}%`);
        if (newViolations.length === 0 || existingComments.length === 0) {
            logger.debug("⏭️ No violations or comments to compare, skipping deduplication");
            return {
                uniqueViolations: newViolations,
                duplicatesRemoved: 0,
                semanticMatches: [],
            };
        }
        try {
            // Lazy import keeps the AI dependency out of the synchronous dedup path.
            const { ContentSimilarityService } = await import("./ContentSimilarityService.js");
            const similarityService = new ContentSimilarityService(aiConfig);
            // Get similarity results (batch size 15 — presumably a provider limit;
            // TODO confirm against ContentSimilarityService).
            const similarityResults = await similarityService.batchCalculateSimilarity(newViolations, existingComments, 15);
            // Filter violations based on similarity threshold
            const duplicateViolationIndices = new Set();
            const semanticMatches = [];
            for (const result of similarityResults) {
                if (result.similarityScore >= similarityThreshold) {
                    duplicateViolationIndices.add(result.violationIndex);
                    const violation = newViolations[result.violationIndex];
                    const comment = existingComments[result.commentIndex];
                    semanticMatches.push({
                        violation: violation.issue,
                        comment: `Comment ${comment.id}`,
                        similarityScore: result.similarityScore,
                        reasoning: result.reasoning,
                    });
                    logger.debug(`🎯 Semantic duplicate found: "${violation.issue}" matches comment ${comment.id} ` +
                        `(${result.similarityScore}% similarity)`);
                }
            }
            // Create final list of unique violations
            const uniqueViolations = newViolations.filter((_, index) => !duplicateViolationIndices.has(index));
            const processingTime = Date.now() - startTime;
            const duplicatesRemoved = duplicateViolationIndices.size;
            logger.success(`✅ Semantic deduplication completed: ${newViolations.length} → ${uniqueViolations.length} violations ` +
                `(${duplicatesRemoved} duplicates removed) in ${processingTime}ms`);
            return {
                uniqueViolations,
                duplicatesRemoved,
                semanticMatches,
            };
        }
        catch (error) {
            logger.error(`❌ Semantic deduplication failed: ${error.message}`);
            logger.warn("⚠️ Falling back to no deduplication - returning all violations");
            // Graceful fallback: return all violations if AI analysis fails
            return {
                uniqueViolations: newViolations,
                duplicatesRemoved: 0,
                semanticMatches: [],
            };
        }
    }
    /**
     * Format detailed multi-instance deduplication statistics for logging.
     */
    getDeduplicationStats(result) {
        const metrics = result.processingMetrics;
        const contributions = Array.from(result.instanceContributions.entries())
            .map(([instance, count]) => `${instance}: ${count}`)
            .join(", ");
        return `
📊 Deduplication Statistics:
• Input violations: ${metrics.totalViolationsInput}
• Exact duplicates removed: ${metrics.exactDuplicatesRemoved}
• Normalized duplicates removed: ${metrics.normalizedDuplicatesRemoved}
• Same-line duplicates removed: ${metrics.sameLineDuplicatesRemoved}
• Final unique violations: ${metrics.finalUniqueViolations}
• Deduplication rate: ${metrics.deduplicationRate.toFixed(1)}%
• Processing time: ${metrics.processingTimeMs}ms
• Instance contributions: ${contributions}
`.trim();
    }
    /**
     * Format detailed comment-deduplication statistics for logging.
     * FIX: guards the 0/0 -> NaN case when there were no input violations.
     */
    getCommentDeduplicationStats(result) {
        const totalInput = result.uniqueViolations.length + result.duplicatesRemoved;
        const averageSimilarity = result.semanticMatches.length > 0
            ? result.semanticMatches.reduce((sum, match) => sum + match.similarityScore, 0) / result.semanticMatches.length
            : 0;
        const dedupRate = totalInput > 0 ? (result.duplicatesRemoved / totalInput) * 100 : 0;
        return `
📊 Comment Deduplication Statistics:
• Input violations: ${totalInput}
• Unique violations: ${result.uniqueViolations.length}
• Duplicates removed: ${result.duplicatesRemoved}
• Deduplication rate: ${dedupRate.toFixed(1)}%
• Semantic matches: ${result.semanticMatches.length}
• Average similarity score: ${averageSimilarity.toFixed(1)}%
`.trim();
    }
}
|
|
355
|
-
/**
 * Convenience factory for ExactDuplicateRemover instances.
 * @returns A fresh, stateless remover ready for use.
 */
export function createExactDuplicateRemover() {
    const remover = new ExactDuplicateRemover();
    return remover;
}
|
|
361
|
-
//# sourceMappingURL=ExactDuplicateRemover.js.map
|
package/dist/utils/Logger.d.ts
DELETED
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Enhanced Logger utility - Optimized from both pr-police.js and pr-describe.js
|
|
3
|
-
* Provides consistent logging across all Guardian operations
|
|
4
|
-
*/
|
|
5
|
-
import { Logger as ILogger, LogLevel, LoggerOptions } from "../types/index.js";
|
|
6
|
-
/**
 * Enhanced logging facade used across Guardian operations.
 * Supports level filtering, simple/detailed/json output formats, optional
 * chalk colorization, a startup banner, and PR-review-specific helpers
 * (violation, operation, progress).
 */
export declare class Logger implements ILogger {
    private options;
    private showBanner;
    /**
     * @param options Partial logger options merged over defaults
     *   (level "info", verbose false, format "simple", colors on).
     * @param showBanner Whether badge() prints the ASCII banner (default true).
     */
    constructor(options?: Partial<LoggerOptions>, showBanner?: boolean);
    private shouldLog;
    private formatMessage;
    private colorize;
    /** Debug-level log; emitted only when level allows AND verbose is on. */
    debug(message: string, ...args: any[]): void;
    /** Info-level log. */
    info(message: string, ...args: any[]): void;
    /** Warning-level log (written to stderr via console.warn). */
    warn(message: string, ...args: any[]): void;
    /** Error-level log (written to stderr via console.error). */
    error(message: string, ...args: any[]): void;
    /** Print the Yama ASCII banner, if enabled at construction. */
    badge(): void;
    /** Announce a new processing phase. */
    phase(message: string): void;
    /** Success message (always printed, independent of level). */
    success(message: string): void;
    /** Log an operation lifecycle event. */
    operation(operation: string, status: "started" | "completed" | "failed"): void;
    /** Log a code-review violation with severity-specific emoji/color. */
    violation(severity: string, message: string, file?: string): void;
    /** In-place progress line (overwrites via carriage return). */
    progress(current: number, total: number, operation: string): void;
    private createProgressBar;
    /** Create a child logger that prefixes every message with the context. */
    child(context: Record<string, any>): Logger;
    /** Change the minimum log level at runtime. */
    setLevel(level: LogLevel): void;
    /** Toggle verbose (debug-gating) mode at runtime. */
    setVerbose(verbose: boolean): void;
    /** Snapshot of the current logger options. */
    getConfig(): LoggerOptions;
}
|
|
29
|
-
/** Shared singleton Logger (enables debug/verbose when YAMA_DEBUG=true at load). */
export declare const logger: Logger;
/** Create an independent Logger with its own options and banner flag. */
export declare function createLogger(options?: Partial<LoggerOptions>, showBanner?: boolean): Logger;
|
|
31
|
-
//# sourceMappingURL=Logger.d.ts.map
|
package/dist/utils/Logger.js
DELETED
|
@@ -1,214 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Enhanced Logger utility - Optimized from both pr-police.js and pr-describe.js
|
|
3
|
-
* Provides consistent logging across all Guardian operations
|
|
4
|
-
*/
|
|
5
|
-
import chalk from "chalk";
|
|
6
|
-
const YAMA_BADGE = `
|
|
7
|
-
⚔️ ═══════════════════════════════════════════════════════════ ⚔️
|
|
8
|
-
██╗ ██╗ █████╗ ███╗ ███╗ █████╗
|
|
9
|
-
╚██╗ ██╔╝██╔══██╗████╗ ████║██╔══██╗
|
|
10
|
-
╚████╔╝ ███████║██╔████╔██║███████║
|
|
11
|
-
╚██╔╝ ██╔══██║██║╚██╔╝██║██╔══██║
|
|
12
|
-
██║ ██║ ██║██║ ╚═╝ ██║██║ ██║
|
|
13
|
-
╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝
|
|
14
|
-
⚔️ ═══════════════════════════════════════════════════════════ ⚔️
|
|
15
|
-
AI-Powered PR Automation • Enterprise Security • Code Quality Yama
|
|
16
|
-
`;
|
|
17
|
-
/**
 * Enhanced logger for all Guardian operations.
 * Features: level filtering (debug < info < warn < error), three output
 * formats (simple | detailed | json), optional chalk colors, an ASCII
 * banner, and PR-review helpers (operation, violation, progress).
 */
export class Logger {
    // Effective options (defaults merged with constructor overrides).
    options;
    // Whether badge() prints the banner.
    showBanner;
    constructor(options = {}, showBanner = true) {
        this.options = {
            level: "info",
            verbose: false,
            format: "simple",
            colors: true,
            ...options,
        };
        this.showBanner = showBanner;
    }
    // True when `level` is at or above the configured minimum level.
    shouldLog(level) {
        const levels = {
            debug: 0,
            info: 1,
            warn: 2,
            error: 3,
        };
        return levels[level] >= levels[this.options.level];
    }
    /**
     * Render a message per the configured format. Extra args are stringified
     * (objects as pretty JSON) and appended to the message.
     * NOTE: in "json" format the raw args are ALSO included under `args`,
     * so they appear twice (appended to message and as structured data).
     */
    formatMessage(level, message, ...args) {
        const timestamp = new Date().toISOString();
        const formattedArgs = args.length > 0
            ? ` ${args
                .map((a) => typeof a === "object" ? JSON.stringify(a, null, 2) : String(a))
                .join(" ")}`
            : "";
        switch (this.options.format) {
            case "json":
                return JSON.stringify({
                    timestamp,
                    level: level.toUpperCase(),
                    message: message + formattedArgs,
                    args: args.length > 0 ? args : undefined,
                });
            case "detailed":
                return `[${timestamp}] [${level.toUpperCase().padEnd(5)}] ${message}${formattedArgs}`;
            default: // simple
                return `${message}${formattedArgs}`;
        }
    }
    // Apply the level's chalk color; pass-through when colors are disabled.
    colorize(level, text) {
        if (!this.options.colors) {
            return text;
        }
        switch (level) {
            case "debug":
                return chalk.gray(text);
            case "info":
                return chalk.blue(text);
            case "warn":
                return chalk.yellow(text);
            case "error":
                return chalk.red(text);
            default:
                return text;
        }
    }
    // Debug output requires BOTH level "debug" and verbose mode enabled.
    debug(message, ...args) {
        if (!this.shouldLog("debug") || !this.options.verbose) {
            return;
        }
        const formatted = this.formatMessage("debug", `🔍 ${message}`, ...args);
        console.log(this.colorize("debug", formatted));
    }
    info(message, ...args) {
        if (!this.shouldLog("info")) {
            return;
        }
        const formatted = this.formatMessage("info", `ℹ️ ${message}`, ...args);
        console.log(this.colorize("info", formatted));
    }
    warn(message, ...args) {
        if (!this.shouldLog("warn")) {
            return;
        }
        const formatted = this.formatMessage("warn", `⚠️ ${message}`, ...args);
        console.warn(this.colorize("warn", formatted));
    }
    error(message, ...args) {
        if (!this.shouldLog("error")) {
            return;
        }
        const formatted = this.formatMessage("error", `❌ ${message}`, ...args);
        console.error(this.colorize("error", formatted));
    }
    // Print the Yama ASCII banner (skipped when showBanner is false).
    badge() {
        if (this.showBanner) {
            console.log(chalk.cyan(YAMA_BADGE));
        }
    }
    // Announce a processing phase; bypasses level filtering.
    phase(message) {
        const formatted = `\n🔄 ${message}`;
        console.log(this.options.colors ? chalk.magenta(formatted) : formatted);
    }
    // Success message; bypasses level filtering.
    success(message) {
        const formatted = `✅ ${message}`;
        console.log(this.options.colors ? chalk.green(formatted) : formatted);
    }
    // Log an operation lifecycle event with status-specific emoji and color.
    operation(operation, status) {
        const emoji = status === "started" ? "🚀" : status === "completed" ? "✅" : "❌";
        const color = status === "started" ? "blue" : status === "completed" ? "green" : "red";
        const message = `${emoji} ${operation.toUpperCase()}: ${status}`;
        if (this.options.colors) {
            console.log(chalk[color](message));
        }
        else {
            console.log(message);
        }
    }
    // Log a code-review violation; unknown severities fall back to 📋/white.
    violation(severity, message, file) {
        const emoji = {
            CRITICAL: "🚨",
            MAJOR: "⚠️",
            MINOR: "📝",
            SUGGESTION: "💡",
        }[severity] || "📋";
        const color = {
            CRITICAL: "red",
            MAJOR: "yellow",
            MINOR: "blue",
            SUGGESTION: "cyan",
        }[severity] || "white";
        const location = file ? ` in ${file}` : "";
        const formatted = `${emoji} ${severity}: ${message}${location}`;
        if (this.options.colors) {
            console.log(chalk[color](formatted));
        }
        else {
            console.log(formatted);
        }
    }
    // Render an in-place progress line; newline only once complete.
    progress(current, total, operation) {
        const percentage = Math.round((current / total) * 100);
        const progressBar = this.createProgressBar(percentage);
        const message = `🔄 ${operation}: ${progressBar} ${current}/${total} (${percentage}%)`;
        // Use carriage return to overwrite the line
        process.stdout.write(`\r${message}`);
        // Add newline when complete
        if (current === total) {
            process.stdout.write("\n");
        }
    }
    // 20-char filled/empty bar for progress().
    createProgressBar(percentage) {
        const width = 20;
        const filled = Math.round((percentage / 100) * width);
        const empty = width - filled;
        if (this.options.colors) {
            return chalk.green("█".repeat(filled)) + chalk.gray("░".repeat(empty));
        }
        else {
            return "█".repeat(filled) + "░".repeat(empty);
        }
    }
    /**
     * Create a child logger whose debug/info/warn/error prefix every message
     * with "k=v" pairs from `context`. Implemented by patching the child's
     * methods in place.
     * NOTE(review): the child is created with `new Logger(this.options)`, so
     * showBanner resets to its default (true) regardless of the parent —
     * possibly unintentional; confirm before relying on it.
     */
    child(context) {
        const childLogger = new Logger(this.options);
        // Override methods to include context
        const originalMethods = ["debug", "info", "warn", "error"];
        originalMethods.forEach((method) => {
            const original = childLogger[method].bind(childLogger);
            childLogger[method] = (message, ...args) => {
                const contextStr = Object.entries(context)
                    .map(([k, v]) => `${k}=${v}`)
                    .join(" ");
                original(`[${contextStr}] ${message}`, ...args);
            };
        });
        return childLogger;
    }
    // Update the minimum log level dynamically.
    setLevel(level) {
        this.options.level = level;
    }
    // Toggle verbose mode (gates debug output) dynamically.
    setVerbose(verbose) {
        this.options.verbose = verbose;
    }
    // Shallow copy of the current configuration.
    getConfig() {
        return { ...this.options };
    }
}
|
202
|
-
/**
 * Shared singleton logger with environment-aware defaults:
 * setting YAMA_DEBUG=true at module load enables debug level + verbose mode.
 */
const debugEnabled = process.env.YAMA_DEBUG === "true";
const singletonOptions = debugEnabled
    ? { level: "debug", verbose: true }
    : {};
export const logger = new Logger(singletonOptions);
|
|
210
|
-
/**
 * Factory for standalone Logger instances.
 * @param options Partial logger options (defaults applied by Logger).
 * @param showBanner Whether badge() prints the ASCII banner.
 */
export function createLogger(options, showBanner) {
    const instance = new Logger(options, showBanner);
    return instance;
}
|
|
214
|
-
//# sourceMappingURL=Logger.js.map
|
|
@@ -1,73 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Memory Bank Manager - Handles configurable memory bank operations
|
|
3
|
-
* Provides abstraction for memory bank file access with fallback support
|
|
4
|
-
*/
|
|
5
|
-
import { MemoryBankConfig, PRIdentifier } from "../types/index.js";
|
|
6
|
-
import { BitbucketProvider } from "../core/providers/BitbucketProvider.js";
|
|
7
|
-
/** A single file loaded from the repository's memory bank. */
export interface MemoryBankFile {
    /** File name (without directory). */
    name: string;
    /** Full file content. */
    content: string;
    /** Repository path the file was loaded from. */
    path: string;
}
|
|
12
|
-
/** Outcome of a memory bank lookup, including fallback resolution details. */
export interface MemoryBankResult {
    /** Files successfully retrieved. */
    files: MemoryBankFile[];
    /** The path that actually served the files (primary or a fallback). */
    resolvedPath: string;
    /** Number of files processed during retrieval. */
    filesProcessed: number;
    /** True when a fallback path was used instead of the primary path. */
    fallbackUsed: boolean;
}
|
|
18
|
-
/**
 * Handles configurable memory bank operations: abstracts memory bank file
 * access (via the Bitbucket provider) with fallback-path support and caching.
 */
export declare class MemoryBankManager {
    private config;
    private bitbucketProvider;
    constructor(config: MemoryBankConfig, bitbucketProvider: BitbucketProvider);
    /**
     * Get memory bank files from the configured path with fallback support.
     * @param forceRefresh When true, bypasses any cached results.
     */
    getMemoryBankFiles(identifier: PRIdentifier, forceRefresh?: boolean): Promise<MemoryBankResult>;
    /**
     * Try to get files from a specific path.
     */
    private tryGetFilesFromPath;
    /**
     * Get the effective memory bank path (resolved after fallback logic).
     * @returns The usable path, or null when no configured path exists.
     */
    getEffectiveMemoryBankPath(identifier: PRIdentifier): Promise<string | null>;
    /**
     * Check if memory bank exists at any configured path.
     */
    hasMemoryBank(identifier: PRIdentifier): Promise<boolean>;
    /**
     * Get memory bank configuration.
     */
    getConfig(): MemoryBankConfig;
    /**
     * Update memory bank configuration (merged partially).
     */
    updateConfig(newConfig: Partial<MemoryBankConfig>): void;
    /**
     * Validates that a path is safe for use as a relative path.
     * Protects against path traversal attacks including encoded variants.
     */
    private static isSafeRelativePath;
    /**
     * Validate memory bank configuration.
     */
    private validateConfig;
    /**
     * Clear memory bank cache for a specific repository.
     */
    clearCache(identifier: PRIdentifier): void;
    /**
     * Get memory bank statistics for a repository.
     * NOTE(review): `cacheHits` semantics (per-repo vs global counter) are not
     * visible from this declaration — confirm against the implementation.
     */
    getStats(identifier: PRIdentifier): Promise<{
        enabled: boolean;
        primaryPath: string;
        fallbackPaths: string[];
        hasMemoryBank: boolean;
        resolvedPath: string | null;
        fileCount: number;
        cacheHits: number;
    }>;
}
|
|
72
|
-
/** Factory for MemoryBankManager bound to a config and Bitbucket provider. */
export declare function createMemoryBankManager(config: MemoryBankConfig, bitbucketProvider: BitbucketProvider): MemoryBankManager;
|
|
73
|
-
//# sourceMappingURL=MemoryBankManager.d.ts.map
|