activo 0.4.3 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +203 -1
- package/data/2026-03-04_20-54.json +181 -0
- package/data/2026-03-04_20-56.json +181 -0
- package/data/apex-rulesets/egov.yaml +469 -0
- package/data/apex-rulesets/modernize.yaml +687 -0
- package/data/apex-rulesets/quality.yaml +1677 -0
- package/data/apex-rulesets/rule-schema.yaml +587 -0
- package/data/apex-rulesets/secure.yaml +1688 -0
- package/data/apex-rulesets/spring.yaml +455 -0
- package/data/apex-rulesets/sql-format.yaml +99 -0
- package/data/apex-rulesets/sql-oracle.yaml +281 -0
- package/data/apex-rulesets/sql.yaml +1660 -0
- package/dist/cli/headless.d.ts.map +1 -1
- package/dist/cli/headless.js +32 -10
- package/dist/cli/headless.js.map +1 -1
- package/dist/cli/index.js +31 -3
- package/dist/cli/index.js.map +1 -1
- package/dist/core/agent.d.ts +3 -3
- package/dist/core/agent.d.ts.map +1 -1
- package/dist/core/agent.js +255 -17
- package/dist/core/agent.js.map +1 -1
- package/dist/core/commands.d.ts +2 -1
- package/dist/core/commands.d.ts.map +1 -1
- package/dist/core/commands.js +61 -9
- package/dist/core/commands.js.map +1 -1
- package/dist/core/config.d.ts +14 -0
- package/dist/core/config.d.ts.map +1 -1
- package/dist/core/config.js +41 -4
- package/dist/core/config.js.map +1 -1
- package/dist/core/conversation.d.ts +2 -2
- package/dist/core/conversation.d.ts.map +1 -1
- package/dist/core/conversation.js.map +1 -1
- package/dist/core/intentRouter.d.ts +43 -0
- package/dist/core/intentRouter.d.ts.map +1 -0
- package/dist/core/intentRouter.js +804 -0
- package/dist/core/intentRouter.js.map +1 -0
- package/dist/core/llm/anthropic.d.ts +24 -0
- package/dist/core/llm/anthropic.d.ts.map +1 -0
- package/dist/core/llm/anthropic.js +226 -0
- package/dist/core/llm/anthropic.js.map +1 -0
- package/dist/core/llm/ollama.d.ts +5 -14
- package/dist/core/llm/ollama.d.ts.map +1 -1
- package/dist/core/llm/ollama.js +3 -0
- package/dist/core/llm/ollama.js.map +1 -1
- package/dist/core/llm/types.d.ts +22 -0
- package/dist/core/llm/types.d.ts.map +1 -0
- package/dist/core/llm/types.js +2 -0
- package/dist/core/llm/types.js.map +1 -0
- package/dist/core/mcp/client.d.ts +6 -0
- package/dist/core/mcp/client.d.ts.map +1 -1
- package/dist/core/mcp/client.js +16 -0
- package/dist/core/mcp/client.js.map +1 -1
- package/dist/core/mcp/init.d.ts +12 -0
- package/dist/core/mcp/init.d.ts.map +1 -0
- package/dist/core/mcp/init.js +55 -0
- package/dist/core/mcp/init.js.map +1 -0
- package/dist/core/mcp/logger.d.ts +14 -0
- package/dist/core/mcp/logger.d.ts.map +1 -0
- package/dist/core/mcp/logger.js +50 -0
- package/dist/core/mcp/logger.js.map +1 -0
- package/dist/core/tools/analyzeAll.d.ts.map +1 -1
- package/dist/core/tools/analyzeAll.js +16 -28
- package/dist/core/tools/analyzeAll.js.map +1 -1
- package/dist/core/tools/analyzePatterns.d.ts +3 -0
- package/dist/core/tools/analyzePatterns.d.ts.map +1 -0
- package/dist/core/tools/analyzePatterns.js +293 -0
- package/dist/core/tools/analyzePatterns.js.map +1 -0
- package/dist/core/tools/apexPaths.d.ts +14 -0
- package/dist/core/tools/apexPaths.d.ts.map +1 -0
- package/dist/core/tools/apexPaths.js +54 -0
- package/dist/core/tools/apexPaths.js.map +1 -0
- package/dist/core/tools/apexUtils.d.ts +36 -0
- package/dist/core/tools/apexUtils.d.ts.map +1 -0
- package/dist/core/tools/apexUtils.js +83 -0
- package/dist/core/tools/apexUtils.js.map +1 -0
- package/dist/core/tools/explainIssue.d.ts +3 -0
- package/dist/core/tools/explainIssue.d.ts.map +1 -0
- package/dist/core/tools/explainIssue.js +181 -0
- package/dist/core/tools/explainIssue.js.map +1 -0
- package/dist/core/tools/fixGen.d.ts +3 -0
- package/dist/core/tools/fixGen.d.ts.map +1 -0
- package/dist/core/tools/fixGen.js +338 -0
- package/dist/core/tools/fixGen.js.map +1 -0
- package/dist/core/tools/generateImprovements.d.ts +21 -0
- package/dist/core/tools/generateImprovements.d.ts.map +1 -0
- package/dist/core/tools/generateImprovements.js +602 -0
- package/dist/core/tools/generateImprovements.js.map +1 -0
- package/dist/core/tools/generateReport.d.ts +3 -0
- package/dist/core/tools/generateReport.d.ts.map +1 -0
- package/dist/core/tools/generateReport.js +315 -0
- package/dist/core/tools/generateReport.js.map +1 -0
- package/dist/core/tools/index.d.ts +7 -0
- package/dist/core/tools/index.d.ts.map +1 -1
- package/dist/core/tools/index.js +62 -23
- package/dist/core/tools/index.js.map +1 -1
- package/dist/core/tools/javaAst.d.ts.map +1 -1
- package/dist/core/tools/javaAst.js +191 -0
- package/dist/core/tools/javaAst.js.map +1 -1
- package/dist/core/tools/recommendProfile.d.ts +3 -0
- package/dist/core/tools/recommendProfile.d.ts.map +1 -0
- package/dist/core/tools/recommendProfile.js +334 -0
- package/dist/core/tools/recommendProfile.js.map +1 -0
- package/dist/core/tools/ruleGen.d.ts +3 -0
- package/dist/core/tools/ruleGen.d.ts.map +1 -0
- package/dist/core/tools/ruleGen.js +1103 -0
- package/dist/core/tools/ruleGen.js.map +1 -0
- package/dist/core/tools/standards.d.ts.map +1 -1
- package/dist/core/tools/standards.js +7 -3
- package/dist/core/tools/standards.js.map +1 -1
- package/dist/ui/App.d.ts.map +1 -1
- package/dist/ui/App.js +86 -35
- package/dist/ui/App.js.map +1 -1
- package/dist/ui/components/InputBox.d.ts +1 -3
- package/dist/ui/components/InputBox.d.ts.map +1 -1
- package/dist/ui/components/InputBox.js +146 -5
- package/dist/ui/components/InputBox.js.map +1 -1
- package/dist/ui/components/MessageList.d.ts +3 -1
- package/dist/ui/components/MessageList.d.ts.map +1 -1
- package/dist/ui/components/MessageList.js +13 -7
- package/dist/ui/components/MessageList.js.map +1 -1
- package/dist/ui/components/StatusBar.d.ts +1 -1
- package/dist/ui/components/StatusBar.d.ts.map +1 -1
- package/dist/ui/components/StatusBar.js +3 -2
- package/dist/ui/components/StatusBar.js.map +1 -1
- package/dist/ui/components/ToolStatus.d.ts +3 -1
- package/dist/ui/components/ToolStatus.d.ts.map +1 -1
- package/dist/ui/components/ToolStatus.js +19 -4
- package/dist/ui/components/ToolStatus.js.map +1 -1
- package/package.json +7 -1
- package/demo.gif +0 -0
- package/demo.tape +0 -53
- package/screenshot.png +0 -0
- package/src/cli/banner.ts +0 -38
- package/src/cli/headless.ts +0 -63
- package/src/cli/index.ts +0 -57
- package/src/core/agent.ts +0 -237
- package/src/core/commands.ts +0 -118
- package/src/core/config.ts +0 -98
- package/src/core/conversation.ts +0 -235
- package/src/core/llm/ollama.ts +0 -351
- package/src/core/mcp/client.ts +0 -143
- package/src/core/tools/analyzeAll.ts +0 -494
- package/src/core/tools/ast.ts +0 -826
- package/src/core/tools/builtIn.ts +0 -221
- package/src/core/tools/cache.ts +0 -570
- package/src/core/tools/cssAnalysis.ts +0 -324
- package/src/core/tools/dependencyAnalysis.ts +0 -363
- package/src/core/tools/embeddings.ts +0 -746
- package/src/core/tools/frontendAst.ts +0 -802
- package/src/core/tools/htmlAnalysis.ts +0 -466
- package/src/core/tools/index.ts +0 -160
- package/src/core/tools/javaAst.ts +0 -812
- package/src/core/tools/memory.ts +0 -655
- package/src/core/tools/mybatisAnalysis.ts +0 -322
- package/src/core/tools/openapiAnalysis.ts +0 -431
- package/src/core/tools/pythonAnalysis.ts +0 -477
- package/src/core/tools/sqlAnalysis.ts +0 -298
- package/src/core/tools/standards.test.ts +0 -186
- package/src/core/tools/standards.ts +0 -889
- package/src/core/tools/types.ts +0 -38
- package/src/ui/App.tsx +0 -334
- package/src/ui/components/InputBox.tsx +0 -37
- package/src/ui/components/MessageList.tsx +0 -80
- package/src/ui/components/StatusBar.tsx +0 -36
- package/src/ui/components/ToolStatus.tsx +0 -38
- package/tsconfig.json +0 -21
|
@@ -1,889 +0,0 @@
|
|
|
1
|
-
import fs from "fs";
|
|
2
|
-
import path from "path";
|
|
3
|
-
import crypto from "crypto";
|
|
4
|
-
import pdfParse from "pdf-parse";
|
|
5
|
-
import { toMarkdown as hwpToMarkdown } from "@ohah/hwpjs";
|
|
6
|
-
import { Tool, ToolResult } from "./types.js";
|
|
7
|
-
import { OllamaClient } from "../llm/ollama.js";
|
|
8
|
-
import { loadConfig } from "../config.js";
|
|
9
|
-
|
|
10
|
-
// RAG constants
// Cache directory (relative to the current working directory) where the
// standards embeddings index is persisted.
const STANDARDS_EMBEDDINGS_DIR = ".activo/standards-rag";
// Default Ollama embedding model used when indexing standards chunks.
const DEFAULT_EMBED_MODEL = "nomic-embed-text";
|
|
13
|
-
|
|
14
|
-
// RAG interfaces
// One semantically meaningful slice of a standards markdown file, as
// produced by splitStandardsIntoChunks.
interface StandardsChunk {
  // Name of the source markdown file (as listed in the standards directory).
  filepath: string;
  // Heading text of the section this chunk belongs to.
  section: string;
  // Rule identifier (e.g. "RULE-001") when the chunk is a rule block.
  ruleId?: string;
  // Raw markdown text of the chunk.
  content: string;
}
|
|
21
|
-
|
|
22
|
-
// A chunk paired with its embedding vector and a content hash used for
// change detection on re-indexing.
interface StandardsEmbedding {
  // The source chunk this embedding was computed from.
  chunk: StandardsChunk;
  // Embedding vector returned by the embed model.
  embedding: number[];
  // MD5 hex digest of the chunk content (see calculateHash).
  hash: string;
}
|
|
27
|
-
|
|
28
|
-
// Metadata describing the persisted embeddings index.
interface StandardsIndex {
  // Index format version.
  version: string;
  // Name of the embedding model the index was built with.
  model: string;
  // ISO timestamp of the first build.
  createdAt: string;
  // ISO timestamp of the most recent refresh.
  updatedAt: string;
  // Number of embedded chunks in the data file.
  totalChunks: number;
}
|
|
35
|
-
|
|
36
|
-
// RAG helper functions
|
|
37
|
-
function getStandardsRagDir(): string {
|
|
38
|
-
return path.resolve(process.cwd(), STANDARDS_EMBEDDINGS_DIR);
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
function ensureStandardsRagDir(): void {
|
|
42
|
-
const dir = getStandardsRagDir();
|
|
43
|
-
if (!fs.existsSync(dir)) {
|
|
44
|
-
fs.mkdirSync(dir, { recursive: true });
|
|
45
|
-
}
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
function getStandardsIndexPath(): string {
|
|
49
|
-
return path.join(getStandardsRagDir(), "index.json");
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
function getStandardsDataPath(): string {
|
|
53
|
-
return path.join(getStandardsRagDir(), "embeddings.json");
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
function loadStandardsIndex(): StandardsIndex | null {
|
|
57
|
-
const indexPath = getStandardsIndexPath();
|
|
58
|
-
if (fs.existsSync(indexPath)) {
|
|
59
|
-
try {
|
|
60
|
-
return JSON.parse(fs.readFileSync(indexPath, "utf-8"));
|
|
61
|
-
} catch {
|
|
62
|
-
return null;
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
return null;
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
function saveStandardsIndex(index: StandardsIndex): void {
|
|
69
|
-
ensureStandardsRagDir();
|
|
70
|
-
fs.writeFileSync(getStandardsIndexPath(), JSON.stringify(index, null, 2));
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
function loadStandardsEmbeddings(): StandardsEmbedding[] {
|
|
74
|
-
const dataPath = getStandardsDataPath();
|
|
75
|
-
if (fs.existsSync(dataPath)) {
|
|
76
|
-
try {
|
|
77
|
-
return JSON.parse(fs.readFileSync(dataPath, "utf-8"));
|
|
78
|
-
} catch {
|
|
79
|
-
return [];
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
return [];
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
function saveStandardsEmbeddings(data: StandardsEmbedding[]): void {
|
|
86
|
-
ensureStandardsRagDir();
|
|
87
|
-
fs.writeFileSync(getStandardsDataPath(), JSON.stringify(data));
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
function calculateHash(content: string): string {
|
|
91
|
-
return crypto.createHash("md5").update(content).digest("hex");
|
|
92
|
-
}
|
|
93
|
-
|
|
94
|
-
// Split markdown into semantic chunks (by sections/rules)
|
|
95
|
-
function splitStandardsIntoChunks(content: string, filepath: string): StandardsChunk[] {
|
|
96
|
-
const chunks: StandardsChunk[] = [];
|
|
97
|
-
const lines = content.split("\n");
|
|
98
|
-
|
|
99
|
-
let currentSection = "";
|
|
100
|
-
let currentRuleId: string | undefined;
|
|
101
|
-
let currentContent: string[] = [];
|
|
102
|
-
let inRule = false;
|
|
103
|
-
|
|
104
|
-
for (const line of lines) {
|
|
105
|
-
// Detect rule pattern: ## RULE-XXX: Title
|
|
106
|
-
const ruleMatch = line.match(/^##\s+(RULE-\d+):\s*(.+)/i);
|
|
107
|
-
// Detect section headers
|
|
108
|
-
const sectionMatch = line.match(/^#+\s+(.+)/);
|
|
109
|
-
|
|
110
|
-
if (ruleMatch) {
|
|
111
|
-
// Save previous chunk
|
|
112
|
-
if (currentContent.length > 0) {
|
|
113
|
-
const text = currentContent.join("\n").trim();
|
|
114
|
-
if (text) {
|
|
115
|
-
chunks.push({
|
|
116
|
-
filepath,
|
|
117
|
-
section: currentSection,
|
|
118
|
-
ruleId: currentRuleId,
|
|
119
|
-
content: text,
|
|
120
|
-
});
|
|
121
|
-
}
|
|
122
|
-
}
|
|
123
|
-
currentRuleId = ruleMatch[1];
|
|
124
|
-
currentSection = ruleMatch[2];
|
|
125
|
-
currentContent = [line];
|
|
126
|
-
inRule = true;
|
|
127
|
-
} else if (sectionMatch && !inRule) {
|
|
128
|
-
// Save previous chunk
|
|
129
|
-
if (currentContent.length > 0) {
|
|
130
|
-
const text = currentContent.join("\n").trim();
|
|
131
|
-
if (text) {
|
|
132
|
-
chunks.push({
|
|
133
|
-
filepath,
|
|
134
|
-
section: currentSection,
|
|
135
|
-
ruleId: currentRuleId,
|
|
136
|
-
content: text,
|
|
137
|
-
});
|
|
138
|
-
}
|
|
139
|
-
}
|
|
140
|
-
currentSection = sectionMatch[1];
|
|
141
|
-
currentRuleId = undefined;
|
|
142
|
-
currentContent = [line];
|
|
143
|
-
} else if (line.match(/^##/) && inRule) {
|
|
144
|
-
// End of rule, start new section
|
|
145
|
-
if (currentContent.length > 0) {
|
|
146
|
-
const text = currentContent.join("\n").trim();
|
|
147
|
-
if (text) {
|
|
148
|
-
chunks.push({
|
|
149
|
-
filepath,
|
|
150
|
-
section: currentSection,
|
|
151
|
-
ruleId: currentRuleId,
|
|
152
|
-
content: text,
|
|
153
|
-
});
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
inRule = false;
|
|
157
|
-
currentRuleId = undefined;
|
|
158
|
-
const newSection = line.match(/^#+\s+(.+)/);
|
|
159
|
-
currentSection = newSection ? newSection[1] : "";
|
|
160
|
-
currentContent = [line];
|
|
161
|
-
} else {
|
|
162
|
-
currentContent.push(line);
|
|
163
|
-
}
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
// Save remaining content
|
|
167
|
-
if (currentContent.length > 0) {
|
|
168
|
-
const text = currentContent.join("\n").trim();
|
|
169
|
-
if (text) {
|
|
170
|
-
chunks.push({
|
|
171
|
-
filepath,
|
|
172
|
-
section: currentSection,
|
|
173
|
-
ruleId: currentRuleId,
|
|
174
|
-
content: text,
|
|
175
|
-
});
|
|
176
|
-
}
|
|
177
|
-
}
|
|
178
|
-
|
|
179
|
-
// Filter out small chunks (less than 50 chars)
|
|
180
|
-
return chunks.filter(c => c.content.length >= 50);
|
|
181
|
-
}
|
|
182
|
-
|
|
183
|
-
// Cosine similarity
|
|
184
|
-
function cosineSimilarity(a: number[], b: number[]): number {
|
|
185
|
-
if (a.length !== b.length) return 0;
|
|
186
|
-
let dotProduct = 0;
|
|
187
|
-
let normA = 0;
|
|
188
|
-
let normB = 0;
|
|
189
|
-
for (let i = 0; i < a.length; i++) {
|
|
190
|
-
dotProduct += a[i] * b[i];
|
|
191
|
-
normA += a[i] * a[i];
|
|
192
|
-
normB += b[i] * b[i];
|
|
193
|
-
}
|
|
194
|
-
return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
|
|
195
|
-
}
|
|
196
|
-
|
|
197
|
-
// Resolve natural language directory paths
|
|
198
|
-
function resolveOutputDir(outputDir: string | undefined): string {
|
|
199
|
-
if (!outputDir) {
|
|
200
|
-
return path.resolve(".activo/standards");
|
|
201
|
-
}
|
|
202
|
-
|
|
203
|
-
const normalized = outputDir.trim().toLowerCase();
|
|
204
|
-
|
|
205
|
-
// Natural language mappings
|
|
206
|
-
const currentDirPatterns = [
|
|
207
|
-
"현재 디렉토리", "현재 폴더", "현재디렉토리", "현재폴더",
|
|
208
|
-
"이 디렉토리", "이 폴더", "여기", "here",
|
|
209
|
-
"current directory", "current folder", "current dir",
|
|
210
|
-
".", "./"
|
|
211
|
-
];
|
|
212
|
-
|
|
213
|
-
for (const pattern of currentDirPatterns) {
|
|
214
|
-
if (normalized.includes(pattern)) {
|
|
215
|
-
return path.resolve(".");
|
|
216
|
-
}
|
|
217
|
-
}
|
|
218
|
-
|
|
219
|
-
// Handle ~ for home directory
|
|
220
|
-
if (outputDir.startsWith("~")) {
|
|
221
|
-
return path.resolve(outputDir.replace("~", process.env.HOME || ""));
|
|
222
|
-
}
|
|
223
|
-
|
|
224
|
-
return path.resolve(outputDir);
|
|
225
|
-
}
|
|
226
|
-
|
|
227
|
-
// Import PDF Tool
// Converts a PDF of development standards into markdown file(s) under the
// chosen output directory. By default the extracted text is split into
// ~3000-character chunks with an _index.md listing; with singleFile=true a
// single markdown file is written instead.
export const importPdfTool: Tool = {
  name: "import_pdf_standards",
  description: "Import development standards from a PDF file and convert to markdown (PDF를 마크다운으로 변환). Use when user asks: 'PDF 변환', 'PDF를 md로', 'PDF 마크다운', 'convert PDF', '현재 디렉토리에 저장'.",
  parameters: {
    type: "object",
    required: ["pdfPath"],
    properties: {
      pdfPath: {
        type: "string",
        description: "Path to the PDF file (PDF 파일 경로)",
      },
      outputDir: {
        type: "string",
        description: "Output directory. Use '.' or '현재 디렉토리' for current dir. Default: .activo/standards",
      },
      singleFile: {
        type: "boolean",
        description: "Save as single markdown file instead of chunks (default: false)",
      },
    },
  },
  // Returns a ToolResult whose content is a Korean summary of the written
  // files; all failures are reported via { success: false, error }.
  handler: async (args): Promise<ToolResult> => {
    try {
      // Resolve PDF path (handle ~)
      let pdfPath = args.pdfPath as string;
      if (pdfPath.startsWith("~")) {
        pdfPath = pdfPath.replace("~", process.env.HOME || "");
      }
      pdfPath = path.resolve(pdfPath);

      const outputDir = resolveOutputDir(args.outputDir as string | undefined);
      const singleFile = args.singleFile as boolean || false;

      if (!fs.existsSync(pdfPath)) {
        return { success: false, content: "", error: `PDF not found: ${pdfPath}` };
      }

      // Extract text from PDF using pdf-parse
      const dataBuffer = fs.readFileSync(pdfPath);
      const pdfData = await pdfParse(dataBuffer);
      const text = pdfData.text;
      const numpages = pdfData.numpages;

      // Create output directory
      if (!fs.existsSync(outputDir)) {
        fs.mkdirSync(outputDir, { recursive: true });
      }

      const filename = path.basename(pdfPath, ".pdf");
      // Date-only prefix of the ISO timestamp (YYYY-MM-DD).
      const extractionDate = new Date().toISOString().split("T")[0];
      const results: string[] = [];

      if (singleFile) {
        // Save as single file
        const outputFilename = `${sanitize(filename)}.md`;
        const outputPath = path.join(outputDir, outputFilename);

        // NOTE(review): "$(unknown)" is written literally as the markdown
        // title — this looks like a leftover/garbled placeholder; confirm
        // the intended title (e.g. the source filename).
        let md = `# $(unknown)\n\n`;
        md += `> Source: ${path.basename(pdfPath)}\n`;
        md += `> Extracted: ${extractionDate}\n`;
        md += `> Pages: ${numpages}\n`;
        md += `> Method: pdf-parse\n\n`;
        md += `---\n\n`;
        md += text;
        md += `\n\n---\n`;
        md += `[Edit this file to add structured rules]\n`;

        fs.writeFileSync(outputPath, md, "utf-8");
        results.push(outputFilename);
      } else {
        // Split into chunks
        const chunks = splitIntoChunks(text, 3000);

        for (let i = 0; i < chunks.length; i++) {
          // Zero-padded sequence prefix keeps chunk files sorted.
          const chunkFilename = `${String(i + 1).padStart(2, "0")}_${sanitize(filename)}.md`;
          const chunkPath = path.join(outputDir, chunkFilename);

          // NOTE(review): same literal "$(unknown)" placeholder as above.
          let md = `# $(unknown) - Part ${i + 1}\n\n`;
          md += `> Source: ${path.basename(pdfPath)}\n`;
          md += `> Extracted: ${extractionDate}\n`;
          md += `> Pages: ${numpages}\n`;
          md += `> Method: pdf-parse\n\n`;
          md += `---\n\n`;
          md += chunks[i];
          md += `\n\n---\n`;
          md += `[Edit this file to add structured rules]\n`;

          fs.writeFileSync(chunkPath, md, "utf-8");
          results.push(chunkFilename);
        }

        // Create index
        const indexPath = path.join(outputDir, "_index.md");
        let indexMd = `# Development Standards Index\n\n`;
        indexMd += `> Source: ${path.basename(pdfPath)}\n`;
        indexMd += `> Extracted: ${extractionDate}\n`;
        indexMd += `> Files: ${results.length}\n`;
        indexMd += `> Method: pdf-parse\n\n`;
        indexMd += `## Files\n\n`;
        for (const r of results) {
          indexMd += `- [${r}](./${r})\n`;
        }
        fs.writeFileSync(indexPath, indexMd, "utf-8");
      }

      return {
        success: true,
        content: `PDF 변환 완료!\n\n` +
          `📂 저장 위치: ${outputDir}\n` +
          `📄 파일 수: ${results.length}\n` +
          `📖 페이지: ${numpages}\n` +
          `🔧 추출 방법: pdf-parse\n\n` +
          `파일 목록:\n${results.map(r => ` - ${r}`).join("\n")}\n\n` +
          `다음 단계: 파일을 편집하여 구조화된 규칙 추가\n` +
          `형식: ## RULE-001: Title\n- 심각도: error|warning|info\n- 규칙: description`,
      };
    } catch (error) {
      return { success: false, content: "", error: String(error) };
    }
  },
};
|
|
349
|
-
|
|
350
|
-
// List Standards Tool
|
|
351
|
-
export const listStandardsTool: Tool = {
|
|
352
|
-
name: "list_standards",
|
|
353
|
-
description: "List all loaded development standards and rules.",
|
|
354
|
-
parameters: {
|
|
355
|
-
type: "object",
|
|
356
|
-
properties: {
|
|
357
|
-
directory: {
|
|
358
|
-
type: "string",
|
|
359
|
-
description: "Standards directory (default: .activo/standards)",
|
|
360
|
-
},
|
|
361
|
-
},
|
|
362
|
-
},
|
|
363
|
-
handler: async (args): Promise<ToolResult> => {
|
|
364
|
-
try {
|
|
365
|
-
const dir = resolveOutputDir(args.directory as string | undefined);
|
|
366
|
-
|
|
367
|
-
if (!fs.existsSync(dir)) {
|
|
368
|
-
return { success: true, content: "No standards directory found. Import a PDF first." };
|
|
369
|
-
}
|
|
370
|
-
|
|
371
|
-
const files = fs.readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "_index.md");
|
|
372
|
-
if (files.length === 0) {
|
|
373
|
-
return { success: true, content: "No standard files found." };
|
|
374
|
-
}
|
|
375
|
-
|
|
376
|
-
let totalRules = 0;
|
|
377
|
-
const results: string[] = [];
|
|
378
|
-
|
|
379
|
-
for (const file of files) {
|
|
380
|
-
const content = fs.readFileSync(path.join(dir, file), "utf-8");
|
|
381
|
-
const rules = content.match(/^## [A-Z]+-\d+/gm) || [];
|
|
382
|
-
totalRules += rules.length;
|
|
383
|
-
results.push(`📄 ${file}: ${rules.length} rules`);
|
|
384
|
-
}
|
|
385
|
-
|
|
386
|
-
return {
|
|
387
|
-
success: true,
|
|
388
|
-
content: `Standards Directory: ${dir}\n\n${results.join("\n")}\n\nTotal: ${files.length} files, ${totalRules} rules`,
|
|
389
|
-
};
|
|
390
|
-
} catch (error) {
|
|
391
|
-
return { success: false, content: "", error: String(error) };
|
|
392
|
-
}
|
|
393
|
-
},
|
|
394
|
-
};
|
|
395
|
-
|
|
396
|
-
// Check Code Quality Tool
|
|
397
|
-
export const checkQualityTool: Tool = {
|
|
398
|
-
name: "check_code_quality",
|
|
399
|
-
description: "Check code against loaded development standards.",
|
|
400
|
-
parameters: {
|
|
401
|
-
type: "object",
|
|
402
|
-
required: ["filepath"],
|
|
403
|
-
properties: {
|
|
404
|
-
filepath: {
|
|
405
|
-
type: "string",
|
|
406
|
-
description: "File or directory to check",
|
|
407
|
-
},
|
|
408
|
-
standardsDir: {
|
|
409
|
-
type: "string",
|
|
410
|
-
description: "Standards directory (default: .activo/standards)",
|
|
411
|
-
},
|
|
412
|
-
},
|
|
413
|
-
},
|
|
414
|
-
handler: async (args): Promise<ToolResult> => {
|
|
415
|
-
try {
|
|
416
|
-
const filepath = path.resolve(args.filepath as string);
|
|
417
|
-
const standardsDir = resolveOutputDir(args.standardsDir as string | undefined);
|
|
418
|
-
|
|
419
|
-
if (!fs.existsSync(filepath)) {
|
|
420
|
-
return { success: false, content: "", error: `Path not found: ${filepath}` };
|
|
421
|
-
}
|
|
422
|
-
|
|
423
|
-
// Load standards
|
|
424
|
-
let standards = "";
|
|
425
|
-
if (fs.existsSync(standardsDir)) {
|
|
426
|
-
const files = fs.readdirSync(standardsDir).filter((f) => f.endsWith(".md") && f !== "_index.md");
|
|
427
|
-
for (const file of files) {
|
|
428
|
-
standards += fs.readFileSync(path.join(standardsDir, file), "utf-8") + "\n\n";
|
|
429
|
-
}
|
|
430
|
-
}
|
|
431
|
-
|
|
432
|
-
// Get code
|
|
433
|
-
let code = "";
|
|
434
|
-
const stat = fs.statSync(filepath);
|
|
435
|
-
if (stat.isFile()) {
|
|
436
|
-
code = fs.readFileSync(filepath, "utf-8");
|
|
437
|
-
} else {
|
|
438
|
-
return { success: false, content: "", error: "Directory check not yet supported. Specify a file." };
|
|
439
|
-
}
|
|
440
|
-
|
|
441
|
-
// Build analysis prompt
|
|
442
|
-
const prompt = buildAnalysisPrompt(code, filepath, standards);
|
|
443
|
-
|
|
444
|
-
// Call Ollama
|
|
445
|
-
const config = loadConfig();
|
|
446
|
-
const client = new OllamaClient(config.ollama);
|
|
447
|
-
|
|
448
|
-
const response = await client.chat([{ role: "user", content: prompt }]);
|
|
449
|
-
|
|
450
|
-
return { success: true, content: response.content };
|
|
451
|
-
} catch (error) {
|
|
452
|
-
return { success: false, content: "", error: String(error) };
|
|
453
|
-
}
|
|
454
|
-
},
|
|
455
|
-
};
|
|
456
|
-
|
|
457
|
-
// Helper functions
|
|
458
|
-
function splitIntoChunks(text: string, maxSize: number): string[] {
|
|
459
|
-
const chunks: string[] = [];
|
|
460
|
-
const paragraphs = text.split(/\n\n+/);
|
|
461
|
-
let current = "";
|
|
462
|
-
|
|
463
|
-
for (const para of paragraphs) {
|
|
464
|
-
if (current.length + para.length > maxSize && current.length > 0) {
|
|
465
|
-
chunks.push(current.trim());
|
|
466
|
-
current = "";
|
|
467
|
-
}
|
|
468
|
-
current += para + "\n\n";
|
|
469
|
-
}
|
|
470
|
-
|
|
471
|
-
if (current.trim()) {
|
|
472
|
-
chunks.push(current.trim());
|
|
473
|
-
}
|
|
474
|
-
|
|
475
|
-
return chunks.length > 0 ? chunks : [text];
|
|
476
|
-
}
|
|
477
|
-
|
|
478
|
-
function sanitize(name: string): string {
|
|
479
|
-
return name.replace(/[^a-zA-Z0-9가-힣\s-]/g, "").replace(/\s+/g, "_").slice(0, 50);
|
|
480
|
-
}
|
|
481
|
-
|
|
482
|
-
function buildAnalysisPrompt(code: string, filepath: string, standards: string): string {
|
|
483
|
-
const ext = path.extname(filepath);
|
|
484
|
-
const lang = { ".ts": "typescript", ".js": "javascript", ".java": "java", ".py": "python" }[ext] || "text";
|
|
485
|
-
|
|
486
|
-
let prompt = `당신은 코드 품질 전문가입니다. 아래 코드를 분석하세요.\n\n`;
|
|
487
|
-
|
|
488
|
-
if (standards) {
|
|
489
|
-
prompt += `[개발 표준 규칙]\n${standards.slice(0, 4000)}\n\n`;
|
|
490
|
-
}
|
|
491
|
-
|
|
492
|
-
prompt += `[분석 대상 코드]\n파일: ${filepath}\n\`\`\`${lang}\n${code.slice(0, 8000)}\n\`\`\`\n\n`;
|
|
493
|
-
prompt += `[점검 요청]\n`;
|
|
494
|
-
prompt += `1. 규칙 위반 사항 (있다면)\n`;
|
|
495
|
-
prompt += `2. 개선 제안\n`;
|
|
496
|
-
prompt += `3. 전반적인 코드 품질 평가\n`;
|
|
497
|
-
|
|
498
|
-
return prompt;
|
|
499
|
-
}
|
|
500
|
-
|
|
501
|
-
// Import HWP Tool
// Converts a Korean HWP (한글) document of development standards into
// markdown file(s) under the chosen output directory, mirroring
// importPdfTool but using @ohah/hwpjs for extraction.
export const importHwpTool: Tool = {
  name: "import_hwp_standards",
  description: "Import development standards from a HWP file and convert to markdown (HWP를 마크다운으로 변환). Use when user asks: 'HWP 변환', 'HWP를 md로', '한글 파일 변환', 'convert HWP'.",
  parameters: {
    type: "object",
    required: ["hwpPath"],
    properties: {
      hwpPath: {
        type: "string",
        description: "Path to the HWP file (HWP 파일 경로)",
      },
      outputDir: {
        type: "string",
        description: "Output directory. Use '.' or '현재 디렉토리' for current dir. Default: .activo/standards",
      },
      singleFile: {
        type: "boolean",
        description: "Save as single markdown file instead of chunks (default: false)",
      },
    },
  },
  // Returns a ToolResult whose content is a Korean summary of the written
  // files; all failures are reported via { success: false, error }.
  handler: async (args): Promise<ToolResult> => {
    try {
      // Resolve HWP path (handle ~)
      let hwpPath = args.hwpPath as string;
      if (hwpPath.startsWith("~")) {
        hwpPath = hwpPath.replace("~", process.env.HOME || "");
      }
      hwpPath = path.resolve(hwpPath);

      const outputDir = resolveOutputDir(args.outputDir as string | undefined);
      const singleFile = args.singleFile as boolean || false;

      if (!fs.existsSync(hwpPath)) {
        return { success: false, content: "", error: `HWP not found: ${hwpPath}` };
      }

      // Parse HWP file using @ohah/hwpjs
      const dataBuffer = fs.readFileSync(hwpPath);
      // Options: embed images as base64, emit plain markdown (no HTML),
      // and omit version/page metadata from the output.
      const { markdown: text } = hwpToMarkdown(dataBuffer, {
        image: "base64",
        useHtml: false,
        includeVersion: false,
        includePageInfo: false,
      });

      // Create output directory
      if (!fs.existsSync(outputDir)) {
        fs.mkdirSync(outputDir, { recursive: true });
      }

      const filename = path.basename(hwpPath, ".hwp");
      // Date-only prefix of the ISO timestamp (YYYY-MM-DD).
      const extractionDate = new Date().toISOString().split("T")[0];
      const results: string[] = [];

      if (singleFile) {
        // Save as single file
        const outputFilename = `${sanitize(filename)}.md`;
        const outputPath = path.join(outputDir, outputFilename);

        // NOTE(review): "$(unknown)" is written literally as the markdown
        // title — this looks like a leftover/garbled placeholder; confirm
        // the intended title (e.g. the source filename).
        let md = `# $(unknown)\n\n`;
        md += `> Source: ${path.basename(hwpPath)}\n`;
        md += `> Extracted: ${extractionDate}\n`;
        md += `> Format: HWP\n`;
        md += `> Method: hwp.js\n\n`;
        md += `---\n\n`;
        md += text;
        md += `\n\n---\n`;
        md += `[Edit this file to add structured rules]\n`;

        fs.writeFileSync(outputPath, md, "utf-8");
        results.push(outputFilename);
      } else {
        // Split into chunks
        const chunks = splitIntoChunks(text, 3000);

        for (let i = 0; i < chunks.length; i++) {
          // Zero-padded sequence prefix keeps chunk files sorted.
          const chunkFilename = `${String(i + 1).padStart(2, "0")}_${sanitize(filename)}.md`;
          const chunkPath = path.join(outputDir, chunkFilename);

          // NOTE(review): same literal "$(unknown)" placeholder as above.
          let md = `# $(unknown) - Part ${i + 1}\n\n`;
          md += `> Source: ${path.basename(hwpPath)}\n`;
          md += `> Extracted: ${extractionDate}\n`;
          md += `> Format: HWP\n`;
          md += `> Method: hwp.js\n\n`;
          md += `---\n\n`;
          md += chunks[i];
          md += `\n\n---\n`;
          md += `[Edit this file to add structured rules]\n`;

          fs.writeFileSync(chunkPath, md, "utf-8");
          results.push(chunkFilename);
        }

        // Create index
        const indexPath = path.join(outputDir, "_index.md");
        let indexMd = `# Development Standards Index\n\n`;
        indexMd += `> Source: ${path.basename(hwpPath)}\n`;
        indexMd += `> Extracted: ${extractionDate}\n`;
        indexMd += `> Files: ${results.length}\n`;
        indexMd += `> Method: hwp.js\n\n`;
        indexMd += `## Files\n\n`;
        for (const r of results) {
          indexMd += `- [${r}](./${r})\n`;
        }
        fs.writeFileSync(indexPath, indexMd, "utf-8");
      }

      return {
        success: true,
        content: `HWP 변환 완료!\n\n` +
          `📂 저장 위치: ${outputDir}\n` +
          `📄 파일 수: ${results.length}\n` +
          `📖 형식: HWP\n` +
          `🔧 추출 방법: hwp.js\n\n` +
          `파일 목록:\n${results.map(r => ` - ${r}`).join("\n")}\n\n` +
          `다음 단계: 파일을 편집하여 구조화된 규칙 추가\n` +
          `형식: ## RULE-001: Title\n- 심각도: error|warning|info\n- 규칙: description`,
      };
    } catch (error) {
      return { success: false, content: "", error: String(error) };
    }
  },
};
|
|
626
|
-
|
|
627
|
-
// Index Standards for RAG
|
|
628
|
-
export const indexStandardsTool: Tool = {
|
|
629
|
-
name: "index_standards",
|
|
630
|
-
description: "Index development standards for RAG search. Run this after importing PDF/HWP files to enable semantic search.",
|
|
631
|
-
parameters: {
|
|
632
|
-
type: "object",
|
|
633
|
-
properties: {
|
|
634
|
-
directory: {
|
|
635
|
-
type: "string",
|
|
636
|
-
description: "Standards directory (default: .activo/standards)",
|
|
637
|
-
},
|
|
638
|
-
},
|
|
639
|
-
},
|
|
640
|
-
handler: async (args): Promise<ToolResult> => {
|
|
641
|
-
try {
|
|
642
|
-
const dir = resolveOutputDir(args.directory as string | undefined);
|
|
643
|
-
|
|
644
|
-
if (!fs.existsSync(dir)) {
|
|
645
|
-
return { success: false, content: "", error: `Standards directory not found: ${dir}` };
|
|
646
|
-
}
|
|
647
|
-
|
|
648
|
-
const files = fs.readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "_index.md");
|
|
649
|
-
if (files.length === 0) {
|
|
650
|
-
return { success: false, content: "", error: "No markdown files found in standards directory" };
|
|
651
|
-
}
|
|
652
|
-
|
|
653
|
-
const config = loadConfig();
|
|
654
|
-
const client = new OllamaClient(config.ollama);
|
|
655
|
-
|
|
656
|
-
const allChunks: StandardsChunk[] = [];
|
|
657
|
-
|
|
658
|
-
// Collect all chunks from all files
|
|
659
|
-
for (const file of files) {
|
|
660
|
-
const content = fs.readFileSync(path.join(dir, file), "utf-8");
|
|
661
|
-
const chunks = splitStandardsIntoChunks(content, file);
|
|
662
|
-
allChunks.push(...chunks);
|
|
663
|
-
}
|
|
664
|
-
|
|
665
|
-
if (allChunks.length === 0) {
|
|
666
|
-
return { success: false, content: "", error: "No valid chunks found in standards files" };
|
|
667
|
-
}
|
|
668
|
-
|
|
669
|
-
// Generate embeddings
|
|
670
|
-
const embeddings: StandardsEmbedding[] = [];
|
|
671
|
-
let processed = 0;
|
|
672
|
-
|
|
673
|
-
for (const chunk of allChunks) {
|
|
674
|
-
const embedding = await client.embed(chunk.content, DEFAULT_EMBED_MODEL);
|
|
675
|
-
embeddings.push({
|
|
676
|
-
chunk,
|
|
677
|
-
embedding,
|
|
678
|
-
hash: calculateHash(chunk.content),
|
|
679
|
-
});
|
|
680
|
-
processed++;
|
|
681
|
-
}
|
|
682
|
-
|
|
683
|
-
// Save embeddings
|
|
684
|
-
saveStandardsEmbeddings(embeddings);
|
|
685
|
-
|
|
686
|
-
// Save index
|
|
687
|
-
const index: StandardsIndex = {
|
|
688
|
-
version: "1.0",
|
|
689
|
-
model: DEFAULT_EMBED_MODEL,
|
|
690
|
-
createdAt: new Date().toISOString(),
|
|
691
|
-
updatedAt: new Date().toISOString(),
|
|
692
|
-
totalChunks: embeddings.length,
|
|
693
|
-
};
|
|
694
|
-
saveStandardsIndex(index);
|
|
695
|
-
|
|
696
|
-
return {
|
|
697
|
-
success: true,
|
|
698
|
-
content: `Standards indexed successfully!\n\n` +
|
|
699
|
-
`📂 Directory: ${dir}\n` +
|
|
700
|
-
`📄 Files: ${files.length}\n` +
|
|
701
|
-
`🔖 Chunks: ${embeddings.length}\n` +
|
|
702
|
-
`🧠 Model: ${DEFAULT_EMBED_MODEL}\n\n` +
|
|
703
|
-
`Use 'search_standards' to find relevant rules.`,
|
|
704
|
-
};
|
|
705
|
-
} catch (error) {
|
|
706
|
-
return { success: false, content: "", error: String(error) };
|
|
707
|
-
}
|
|
708
|
-
},
|
|
709
|
-
};
|
|
710
|
-
|
|
711
|
-
// Search Standards using RAG
|
|
712
|
-
export const searchStandardsTool: Tool = {
|
|
713
|
-
name: "search_standards",
|
|
714
|
-
description: "Search development standards using semantic search (RAG). Returns relevant rules/sections based on query.",
|
|
715
|
-
parameters: {
|
|
716
|
-
type: "object",
|
|
717
|
-
required: ["query"],
|
|
718
|
-
properties: {
|
|
719
|
-
query: {
|
|
720
|
-
type: "string",
|
|
721
|
-
description: "Search query (e.g., 'variable naming', 'error handling', 'SQL injection')",
|
|
722
|
-
},
|
|
723
|
-
topK: {
|
|
724
|
-
type: "number",
|
|
725
|
-
description: "Number of results to return (default: 5)",
|
|
726
|
-
},
|
|
727
|
-
},
|
|
728
|
-
},
|
|
729
|
-
handler: async (args): Promise<ToolResult> => {
|
|
730
|
-
try {
|
|
731
|
-
const query = args.query as string;
|
|
732
|
-
const topK = (args.topK as number) || 5;
|
|
733
|
-
|
|
734
|
-
const embeddings = loadStandardsEmbeddings();
|
|
735
|
-
if (embeddings.length === 0) {
|
|
736
|
-
return {
|
|
737
|
-
success: false,
|
|
738
|
-
content: "",
|
|
739
|
-
error: "No standards indexed. Run 'index_standards' first.",
|
|
740
|
-
};
|
|
741
|
-
}
|
|
742
|
-
|
|
743
|
-
const config = loadConfig();
|
|
744
|
-
const client = new OllamaClient(config.ollama);
|
|
745
|
-
|
|
746
|
-
// Get query embedding
|
|
747
|
-
const queryEmbedding = await client.embed(query, DEFAULT_EMBED_MODEL);
|
|
748
|
-
|
|
749
|
-
// Calculate similarities
|
|
750
|
-
const results = embeddings.map((e) => ({
|
|
751
|
-
...e,
|
|
752
|
-
similarity: cosineSimilarity(queryEmbedding, e.embedding),
|
|
753
|
-
}));
|
|
754
|
-
|
|
755
|
-
// Sort by similarity and get top K
|
|
756
|
-
results.sort((a, b) => b.similarity - a.similarity);
|
|
757
|
-
const topResults = results.slice(0, topK);
|
|
758
|
-
|
|
759
|
-
// Format results
|
|
760
|
-
let output = `## Search Results for: "${query}"\n\n`;
|
|
761
|
-
output += `Found ${topResults.length} relevant standards:\n\n`;
|
|
762
|
-
|
|
763
|
-
for (let i = 0; i < topResults.length; i++) {
|
|
764
|
-
const r = topResults[i];
|
|
765
|
-
output += `### ${i + 1}. ${r.chunk.ruleId || r.chunk.section || "Section"}\n`;
|
|
766
|
-
output += `📄 File: ${r.chunk.filepath}\n`;
|
|
767
|
-
output += `📊 Relevance: ${(r.similarity * 100).toFixed(1)}%\n\n`;
|
|
768
|
-
output += `${r.chunk.content.slice(0, 500)}${r.chunk.content.length > 500 ? "..." : ""}\n\n`;
|
|
769
|
-
output += `---\n\n`;
|
|
770
|
-
}
|
|
771
|
-
|
|
772
|
-
return { success: true, content: output };
|
|
773
|
-
} catch (error) {
|
|
774
|
-
return { success: false, content: "", error: String(error) };
|
|
775
|
-
}
|
|
776
|
-
},
|
|
777
|
-
};
|
|
778
|
-
|
|
779
|
-
// Check Code Quality with RAG
|
|
780
|
-
export const checkQualityWithRagTool: Tool = {
|
|
781
|
-
name: "check_quality_rag",
|
|
782
|
-
description: "Check code quality using RAG to find relevant standards automatically.",
|
|
783
|
-
parameters: {
|
|
784
|
-
type: "object",
|
|
785
|
-
required: ["filepath"],
|
|
786
|
-
properties: {
|
|
787
|
-
filepath: {
|
|
788
|
-
type: "string",
|
|
789
|
-
description: "File to check",
|
|
790
|
-
},
|
|
791
|
-
topK: {
|
|
792
|
-
type: "number",
|
|
793
|
-
description: "Number of relevant standards to use (default: 5)",
|
|
794
|
-
},
|
|
795
|
-
},
|
|
796
|
-
},
|
|
797
|
-
handler: async (args): Promise<ToolResult> => {
|
|
798
|
-
try {
|
|
799
|
-
const filepath = path.resolve(args.filepath as string);
|
|
800
|
-
const topK = (args.topK as number) || 5;
|
|
801
|
-
|
|
802
|
-
if (!fs.existsSync(filepath)) {
|
|
803
|
-
return { success: false, content: "", error: `File not found: ${filepath}` };
|
|
804
|
-
}
|
|
805
|
-
|
|
806
|
-
const embeddings = loadStandardsEmbeddings();
|
|
807
|
-
if (embeddings.length === 0) {
|
|
808
|
-
return {
|
|
809
|
-
success: false,
|
|
810
|
-
content: "",
|
|
811
|
-
error: "No standards indexed. Run 'index_standards' first after importing PDF/HWP.",
|
|
812
|
-
};
|
|
813
|
-
}
|
|
814
|
-
|
|
815
|
-
// Read code
|
|
816
|
-
const code = fs.readFileSync(filepath, "utf-8");
|
|
817
|
-
const config = loadConfig();
|
|
818
|
-
const client = new OllamaClient(config.ollama);
|
|
819
|
-
|
|
820
|
-
// Create query from code (first 1000 chars + filename)
|
|
821
|
-
const queryText = `Code analysis for ${path.basename(filepath)}:\n${code.slice(0, 1000)}`;
|
|
822
|
-
const queryEmbedding = await client.embed(queryText, DEFAULT_EMBED_MODEL);
|
|
823
|
-
|
|
824
|
-
// Find relevant standards
|
|
825
|
-
const results = embeddings.map((e) => ({
|
|
826
|
-
...e,
|
|
827
|
-
similarity: cosineSimilarity(queryEmbedding, e.embedding),
|
|
828
|
-
}));
|
|
829
|
-
results.sort((a, b) => b.similarity - a.similarity);
|
|
830
|
-
const relevantStandards = results.slice(0, topK);
|
|
831
|
-
|
|
832
|
-
// Build standards context
|
|
833
|
-
let standardsContext = "## Relevant Development Standards\n\n";
|
|
834
|
-
for (const r of relevantStandards) {
|
|
835
|
-
if (r.chunk.ruleId) {
|
|
836
|
-
standardsContext += `### ${r.chunk.ruleId}: ${r.chunk.section}\n`;
|
|
837
|
-
} else {
|
|
838
|
-
standardsContext += `### ${r.chunk.section}\n`;
|
|
839
|
-
}
|
|
840
|
-
standardsContext += `${r.chunk.content}\n\n`;
|
|
841
|
-
}
|
|
842
|
-
|
|
843
|
-
// Build analysis prompt
|
|
844
|
-
const ext = path.extname(filepath);
|
|
845
|
-
const lang = { ".ts": "typescript", ".js": "javascript", ".java": "java", ".py": "python" }[ext] || "text";
|
|
846
|
-
|
|
847
|
-
const prompt = `You are a code quality expert. Analyze the code based on the provided development standards.
|
|
848
|
-
|
|
849
|
-
${standardsContext}
|
|
850
|
-
|
|
851
|
-
## Code to Analyze
|
|
852
|
-
File: ${filepath}
|
|
853
|
-
\`\`\`${lang}
|
|
854
|
-
${code.slice(0, 6000)}
|
|
855
|
-
\`\`\`
|
|
856
|
-
|
|
857
|
-
## Analysis Request
|
|
858
|
-
1. Check for violations of the above standards
|
|
859
|
-
2. Provide specific line numbers if possible
|
|
860
|
-
3. Suggest improvements
|
|
861
|
-
4. Rate overall compliance (1-10)
|
|
862
|
-
|
|
863
|
-
Respond in Korean.`;
|
|
864
|
-
|
|
865
|
-
const response = await client.chat([{ role: "user", content: prompt }]);
|
|
866
|
-
|
|
867
|
-
return {
|
|
868
|
-
success: true,
|
|
869
|
-
content: `## Code Quality Analysis (RAG)\n\n` +
|
|
870
|
-
`📄 File: ${filepath}\n` +
|
|
871
|
-
`🔖 Standards used: ${relevantStandards.length}\n\n` +
|
|
872
|
-
`---\n\n${response.content}`,
|
|
873
|
-
};
|
|
874
|
-
} catch (error) {
|
|
875
|
-
return { success: false, content: "", error: String(error) };
|
|
876
|
-
}
|
|
877
|
-
},
|
|
878
|
-
};
|
|
879
|
-
|
|
880
|
-
// All standards tools
// Aggregate export of every standards-related tool in this module.
// NOTE(review): element order is preserved as-is in case any consumer
// (e.g. a tool registry) relies on registration order.
export const standardsTools: Tool[] = [
  importPdfTool,           // defined earlier in this file — presumably PDF import; verify there
  importHwpTool,           // defined earlier in this file — presumably HWP import; verify there
  listStandardsTool,       // defined earlier in this file
  checkQualityTool,        // defined earlier in this file
  indexStandardsTool,      // builds the embeddings index used by the two tools below
  searchStandardsTool,     // semantic (RAG) search over indexed standards
  checkQualityWithRagTool, // LLM quality review backed by RAG-retrieved standards
];
|