@dastbal/nestjs-ai-agent 1.0.2 β 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +75 -6
- package/dist/bin/cli.js +1 -1
- package/dist/core/agent/factory.d.ts +0 -9
- package/dist/core/agent/factory.js +11 -10
- package/dist/core/rag/indexer.d.ts +1 -8
- package/dist/core/rag/indexer.js +7 -14
- package/dist/core/rag/retriever.d.ts +4 -15
- package/dist/core/rag/retriever.js +82 -108
- package/dist/core/tools/tools.d.ts +22 -27
- package/dist/core/tools/tools.js +120 -94
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -1,10 +1,79 @@
|
|
|
1
|
-
@dastbal/nestjs-ai-agent π§ββοΈ
|
|
1
|
+
# @dastbal/nestjs-ai-agent π§ββοΈ
|
|
2
|
+
### Autonomous Principal Software Engineer for NestJS
|
|
3
|
+
|
|
4
|
+
[](https://www.npmjs.com/package/@dastbal/nestjs-ai-agent)
|
|
5
|
+
[](https://opensource.org/licenses/MIT)
|
|
6
|
+
|
|
7
|
+
Transform your NestJS development with an agent that doesn't just "chat", but **operates** directly on your codebase with Senior-level precision.
|
|
8
|
+
|
|
9
|
+
---
|
|
10
|
+
|
|
11
|
+
## π Quick Start
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
# Install the agent
|
|
2
15
|
npm install @dastbal/nestjs-ai-agent
|
|
3
16
|
|
|
4
17
|
# Run your first command
|
|
5
18
|
npx gen "Create a new Payments service with DDD patterns"
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## π Key Features
|
|
24
|
+
|
|
25
|
+
This agent operates with a strict set of principles and advanced capabilities:
|
|
26
|
+
|
|
27
|
+
* **π RAG Search:** Performs semantic search across your entire codebase before proposing changes, ensuring context-aware development.
|
|
28
|
+
* **π©Ί The Surgeon Rule:** Never overwrites a file without reading and analyzing it first, preserving existing logic and intent.
|
|
29
|
+
* **β
Self-Healing:** Runs integrity checks (TypeScript compiler) and attempts to auto-fix compilation errors (up to 3 retries).
|
|
30
|
+
* **πΎ Safe Writes:** Automatically creates backups before any file modification, ensuring data safety.
|
|
31
|
+
* **π§ SQLite Memory:** Remembers conversation threads and learned preferences across restarts using a local SQLite database.
|
|
32
|
+
* **π Configuration:** Leverages Google Vertex AI. Requires a service account JSON file (`credentials_vertex.json`) in the root folder and specific environment variables.
|
|
33
|
+
|
|
34
|
+
**Credentials File:** Place your Google Service Account JSON in the root folder and name it exactly `credentials_vertex.json`.
|
|
35
|
+
|
|
36
|
+
**Environment Variables:** Add the following to your `.env` file:
|
|
37
|
+
```dotenv
|
|
38
|
+
GOOGLE_APPLICATION_CREDENTIALS="./credentials_vertex.json"
|
|
39
|
+
GCP_PROJECT_ID="your-project-id"
|
|
40
|
+
GCP_LOCATION="us-central1"
|
|
41
|
+
```
|
|
42
|
+
**[CAUTION] Security First:** Always add `credentials_vertex.json` and `.env` to your `.gitignore` file to protect your credentials.
|
|
43
|
+
|
|
44
|
+
---
|
|
45
|
+
|
|
46
|
+
## βοΈ Internal Workflow
|
|
47
|
+
|
|
48
|
+
The agent follows a strict Principal Engineer protocol:
|
|
49
|
+
|
|
50
|
+
1. **Research:** Uses `ask_codebase` to find existing patterns, logic, and dependencies.
|
|
51
|
+
2. **Comprehension:** Reads existing code using `safe_read_file` to understand context and avoid regressions.
|
|
52
|
+
3. **Implementation:** Writes new code adhering to DDD principles, strict TypeScript typing (no `any`), and TSDocs.
|
|
53
|
+
4. **Validation:** Runs `run_integrity_check` (TypeScript compiler) immediately after implementation to ensure type safety.
|
|
54
|
+
5. **Safety:** Creates backups before writing files using `safe_write_file`.
|
|
55
|
+
6. **Human-in-the-loop:** Pauses for explicit approval before performing critical file operations or major changes.
|
|
56
|
+
|
|
57
|
+
---
|
|
58
|
+
|
|
59
|
+
## π‘ Usage Examples
|
|
60
|
+
|
|
61
|
+
Try these commands to see the agent in action:
|
|
62
|
+
|
|
63
|
+
* **Scaffolding:** `"Create a UserEntity with email and password fields using TypeORM"`
|
|
64
|
+
* **Logic Implementation:** `"Add a validation pipe to the login DTO"`
|
|
65
|
+
* **Testing:** `"Write a unit test for the AuthService including mocks for the repository"`
|
|
66
|
+
* **Refactoring:** `"Standardize all HTTP exceptions in the users controller"`
|
|
67
|
+
* **Code Generation:** `"Generate a NestJS module for handling user authentication"`
|
|
68
|
+
|
|
69
|
+
---
|
|
70
|
+
|
|
71
|
+
## π§ Learning & Adaptation
|
|
72
|
+
|
|
73
|
+
The agent learns from your feedback. If you provide a style correction or a new pattern, it stores this information in `.agent/memories/style-guide.txt` to ensure future code generation aligns with your preferences.
|
|
74
|
+
|
|
75
|
+
---
|
|
76
|
+
|
|
77
|
+
## π License
|
|
78
|
+
|
|
79
|
+
This project is released under the MIT License. Build something amazing! β¨
|
package/dist/bin/cli.js
CHANGED
|
@@ -61,7 +61,7 @@ program
|
|
|
61
61
|
}
|
|
62
62
|
log.sys("Inicializando Agente en modo CLI...");
|
|
63
63
|
// El threadId puede ser fijo para sesiones de CLI o dinΓ‘mico
|
|
64
|
-
const threadId = "cli-user
|
|
64
|
+
const threadId = "cli-user";
|
|
65
65
|
const agent = await factory_1.AgentFactory.create(threadId);
|
|
66
66
|
log.ai(`Procesando: "${instruction}"`);
|
|
67
67
|
const response = await agent.invoke({ messages: [{ role: "user", content: instruction }] }, { configurable: { thread_id: threadId }, recursionLimit: 50 });
|
|
@@ -1,33 +1,24 @@
|
|
|
1
1
|
export declare class AgentFactory {
|
|
2
2
|
static create(threadId?: string): Promise<import("langchain").ReactAgent<import("langchain").AgentTypeConfig<import("langchain").ResponseFormatUndefined, undefined, import("langchain").AnyAnnotationRoot, readonly import("langchain").AgentMiddleware<any, any, any, readonly (import("@langchain/core/tools").ClientTool | import("@langchain/core/tools").ServerTool)[]>[], readonly [import("langchain").DynamicStructuredTool<import("zod").ZodObject<{
|
|
3
3
|
query: import("zod").ZodString;
|
|
4
|
-
projectRoot: import("zod").ZodOptional<import("zod").ZodString>;
|
|
5
4
|
}, import("zod/v4/core").$strip>, {
|
|
6
5
|
query: string;
|
|
7
|
-
projectRoot?: string | undefined;
|
|
8
6
|
}, {
|
|
9
7
|
query: string;
|
|
10
|
-
projectRoot?: string | undefined;
|
|
11
8
|
}, string, "ask_codebase">, import("langchain").DynamicStructuredTool<import("zod").ZodObject<{}, import("zod/v4/core").$strip>, Record<string, never>, Record<string, never>, string, "run_integrity_check">, import("langchain").DynamicStructuredTool<import("zod").ZodObject<{
|
|
12
9
|
filePath: import("zod").ZodString;
|
|
13
10
|
content: import("zod").ZodString;
|
|
14
|
-
projectRoot: import("zod").ZodOptional<import("zod").ZodString>;
|
|
15
11
|
}, import("zod/v4/core").$strip>, {
|
|
16
12
|
filePath: string;
|
|
17
13
|
content: string;
|
|
18
|
-
projectRoot?: string | undefined;
|
|
19
14
|
}, {
|
|
20
15
|
filePath: string;
|
|
21
16
|
content: string;
|
|
22
|
-
projectRoot?: string | undefined;
|
|
23
17
|
}, string, "safe_write_file">, import("langchain").DynamicStructuredTool<import("zod").ZodObject<{
|
|
24
18
|
filePath: import("zod").ZodString;
|
|
25
|
-
projectRoot: import("zod").ZodOptional<import("zod").ZodString>;
|
|
26
19
|
}, import("zod/v4/core").$strip>, {
|
|
27
20
|
filePath: string;
|
|
28
|
-
projectRoot?: string | undefined;
|
|
29
21
|
}, {
|
|
30
22
|
filePath: string;
|
|
31
|
-
projectRoot?: string | undefined;
|
|
32
23
|
}, string, "safe_read_file">, import("langchain").DynamicStructuredTool<import("zod").ZodObject<{}, import("zod/v4/core").$strip>, Record<string, never>, Record<string, never>, string, "refresh_project_index">]>>>;
|
|
33
24
|
}
|
|
@@ -42,14 +42,14 @@ const tools_1 = require("../tools/tools");
|
|
|
42
42
|
const path = __importStar(require("path"));
|
|
43
43
|
const fs = __importStar(require("fs"));
|
|
44
44
|
class AgentFactory {
|
|
45
|
-
static async create(threadId =
|
|
45
|
+
static async create(threadId = "cli-session") {
|
|
46
46
|
const rootDir = process.cwd();
|
|
47
47
|
// ConfiguraciΓ³n de directorios
|
|
48
|
-
const agentDir = path.join(rootDir,
|
|
48
|
+
const agentDir = path.join(rootDir, ".agent");
|
|
49
49
|
if (!fs.existsSync(agentDir))
|
|
50
50
|
fs.mkdirSync(agentDir, { recursive: true });
|
|
51
51
|
// 1. Persistencia (Checkpointer)
|
|
52
|
-
const dbPath = path.join(agentDir,
|
|
52
|
+
const dbPath = path.join(agentDir, "history.db");
|
|
53
53
|
const checkpointer = langgraph_checkpoint_sqlite_1.SqliteSaver.fromConnString(dbPath);
|
|
54
54
|
// 2. Store (Opcional en createAgent, pero lo mantenemos por si lo usas en el runtime)
|
|
55
55
|
const memoryStore = new langgraph_checkpoint_1.InMemoryStore();
|
|
@@ -85,7 +85,7 @@ Typing: Strict TypeScript. The use of any is FORBIDDEN.
|
|
|
85
85
|
|
|
86
86
|
βοΈ EXECUTION PROTOCOL:
|
|
87
87
|
|
|
88
|
-
|
|
88
|
+
- You operate on a NestJS project. The root directory is: ${process.cwd()}
|
|
89
89
|
RESEARCH (ask_codebase): BEFORE touching anything, search for existing patterns in the project.
|
|
90
90
|
|
|
91
91
|
Example: "Search for UserEntity before creating a related DTO."
|
|
@@ -113,13 +113,14 @@ NOTE ON INDEXING:
|
|
|
113
113
|
- If 'ask_codebase' fails to find recent changes, use 'refresh_project_index'.
|
|
114
114
|
|
|
115
115
|
Wait for human approval. If rejected, propose a different solution.
|
|
116
|
-
|
|
117
|
-
-
|
|
118
|
-
-
|
|
119
|
-
|
|
116
|
+
- Use RELATIVE PATHS for all file operations.
|
|
117
|
+
- All source code is inside the 'src' folder (e.g., 'src/app.module.ts').
|
|
118
|
+
- DO NOT use the '/project/' prefix anymore. Just use the path relative to the root.
|
|
119
|
+
|
|
120
|
+
π EXAMPLE:
|
|
121
|
+
- Correct: safe_write_file('src/calculator/calculator.controller.ts', '...')
|
|
122
|
+
- Incorrect: safe_write_file('/project/src/...', '...')
|
|
120
123
|
|
|
121
|
-
ALWAYS use the '/project/' prefix when reading or writing source code.
|
|
122
|
-
Example: write_file('/project/src/app.service.ts', '...')
|
|
123
124
|
π CODE REFINEMENT & INTEGRITY (THE SURGEON'S RULE):
|
|
124
125
|
|
|
125
126
|
1. Read-Before-Write: NEVER overwrite a file without reading it first using 'safe_read_file'. You must understand the existing logic, TSDocs, and dependencies before making any changes.
|
|
@@ -12,33 +12,26 @@ export declare class IndexerService {
|
|
|
12
12
|
/**
|
|
13
13
|
* Main Entry Point: Scans the project and updates the brain.
|
|
14
14
|
* Scans files, checks hashes, generates embeddings, and saves the knowledge graph.
|
|
15
|
-
* @param sourceDir - Relative path to source code (usually 'src').
|
|
15
|
+
* * @param sourceDir - Relative path to source code (usually 'src').
|
|
16
16
|
*/
|
|
17
17
|
indexProject(sourceDir?: string): Promise<void>;
|
|
18
18
|
/**
|
|
19
19
|
* Processes a single file: Reads content, Calculates Hash, Parses AST,
|
|
20
20
|
* Updates Registry, and Accumulates Chunks.
|
|
21
|
-
* @param filePath - The relative path of the file to process.
|
|
22
|
-
* @param chunkAccumulator - An array to accumulate processed code chunks.
|
|
23
|
-
* @param edgeAccumulator - An array to accumulate dependency graph edges.
|
|
24
21
|
*/
|
|
25
22
|
private processSingleFile;
|
|
26
23
|
/**
|
|
27
24
|
* Generates embeddings using Vertex AI and saves them to SQLite in transactions.
|
|
28
|
-
* @param allChunks - An array of all processed chunks to embed and save.
|
|
29
25
|
*/
|
|
30
26
|
private embedAndSaveBatches;
|
|
31
27
|
/**
|
|
32
28
|
* Persists dependency relationships into the graph table.
|
|
33
29
|
* Uses 'INSERT OR IGNORE' to prevent duplicates without errors.
|
|
34
|
-
* @param edges - An array of GraphEdge objects to save.
|
|
35
30
|
*/
|
|
36
31
|
private saveGraph;
|
|
37
32
|
/**
|
|
38
33
|
* Recursively gets all .ts files in a directory.
|
|
39
34
|
* Returns RELATIVE paths (e.g., 'src/users/users.service.ts') to ensure consistency in DB.
|
|
40
|
-
* @param dir - The directory to search in.
|
|
41
|
-
* @param fileList - An accumulator for the list of files found.
|
|
42
35
|
*/
|
|
43
36
|
private getAllFiles;
|
|
44
37
|
}
|
package/dist/core/rag/indexer.js
CHANGED
|
@@ -56,7 +56,7 @@ class IndexerService {
|
|
|
56
56
|
/**
|
|
57
57
|
* Main Entry Point: Scans the project and updates the brain.
|
|
58
58
|
* Scans files, checks hashes, generates embeddings, and saves the knowledge graph.
|
|
59
|
-
* @param sourceDir - Relative path to source code (usually 'src').
|
|
59
|
+
* * @param sourceDir - Relative path to source code (usually 'src').
|
|
60
60
|
*/
|
|
61
61
|
async indexProject(sourceDir = 'src') {
|
|
62
62
|
const rootDir = process.cwd();
|
|
@@ -100,9 +100,6 @@ class IndexerService {
|
|
|
100
100
|
/**
|
|
101
101
|
* Processes a single file: Reads content, Calculates Hash, Parses AST,
|
|
102
102
|
* Updates Registry, and Accumulates Chunks.
|
|
103
|
-
* @param filePath - The relative path of the file to process.
|
|
104
|
-
* @param chunkAccumulator - An array to accumulate processed code chunks.
|
|
105
|
-
* @param edgeAccumulator - An array to accumulate dependency graph edges.
|
|
106
103
|
*/
|
|
107
104
|
processSingleFile(filePath, chunkAccumulator, edgeAccumulator) {
|
|
108
105
|
try {
|
|
@@ -134,7 +131,6 @@ class IndexerService {
|
|
|
134
131
|
}
|
|
135
132
|
/**
|
|
136
133
|
* Generates embeddings using Vertex AI and saves them to SQLite in transactions.
|
|
137
|
-
* @param allChunks - An array of all processed chunks to embed and save.
|
|
138
134
|
*/
|
|
139
135
|
async embedAndSaveBatches(allChunks) {
|
|
140
136
|
console.log(`π§ Generating Embeddings for ${allChunks.length} chunks...`);
|
|
@@ -154,9 +150,9 @@ class IndexerService {
|
|
|
154
150
|
const embeddingsModel = provider_1.LLMProvider.getEmbeddingsModel();
|
|
155
151
|
const vectors = await embeddingsModel.embedDocuments(textsToEmbed);
|
|
156
152
|
// 3. Save to DB (Transaction for performance)
|
|
157
|
-
const insertChunk = this.db.prepare(`
|
|
158
|
-
INSERT OR REPLACE INTO code_chunks (id, file_path, chunk_type, content, vector_json, metadata)
|
|
159
|
-
VALUES (?, ?, ?, ?, ?, ?)
|
|
153
|
+
const insertChunk = this.db.prepare(`
|
|
154
|
+
INSERT OR REPLACE INTO code_chunks (id, file_path, chunk_type, content, vector_json, metadata)
|
|
155
|
+
VALUES (?, ?, ?, ?, ?, ?)
|
|
160
156
|
`);
|
|
161
157
|
// Explicitly typed transaction callback to fix TS7006
|
|
162
158
|
const insertMany = this.db.transaction((chunks, vectors) => {
|
|
@@ -178,14 +174,13 @@ class IndexerService {
|
|
|
178
174
|
/**
|
|
179
175
|
* Persists dependency relationships into the graph table.
|
|
180
176
|
* Uses 'INSERT OR IGNORE' to prevent duplicates without errors.
|
|
181
|
-
* @param edges - An array of GraphEdge objects to save.
|
|
182
177
|
*/
|
|
183
178
|
saveGraph(edges) {
|
|
184
179
|
if (!edges || edges.length === 0)
|
|
185
180
|
return;
|
|
186
|
-
const insertEdge = this.db.prepare(`
|
|
187
|
-
INSERT OR IGNORE INTO dependency_graph (source, target, relation)
|
|
188
|
-
VALUES (?, ?, ?)
|
|
181
|
+
const insertEdge = this.db.prepare(`
|
|
182
|
+
INSERT OR IGNORE INTO dependency_graph (source, target, relation)
|
|
183
|
+
VALUES (?, ?, ?)
|
|
189
184
|
`);
|
|
190
185
|
// Explicitly typed transaction callback to fix TS7006
|
|
191
186
|
const runMany = this.db.transaction((edges) => {
|
|
@@ -196,8 +191,6 @@ class IndexerService {
|
|
|
196
191
|
/**
|
|
197
192
|
* Recursively gets all .ts files in a directory.
|
|
198
193
|
* Returns RELATIVE paths (e.g., 'src/users/users.service.ts') to ensure consistency in DB.
|
|
199
|
-
* @param dir - The directory to search in.
|
|
200
|
-
* @param fileList - An accumulator for the list of files found.
|
|
201
194
|
*/
|
|
202
195
|
getAllFiles(dir, fileList = []) {
|
|
203
196
|
const files = fs.readdirSync(dir);
|
|
@@ -3,25 +3,17 @@ interface SearchResult {
|
|
|
3
3
|
chunk: ProcessedChunk;
|
|
4
4
|
score: number;
|
|
5
5
|
}
|
|
6
|
-
/**
|
|
7
|
-
* @description Service responsible for retrieving relevant code context from the codebase using vector search and dependency graph analysis.
|
|
8
|
-
*/
|
|
9
6
|
export declare class RetrieverService {
|
|
10
7
|
private db;
|
|
11
8
|
/**
|
|
12
9
|
* Searches the codebase using Vector Embeddings (Cosine Similarity).
|
|
13
|
-
* @param query - The natural language query
|
|
14
|
-
* @param limit -
|
|
15
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for the search and ensures consistency with how paths were indexed. Defaults to process.cwd().
|
|
16
|
-
* @returns {Promise<SearchResult[]>} A promise that resolves to an array of search results, each containing a code chunk and its relevance score.
|
|
10
|
+
* @param query - The natural language query.
|
|
11
|
+
* @param limit - Max chunks to retrieve.
|
|
17
12
|
*/
|
|
18
|
-
query(query: string, limit?: number
|
|
13
|
+
query(query: string, limit?: number): Promise<SearchResult[]>;
|
|
19
14
|
/**
|
|
20
15
|
* Retrieves the 'Graph Dependencies' for a specific file from the DB.
|
|
21
16
|
* This allows the Agent to know what other files are related (DTOs, Interfaces).
|
|
22
|
-
* @param sourcePath - The path to the source file (expected to be relative to projectRoot).
|
|
23
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for interpreting paths. Defaults to process.cwd().
|
|
24
|
-
* @returns {string[]} An array of paths representing the dependencies of the source file.
|
|
25
17
|
*/
|
|
26
18
|
private getDependencies;
|
|
27
19
|
/**
|
|
@@ -30,10 +22,7 @@ export declare class RetrieverService {
|
|
|
30
22
|
* 1. The matched code snippets (Vector Search).
|
|
31
23
|
* 2. The file's dependencies (Graph Search).
|
|
32
24
|
* 3. Explicit File Paths to encourage using 'read_file'.
|
|
33
|
-
* @param query - The search query.
|
|
34
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for the search and ensures consistency with how paths were indexed. Defaults to process.cwd().
|
|
35
|
-
* @returns {Promise<string>} A promise that resolves to the formatted context report string.
|
|
36
25
|
*/
|
|
37
|
-
getContextForLLM(query: string
|
|
26
|
+
getContextForLLM(query: string): Promise<string>;
|
|
38
27
|
}
|
|
39
28
|
export {};
|
|
@@ -32,89 +32,68 @@ var __importStar = (this && this.__importStar) || (function () {
|
|
|
32
32
|
return result;
|
|
33
33
|
};
|
|
34
34
|
})();
|
|
35
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
-
};
|
|
38
35
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
36
|
exports.RetrieverService = void 0;
|
|
40
37
|
const db_1 = require("../state/db");
|
|
41
38
|
const provider_1 = require("../llm/provider");
|
|
42
39
|
const math_1 = require("./math");
|
|
43
40
|
const path = __importStar(require("path"));
|
|
44
|
-
const chalk_1 = __importDefault(require("chalk")); // Import chalk for colored logs
|
|
45
|
-
const log = {
|
|
46
|
-
tool: (msg) => console.log(chalk_1.default.yellow('π οΈ [TOOL]: ') + msg),
|
|
47
|
-
error: (msg) => console.log(chalk_1.default.red('β [ERR]: ') + msg),
|
|
48
|
-
};
|
|
49
|
-
/**
|
|
50
|
-
* @description Service responsible for retrieving relevant code context from the codebase using vector search and dependency graph analysis.
|
|
51
|
-
*/
|
|
52
41
|
class RetrieverService {
|
|
53
42
|
constructor() {
|
|
54
43
|
this.db = db_1.AgentDB.getInstance();
|
|
55
44
|
}
|
|
56
45
|
/**
|
|
57
46
|
* Searches the codebase using Vector Embeddings (Cosine Similarity).
|
|
58
|
-
* @param query - The natural language query
|
|
59
|
-
* @param limit -
|
|
60
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for the search and ensures consistency with how paths were indexed. Defaults to process.cwd().
|
|
61
|
-
* @returns {Promise<SearchResult[]>} A promise that resolves to an array of search results, each containing a code chunk and its relevance score.
|
|
47
|
+
* @param query - The natural language query.
|
|
48
|
+
* @param limit - Max chunks to retrieve.
|
|
62
49
|
*/
|
|
63
|
-
async query(query, limit = 5
|
|
64
|
-
log
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
const
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
log.tool(`RetrieverService.query: Found ${scoredChunks.length} raw chunks.`);
|
|
87
|
-
return scoredChunks.sort((a, b) => b.score - a.score).slice(0, limit);
|
|
88
|
-
}
|
|
89
|
-
catch (error) {
|
|
90
|
-
log.error(`RetrieverService.query: Error during search: ${error.message}`);
|
|
91
|
-
throw new Error(`Failed to perform codebase search: ${error.message}`);
|
|
92
|
-
}
|
|
50
|
+
async query(query, limit = 5) {
|
|
51
|
+
console.log(`π [RAG] Embedding Query: "${query}"...`);
|
|
52
|
+
const embeddingModel = provider_1.LLMProvider.getEmbeddingsModel();
|
|
53
|
+
const queryVector = await embeddingModel.embedQuery(query);
|
|
54
|
+
const stmt = this.db.prepare('SELECT * FROM code_chunks');
|
|
55
|
+
const rows = stmt.all();
|
|
56
|
+
const scoredChunks = rows.map((row) => {
|
|
57
|
+
const vector = JSON.parse(row.vector_json);
|
|
58
|
+
const score = (0, math_1.cosineSimilarity)(queryVector, vector);
|
|
59
|
+
const metadata = JSON.parse(row.metadata);
|
|
60
|
+
return {
|
|
61
|
+
score,
|
|
62
|
+
chunk: {
|
|
63
|
+
id: row.id,
|
|
64
|
+
type: row.chunk_type,
|
|
65
|
+
content: row.content,
|
|
66
|
+
metadata: metadata,
|
|
67
|
+
// Ensure filePath is recovered from the DB row or metadata
|
|
68
|
+
filePath: row.file_path || metadata.filePath,
|
|
69
|
+
},
|
|
70
|
+
};
|
|
71
|
+
});
|
|
72
|
+
return scoredChunks.sort((a, b) => b.score - a.score).slice(0, limit);
|
|
93
73
|
}
|
|
94
74
|
/**
|
|
95
75
|
* Retrieves the 'Graph Dependencies' for a specific file from the DB.
|
|
96
76
|
* This allows the Agent to know what other files are related (DTOs, Interfaces).
|
|
97
|
-
* @param sourcePath - The path to the source file (expected to be relative to projectRoot).
|
|
98
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for interpreting paths. Defaults to process.cwd().
|
|
99
|
-
* @returns {string[]} An array of paths representing the dependencies of the source file.
|
|
100
77
|
*/
|
|
101
|
-
getDependencies(sourcePath
|
|
102
|
-
//
|
|
103
|
-
//
|
|
78
|
+
getDependencies(sourcePath) {
|
|
79
|
+
// 1. IMPORTANTE: Normalizar la ruta para que coincida con lo guardado en DB
|
|
80
|
+
// Esto convierte "src\module\..." a "src/module/..."
|
|
104
81
|
const normalizedPath = sourcePath.split(path.sep).join('/');
|
|
105
82
|
try {
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
83
|
+
// 2. Consultar la tabla dependency_graph que definiste en AgentDB
|
|
84
|
+
// Buscamos todo lo que este archivo (source) importa (target)
|
|
85
|
+
const stmt = this.db.prepare(`
|
|
86
|
+
SELECT target
|
|
87
|
+
FROM dependency_graph
|
|
88
|
+
WHERE source = ? OR source = ?
|
|
110
89
|
`);
|
|
111
|
-
//
|
|
90
|
+
// Probamos con la ruta normalizada y la original por si acaso
|
|
112
91
|
const results = stmt.all(normalizedPath, sourcePath);
|
|
113
|
-
|
|
92
|
+
// 3. Devolver solo los strings de los targets
|
|
114
93
|
return results.map((row) => row.target);
|
|
115
94
|
}
|
|
116
95
|
catch (error) {
|
|
117
|
-
|
|
96
|
+
console.error(`Error fetching dependencies for ${sourcePath}:`, error);
|
|
118
97
|
return [];
|
|
119
98
|
}
|
|
120
99
|
}
|
|
@@ -124,59 +103,54 @@ class RetrieverService {
|
|
|
124
103
|
* 1. The matched code snippets (Vector Search).
|
|
125
104
|
* 2. The file's dependencies (Graph Search).
|
|
126
105
|
* 3. Explicit File Paths to encourage using 'read_file'.
|
|
127
|
-
* @param query - The search query.
|
|
128
|
-
* @param projectRoot - Optional. The project root directory. This parameter provides context for the search and ensures consistency with how paths were indexed. Defaults to process.cwd().
|
|
129
|
-
* @returns {Promise<string>} A promise that resolves to the formatted context report string.
|
|
130
106
|
*/
|
|
131
|
-
async getContextForLLM(query
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
}
|
|
147
|
-
filesMap.get(filePath)?.chunks.push(res.chunk);
|
|
148
|
-
}
|
|
149
|
-
let output = `π **RAG ANALYSIS REPORT**\n`;
|
|
150
|
-
output += `Query: "${query}"\n`;
|
|
151
|
-
output += `Found ${filesMap.size} relevant files.\n\n`;
|
|
152
|
-
filesMap.forEach((fileCtx) => {
|
|
153
|
-
const relevancePct = (fileCtx.relevance * 100).toFixed(1);
|
|
154
|
-
output += `=================================================================\n`;
|
|
155
|
-
output += `π **FILE:** ${fileCtx.filePath}\n`;
|
|
156
|
-
output += `π **RELEVANCE:** ${relevancePct}%\n`;
|
|
157
|
-
if (fileCtx.imports.length > 0) {
|
|
158
|
-
output += `π **DEPENDENCIES (Imports):**\n`;
|
|
159
|
-
fileCtx.imports
|
|
160
|
-
.slice(0, 5)
|
|
161
|
-
.forEach((imp) => (output += ` - ${imp}\n`));
|
|
162
|
-
if (fileCtx.imports.length > 5)
|
|
163
|
-
output += ` - (...and ${fileCtx.imports.length - 5} more)\n`;
|
|
164
|
-
}
|
|
165
|
-
output += `\nπ **CODE SNIPPETS:**\n`;
|
|
166
|
-
fileCtx.chunks.forEach((chunk) => {
|
|
167
|
-
output += ` --- [${chunk.metadata.methodName || 'Class Structure'}] ---\n`;
|
|
168
|
-
output += `${chunk.content.trim()}\n\n`;
|
|
107
|
+
async getContextForLLM(query) {
|
|
108
|
+
const results = await this.query(query, 4);
|
|
109
|
+
// Group chunks by File to provide a structured view
|
|
110
|
+
const filesMap = new Map();
|
|
111
|
+
for (const res of results) {
|
|
112
|
+
const path = res.chunk.filePath || 'unknown';
|
|
113
|
+
// console.log(res);
|
|
114
|
+
// console.log(path);
|
|
115
|
+
// console.log(this.getDependencies(path));
|
|
116
|
+
if (!filesMap.has(path)) {
|
|
117
|
+
filesMap.set(path, {
|
|
118
|
+
filePath: path,
|
|
119
|
+
relevance: res.score,
|
|
120
|
+
chunks: [],
|
|
121
|
+
imports: this.getDependencies(path), // <--- GRAPH MAGIC πΈοΈ
|
|
169
122
|
});
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
});
|
|
173
|
-
log.tool(`RetrieverService.getContextForLLM: Successfully generated context report.`);
|
|
174
|
-
return output;
|
|
175
|
-
}
|
|
176
|
-
catch (error) {
|
|
177
|
-
log.error(`RetrieverService.getContextForLLM: Error generating context: ${error.message}`);
|
|
178
|
-
throw new Error(`Failed to generate context for LLM: ${error.message}`);
|
|
123
|
+
}
|
|
124
|
+
filesMap.get(path)?.chunks.push(res.chunk);
|
|
179
125
|
}
|
|
126
|
+
// Build the formatted string
|
|
127
|
+
let output = `π **RAG ANALYSIS REPORT**\n`;
|
|
128
|
+
output += `Query: "${query}"\n`;
|
|
129
|
+
output += `Found ${filesMap.size} relevant files.\n\n`;
|
|
130
|
+
filesMap.forEach((fileCtx) => {
|
|
131
|
+
const relevancePct = (fileCtx.relevance * 100).toFixed(1);
|
|
132
|
+
output += `=================================================================\n`;
|
|
133
|
+
output += `π **FILE:** ${fileCtx.filePath}\n`;
|
|
134
|
+
output += `π **RELEVANCE:** ${relevancePct}%\n`;
|
|
135
|
+
if (fileCtx.imports.length > 0) {
|
|
136
|
+
output += `π **DEPENDENCIES (Imports):**\n`;
|
|
137
|
+
// Show top 5 imports to give context on DTOs/Entities used
|
|
138
|
+
fileCtx.imports
|
|
139
|
+
.slice(0, 5)
|
|
140
|
+
.forEach((imp) => (output += ` - ${imp}\n`));
|
|
141
|
+
if (fileCtx.imports.length > 5)
|
|
142
|
+
output += ` - (...and ${fileCtx.imports.length - 5} more)\n`;
|
|
143
|
+
}
|
|
144
|
+
output += `\nπ **CODE SNIPPETS:**\n`;
|
|
145
|
+
fileCtx.chunks.forEach((chunk) => {
|
|
146
|
+
output += ` --- [${chunk.metadata.methodName || 'Class Structure'}] ---\n`;
|
|
147
|
+
output += `${chunk.content.trim()}\n\n`;
|
|
148
|
+
});
|
|
149
|
+
output += `π‘ **AGENT HINT:** To edit this file or see full imports, run: read_file("${fileCtx.filePath}")\n`;
|
|
150
|
+
output += `=================================================================\n\n`;
|
|
151
|
+
});
|
|
152
|
+
// console.log(output);
|
|
153
|
+
return output;
|
|
180
154
|
}
|
|
181
155
|
}
|
|
182
156
|
exports.RetrieverService = RetrieverService;
|
|
@@ -1,64 +1,59 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
2
|
/**
|
|
3
|
-
*
|
|
4
|
-
*
|
|
5
|
-
* @param
|
|
6
|
-
* @param
|
|
7
|
-
* @
|
|
3
|
+
* Tool for safely writing content to a file on the real disk.
|
|
4
|
+
* It creates a backup before writing and then triggers a project re-indexing.
|
|
5
|
+
* @param {object} params - The parameters for the tool.
|
|
6
|
+
* @param {string} params.filePath - The relative path where the file should be saved.
|
|
7
|
+
* @param {string} params.content - The content to write to the file.
|
|
8
|
+
* @returns {Promise<string>} A message indicating success or failure.
|
|
8
9
|
*/
|
|
9
10
|
export declare const safeWriteFileTool: import("@langchain/core/tools").DynamicStructuredTool<z.ZodObject<{
|
|
10
11
|
filePath: z.ZodString;
|
|
11
12
|
content: z.ZodString;
|
|
12
|
-
projectRoot: z.ZodOptional<z.ZodString>;
|
|
13
13
|
}, z.core.$strip>, {
|
|
14
14
|
filePath: string;
|
|
15
15
|
content: string;
|
|
16
|
-
projectRoot?: string | undefined;
|
|
17
16
|
}, {
|
|
18
17
|
filePath: string;
|
|
19
18
|
content: string;
|
|
20
|
-
projectRoot?: string | undefined;
|
|
21
19
|
}, string, "safe_write_file">;
|
|
22
20
|
/**
|
|
23
|
-
*
|
|
24
|
-
* @param
|
|
25
|
-
* @param
|
|
26
|
-
* @returns {Promise<string>}
|
|
21
|
+
* Tool for safely reading the content of a file from the real disk.
|
|
22
|
+
* @param {object} params - The parameters for the tool.
|
|
23
|
+
* @param {string} params.filePath - The relative path of the file to read.
|
|
24
|
+
* @returns {Promise<string>} The content of the file, or an error message if reading fails.
|
|
27
25
|
*/
|
|
28
26
|
export declare const safeReadFileTool: import("@langchain/core/tools").DynamicStructuredTool<z.ZodObject<{
|
|
29
27
|
filePath: z.ZodString;
|
|
30
|
-
projectRoot: z.ZodOptional<z.ZodString>;
|
|
31
28
|
}, z.core.$strip>, {
|
|
32
29
|
filePath: string;
|
|
33
|
-
projectRoot?: string | undefined;
|
|
34
30
|
}, {
|
|
35
31
|
filePath: string;
|
|
36
|
-
projectRoot?: string | undefined;
|
|
37
32
|
}, string, "safe_read_file">;
|
|
38
33
|
/**
|
|
39
|
-
*
|
|
40
|
-
*
|
|
34
|
+
* Tool to query the codebase using semantic search and dependency graph analysis.
|
|
35
|
+
* It's the primary way for the agent to explore and understand the project structure and logic.
|
|
36
|
+
* @param {object} params - The parameters for the tool.
|
|
37
|
+
* @param {string} params.query - A natural language query describing the code or functionality to find.
|
|
38
|
+
* @returns {Promise<string>} A report containing relevant code snippets, file paths, and dependencies.
|
|
41
39
|
*/
|
|
42
40
|
export declare const askCodebaseTool: import("@langchain/core/tools").DynamicStructuredTool<z.ZodObject<{
|
|
43
41
|
query: z.ZodString;
|
|
44
|
-
projectRoot: z.ZodOptional<z.ZodString>;
|
|
45
42
|
}, z.core.$strip>, {
|
|
46
43
|
query: string;
|
|
47
|
-
projectRoot?: string | undefined;
|
|
48
44
|
}, {
|
|
49
45
|
query: string;
|
|
50
|
-
projectRoot?: string | undefined;
|
|
51
46
|
}, string, "ask_codebase">;
|
|
52
47
|
/**
|
|
53
|
-
*
|
|
54
|
-
*
|
|
48
|
+
* Tool to run the TypeScript compiler (tsc) for type checking.
|
|
49
|
+
* This is crucial for maintaining code quality and catching errors early.
|
|
50
|
+
* @returns {Promise<string>} A message indicating whether the integrity check passed or failed, including compiler output on failure.
|
|
55
51
|
*/
|
|
56
52
|
export declare const integrityCheckTool: import("@langchain/core/tools").DynamicStructuredTool<z.ZodObject<{}, z.core.$strip>, Record<string, never>, Record<string, never>, string, "run_integrity_check">;
|
|
57
53
|
/**
|
|
58
|
-
*
|
|
59
|
-
*
|
|
60
|
-
*
|
|
61
|
-
*
|
|
62
|
-
* * @returns {Promise<string>} A confirmation message or error details.
|
|
54
|
+
* Tool to refresh the project's index, forcing a re-scan and re-vectorization of all files.
|
|
55
|
+
* This is useful when the agent needs to be absolutely sure it's working with the latest code,
|
|
56
|
+
* especially after significant changes or if the automatic indexing seems to be lagging.
|
|
57
|
+
* @returns {Promise<string>} A confirmation message or details about any errors encountered during indexing.
|
|
63
58
|
*/
|
|
64
59
|
export declare const refreshIndexTool: import("@langchain/core/tools").DynamicStructuredTool<z.ZodObject<{}, z.core.$strip>, Record<string, never>, Record<string, never>, string, "refresh_project_index">;
|
package/dist/core/tools/tools.js
CHANGED
|
@@ -52,127 +52,146 @@ const log = {
|
|
|
52
52
|
tool: (msg) => console.log(chalk_1.default.yellow('π οΈ [TOOL]: ') + msg),
|
|
53
53
|
sys: (msg) => console.log(chalk_1.default.gray('βοΈ [SYS]: ') + msg),
|
|
54
54
|
error: (msg) => console.log(chalk_1.default.red('β [ERR]: ') + msg),
|
|
55
|
+
debug: (msg) => console.log(chalk_1.default.magenta('π [DEBUG]: ') + msg), // Added for debugging
|
|
55
56
|
};
|
|
56
57
|
/**
|
|
57
|
-
*
|
|
58
|
+
* Creates a backup of a file before it is modified.
|
|
59
|
+
* The backup is stored in the .agent/backups directory with a timestamp.
|
|
60
|
+
* @param {string} filePath - The relative path of the file to back up.
|
|
58
61
|
*/
|
|
59
|
-
const createBackup = (filePath
|
|
60
|
-
|
|
61
|
-
|
|
62
|
+
const createBackup = (filePath) => {
|
|
63
|
+
log.debug(`Starting backup process for file: ${filePath}`);
|
|
64
|
+
const rootDir = process.cwd();
|
|
65
|
+
const backupDir = path.join(rootDir, '.agent', 'backups');
|
|
66
|
+
log.debug(`Backup directory resolved to: ${backupDir}`);
|
|
67
|
+
if (!fs.existsSync(backupDir)) {
|
|
68
|
+
log.debug(`Backup directory does not exist. Creating: ${backupDir}`);
|
|
62
69
|
fs.mkdirSync(backupDir, { recursive: true });
|
|
63
|
-
|
|
70
|
+
}
|
|
71
|
+
const realPath = path.resolve(rootDir, filePath);
|
|
72
|
+
log.debug(`Resolved real path for backup: ${realPath}`);
|
|
64
73
|
if (fs.existsSync(realPath)) {
|
|
65
74
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
66
75
|
const filename = path.basename(realPath);
|
|
67
76
|
const backupPath = path.join(backupDir, `${timestamp}_${filename}.bak`);
|
|
77
|
+
log.debug(`Attempting to copy ${realPath} to ${backupPath}`);
|
|
68
78
|
fs.copyFileSync(realPath, backupPath);
|
|
79
|
+
log.sys(`Backup created for ${filePath} at ${backupPath}`);
|
|
80
|
+
}
|
|
81
|
+
else {
|
|
82
|
+
log.debug(`File does not exist, no backup needed: ${realPath}`);
|
|
69
83
|
}
|
|
70
84
|
};
|
|
71
85
|
/**
|
|
72
|
-
*
|
|
73
|
-
*
|
|
74
|
-
* @param
|
|
75
|
-
* @param
|
|
76
|
-
* @
|
|
86
|
+
* Tool for safely writing content to a file on the real disk.
|
|
87
|
+
* It creates a backup before writing and then triggers a project re-indexing.
|
|
88
|
+
* @param {object} params - The parameters for the tool.
|
|
89
|
+
* @param {string} params.filePath - The relative path where the file should be saved.
|
|
90
|
+
* @param {string} params.content - The content to write to the file.
|
|
91
|
+
* @returns {Promise<string>} A message indicating success or failure.
|
|
77
92
|
*/
|
|
78
|
-
exports.safeWriteFileTool = (0, tools_1.tool)(async ({ filePath, content
|
|
93
|
+
exports.safeWriteFileTool = (0, tools_1.tool)(async ({ filePath, content }) => {
|
|
94
|
+
log.debug(`safe_write_file called with filePath: ${filePath}`);
|
|
79
95
|
try {
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
log.error(`safe_write_file: Access denied. Target path "${canonicalTargetPath}" is outside the project root "${canonicalProjectPath}".`);
|
|
89
|
-
return 'β Error: Access denied. Attempted to write outside the project root.';
|
|
96
|
+
const rootDir = process.cwd();
|
|
97
|
+
log.debug(`Current working directory: ${rootDir}`);
|
|
98
|
+
const targetPath = path.resolve(rootDir, filePath);
|
|
99
|
+
log.debug(`Resolved target path: ${targetPath}`);
|
|
100
|
+
// Security check: Ensure the path is within the project root
|
|
101
|
+
if (!targetPath.startsWith(rootDir)) {
|
|
102
|
+
log.error(`Attempted write outside root directory: ${filePath}. Resolved path: ${targetPath}`);
|
|
103
|
+
return 'β Error: Access denied. Cannot write outside the project root.';
|
|
90
104
|
}
|
|
91
|
-
// Ensure the directory exists
|
|
92
105
|
const dir = path.dirname(targetPath);
|
|
106
|
+
log.debug(`Directory for target path: ${dir}`);
|
|
93
107
|
if (!fs.existsSync(dir)) {
|
|
94
|
-
log.
|
|
108
|
+
log.debug(`Directory does not exist. Creating: ${dir}`);
|
|
95
109
|
fs.mkdirSync(dir, { recursive: true });
|
|
110
|
+
log.sys(`Created directory: ${dir}`);
|
|
96
111
|
}
|
|
97
|
-
// Create backup
|
|
98
|
-
|
|
99
|
-
// Write the file
|
|
112
|
+
createBackup(filePath); // Create backup before writing
|
|
113
|
+
log.debug(`Writing content to file: ${targetPath}`);
|
|
100
114
|
fs.writeFileSync(targetPath, content, 'utf-8');
|
|
101
|
-
log.
|
|
102
|
-
//
|
|
103
|
-
log.sys(`
|
|
115
|
+
log.sys(`File saved to REAL DISK: ${filePath}`);
|
|
116
|
+
// Trigger re-indexing after a successful write
|
|
117
|
+
log.sys(`Initiating re-index for: ${filePath}`);
|
|
104
118
|
const indexer = new indexer_1.IndexerService();
|
|
105
|
-
//
|
|
106
|
-
indexer.indexProject(
|
|
119
|
+
// Run indexing asynchronously, log errors but don't block the write confirmation
|
|
120
|
+
indexer.indexProject().catch((err) => {
|
|
121
|
+
log.error(`Failed to re-index after write for ${filePath}: ${err.message}`);
|
|
122
|
+
});
|
|
107
123
|
return `✅ File saved to REAL DISK: ${filePath}`;
|
|
108
124
|
}
|
|
109
125
|
catch (error) {
|
|
110
|
-
log.error(`
|
|
111
|
-
return `β Error: ${error.message}`;
|
|
126
|
+
log.error(`Failed to write file ${filePath}: ${error.message}`);
|
|
127
|
+
return `β Error writing file: ${error.message}`;
|
|
112
128
|
}
|
|
113
129
|
}, {
|
|
114
130
|
name: 'safe_write_file',
|
|
115
|
-
description: 'WRITES code to the REAL local disk. Creates a backup automatically.
|
|
131
|
+
description: 'WRITES code to the REAL local disk. Creates a backup automatically.',
|
|
116
132
|
schema: zod_1.z.object({
|
|
117
|
-
filePath: zod_1.z.string().describe('Relative path
|
|
118
|
-
content: zod_1.z.string().describe('Full file content
|
|
119
|
-
projectRoot: zod_1.z.string().optional().describe('Optional. The project root directory. Defaults to process.cwd().'),
|
|
133
|
+
filePath: zod_1.z.string().describe('Relative path (e.g., src/app.service.ts)'),
|
|
134
|
+
content: zod_1.z.string().describe('Full file content'),
|
|
120
135
|
}),
|
|
121
136
|
});
|
|
122
|
-
// --- TOOL 2: READ FILE (Manual) ---
|
|
123
137
|
/**
|
|
124
|
-
*
|
|
125
|
-
* @param
|
|
126
|
-
* @param
|
|
127
|
-
* @returns {Promise<string>}
|
|
138
|
+
* Tool for safely reading the content of a file from the real disk.
|
|
139
|
+
* @param {object} params - The parameters for the tool.
|
|
140
|
+
* @param {string} params.filePath - The relative path of the file to read.
|
|
141
|
+
* @returns {Promise<string>} The content of the file, or an error message if reading fails.
|
|
128
142
|
*/
|
|
129
|
-
exports.safeReadFileTool = (0, tools_1.tool)(async ({ filePath
|
|
143
|
+
exports.safeReadFileTool = (0, tools_1.tool)(async ({ filePath }) => {
|
|
144
|
+
log.debug(`safe_read_file called with filePath: ${filePath}`);
|
|
130
145
|
try {
|
|
131
|
-
const
|
|
132
|
-
log.
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
// Security Check: Ensure the target path is within the project root
|
|
137
|
-
if (!canonicalTargetPath.startsWith(canonicalProjectPath)) {
|
|
138
|
-
log.error(`safe_read_file: Access denied. Target path "${canonicalTargetPath}" is outside the project root "${canonicalProjectPath}".`);
|
|
139
|
-
return 'β Error: Access denied. Attempted to read outside the project root.';
|
|
140
|
-
}
|
|
146
|
+
const rootDir = process.cwd();
|
|
147
|
+
log.debug(`Current working directory: ${rootDir}`);
|
|
148
|
+
const targetPath = path.resolve(rootDir, filePath);
|
|
149
|
+
log.debug(`Resolved target path: ${targetPath}`);
|
|
150
|
+
// Security check: Ensure the path is within the project root
|
|
141
151
|
if (!fs.existsSync(targetPath)) {
|
|
142
|
-
log.error(`
|
|
143
|
-
return
|
|
152
|
+
log.error(`File not found for reading: ${filePath}. Resolved path: ${targetPath}`);
|
|
153
|
+
return `β File not found: ${filePath}`;
|
|
154
|
+
}
|
|
155
|
+
if (!targetPath.startsWith(rootDir)) {
|
|
156
|
+
log.error(`Attempted read outside root directory: ${filePath}. Resolved path: ${targetPath}`);
|
|
157
|
+
return 'β Error: Access denied. Cannot read outside the project root.';
|
|
144
158
|
}
|
|
159
|
+
log.debug(`Reading file content from: ${targetPath}`);
|
|
145
160
|
const content = fs.readFileSync(targetPath, 'utf-8');
|
|
146
|
-
log.
|
|
161
|
+
log.sys(`File read successfully: ${filePath}`);
|
|
147
162
|
return content;
|
|
148
163
|
}
|
|
149
164
|
catch (e) {
|
|
150
|
-
log.error(`
|
|
151
|
-
return
|
|
165
|
+
log.error(`Failed to read file ${filePath}: ${e.message}`);
|
|
166
|
+
return `β Error reading file: ${e.message}`;
|
|
152
167
|
}
|
|
153
168
|
}, {
|
|
154
169
|
name: 'safe_read_file',
|
|
155
|
-
description: 'READS code from the REAL local disk.
|
|
156
|
-
schema: zod_1.z.object({
|
|
157
|
-
filePath: zod_1.z.string().describe('Relative path to the file (e.g., src/app.service.ts).'),
|
|
158
|
-
projectRoot: zod_1.z.string().optional().describe('Optional. The project root directory. Defaults to process.cwd().')
|
|
159
|
-
}),
|
|
170
|
+
description: 'READS code from the REAL local disk.',
|
|
171
|
+
schema: zod_1.z.object({ filePath: zod_1.z.string() }),
|
|
160
172
|
});
|
|
161
173
|
/**
|
|
162
|
-
*
|
|
163
|
-
*
|
|
174
|
+
* Tool to query the codebase using semantic search and dependency graph analysis.
|
|
175
|
+
* It's the primary way for the agent to explore and understand the project structure and logic.
|
|
176
|
+
* @param {object} params - The parameters for the tool.
|
|
177
|
+
* @param {string} params.query - A natural language query describing the code or functionality to find.
|
|
178
|
+
* @returns {Promise<string>} A report containing relevant code snippets, file paths, and dependencies.
|
|
164
179
|
*/
|
|
165
|
-
exports.askCodebaseTool = (0, tools_1.tool)(async ({ query
|
|
180
|
+
exports.askCodebaseTool = (0, tools_1.tool)(async ({ query }) => {
|
|
181
|
+
log.debug(`ask_codebase called with query: "${query}"`);
|
|
166
182
|
try {
|
|
167
|
-
log.tool(`
|
|
183
|
+
log.tool(`Querying codebase: "${query}"`);
|
|
168
184
|
const retriever = new retriever_1.RetrieverService();
|
|
169
|
-
|
|
170
|
-
|
|
185
|
+
log.debug('RetrieverService instantiated.');
|
|
186
|
+
const context = await retriever.getContextForLLM(query);
|
|
187
|
+
log.tool(`Codebase query complete for: "${query}"`);
|
|
188
|
+
log.debug(`Context retrieved for query "${query}".`);
|
|
171
189
|
return context;
|
|
172
190
|
}
|
|
173
191
|
catch (error) {
|
|
174
|
-
|
|
175
|
-
|
|
192
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
193
|
+
log.error(`Error during codebase query "${query}": ${errorMessage}`);
|
|
194
|
+
return `β Error querying codebase: ${errorMessage}`;
|
|
176
195
|
}
|
|
177
196
|
}, {
|
|
178
197
|
name: 'ask_codebase',
|
|
@@ -185,27 +204,35 @@ exports.askCodebaseTool = (0, tools_1.tool)(async ({ query, projectRoot = proces
|
|
|
185
204
|
query: zod_1.z
|
|
186
205
|
.string()
|
|
187
206
|
.describe("A natural language query describing the logic, DTO, or functionality you are looking for. (e.g., 'How is the RefundEntity defined?', 'Show me the auth guard')"),
|
|
188
|
-
projectRoot: zod_1.z.string().optional().describe('Optional. The project root directory. Defaults to process.cwd().'),
|
|
189
207
|
}),
|
|
190
208
|
});
|
|
191
209
|
/**
|
|
192
|
-
*
|
|
193
|
-
*
|
|
210
|
+
* Tool to run the TypeScript compiler (tsc) for type checking.
|
|
211
|
+
* This is crucial for maintaining code quality and catching errors early.
|
|
212
|
+
* @returns {Promise<string>} A message indicating whether the integrity check passed or failed, including compiler output on failure.
|
|
194
213
|
*/
|
|
195
214
|
exports.integrityCheckTool = (0, tools_1.tool)(async () => {
|
|
215
|
+
const rootDir = process.cwd();
|
|
216
|
+
log.tool('Running TypeScript integrity check...');
|
|
217
|
+
log.debug(`Integrity check running in directory: ${rootDir}`);
|
|
196
218
|
try {
|
|
197
|
-
|
|
198
|
-
log.
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
219
|
+
// 'tsc --noEmit' checks types without generating JS files. It's fast and safe.
|
|
220
|
+
log.debug('Executing command: npx tsc --noEmit');
|
|
221
|
+
const { stdout, stderr } = await execAsync('npx tsc --noEmit', { cwd: rootDir });
|
|
222
|
+
if (stderr) {
|
|
223
|
+
// Log stderr as an error even if stdout indicates success, as tsc might output warnings here
|
|
224
|
+
log.error(`TypeScript integrity check produced stderr output:\n${stderr}`);
|
|
225
|
+
}
|
|
226
|
+
log.tool('TypeScript integrity check PASSED.');
|
|
227
|
+
log.debug(`Integrity check stdout:\n${stdout}`);
|
|
228
|
+
// Include stdout in the success message for completeness, though it's usually empty on success.
|
|
203
229
|
return `✅ INTEGRITY CHECK PASSED. The codebase is strictly typed and compiles correctly.\n${stdout}`;
|
|
204
230
|
}
|
|
205
231
|
catch (error) {
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
232
|
+
// Return the exact compiler error output so the agent can attempt to fix it
|
|
233
|
+
const errorMessage = error.stdout || error.stderr || error.message || 'Unknown error';
|
|
234
|
+
log.error(`TypeScript integrity check FAILED.\n${errorMessage}`);
|
|
235
|
+
return `β INTEGRITY CHECK FAILED. You must fix these TypeScript errors before finishing:\n${errorMessage}`;
|
|
209
236
|
}
|
|
210
237
|
}, {
|
|
211
238
|
name: 'run_integrity_check',
|
|
@@ -214,32 +241,31 @@ exports.integrityCheckTool = (0, tools_1.tool)(async () => {
|
|
|
214
241
|
schema: zod_1.z.object({}),
|
|
215
242
|
});
|
|
216
243
|
/**
|
|
217
|
-
*
|
|
218
|
-
*
|
|
219
|
-
*
|
|
220
|
-
*
|
|
221
|
-
* * @returns {Promise<string>} A confirmation message or error details.
|
|
244
|
+
* Tool to refresh the project's index, forcing a re-scan and re-vectorization of all files.
|
|
245
|
+
* This is useful when the agent needs to be absolutely sure it's working with the latest code,
|
|
246
|
+
* especially after significant changes or if the automatic indexing seems to be lagging.
|
|
247
|
+
* @returns {Promise<string>} A confirmation message or details about any errors encountered during indexing.
|
|
222
248
|
*/
|
|
223
249
|
exports.refreshIndexTool = (0, tools_1.tool)(async () => {
|
|
224
250
|
log.sys('π Starting full project re-indexing...');
|
|
225
251
|
try {
|
|
226
|
-
log.tool('refreshIndexTool: Starting full project re-indexing...'); // ADDED LOG
|
|
227
|
-
// Start the expensive operation
|
|
228
252
|
const indexer = new indexer_1.IndexerService();
|
|
229
|
-
|
|
230
|
-
|
|
253
|
+
log.debug('IndexerService instantiated.');
|
|
254
|
+
// Execute the indexing process.
|
|
255
|
+
await indexer.indexProject(); // Await the completion of the indexing process
|
|
231
256
|
log.sys('✅ Re-indexing completed successfully.');
|
|
257
|
+
log.debug('Project re-indexing process finished.');
|
|
232
258
|
return '✅ Index successfully updated. I now have access to the latest code version.';
|
|
233
259
|
}
|
|
234
260
|
catch (error) {
|
|
235
|
-
//
|
|
261
|
+
// Provide a detailed error message if indexing fails.
|
|
236
262
|
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
237
263
|
log.error(`β Indexing failed: ${errorMessage}`);
|
|
264
|
+
log.debug(`Error details during indexing: ${errorMessage}`);
|
|
238
265
|
return `β Critical error while attempting to index the project: ${errorMessage}. Please try again or check the logs.`;
|
|
239
266
|
}
|
|
240
267
|
}, {
|
|
241
268
|
name: 'refresh_project_index',
|
|
242
|
-
// CRITICAL IMPROVEMENT: Instruction-oriented description for the LLM
|
|
243
269
|
description: 'Triggers a forced, full re-indexing of the project codebase. This function is optimized to only index changed files by comparing hashes. ' +
|
|
244
270
|
'USE THIS TOOL ONLY WHEN: ' +
|
|
245
271
|
'1) The user explicitly states that files have changed. ' +
|
package/package.json
CHANGED