renote-server 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__tests__/auth.test.js +49 -0
- package/dist/__tests__/watcher.test.js +58 -0
- package/dist/claude/sessionBrowser.js +689 -0
- package/dist/claude/watcher.js +242 -0
- package/dist/config.js +17 -0
- package/dist/files/browser.js +127 -0
- package/dist/files/reader.js +159 -0
- package/dist/files/search.js +124 -0
- package/dist/git/gitHandler.js +95 -0
- package/dist/git/gitService.js +237 -0
- package/dist/git/index.js +8 -0
- package/dist/http/server.js +28 -0
- package/dist/index.js +77 -0
- package/dist/ssh/index.js +9 -0
- package/dist/ssh/sshHandler.js +205 -0
- package/dist/ssh/sshManager.js +329 -0
- package/dist/terminal/index.js +11 -0
- package/dist/terminal/localTerminalHandler.js +144 -0
- package/dist/terminal/localTerminalManager.js +465 -0
- package/dist/terminal/terminalWebSocket.js +128 -0
- package/dist/types.js +2 -0
- package/dist/utils/logger.js +42 -0
- package/dist/websocket/auth.js +18 -0
- package/dist/websocket/server.js +512 -0
- package/package.json +64 -0

package/dist/claude/watcher.js
ADDED
@@ -0,0 +1,242 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClaudeWatcher = void 0;
+const events_1 = require("events");
+const chokidar_1 = __importDefault(require("chokidar"));
+const promises_1 = require("fs/promises");
+const path_1 = require("path");
+const config_1 = require("../config");
+const logger_1 = require("../utils/logger");
+class ClaudeWatcher extends events_1.EventEmitter {
+    constructor() {
+        super();
+        this.historyWatcher = null;
+        this.sessionWatcher = null;
+        this.lastHistorySize = 0;
+        this.lastSessionSize = 0;
+        this.activeSessionId = null;
+        this.activeProjectPath = null;
+        // Queue mechanism for handling rapid updates
+        this.messageQueue = [];
+        this.isProcessing = false;
+        this.historyPath = (0, path_1.join)(config_1.CONFIG.claudeHome, 'history.jsonl');
+    }
+    async start() {
+        logger_1.logger.info('Starting Claude Code watcher');
+        await this.watchHistory();
+        this.on('user_input', (data) => {
+            if (data.sessionId !== this.activeSessionId) {
+                this.activeSessionId = data.sessionId;
+                this.activeProjectPath = this.projectPathToDir(data.project);
+                this.watchSession();
+            }
+        });
+    }
+    stop() {
+        logger_1.logger.info('Stopping Claude Code watcher');
+        if (this.historyWatcher) {
+            this.historyWatcher.close();
+        }
+        if (this.sessionWatcher) {
+            this.sessionWatcher.close();
+        }
+    }
+    async watchHistory() {
+        this.historyWatcher = chokidar_1.default.watch(this.historyPath, {
+            persistent: true,
+            usePolling: false,
+            awaitWriteFinish: {
+                stabilityThreshold: 100,
+                pollInterval: 50
+            }
+        });
+        this.historyWatcher.on('change', async () => {
+            await this.processHistoryChanges();
+        });
+        this.historyWatcher.on('error', (error) => {
+            logger_1.logger.error('History watcher error:', error);
+        });
+        logger_1.logger.info(`Watching history: ${this.historyPath}`);
+    }
+    async processHistoryChanges() {
+        try {
+            const content = await (0, promises_1.readFile)(this.historyPath, 'utf-8');
+            const lines = content.split('\n').filter(l => l.trim());
+            if (lines.length > this.lastHistorySize) {
+                const newLines = lines.slice(this.lastHistorySize);
+                // Add to queue instead of processing immediately
+                this.messageQueue.push(...newLines.map(line => ({ type: 'history', line })));
+                // Start processing if not already processing
+                if (!this.isProcessing) {
+                    this.isProcessing = true;
+                    await this.processQueue();
+                    this.isProcessing = false;
+                }
+                this.lastHistorySize = lines.length;
+            }
+        }
+        catch (error) {
+            logger_1.logger.error('Error processing history changes:', error);
+        }
+    }
+    projectPathToDir(path) {
+        return path.replace(/\//g, '-').replace(/^-/, '');
+    }
+    watchSession() {
+        if (!this.activeSessionId || !this.activeProjectPath)
+            return;
+        if (this.sessionWatcher) {
+            this.sessionWatcher.close();
+        }
+        const sessionPath = (0, path_1.join)(config_1.CONFIG.claudeHome, 'projects', this.activeProjectPath, `${this.activeSessionId}.jsonl`);
+        this.lastSessionSize = 0;
+        this.sessionWatcher = chokidar_1.default.watch(sessionPath, {
+            persistent: true,
+            usePolling: false,
+            awaitWriteFinish: {
+                stabilityThreshold: 100,
+                pollInterval: 50
+            }
+        });
+        this.sessionWatcher.on('change', async () => {
+            await this.processSessionChanges(sessionPath);
+        });
+        this.sessionWatcher.on('error', (error) => {
+            logger_1.logger.error('Session watcher error:', error);
+        });
+        logger_1.logger.info(`Watching session: ${sessionPath}`);
+    }
+    async processSessionChanges(sessionPath) {
+        try {
+            const content = await (0, promises_1.readFile)(sessionPath, 'utf-8');
+            const lines = content.split('\n').filter(l => l.trim());
+            if (lines.length > this.lastSessionSize) {
+                const newLines = lines.slice(this.lastSessionSize);
+                // Add to queue
+                this.messageQueue.push(...newLines.map(line => ({ type: 'session', line })));
+                // Start processing if not already processing
+                if (!this.isProcessing) {
+                    this.isProcessing = true;
+                    await this.processQueue();
+                    this.isProcessing = false;
+                }
+                this.lastSessionSize = lines.length;
+            }
+        }
+        catch (error) {
+            logger_1.logger.error('Error processing session changes:', error);
+        }
+    }
+    async processQueue() {
+        while (this.messageQueue.length > 0) {
+            const batch = this.messageQueue.splice(0, 10);
+            for (const item of batch) {
+                try {
+                    if (item.type === 'history') {
+                        const entry = JSON.parse(item.line);
+                        this.emit('user_input', {
+                            message: entry.display,
+                            timestamp: entry.timestamp,
+                            sessionId: entry.sessionId,
+                            project: entry.project
+                        });
+                        logger_1.logger.debug(`User input: ${entry.display.substring(0, 50)}...`);
+                    }
+                    else if (item.type === 'session') {
+                        const entry = JSON.parse(item.line);
+                        await this.processSessionEntry(entry);
+                    }
+                }
+                catch (error) {
+                    logger_1.logger.error('Error processing queue item:', error);
+                }
+            }
+            await new Promise(resolve => setTimeout(resolve, 100));
+        }
+    }
+    async processSessionEntry(entry) {
+        switch (entry.type) {
+            case 'assistant':
+                this.handleAssistantMessage(entry);
+                break;
+            case 'system':
+                this.handleSystemMessage(entry);
+                break;
+            case 'progress':
+                this.emit('progress', {
+                    message: entry.data?.message,
+                    timestamp: entry.timestamp
+                });
+                break;
+        }
+    }
+    handleAssistantMessage(entry) {
+        const content = entry.message?.content || [];
+        for (const block of content) {
+            if (block.type === 'text') {
+                this.emit('assistant_message', {
+                    content: block.text,
+                    timestamp: entry.timestamp,
+                    messageId: entry.message.id
+                });
+                logger_1.logger.debug(`Assistant: ${block.text.substring(0, 50)}...`);
+            }
+            else if (block.type === 'tool_use') {
+                this.emit('tool_call', {
+                    toolName: block.name,
+                    toolId: block.id,
+                    input: block.input,
+                    timestamp: entry.timestamp
+                });
+                logger_1.logger.debug(`Tool: ${block.name}`);
+                if (['Edit', 'Write'].includes(block.name)) {
+                    this.handleFileOperation(block, entry.timestamp);
+                }
+            }
+        }
+    }
+    handleSystemMessage(entry) {
+        if (entry.data?.type === 'tool_result') {
+            this.emit('tool_result', {
+                toolId: entry.data.tool_use_id,
+                content: entry.data.content,
+                timestamp: entry.timestamp
+            });
+        }
+    }
+    async handleFileOperation(toolUse, timestamp) {
+        const { name, input } = toolUse;
+        const filePath = input.file_path;
+        if (!filePath)
+            return;
+        try {
+            const oldContent = await (0, promises_1.readFile)(filePath, 'utf-8').catch(() => '');
+            let newContent = '';
+            if (name === 'Edit') {
+                newContent = oldContent.replace(input.old_string, input.new_string);
+            }
+            else if (name === 'Write') {
+                newContent = input.content;
+            }
+            this.emit('file_change', {
+                filePath,
+                operation: name.toLowerCase(),
+                oldContent,
+                newContent,
+                timestamp
+            });
+        }
+        catch (error) {
+            logger_1.logger.error('Error handling file operation:', error);
+        }
+    }
+    computeSimpleDiff(oldContent, newContent) {
+        const oldLines = oldContent.split('\n');
+        const newLines = newContent.split('\n');
+        return `+${newLines.length - oldLines.length} lines`;
+    }
+}
+exports.ClaudeWatcher = ClaudeWatcher;
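
The watcher above exposes a small event API. As a reading aid only (not part of the package), a minimal consumer might look like the sketch below; the event names and start()/stop() methods come from the code above, while the require path is an assumption about how the published dist/ layout resolves.

    // Hedged sketch: subscribing to ClaudeWatcher events (require path assumed).
    const { ClaudeWatcher } = require('renote-server/dist/claude/watcher');

    const watcher = new ClaudeWatcher();
    watcher.on('user_input', (e) => console.log('prompt:', e.message));
    watcher.on('assistant_message', (e) => console.log('reply:', e.content));
    watcher.on('tool_call', (e) => console.log('tool:', e.toolName));
    watcher.on('file_change', (e) => console.log(e.operation, e.filePath));

    watcher.start().catch((err) => console.error(err));
    process.on('SIGINT', () => { watcher.stop(); process.exit(0); });
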
package/dist/config.js
ADDED
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CONFIG = void 0;
+const dotenv_1 = require("dotenv");
+const os_1 = require("os");
+(0, dotenv_1.config)();
+exports.CONFIG = {
+    port: parseInt(process.env.PORT || '8080'),
+    authToken: process.env.AUTH_TOKEN || '',
+    claudeHome: process.env.CLAUDE_HOME || `${(0, os_1.homedir)()}/.claude`,
+    maxFileSize: parseInt(process.env.MAX_FILE_SIZE || '10485760'),
+    searchTimeout: parseInt(process.env.SEARCH_TIMEOUT || '5000'),
+    logLevel: process.env.LOG_LEVEL || 'info',
+};
+if (!exports.CONFIG.authToken) {
+    console.warn('WARNING: AUTH_TOKEN not set. Generate: openssl rand -hex 32');
+}
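
CONFIG is resolved once at require time from dotenv and process.env, so overrides must be in place before the module is loaded. A minimal sketch, assuming the published dist/ path resolves as shown; the require path and the override values are illustrative, not part of the package:

    // Hedged sketch: environment variables must be set before dist/config.js is required.
    process.env.PORT = '3000';  // overrides the '8080' default shown above
    process.env.AUTH_TOKEN = 'paste output of: openssl rand -hex 32';
    const { CONFIG } = require('renote-server/dist/config');
    console.log(CONFIG.port, CONFIG.claudeHome, CONFIG.maxFileSize);
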
package/dist/files/browser.js
ADDED
@@ -0,0 +1,127 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fileBrowser = exports.FileBrowser = void 0;
+const promises_1 = require("fs/promises");
+const path_1 = require("path");
+const logger_1 = require("../utils/logger");
+class FileBrowser {
+    constructor() {
+        this.ignoredDirs = new Set([
+            'node_modules',
+            '.git',
+            'dist',
+            'build',
+            '.next',
+            'coverage',
+            '.cache',
+            '__pycache__',
+            '.venv',
+            'venv',
+        ]);
+        this.ignoredFiles = new Set([
+            '.DS_Store',
+            'Thumbs.db',
+            '.env',
+            '.env.local',
+        ]);
+    }
+    /**
+     * Generate file tree structure for a given directory
+     */
+    async generateTree(rootPath, maxDepth = 5) {
+        return this.buildTree(rootPath, rootPath, 0, maxDepth);
+    }
+    async buildTree(rootPath, currentPath, depth, maxDepth) {
+        const stats = await (0, promises_1.stat)(currentPath);
+        const name = currentPath === rootPath ? '.' : (0, path_1.relative)(rootPath, currentPath).split('/').pop() || '.';
+        if (stats.isFile()) {
+            return {
+                name,
+                path: (0, path_1.relative)(rootPath, currentPath),
+                type: 'file',
+                size: stats.size,
+            };
+        }
+        // Directory
+        const node = {
+            name,
+            path: (0, path_1.relative)(rootPath, currentPath) || '.',
+            type: 'directory',
+            children: [],
+        };
+        // Stop at max depth
+        if (depth >= maxDepth) {
+            return node;
+        }
+        try {
+            const entries = await (0, promises_1.readdir)(currentPath);
+            const children = [];
+            for (const entry of entries) {
+                // Skip ignored directories and files
+                if (this.ignoredDirs.has(entry) || this.ignoredFiles.has(entry)) {
+                    continue;
+                }
+                const entryPath = (0, path_1.join)(currentPath, entry);
+                try {
+                    const childNode = await this.buildTree(rootPath, entryPath, depth + 1, maxDepth);
+                    children.push(childNode);
+                }
+                catch (error) {
+                    logger_1.logger.warn(`Failed to process ${entryPath}:`, error);
+                }
+            }
+            // Sort: directories first, then files, alphabetically
+            node.children = children.sort((a, b) => {
+                if (a.type !== b.type) {
+                    return a.type === 'directory' ? -1 : 1;
+                }
+                return a.name.localeCompare(b.name);
+            });
+        }
+        catch (error) {
+            logger_1.logger.error(`Failed to read directory ${currentPath}:`, error);
+        }
+        return node;
+    }
+    /**
+     * Get directory listing (non-recursive)
+     */
+    async listDirectory(dirPath) {
+        try {
+            const entries = await (0, promises_1.readdir)(dirPath);
+            const nodes = [];
+            for (const entry of entries) {
+                // Skip ignored items
+                if (this.ignoredDirs.has(entry) || this.ignoredFiles.has(entry)) {
+                    continue;
+                }
+                const entryPath = (0, path_1.join)(dirPath, entry);
+                try {
+                    const stats = await (0, promises_1.stat)(entryPath);
+                    nodes.push({
+                        name: entry,
+                        path: entryPath,
+                        type: stats.isDirectory() ? 'directory' : 'file',
+                        size: stats.isFile() ? stats.size : undefined,
+                    });
+                }
+                catch (error) {
+                    logger_1.logger.warn(`Failed to stat ${entryPath}:`, error);
+                }
+            }
+            // Sort: directories first, then files
+            return nodes.sort((a, b) => {
+                if (a.type !== b.type) {
+                    return a.type === 'directory' ? -1 : 1;
+                }
+                return a.name.localeCompare(b.name);
+            });
+        }
+        catch (error) {
+            logger_1.logger.error(`Failed to list directory ${dirPath}:`, error);
+            throw new Error(`Failed to list directory: ${error}`);
+        }
+    }
+}
+exports.FileBrowser = FileBrowser;
+exports.fileBrowser = new FileBrowser();
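
For orientation, calling the exported singleton might look like the sketch below (not part of the package; the require path is assumed). generateTree returns a depth-limited recursive tree and listDirectory a flat, sorted listing; both silently skip the ignored directories and files listed above.

    // Hedged sketch of the FileBrowser API shown above (require path assumed).
    const { fileBrowser } = require('renote-server/dist/files/browser');

    (async () => {
        const tree = await fileBrowser.generateTree(process.cwd(), 2);   // recurse two levels
        const entries = await fileBrowser.listDirectory(process.cwd());  // flat, non-recursive
        console.log(tree.children.map((c) => c.name));
        console.log(entries.filter((e) => e.type === 'file').length, 'files');
    })();
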
package/dist/files/reader.js
ADDED
@@ -0,0 +1,159 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fileReader = exports.FileReader = void 0;
+const promises_1 = require("fs/promises");
+const path_1 = require("path");
+const config_1 = require("../config");
+const logger_1 = require("../utils/logger");
+class FileReader {
+    constructor() {
+        this.languageMap = {
+            '.js': 'javascript',
+            '.jsx': 'javascript',
+            '.ts': 'typescript',
+            '.tsx': 'typescript',
+            '.py': 'python',
+            '.rb': 'ruby',
+            '.go': 'go',
+            '.rs': 'rust',
+            '.java': 'java',
+            '.c': 'c',
+            '.cpp': 'cpp',
+            '.h': 'c',
+            '.hpp': 'cpp',
+            '.cs': 'csharp',
+            '.php': 'php',
+            '.swift': 'swift',
+            '.kt': 'kotlin',
+            '.scala': 'scala',
+            '.sh': 'bash',
+            '.bash': 'bash',
+            '.zsh': 'bash',
+            '.fish': 'fish',
+            '.ps1': 'powershell',
+            '.html': 'html',
+            '.htm': 'html',
+            '.xml': 'xml',
+            '.css': 'css',
+            '.scss': 'scss',
+            '.sass': 'sass',
+            '.less': 'less',
+            '.json': 'json',
+            '.yaml': 'yaml',
+            '.yml': 'yaml',
+            '.toml': 'toml',
+            '.ini': 'ini',
+            '.md': 'markdown',
+            '.sql': 'sql',
+            '.graphql': 'graphql',
+            '.gql': 'graphql',
+            '.vue': 'vue',
+            '.svelte': 'svelte',
+            '.dockerfile': 'dockerfile',
+            '.Dockerfile': 'dockerfile',
+        };
+        this.binaryExtensions = new Set([
+            '.png', '.jpg', '.jpeg', '.gif', '.bmp', '.ico', '.svg',
+            '.pdf', '.zip', '.tar', '.gz', '.rar', '.7z',
+            '.exe', '.dll', '.so', '.dylib',
+            '.mp3', '.mp4', '.avi', '.mov', '.wav',
+            '.ttf', '.otf', '.woff', '.woff2',
+            '.bin', '.dat', '.db', '.sqlite',
+        ]);
+    }
+    /**
+     * Read file content with size limit
+     */
+    async readFile(filePath) {
+        try {
+            const stats = await (0, promises_1.stat)(filePath);
+            const ext = (0, path_1.extname)(filePath).toLowerCase();
+            const isBinary = this.isBinaryFile(ext);
+            // Check if file is too large
+            if (stats.size > config_1.CONFIG.maxFileSize) {
+                logger_1.logger.warn(`File too large: ${filePath} (${stats.size} bytes)`);
+                return {
+                    path: filePath,
+                    content: '',
+                    size: stats.size,
+                    language: this.detectLanguage(ext),
+                    isBinary,
+                    truncated: true,
+                };
+            }
+            // Don't read binary files
+            if (isBinary) {
+                return {
+                    path: filePath,
+                    content: '[Binary file]',
+                    size: stats.size,
+                    language: 'binary',
+                    isBinary: true,
+                    truncated: false,
+                };
+            }
+            // Read file content
+            const content = await (0, promises_1.readFile)(filePath, 'utf-8');
+            return {
+                path: filePath,
+                content,
+                size: stats.size,
+                language: this.detectLanguage(ext),
+                isBinary: false,
+                truncated: false,
+            };
+        }
+        catch (error) {
+            logger_1.logger.error(`Failed to read file ${filePath}:`, error);
+            throw new Error(`Failed to read file: ${error}`);
+        }
+    }
+    /**
+     * Read file with line range
+     */
+    async readFileLines(filePath, startLine, endLine) {
+        const fileContent = await this.readFile(filePath);
+        if (fileContent.isBinary || fileContent.truncated) {
+            return fileContent;
+        }
+        const lines = fileContent.content.split('\n');
+        const selectedLines = lines.slice(startLine - 1, endLine);
+        return {
+            ...fileContent,
+            content: selectedLines.join('\n'),
+        };
+    }
+    /**
+     * Detect file language from extension
+     */
+    detectLanguage(ext) {
+        return this.languageMap[ext] || 'plaintext';
+    }
+    /**
+     * Check if file is binary based on extension
+     */
+    isBinaryFile(ext) {
+        return this.binaryExtensions.has(ext);
+    }
+    /**
+     * Get file metadata without reading content
+     */
+    async getFileInfo(filePath) {
+        try {
+            const stats = await (0, promises_1.stat)(filePath);
+            const ext = (0, path_1.extname)(filePath).toLowerCase();
+            return {
+                path: filePath,
+                size: stats.size,
+                language: this.detectLanguage(ext),
+                isBinary: this.isBinaryFile(ext),
+            };
+        }
+        catch (error) {
+            logger_1.logger.error(`Failed to get file info ${filePath}:`, error);
+            throw new Error(`Failed to get file info: ${error}`);
+        }
+    }
+}
+exports.FileReader = FileReader;
+exports.fileReader = new FileReader();
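
Reads are capped by CONFIG.maxFileSize (oversized files come back with empty content and truncated: true) and binary extensions are never read. A minimal sketch of the exported singleton, with an assumed require path and an illustrative file name:

    // Hedged sketch of the FileReader API shown above (require path assumed).
    const { fileReader } = require('renote-server/dist/files/reader');

    (async () => {
        const info = await fileReader.getFileInfo('package.json');            // metadata only
        const whole = await fileReader.readFile('package.json');              // size-limited read
        const slice = await fileReader.readFileLines('package.json', 1, 10);  // lines 1 through 10
        console.log(info.language, whole.truncated, slice.content.split('\n').length);
    })();
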
package/dist/files/search.js
ADDED
@@ -0,0 +1,124 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.searchService = exports.SearchService = void 0;
+const child_process_1 = require("child_process");
+const util_1 = require("util");
+const config_1 = require("../config");
+const logger_1 = require("../utils/logger");
+const execAsync = (0, util_1.promisify)(child_process_1.exec);
+class SearchService {
+    /**
+     * Search for text in files using ripgrep
+     */
+    async search(query, searchPath, options = {}) {
+        if (!query || query.trim().length === 0) {
+            throw new Error('Search query cannot be empty');
+        }
+        try {
+            const args = this.buildRipgrepArgs(query, options);
+            const command = `rg ${args.join(' ')} "${query}" "${searchPath}"`;
+            logger_1.logger.debug(`Executing search: ${command}`);
+            const { stdout, stderr } = await execAsync(command, {
+                timeout: config_1.CONFIG.searchTimeout,
+                maxBuffer: 10 * 1024 * 1024, // 10MB
+            });
+            if (stderr && !stderr.includes('No such file or directory')) {
+                logger_1.logger.warn('Ripgrep stderr:', stderr);
+            }
+            return this.parseRipgrepOutput(stdout, options.maxResults);
+        }
+        catch (error) {
+            // ripgrep returns exit code 1 when no matches found
+            if (error.code === 1) {
+                return [];
+            }
+            // ripgrep not installed
+            if (error.code === 127 || error.message.includes('command not found')) {
+                logger_1.logger.error('ripgrep not installed');
+                throw new Error('ripgrep is not installed. Please install it: https://github.com/BurntSushi/ripgrep');
+            }
+            // Timeout
+            if (error.killed) {
+                logger_1.logger.error('Search timeout');
+                throw new Error('Search timeout exceeded');
+            }
+            logger_1.logger.error('Search failed:', error);
+            throw new Error(`Search failed: ${error.message}`);
+        }
+    }
+    /**
+     * Build ripgrep command arguments
+     */
+    buildRipgrepArgs(query, options) {
+        const args = [
+            '--json', // JSON output for easier parsing
+            '--line-number',
+            '--column',
+            '--no-heading',
+            '--with-filename',
+        ];
+        // Case sensitivity
+        if (!options.caseSensitive) {
+            args.push('--ignore-case');
+        }
+        // Regex mode
+        if (!options.regex) {
+            args.push('--fixed-strings');
+        }
+        // File type filter
+        if (options.fileType) {
+            args.push(`--type=${options.fileType}`);
+        }
+        // Max results
+        if (options.maxResults) {
+            args.push(`--max-count=${options.maxResults}`);
+        }
+        return args;
+    }
+    /**
+     * Parse ripgrep JSON output
+     */
+    parseRipgrepOutput(output, maxResults) {
+        const results = [];
+        const lines = output.split('\n').filter(l => l.trim());
+        for (const line of lines) {
+            try {
+                const json = JSON.parse(line);
+                // Only process match entries
+                if (json.type === 'match') {
+                    const data = json.data;
+                    const result = {
+                        file: data.path.text,
+                        line: data.line_number,
+                        column: data.submatches[0]?.start || 0,
+                        content: data.lines.text.trim(),
+                        match: data.submatches[0]?.match?.text || '',
+                    };
+                    results.push(result);
+                    // Stop if we've reached max results
+                    if (maxResults && results.length >= maxResults) {
+                        break;
+                    }
+                }
+            }
+            catch (error) {
+                logger_1.logger.warn('Failed to parse ripgrep line:', line, error);
+            }
+        }
+        return results;
+    }
+    /**
+     * Check if ripgrep is installed
+     */
+    async isRipgrepInstalled() {
+        try {
+            await execAsync('rg --version');
+            return true;
+        }
+        catch (error) {
+            return false;
+        }
+    }
+}
+exports.SearchService = SearchService;
+exports.searchService = new SearchService();
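
The search service shells out to ripgrep with --json output, maps caseSensitive/regex/fileType/maxResults onto rg flags, and treats exit code 1 (no matches) as an empty result. A minimal sketch, assuming rg is on PATH and the require path resolves as shown (neither is guaranteed by the package):

    // Hedged sketch of the SearchService API shown above (require path assumed).
    const { searchService } = require('renote-server/dist/files/search');

    (async () => {
        if (!(await searchService.isRipgrepInstalled())) return;
        const hits = await searchService.search('TODO', process.cwd(), {
            caseSensitive: false,  // adds --ignore-case
            regex: false,          // adds --fixed-strings (literal match)
            maxResults: 20,        // adds --max-count=20
        });
        for (const h of hits) console.log(`${h.file}:${h.line}:${h.column} ${h.content}`);
    })();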