recoder-code 2.5.2 → 2.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +0 -0
- package/dist/src/commands/context/index.js +2 -2
- package/dist/src/commands/mcp/marketplace.d.ts +6 -0
- package/dist/src/commands/mcp/marketplace.js +448 -0
- package/dist/src/commands/mcp.js +2 -0
- package/dist/src/commands/parallel.d.ts +20 -0
- package/dist/src/commands/parallel.js +133 -0
- package/dist/src/commands/recoderWeb.js +184 -5
- package/dist/src/commands/web/diff.d.ts +13 -0
- package/dist/src/commands/web/diff.js +235 -0
- package/dist/src/commands/web/link.d.ts +11 -0
- package/dist/src/commands/web/link.js +96 -0
- package/dist/src/commands/web/pull.d.ts +13 -0
- package/dist/src/commands/web/pull.js +203 -0
- package/dist/src/commands/web/status.d.ts +10 -0
- package/dist/src/commands/web/status.js +104 -0
- package/dist/src/commands/web/unlink.d.ts +10 -0
- package/dist/src/commands/web/unlink.js +45 -0
- package/dist/src/commands/web/watch.d.ts +14 -0
- package/dist/src/commands/web/watch.js +360 -0
- package/dist/src/commands/web.js +12 -0
- package/dist/src/config/config.js +6 -2
- package/dist/src/config/defaultMcpServers.d.ts +1 -0
- package/dist/src/config/defaultMcpServers.js +46 -0
- package/dist/src/gemini.js +10 -0
- package/dist/src/parallel/git-utils.d.ts +42 -0
- package/dist/src/parallel/git-utils.js +161 -0
- package/dist/src/parallel/index.d.ts +14 -0
- package/dist/src/parallel/index.js +14 -0
- package/dist/src/parallel/parallel-mode.d.ts +48 -0
- package/dist/src/parallel/parallel-mode.js +224 -0
- package/dist/src/services/AgentBridgeService.d.ts +61 -0
- package/dist/src/services/AgentBridgeService.js +253 -0
- package/dist/src/services/BuiltinCommandLoader.js +7 -0
- package/dist/src/services/PlatformSyncService.d.ts +154 -0
- package/dist/src/services/PlatformSyncService.js +588 -0
- package/dist/src/ui/commands/workflowCommands.d.ts +16 -0
- package/dist/src/ui/commands/workflowCommands.js +291 -0
- package/dist/src/ui/commands/workspaceCommand.d.ts +11 -0
- package/dist/src/ui/commands/workspaceCommand.js +329 -0
- package/dist/src/zed-integration/schema.d.ts +30 -30
- package/package.json +29 -10
- package/src/postinstall.cjs +3 -2
- package/dist/tsconfig.tsbuildinfo +0 -1
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
/**
 * 'recoder web watch' command
 * Live sync - watch for local changes and sync to web in real-time
 *
 * Local edits are debounced and pushed to the Docker sync backend; unless
 * --push-only is given, a WebSocket connection applies remote (web IDE)
 * changes back to the local directory.
 */
import path from 'node:path';
import fs from 'node:fs/promises';
import { watch } from 'node:fs';
import { RecoderWebService } from '../../services/RecoderWebService.js';
import { RecoderAuthService } from '../../services/RecoderAuthService.js';

// Sync backend endpoint; overridable via env for self-hosted deployments.
const DOCKER_BACKEND_URL = process.env['RECODER_DOCKER_URL'] || 'https://docker.recoder.xyz';

// Directories and files to ignore
const IGNORED_DIRS = new Set([
    'node_modules',
    '.git',
    'dist',
    'build',
    '.next',
    '.vercel',
    '.cache',
    'coverage',
    '__pycache__',
]);
const IGNORED_FILES = new Set([
    '.DS_Store',
    '.env',
    '.env.local',
    '.env.production',
    '.recoder-web',
    'package-lock.json',
    'yarn.lock',
    'pnpm-lock.yaml',
]);

export const watchCommand = {
    command: 'watch [urlId]',
    describe: 'Live sync - watch for changes and sync to web in real-time',
    builder: (yargs) => yargs
        .positional('urlId', {
            type: 'string',
            description: 'Project URL ID (optional, will read from .recoder-web if present)',
        })
        .option('directory', {
            type: 'string',
            alias: 'd',
            description: 'Directory to watch (defaults to current directory)',
            default: process.cwd(),
        })
        .option('push-only', {
            type: 'boolean',
            description: 'Only push local changes to web (no pull)',
            default: false,
        })
        .option('pull-only', {
            type: 'boolean',
            description: 'Only pull web changes to local (no push)',
            default: false,
        })
        .option('interval', {
            type: 'number',
            alias: 'i',
            description: 'Debounce interval in milliseconds',
            default: 500,
        })
        .check((argv) => {
            // The two one-way modes are mutually exclusive.
            if (argv['push-only'] && argv['pull-only']) {
                throw new Error('Cannot use both --push-only and --pull-only');
            }
            return true;
        }),
    handler: async (argv) => {
        const webService = new RecoderWebService();
        const authService = new RecoderAuthService();
        try {
            // Check authentication
            const isAuth = await authService.isAuthenticated();
            if (!isAuth) {
                console.error('Not authenticated');
                console.log('Run: recoder auth login');
                process.exit(1);
            }
            const targetDir = path.resolve(argv.directory || process.cwd());
            let urlId = argv.urlId;
            // Try to read .recoder-web metadata if no urlId provided
            if (!urlId) {
                try {
                    const metadataPath = path.join(targetDir, '.recoder-web');
                    const metadata = JSON.parse(await fs.readFile(metadataPath, 'utf-8'));
                    urlId = metadata.urlId || metadata.projectId;
                    console.log(`Found project ID from .recoder-web: ${urlId}`);
                }
                catch {
                    console.error('No project ID provided and no .recoder-web file found');
                    console.log('Usage: recoder web watch <urlId>');
                    console.log(' Or run from a linked directory (use recoder web link first)');
                    process.exit(1);
                }
            }
            // Get token
            const token = await authService.getAccessToken();
            if (!token) {
                console.error('Failed to get access token');
                process.exit(1);
            }
            // Generate a unique client ID for this watch session.
            // (slice replaces the deprecated String.prototype.substr)
            const clientId = `cli-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
            console.log('\n' + '='.repeat(60));
            console.log(' RECODER LIVE SYNC');
            console.log('='.repeat(60));
            console.log(`Project: ${urlId}`);
            console.log(`Directory: ${targetDir}`);
            console.log(`Mode: ${argv['push-only'] ? 'Push only' : argv['pull-only'] ? 'Pull only' : 'Bidirectional'}`);
            console.log(`Client ID: ${clientId.substring(0, 16)}...`);
            console.log('='.repeat(60));
            console.log('\nWatching for changes... (Press Ctrl+C to stop)\n');
            // Track pending changes with debouncing. Keyed by relative path so
            // repeated events on the same file collapse into one entry.
            const pendingChanges = new Map();
            let syncTimeout = null;
            // Flush all pending changes to the backend in one request.
            const syncPendingChanges = async () => {
                if (pendingChanges.size === 0)
                    return;
                const changes = Array.from(pendingChanges.values());
                pendingChanges.clear();
                console.log(`\nSyncing ${changes.length} changes...`);
                try {
                    // Group by type
                    const added = [];
                    const modified = [];
                    const deleted = [];
                    for (const change of changes) {
                        if (change.type === 'unlink') {
                            deleted.push(change.path);
                        }
                        else if (change.type === 'add') {
                            added.push(change.path);
                        }
                        else {
                            modified.push(change.path);
                        }
                    }
                    // Read file contents for added and modified files
                    const files = {};
                    for (const filePath of [...added, ...modified]) {
                        try {
                            const fullPath = path.join(targetDir, filePath);
                            const content = await fs.readFile(fullPath, 'utf-8');
                            files[filePath] = content;
                        }
                        catch {
                            // File might have been deleted since we recorded the change
                        }
                    }
                    // Push to backend
                    const response = await fetch(`${DOCKER_BACKEND_URL}/api/sync/${urlId}/push`, {
                        method: 'POST',
                        headers: {
                            'Authorization': `Bearer ${token}`,
                            'Content-Type': 'application/json',
                        },
                        body: JSON.stringify({
                            files,
                            clientId,
                        }),
                    });
                    if (response.ok) {
                        const data = await response.json();
                        // Defensive default: don't crash if the backend omits the summary.
                        const applied = data.applied ?? { added: 0, modified: 0, deleted: 0 };
                        const total = applied.added + applied.modified + applied.deleted;
                        if (total > 0) {
                            const parts = [];
                            if (applied.added > 0)
                                parts.push(`+${applied.added}`);
                            if (applied.modified > 0)
                                parts.push(`~${applied.modified}`);
                            if (applied.deleted > 0)
                                parts.push(`-${applied.deleted}`);
                            console.log(` Synced: ${parts.join(', ')} files`);
                        }
                    }
                    else {
                        const error = await response.json().catch(() => ({ error: 'Unknown error' }));
                        console.error(` Sync failed: ${error.error}`);
                        // If container not found, try to push via web API
                        if (error.error?.includes('Container not found')) {
                            console.log(' Container not running. Pushing via web API...');
                            await webService.uploadProject(urlId, files);
                            console.log(' Uploaded to web API');
                        }
                    }
                }
                catch (error) {
                    console.error(` Sync error: ${error.message}`);
                }
            };
            // Schedule a flush, restarting the debounce window on each call.
            const scheduleSync = () => {
                if (syncTimeout) {
                    clearTimeout(syncTimeout);
                }
                syncTimeout = setTimeout(syncPendingChanges, argv.interval);
            };
            // A path is ignored if any directory segment or the filename itself
            // is on the ignore lists.
            const shouldIgnore = (filePath) => {
                const parts = filePath.split(path.sep);
                for (const part of parts) {
                    if (IGNORED_DIRS.has(part)) {
                        return true;
                    }
                }
                const fileName = path.basename(filePath);
                if (IGNORED_FILES.has(fileName)) {
                    return true;
                }
                return false;
            };
            // Set up file watcher.
            // Using recursive watch (works on macOS and Windows, limited on Linux)
            const watcher = watch(targetDir, { recursive: true }, async (eventType, filename) => {
                if (!filename)
                    return;
                if (argv['pull-only'])
                    return;
                // Normalize to forward slashes so paths match the backend format.
                const relativePath = filename.toString().replace(/\\/g, '/');
                if (shouldIgnore(relativePath)) {
                    return;
                }
                const fullPath = path.join(targetDir, relativePath);
                try {
                    const stats = await fs.stat(fullPath);
                    if (stats.isDirectory()) {
                        return; // Skip directory events
                    }
                    // Determine change type
                    let changeType = 'change';
                    const existingChange = pendingChanges.get(relativePath);
                    if (!existingChange) {
                        // Heuristic: if mtime and ctime are within 100ms the file
                        // was just created, so record it as an add.
                        const mtime = stats.mtimeMs;
                        const ctime = stats.ctimeMs;
                        if (Math.abs(mtime - ctime) < 100) {
                            changeType = 'add';
                        }
                    }
                    pendingChanges.set(relativePath, {
                        path: relativePath,
                        type: changeType,
                        timestamp: Date.now(),
                    });
                    const icon = changeType === 'add' ? '+' : '~';
                    console.log(` ${icon} ${relativePath}`);
                    scheduleSync();
                }
                catch (error) {
                    // stat failed with ENOENT -> the file was deleted.
                    if (error.code === 'ENOENT') {
                        pendingChanges.set(relativePath, {
                            path: relativePath,
                            type: 'unlink',
                            timestamp: Date.now(),
                        });
                        console.log(` - ${relativePath}`);
                        scheduleSync();
                    }
                }
            });
            // Set up WebSocket for receiving remote changes (bidirectional sync).
            // NOTE(review): relies on the global WebSocket client (Node >= 21) — confirm minimum Node version.
            let ws = null;
            if (!argv['push-only']) {
                try {
                    const wsUrl = DOCKER_BACKEND_URL.replace('http', 'ws');
                    ws = new WebSocket(`${wsUrl}/cli-sync/${urlId}?clientId=${clientId}`);
                    ws.onopen = () => {
                        console.log('Connected to live sync server');
                    };
                    ws.onmessage = async (event) => {
                        try {
                            const data = JSON.parse(event.data.toString());
                            if (data.type === 'file-change' && data.source !== 'cli') {
                                // Remote change - apply to local
                                const filePath = data.path;
                                const content = data.content;
                                const fullPath = path.join(targetDir, filePath);
                                if (content !== undefined) {
                                    // File created or modified
                                    const dir = path.dirname(fullPath);
                                    await fs.mkdir(dir, { recursive: true });
                                    await fs.writeFile(fullPath, content, 'utf-8');
                                    console.log(` < ${filePath} (from web)`);
                                }
                                else {
                                    // File deleted
                                    try {
                                        await fs.unlink(fullPath);
                                        console.log(` < ${filePath} (deleted from web)`);
                                    }
                                    catch {
                                        // File doesn't exist locally, that's fine
                                    }
                                }
                            }
                            if (data.type === 'connected') {
                                console.log('Bidirectional sync enabled');
                            }
                        }
                        catch {
                            // Ignore parse errors
                        }
                    };
                    ws.onerror = () => {
                        console.log('WebSocket connection failed - running in push-only mode');
                    };
                    ws.onclose = () => {
                        console.log('Disconnected from live sync server');
                    };
                }
                catch {
                    console.log('Could not connect to live sync server - running in push-only mode');
                }
            }
            // Handle graceful shutdown: stop watching, close the socket, and
            // flush any changes that are still pending before exiting.
            const cleanup = () => {
                console.log('\n\nStopping watch...');
                watcher.close();
                if (ws) {
                    ws.close();
                }
                if (syncTimeout) {
                    clearTimeout(syncTimeout);
                }
                if (pendingChanges.size > 0) {
                    console.log('Syncing remaining changes...');
                    syncPendingChanges().then(() => {
                        console.log('Watch stopped.');
                        process.exit(0);
                    });
                }
                else {
                    console.log('Watch stopped.');
                    process.exit(0);
                }
            };
            process.on('SIGINT', cleanup);
            process.on('SIGTERM', cleanup);
            // Keep the process running until a signal triggers cleanup().
            await new Promise(() => { });
        }
        catch (error) {
            console.error(error.message);
            process.exit(1);
        }
    },
};
|
package/dist/src/commands/web.js
CHANGED
|
@@ -7,19 +7,31 @@ import { downloadCommand } from './web/download.js';
|
|
|
7
7
|
import { openCommand } from './web/open.js';
|
|
8
8
|
import { infoCommand } from './web/info.js';
|
|
9
9
|
import { pushCommand } from './web/push.js';
|
|
10
|
+
import { pullCommand } from './web/pull.js';
|
|
10
11
|
import { syncCommand } from './web/sync.js';
|
|
11
12
|
import { launchCommand } from './web/launch.js';
|
|
13
|
+
import { linkCommand } from './web/link.js';
|
|
14
|
+
import { unlinkCommand } from './web/unlink.js';
|
|
15
|
+
import { statusCommand } from './web/status.js';
|
|
16
|
+
import { diffCommand } from './web/diff.js';
|
|
17
|
+
import { watchCommand } from './web/watch.js';
|
|
12
18
|
export const webCommand = {
|
|
13
19
|
command: 'web',
|
|
14
20
|
describe: 'Manage projects from web IDE',
|
|
15
21
|
builder: (yargs) => yargs
|
|
16
22
|
.command(launchCommand)
|
|
23
|
+
.command(linkCommand)
|
|
24
|
+
.command(unlinkCommand)
|
|
25
|
+
.command(statusCommand)
|
|
17
26
|
.command(listCommand)
|
|
18
27
|
.command(downloadCommand)
|
|
19
28
|
.command(openCommand)
|
|
20
29
|
.command(infoCommand)
|
|
21
30
|
.command(pushCommand)
|
|
31
|
+
.command(pullCommand)
|
|
22
32
|
.command(syncCommand)
|
|
33
|
+
.command(diffCommand)
|
|
34
|
+
.command(watchCommand)
|
|
23
35
|
.demandCommand(1, 'You need at least one command. Try: recoder web launch')
|
|
24
36
|
.version(false),
|
|
25
37
|
handler: () => {
|
|
@@ -23,6 +23,7 @@ import { hintsCommand } from '../commands/hints.js';
|
|
|
23
23
|
import { modelsCommand } from '../commands/models-cmd.js';
|
|
24
24
|
import { configureCommand } from '../commands/configure.js';
|
|
25
25
|
import { connectCommand } from '../commands/connect-cmd.js';
|
|
26
|
+
import { parallelCommand, parallelFinishCommand } from '../commands/parallel.js';
|
|
26
27
|
import { resolvePath } from '../utils/resolvePath.js';
|
|
27
28
|
import { getCliVersion } from '../utils/version.js';
|
|
28
29
|
import { annotateActiveExtensions } from './extension.js';
|
|
@@ -280,7 +281,10 @@ export async function parseArguments(settings) {
|
|
|
280
281
|
// Register configure command for interactive setup
|
|
281
282
|
.command(configureCommand)
|
|
282
283
|
// Register connect command for custom providers
|
|
283
|
-
.command(connectCommand)
|
|
284
|
+
.command(connectCommand)
|
|
285
|
+
// Register parallel mode commands for git worktree-based parallel execution
|
|
286
|
+
.command(parallelCommand)
|
|
287
|
+
.command(parallelFinishCommand);
|
|
284
288
|
if (settings?.experimental?.extensionManagement ?? false) {
|
|
285
289
|
yargsInstance.command(extensionsCommand);
|
|
286
290
|
}
|
|
@@ -296,7 +300,7 @@ export async function parseArguments(settings) {
|
|
|
296
300
|
// Handle case where MCP subcommands are executed - they should exit the process
|
|
297
301
|
// and not return to main CLI logic
|
|
298
302
|
if (result._.length > 0 &&
|
|
299
|
-
(result._[0] === 'mcp' || result._[0] === 'extensions' || result._[0] === 'auth' || result._[0] === 'ai' || result._[0] === 'web' || result._[0] === 'init' || result._[0] === 'plan')) {
|
|
303
|
+
(result._[0] === 'mcp' || result._[0] === 'extensions' || result._[0] === 'auth' || result._[0] === 'ai' || result._[0] === 'web' || result._[0] === 'init' || result._[0] === 'plan' || result._[0] === 'parallel' || result._[0] === 'parallel-finish')) {
|
|
300
304
|
// Subcommands handle their own execution and process exit
|
|
301
305
|
process.exit(0);
|
|
302
306
|
}
|
|
@@ -17,6 +17,7 @@ export declare function getEssentialMcpServers(): Record<string, MCPServerConfig
|
|
|
17
17
|
*/
|
|
18
18
|
export declare function getExtendedMcpServers(): Record<string, MCPServerConfig>;
|
|
19
19
|
export declare const MCP_SERVER_CATEGORIES: {
|
|
20
|
+
readonly 'built-in': readonly ["eternos"];
|
|
20
21
|
readonly essential: readonly ["github", "filesystem", "gitmcp-docs"];
|
|
21
22
|
readonly automation: readonly ["playwright", "puppeteer"];
|
|
22
23
|
readonly 'problem-solving': readonly ["sequential-thinking"];
|
|
@@ -3,11 +3,55 @@
|
|
|
3
3
|
* Copyright 2025 Google LLC
|
|
4
4
|
* SPDX-License-Identifier: Apache-2.0
|
|
5
5
|
*/
|
|
6
|
+
import { createRequire } from 'node:module';
|
|
7
|
+
import { join, dirname } from 'node:path';
|
|
8
|
+
import { existsSync } from 'node:fs';
|
|
9
|
+
import { fileURLToPath } from 'node:url';
|
|
10
|
+
/**
 * Resolves the path to the mcp-eternos dist/index.js entry point.
 * Resolution order:
 *   1. npm workspace resolution (via require.resolve)
 *   2. Relative path from CLI package (sibling package in monorepo)
 *   3. Global npx fallback
 */
function getMcpEternosPath() {
    // Strategy 1: ask Node's resolver — works whenever mcp-eternos is
    // installed as a dependency (npm workspace or regular install).
    try {
        return createRequire(import.meta.url).resolve('mcp-eternos/dist/index.js');
    }
    catch {
        // Not installed as a dependency; fall through to path probing.
    }
    const thisDir = dirname(fileURLToPath(import.meta.url));
    // Strategies 2 and 3: probe sibling-package locations relative to this
    // file — packages/cli/dist/src/config/ -> packages/mcp-eternos/dist/index.js
    // for the built layout, then one level shallower for the source layout
    // used during development.
    const candidates = [
        join(thisDir, '..', '..', '..', '..', 'mcp-eternos', 'dist', 'index.js'),
        join(thisDir, '..', '..', '..', 'mcp-eternos', 'dist', 'index.js'),
    ];
    for (const candidate of candidates) {
        if (existsSync(candidate)) {
            return candidate;
        }
    }
    // Last resort: assume a global installation.
    return 'mcp-eternos';
}
|
|
6
42
|
/**
|
|
7
43
|
* Default MCP servers that come pre-configured with Recoder Code.
|
|
8
44
|
* All servers listed here have been tested and confirmed working.
|
|
9
45
|
*/
|
|
10
46
|
export const DEFAULT_MCP_SERVERS = {
|
|
47
|
+
// Eternos MCP - Recoder's built-in MCP server with 107+ tools
|
|
48
|
+
eternos: {
|
|
49
|
+
command: 'node',
|
|
50
|
+
args: [getMcpEternosPath()],
|
|
51
|
+
description: `Eternos - Recoder's built-in MCP server with 107+ tools including web, file, data, and agent-to-agent communication.`,
|
|
52
|
+
trust: true,
|
|
53
|
+
timeout: 30000,
|
|
54
|
+
},
|
|
11
55
|
// GitHub MCP - Repository management (26 tools)
|
|
12
56
|
github: {
|
|
13
57
|
command: 'npx',
|
|
@@ -141,6 +185,7 @@ export function getDefaultMcpServers() {
|
|
|
141
185
|
}
|
|
142
186
|
export function getEssentialMcpServers() {
|
|
143
187
|
return {
|
|
188
|
+
eternos: DEFAULT_MCP_SERVERS.eternos,
|
|
144
189
|
github: DEFAULT_MCP_SERVERS.github,
|
|
145
190
|
playwright: DEFAULT_MCP_SERVERS.playwright,
|
|
146
191
|
filesystem: DEFAULT_MCP_SERVERS.filesystem,
|
|
@@ -194,6 +239,7 @@ export function getExtendedMcpServers() {
|
|
|
194
239
|
};
|
|
195
240
|
}
|
|
196
241
|
export const MCP_SERVER_CATEGORIES = {
|
|
242
|
+
'built-in': ['eternos'],
|
|
197
243
|
essential: ['github', 'filesystem', 'gitmcp-docs'],
|
|
198
244
|
automation: ['playwright', 'puppeteer'],
|
|
199
245
|
'problem-solving': ['sequential-thinking'],
|
package/dist/src/gemini.js
CHANGED
|
@@ -147,6 +147,16 @@ export async function main() {
|
|
|
147
147
|
}
|
|
148
148
|
}
|
|
149
149
|
// ===== END AUTH GUARD =====
|
|
150
|
+
// ===== RECODER.XYZ PLATFORM INTEGRATION =====
|
|
151
|
+
// Auto-detect if running in recoder.xyz container and show preview URL
|
|
152
|
+
try {
|
|
153
|
+
const { platformSync } = await import('./services/PlatformSyncService.js');
|
|
154
|
+
await platformSync.autoInitialize();
|
|
155
|
+
}
|
|
156
|
+
catch (e) {
|
|
157
|
+
// Silently ignore - platform integration is optional
|
|
158
|
+
}
|
|
159
|
+
// ===== END PLATFORM INTEGRATION =====
|
|
150
160
|
// Check if API key is configured before proceeding
|
|
151
161
|
const { isApiKeyConfigured, checkAndSetupApiKey } = await import('./setup/apiKeySetup.js');
|
|
152
162
|
if (!isApiKeyConfigured()) {
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
 * Git utilities for parallel mode
 * Based on Kilo Code (Apache-2.0 License)
 */
export interface GitInfo {
    /** Current branch name, or null — presumably when not on a branch or not in a repo; confirm against implementation. */
    branch: string | null;
    /** Whether the working tree has no pending changes. */
    isClean: boolean;
    /** Whether the directory is inside a Git repository at all. */
    isRepo: boolean;
}
/**
 * Get Git repository information for a given directory
 */
export declare function getGitInfo(cwd: string): Promise<GitInfo>;
/**
 * Get just the branch name (faster than full git info)
 */
export declare function getGitBranch(cwd: string): Promise<string | null>;
/**
 * Check if a branch exists in the repository
 */
export declare function branchExists(cwd: string, branchName: string): Promise<boolean>;
/**
 * Generate a valid git branch name from a prompt
 * Sanitizes the prompt to create a safe branch name
 */
export declare function generateBranchName(prompt: string): string;
/**
 * Check if a directory is a git worktree
 */
export declare function isGitWorktree(cwd: string): Promise<boolean>;
/**
 * Create a git worktree
 */
export declare function createWorktree(cwd: string, worktreePath: string, branchName: string, isNewBranch: boolean): Promise<void>;
/**
 * Remove a git worktree
 */
export declare function removeWorktree(cwd: string, worktreePath: string): Promise<void>;
/**
 * List all worktrees for a repository
 */
export declare function listWorktrees(cwd: string): Promise<string[]>;
|