ai-extension-preview 0.1.11 → 0.1.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +81 -47
- package/dist/plugins/DownloaderPlugin.js +35 -57
- package/dist/plugins/ServerPlugin.js +80 -16
- package/dist/plugins/browser/BrowserManagerPlugin.js +86 -0
- package/dist/plugins/browser/NativeLauncherPlugin.js +91 -0
- package/dist/plugins/browser/WSLLauncherPlugin.js +180 -0
- package/dist/utils/browserUtils.js +79 -0
- package/package.json +3 -2
- package/dist/plugins/BrowserPlugin.js +0 -297
package/dist/index.js
CHANGED

@@ -8,7 +8,9 @@ import os from 'os';
 import { Runtime } from 'skeleton-crew-runtime';
 import { CorePlugin } from './plugins/CorePlugin.js';
 import { DownloaderPlugin } from './plugins/DownloaderPlugin.js';
-import {
+import { BrowserManagerPlugin } from './plugins/browser/BrowserManagerPlugin.js';
+import { WSLLauncherPlugin } from './plugins/browser/WSLLauncherPlugin.js';
+import { NativeLauncherPlugin } from './plugins/browser/NativeLauncherPlugin.js';
 import { ServerPlugin } from './plugins/ServerPlugin.js';
 import axios from 'axios';
 import chalk from 'chalk';

@@ -24,10 +26,19 @@ program
 .option('--user <user>', 'User ID (if required)')
 .parse(process.argv);
 const options = program.opts();
-async function authenticate(host) {
+async function authenticate(host, port) {
 try {
-// 1. Init Session
-
+// 1. Init Session with port
+console.log('[DEBUG] Sending port to backend:', port);
+const initRes = await axios({
+method: 'post',
+url: `${host}/preview/init`,
+data: { port },
+headers: {
+'Content-Type': 'application/json'
+}
+});
+console.log('[DEBUG] Init response:', initRes.data);
 const { code, sessionId } = initRes.data;
 console.log('\n' + chalk.bgBlue.bold(' DETACHED PREVIEW MODE ') + '\n');
 console.log('To connect, please go to your Extension Dashboard and click "Connect Preview".');

@@ -46,10 +57,12 @@ async function authenticate(host) {
 console.error('Error: No Job ID associated with this connection.');
 process.exit(1);
 }
+console.log('[DEBUG] Received userId:', data.userId);
+console.log('[DEBUG] Received jobId:', data.jobId);
 return {
 jobId: data.jobId,
 userId: data.userId,
-token:
+token: data.token || ''
 };
 }
 if (data.status === 'expired') {

@@ -57,54 +70,64 @@ async function authenticate(host) {
 process.exit(1);
 }
 }
-catch (
-// Ignore
+catch (err) {
+// Ignore poll errors, keep trying
 }
 }
 }
 catch (error) {
-console.error(
-
+console.error('Authentication failed:', error);
+throw error;
 }
 }
-
-
-
-
-
-
-
-const authData = await authenticate(host);
-jobId = authData.jobId;
-userId = authData.userId || userId;
-token = authData.token || token;
-}
-// Use os.homedir() to ensure we have write permissions
-// Git Bash sometimes defaults cwd to C:\Program Files\Git which causes EPERM
-const HOME_DIR = os.homedir();
-const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', jobId);
-// 1. Initialize Runtime
+// Use os.homedir() to ensure we have write permissions
+// Git Bash sometimes defaults cwd to C:\Program Files\Git which causes EPERM
+const HOME_DIR = os.homedir();
+const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', options.job || 'default'); // Use default if job not provided yet
+(async () => {
+const { job: jobId, host, token, user: userId } = options;
+// 1. Initialize Runtime first to allocate port
 const runtime = new Runtime({
 hostContext: {
 config: {
 host,
-token,
-user: userId,
-jobId,
+token: token || '',
+user: userId || '',
+jobId: jobId || '',
 workDir: WORK_DIR
 }
 }
 });
-// 2. Register Plugins
-// Note: In a real dynamic system we might load these from a folder
 runtime.logger.info('Registering plugins...');
 runtime.registerPlugin(CorePlugin);
 runtime.registerPlugin(DownloaderPlugin);
-runtime.registerPlugin(
+runtime.registerPlugin(BrowserManagerPlugin);
+runtime.registerPlugin(WSLLauncherPlugin);
+runtime.registerPlugin(NativeLauncherPlugin);
 runtime.registerPlugin(ServerPlugin);
 runtime.logger.info('Initializing runtime...');
 await runtime.initialize();
 const ctx = runtime.getContext();
+// Get allocated port from ServerPlugin
+const allocatedPort = ctx.hotReloadPort;
+if (!allocatedPort) {
+console.error('Failed to allocate server port');
+process.exit(1);
+}
+// 2. Now authenticate with the allocated port
+let finalJobId = jobId;
+let finalUserId = userId;
+let finalToken = token;
+if (!jobId || !userId) {
+const authData = await authenticate(host, allocatedPort);
+finalJobId = authData.jobId;
+finalUserId = authData.userId;
+finalToken = authData.token;
+// Update runtime config with auth data
+ctx.host.config.jobId = finalJobId;
+ctx.host.config.user = finalUserId;
+ctx.host.config.token = finalToken;
+}
 // 3. Start LifeCycle
 await ctx.actions.runAction('core:log', { level: 'info', message: 'Initializing Local Satellite...' });
 // Ensure work dir exists

@@ -115,18 +138,33 @@ async function main() {
 await ctx.actions.runAction('core:log', { level: 'error', message: 'Initial check failed. Could not verify job or download extension.' });
 process.exit(1);
 }
-//
-const
-
-
-
-
-
-
-
-
+// Wait for Extension files (Manifest)
+const manifestPath = path.join(WORK_DIR, 'dist', 'manifest.json');
+let attempts = 0;
+const maxAttempts = 60; // 2 minutes
+console.log('[DEBUG] Waiting for extension files...');
+while (!fs.existsSync(manifestPath) && attempts < maxAttempts) {
+await new Promise(r => setTimeout(r, 2000));
+attempts++;
+if (attempts % 5 === 0)
+console.log(`Waiting for extension generation... (${attempts * 2}s)`);
+}
+if (!fs.existsSync(manifestPath)) {
+await ctx.actions.runAction('core:log', { level: 'error', message: 'Timed out waiting for extension files. Status check succeeded but files are missing.' });
+process.exit(1);
+}
+// Launch Browser
+await ctx.actions.runAction('browser:start', {});
+// Keep process alive
+process.on('SIGINT', async () => {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Shutting down...' });
+process.exit(0);
+});
 runtime.logger.info('Press Ctrl+C to exit.');
-}
+})().catch((err) => {
+console.error(chalk.red('Fatal Error:'), err.message || err);
+process.exit(1);
+});
 // Handle global errors
 process.on('uncaughtException', (err) => {
 if (err.code === 'ECONNRESET' || err.message?.includes('ECONNRESET')) {

@@ -139,7 +177,3 @@ process.on('uncaughtException', (err) => {
 process.on('unhandledRejection', (reason) => {
 console.error('Unhandled Rejection:', reason);
 });
-main().catch(err => {
-console.error(chalk.red('Fatal Error:'), err.message || err);
-process.exit(1);
-});
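The substantive change in index.js is the startup order: the runtime (and with it the ServerPlugin) is initialized first so a hot-reload port exists, and only then does the CLI authenticate, reporting that port to the backend via `/preview/init`. A minimal sketch of that ordering is shown below, assuming only the context shape visible in the diff (`initialize()`, `getContext()`, `ctx.hotReloadPort`, `ctx.host.config`); the `authenticate` callback stands in for the pairing flow above and this is not the package's literal code.

```js
// Sketch only: the 0.1.13 boot order, with dependencies passed in.
export async function bootWithAllocatedPort(runtime, authenticate, options) {
  await runtime.initialize();                  // ServerPlugin binds its port during init
  const ctx = runtime.getContext();

  const port = ctx.hotReloadPort;              // published by ServerPlugin (see below)
  if (!port) throw new Error('Failed to allocate server port');

  let { job: jobId, user: userId, token } = options;
  if (!jobId || !userId) {
    // authenticate(host, port) performs the /preview/init handshake shown above
    ({ jobId, userId, token } = await authenticate(options.host, port));
  }

  // Late-bind credentials so plugins that captured config at setup time see them.
  ctx.host.config.jobId = jobId;
  ctx.host.config.user = userId;
  ctx.host.config.token = token || '';
  return ctx;
}
```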
package/dist/plugins/DownloaderPlugin.js
CHANGED

@@ -12,35 +12,36 @@ export const DownloaderPlugin = {
 const config = ctx.host.config;
 const DIST_DIR = path.join(config.workDir, 'dist');
 const DOWNLOAD_PATH = path.join(config.workDir, 'extension.zip');
-
-const
-
-
-
-
-
-
-const
-
-
-
-
+// Helper function to create axios client with current config
+const createClient = () => {
+const rawToken = config.token ? String(config.token) : '';
+const token = rawToken.replace(/^Bearer\s+/i, '').trim();
+// Auto-extract user ID from token if not provided
+let userId = config.user;
+if (!userId && token) {
+try {
+const parts = token.split('.');
+if (parts.length === 3) {
+const payload = JSON.parse(Buffer.from(parts[1], 'base64').toString());
+userId = payload.id || payload.sub || payload.userId;
+}
+}
+catch (e) {
+// Ignore parse errors
 }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-})
-});
+ctx.actions.runAction('core:log', { level: 'info', message: `[DEBUG] DownloaderPlugin creating client with userId: ${userId}` });
+return axios.create({
+baseURL: config.host,
+headers: {
+'X-Preview-Token': token,
+'X-User-Id': userId
+},
+httpsAgent: new https.Agent({
+rejectUnauthorized: false
+})
+});
+};
 const VERSION_FILE = path.join(config.workDir, 'version');
 let lastModified = '';
 if (fs.existsSync(VERSION_FILE)) {

@@ -58,6 +59,7 @@ export const DownloaderPlugin = {
 let attempt = 0;
 while (attempt < MAX_RETRIES) {
 try {
+const client = createClient(); // Create client with current config
 const res = await client.get(`/jobs/${config.jobId}`);
 const job = res.data;
 const newVersion = job.version;

@@ -114,6 +116,7 @@ export const DownloaderPlugin = {
 handler: async () => {
 const spinner = ora('Downloading new version...').start();
 try {
+const client = createClient(); // Create client with current config
 const response = await client.get(`/download/${config.jobId}`, {
 responseType: 'arraybuffer'
 });

@@ -124,8 +127,10 @@ export const DownloaderPlugin = {
 zip.extractAllTo(DIST_DIR, true);
 // --- HOT RELOAD INJECTION ---
 try {
+// Get dynamically allocated port from ServerPlugin
+const hotReloadPort = ctx.hotReloadPort || 3500;
 const HOT_RELOAD_CODE = `
-const EVENT_SOURCE_URL = 'http://localhost
+const EVENT_SOURCE_URL = 'http://localhost:${hotReloadPort}/status';
 const CURRENT_JOB_ID = '${config.jobId}';
 let lastVersion = null;
 let lastJobId = null;

@@ -194,34 +199,7 @@ console.log('[Hot Reload] Active for Job:', CURRENT_JOB_ID);
 }
 }
 });
-//
-
-// Listen for browser failure to stop polling
-ctx.events.on('browser:launch-failed', () => {
-if (checkInterval) {
-clearInterval(checkInterval);
-checkInterval = undefined;
-ctx.actions.runAction('core:log', { level: 'warn', message: 'Polling stopped due to browser launch failure.' });
-// Update status happens in UI
-}
-});
-checkInterval = setInterval(async () => {
-try {
-// Use actions for main log (UI Plugin captures this)
-// console.error('[DownloaderPlugin] Tick - Checking Status...'); // REMOVE (Outside UI)
-// Silent polling for CLI mode
-// await ctx.actions.runAction('core:log', { level: 'info', message: '[DEBUG] Polling...' });
-await ctx.actions.runAction('downloader:check', null);
-}
-catch (err) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Poll Error: ${err.message}` });
-}
-}, 2000);
-},
-dispose(ctx) {
-if (checkInterval) {
-clearInterval(checkInterval);
-checkInterval = undefined;
-}
+// Polling removed in favor of push-based updates (POST /refresh)
+ctx.actions.runAction('core:log', { level: 'info', message: 'Ready. Waiting for update signals...' });
 }
 };
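The rewritten DownloaderPlugin builds its HTTP client lazily via `createClient()`, and when no `--user` was passed it derives the user ID from the preview token by decoding the JWT payload. The standalone sketch below isolates that fallback; the claim names (`id`, `sub`, `userId`) mirror the diff, while the `base64url` decoder is my substitution for the plain `base64` call and is the safer choice for JWT segments on recent Node versions.

```js
// Illustrative helper, not the plugin's code: recover a user ID from a bearer token.
export function userIdFromToken(rawToken) {
  const token = String(rawToken || '').replace(/^Bearer\s+/i, '').trim();
  const parts = token.split('.');
  if (parts.length !== 3) return undefined;    // not a JWT, caller falls back to config.user
  try {
    const payload = JSON.parse(Buffer.from(parts[1], 'base64url').toString('utf8'));
    return payload.id || payload.sub || payload.userId;
  } catch {
    return undefined;                          // malformed payload: ignore, like the plugin does
  }
}
```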
package/dist/plugins/ServerPlugin.js
CHANGED

@@ -2,9 +2,13 @@ import http from 'http';
 export const ServerPlugin = {
 name: 'server',
 version: '1.0.0',
-setup(ctx) {
+async setup(ctx) {
 let currentVersion = '0.0.0';
-
+// Try to bind to a port, retrying with incremented ports on failure
+const startPort = 3500;
+const maxAttempts = 100;
+let allocatedPort = null;
+let server = null;
 // Listen for version updates
 ctx.events.on('downloader:updated', (data) => {
 if (data && data.version) {

@@ -12,11 +16,13 @@ export const ServerPlugin = {
 ctx.actions.runAction('core:log', { level: 'info', message: `Server: Reporting version ${currentVersion}` });
 }
 });
-
+// Create server with request handler
+const requestHandler = (req, res) => {
 // CORS Headers
 res.setHeader('Access-Control-Allow-Origin', '*');
-res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS');
+res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
 res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
+res.setHeader('Access-Control-Allow-Private-Network', 'true');
 if (req.method === 'OPTIONS') {
 res.writeHead(204);
 res.end();

@@ -24,29 +30,87 @@ export const ServerPlugin = {
 }
 if (req.url === '/status') {
 const currentJobId = ctx.host.config.jobId;
-// ctx.actions.runAction('core:log', { level: 'info', message: `[DEBUG] Server: Extension requested status (Reporting: ${currentVersion})` });
 res.writeHead(200, { 'Content-Type': 'application/json' });
 res.end(JSON.stringify({
 version: currentVersion,
-jobId: currentJobId
+jobId: currentJobId,
+port: allocatedPort
 }));
 }
+else if (req.url === '/refresh' && req.method === 'POST') {
+// Trigger manual check
+ctx.actions.runAction('core:log', { level: 'info', message: '[API] Refresh request received' });
+ctx.actions.runAction('downloader:check', null).then((result) => {
+ctx.actions.runAction('core:log', { level: 'info', message: `[API] Check result: ${result}` });
+}).catch((err) => {
+ctx.actions.runAction('core:log', { level: 'error', message: `[API] Check failed: ${err.message}` });
+});
+res.writeHead(200, { 'Content-Type': 'application/json' });
+res.end(JSON.stringify({ success: true }));
+}
+else if (req.url === '/disconnect' && req.method === 'POST') {
+// Trigger browser stop
+ctx.actions.runAction('core:log', { level: 'info', message: '[API] Disconnect request received' });
+ctx.actions.runAction('browser:stop', null).then((result) => {
+ctx.actions.runAction('core:log', { level: 'info', message: `[API] Browser stop result: ${result}` });
+}).catch((err) => {
+ctx.actions.runAction('core:log', { level: 'error', message: `[API] Browser stop failed: ${err.message}` });
+});
+res.writeHead(200, { 'Content-Type': 'application/json' });
+res.end(JSON.stringify({ success: true }));
+}
 else {
 res.writeHead(404);
 res.end('Not Found');
 }
-}
-
-
-
-
-
-
+};
+// Try to bind to ports sequentially
+for (let attempt = 0; attempt < maxAttempts; attempt++) {
+const port = startPort + attempt;
+try {
+server = http.createServer(requestHandler);
+// Wrap listen in a promise to handle async properly
+await new Promise((resolve, reject) => {
+server.once('error', (err) => {
+if (err.code === 'EADDRINUSE') {
+reject(err);
+}
+else {
+reject(err);
+}
+});
+server.once('listening', () => {
+resolve();
+});
+server.listen(port);
+});
+// Success! Port is allocated
+allocatedPort = port;
+await ctx.actions.runAction('core:log', { level: 'info', message: `Hot Reload Server running on port ${allocatedPort}` });
+break;
 }
-
-
+catch (err) {
+if (err.code === 'EADDRINUSE') {
+// Port busy, try next one
+if (server) {
+server.removeAllListeners();
+server = null;
+}
+continue;
+}
+else {
+// Other error, fail immediately
+await ctx.actions.runAction('core:log', { level: 'error', message: `Server error: ${err.message}` });
+return;
+}
 }
-}
+}
+if (!allocatedPort || !server) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to allocate port after ${maxAttempts} attempts (ports ${startPort}-${startPort + maxAttempts - 1})` });
+return;
+}
+// Store port in context for DownloaderPlugin to use
+ctx.hotReloadPort = allocatedPort;
 // Store server instance to close later
 ctx._serverInstance = server;
 },
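The core of the new ServerPlugin is the sequential port scan: create the server, try to `listen`, and move to the next port only on `EADDRINUSE`. A self-contained sketch of that pattern follows; the handler and range are parameters here, with 3500 and 100 matching the values in the diff.

```js
// Sketch of the port-scanning strategy, assuming plain Node http (no framework).
import http from 'http';

export async function listenOnFirstFreePort(requestHandler, startPort = 3500, maxAttempts = 100) {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const port = startPort + attempt;
    const server = http.createServer(requestHandler);
    try {
      await new Promise((resolve, reject) => {
        server.once('error', reject);
        server.once('listening', resolve);
        server.listen(port);
      });
      return { server, port };                  // bound successfully
    } catch (err) {
      server.removeAllListeners();
      if (err.code !== 'EADDRINUSE') throw err; // only "port busy" is retried
    }
  }
  throw new Error(`No free port in ${startPort}-${startPort + maxAttempts - 1}`);
}
```

The plugin stores the winning port on `ctx.hotReloadPort`, which is how index.js and the DownloaderPlugin pick it up.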
package/dist/plugins/browser/BrowserManagerPlugin.js
ADDED

@@ -0,0 +1,86 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { findExtensionRoot, validateExtension } from '../../utils/browserUtils.js';
+export const BrowserManagerPlugin = {
+name: 'browser-manager',
+version: '1.0.0',
+setup(ctx) {
+const config = ctx.host.config;
+const DIST_DIR = path.join(config.workDir, 'dist');
+// --- Centralized Path Strategy ---
+const isWSL = fs.existsSync('/mnt/c');
+const isWin = process.platform === 'win32';
+// Unified Staging Path (C:\\Temp for Windows/WSL, local for others)
+const STAGING_DIR = isWSL
+? '/mnt/c/Temp/ai-ext-preview'
+: (isWin ? 'C:\\Temp\\ai-ext-preview' : path.join(config.workDir, '../staging'));
+// --- SYNC FUNCTION ---
+const syncToStaging = async () => {
+try {
+if (fs.existsSync(STAGING_DIR)) {
+fs.emptyDirSync(STAGING_DIR);
+}
+fs.ensureDirSync(STAGING_DIR);
+fs.copySync(DIST_DIR, STAGING_DIR);
+await ctx.actions.runAction('core:log', { level: 'info', message: `Synced code to Staging` });
+// Emit staged event (optional)
+ctx.events.emit('browser:staged', { path: STAGING_DIR });
+}
+catch (err) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to sync to staging: ${err.message}` });
+}
+};
+const launchBrowser = async () => {
+// Resolve proper root AFTER sync
+const extensionRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
+// Validate
+const validation = validateExtension(extensionRoot);
+if (!validation.valid) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `[CRITICAL] Extension validation failed: ${validation.error} in ${extensionRoot}` });
+}
+else if (extensionRoot !== STAGING_DIR) {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Detected nested extension at: ${path.basename(extensionRoot)}` });
+}
+// Delegate Launch
+// We pass the filesystem path (STAGING_DIR or extensionRoot)
+// The specific Launcher plugin handles environment specific path verification/conversion
+await ctx.actions.runAction('launcher:launch', {
+extensionPath: extensionRoot,
+stagingDir: STAGING_DIR
+});
+};
+let isInitialized = false;
+// Action: Start Browser (Orchestrator)
+ctx.actions.registerAction({
+id: 'browser:start',
+handler: async () => {
+await syncToStaging();
+await launchBrowser();
+isInitialized = true;
+return true;
+}
+});
+// Action: Stop Browser
+ctx.actions.registerAction({
+id: 'browser:stop',
+handler: async () => {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Stopping browser...' });
+const result = await ctx.actions.runAction('launcher:kill', null);
+return result;
+}
+});
+// Event: Update detected
+ctx.events.on('downloader:updated', async () => {
+if (isInitialized) {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Update detected. Syncing to staging...' });
+await ctx.actions.runAction('browser:start', {});
+}
+});
+// Event: Browser closed (from launcher)
+ctx.events.on('browser:closed', async (data) => {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Browser closed with code ${data.code}` });
+// Emit event that can be picked up by other plugins (e.g., to notify backend)
+ctx.events.emit('session:terminated', { reason: 'browser_closed' });
+});
+}
+};
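Before every launch the manager mirrors the downloaded `dist` folder into a staging directory that the launcher (and, on Windows/WSL, Chrome itself) can read. A minimal sketch of that sync using fs-extra is below; the paths in the usage comment are illustrative, not values from the package.

```js
// Sketch of the staging sync: wipe the staging folder, then copy dist into it.
import fs from 'fs-extra';

export async function syncDistToStaging(distDir, stagingDir) {
  await fs.emptyDir(stagingDir);   // creates the directory if missing, empties it otherwise
  await fs.copy(distDir, stagingDir);
  return stagingDir;
}

// Example (hypothetical paths):
// await syncDistToStaging('/home/me/.ai-extension-preview/job123/dist', '/mnt/c/Temp/ai-ext-preview');
```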
package/dist/plugins/browser/NativeLauncherPlugin.js
ADDED

@@ -0,0 +1,91 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { spawn } from 'child_process';
+import { findChrome, normalizePathToWindows } from '../../utils/browserUtils.js';
+let chromeProcess = null;
+export const NativeLauncherPlugin = {
+name: 'native-launcher',
+version: '1.0.0',
+setup(ctx) {
+// Only active if NOT in WSL
+const isWSL = fs.existsSync('/mnt/c');
+if (isWSL)
+return;
+ctx.actions.registerAction({
+id: 'launcher:launch',
+handler: async (payload) => {
+const config = ctx.host.config;
+const chromePath = findChrome();
+if (!chromePath) {
+await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found.' });
+return false;
+}
+let executable = chromePath;
+if (process.platform === 'win32') {
+executable = normalizePathToWindows(chromePath);
+}
+// Native Windows / Linux
+let safeDist = path.resolve(payload.extensionPath);
+// Default Profile
+let safeProfile = path.join(path.dirname(config.workDir), 'profile');
+if (process.platform === 'win32') {
+safeDist = normalizePathToWindows(safeDist);
+// Use C:\\Temp profile to avoid permissions issues
+safeProfile = 'C:\\\\Temp\\\\ai-ext-profile';
+}
+await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Executable: ${executable} ` });
+await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Target: ${safeDist} ` });
+const cleanArgs = [
+`--load-extension=${safeDist}`,
+`--user-data-dir=${safeProfile}`,
+'--no-first-run',
+'--no-default-browser-check',
+'--disable-gpu',
+'chrome://extensions'
+];
+try {
+// Kill existing process if any
+if (chromeProcess) {
+chromeProcess.kill();
+chromeProcess = null;
+}
+chromeProcess = spawn(executable, cleanArgs, {
+detached: false,
+stdio: 'ignore'
+});
+// Monitor process exit
+chromeProcess.on('exit', async (code) => {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome exited with code ${code} ` });
+chromeProcess = null;
+ctx.events.emit('browser:closed', { code });
+});
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome launched with PID: ${chromeProcess.pid} ` });
+}
+catch (spawnErr) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Spawn Failed: ${spawnErr.message} ` });
+return false;
+}
+return true;
+}
+});
+// Register kill action
+ctx.actions.registerAction({
+id: 'launcher:kill',
+handler: async () => {
+if (chromeProcess) {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Terminating Chrome process...' });
+chromeProcess.kill();
+chromeProcess = null;
+return true;
+}
+return false;
+}
+});
+},
+dispose(ctx) {
+if (chromeProcess) {
+chromeProcess.kill();
+chromeProcess = null;
+}
+}
+};
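On native Windows, macOS, or Linux the launch reduces to spawning Chrome with `--load-extension` and an isolated `--user-data-dir`. The sketch below condenses that call; the flags mirror the diff, while the function signature and parameters are illustrative. Keeping the child attached (`detached: false`) is what lets the plugin observe `exit` and emit `browser:closed`.

```js
// Condensed sketch of the native launch, not the plugin itself.
import { spawn } from 'child_process';

export function launchChrome(executable, extensionDir, profileDir, onExit) {
  const child = spawn(executable, [
    `--load-extension=${extensionDir}`,
    `--user-data-dir=${profileDir}`,
    '--no-first-run',
    '--no-default-browser-check',
    '--disable-gpu',
    'chrome://extensions'
  ], { detached: false, stdio: 'ignore' });    // stay attached so exit can be observed
  child.on('exit', (code) => onExit?.(code));
  return child;
}
```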
package/dist/plugins/browser/WSLLauncherPlugin.js
ADDED

@@ -0,0 +1,180 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { spawn } from 'child_process';
+import { findChrome } from '../../utils/browserUtils.js';
+let chromePid = null;
+export const WSLLauncherPlugin = {
+name: 'wsl-launcher',
+version: '1.0.0',
+setup(ctx) {
+// Only active in WSL
+const isWSL = fs.existsSync('/mnt/c');
+if (!isWSL)
+return;
+ctx.actions.registerAction({
+id: 'launcher:launch',
+handler: async (payload) => {
+const chromePath = findChrome();
+if (!chromePath) {
+await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found for detached launch.' });
+return false;
+}
+// Hardcoded Safe Paths for WSL Strategy
+const winStagingDir = 'C:\\\\Temp\\\\ai-ext-preview';
+const winProfile = 'C:\\\\Temp\\\\ai-ext-profile';
+// Calculate Final Windows Extension Path
+// We assume payload.extensionPath starts with /mnt/c/Temp/ai-ext-preview
+// But simplified: We know we sync to STAGING_DIR.
+// If extensionPath is nested, we handle it relative to STAGING_DIR.
+let finalWinExtensionPath = winStagingDir;
+if (payload.extensionPath !== payload.stagingDir) {
+const relative = path.relative(payload.stagingDir, payload.extensionPath);
+// Join with backslashes
+finalWinExtensionPath = path.posix.join(winStagingDir.replace(/\\\\/g, '/'), relative).replace(/\//g, '\\\\');
+}
+const driveLetter = 'c';
+const winChromePath = chromePath
+.replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\\\`)
+.replace(/\//g, '\\\\');
+await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Launch Target (Win): ${finalWinExtensionPath}` });
+// Create PowerShell Launch Script with PID capture
+const psContent = `
+$chromePath = "${winChromePath}"
+$extPath = "${finalWinExtensionPath}"
+$profilePath = "${winProfile}"
+
+# Verify Paths
+if (-not (Test-Path -Path $extPath)) {
+Write-Host "ERROR: Extension Path NOT FOUND!"
+exit 1
+}
+
+# Create Profile Dir if needed
+if (-not (Test-Path -Path $profilePath)) {
+New-Item -ItemType Directory -Force -Path $profilePath | Out-Null
+}
+
+$argsList = @(
+"--load-extension=\`"$extPath\`"",
+"--user-data-dir=\`"$profilePath\`"",
+"--no-first-run",
+"--no-default-browser-check",
+"--disable-gpu",
+"about:blank"
+)
+
+# Launch and capture PID
+$process = Start-Process -FilePath $chromePath -ArgumentList $argsList -PassThru
+Write-Host "CHROME_PID:$($process.Id)"
+`;
+// Write ps1 to STAGING_DIR/launch.ps1
+const psPath = path.join(payload.stagingDir, 'launch.ps1');
+try {
+await fs.writeFile(psPath, psContent);
+}
+catch (e) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `WSL Write PS1 Failed: ${e.message}` });
+return false;
+}
+// Execute via PowerShell
+const psPathWin = `${winStagingDir}\\\\launch.ps1`;
+const child = spawn('powershell.exe', ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', psPathWin], {
+detached: false,
+stdio: ['ignore', 'pipe', 'pipe']
+});
+// Capture PID from output
+if (child.stdout) {
+child.stdout.on('data', async (chunk) => {
+const msg = chunk.toString();
+const pidMatch = msg.match(/CHROME_PID:(\d+)/);
+if (pidMatch) {
+chromePid = parseInt(pidMatch[1], 10);
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome launched with PID: ${chromePid}` });
+// Start monitoring the process
+monitorProcess(ctx, chromePid);
+}
+await ctx.actions.runAction('core:log', { level: 'info', message: `[PS1] ${msg.trim()}` });
+});
+}
+if (child.stderr) {
+child.stderr.on('data', async (chunk) => {
+const msg = chunk.toString();
+// Ignore minor PS noise unless critical
+if (msg.includes('Exec format error')) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `CRITICAL: WSL Interop broken.` });
+}
+else if (msg.trim()) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Error: ${msg}` });
+}
+});
+}
+return true;
+}
+});
+// Register kill action
+ctx.actions.registerAction({
+id: 'launcher:kill',
+handler: async () => {
+if (chromePid) {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Terminating Chrome process (PID: ${chromePid})...` });
+try {
+// Use taskkill via PowerShell
+const killChild = spawn('powershell.exe', ['-Command', `Stop-Process -Id ${chromePid} -Force`], {
+stdio: 'ignore'
+});
+killChild.on('exit', async (code) => {
+if (code === 0) {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Chrome process terminated successfully.' });
+chromePid = null;
+}
+else {
+await ctx.actions.runAction('core:log', { level: 'warn', message: `taskkill exited with code ${code}` });
+}
+});
+return true;
+}
+catch (err) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Kill failed: ${err.message}` });
+return false;
+}
+}
+return false;
+}
+});
+// Helper function to monitor process
+function monitorProcess(ctx, pid) {
+const checkInterval = setInterval(async () => {
+try {
+const checkChild = spawn('powershell.exe', ['-Command', `Get-Process -Id ${pid} -ErrorAction SilentlyContinue`], {
+stdio: 'pipe'
+});
+let output = '';
+if (checkChild.stdout) {
+checkChild.stdout.on('data', (chunk) => {
+output += chunk.toString();
+});
+}
+checkChild.on('exit', async (code) => {
+if (!output.trim() || code !== 0) {
+// Process no longer exists
+clearInterval(checkInterval);
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Chrome process exited.' });
+chromePid = null;
+ctx.events.emit('browser:closed', { code: 0 });
+}
+});
+}
+catch (err) {
+clearInterval(checkInterval);
+}
+}, 2000); // Check every 2 seconds
+}
+},
+dispose(ctx) {
+if (chromePid) {
+// Attempt to kill on cleanup
+spawn('powershell.exe', ['-Command', `Stop-Process -Id ${chromePid} -Force`], { stdio: 'ignore' });
+chromePid = null;
+}
+}
+};
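Because Chrome runs on the Windows side, the launcher has to translate WSL paths such as `/mnt/c/Temp/ai-ext-preview` into `C:\Temp\ai-ext-preview` before embedding them in the PowerShell script. The conversion in the diff is specialised to drive `c`; the sketch below generalises it to any `/mnt/<drive>/` prefix and is illustrative rather than the plugin's code.

```js
// Translate a WSL mount path into the Windows path Chrome and PowerShell expect.
export function wslPathToWindows(p) {
  const m = p.match(/^\/mnt\/([a-z])\/(.*)$/i);
  if (!m) return p;                                        // not a /mnt/<drive>/ path
  return `${m[1].toUpperCase()}:\\${m[2].replace(/\//g, '\\')}`;
}

// wslPathToWindows('/mnt/c/Temp/ai-ext-preview') -> 'C:\\Temp\\ai-ext-preview'
```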
package/dist/utils/browserUtils.js
ADDED

@@ -0,0 +1,79 @@
+import path from 'path';
+import fs from 'fs-extra';
+const CHROME_PATHS = [
+// Standard Windows Paths
+'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe',
+'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
+// WSL Mappings
+'/mnt/c/Program Files/Google/Chrome/Application/chrome.exe',
+'/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
+// Git Bash / Unix-y Windows Environment Mappings
+'/c/Program Files/Google/Chrome/Application/chrome.exe',
+'/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
+// Linux
+'/usr/bin/google-chrome',
+'/usr/bin/chromium'
+];
+export function findChrome() {
+for (const p of CHROME_PATHS) {
+if (fs.existsSync(p))
+return p;
+}
+return null;
+}
+// --- Helper to find actual extension root (handle nested folder in zip) ---
+export const findExtensionRoot = (dir) => {
+if (fs.existsSync(path.join(dir, 'manifest.json')))
+return dir;
+// Check immediate subdirectories (depth 1)
+try {
+const items = fs.readdirSync(dir);
+for (const item of items) {
+const fullPath = path.join(dir, item);
+if (fs.statSync(fullPath).isDirectory()) {
+if (fs.existsSync(path.join(fullPath, 'manifest.json'))) {
+return fullPath;
+}
+}
+}
+}
+catch (e) {
+// Dir might be empty or invalid
+}
+return null;
+};
+export const normalizePathToWindows = (p) => {
+// Handle Git Bash /c/ style
+const gitBashMatch = p.match(/^\/([a-z])\/(.*)/i);
+if (gitBashMatch) {
+return `${gitBashMatch[1].toUpperCase()}:\\${gitBashMatch[2].replace(/\//g, '\\')}`;
+}
+// Handle Forward slashes
+return p.replace(/\//g, '\\');
+};
+export const stripTrailingSlash = (p) => {
+return p.replace(/[\\\/]+$/, '');
+};
+// --- Helper to validate extension directory existence and structure ---
+export const validateExtension = (dir) => {
+if (!fs.existsSync(dir)) {
+return { valid: false, error: 'Directory does not exist' };
+}
+const stats = fs.statSync(dir);
+if (!stats.isDirectory()) {
+return { valid: false, error: 'Path is not a directory' };
+}
+const manifestPath = path.join(dir, 'manifest.json');
+if (!fs.existsSync(manifestPath)) {
+return { valid: false, error: 'manifest.json missing' };
+}
+// Basic JSON validity check
+try {
+const content = fs.readFileSync(manifestPath, 'utf-8');
+JSON.parse(content);
+}
+catch (e) {
+return { valid: false, error: 'manifest.json is invalid JSON' };
+}
+return { valid: true };
+};
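These helpers previously lived inside BrowserPlugin.js and are now shared by the manager and both launchers. A short usage sketch (with an illustrative import path and staging directory) shows how they compose:

```js
// Usage sketch for the shared helpers; the import path and stagingDir are illustrative.
import { findChrome, findExtensionRoot, validateExtension } from './dist/utils/browserUtils.js';

const stagingDir = '/mnt/c/Temp/ai-ext-preview';
const chrome = findChrome();                               // first existing CHROME_PATHS entry, or null
const root = findExtensionRoot(stagingDir) || stagingDir;  // handles zips that nest the extension one level deep
const check = validateExtension(root);                     // { valid: true } or { valid: false, error: '...' }

if (!chrome) console.error('Chrome not found');
if (!check.valid) console.error(`Invalid extension: ${check.error}`);
```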
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "ai-extension-preview",
-"version": "0.1.
+"version": "0.1.13",
 "description": "Local preview tool for AI Extension Builder",
 "type": "module",
 "bin": {

@@ -21,6 +21,7 @@
 "license": "MIT",
 "scripts": {
 "build": "shx rm -rf dist && tsc -b && shx chmod +x dist/index.js",
+"prepublishOnly": "npm run build",
 "start": "tsx src/index.ts",
 "dev": "tsx watch src/index.ts",
 "preview": "node dist/index.js"

@@ -50,4 +51,4 @@
 "typescript": "^5.7.2",
 "vitest": "^4.0.16"
 }
-}
+}
package/dist/plugins/BrowserPlugin.js
REMOVED

@@ -1,297 +0,0 @@
-import path from 'path';
-import { spawn } from 'child_process';
-import fs from 'fs-extra';
-const CHROME_PATHS = [
-// Standard Windows Paths
-'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe',
-'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
-// WSL Mappings
-'/mnt/c/Program Files/Google/Chrome/Application/chrome.exe',
-'/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
-// Git Bash / Unix-y Windows Environment Mappings
-'/c/Program Files/Google/Chrome/Application/chrome.exe',
-'/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
-// Linux
-'/usr/bin/google-chrome',
-'/usr/bin/chromium'
-];
-function findChrome() {
-for (const p of CHROME_PATHS) {
-if (fs.existsSync(p))
-return p;
-}
-return null;
-}
-// --- Helper to find actual extension root (handle nested folder in zip) ---
-export const findExtensionRoot = (dir) => {
-if (fs.existsSync(path.join(dir, 'manifest.json')))
-return dir;
-// Check immediate subdirectories (depth 1)
-try {
-const items = fs.readdirSync(dir);
-for (const item of items) {
-const fullPath = path.join(dir, item);
-if (fs.statSync(fullPath).isDirectory()) {
-if (fs.existsSync(path.join(fullPath, 'manifest.json'))) {
-return fullPath;
-}
-}
-}
-}
-catch (e) {
-// Dir might be empty or invalid
-}
-return null;
-};
-export const normalizePathToWindows = (p) => {
-// Handle Git Bash /c/ style
-const gitBashMatch = p.match(/^\/([a-z])\/(.*)/i);
-if (gitBashMatch) {
-return `${gitBashMatch[1].toUpperCase()}:\\${gitBashMatch[2].replace(/\//g, '\\')}`;
-}
-// Handle Forward slashes
-return p.replace(/\//g, '\\');
-};
-export const stripTrailingSlash = (p) => {
-return p.replace(/[\\\/]+$/, '');
-};
-// --- Helper to validate extension directory existence and structure ---
-export const validateExtension = (dir) => {
-if (!fs.existsSync(dir)) {
-return { valid: false, error: 'Directory does not exist' };
-}
-const stats = fs.statSync(dir);
-if (!stats.isDirectory()) {
-return { valid: false, error: 'Path is not a directory' };
-}
-const manifestPath = path.join(dir, 'manifest.json');
-if (!fs.existsSync(manifestPath)) {
-return { valid: false, error: 'manifest.json missing' };
-}
-// Basic JSON validity check
-try {
-const content = fs.readFileSync(manifestPath, 'utf-8');
-JSON.parse(content);
-}
-catch (e) {
-return { valid: false, error: 'manifest.json is invalid JSON' };
-}
-return { valid: true };
-};
-export const BrowserPlugin = {
-name: 'browser',
-version: '1.0.0',
-setup(ctx) {
-const config = ctx.host.config;
-const DIST_DIR = path.join(config.workDir, 'dist');
-let runner = null;
-const launchDetached = async () => {
-const chromePath = findChrome();
-if (!chromePath) {
-await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found for detached launch.' });
-return false;
-}
-const isWSL = fs.existsSync('/mnt/c');
-let executable = chromePath;
-// Normalize Executable for Native Windows (Git Bash)
-if (!isWSL && process.platform === 'win32') {
-executable = normalizePathToWindows(chromePath);
-}
-const isWin = process.platform === 'win32';
-const STAGING_DIR = isWSL
-? '/mnt/c/Temp/ai-ext-preview'
-: (isWin ? 'C:\\Temp\\ai-ext-preview' : path.join(config.workDir, '../staging'));
-// On Windows (Native or WSL host), Chrome sees:
-const EXTENSION_PATH = (isWSL || isWin) ? 'C:\\Temp\\ai-ext-preview' : STAGING_DIR;
-// Clean profile path for everyone
-const WIN_PROFILE_DIR = 'C:\\Temp\\ai-ext-profile';
-// --- SYNC FUNCTION ---
-const syncToStaging = async () => {
-try {
-if (fs.existsSync(STAGING_DIR)) {
-fs.emptyDirSync(STAGING_DIR);
-}
-fs.ensureDirSync(STAGING_DIR);
-fs.copySync(DIST_DIR, STAGING_DIR);
-await ctx.actions.runAction('core:log', { level: 'info', message: `Synced code to Staging` });
-// Emit staged event for ServerPlugin (optional for now, but good practice)
-ctx.events.emit('browser:staged', { path: STAGING_DIR });
-}
-catch (err) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to sync to staging: ${err.message}` });
-}
-};
-// Initial Sync
-await syncToStaging();
-// Resolve proper root AFTER sync
-let extensionRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
-// Check if we found a valid root
-const validation = validateExtension(extensionRoot);
-if (!validation.valid) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `[CRITICAL] Extension validation failed: ${validation.error} in ${extensionRoot}` });
-await ctx.actions.runAction('core:log', { level: 'info', message: `Checked Path: ${extensionRoot}` });
-// We proceed anyway? Or should we stop?
-// Previous logic proceeded but logged critical error.
-// Let's keep it logging critical but maybe return false if we wanted to be strict.
-// However, user might fix it live.
-}
-else if (extensionRoot !== STAGING_DIR) {
-await ctx.actions.runAction('core:log', { level: 'info', message: `Detected nested extension at: ${path.basename(extensionRoot)}` });
-}
-// Listen for updates and re-sync
-ctx.events.on('downloader:updated', async (data) => {
-await ctx.actions.runAction('core:log', { level: 'info', message: 'Update detected. Syncing to staging...' });
-await syncToStaging();
-// Re-validate on update?
-// const newRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
-// const newValidation = validateExtension(newRoot);
-// if (!newValidation.valid) ...
-});
-await ctx.actions.runAction('core:log', { level: 'info', message: 'Browser running in Detached Mode.' });
-// Launch Logic
-// Launch Logic
-if (isWSL) {
-// -------------------------------------------------------------------------
-// WSL STRATEGY (Validated 2025-12-24)
-// 1. Use Windows User Profile for staging to avoid Permission/Path issues
-// 2. Use PowerShell script to launch Chrome to reliably pass arguments
-// -------------------------------------------------------------------------
-// 1. Setup Safe Paths (C:\Temp)
-// We use the same path that syncToStaging() used (/mnt/c/Temp/ai-ext-preview)
-const winStagingDir = 'C:\\Temp\\ai-ext-preview';
-const winProfile = 'C:\\Temp\\ai-ext-profile';
-let userProfileWin = 'C:\\Temp'; // Legacy variable support
-const driveLetter = 'c';
-// Calculate final paths
-let finalWinExtensionPath = winStagingDir;
-// Handle nested extension root
-if (extensionRoot !== STAGING_DIR) {
-const relative = path.relative(STAGING_DIR, extensionRoot);
-finalWinExtensionPath = path.posix.join(winStagingDir.replace(/\\/g, '/'), relative).replace(/\//g, '\\');
-}
-const winChromePath = chromePath
-.replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\`)
-.replace(/\//g, '\\');
-await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Launch Target (Win): ${finalWinExtensionPath}` });
-// await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Profile (Win): ${winProfile}` });
-// Create PowerShell Launch Script
-const psContent = `
-$chromePath = "${winChromePath}"
-$extPath = "${finalWinExtensionPath}"
-$profilePath = "${winProfile}"
-
-Write-Host "DEBUG: ChromePath: $chromePath"
-Write-Host "DEBUG: ExtPath: $extPath"
-Write-Host "DEBUG: ProfilePath: $profilePath"
-
-# Verify Paths
-if (-not (Test-Path -Path $extPath)) {
-Write-Host "ERROR: Extension Path NOT FOUND!"
-} else {
-Write-Host "DEBUG: Extension Path Exists."
-}
-
-# Create Profile Dir if needed
-if (-not (Test-Path -Path $profilePath)) {
-New-Item -ItemType Directory -Force -Path $profilePath | Out-Null
-}
-
-$argsList = @(
-"--load-extension=""$extPath""",
-"--user-data-dir=""$profilePath""",
-"--no-first-run",
-"--no-default-browser-check",
-"--disable-gpu",
-"about:blank"
-)
-
-# Convert to single string to ensure Start-Process handles it safely
-$argStr = $argsList -join " "
-Write-Host "DEBUG: Args: $argStr"
-
-Write-Host "DEBUG: Launching Chrome..."
-Start-Process -FilePath $chromePath -ArgumentList $argStr
-`;
-// Write ps1 to /mnt/c/Temp/ai-ext-preview/launch.ps1 (Same as STAGING_DIR)
-const psPath = path.join(STAGING_DIR, 'launch.ps1');
-try {
-await fs.writeFile(psPath, psContent);
-}
-catch (e) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `WSL Write PS1 Failed: ${e.message}` });
-}
-// Execute via PowerShell (Spawn detached)
-// psPathWin is C:\\Temp\\ai-ext-preview\\launch.ps1
-const psPathWin = `${winStagingDir}\\launch.ps1`;
-const child = spawn('powershell.exe', ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', psPathWin], {
-detached: true,
-stdio: ['ignore', 'pipe', 'pipe'] // Pipe stderr AND stdout to catch launch errors/debug
-});
-if (child.stdout) {
-child.stdout.on('data', async (chunk) => {
-const msg = chunk.toString();
-await ctx.actions.runAction('core:log', { level: 'info', message: `[PS1] ${msg.trim()}` });
-});
-}
-if (child.stderr) {
-child.stderr.on('data', async (chunk) => {
-const msg = chunk.toString();
-await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Error (Stderr): ${msg}` });
-if (msg.includes('Exec format error')) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `CRITICAL: WSL Interop is broken. Cannot launch Chrome.` });
-await ctx.actions.runAction('core:log', { level: 'error', message: `FIX: Open PowerShell as Admin and run: wsl --shutdown` });
-ctx.events.emit('browser:launch-failed', { reason: 'WSL_INTEROP_BROKEN' });
-}
-});
-}
-child.on('error', async (err) => {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Failed: ${err.message}` });
-ctx.events.emit('browser:launch-failed', { reason: err.message });
-});
-child.unref();
-return true;
-}
-else {
-// Native Windows / Linux
-// Use extensionRoot which points to the detected subfolder or root
-let safeDist = path.resolve(extensionRoot);
-let safeProfile = path.join(path.dirname(config.workDir), 'profile'); // Default Linux/Mac
-// FIX: On Git Bash (win32), ensure paths are C:\Style for Chrome
-if (process.platform === 'win32') {
-safeDist = normalizePathToWindows(safeDist);
-// Use C:\Temp profile to avoid permissions issues, matching WSL strategy
-safeProfile = WIN_PROFILE_DIR;
-}
-await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Executable: ${executable}` });
-await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Target: ${safeDist}` });
-const cleanArgs = [
-`--load-extension=${safeDist}`,
-`--user-data-dir=${safeProfile}`,
-'--no-first-run',
-'--no-default-browser-check',
-'--disable-gpu',
-'chrome://extensions'
-];
-try {
-const subprocess = spawn(executable, cleanArgs, {
-detached: true,
-stdio: 'ignore'
-});
-subprocess.unref();
-}
-catch (spawnErr) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Spawn Failed: ${spawnErr.message}` });
-}
-return true;
-}
-};
-ctx.actions.registerAction({
-id: 'browser:start',
-handler: async () => {
-// Force Detached Mode for Reliability on ALL platforms
-// This creates the stable "Staging" workflow we want.
-return await launchDetached();
-}
-});
-}
-};