ai-extension-preview 0.1.14 → 0.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +28 -129
- package/dist/plugins/AppPlugin.js +59 -0
- package/dist/plugins/AuthPlugin.js +89 -0
- package/dist/plugins/ConfigPlugin.js +52 -0
- package/dist/plugins/CorePlugin.js +4 -6
- package/dist/plugins/DownloaderPlugin.js +50 -24
- package/dist/plugins/ServerPlugin.js +19 -16
- package/dist/plugins/browser/BrowserManagerPlugin.js +39 -18
- package/dist/plugins/browser/NativeLauncherPlugin.js +8 -6
- package/dist/plugins/browser/WSLLauncherPlugin.js +42 -22
- package/dist/types.js +1 -0
- package/dist/utils/sandbox.js +192 -0
- package/package.json +4 -3
package/dist/index.js
CHANGED
@@ -3,18 +3,9 @@ import 'dotenv/config'; // Load .env
  import { Command } from 'commander';
  import path from 'path';
  import { fileURLToPath } from 'url';
- import fs from 'fs-extra';
  import os from 'os';
  import { Runtime } from 'skeleton-crew-runtime';
- import { CorePlugin } from './plugins/CorePlugin.js';
- import { DownloaderPlugin } from './plugins/DownloaderPlugin.js';
- import { BrowserManagerPlugin } from './plugins/browser/BrowserManagerPlugin.js';
- import { WSLLauncherPlugin } from './plugins/browser/WSLLauncherPlugin.js';
- import { NativeLauncherPlugin } from './plugins/browser/NativeLauncherPlugin.js';
- import { ServerPlugin } from './plugins/ServerPlugin.js';
- import axios from 'axios';
  import chalk from 'chalk';
- const __dirname = path.dirname(fileURLToPath(import.meta.url));
  const DEFAULT_HOST = process.env.API_HOST || 'https://ai-extension-builder.01kb6018z1t9tpaza4y5f1c56w.lmapp.run/api';
  const program = new Command();
  program
@@ -26,138 +17,47 @@ program
  .option('--user <user>', 'User ID (if required)')
  .parse(process.argv);
  const options = program.opts();
-
-
-
- console.log('[DEBUG] Sending port to backend:', port);
- const initRes = await axios({
- method: 'post',
- url: `${host}/preview/init`,
- data: { port },
- headers: {
- 'Content-Type': 'application/json'
- }
- });
- console.log('[DEBUG] Init response:', initRes.data);
- const { code, sessionId } = initRes.data;
- console.log('\n' + chalk.bgBlue.bold(' DETACHED PREVIEW MODE ') + '\n');
- console.log('To connect, please go to your Extension Dashboard and click "Connect Preview".');
- console.log('Enter the following code:');
- console.log('\n' + chalk.green.bold(` ${code} `) + '\n');
- console.log('Waiting for connection...');
- // 2. Poll for Status
- while (true) {
- await new Promise(resolve => setTimeout(resolve, 2000));
- try {
- const statusRes = await axios.get(`${host}/preview/status/${sessionId}`);
- const data = statusRes.data;
- if (data.status === 'linked') {
- console.log(chalk.green('✔ Connected!'));
- if (!data.jobId) {
- console.error('Error: No Job ID associated with this connection.');
- process.exit(1);
- }
- console.log('[DEBUG] Received userId:', data.userId);
- console.log('[DEBUG] Received jobId:', data.jobId);
- return {
- jobId: data.jobId,
- userId: data.userId,
- token: data.token || ''
- };
- }
- if (data.status === 'expired') {
- console.error(chalk.red('Code expired. Please restart.'));
- process.exit(1);
- }
- }
- catch (err) {
- // Ignore poll errors, keep trying
- }
- }
- }
- catch (error) {
- console.error('Authentication failed:', error);
- throw error;
- }
- }
+ // Define __dirname for ESM
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
  // Use os.homedir() to ensure we have write permissions
- // Git Bash sometimes defaults cwd to C:\Program Files\Git which causes EPERM
  const HOME_DIR = os.homedir();
-
+ // Initial workdir based on options, or specific 'default' if not yet known.
+ // AuthPlugin will update this if job changes.
+ const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', options.job || 'default');
  (async () => {
- const { job:
- // 1. Initialize Runtime
+ const { job: initialJobId, host, token, user: userId } = options;
+ // 1. Initialize Runtime with Config
  const runtime = new Runtime({
-
-
-
-
-
-
-
-
-
+ config: {
+ host,
+ token: token || '',
+ user: userId || '',
+ jobId: initialJobId || '',
+ workDir: WORK_DIR
+ },
+ hostContext: {}, // Clear hostContext config wrapping
+ pluginPaths: [path.join(__dirname, 'plugins')] // [NEW] Auto-discovery
  });
-
- runtime.registerPlugin(CorePlugin);
- runtime.registerPlugin(DownloaderPlugin);
- runtime.registerPlugin(BrowserManagerPlugin);
- runtime.registerPlugin(WSLLauncherPlugin);
- runtime.registerPlugin(NativeLauncherPlugin);
- runtime.registerPlugin(ServerPlugin);
+ // Register Plugins
  runtime.logger.info('Initializing runtime...');
  await runtime.initialize();
  const ctx = runtime.getContext();
- //
-
-
- console.error('Failed to allocate server port');
- process.exit(1);
- }
- // 2. Now authenticate with the allocated port
- let finalJobId = jobId;
- let finalUserId = userId;
- let finalToken = token;
- if (!jobId || !userId) {
- const authData = await authenticate(host, allocatedPort);
- finalJobId = authData.jobId;
- finalUserId = authData.userId;
- finalToken = authData.token;
- // Update runtime config with auth data
- ctx.host.config.jobId = finalJobId;
- ctx.host.config.user = finalUserId;
- ctx.host.config.token = finalToken;
- }
- // 3. Start LifeCycle
- await ctx.actions.runAction('core:log', { level: 'info', message: 'Initializing Local Satellite...' });
- // Ensure work dir exists
- await fs.ensureDir(WORK_DIR);
- // Initial Check - Must succeed to continue
- const success = await ctx.actions.runAction('downloader:check', null);
- if (!success) {
- await ctx.actions.runAction('core:log', { level: 'error', message: 'Initial check failed. Could not verify job or download extension.' });
- process.exit(1);
- }
- // Wait for Extension files (Manifest)
- const manifestPath = path.join(WORK_DIR, 'dist', 'manifest.json');
- let attempts = 0;
- const maxAttempts = 60; // 2 minutes
- console.log('[DEBUG] Waiting for extension files...');
- while (!fs.existsSync(manifestPath) && attempts < maxAttempts) {
- await new Promise(r => setTimeout(r, 2000));
- attempts++;
- if (attempts % 5 === 0)
- console.log(`Waiting for extension generation... (${attempts * 2}s)`);
+ // 2. Start App Flow
+ try {
+ await ctx.actions.runAction('app:start', null);
  }
-
-
+ catch (error) {
+ console.error(chalk.red('App Error:'), error.message);
+ await runtime.shutdown();
  process.exit(1);
  }
- //
-
- //
+ // Keep process alive handled by Node event loop because ServerPlugin has an open server
+ // and Browser processes might be attached.
+ // Graceful Shutdown
  process.on('SIGINT', async () => {
  await ctx.actions.runAction('core:log', { level: 'info', message: 'Shutting down...' });
+ await runtime.shutdown();
  process.exit(0);
  });
  runtime.logger.info('Press Ctrl+C to exit.');
@@ -168,7 +68,6 @@ const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', options.job || 'de
  // Handle global errors
  process.on('uncaughtException', (err) => {
  if (err.code === 'ECONNRESET' || err.message?.includes('ECONNRESET')) {
- // Ignore pipe errors frequently caused by web-ext/chrome teardown
  return;
  }
  console.error('Uncaught Exception:', err);
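Note on the new plugin model: index.js no longer imports and registers each plugin explicitly; it passes a flat config object and a pluginPaths directory to the Runtime, which picks up the default-exported plugins added in this release. A minimal sketch of the plugin shape this appears to expect, inferred only from the plugin files in this diff (the skeleton-crew-runtime contract itself is not shown here, so treat the field names and the ctx API as assumptions):

// Hypothetical plugin, modeled on the plugins added in this release
const ExamplePlugin = {
  name: 'example',
  version: '1.0.0',
  dependencies: ['config'], // assumed: listed plugins are set up before this one
  setup(ctx) {
    ctx.actions.registerAction({
      id: 'example:hello',
      handler: async () => {
        // ctx.config is the flat object passed to new Runtime({ config: ... })
        await ctx.logger.info(`Hello from job ${ctx.config.jobId || 'unknown'}`);
        return true;
      }
    });
  }
};
export default ExamplePlugin; // default export, matching the plugins in this package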
package/dist/plugins/AppPlugin.js
ADDED
@@ -0,0 +1,59 @@
+ import fs from 'fs-extra';
+ import path from 'path';
+ export const AppPlugin = {
+ name: 'app',
+ version: '1.0.0',
+ dependencies: ['auth', 'config', 'downloader', 'browser-manager', 'server'],
+ setup(ctx) {
+ ctx.actions.registerAction({
+ id: 'app:start',
+ handler: async () => {
+ await ctx.logger.info('Initializing Local Satellite...');
+ // 1. Authenticate (if needed)
+ // AuthPlugin will automatically skip if already config'd, or prompt if needed
+ // It will also update config via config:set
+ await ctx.actions.runAction('auth:login');
+ // 2. Validate Configuration (Now that we have potential Auth data)
+ try {
+ await ctx.actions.runAction('config:validate', null);
+ }
+ catch (e) {
+ throw new Error(`Configuration Invalid: ${e.message}`);
+ }
+ // 3. Get Updated Config
+ const workDir = ctx.config.workDir;
+ // 3. Ensure Work Directory
+ await fs.ensureDir(workDir);
+ // 4. Initial Download/Check
+ const success = await ctx.actions.runAction('downloader:check', null);
+ if (!success) {
+ await ctx.logger.error('Initial check failed. Could not verify job or download extension.');
+ // We don't exit process here, but we might throw to stop flow
+ throw new Error('Initial check failed');
+ }
+ // 5. Wait for Extension Manifest
+ const manifestPath = path.join(workDir, 'dist', 'manifest.json');
+ let attempts = 0;
+ const maxAttempts = 60; // 2 minutes
+ // This logic could be in a 'watcher' plugin but fits here for now as part of "Startup Sequence"
+ if (!fs.existsSync(manifestPath)) {
+ await ctx.logger.info('[DEBUG] Waiting for extension files...');
+ while (!fs.existsSync(manifestPath) && attempts < maxAttempts) {
+ await new Promise(r => setTimeout(r, 2000));
+ attempts++;
+ if (attempts % 5 === 0) {
+ await ctx.logger.info(`Waiting for extension generation... (${attempts * 2}s)`);
+ }
+ }
+ }
+ if (!fs.existsSync(manifestPath)) {
+ await ctx.logger.error('Timed out waiting for extension files. Status check succeeded but files are missing.');
+ throw new Error('Timeout waiting for files');
+ }
+ // 6. Launch Browser
+ await ctx.actions.runAction('browser:start', {});
+ }
+ });
+ }
+ };
+ export default AppPlugin;
package/dist/plugins/AuthPlugin.js
ADDED
@@ -0,0 +1,89 @@
+ import axios from 'axios';
+ import chalk from 'chalk';
+ import path from 'path';
+ import os from 'os';
+ const AuthPlugin = {
+ name: 'auth',
+ version: '1.0.0',
+ dependencies: ['config', 'server'],
+ setup(ctx) {
+ ctx.actions.registerAction({
+ id: 'auth:login',
+ handler: async () => {
+ const hostContext = ctx.config;
+ // If we already have JobID and UserID, we might skip, but let's assume we need to verify or start fresh if missing
+ if (hostContext.jobId && hostContext.user) {
+ await ctx.logger.info('Auth: Job ID and User ID present. Skipping login.');
+ return { jobId: hostContext.jobId, user: hostContext.user, token: hostContext.token };
+ }
+ // We need the port from ServerPlugin
+ // We need the port from ServerPlugin
+ const allocatedPort = ctx.config.hotReloadPort;
+ if (!allocatedPort) {
+ throw new Error('Server port not found. Ensure ServerPlugin is loaded before AuthPlugin logic runs.');
+ }
+ const host = hostContext.host;
+ await ctx.logger.info(`Auth: Initiating login flow on ${host} with port ${allocatedPort}`);
+ try {
+ // 1. Init Session with port
+ const initRes = await axios({
+ method: 'post',
+ url: `${host}/preview/init`,
+ data: { port: allocatedPort },
+ headers: { 'Content-Type': 'application/json' }
+ });
+ const { code, sessionId } = initRes.data;
+ console.log('\n' + chalk.bgBlue.bold(' DETACHED PREVIEW MODE ') + '\n');
+ console.log('To connect, please go to your Extension Dashboard and click "Connect Preview".');
+ console.log('Enter the following code:');
+ console.log('\n' + chalk.green.bold(` ${code} `) + '\n');
+ console.log('Waiting for connection...');
+ // 2. Poll for Status
+ let attempts = 0;
+ while (true) {
+ await new Promise(resolve => setTimeout(resolve, 2000));
+ attempts++;
+ // Check if we should abort (e.g. from a cancel signal? unimplemented for now)
+ try {
+ const statusRes = await axios.get(`${host}/preview/status/${sessionId}`);
+ const data = statusRes.data;
+ if (data.status === 'linked') {
+ console.log(chalk.green('✔ Connected!'));
+ if (!data.jobId) {
+ throw new Error('No Job ID associated with this connection.');
+ }
+ const authData = {
+ jobId: data.jobId,
+ user: data.userId,
+ token: data.token || ''
+ };
+ // UPGRADE CONFIG
+ await ctx.actions.runAction('config:set', {
+ jobId: authData.jobId,
+ user: authData.user,
+ token: authData.token,
+ workDir: path.join(os.homedir(), '.ai-extension-preview', authData.jobId)
+ });
+ return authData;
+ }
+ if (data.status === 'expired') {
+ throw new Error('Code expired. Please restart.');
+ }
+ }
+ catch (err) {
+ if (err.message && (err.message.includes('expired') || err.message.includes('No Job ID'))) {
+ throw err;
+ }
+ // Ignore poll errors
+ }
+ }
+ }
+ catch (error) {
+ await ctx.logger.error(`Authentication failed: ${error.message}`);
+ throw error;
+ }
+ }
+ });
+ }
+ };
+ export default AuthPlugin;
package/dist/plugins/ConfigPlugin.js
ADDED
@@ -0,0 +1,52 @@
+ import { z } from 'zod';
+ const ConfigPlugin = {
+ name: 'config',
+ version: '1.0.0',
+ dependencies: [],
+ async setup(ctx) {
+ // 1. Define Schema
+ const configSchema = z.object({
+ host: z.string().url(),
+ jobId: z.string().min(1, "Job ID is required"),
+ token: z.string().optional(),
+ user: z.string().optional(),
+ workDir: z.string()
+ });
+ ctx.actions.registerAction({
+ id: 'config:validate',
+ handler: async () => {
+ const config = ctx.config;
+ try {
+ configSchema.parse(config);
+ return true;
+ }
+ catch (error) {
+ if (error instanceof z.ZodError) {
+ const issues = error.issues.map(i => `${i.path.join('.')}: ${i.message}`).join(', ');
+ await ctx.actions.runAction('core:log', { level: 'error', message: `Config Validation Failed: ${issues}` });
+ }
+ throw error;
+ }
+ }
+ });
+ // [NEW] Allow runtime config updates
+ ctx.actions.registerAction({
+ id: 'config:set',
+ handler: async (payload) => {
+ ctx.getRuntime().updateConfig(payload);
+ const config = ctx.config;
+ // Validate after set? Optional, but good practice.
+ try {
+ configSchema.parse(config);
+ }
+ catch (e) {
+ // Log but don't revert for now, trust the caller or add rollback logic if needed.
+ // Just warn for now
+ await ctx.actions.runAction('core:log', { level: 'warn', message: 'Config updated but validation failed. Some features may not work.' });
+ }
+ return config;
+ }
+ });
+ }
+ };
+ export default ConfigPlugin;
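The config:validate and config:set actions both lean on zod's synchronous parse. A standalone sketch of the same validation pattern, reusing the schema fields from ConfigPlugin above (the sample values are placeholders):

import { z } from 'zod';

const configSchema = z.object({
  host: z.string().url(),
  jobId: z.string().min(1, 'Job ID is required'),
  token: z.string().optional(),
  user: z.string().optional(),
  workDir: z.string()
});

try {
  // parse() throws a ZodError when a field is missing or malformed
  configSchema.parse({ host: 'https://example.com/api', jobId: '', workDir: '/tmp/work' });
} catch (error) {
  if (error instanceof z.ZodError) {
    // Produces messages like "jobId: Job ID is required"
    console.error(error.issues.map(i => `${i.path.join('.')}: ${i.message}`).join(', '));
  }
}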
package/dist/plugins/CorePlugin.js
CHANGED
@@ -1,11 +1,10 @@
  import chalk from 'chalk';
-
+ const CorePlugin = {
  name: 'core',
  version: '1.0.0',
  setup(ctx) {
  console.log('CorePlugin: setup called');
-
- const config = ctx.host.config;
+ const config = ctx.config;
  ctx.actions.registerAction({
  id: 'core:config',
  handler: async () => config
@@ -15,9 +14,7 @@ export const CorePlugin = {
  id: 'core:log',
  handler: async (payload) => {
  // Access default logger from Runtime
- const
- // Logger is now public
- const logger = rt.logger || console;
+ const logger = ctx.logger;
  const { level, message } = payload;
  switch (level) {
  case 'error':
@@ -41,3 +38,4 @@ export const CorePlugin = {
  });
  }
  };
+ export default CorePlugin;
package/dist/plugins/DownloaderPlugin.js
CHANGED
@@ -5,15 +5,23 @@ import AdmZip from 'adm-zip';
  import ora from 'ora';
  import https from 'https';
  let checkInterval;
-
+ const DownloaderPlugin = {
  name: 'downloader',
  version: '1.0.0',
+ dependencies: ['config'],
  setup(ctx) {
-
- const
-
+ // Helper to get paths dynamically
+ const getPaths = () => {
+ const workDir = ctx.config.workDir;
+ return {
+ DIST_DIR: path.join(workDir, 'dist'),
+ DOWNLOAD_PATH: path.join(workDir, 'extension.zip'),
+ VERSION_FILE: path.join(workDir, 'version')
+ };
+ };
  // Helper function to create axios client with current config
  const createClient = () => {
+ const config = ctx.config;
  const rawToken = config.token ? String(config.token) : '';
  const token = rawToken.replace(/^Bearer\s+/i, '').trim();
  // Auto-extract user ID from token if not provided
@@ -30,7 +38,7 @@ export const DownloaderPlugin = {
  // Ignore parse errors
  }
  }
- ctx.
+ ctx.logger.info(`[DEBUG] DownloaderPlugin creating client with userId: ${userId}`);
  return axios.create({
  baseURL: config.host,
  headers: {
@@ -42,11 +50,16 @@ export const DownloaderPlugin = {
  })
  });
  };
- const VERSION_FILE = path.join(config.workDir, 'version');
  let lastModified = '';
-
-
+ let currentWorkDir = '';
+ // Check initial state if workDir exists
+ try {
+ const { VERSION_FILE } = getPaths();
+ if (fs.existsSync(VERSION_FILE)) {
+ lastModified = fs.readFileSync(VERSION_FILE, 'utf-8').trim();
+ }
  }
+ catch (e) { }
  let isChecking = false;
  // Action: Check Status
  ctx.actions.registerAction({
@@ -55,12 +68,23 @@ export const DownloaderPlugin = {
  if (isChecking)
  return true; // Skip if busy
  isChecking = true;
+ const { jobId, workDir } = ctx.config;
+ const { DIST_DIR, VERSION_FILE } = getPaths();
+ // Reset lastModified if workDir changed
+ if (workDir !== currentWorkDir) {
+ currentWorkDir = workDir;
+ lastModified = '';
+ if (fs.existsSync(VERSION_FILE)) {
+ lastModified = fs.readFileSync(VERSION_FILE, 'utf-8').trim();
+ }
+ }
+ await ctx.logger.info('Checking for updates...');
  const MAX_RETRIES = 3;
  let attempt = 0;
  while (attempt < MAX_RETRIES) {
  try {
  const client = createClient(); // Create client with current config
- const res = await client.get(`/jobs/${
+ const res = await client.get(`/jobs/${jobId}`);
  const job = res.data;
  const newVersion = job.version;
  // If no version in job yet, fall back to timestamp or ignore
@@ -72,18 +96,18 @@ export const DownloaderPlugin = {
  let forceDownload = false;
  const manifestPath = path.join(DIST_DIR, 'manifest.json');
  if (!fs.existsSync(manifestPath)) {
- await ctx.
+ await ctx.logger.warn('Version match but files missing. Forcing download...');
  forceDownload = true;
  }
  if (newVersion !== lastModified || forceDownload) {
  if (newVersion !== lastModified) {
- await ctx.
+ await ctx.logger.info(`New version detected (Old: "${lastModified}", New: "${newVersion}")`);
  }
  const success = await ctx.actions.runAction('downloader:download', null);
  if (success) {
  lastModified = newVersion;
  fs.writeFileSync(VERSION_FILE, newVersion);
- ctx.events.emit('downloader:updated', { version: job.version, jobId: config.jobId });
+ ctx.events.emit('downloader:updated', { version: job.version, jobId: ctx.config.jobId });
  }
  }
  }
@@ -97,12 +121,12 @@ export const DownloaderPlugin = {
  attempt++;
  const isNetworkError = error.code === 'EAI_AGAIN' || error.code === 'ENOTFOUND' || error.code === 'ECONNRESET' || error.code === 'ETIMEDOUT';
  if (attempt < MAX_RETRIES && isNetworkError) {
- await ctx.
+ await ctx.logger.warn(`Connection failed (${error.code}). Retrying (${attempt}/${MAX_RETRIES})...`);
  await new Promise(r => setTimeout(r, 1000 * attempt)); // Backoff
  continue;
  }
  isChecking = false;
- await ctx.
+ await ctx.logger.error(`Check failed: ${error.message}`);
  return false;
  }
  }
@@ -117,21 +141,22 @@ export const DownloaderPlugin = {
  const spinner = ora('Downloading new version...').start();
  try {
  const client = createClient(); // Create client with current config
- const
+ const { DIST_DIR, DOWNLOAD_PATH, VERSION_FILE } = getPaths();
+ const response = await client.get(`/download/${ctx.config.jobId}`, {
  responseType: 'arraybuffer'
  });
- await fs.ensureDir(config.workDir);
+ await fs.ensureDir(ctx.config.workDir);
  await fs.writeFile(DOWNLOAD_PATH, response.data);
  await fs.emptyDir(DIST_DIR);
  const zip = new AdmZip(DOWNLOAD_PATH);
  zip.extractAllTo(DIST_DIR, true);
  // --- HOT RELOAD INJECTION ---
  try {
- // Get dynamically allocated port from ServerPlugin
- const hotReloadPort = ctx.hotReloadPort || 3500;
+ // Get dynamically allocated port from ServerPlugin via config
+ const hotReloadPort = ctx.config.hotReloadPort || 3500;
  const HOT_RELOAD_CODE = `
  const EVENT_SOURCE_URL = 'http://localhost:${hotReloadPort}/status';
- const CURRENT_JOB_ID = '${config.jobId}';
+ const CURRENT_JOB_ID = '${ctx.config.jobId}';
  let lastVersion = null;
  let lastJobId = null;

@@ -174,19 +199,19 @@ console.log('[Hot Reload] Active for Job:', CURRENT_JOB_ID);
  const swContent = await fs.readFile(swPath, 'utf-8');
  // Prepend import
  await fs.writeFile(swPath, "import './hot-reload.js';\n" + swContent);
- await ctx.
+ await ctx.logger.info('Injected Hot Reload script into background worker.');
  }
  }
  // MV2 Scripts Strategy (Fallback if user generates MV2)
  else if (manifest.background?.scripts) {
  manifest.background.scripts.push('hot-reload.js');
  await fs.writeJson(manifestPath, manifest, { spaces: 2 });
- await ctx.
+ await ctx.logger.info('Injected Hot Reload script into background scripts.');
  }
  }
  }
  catch (injectErr) {
- await ctx.
+ await ctx.logger.error(`Hot Reload Injection Failed: ${injectErr.message}`);
  }
  // ----------------------------
  spinner.succeed('Updated extension code!');
@@ -194,12 +219,13 @@ console.log('[Hot Reload] Active for Job:', CURRENT_JOB_ID);
  }
  catch (error) {
  spinner.fail(`Failed to download: ${error.message}`);
- await ctx.
+ await ctx.logger.error(`Download failed: ${error.message}`);
  return false;
  }
  }
  });
  // Polling removed in favor of push-based updates (POST /refresh)
- ctx.
+ ctx.logger.info('Ready. Waiting for update signals...');
  }
  };
+ export default DownloaderPlugin;
package/dist/plugins/ServerPlugin.js
CHANGED
@@ -1,8 +1,10 @@
  import http from 'http';
-
+ const ServerPlugin = {
  name: 'server',
  version: '1.0.0',
+ dependencies: ['config'],
  async setup(ctx) {
+ // const context = ctx as PreviewContext; // No longer needed
  let currentVersion = '0.0.0';
  // Try to bind to a port, retrying with incremented ports on failure
  const startPort = 3500;
@@ -13,7 +15,7 @@ export const ServerPlugin = {
  ctx.events.on('downloader:updated', (data) => {
  if (data && data.version) {
  currentVersion = data.version;
- ctx.
+ ctx.logger.info(`Server: Reporting version ${currentVersion}`);
  }
  });
  // Create server with request handler
@@ -29,7 +31,7 @@ export const ServerPlugin = {
  return;
  }
  if (req.url === '/status') {
- const currentJobId = ctx.
+ const currentJobId = ctx.config.jobId;
  res.writeHead(200, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify({
  version: currentVersion,
@@ -50,8 +52,8 @@ export const ServerPlugin = {
  const data = JSON.parse(body);
  if (data.jobId) {
  newJobId = data.jobId;
- ctx.
- ctx.
+ ctx.getRuntime().updateConfig({ jobId: newJobId });
+ ctx.logger.info(`[API] Switched to new Job ID: ${newJobId}`);
  }
  }
  }
@@ -59,24 +61,24 @@ export const ServerPlugin = {
  // Ignore parse error
  }
  // Trigger manual check
- ctx.
+ ctx.logger.info('[API] Refresh request received');
  ctx.actions.runAction('downloader:check', null).then((result) => {
- ctx.
+ ctx.logger.info(`[API] Check result: ${result}`);
  }).catch((err) => {
- ctx.
+ ctx.logger.error(`[API] Check failed: ${err.message}`);
  });
  res.writeHead(200, { 'Content-Type': 'application/json' });
- res.end(JSON.stringify({ success: true, jobId: ctx.
+ res.end(JSON.stringify({ success: true, jobId: ctx.config.jobId }));
  });
  return; // Return because we handle response in 'end' callback
  }
  else if (req.url === '/disconnect' && req.method === 'POST') {
  // Trigger browser stop
- ctx.
+ ctx.logger.info('[API] Disconnect request received');
  ctx.actions.runAction('browser:stop', null).then((result) => {
- ctx.
+ ctx.logger.info(`[API] Browser stop result: ${result}`);
  }).catch((err) => {
- ctx.
+ ctx.logger.error(`[API] Browser stop failed: ${err.message}`);
  });
  res.writeHead(200, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify({ success: true }));
@@ -108,7 +110,7 @@ export const ServerPlugin = {
  });
  // Success! Port is allocated
  allocatedPort = port;
- await ctx.
+ await ctx.logger.info(`Hot Reload Server running on port ${allocatedPort}`);
  break;
  }
  catch (err) {
@@ -122,17 +124,17 @@ export const ServerPlugin = {
  }
  else {
  // Other error, fail immediately
- await ctx.
+ await ctx.logger.error(`Server error: ${err.message}`);
  return;
  }
  }
  }
  if (!allocatedPort || !server) {
- await ctx.
+ await ctx.logger.error(`Failed to allocate port after ${maxAttempts} attempts (ports ${startPort}-${startPort + maxAttempts - 1})`);
  return;
  }
  // Store port in context for DownloaderPlugin to use
- ctx.hotReloadPort
+ ctx.getRuntime().updateConfig({ hotReloadPort: allocatedPort });
  // Store server instance to close later
  ctx._serverInstance = server;
  },
@@ -143,3 +145,4 @@ export const ServerPlugin = {
  }
  }
  };
+ export default ServerPlugin;
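For context, ServerPlugin exposes a small local HTTP API on the allocated port (it starts at 3500 and retries with incremented ports on failure). A hedged sketch of driving it from another process, based only on the routes visible above; the port and job ID values are placeholders:

import axios from 'axios';

const PORT = 3500; // placeholder: use whichever port the satellite actually bound

// Read the currently reported version
const { data: status } = await axios.get(`http://localhost:${PORT}/status`);
console.log(status.version);

// Push-based update: ask the satellite to re-check (optionally switching to another job)
await axios.post(`http://localhost:${PORT}/refresh`, { jobId: 'your-job-id' });

// Stop the preview browser
await axios.post(`http://localhost:${PORT}/disconnect`);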
package/dist/plugins/browser/BrowserManagerPlugin.js
CHANGED
@@ -1,46 +1,64 @@
  import path from 'path';
  import fs from 'fs-extra';
  import { findExtensionRoot, validateExtension } from '../../utils/browserUtils.js';
-
+ const BrowserManagerPlugin = {
  name: 'browser-manager',
  version: '1.0.0',
+ dependencies: ['config', 'downloader'],
  setup(ctx) {
-
- const
-
-
-
-
-
-
-
+ // Helper to get dynamic paths
+ const getPaths = () => {
+ const config = ctx.config;
+ const DIST_DIR = path.join(config.workDir, 'dist');
+ const isWSL = fs.existsSync('/mnt/c');
+ const isWin = process.platform === 'win32';
+ const STAGING_DIR = isWSL
+ ? '/mnt/c/Temp/ai-ext-preview'
+ : (isWin ? 'C:\\Temp\\ai-ext-preview' : path.join(config.workDir, '../staging'));
+ return { DIST_DIR, STAGING_DIR };
+ };
  // --- SYNC FUNCTION ---
  const syncToStaging = async () => {
+ const { DIST_DIR, STAGING_DIR } = getPaths();
  try {
  if (fs.existsSync(STAGING_DIR)) {
  fs.emptyDirSync(STAGING_DIR);
  }
  fs.ensureDirSync(STAGING_DIR);
  fs.copySync(DIST_DIR, STAGING_DIR);
- await ctx.
+ await ctx.logger.info(`Synced code to Staging`);
  // Emit staged event (optional)
  ctx.events.emit('browser:staged', { path: STAGING_DIR });
  }
  catch (err) {
- await ctx.
+ await ctx.logger.error(`Failed to sync to staging: ${err.message}`);
  }
  };
  const launchBrowser = async () => {
+ const { STAGING_DIR } = getPaths();
  // Resolve proper root AFTER sync
  const extensionRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
- //
+ // 1. Static Validation
  const validation = validateExtension(extensionRoot);
  if (!validation.valid) {
- await ctx.
+ await ctx.logger.error(`[CRITICAL] Extension validation failed: ${validation.error} in ${extensionRoot}`);
  }
  else if (extensionRoot !== STAGING_DIR) {
- await ctx.
+ await ctx.logger.info(`Detected nested extension at: ${path.basename(extensionRoot)}`);
+ }
+ // 2. Runtime Verification (Diagnostic) - SKIPPED FOR PERFORMANCE
+ // The SandboxRunner spins up a separate headless chrome which is slow and prone to WSL networking issues.
+ // Since we have static analysis in the backend, we skip this blocking step to give the user immediate feedback.
+ /*
+ await ctx.actions.runAction('core:log', { level: 'info', message: 'Running diagnostic verification...' });
+ const diagResult = await SandboxRunner.validateExtensionRuntime(extensionRoot);
+
+ if (diagResult.success) {
+ await ctx.actions.runAction('core:log', { level: 'info', message: '✅ Diagnostic Verification Passed.' });
+ } else {
+ await ctx.actions.runAction('core:log', { level: 'error', message: `❌ Diagnostic Verification Failed: ${diagResult.error}` });
  }
+ */
  // Delegate Launch
  // We pass the filesystem path (STAGING_DIR or extensionRoot)
  // The specific Launcher plugin handles environment specific path verification/conversion
@@ -64,7 +82,7 @@ export const BrowserManagerPlugin = {
  ctx.actions.registerAction({
  id: 'browser:stop',
  handler: async () => {
- await ctx.
+ await ctx.logger.info('Stopping browser...');
  const result = await ctx.actions.runAction('launcher:kill', null);
  return result;
  }
@@ -72,21 +90,24 @@ export const BrowserManagerPlugin = {
  // Event: Update detected
  ctx.events.on('downloader:updated', async () => {
  if (isInitialized) {
- await ctx.
+ await ctx.logger.info('Update detected. Restarting browser...');
  try {
  await ctx.actions.runAction('browser:stop', {});
  }
  catch (e) {
  // Ignore if already stopped
  }
+ // [Optimization] Wait for process cleanup to avoid "Open in new tab" race condition
+ await new Promise(r => setTimeout(r, 1000));
  await ctx.actions.runAction('browser:start', {});
  }
  });
  // Event: Browser closed (from launcher)
  ctx.events.on('browser:closed', async (data) => {
- await ctx.
+ await ctx.logger.info(`Browser closed with code ${data.code}`);
  // Emit event that can be picked up by other plugins (e.g., to notify backend)
  ctx.events.emit('session:terminated', { reason: 'browser_closed' });
  });
  }
  };
+ export default BrowserManagerPlugin;
package/dist/plugins/browser/NativeLauncherPlugin.js
CHANGED
@@ -3,9 +3,10 @@ import fs from 'fs-extra';
  import { spawn } from 'child_process';
  import { findChrome, normalizePathToWindows } from '../../utils/browserUtils.js';
  let chromeProcess = null;
-
+ const NativeLauncherPlugin = {
  name: 'native-launcher',
  version: '1.0.0',
+ dependencies: ['config'],
  setup(ctx) {
  // Only active if NOT in WSL
  const isWSL = fs.existsSync('/mnt/c');
@@ -14,7 +15,7 @@ export const NativeLauncherPlugin = {
  ctx.actions.registerAction({
  id: 'launcher:launch',
  handler: async (payload) => {
- const config = ctx.
+ const config = ctx.config;
  const chromePath = findChrome();
  if (!chromePath) {
  await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found.' });
@@ -55,14 +56,14 @@ export const NativeLauncherPlugin = {
  });
  // Monitor process exit
  chromeProcess.on('exit', async (code) => {
-
+ ctx.logger.info(`[NativeLauncher] Chrome exited with code ${code}`);
  chromeProcess = null;
  ctx.events.emit('browser:closed', { code });
  });
-
+ ctx.logger.info('[NativeLauncher] Chrome started with PID: ' + chromeProcess.pid);
  }
  catch (spawnErr) {
-
+ ctx.logger.error(`[NativeLauncher] Spawn Failed: ${spawnErr.message}`);
  return false;
  }
  return true;
@@ -73,7 +74,7 @@ export const NativeLauncherPlugin = {
  id: 'launcher:kill',
  handler: async () => {
  if (chromeProcess) {
-
+ ctx.logger.info('[NativeLauncher] Chrome process force killed.');
  chromeProcess.kill();
  chromeProcess = null;
  return true;
@@ -89,3 +90,4 @@ export const NativeLauncherPlugin = {
  }
  }
  };
+ export default NativeLauncherPlugin;
package/dist/plugins/browser/WSLLauncherPlugin.js
CHANGED
@@ -3,9 +3,10 @@ import fs from 'fs-extra';
  import { spawn } from 'child_process';
  import { findChrome } from '../../utils/browserUtils.js';
  let chromePid = null;
-
+ const WSLLauncherPlugin = {
  name: 'wsl-launcher',
  version: '1.0.0',
+ dependencies: ['config'],
  setup(ctx) {
  // Only active in WSL
  const isWSL = fs.existsSync('/mnt/c');
@@ -16,7 +17,7 @@ export const WSLLauncherPlugin = {
  handler: async (payload) => {
  const chromePath = findChrome();
  if (!chromePath) {
- await ctx.
+ await ctx.logger.error('Chrome not found for detached launch.');
  return false;
  }
  // Hardcoded Safe Paths for WSL Strategy
@@ -36,7 +37,7 @@ export const WSLLauncherPlugin = {
  const winChromePath = chromePath
  .replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\\\`)
  .replace(/\//g, '\\\\');
- await ctx.
+ await ctx.logger.info(`WSL Launch Target (Win): ${finalWinExtensionPath}`);
  // Create PowerShell Launch Script with PID capture
  const psContent = `
  $chromePath = "${winChromePath}"
@@ -73,7 +74,7 @@ Write-Host "CHROME_PID:$($process.Id)"
  await fs.writeFile(psPath, psContent);
  }
  catch (e) {
- await ctx.
+ await ctx.logger.error(`WSL Write PS1 Failed: ${e.message}`);
  return false;
  }
  // Execute via PowerShell
@@ -89,11 +90,11 @@ Write-Host "CHROME_PID:$($process.Id)"
  const pidMatch = msg.match(/CHROME_PID:(\d+)/);
  if (pidMatch) {
  chromePid = parseInt(pidMatch[1], 10);
- await ctx.
+ await ctx.logger.info(`Chrome launched with PID: ${chromePid}`);
  // Start monitoring the process
  monitorProcess(ctx, chromePid);
  }
- await ctx.
+ await ctx.logger.info(`[PS1] ${msg.trim()}`);
  });
  }
  if (child.stderr) {
@@ -101,10 +102,10 @@ Write-Host "CHROME_PID:$($process.Id)"
  const msg = chunk.toString();
  // Ignore minor PS noise unless critical
  if (msg.includes('Exec format error')) {
- await ctx.
+ await ctx.logger.error(`CRITICAL: WSL Interop broken.`);
  }
  else if (msg.trim()) {
- await ctx.
+ await ctx.logger.error(`Launch Error: ${msg}`);
  }
  });
  }
@@ -116,25 +117,43 @@ Write-Host "CHROME_PID:$($process.Id)"
  id: 'launcher:kill',
  handler: async () => {
  if (chromePid) {
- await ctx.
+ await ctx.logger.info(`Terminating Chrome process (PID: ${chromePid})...`);
  try {
- //
- const
-
-
-
-
-
-
-
-
-
+ // 1. Try Stop-Process first (Graceful)
+ const killCmd = `
+ $targetPid = ${chromePid}
+ try {
+ Stop-Process -Id $targetPid -Force -ErrorAction Stop
+ Write-Host "STOPPED"
+ } catch {
+ try {
+ taskkill.exe /F /PID $targetPid
+ Write-Host "TASKKILLED"
+ } catch {
+ Write-Host "FAILED: $_"
+ exit 1
+ }
  }
+ `;
+ const killChild = spawn('powershell.exe', ['-Command', killCmd], { stdio: 'pipe' });
+ // Capture output to debug why it might fail
+ if (killChild.stdout) {
+ killChild.stdout.on('data', d => ctx.logger.debug(`[KillParams] ${d}`));
+ }
+ if (killChild.stderr) {
+ killChild.stderr.on('data', d => ctx.logger.warn(`[KillMsg] ${d}`));
+ }
+ await new Promise((resolve) => {
+ killChild.on('exit', (code) => {
+ resolve();
+ });
  });
+ await ctx.logger.info('Chrome process termination signal sent.');
+ chromePid = null;
  return true;
  }
  catch (err) {
- await ctx.
+ await ctx.logger.error(`Kill failed: ${err.message}`);
  return false;
  }
  }
@@ -158,7 +177,7 @@ Write-Host "CHROME_PID:$($process.Id)"
  if (!output.trim() || code !== 0) {
  // Process no longer exists
  clearInterval(checkInterval);
- await ctx.
+ await ctx.logger.info('Chrome process exited.');
  chromePid = null;
  ctx.events.emit('browser:closed', { code: 0 });
  }
@@ -178,3 +197,4 @@ Write-Host "CHROME_PID:$($process.Id)"
  }
  }
  };
+ export default WSLLauncherPlugin;
package/dist/types.js
ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/utils/sandbox.js
ADDED
@@ -0,0 +1,192 @@
+ import puppeteer from 'puppeteer-core';
+ import path from 'path';
+ import fs from 'fs-extra';
+ import { spawn, execSync } from 'child_process';
+ import axios from 'axios';
+ import { findChrome } from './browserUtils.js';
+ export class SandboxRunner {
+ /**
+ * Launch a headless browser with the extension loaded to verify it can initialize.
+ * @param extensionPath Absolute path to the unpacked extension directory
+ * @param chromePath Optional path to Chrome executable. If not provided, attempts to auto-detect.
+ */
+ static async validateExtensionRuntime(extensionPath, chromePath) {
+ const logs = [];
+ const executablePath = chromePath || findChrome();
+ if (!executablePath) {
+ return {
+ success: false,
+ logs,
+ error: 'Chrome executable not found. Cannot run verification.'
+ };
+ }
+ const isWSL = executablePath.startsWith('/mnt/');
+ if (isWSL) {
+ logs.push('[Sandbox] WSL Environment detected. Using "Spawn & Connect" strategy.');
+ return this.runWSLCheck(extensionPath, executablePath, logs);
+ }
+ else {
+ return this.runStandardCheck(extensionPath, executablePath, logs);
+ }
+ }
+ static async runStandardCheck(extensionPath, executablePath, logs) {
+ let browser;
+ try {
+ logs.push(`[Sandbox] Launching standard verification for: ${extensionPath}`);
+ logs.push(`[Sandbox] Using Chrome at: ${executablePath}`);
+ browser = await puppeteer.launch({
+ headless: true,
+ executablePath: executablePath,
+ args: [
+ `--disable-extensions-except=${extensionPath}`,
+ `--load-extension=${extensionPath}`,
+ '--no-sandbox',
+ '--disable-setuid-sandbox'
+ ]
+ });
+ return await this.performChecks(browser, extensionPath, logs);
+ }
+ catch (error) {
+ console.error('[Sandbox] Standard Launch Error:', error);
+ return { success: false, logs, error: error instanceof Error ? error.message : String(error) };
+ }
+ finally {
+ if (browser)
+ await browser.close();
+ }
+ }
+ static async runWSLCheck(extensionPath, linuxChromePath, logs) {
+ let browser;
+ let chromePid = null;
+ try {
+ // 1. Path Conversion (Linux -> Windows)
+ const driveMatch = linuxChromePath.match(/^\/mnt\/([a-z])\//);
+ if (!driveMatch)
+ throw new Error(`Could not parse drive letter from ${linuxChromePath}`);
+ const driveLetter = driveMatch[1];
+ const winChromePath = linuxChromePath
+ .replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\`)
+ .replace(/\//g, '\\');
+ // 1b. Detect Host IP (WSL DNS Resolver IP)
+ let hostIp = '127.0.0.1';
+ try {
+ const resolveConf = fs.readFileSync('/etc/resolv.conf', 'utf-8');
+ const match = resolveConf.match(/nameserver\s+([\d.]+)/);
+ if (match)
+ hostIp = match[1];
+ logs.push(`[Sandbox] Host IP detected: ${hostIp}`);
+ }
+ catch (e) {
+ logs.push(`[Sandbox] Failed to detect Host IP, fallback to 127.0.0.1: ${e}`);
+ }
+ let winExtensionPath = extensionPath;
+ const extDriveMatch = extensionPath.match(/^\/mnt\/([a-z])\//);
+ if (extDriveMatch) {
+ winExtensionPath = extensionPath
+ .replace(new RegExp(`^/mnt/${extDriveMatch[1]}/`), `${extDriveMatch[1].toUpperCase()}:\\`)
+ .replace(/\//g, '\\');
+ }
+ else {
+ logs.push('[Sandbox] WARNING: Extension path is not in /mnt/. Windows Chrome might not see it.');
+ }
+ // 2. Spawn Chrome via PowerShell
+ const port = 9222;
+ const winProfile = `C:\\Temp\\ai-ext-sandbox-${Date.now()}`;
+ const args = [
+ `--headless=new`,
+ `--disable-extensions-except="${winExtensionPath}"`,
+ `--load-extension="${winExtensionPath}"`,
+ `--user-data-dir="${winProfile}"`,
+ '--no-sandbox',
+ '--disable-gpu',
+ '--disable-dev-shm-usage',
+ '--no-first-run',
+ '--no-default-browser-check',
+ `--remote-debugging-port=${port}`,
+ `--remote-debugging-address=0.0.0.0`, // Bind to all interfaces so WSL can see it
+ `--remote-allow-origins=*` // Allow puppeteer connection
+ ];
+ const psCommand = `Start-Process -FilePath "${winChromePath}" -ArgumentList '${args.join(' ')}' -PassThru`;
+ logs.push(`[Sandbox] Spawning Chrome via PowerShell on port ${port}...`);
+ logs.push(`[Sandbox] Profile: ${winProfile}`);
+ const child = spawn('powershell.exe', ['-Command', psCommand], { stdio: 'pipe' });
+ await new Promise((resolve, reject) => {
+ child.stdout.on('data', (data) => {
+ const output = data.toString();
+ const match = output.match(/\s+(\d+)\s+\d+\s+chrome/i) || output.match(/Id\s+:\s+(\d+)/);
+ if (match) {
+ chromePid = parseInt(match[1], 10);
+ logs.push(`[Sandbox] Chrome PID: ${chromePid}`);
+ }
+ });
+ child.on('close', (code) => {
+ if (code === 0)
+ resolve();
+ else
+ reject(new Error(`PowerShell exited with code ${code}`));
+ });
+ });
+ // 3. Wait for Port
+ logs.push('[Sandbox] Waiting for Chrome to accept connections...');
+ let connected = false;
+ // Increased timeout to 15s (30 * 500ms)
+ for (let i = 0; i < 30; i++) {
+ try {
+ // Use hostIp, not localhost
+ await axios.get(`http://${hostIp}:${port}/json/version`, { timeout: 1000 });
+ connected = true;
+ break;
+ }
+ catch (e) {
+ await new Promise(r => setTimeout(r, 500));
+ }
+ }
+ if (!connected)
+ throw new Error(`Timed out waiting for Chrome debug port ${port}`);
+ // 4. Connect Puppeteer
+ logs.push('[Sandbox] Connecting Puppeteer...');
+ browser = await puppeteer.connect({
+ browserURL: `http://${hostIp}:${port}`
+ });
+ // 5. Perform Checks
+ const result = await this.performChecks(browser, extensionPath, logs);
+ return result;
+ }
+ catch (error) {
+ console.error('[Sandbox] WSL Check Error:', error);
+ return { success: false, logs, error: error instanceof Error ? error.message : String(error) };
+ }
+ finally {
+ if (browser)
+ await browser.disconnect();
+ if (chromePid) {
+ logs.push(`[Sandbox] Killing Chrome PID ${chromePid}...`);
+ try {
+ execSync(`powershell.exe -Command "Stop-Process -Id ${chromePid} -Force"`);
+ }
+ catch (e) { /* ignore */ }
+ }
+ }
+ }
+ static async performChecks(browser, extensionPath, logs) {
+ await new Promise(r => setTimeout(r, 2000));
+ const targets = await browser.targets();
+ const backgroundTarget = targets.find(t => t.type() === 'service_worker' || t.type() === 'background_page');
+ if (!backgroundTarget) {
+ const manifestPath = path.join(extensionPath, 'manifest.json');
+ if (fs.existsSync(manifestPath)) {
+ const manifest = await fs.readJson(manifestPath);
+ if (manifest.background) {
+ return { success: false, logs, error: 'Background Service Worker defined in manifest but failed to start.' };
+ }
+ else {
+ logs.push('[Sandbox] No background script defined in manifest. Skipping worker check.');
+ }
+ }
+ }
+ else {
+ logs.push('Background worker started successfully.');
+ }
+ return { success: true, logs };
+ }
+ }
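SandboxRunner is only referenced from the commented-out diagnostic block in BrowserManagerPlugin above, but it can also be invoked directly. A usage sketch (the extension path is a placeholder, and the relative import depends on where the caller sits inside dist/):

import { SandboxRunner } from './utils/sandbox.js';

// Placeholder: path to an unpacked extension build
const result = await SandboxRunner.validateExtensionRuntime('/home/user/.ai-extension-preview/default/dist');

for (const line of result.logs) console.log(line);
if (!result.success) {
  console.error('Runtime verification failed:', result.error);
}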
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai-extension-preview",
- "version": "0.1.14",
+ "version": "0.1.16",
  "description": "Local preview tool for AI Extension Builder",
  "type": "module",
  "bin": {
@@ -37,9 +37,10 @@
  "node-fetch": "^3.3.2",
  "ora": "^8.1.1",
  "puppeteer-core": "^24.33.0",
- "skeleton-crew-runtime": "^0.1
+ "skeleton-crew-runtime": "^0.2.1",
  "web-ext": "^8.3.0",
- "ws": "^8.18.0"
+ "ws": "^8.18.0",
+ "zod": "^4.2.1"
  },
  "devDependencies": {
  "@types/adm-zip": "^0.5.6",
|