ai-extension-preview 0.1.10 → 0.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +68 -52
- package/dist/plugins/DownloaderPlugin.js +33 -28
- package/dist/plugins/ServerPlugin.js +68 -16
- package/dist/plugins/browser/BrowserManagerPlugin.js +82 -0
- package/dist/plugins/browser/NativeLauncherPlugin.js +91 -0
- package/dist/plugins/browser/WSLLauncherPlugin.js +180 -0
- package/dist/utils/browserUtils.js +79 -0
- package/package.json +1 -1
- package/dist/plugins/BrowserPlugin.js +0 -288
package/dist/index.js
CHANGED
@@ -8,7 +8,9 @@ import os from 'os';
 import { Runtime } from 'skeleton-crew-runtime';
 import { CorePlugin } from './plugins/CorePlugin.js';
 import { DownloaderPlugin } from './plugins/DownloaderPlugin.js';
-import {
+import { BrowserManagerPlugin } from './plugins/browser/BrowserManagerPlugin.js';
+import { WSLLauncherPlugin } from './plugins/browser/WSLLauncherPlugin.js';
+import { NativeLauncherPlugin } from './plugins/browser/NativeLauncherPlugin.js';
 import { ServerPlugin } from './plugins/ServerPlugin.js';
 import axios from 'axios';
 import chalk from 'chalk';
@@ -24,10 +26,19 @@ program
 .option('--user <user>', 'User ID (if required)')
 .parse(process.argv);
 const options = program.opts();
-async function authenticate(host) {
+async function authenticate(host, port) {
 try {
-// 1. Init Session
-
+// 1. Init Session with port
+console.log('[DEBUG] Sending port to backend:', port);
+const initRes = await axios({
+method: 'post',
+url: `${host}/preview/init`,
+data: { port },
+headers: {
+'Content-Type': 'application/json'
+}
+});
+console.log('[DEBUG] Init response:', initRes.data);
 const { code, sessionId } = initRes.data;
 console.log('\n' + chalk.bgBlue.bold(' DETACHED PREVIEW MODE ') + '\n');
 console.log('To connect, please go to your Extension Dashboard and click "Connect Preview".');
@@ -41,15 +52,17 @@ async function authenticate(host)
 const statusRes = await axios.get(`${host}/preview/status/${sessionId}`);
 const data = statusRes.data;
 if (data.status === 'linked') {
-
+console.log(chalk.green('✔ Connected!'));
 if (!data.jobId) {
 console.error('Error: No Job ID associated with this connection.');
 process.exit(1);
 }
+console.log('[DEBUG] Received userId:', data.userId);
+console.log('[DEBUG] Received jobId:', data.jobId);
 return {
 jobId: data.jobId,
 userId: data.userId,
-token:
+token: data.token || ''
 };
 }
 if (data.status === 'expired') {
@@ -57,57 +70,64 @@ async function authenticate(host)
 process.exit(1);
 }
 }
-catch (
-// Ignore
+catch (err) {
+// Ignore poll errors, keep trying
 }
 }
 }
 catch (error) {
-console.error(
-
+console.error('Authentication failed:', error);
+throw error;
 }
 }
-
-
-
-
-
-
-
-const authData = await authenticate(host);
-jobId = authData.jobId;
-userId = authData.userId || userId;
-token = authData.token || token;
-}
-// Use os.homedir() to ensure we have write permissions
-// Git Bash sometimes defaults cwd to C:\Program Files\Git which causes EPERM
-const HOME_DIR = os.homedir();
-const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', jobId);
-// 1. Initialize Runtime
+// Use os.homedir() to ensure we have write permissions
+// Git Bash sometimes defaults cwd to C:\Program Files\Git which causes EPERM
+const HOME_DIR = os.homedir();
+const WORK_DIR = path.join(HOME_DIR, '.ai-extension-preview', options.job || 'default'); // Use default if job not provided yet
+(async () => {
+const { job: jobId, host, token, user: userId } = options;
+// 1. Initialize Runtime first to allocate port
 const runtime = new Runtime({
 hostContext: {
 config: {
 host,
-token,
-user: userId,
-jobId,
+token: token || '',
+user: userId || '',
+jobId: jobId || '',
 workDir: WORK_DIR
 }
 }
 });
-
-// Note: In a real dynamic system we might load these from a folder
-// console.log('Registering plugins...');
-// Register Plugins
-// UI Plugin first or last?
-// If first, it captures subsequent logs.
+runtime.logger.info('Registering plugins...');
 runtime.registerPlugin(CorePlugin);
 runtime.registerPlugin(DownloaderPlugin);
-runtime.registerPlugin(
+runtime.registerPlugin(BrowserManagerPlugin);
+runtime.registerPlugin(WSLLauncherPlugin);
+runtime.registerPlugin(NativeLauncherPlugin);
 runtime.registerPlugin(ServerPlugin);
 runtime.logger.info('Initializing runtime...');
 await runtime.initialize();
 const ctx = runtime.getContext();
+// Get allocated port from ServerPlugin
+const allocatedPort = ctx.hotReloadPort;
+if (!allocatedPort) {
+console.error('Failed to allocate server port');
+process.exit(1);
+}
+// 2. Now authenticate with the allocated port
+let finalJobId = jobId;
+let finalUserId = userId;
+let finalToken = token;
+if (!jobId || !userId) {
+const authData = await authenticate(host, allocatedPort);
+finalJobId = authData.jobId;
+finalUserId = authData.userId;
+finalToken = authData.token;
+// Update runtime config with auth data
+ctx.host.config.jobId = finalJobId;
+ctx.host.config.user = finalUserId;
+ctx.host.config.token = finalToken;
+}
 // 3. Start LifeCycle
 await ctx.actions.runAction('core:log', { level: 'info', message: 'Initializing Local Satellite...' });
 // Ensure work dir exists
@@ -118,18 +138,18 @@ async function main() {
 await ctx.actions.runAction('core:log', { level: 'error', message: 'Initial check failed. Could not verify job or download extension.' });
 process.exit(1);
 }
-//
-
-//
-
-
-
-
-// 1. Browser closed (web-ext mode) -> we arguably should exit.
-// 2. Detached mode started -> we MUST NOT exit.
-// Changing logic: rely on SIGINT to exit.
+// Launch Browser
+await ctx.actions.runAction('browser:start', {});
+// Keep process alive
+process.on('SIGINT', async () => {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Shutting down...' });
+process.exit(0);
+});
 runtime.logger.info('Press Ctrl+C to exit.');
-}
+})().catch((err) => {
+console.error(chalk.red('Fatal Error:'), err.message || err);
+process.exit(1);
+});
 // Handle global errors
 process.on('uncaughtException', (err) => {
 if (err.code === 'ECONNRESET' || err.message?.includes('ECONNRESET')) {
@@ -142,7 +162,3 @@ process.on('uncaughtException', (err) => {
 process.on('unhandledRejection', (reason) => {
 console.error('Unhandled Rejection:', reason);
 });
-main().catch(err => {
-console.error(chalk.red('Fatal Error:'), err.message || err);
-process.exit(1);
-});
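The rewritten entry point reverses the 0.1.10 order of operations: the runtime (and with it the ServerPlugin) now initializes first so a hot-reload port can be allocated, and authenticate(host, port) then registers that port with the backend before polling for the dashboard link. The standalone sketch below illustrates that handshake shape; it reuses the endpoint paths and status values visible in the diff, but the function name, polling interval, and error handling are simplified assumptions, not the package's exact code.

import axios from 'axios';

// Illustrative sketch of the detached-preview handshake: POST the locally
// allocated hot-reload port to /preview/init, then poll /preview/status/<id>
// until the dashboard links the session. Not the package's actual export.
export async function authenticatePreview(host, port, { intervalMs = 2000 } = {}) {
  const { data: init } = await axios.post(`${host}/preview/init`, { port });
  for (;;) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    try {
      const { data } = await axios.get(`${host}/preview/status/${init.sessionId}`);
      if (data.status === 'linked') {
        return { jobId: data.jobId, userId: data.userId, token: data.token || '' };
      }
      if (data.status === 'expired') {
        throw new Error('Preview session expired');
      }
    } catch (err) {
      if (err.message === 'Preview session expired') throw err;
      // transient poll errors are ignored, matching the plugin's "keep trying" behaviour
    }
  }
}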
package/dist/plugins/DownloaderPlugin.js
CHANGED

@@ -12,35 +12,36 @@ export const DownloaderPlugin = {
 const config = ctx.host.config;
 const DIST_DIR = path.join(config.workDir, 'dist');
 const DOWNLOAD_PATH = path.join(config.workDir, 'extension.zip');
-
-const
-
-
-
-
-
-
-const
-
-
-
-
+// Helper function to create axios client with current config
+const createClient = () => {
+const rawToken = config.token ? String(config.token) : '';
+const token = rawToken.replace(/^Bearer\s+/i, '').trim();
+// Auto-extract user ID from token if not provided
+let userId = config.user;
+if (!userId && token) {
+try {
+const parts = token.split('.');
+if (parts.length === 3) {
+const payload = JSON.parse(Buffer.from(parts[1], 'base64').toString());
+userId = payload.id || payload.sub || payload.userId;
+}
+}
+catch (e) {
+// Ignore parse errors
 }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-})
-});
+ctx.actions.runAction('core:log', { level: 'info', message: `[DEBUG] DownloaderPlugin creating client with userId: ${userId}` });
+return axios.create({
+baseURL: config.host,
+headers: {
+'Authorization': token ? `Bearer ${token}` : undefined,
+'X-User-Id': userId
+},
+httpsAgent: new https.Agent({
+rejectUnauthorized: false
+})
+});
+};
 const VERSION_FILE = path.join(config.workDir, 'version');
 let lastModified = '';
 if (fs.existsSync(VERSION_FILE)) {
@@ -58,6 +59,7 @@ export const DownloaderPlugin = {
 let attempt = 0;
 while (attempt < MAX_RETRIES) {
 try {
+const client = createClient(); // Create client with current config
 const res = await client.get(`/jobs/${config.jobId}`);
 const job = res.data;
 const newVersion = job.version;
@@ -114,6 +116,7 @@ export const DownloaderPlugin = {
 handler: async () => {
 const spinner = ora('Downloading new version...').start();
 try {
+const client = createClient(); // Create client with current config
 const response = await client.get(`/download/${config.jobId}`, {
 responseType: 'arraybuffer'
 });
@@ -124,8 +127,10 @@ export const DownloaderPlugin = {
 zip.extractAllTo(DIST_DIR, true);
 // --- HOT RELOAD INJECTION ---
 try {
+// Get dynamically allocated port from ServerPlugin
+const hotReloadPort = ctx.hotReloadPort || 3500;
 const HOT_RELOAD_CODE = `
-const EVENT_SOURCE_URL = 'http://localhost
+const EVENT_SOURCE_URL = 'http://localhost:${hotReloadPort}/status';
 const CURRENT_JOB_ID = '${config.jobId}';
 let lastVersion = null;
 let lastJobId = null;
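The new createClient() helper rebuilds the HTTP client each time it is called so headers always reflect the current config, strips a possible "Bearer " prefix, and, when no --user was supplied, pulls a user id out of the JWT payload. A minimal sketch of just that extraction step follows; it keeps the same id/sub/userId fallbacks and the unverified base64 decode used above, and the function name is illustrative.

// Sketch: derive a user id from a JWT without verifying its signature,
// mirroring the fallback order used by createClient() above.
export function userIdFromToken(rawToken) {
  const token = String(rawToken || '').replace(/^Bearer\s+/i, '').trim();
  const parts = token.split('.');
  if (parts.length !== 3) return undefined;
  try {
    const payload = JSON.parse(Buffer.from(parts[1], 'base64').toString());
    return payload.id || payload.sub || payload.userId;
  } catch {
    return undefined; // malformed payload: caller falls back to an explicit --user
  }
}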
package/dist/plugins/ServerPlugin.js
CHANGED

@@ -2,9 +2,13 @@ import http from 'http';
 export const ServerPlugin = {
 name: 'server',
 version: '1.0.0',
-setup(ctx) {
+async setup(ctx) {
 let currentVersion = '0.0.0';
-
+// Try to bind to a port, retrying with incremented ports on failure
+const startPort = 3500;
+const maxAttempts = 100;
+let allocatedPort = null;
+let server = null;
 // Listen for version updates
 ctx.events.on('downloader:updated', (data) => {
 if (data && data.version) {
@@ -12,10 +16,11 @@ export const ServerPlugin = {
 ctx.actions.runAction('core:log', { level: 'info', message: `Server: Reporting version ${currentVersion}` });
 }
 });
-
+// Create server with request handler
+const requestHandler = (req, res) => {
 // CORS Headers
 res.setHeader('Access-Control-Allow-Origin', '*');
-res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS');
+res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
 res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
 if (req.method === 'OPTIONS') {
 res.writeHead(204);
@@ -24,29 +29,76 @@ export const ServerPlugin = {
 }
 if (req.url === '/status') {
 const currentJobId = ctx.host.config.jobId;
-// ctx.actions.runAction('core:log', { level: 'info', message: `[DEBUG] Server: Extension requested status (Reporting: ${currentVersion})` });
 res.writeHead(200, { 'Content-Type': 'application/json' });
 res.end(JSON.stringify({
 version: currentVersion,
-jobId: currentJobId
+jobId: currentJobId,
+port: allocatedPort
 }));
 }
+else if (req.url === '/disconnect' && req.method === 'POST') {
+// Trigger browser stop
+ctx.actions.runAction('core:log', { level: 'info', message: '[API] Disconnect request received' });
+ctx.actions.runAction('browser:stop', null).then((result) => {
+ctx.actions.runAction('core:log', { level: 'info', message: `[API] Browser stop result: ${result}` });
+}).catch((err) => {
+ctx.actions.runAction('core:log', { level: 'error', message: `[API] Browser stop failed: ${err.message}` });
+});
+res.writeHead(200, { 'Content-Type': 'application/json' });
+res.end(JSON.stringify({ success: true }));
+}
 else {
 res.writeHead(404);
 res.end('Not Found');
 }
-}
-
-
-
-
-
-
+};
+// Try to bind to ports sequentially
+for (let attempt = 0; attempt < maxAttempts; attempt++) {
+const port = startPort + attempt;
+try {
+server = http.createServer(requestHandler);
+// Wrap listen in a promise to handle async properly
+await new Promise((resolve, reject) => {
+server.once('error', (err) => {
+if (err.code === 'EADDRINUSE') {
+reject(err);
+}
+else {
+reject(err);
+}
+});
+server.once('listening', () => {
+resolve();
+});
+server.listen(port);
+});
+// Success! Port is allocated
+allocatedPort = port;
+await ctx.actions.runAction('core:log', { level: 'info', message: `Hot Reload Server running on port ${allocatedPort}` });
+break;
 }
-
-
+catch (err) {
+if (err.code === 'EADDRINUSE') {
+// Port busy, try next one
+if (server) {
+server.removeAllListeners();
+server = null;
+}
+continue;
+}
+else {
+// Other error, fail immediately
+await ctx.actions.runAction('core:log', { level: 'error', message: `Server error: ${err.message}` });
+return;
+}
 }
-}
+}
+if (!allocatedPort || !server) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to allocate port after ${maxAttempts} attempts (ports ${startPort}-${startPort + maxAttempts - 1})` });
+return;
+}
+// Store port in context for DownloaderPlugin to use
+ctx.hotReloadPort = allocatedPort;
 // Store server instance to close later
 ctx._serverInstance = server;
 },
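Since the server now binds the first free port in the 3500-3599 range and reports it in the /status payload, callers can no longer assume port 3500. The sketch below shows one way a consumer could locate the running instance by probing that same range; it assumes Node 18+ (global fetch and AbortSignal.timeout) and is not part of the package.

// Sketch: scan the ServerPlugin's port range and read the /status payload
// ({ version, jobId, port }) from the first instance that answers.
export async function findHotReloadServer(startPort = 3500, maxAttempts = 100) {
  for (let i = 0; i < maxAttempts; i++) {
    const port = startPort + i;
    try {
      const res = await fetch(`http://localhost:${port}/status`, {
        signal: AbortSignal.timeout(500)
      });
      if (res.ok) {
        const { version, jobId } = await res.json();
        return { port, version, jobId };
      }
    } catch {
      // nothing listening here; try the next port
    }
  }
  return null;
}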
package/dist/plugins/browser/BrowserManagerPlugin.js
ADDED

@@ -0,0 +1,82 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { findExtensionRoot, validateExtension } from '../../utils/browserUtils.js';
+export const BrowserManagerPlugin = {
+name: 'browser-manager',
+version: '1.0.0',
+setup(ctx) {
+const config = ctx.host.config;
+const DIST_DIR = path.join(config.workDir, 'dist');
+// --- Centralized Path Strategy ---
+const isWSL = fs.existsSync('/mnt/c');
+const isWin = process.platform === 'win32';
+// Unified Staging Path (C:\\Temp for Windows/WSL, local for others)
+const STAGING_DIR = isWSL
+? '/mnt/c/Temp/ai-ext-preview'
+: (isWin ? 'C:\\Temp\\ai-ext-preview' : path.join(config.workDir, '../staging'));
+// --- SYNC FUNCTION ---
+const syncToStaging = async () => {
+try {
+if (fs.existsSync(STAGING_DIR)) {
+fs.emptyDirSync(STAGING_DIR);
+}
+fs.ensureDirSync(STAGING_DIR);
+fs.copySync(DIST_DIR, STAGING_DIR);
+await ctx.actions.runAction('core:log', { level: 'info', message: `Synced code to Staging` });
+// Emit staged event (optional)
+ctx.events.emit('browser:staged', { path: STAGING_DIR });
+}
+catch (err) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to sync to staging: ${err.message}` });
+}
+};
+const launchBrowser = async () => {
+// Resolve proper root AFTER sync
+const extensionRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
+// Validate
+const validation = validateExtension(extensionRoot);
+if (!validation.valid) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `[CRITICAL] Extension validation failed: ${validation.error} in ${extensionRoot}` });
+}
+else if (extensionRoot !== STAGING_DIR) {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Detected nested extension at: ${path.basename(extensionRoot)}` });
+}
+// Delegate Launch
+// We pass the filesystem path (STAGING_DIR or extensionRoot)
+// The specific Launcher plugin handles environment specific path verification/conversion
+await ctx.actions.runAction('launcher:launch', {
+extensionPath: extensionRoot,
+stagingDir: STAGING_DIR
+});
+};
+// Action: Start Browser (Orchestrator)
+ctx.actions.registerAction({
+id: 'browser:start',
+handler: async () => {
+await syncToStaging();
+await launchBrowser();
+return true;
+}
+});
+// Action: Stop Browser
+ctx.actions.registerAction({
+id: 'browser:stop',
+handler: async () => {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Stopping browser...' });
+const result = await ctx.actions.runAction('launcher:kill', null);
+return result;
+}
+});
+// Event: Update detected
+ctx.events.on('downloader:updated', async () => {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Update detected. Syncing to staging...' });
+await ctx.actions.runAction('browser:start', {});
+});
+// Event: Browser closed (from launcher)
+ctx.events.on('browser:closed', async (data) => {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Browser closed with code ${data.code}` });
+// Emit event that can be picked up by other plugins (e.g., to notify backend)
+ctx.events.emit('session:terminated', { reason: 'browser_closed' });
+});
+}
+};
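BrowserManagerPlugin only orchestrates: it syncs dist/ into the staging directory, validates the manifest, and delegates to whichever launcher registered the launcher:launch action — WSLLauncherPlugin when /mnt/c exists, NativeLauncherPlugin otherwise. The compressed sketch below shows that dispatch pattern with a stand-in action registry; registerAction and runAction here are simplified substitutes for the skeleton-crew-runtime API, not its real signature.

import fs from 'fs-extra';

// Stand-in action registry: one shared action id, one active implementation.
const actions = new Map();
const registerAction = ({ id, handler }) => actions.set(id, handler);
const runAction = (id, payload) => actions.get(id)?.(payload);

const isWSL = fs.existsSync('/mnt/c');
if (isWSL) {
  registerAction({ id: 'launcher:launch', handler: async (p) => console.log('WSL launch of', p.extensionPath) });
} else {
  registerAction({ id: 'launcher:launch', handler: async (p) => console.log('Native launch of', p.extensionPath) });
}

// The orchestrator never branches on the environment itself:
await runAction('launcher:launch', { extensionPath: '/tmp/ext', stagingDir: '/tmp/ext' });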
package/dist/plugins/browser/NativeLauncherPlugin.js
ADDED

@@ -0,0 +1,91 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { spawn } from 'child_process';
+import { findChrome, normalizePathToWindows } from '../../utils/browserUtils.js';
+let chromeProcess = null;
+export const NativeLauncherPlugin = {
+name: 'native-launcher',
+version: '1.0.0',
+setup(ctx) {
+// Only active if NOT in WSL
+const isWSL = fs.existsSync('/mnt/c');
+if (isWSL)
+return;
+ctx.actions.registerAction({
+id: 'launcher:launch',
+handler: async (payload) => {
+const config = ctx.host.config;
+const chromePath = findChrome();
+if (!chromePath) {
+await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found.' });
+return false;
+}
+let executable = chromePath;
+if (process.platform === 'win32') {
+executable = normalizePathToWindows(chromePath);
+}
+// Native Windows / Linux
+let safeDist = path.resolve(payload.extensionPath);
+// Default Profile
+let safeProfile = path.join(path.dirname(config.workDir), 'profile');
+if (process.platform === 'win32') {
+safeDist = normalizePathToWindows(safeDist);
+// Use C:\\Temp profile to avoid permissions issues
+safeProfile = 'C:\\Temp\\ai-ext-profile';
+}
+await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Executable: ${executable}` });
+await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Target: ${safeDist}` });
+const cleanArgs = [
+`--load-extension=${safeDist}`,
+`--user-data-dir=${safeProfile}`,
+'--no-first-run',
+'--no-default-browser-check',
+'--disable-gpu',
+'chrome://extensions'
+];
+try {
+// Kill existing process if any
+if (chromeProcess) {
+chromeProcess.kill();
+chromeProcess = null;
+}
+chromeProcess = spawn(executable, cleanArgs, {
+detached: false,
+stdio: 'ignore'
+});
+// Monitor process exit
+chromeProcess.on('exit', async (code) => {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome exited with code ${code}` });
+chromeProcess = null;
+ctx.events.emit('browser:closed', { code });
+});
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome launched with PID: ${chromeProcess.pid}` });
+}
+catch (spawnErr) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Spawn Failed: ${spawnErr.message}` });
+return false;
+}
+return true;
+}
+});
+// Register kill action
+ctx.actions.registerAction({
+id: 'launcher:kill',
+handler: async () => {
+if (chromeProcess) {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Terminating Chrome process...' });
+chromeProcess.kill();
+chromeProcess = null;
+return true;
+}
+return false;
+}
+});
+},
+dispose(ctx) {
+if (chromeProcess) {
+chromeProcess.kill();
+chromeProcess = null;
+}
+}
+};
package/dist/plugins/browser/WSLLauncherPlugin.js
ADDED

@@ -0,0 +1,180 @@
+import path from 'path';
+import fs from 'fs-extra';
+import { spawn } from 'child_process';
+import { findChrome } from '../../utils/browserUtils.js';
+let chromePid = null;
+export const WSLLauncherPlugin = {
+name: 'wsl-launcher',
+version: '1.0.0',
+setup(ctx) {
+// Only active in WSL
+const isWSL = fs.existsSync('/mnt/c');
+if (!isWSL)
+return;
+ctx.actions.registerAction({
+id: 'launcher:launch',
+handler: async (payload) => {
+const chromePath = findChrome();
+if (!chromePath) {
+await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found for detached launch.' });
+return false;
+}
+// Hardcoded Safe Paths for WSL Strategy
+const winStagingDir = 'C:\\Temp\\ai-ext-preview';
+const winProfile = 'C:\\Temp\\ai-ext-profile';
+// Calculate Final Windows Extension Path
+// We assume payload.extensionPath starts with /mnt/c/Temp/ai-ext-preview
+// But simplified: We know we sync to STAGING_DIR.
+// If extensionPath is nested, we handle it relative to STAGING_DIR.
+let finalWinExtensionPath = winStagingDir;
+if (payload.extensionPath !== payload.stagingDir) {
+const relative = path.relative(payload.stagingDir, payload.extensionPath);
+// Join with backslashes
+finalWinExtensionPath = path.posix.join(winStagingDir.replace(/\\/g, '/'), relative).replace(/\//g, '\\');
+}
+const driveLetter = 'c';
+const winChromePath = chromePath
+.replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\`)
+.replace(/\//g, '\\');
+await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Launch Target (Win): ${finalWinExtensionPath}` });
+// Create PowerShell Launch Script with PID capture
+const psContent = `
+$chromePath = "${winChromePath}"
+$extPath = "${finalWinExtensionPath}"
+$profilePath = "${winProfile}"
+
+# Verify Paths
+if (-not (Test-Path -Path $extPath)) {
+Write-Host "ERROR: Extension Path NOT FOUND!"
+exit 1
+}
+
+# Create Profile Dir if needed
+if (-not (Test-Path -Path $profilePath)) {
+New-Item -ItemType Directory -Force -Path $profilePath | Out-Null
+}
+
+$argsList = @(
+"--load-extension=\`"$extPath\`"",
+"--user-data-dir=\`"$profilePath\`"",
+"--no-first-run",
+"--no-default-browser-check",
+"--disable-gpu",
+"about:blank"
+)
+
+# Launch and capture PID
+$process = Start-Process -FilePath $chromePath -ArgumentList $argsList -PassThru
+Write-Host "CHROME_PID:$($process.Id)"
+`;
+// Write ps1 to STAGING_DIR/launch.ps1
+const psPath = path.join(payload.stagingDir, 'launch.ps1');
+try {
+await fs.writeFile(psPath, psContent);
+}
+catch (e) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `WSL Write PS1 Failed: ${e.message}` });
+return false;
+}
+// Execute via PowerShell
+const psPathWin = `${winStagingDir}\\launch.ps1`;
+const child = spawn('powershell.exe', ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', psPathWin], {
+detached: false,
+stdio: ['ignore', 'pipe', 'pipe']
+});
+// Capture PID from output
+if (child.stdout) {
+child.stdout.on('data', async (chunk) => {
+const msg = chunk.toString();
+const pidMatch = msg.match(/CHROME_PID:(\d+)/);
+if (pidMatch) {
+chromePid = parseInt(pidMatch[1], 10);
+await ctx.actions.runAction('core:log', { level: 'info', message: `Chrome launched with PID: ${chromePid}` });
+// Start monitoring the process
+monitorProcess(ctx, chromePid);
+}
+await ctx.actions.runAction('core:log', { level: 'info', message: `[PS1] ${msg.trim()}` });
+});
+}
+if (child.stderr) {
+child.stderr.on('data', async (chunk) => {
+const msg = chunk.toString();
+// Ignore minor PS noise unless critical
+if (msg.includes('Exec format error')) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `CRITICAL: WSL Interop broken.` });
+}
+else if (msg.trim()) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Error: ${msg}` });
+}
+});
+}
+return true;
+}
+});
+// Register kill action
+ctx.actions.registerAction({
+id: 'launcher:kill',
+handler: async () => {
+if (chromePid) {
+await ctx.actions.runAction('core:log', { level: 'info', message: `Terminating Chrome process (PID: ${chromePid})...` });
+try {
+// Use taskkill via PowerShell
+const killChild = spawn('powershell.exe', ['-Command', `Stop-Process -Id ${chromePid} -Force`], {
+stdio: 'ignore'
+});
+killChild.on('exit', async (code) => {
+if (code === 0) {
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Chrome process terminated successfully.' });
+chromePid = null;
+}
+else {
+await ctx.actions.runAction('core:log', { level: 'warn', message: `taskkill exited with code ${code}` });
+}
+});
+return true;
+}
+catch (err) {
+await ctx.actions.runAction('core:log', { level: 'error', message: `Kill failed: ${err.message}` });
+return false;
+}
+}
+return false;
+}
+});
+// Helper function to monitor process
+function monitorProcess(ctx, pid) {
+const checkInterval = setInterval(async () => {
+try {
+const checkChild = spawn('powershell.exe', ['-Command', `Get-Process -Id ${pid} -ErrorAction SilentlyContinue`], {
+stdio: 'pipe'
+});
+let output = '';
+if (checkChild.stdout) {
+checkChild.stdout.on('data', (chunk) => {
+output += chunk.toString();
+});
+}
+checkChild.on('exit', async (code) => {
+if (!output.trim() || code !== 0) {
+// Process no longer exists
+clearInterval(checkInterval);
+await ctx.actions.runAction('core:log', { level: 'info', message: 'Chrome process exited.' });
+chromePid = null;
+ctx.events.emit('browser:closed', { code: 0 });
+}
+});
+}
+catch (err) {
+clearInterval(checkInterval);
+}
+}, 2000); // Check every 2 seconds
+}
+},
+dispose(ctx) {
+if (chromePid) {
+// Attempt to kill on cleanup
+spawn('powershell.exe', ['-Command', `Stop-Process -Id ${chromePid} -Force`], { stdio: 'ignore' });
+chromePid = null;
+}
+}
+};
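The WSL launcher cannot track chrome.exe directly from the Linux side, so launch.ps1 prints a CHROME_PID:<id> marker that the plugin parses from PowerShell's stdout and later feeds to Stop-Process and Get-Process. A self-contained sketch of that PID-capture handshake follows; the function name and error message are illustrative.

import { spawn } from 'child_process';

// Sketch: run a PowerShell script from WSL and resolve with the Windows PID
// it reports via the CHROME_PID:<id> marker convention used above.
export function runPsAndCapturePid(psScriptWinPath) {
  return new Promise((resolve, reject) => {
    const child = spawn('powershell.exe',
      ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', psScriptWinPath],
      { stdio: ['ignore', 'pipe', 'pipe'] });
    let pid = null;
    child.stdout.on('data', (chunk) => {
      const match = chunk.toString().match(/CHROME_PID:(\d+)/);
      if (match) pid = parseInt(match[1], 10);
    });
    child.on('error', reject);
    child.on('exit', () => (pid ? resolve(pid) : reject(new Error('No CHROME_PID marker in output'))));
  });
}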
package/dist/utils/browserUtils.js
ADDED

@@ -0,0 +1,79 @@
+import path from 'path';
+import fs from 'fs-extra';
+const CHROME_PATHS = [
+// Standard Windows Paths
+'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe',
+'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
+// WSL Mappings
+'/mnt/c/Program Files/Google/Chrome/Application/chrome.exe',
+'/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
+// Git Bash / Unix-y Windows Environment Mappings
+'/c/Program Files/Google/Chrome/Application/chrome.exe',
+'/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
+// Linux
+'/usr/bin/google-chrome',
+'/usr/bin/chromium'
+];
+export function findChrome() {
+for (const p of CHROME_PATHS) {
+if (fs.existsSync(p))
+return p;
+}
+return null;
+}
+// --- Helper to find actual extension root (handle nested folder in zip) ---
+export const findExtensionRoot = (dir) => {
+if (fs.existsSync(path.join(dir, 'manifest.json')))
+return dir;
+// Check immediate subdirectories (depth 1)
+try {
+const items = fs.readdirSync(dir);
+for (const item of items) {
+const fullPath = path.join(dir, item);
+if (fs.statSync(fullPath).isDirectory()) {
+if (fs.existsSync(path.join(fullPath, 'manifest.json'))) {
+return fullPath;
+}
+}
+}
+}
+catch (e) {
+// Dir might be empty or invalid
+}
+return null;
+};
+export const normalizePathToWindows = (p) => {
+// Handle Git Bash /c/ style
+const gitBashMatch = p.match(/^\/([a-z])\/(.*)/i);
+if (gitBashMatch) {
+return `${gitBashMatch[1].toUpperCase()}:\\${gitBashMatch[2].replace(/\//g, '\\')}`;
+}
+// Handle Forward slashes
+return p.replace(/\//g, '\\');
+};
+export const stripTrailingSlash = (p) => {
+return p.replace(/[\\\/]+$/, '');
+};
+// --- Helper to validate extension directory existence and structure ---
+export const validateExtension = (dir) => {
+if (!fs.existsSync(dir)) {
+return { valid: false, error: 'Directory does not exist' };
+}
+const stats = fs.statSync(dir);
+if (!stats.isDirectory()) {
+return { valid: false, error: 'Path is not a directory' };
+}
+const manifestPath = path.join(dir, 'manifest.json');
+if (!fs.existsSync(manifestPath)) {
+return { valid: false, error: 'manifest.json missing' };
+}
+// Basic JSON validity check
+try {
+const content = fs.readFileSync(manifestPath, 'utf-8');
+JSON.parse(content);
+}
+catch (e) {
+return { valid: false, error: 'manifest.json is invalid JSON' };
+}
+return { valid: true };
+};
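The launcher plugins consume these helpers in a fixed order: resolve a possibly nested extension root, validate it, then convert the path for the target environment. A short usage sketch, with an example staging path and a relative import used purely for illustration:

import { findExtensionRoot, validateExtension, normalizePathToWindows } from './browserUtils.js';

const staging = '/mnt/c/Temp/ai-ext-preview';        // example staging directory
const root = findExtensionRoot(staging) || staging;  // handles zips that wrap the extension in a folder
const check = validateExtension(root);
if (!check.valid) {
  console.error(`Extension invalid: ${check.error}`);
} else {
  console.log('Load path (Windows form):', normalizePathToWindows(root));
}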
package/package.json
CHANGED

package/dist/plugins/BrowserPlugin.js
REMOVED

@@ -1,288 +0,0 @@
-import path from 'path';
-import { spawn } from 'child_process';
-import fs from 'fs-extra';
-const CHROME_PATHS = [
-// Standard Windows Paths
-'C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe',
-'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
-// WSL Mappings
-'/mnt/c/Program Files/Google/Chrome/Application/chrome.exe',
-'/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
-// Git Bash / Unix-y Windows Environment Mappings
-'/c/Program Files/Google/Chrome/Application/chrome.exe',
-'/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
-// Linux
-'/usr/bin/google-chrome',
-'/usr/bin/chromium'
-];
-function findChrome() {
-for (const p of CHROME_PATHS) {
-if (fs.existsSync(p))
-return p;
-}
-return null;
-}
-// --- Helper to find actual extension root (handle nested folder in zip) ---
-export const findExtensionRoot = (dir) => {
-if (fs.existsSync(path.join(dir, 'manifest.json')))
-return dir;
-// Check immediate subdirectories (depth 1)
-try {
-const items = fs.readdirSync(dir);
-for (const item of items) {
-const fullPath = path.join(dir, item);
-if (fs.statSync(fullPath).isDirectory()) {
-if (fs.existsSync(path.join(fullPath, 'manifest.json'))) {
-return fullPath;
-}
-}
-}
-}
-catch (e) {
-// Dir might be empty or invalid
-}
-return null;
-};
-export const normalizePathToWindows = (p) => {
-// Handle Git Bash /c/ style
-const gitBashMatch = p.match(/^\/([a-z])\/(.*)/i);
-if (gitBashMatch) {
-return `${gitBashMatch[1].toUpperCase()}:\\${gitBashMatch[2].replace(/\//g, '\\')}`;
-}
-// Handle Forward slashes
-return p.replace(/\//g, '\\');
-};
-export const stripTrailingSlash = (p) => {
-return p.replace(/[\\\/]+$/, '');
-};
-// --- Helper to validate extension directory existence and structure ---
-export const validateExtension = (dir) => {
-if (!fs.existsSync(dir)) {
-return { valid: false, error: 'Directory does not exist' };
-}
-const stats = fs.statSync(dir);
-if (!stats.isDirectory()) {
-return { valid: false, error: 'Path is not a directory' };
-}
-const manifestPath = path.join(dir, 'manifest.json');
-if (!fs.existsSync(manifestPath)) {
-return { valid: false, error: 'manifest.json missing' };
-}
-// Basic JSON validity check
-try {
-const content = fs.readFileSync(manifestPath, 'utf-8');
-JSON.parse(content);
-}
-catch (e) {
-return { valid: false, error: 'manifest.json is invalid JSON' };
-}
-return { valid: true };
-};
-export const BrowserPlugin = {
-name: 'browser',
-version: '1.0.0',
-setup(ctx) {
-const config = ctx.host.config;
-const DIST_DIR = path.join(config.workDir, 'dist');
-let runner = null;
-const launchDetached = async () => {
-const chromePath = findChrome();
-if (!chromePath) {
-await ctx.actions.runAction('core:log', { level: 'error', message: 'Chrome not found for detached launch.' });
-return false;
-}
-const isWSL = fs.existsSync('/mnt/c');
-let executable = chromePath;
-// Normalize Executable for Native Windows (Git Bash)
-if (!isWSL && process.platform === 'win32') {
-executable = normalizePathToWindows(chromePath);
-}
-const STAGING_DIR = isWSL ? '/mnt/c/Temp/ai-ext-preview' : path.join(config.workDir, '../staging');
-const WIN_PROFILE_DIR = 'C:/Temp/ai-ext-profile';
-// For native windows/linux, use local staging path
-// Note: We will evaluate actual extension root later, but base is STAGING_DIR
-const EXTENSION_PATH = isWSL ? 'C:/Temp/ai-ext-preview' : STAGING_DIR;
-// --- SYNC FUNCTION ---
-const syncToStaging = async () => {
-try {
-if (fs.existsSync(STAGING_DIR)) {
-fs.emptyDirSync(STAGING_DIR);
-}
-fs.ensureDirSync(STAGING_DIR);
-fs.copySync(DIST_DIR, STAGING_DIR);
-await ctx.actions.runAction('core:log', { level: 'info', message: `Synced code to Staging` });
-// Emit staged event for ServerPlugin (optional for now, but good practice)
-ctx.events.emit('browser:staged', { path: STAGING_DIR });
-}
-catch (err) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Failed to sync to staging: ${err.message}` });
-}
-};
-// Initial Sync
-await syncToStaging();
-// Resolve proper root AFTER sync
-let extensionRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
-// Check if we found a valid root
-const validation = validateExtension(extensionRoot);
-if (!validation.valid) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `[CRITICAL] Extension validation failed: ${validation.error} in ${extensionRoot}` });
-await ctx.actions.runAction('core:log', { level: 'info', message: `Checked Path: ${extensionRoot}` });
-// We proceed anyway? Or should we stop?
-// Previous logic proceeded but logged critical error.
-// Let's keep it logging critical but maybe return false if we wanted to be strict.
-// However, user might fix it live.
-}
-else if (extensionRoot !== STAGING_DIR) {
-await ctx.actions.runAction('core:log', { level: 'info', message: `Detected nested extension at: ${path.basename(extensionRoot)}` });
-}
-// Listen for updates and re-sync
-ctx.events.on('downloader:updated', async (data) => {
-await ctx.actions.runAction('core:log', { level: 'info', message: 'Update detected. Syncing to staging...' });
-await syncToStaging();
-// Re-validate on update?
-// const newRoot = findExtensionRoot(STAGING_DIR) || STAGING_DIR;
-// const newValidation = validateExtension(newRoot);
-// if (!newValidation.valid) ...
-});
-await ctx.actions.runAction('core:log', { level: 'info', message: 'Browser running in Detached Mode.' });
-// Launch Logic
-// Launch Logic
-if (isWSL) {
-// -------------------------------------------------------------------------
-// WSL STRATEGY (Validated 2025-12-24)
-// 1. Use Windows User Profile for staging to avoid Permission/Path issues
-// 2. Use PowerShell script to launch Chrome to reliably pass arguments
-// -------------------------------------------------------------------------
-// 1. Setup Safe Paths (C:\Temp)
-// We use the same path that syncToStaging() used (/mnt/c/Temp/ai-ext-preview)
-const winStagingDir = 'C:\\Temp\\ai-ext-preview';
-const winProfile = 'C:\\Temp\\ai-ext-profile';
-let userProfileWin = 'C:\\Temp'; // Legacy variable support
-const driveLetter = 'c';
-// Calculate final paths
-let finalWinExtensionPath = winStagingDir;
-// Handle nested extension root
-if (extensionRoot !== STAGING_DIR) {
-const relative = path.relative(STAGING_DIR, extensionRoot);
-finalWinExtensionPath = path.posix.join(winStagingDir.replace(/\\/g, '/'), relative).replace(/\//g, '\\');
-}
-const winChromePath = chromePath
-.replace(new RegExp(`^/mnt/${driveLetter}/`), `${driveLetter.toUpperCase()}:\\`)
-.replace(/\//g, '\\');
-await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Launch Target (Win): ${finalWinExtensionPath}` });
-// await ctx.actions.runAction('core:log', { level: 'info', message: `WSL Profile (Win): ${winProfile}` });
-// Create PowerShell Launch Script
-const psContent = `
-$chromePath = "${winChromePath}"
-$extPath = "${finalWinExtensionPath}"
-$profilePath = "${winProfile}"
-
-Write-Host "DEBUG: ChromePath: $chromePath"
-Write-Host "DEBUG: ExtPath: $extPath"
-Write-Host "DEBUG: ProfilePath: $profilePath"
-
-# Verify Paths
-if (-not (Test-Path -Path $extPath)) {
-Write-Host "ERROR: Extension Path NOT FOUND!"
-} else {
-Write-Host "DEBUG: Extension Path Exists."
-}
-
-# Create Profile Dir if needed
-if (-not (Test-Path -Path $profilePath)) {
-New-Item -ItemType Directory -Force -Path $profilePath | Out-Null
-}
-
-$argsList = @(
-"--load-extension=""$extPath""",
-"--user-data-dir=""$profilePath""",
-"--no-first-run",
-"--no-default-browser-check",
-"--disable-gpu",
-"about:blank"
-)
-
-# Convert to single string to ensure Start-Process handles it safely
-$argStr = $argsList -join " "
-Write-Host "DEBUG: Args: $argStr"
-
-Write-Host "DEBUG: Launching Chrome..."
-Start-Process -FilePath $chromePath -ArgumentList $argStr
-`;
-// Write ps1 to /mnt/c/Temp/ai-ext-preview/launch.ps1 (Same as STAGING_DIR)
-const psPath = path.join(STAGING_DIR, 'launch.ps1');
-try {
-await fs.writeFile(psPath, psContent);
-}
-catch (e) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `WSL Write PS1 Failed: ${e.message}` });
-}
-// Execute via PowerShell (Spawn detached)
-// psPathWin is C:\\Temp\\ai-ext-preview\\launch.ps1
-const psPathWin = `${winStagingDir}\\launch.ps1`;
-const child = spawn('powershell.exe', ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-File', psPathWin], {
-detached: true,
-stdio: ['ignore', 'pipe', 'pipe'] // Pipe stderr AND stdout to catch launch errors/debug
-});
-if (child.stdout) {
-child.stdout.on('data', async (chunk) => {
-const msg = chunk.toString();
-await ctx.actions.runAction('core:log', { level: 'info', message: `[PS1] ${msg.trim()}` });
-});
-}
-if (child.stderr) {
-child.stderr.on('data', async (chunk) => {
-const msg = chunk.toString();
-await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Error (Stderr): ${msg}` });
-if (msg.includes('Exec format error')) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `CRITICAL: WSL Interop is broken. Cannot launch Chrome.` });
-await ctx.actions.runAction('core:log', { level: 'error', message: `FIX: Open PowerShell as Admin and run: wsl --shutdown` });
-ctx.events.emit('browser:launch-failed', { reason: 'WSL_INTEROP_BROKEN' });
-}
-});
-}
-child.on('error', async (err) => {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Launch Failed: ${err.message}` });
-ctx.events.emit('browser:launch-failed', { reason: err.message });
-});
-child.unref();
-return true;
-}
-else {
-// Native Windows / Linux
-// Use extensionRoot which points to the detected subfolder or root
-const safeDist = path.resolve(extensionRoot);
-const safeProfile = path.join(path.dirname(config.workDir), 'profile'); // ~/.ai-extension-preview/profile
-await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Executable: ${executable}` });
-await ctx.actions.runAction('core:log', { level: 'info', message: `Native Launch Target: ${safeDist}` });
-const cleanArgs = [
-`--load-extension=${safeDist}`,
-`--user-data-dir=${safeProfile}`,
-'--no-first-run',
-'--no-default-browser-check',
-'--disable-gpu',
-'chrome://extensions'
-];
-try {
-const subprocess = spawn(executable, cleanArgs, {
-detached: true,
-stdio: 'ignore'
-});
-subprocess.unref();
-}
-catch (spawnErr) {
-await ctx.actions.runAction('core:log', { level: 'error', message: `Spawn Failed: ${spawnErr.message}` });
-}
-return true;
-}
-};
-ctx.actions.registerAction({
-id: 'browser:start',
-handler: async () => {
-// Force Detached Mode for Reliability on ALL platforms
-// This creates the stable "Staging" workflow we want.
-return await launchDetached();
-}
-});
-}
-};