@kritchoff/agent-browser 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +903 -0
- package/README.sdk.md +77 -0
- package/bin/agent-browser-linux-x64 +0 -0
- package/bin/agent-browser.js +109 -0
- package/dist/actions.d.ts +17 -0
- package/dist/actions.d.ts.map +1 -0
- package/dist/actions.js +1427 -0
- package/dist/actions.js.map +1 -0
- package/dist/browser.d.ts +474 -0
- package/dist/browser.d.ts.map +1 -0
- package/dist/browser.js +1566 -0
- package/dist/browser.js.map +1 -0
- package/dist/cdp-client.d.ts +103 -0
- package/dist/cdp-client.d.ts.map +1 -0
- package/dist/cdp-client.js +223 -0
- package/dist/cdp-client.js.map +1 -0
- package/dist/daemon.d.ts +60 -0
- package/dist/daemon.d.ts.map +1 -0
- package/dist/daemon.js +401 -0
- package/dist/daemon.js.map +1 -0
- package/dist/dualmode-config.d.ts +37 -0
- package/dist/dualmode-config.d.ts.map +1 -0
- package/dist/dualmode-config.js +44 -0
- package/dist/dualmode-config.js.map +1 -0
- package/dist/dualmode-fetcher.d.ts +60 -0
- package/dist/dualmode-fetcher.d.ts.map +1 -0
- package/dist/dualmode-fetcher.js +449 -0
- package/dist/dualmode-fetcher.js.map +1 -0
- package/dist/dualmode-types.d.ts +183 -0
- package/dist/dualmode-types.d.ts.map +1 -0
- package/dist/dualmode-types.js +8 -0
- package/dist/dualmode-types.js.map +1 -0
- package/dist/ios-actions.d.ts +11 -0
- package/dist/ios-actions.d.ts.map +1 -0
- package/dist/ios-actions.js +228 -0
- package/dist/ios-actions.js.map +1 -0
- package/dist/ios-manager.d.ts +266 -0
- package/dist/ios-manager.d.ts.map +1 -0
- package/dist/ios-manager.js +1073 -0
- package/dist/ios-manager.js.map +1 -0
- package/dist/protocol.d.ts +26 -0
- package/dist/protocol.d.ts.map +1 -0
- package/dist/protocol.js +832 -0
- package/dist/protocol.js.map +1 -0
- package/dist/snapshot.d.ts +83 -0
- package/dist/snapshot.d.ts.map +1 -0
- package/dist/snapshot.js +653 -0
- package/dist/snapshot.js.map +1 -0
- package/dist/stream-server.d.ts +117 -0
- package/dist/stream-server.d.ts.map +1 -0
- package/dist/stream-server.js +305 -0
- package/dist/stream-server.js.map +1 -0
- package/dist/types.d.ts +742 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/docker-compose.sdk.yml +45 -0
- package/package.json +85 -0
- package/scripts/benchmark.sh +80 -0
- package/scripts/build-all-platforms.sh +68 -0
- package/scripts/check-version-sync.js +39 -0
- package/scripts/copy-native.js +36 -0
- package/scripts/fast_reset.sh +108 -0
- package/scripts/postinstall.js +235 -0
- package/scripts/publish_images.sh +55 -0
- package/scripts/snapshot_manager.sh +293 -0
- package/scripts/start-android-agent.sh +49 -0
- package/scripts/sync-version.js +69 -0
- package/scripts/vaccine-run +26 -0
- package/sdk.sh +153 -0
- package/skills/agent-browser/SKILL.md +217 -0
- package/skills/agent-browser/references/authentication.md +202 -0
- package/skills/agent-browser/references/commands.md +259 -0
- package/skills/agent-browser/references/proxy-support.md +188 -0
- package/skills/agent-browser/references/session-management.md +193 -0
- package/skills/agent-browser/references/snapshot-refs.md +194 -0
- package/skills/agent-browser/references/video-recording.md +173 -0
- package/skills/agent-browser/templates/authenticated-session.sh +97 -0
- package/skills/agent-browser/templates/capture-workflow.sh +69 -0
- package/skills/agent-browser/templates/form-automation.sh +62 -0
- package/skills/skill-creator/LICENSE.txt +202 -0
- package/skills/skill-creator/SKILL.md +356 -0
- package/skills/skill-creator/references/output-patterns.md +82 -0
- package/skills/skill-creator/references/workflows.md +28 -0
- package/skills/skill-creator/scripts/init_skill.py +303 -0
- package/skills/skill-creator/scripts/package_skill.py +113 -0
- package/skills/skill-creator/scripts/quick_validate.py +95 -0
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Postinstall script for agent-browser
 *
 * Downloads the platform-specific native binary if not present.
 * On global installs, patches npm's bin entry to use the native binary directly:
 * - Windows: Overwrites .cmd/.ps1 shims
 * - Mac/Linux: Replaces symlink to point to native binary
 */

import {
  existsSync,
  mkdirSync,
  chmodSync,
  createWriteStream,
  unlinkSync,
  writeFileSync,
  symlinkSync,
  lstatSync,
  readFileSync,
} from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import { platform, arch } from 'os';
import { get } from 'https';
import { execSync } from 'child_process';

// ES modules have no __dirname; derive it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));
const projectRoot = join(__dirname, '..');
const binDir = join(projectRoot, 'bin');

// Platform detection: release assets are named agent-browser-<os>-<arch>[.exe].
const platformKey = `${platform()}-${arch()}`;
const ext = platform() === 'win32' ? '.exe' : '';
const binaryName = `agent-browser-${platformKey}${ext}`;
const binaryPath = join(binDir, binaryName);

// Package info (version drives the release-asset URL below).
// readFileSync comes from the static fs import above; no dynamic import needed.
const packageJson = JSON.parse(
  readFileSync(join(projectRoot, 'package.json'), 'utf8')
);
const version = packageJson.version;

// GitHub release URL for the versioned native binary.
const GITHUB_REPO = 'vercel-labs/agent-browser';
const DOWNLOAD_URL = `https://github.com/${GITHUB_REPO}/releases/download/v${version}/${binaryName}`;
+
/**
 * Download `url` to the file `dest`, following HTTP redirects.
 *
 * On ANY failure the partially written file is removed. This matters because
 * main() treats `existsSync(binaryPath)` as "binary is installed": without the
 * cleanup, a failed download would leave a 0-byte file behind and every later
 * install would skip the download and ship a broken binary.
 *
 * @param {string} url  - HTTPS URL to fetch.
 * @param {string} dest - Destination file path.
 * @returns {Promise<void>} resolves when the file is fully written.
 */
async function downloadFile(url, dest) {
  return new Promise((resolve, reject) => {
    const file = createWriteStream(dest);

    // Close the stream and remove any partial file before rejecting.
    const fail = (err) => {
      file.close(() => {
        try {
          unlinkSync(dest);
        } catch {
          // File already gone (or never created) - nothing to clean up.
        }
        reject(err);
      });
    };

    const request = (url, redirectsLeft) => {
      get(url, (response) => {
        const { statusCode } = response;

        // Follow redirects (GitHub release assets redirect to a CDN).
        if ([301, 302, 303, 307, 308].includes(statusCode)) {
          response.resume(); // discard the redirect body
          if (!response.headers.location) {
            fail(new Error(`Redirect (HTTP ${statusCode}) missing Location header`));
            return;
          }
          if (redirectsLeft <= 0) {
            fail(new Error('Too many redirects'));
            return;
          }
          request(response.headers.location, redirectsLeft - 1);
          return;
        }

        if (statusCode !== 200) {
          fail(new Error(`Failed to download: HTTP ${statusCode}`));
          return;
        }

        response.pipe(file);
        file.on('finish', () => {
          file.close();
          resolve();
        });
        // Disk-level errors (ENOSPC, EACCES, ...) must also clean up.
        file.on('error', fail);
      }).on('error', fail);
    };

    request(url, 5);
  });
}
|
|
70
|
+
|
|
71
|
+
/**
 * Entry point of the postinstall flow.
 *
 * If the native binary is already present it is made executable and reused;
 * otherwise it is downloaded from the GitHub release matching this package
 * version. Either way, global-install bin entries are then optimized and the
 * Playwright reminder is printed. Download failures are non-fatal: the CLI
 * falls back to the bundled Node.js wrapper.
 */
async function main() {
  // npm does not preserve the execute bit, so restore it on Unix.
  const markExecutable = () => {
    if (platform() !== 'win32') {
      chmodSync(binaryPath, 0o755);
    }
  };

  // Shared tail: optimize global-install bin entries, then print the reminder.
  const finalize = async () => {
    await fixGlobalInstallBin();
    showPlaywrightReminder();
  };

  if (existsSync(binaryPath)) {
    markExecutable();
    console.log(`✓ Native binary ready: ${binaryName}`);
    await finalize();
    return;
  }

  if (!existsSync(binDir)) {
    mkdirSync(binDir, { recursive: true });
  }

  console.log(`Downloading native binary for ${platformKey}...`);
  console.log(`URL: ${DOWNLOAD_URL}`);

  try {
    await downloadFile(DOWNLOAD_URL, binaryPath);
    markExecutable();
    console.log(`✓ Downloaded native binary: ${binaryName}`);
  } catch (err) {
    // Best-effort: the JS wrapper still works without the native binary.
    console.log(`⚠ Could not download native binary: ${err.message}`);
    console.log(`  The CLI will use Node.js fallback (slightly slower startup)`);
    console.log('');
    console.log('To build the native binary locally:');
    console.log('  1. Install Rust: https://rustup.rs');
    console.log('  2. Run: npm run build:native');
  }

  await finalize();
}
|
|
119
|
+
|
|
120
|
+
/**
 * Print a boxed reminder explaining how to install the Playwright browser
 * binaries that the agent drives. Purely informational console output.
 */
function showPlaywrightReminder() {
  const banner = [
    '╔═══════════════════════════════════════════════════════════════════════════╗',
    '║   To download browser binaries, run:                                      ║',
    '║                                                                           ║',
    '║       npx playwright install chromium                                     ║',
    '║                                                                           ║',
    '║   On Linux, include system dependencies with:                             ║',
    '║                                                                           ║',
    '║       npx playwright install --with-deps chromium                         ║',
    '║                                                                           ║',
    '╚═══════════════════════════════════════════════════════════════════════════╝',
  ];
  console.log('');
  for (const line of banner) {
    console.log(line);
  }
}
|
|
133
|
+
|
|
134
|
+
/**
 * Fix npm's bin entry on global installs to use the native binary directly.
 * This provides zero-overhead CLI execution for global installs.
 * Dispatches to the platform-appropriate strategy: shim rewrite on Windows,
 * symlink replacement everywhere else.
 */
async function fixGlobalInstallBin() {
  const patch = platform() === 'win32' ? fixWindowsShims : fixUnixSymlink;
  await patch();
}
|
|
145
|
+
|
|
146
|
+
/**
 * Fix npm symlink on Mac/Linux global installs.
 * npm links <prefix>/bin/agent-browser to the JS wrapper; repoint that
 * symlink at the native binary instead. No-op when npm is unavailable,
 * when no symlink exists (local install), or when something other than a
 * symlink occupies the path. Failures are logged, never thrown.
 */
async function fixUnixSymlink() {
  // npm's global bin directory is `npm prefix -g` + /bin.
  let globalBin;
  try {
    globalBin = join(execSync('npm prefix -g', { encoding: 'utf8' }).trim(), 'bin');
  } catch {
    return; // npm not available
  }

  const linkPath = join(globalBin, 'agent-browser');

  // Only proceed when an npm-created symlink is present (global-install marker).
  try {
    if (!lstatSync(linkPath).isSymbolicLink()) {
      return; // a regular file lives there - leave it alone
    }
  } catch {
    return; // nothing at that path: not a global install
  }

  try {
    unlinkSync(linkPath);
    symlinkSync(binaryPath, linkPath);
    console.log('✓ Optimized: symlink points to native binary (zero overhead)');
  } catch (err) {
    // Non-fatal (e.g. permissions): the npm-installed JS wrapper still works.
    console.log(`⚠ Could not optimize symlink: ${err.message}`);
    console.log('  CLI will work via Node.js wrapper (slightly slower startup)');
  }
}
|
|
183
|
+
|
|
184
|
+
/**
 * Fix npm-generated shims on Windows global installs.
 * npm generates shims that try to run /bin/sh, which doesn't exist on Windows.
 * We overwrite them to invoke the native .exe directly.
 * No-op when npm is unavailable or when no .cmd shim exists (local install).
 * Failures are logged, never thrown - the JS wrapper remains usable.
 */
async function fixWindowsShims() {
  // Check if this is a global install by looking for npm's global prefix
  let npmBinDir;
  try {
    npmBinDir = execSync('npm prefix -g', { encoding: 'utf8' }).trim();
  } catch {
    return; // Not a global install or npm not available
  }

  // The shims are in the npm prefix directory (not prefix/bin on Windows)
  const cmdShim = join(npmBinDir, 'agent-browser.cmd');
  const ps1Shim = join(npmBinDir, 'agent-browser.ps1');

  // Only fix if shims exist (indicates global install)
  if (!existsSync(cmdShim)) {
    return;
  }

  // Path to native binary relative to npm prefix
  // NOTE(review): this package is published as @kritchoff/agent-browser, so a
  // global install would land under node_modules\@kritchoff\agent-browser -
  // verify this unscoped path matches the actual installed layout.
  const relativeBinaryPath = 'node_modules\\agent-browser\\bin\\agent-browser-win32-x64.exe';

  try {
    // Overwrite .cmd shim; %~dp0 expands to the shim's own directory
    // (the npm prefix), so the relative path resolves from there.
    const cmdContent = `@ECHO off\r\n"%~dp0${relativeBinaryPath}" %*\r\n`;
    writeFileSync(cmdShim, cmdContent);

    // Overwrite .ps1 shim
    // NOTE(review): the generated script computes $exe but never appends it -
    // harmless since relativeBinaryPath already ends in .exe, but it is dead
    // code inherited from npm's shim template.
    const ps1Content = `#!/usr/bin/env pwsh
$basedir = Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe = ""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
$exe = ".exe"
}
& "$basedir/${relativeBinaryPath.replace(/\\/g, '/')}" $args
exit $LASTEXITCODE
`;
    writeFileSync(ps1Shim, ps1Content);

    console.log('✓ Optimized: shims point to native binary (zero overhead)');
  } catch (err) {
    // Permission error or other issue - not critical, JS wrapper still works
    console.log(`⚠ Could not optimize shims: ${err.message}`);
    console.log('  CLI will work via Node.js wrapper (slightly slower startup)');
  }
}
|
|
234
|
+
|
|
235
|
+
// Kick off the install flow. Errors are logged but not rethrown, so the
// process exits 0 and a failed optimization never breaks `npm install`.
main().catch(console.error);
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
#!/bin/bash
# Script to build and push WootzApp Agent Browser images to Docker Hub.
#
# Usage:
#   ./scripts/publish_images.sh [version]
#
# If no version is provided, it defaults to 'latest'.
# Requires a prior 'docker login' with push rights to the $ORG namespace.

set -e

VERSION="${1:-latest}"
ORG="kritchoff"

# Run from the project root regardless of where the script is invoked from.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"

cd "$PROJECT_DIR"

# Colors
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'

echo -e "${BLUE}Building and Pushing WootzApp Agent Browser images (version: $VERSION)...${NC}"

# 1. Build Android Image
echo -e "
${BLUE}Building Android Image...${NC}"
docker build -t "$ORG/agent-android:$VERSION" ./android_world

# 2. Build Daemon Image (built from the repo root so the full context is available)
echo -e "
${BLUE}Building Daemon Image...${NC}"
docker build -t "$ORG/agent-daemon:$VERSION" -f docker/Dockerfile.agent-prod .

# 3. Push to Docker Hub
echo -e "
${BLUE}Pushing to Docker Hub...${NC}"
echo "Note: You must be logged in to Docker Hub with 'docker login'"

docker push "$ORG/agent-android:$VERSION"
docker push "$ORG/agent-daemon:$VERSION"

# Also tag as latest if a specific version was provided
if [ "$VERSION" != "latest" ]; then
  echo -e "
${BLUE}Tagging and Pushing latest...${NC}"
  docker tag "$ORG/agent-android:$VERSION" "$ORG/agent-android:latest"
  docker tag "$ORG/agent-daemon:$VERSION" "$ORG/agent-daemon:latest"
  docker push "$ORG/agent-android:latest"
  docker push "$ORG/agent-daemon:latest"
fi

echo -e "
${GREEN}Successfully published images to $ORG repository!${NC}"
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
#!/bin/bash
# Emulator snapshot import/export utility
#
# Manages Android emulator snapshots for agent-browser.
# Snapshots are stored in the emulator's AVD directory and can be
# exported as compressed tar.gz files for sharing or backup.
#
# Usage:
#   ./scripts/snapshot_manager.sh export <name> <output.tar.gz>
#   ./scripts/snapshot_manager.sh import <input.tar.gz> [name]
#   ./scripts/snapshot_manager.sh list
#   ./scripts/snapshot_manager.sh validate <name>
#
# Examples:
#   # Export current snapshot for sharing
#   ./scripts/snapshot_manager.sh export w8rl_clean ./my_snapshot.tar.gz
#
#   # Import a snapshot from a file
#   ./scripts/snapshot_manager.sh import ./my_snapshot.tar.gz
#
#   # Import with a different name
#   ./scripts/snapshot_manager.sh import ./my_snapshot.tar.gz imported_snapshot
#
#   # List all available snapshots
#   ./scripts/snapshot_manager.sh list
#
#   # Validate a snapshot is not corrupt
#   ./scripts/snapshot_manager.sh validate w8rl_clean

set -euo pipefail

# Run from the project root regardless of invocation directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"

cd "$PROJECT_DIR"

# Respect COMPOSE_FILE from environment
COMPOSE_FILE="${COMPOSE_FILE:-docker-compose.prod.yml}"

# Detect container name using docker compose
# NOTE(review): this lookup runs before argument parsing, so even
# `snapshot_manager.sh help` fails unless docker and the compose stack are
# available - consider deferring it into check_container.
CONTAINER=$(docker compose -f "$COMPOSE_FILE" ps -q android-service)
if [ -z "$CONTAINER" ]; then
  echo "Error: android-service container not running."
  exit 1
fi

# AVD whose snapshots are managed (override via EMULATOR_NAME env var).
AVD_NAME="${EMULATOR_NAME:-Pixel_6_API_34}"
SNAPSHOT_BASE="/root/.android/avd/${AVD_NAME}.avd/snapshots"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
|
|
56
|
+
|
|
57
|
+
# Leveled, colorized logging helpers. Each takes one message argument and
# writes a tagged line to stdout.
log_info()    { echo -e "${BLUE}[INFO]${NC} $1"; }

log_success() { echo -e "${GREEN}[OK]${NC} $1"; }

log_warn()    { echo -e "${YELLOW}[WARN]${NC} $1"; }

log_error()   { echo -e "${RED}[ERROR]${NC} $1"; }
|
|
72
|
+
|
|
73
|
+
# Check if container is running
# Guard used by every subcommand before running `docker exec`; aborts the
# whole script (exit 1) with a start-up hint when the container is down.
check_container() {
  # Use docker inspect for reliable status check (avoids SIGPIPE with grep -q)
  if [ "$(docker inspect -f '{{.State.Running}}' "$CONTAINER" 2>/dev/null)" != "true" ]; then
    log_error "Container '${CONTAINER}' is not running"
    log_info "Start it with: docker compose -f ${COMPOSE_FILE} up -d android-service"
    exit 1
  fi
}
|
|
82
|
+
|
|
83
|
+
# Validate a snapshot has all required files and is not corrupt
# Prints a per-file report. Returns 0 when all required files exist and
# ram.bin is at least ~1MB, 1 otherwise. Does not exit the script itself,
# so callers can decide how to react.
validate_snapshot_internal() {
  local name="$1"
  local snapshot_dir="${SNAPSHOT_BASE}/${name}"
  local valid=true

  log_info "Validating snapshot '${name}'..."

  # Required emulator snapshot files, checked inside the container.
  for file in ram.bin snapshot.pb hardware.ini; do
    if docker exec "$CONTAINER" test -f "${snapshot_dir}/${file}"; then
      local size
      size=$(docker exec "$CONTAINER" stat -c%s "${snapshot_dir}/${file}")
      echo -e "  ${GREEN}✓${NC} ${file}: ${size} bytes"
    else
      echo -e "  ${RED}✗${NC} ${file}: MISSING"
      valid=false
    fi
  done

  # Check ram.bin size (must be >= 1MB)
  # A tiny ram.bin means a truncated/corrupt snapshot even if the file exists.
  if docker exec "$CONTAINER" test -f "${snapshot_dir}/ram.bin"; then
    local ram_size
    ram_size=$(docker exec "$CONTAINER" stat -c%s "${snapshot_dir}/ram.bin")
    if [ "$ram_size" -lt 1000000 ]; then
      echo -e "  ${RED}✗${NC} ram.bin too small (${ram_size} bytes, expected >= 1MB)"
      valid=false
    fi
  fi

  if [ "$valid" = true ]; then
    log_success "Snapshot validation passed"
    return 0
  else
    log_error "Snapshot validation failed"
    return 1
  fi
}
|
|
120
|
+
|
|
121
|
+
# Export a snapshot to a tar.gz file
# Args: <name> <output.tar.gz>. Validates the snapshot first, then streams
# a compressed archive of its directory from the container to the host path.
cmd_export() {
  local name="${1:?Usage: $0 export <name> <output.tar.gz>}"
  local output="${2:?Usage: $0 export <name> <output.tar.gz>}"

  check_container

  # Validate snapshot exists
  if ! docker exec "$CONTAINER" test -d "${SNAPSHOT_BASE}/${name}"; then
    log_error "Snapshot '${name}' does not exist"
    exit 1
  fi

  log_info "Exporting snapshot '${name}' to ${output}..."

  # Validate before export - refuse to ship a corrupt snapshot.
  if ! validate_snapshot_internal "$name"; then
    log_error "Cannot export invalid snapshot"
    exit 1
  fi

  # Create tar.gz from snapshot directory, streamed to stdout and redirected
  # to the host file; -C keeps archive paths relative to the snapshots dir.
  docker exec "$CONTAINER" tar -czf - -C "$SNAPSHOT_BASE" "$name" > "$output"

  local size
  size=$(ls -lh "$output" | awk '{print $5}')
  log_success "Exported: ${output} (${size})"
}
|
|
149
|
+
|
|
150
|
+
# Import a snapshot from a tar.gz file
# Args: <input.tar.gz> [name]. The archive's top-level directory name is
# used unless [name] overrides it. Replaces any existing snapshot of the
# same name, then validates the result.
cmd_import() {
  local input="${1:?Usage: $0 import <input.tar.gz> [name]}"

  # Extract original name from tar
  # Disable pipefail to avoid SIGPIPE from tar | head
  set +o pipefail
  local original_name
  # NOTE(review): assumes archive entries look like '<name>/...'; an archive
  # with './' prefixed paths would yield '.' here - confirm the export format.
  original_name=$(tar -tzf "$input" 2>/dev/null | head -1 | cut -d'/' -f1)
  set -o pipefail

  local name="${2:-$original_name}"

  if [ -z "$name" ]; then
    log_error "Could not determine snapshot name from archive"
    exit 1
  fi

  check_container

  log_info "Importing snapshot from ${input} as '${name}'..."

  # Ensure snapshot directory exists
  docker exec "$CONTAINER" mkdir -p "$SNAPSHOT_BASE"

  # Remove existing snapshot if present
  if docker exec "$CONTAINER" test -d "${SNAPSHOT_BASE}/${name}"; then
    log_warn "Removing existing snapshot '${name}'..."
    docker exec "$CONTAINER" rm -rf "${SNAPSHOT_BASE}/${name}"
  fi

  # Extract snapshot into container
  # Use 'docker cp' to avoid stdin pipe issues
  log_info "Copying archive to container..."
  docker cp "$input" "$CONTAINER:/tmp/import_snapshot.tar.gz"

  log_info "Extracting archive..."
  if ! docker exec "$CONTAINER" tar -xzf /tmp/import_snapshot.tar.gz -C "$SNAPSHOT_BASE"; then
    log_error "Failed to extract snapshot archive"
    docker exec "$CONTAINER" rm -f /tmp/import_snapshot.tar.gz
    exit 1
  fi

  # Clean up
  docker exec "$CONTAINER" rm -f /tmp/import_snapshot.tar.gz

  # If renaming, move the extracted directory
  if [ "$name" != "$original_name" ]; then
    docker exec "$CONTAINER" mv "${SNAPSHOT_BASE}/${original_name}" "${SNAPSHOT_BASE}/${name}"
  fi

  # Validate after import
  if validate_snapshot_internal "$name"; then
    log_success "Import completed successfully"
    docker exec "$CONTAINER" ls -lh "${SNAPSHOT_BASE}/${name}/"
  else
    log_error "Import failed - snapshot validation failed"
    exit 1
  fi
}
|
|
210
|
+
|
|
211
|
+
# List all snapshots
# Shows a raw directory listing followed by a per-snapshot RAM-size summary.
cmd_list() {
  check_container

  log_info "Available snapshots in ${CONTAINER}:"
  echo ""

  if docker exec "$CONTAINER" test -d "$SNAPSHOT_BASE"; then
    # tail -n +2 drops the "total ..." header line emitted by ls -la.
    docker exec "$CONTAINER" ls -la "$SNAPSHOT_BASE" 2>/dev/null | tail -n +2 || echo "  (none)"

    echo ""
    log_info "Snapshot details:"
    for snapshot in $(docker exec "$CONTAINER" ls "$SNAPSHOT_BASE" 2>/dev/null); do
      local snapshot_dir="${SNAPSHOT_BASE}/${snapshot}"
      if docker exec "$CONTAINER" test -d "$snapshot_dir"; then
        local ram_size
        # Missing ram.bin is reported as 0MB rather than treated as an error.
        ram_size=$(docker exec "$CONTAINER" stat -c%s "${snapshot_dir}/ram.bin" 2>/dev/null || echo "0")
        local ram_mb=$((ram_size / 1024 / 1024))
        echo "  ${snapshot}: ${ram_mb}MB RAM"
      fi
    done
  else
    echo "  No snapshots found (snapshot directory does not exist)"
  fi
}
|
|
236
|
+
|
|
237
|
+
# Validate a snapshot
# Args: <name>. Exits 1 when the snapshot directory is missing; otherwise
# the exit status comes from validate_snapshot_internal.
cmd_validate() {
  local snap="${1:?Usage: $0 validate <name>}"

  check_container

  if ! docker exec "$CONTAINER" test -d "${SNAPSHOT_BASE}/${snap}"; then
    log_error "Snapshot '${snap}' does not exist"
    exit 1
  fi

  validate_snapshot_internal "$snap"
}
|
|
250
|
+
|
|
251
|
+
# Show usage
# Print command-line help for all subcommands to stdout.
usage() {
  echo "Usage: $0 {export|import|list|validate} [args...]"
  echo ""
  echo "Commands:"
  echo "  export <name> <output.tar.gz>   Export a snapshot to a file"
  echo "  import <input.tar.gz> [name]    Import a snapshot from a file"
  echo "  list                            List all available snapshots"
  echo "  validate <name>                 Validate a snapshot is not corrupt"
  echo ""
  echo "Examples:"
  echo "  $0 export w8rl_clean ./my_snapshot.tar.gz"
  echo "  $0 import ./my_snapshot.tar.gz"
  echo "  $0 import ./my_snapshot.tar.gz custom_name"
  echo "  $0 list"
  echo "  $0 validate w8rl_clean"
}
|
|
268
|
+
|
|
269
|
+
# Main command dispatch
# The first positional argument selects the subcommand; remaining arguments
# are forwarded to it. Unknown or missing commands print usage and exit 1.
case "${1:-}" in
  export)
    shift
    cmd_export "$@"
    ;;
  import)
    shift
    cmd_import "$@"
    ;;
  list)
    cmd_list
    ;;
  validate)
    shift
    cmd_validate "$@"
    ;;
  -h|--help|help)
    usage
    ;;
  *)
    usage
    exit 1
    ;;
esac
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
#!/bin/bash
# Container entrypoint for the agent-browser daemon.
# Flow: resolve the android-service container's IP, wait for its CDP bridge,
# start the Node daemon, then expose the daemon's unix socket over TCP via socat.
set -e

export AGENT_BROWSER_SOCKET_DIR=/tmp/agent-browser
mkdir -p $AGENT_BROWSER_SOCKET_DIR

echo "[Agent] Starting Agent Browser Service"

# Resolve IP of android-service to avoid Chrome Host header security block
# NOTE(review): this loop (and the CDP wait below) has no timeout - the
# container will wait forever if android-service never comes up.
echo "[Agent] Resolving android-service IP..."
ANDROID_IP=""
until [ -n "$ANDROID_IP" ]; do
  ANDROID_IP=$(getent hosts android-service | awk '{ print $1 }' | head -n 1)
  if [ -z "$ANDROID_IP" ]; then
    echo "[Agent] Waiting for DNS resolution..."
    sleep 2
  fi
done
echo "[Agent] Android IP: $ANDROID_IP"

# Wait for CDP Bridge (Port 9224)
echo "[Agent] Waiting for Android CDP at http://$ANDROID_IP:9224..."
until curl -s http://$ANDROID_IP:9224/json/version > /dev/null; do
  echo "[Agent] Waiting for CDP..."
  sleep 5
done
echo "[Agent] CDP Bridge is ready!"

# Start Daemon (backgrounded; its PID is watched below)
echo "[Agent] Starting Daemon..."
export AGENT_BROWSER_CDP_URL=http://$ANDROID_IP:9224
node dist/daemon.js &
DAEMON_PID=$!

# Start Socat Bridge for CLI
SOCKET_FILE="$AGENT_BROWSER_SOCKET_DIR/default.sock"
TCP_PORT=3000

# Wait until the daemon creates its unix socket; bail out early if the
# daemon process dies before the socket appears.
echo "[Agent] Waiting for socket..."
while [ ! -S "$SOCKET_FILE" ]; do
  if ! kill -0 $DAEMON_PID 2>/dev/null; then
    echo "[Agent] Daemon crashed!"
    exit 1
  fi
  sleep 0.5
done

# exec replaces this shell with socat so signals go straight to the bridge.
echo "[Agent] Ready on port $TCP_PORT"
exec socat TCP-LISTEN:$TCP_PORT,bind=0.0.0.0,reuseaddr,fork UNIX-CONNECT:$SOCKET_FILE
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Syncs the version from package.json to all other config files.
 * Run this script before building or releasing.
 *
 * package.json is the single source of truth; cli/Cargo.toml is rewritten to
 * match and cli/Cargo.lock is refreshed via `cargo update` when needed.
 * Exits 1 if Cargo.toml has no version field.
 */

import { execSync } from "child_process";
import { readFileSync, writeFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

const __dirname = dirname(fileURLToPath(import.meta.url));
const rootDir = join(__dirname, "..");
const cliDir = join(rootDir, "cli");

// Read version from package.json (single source of truth)
const { version } = JSON.parse(
  readFileSync(join(rootDir, "package.json"), "utf-8")
);

console.log(`Syncing version ${version} to all config files...`);

// --- cli/Cargo.toml ---------------------------------------------------------
const cargoTomlPath = join(cliDir, "Cargo.toml");
const cargoToml = readFileSync(cargoTomlPath, "utf-8");
// First `version = "..."` line; in a conventional Cargo.toml that is the
// [package] version.
const versionLine = /^version\s*=\s*"[^"]*"/m;
const wanted = `version = "${version}"`;

const current = cargoToml.match(versionLine)?.[0];
let lockNeedsUpdate = false;

if (current === undefined) {
  console.error("  Could not find version field in cli/Cargo.toml");
  process.exit(1);
} else if (current === wanted) {
  console.log(`  cli/Cargo.toml already up to date`);
} else {
  writeFileSync(cargoTomlPath, cargoToml.replace(versionLine, wanted));
  console.log(`  Updated cli/Cargo.toml: ${current} -> ${wanted}`);
  lockNeedsUpdate = true;
}

// --- cli/Cargo.lock ---------------------------------------------------------
if (lockNeedsUpdate) {
  // Try --offline first (fast, no network); fall back to a normal update,
  // since the package may not be in the local cargo cache.
  const attempts = [
    "cargo update -p agent-browser --offline",
    "cargo update -p agent-browser",
  ];
  let updated = false;
  let lastError;
  for (const cmd of attempts) {
    try {
      execSync(cmd, { cwd: cliDir, stdio: "pipe" });
      updated = true;
      break;
    } catch (e) {
      lastError = e;
    }
  }
  if (updated) {
    console.log(`  Updated cli/Cargo.lock`);
  } else {
    // Non-fatal: the lock file can be regenerated by the next cargo build.
    console.error(`  Warning: Could not update Cargo.lock: ${lastError.message}`);
  }
}

console.log("Version sync complete.");
|