jumpy-lion 0.1.5 → 0.1.6-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "jumpy-lion",
3
3
  "description": "",
4
4
  "type": "module",
5
- "version": "0.1.5",
5
+ "version": "0.1.6-beta.0",
6
6
  "engines": {
7
7
  "node": ">=20.0.0"
8
8
  },
@@ -21,7 +21,8 @@
21
21
  "./package.json": "./package.json"
22
22
  },
23
23
  "files": [
24
- "dist"
24
+ "dist",
25
+ "scripts/postinstall.cjs"
25
26
  ],
26
27
  "license": "Apache-2.0",
27
28
  "scripts": {
@@ -30,6 +31,9 @@
30
31
  "compile": "tsc -p tsconfig.build.json",
31
32
  "copy-extension": "cp -r src/fingerprinting/all-fingerprint-defender dist/fingerprinting/ && cp -r src/fingerprinting/anti-webgpu dist/fingerprinting/",
32
33
  "build": "npm run clean && npm run compile && npm run copy-extension",
34
+ "package:browser": "bash browser/scripts/package-binary.sh",
35
+ "release:browser": "node scripts/release-browser.cjs",
36
+ "postinstall": "node scripts/postinstall.cjs",
33
37
  "ci:build": "turbo run build --cache-dir=\".turbo\"",
34
38
  "test:unit": "vitest run unit",
35
39
  "test": "vitest run --silent",
@@ -0,0 +1,242 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * postinstall.cjs — Download the pre-built anti-detect browser binary
4
+ *
5
+ * Runs automatically on `npm install`. Downloads the correct platform archive
6
+ * from the public Apify key-value store `jumpy-lion` (no auth required) and
7
+ * extracts it into `.browser/` inside the package root.
8
+ *
9
+ * Environment variables:
10
+ * SKIP_BROWSER_DOWNLOAD=1 Skip entirely (e.g. CI that ships its own Chrome)
11
+ * BROWSER_DOWNLOAD_URL Override the download URL verbatim (mirrors,
12
+ * staging, offline envs). When set, no other env
13
+ * vars are consulted.
14
+ *
15
+ * If the download fails for any reason (network error, bad override URL,
16
+ * asset missing) postinstall prints a warning and exits 0 — the crawler will
17
+ * fall back to the system Chrome at runtime. `npm install` never fails hard.
18
+ *
19
+ * Supported platforms:
20
+ * linux-x64 → chrome-v<VERSION>-linux-x64.tar.gz
21
+ * darwin-arm64 → chrome-v<VERSION>-darwin-arm64.tar.gz
22
+ * darwin-x64 → chrome-v<VERSION>-darwin-x64.tar.gz
23
+ * win32-x64 → chrome-v<VERSION>-win32-x64.zip
24
+ */
25
+
26
+ 'use strict';
27
+
28
+ const https = require('https');
29
+ const http = require('http');
30
+ const fs = require('fs');
31
+ const path = require('path');
32
+ const { spawnSync } = require('child_process');
33
+
34
// ─── Config ──────────────────────────────────────────────────────────────────

// Public Apify key-value store. `generalAccess: ANYONE_WITH_URL` means record
// reads do not require an Apify token. If we ever rotate the store, update
// both this ID and the matching upload target in scripts/release-browser.cjs.
// NOTE(review): records fetched from this store are executed as a browser
// binary with no checksum/signature verification — confirm this is intended.
const KV_STORE_ID = '6R1GXIyIQVzdYnGWd';
const KV_STORE_URL = `https://api.apify.com/v2/key-value-stores/${KV_STORE_ID}/records`;

// Browser archives are versioned in lock-step with this package: the archive
// for package vX.Y.Z is named chrome-vX.Y.Z-<platform>.<ext> (see archiveName).
const PKG_VERSION = require('../package.json').version;
// Extraction target: `.browser/` directly inside the package root.
const INSTALL_DIR = path.join(__dirname, '..', '.browser');
44
+
45
// Build the release-archive filename for a platform key. Windows releases are
// zipped; every other platform ships a gzipped tarball. Must stay in sync with
// the filenames produced by browser/scripts/package-binary.sh.
function archiveName(platformKey, version) {
  if (platformKey === 'win32-x64') {
    return `chrome-v${version}-${platformKey}.zip`;
  }
  return `chrome-v${version}-${platformKey}.tar.gz`;
}
50
+
51
// Fall-back check for binaries bundled inside the npm tarball itself (beta
// releases that ship the full binary, see browser/BUILD.md § Beta).
// All bundled binaries live under <package root>/browser/dist/<platform>/.
const bundledPath = (...segments) =>
  path.join(__dirname, '..', 'browser', 'dist', ...segments);

const BUNDLED_BINARY_PATHS = {
  'linux-x64': bundledPath('linux-x64', 'chrome'),
  'darwin-arm64': bundledPath('darwin-arm64', 'Chromium.app', 'Contents', 'MacOS', 'Chromium'),
  'darwin-x64': bundledPath('darwin-x64', 'Chromium.app', 'Contents', 'MacOS', 'Chromium'),
  'win32-x64': bundledPath('win32-x64', 'chrome.exe'),
};
59
+
60
+ // ─── Main ─────────────────────────────────────────────────────────────────────
61
+
62
/**
 * Entry point for the postinstall step.
 *
 * Decides whether a browser download is needed (opt-out env var, bundled beta
 * binary, unsupported platform, already-installed version), then downloads and
 * extracts the platform archive into INSTALL_DIR. All failures are logged as
 * warnings only — `npm install` must never fail because of this script.
 *
 * Fix over the previous revision: the downloaded archive is now deleted in a
 * `finally` block. Before, the success-path `fs.unlinkSync(assetPath)` ran
 * inside the `try`, so a cleanup error was misreported as a download failure,
 * and the failure path duplicated the same cleanup.
 */
async function main() {
  // Explicit opt-out, e.g. CI images that ship their own Chrome.
  if (process.env.SKIP_BROWSER_DOWNLOAD === '1') {
    console.log('[cdp-crawler] SKIP_BROWSER_DOWNLOAD=1 — skipping browser download');
    return;
  }

  const platformKey = `${process.platform}-${process.arch}`;

  // Bundled (beta) binary takes precedence over any download.
  const bundledBinary = BUNDLED_BINARY_PATHS[platformKey];
  if (bundledBinary && fs.existsSync(bundledBinary)) {
    console.log('[cdp-crawler] Browser binary found in browser/dist/ — skipping download.');
    return;
  }

  // The bundled-paths table doubles as the supported-platform list: no entry
  // means there is no pre-built archive for this platform either.
  if (!bundledBinary) {
    console.warn(`[cdp-crawler] No pre-built browser for platform: ${platformKey}`);
    console.warn('[cdp-crawler] Will fall back to system Chrome on first use.');
    return;
  }

  // Already installed at the correct version → skip.
  const versionFile = path.join(INSTALL_DIR, '.version');
  const installedVersion = fs.existsSync(versionFile)
    ? fs.readFileSync(versionFile, 'utf8').trim()
    : null;
  if (installedVersion === PKG_VERSION) {
    console.log(`[cdp-crawler] Anti-detect browser v${PKG_VERSION} already installed.`);
    return;
  }

  const name = archiveName(platformKey, PKG_VERSION);
  // BROWSER_DOWNLOAD_URL overrides the store URL verbatim (mirrors, staging).
  const downloadUrl = process.env.BROWSER_DOWNLOAD_URL
    || `${KV_STORE_URL}/${name}`;

  console.log(`[cdp-crawler] Downloading anti-detect browser for ${platformKey}...`);
  console.log(`[cdp-crawler] URL: ${downloadUrl}`);

  fs.mkdirSync(INSTALL_DIR, { recursive: true });
  const assetPath = path.join(INSTALL_DIR, name);

  try {
    await download(downloadUrl, assetPath);
    console.log(`[cdp-crawler] Downloaded: ${assetPath}`);

    await extract(assetPath, INSTALL_DIR, platformKey);
    console.log(`[cdp-crawler] Extracted to: ${INSTALL_DIR}`);

    // Archives do not reliably preserve the executable bit on POSIX.
    if (process.platform !== 'win32') {
      setExecutablePermissions(INSTALL_DIR, platformKey);
    }

    // Write the version stamp last, so a partial install is retried next time.
    fs.writeFileSync(versionFile, PKG_VERSION);
    console.log(`[cdp-crawler] Anti-detect browser v${PKG_VERSION} installed successfully.`);
  } catch (err) {
    console.warn(`[cdp-crawler] Browser download failed: ${err.message}`);
    console.warn('[cdp-crawler] Will fall back to system Chrome on first use.');
  } finally {
    // The archive is an intermediate artifact on both the success and the
    // failure path — always remove it, and never let cleanup itself throw.
    if (fs.existsSync(assetPath)) {
      try { fs.unlinkSync(assetPath); } catch { /* best effort */ }
    }
  }
}
126
+
127
+ // ─── Helpers ──────────────────────────────────────────────────────────────────
128
+
129
/**
 * Download `url` to `destPath`, following up to 5 redirects.
 *
 * The KV store record endpoint returns a 302 to a signed S3 URL; no auth
 * header is needed on either hop for a public store.
 *
 * Fixes over the previous revision:
 *  - 308 (Permanent Redirect) is followed like 301/302/307.
 *  - A redirect limit prevents infinite recursion on redirect cycles.
 *  - Error paths (404, non-200, missing location) now drain the response and
 *    destroy the write stream instead of leaking both.
 *  - `res` gets its own error handler — `pipe` does not forward source-stream
 *    errors to the destination.
 *  - The promise resolves only after `file.close()` has completed.
 *
 * NOTE(review): the downloaded archive is not verified against a checksum or
 * signature before extraction — consider publishing digests alongside the
 * archives and checking them here.
 *
 * @param {string} url      Initial download URL.
 * @param {string} destPath Where the archive is written.
 * @returns {Promise<void>} Resolves once the file is fully written and closed.
 */
function download(url, destPath) {
  const MAX_REDIRECTS = 5;

  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream(destPath);

    // Single failure funnel: tear down the write stream, then reject.
    const fail = (err) => {
      file.destroy();
      reject(err);
    };

    const request = (requestUrl, redirectsLeft) => {
      const client = requestUrl.startsWith('https:') ? https : http;
      const headers = { 'User-Agent': 'cdp-crawler-postinstall' };

      client.get(requestUrl, { headers }, (res) => {
        const { statusCode } = res;

        if ([301, 302, 307, 308].includes(statusCode)) {
          const redirectUrl = res.headers.location;
          res.resume(); // discard the redirect body so the socket is released
          if (!redirectUrl) {
            fail(new Error(`Redirect with no location header (status ${statusCode})`));
            return;
          }
          if (redirectsLeft <= 0) {
            fail(new Error(`Too many redirects fetching ${url}`));
            return;
          }
          request(redirectUrl, redirectsLeft - 1);
          return;
        }

        if (statusCode === 404) {
          res.resume();
          fail(new Error(
            `Asset not found (404). The release archive for v${PKG_VERSION} ` +
            `has not been published to the public Apify store yet.\n` +
            `Expected: ${url}\n` +
            `To build locally: npm run build:browser`,
          ));
          return;
        }

        if (statusCode !== 200) {
          res.resume();
          fail(new Error(`HTTP ${statusCode} from ${requestUrl}`));
          return;
        }

        // Coarse progress log: one line per ~10% when the size is known.
        const totalBytes = parseInt(res.headers['content-length'] || '0', 10);
        let receivedBytes = 0;
        let lastLogPct = 0;

        res.on('data', (chunk) => {
          receivedBytes += chunk.length;
          if (totalBytes > 0) {
            const pct = Math.floor(receivedBytes / totalBytes * 100);
            if (pct >= lastLogPct + 10) {
              process.stdout.write(`\r[cdp-crawler] Downloading... ${pct}%`);
              lastLogPct = pct;
            }
          }
        });

        res.on('error', fail);
        res.pipe(file);
        file.on('finish', () => {
          // Resolve only once the descriptor is actually closed.
          file.close((closeErr) => {
            if (closeErr) {
              fail(closeErr);
              return;
            }
            process.stdout.write('\n');
            resolve();
          });
        });
        file.on('error', fail);
      }).on('error', fail);
    };

    request(url, MAX_REDIRECTS);
  });
}
195
+
196
/**
 * Extract the downloaded archive into `destDir`.
 *
 * win32 archives are .zip (PowerShell `Expand-Archive`); all other platforms
 * ship .tar.gz (system `tar`). Output goes to stdio so extraction errors are
 * visible in the npm install log.
 *
 * Fix over the previous revision: `spawnSync` reports a failure to launch the
 * tool via `result.error` (with `result.status === null`); that error is now
 * attached as the `cause` of the thrown Error instead of being dropped.
 *
 * @param {string} archivePath Absolute path of the downloaded archive.
 * @param {string} destDir     Directory the archive is unpacked into.
 * @param {string} platformKey `${process.platform}-${process.arch}`.
 * @throws {Error} When the extraction tool cannot be spawned or exits non-zero.
 */
async function extract(archivePath, destDir, platformKey) {
  if (platformKey === 'win32-x64') {
    const result = spawnSync('powershell', [
      '-NoProfile', '-NonInteractive', '-Command',
      `Expand-Archive -Force -Path "${archivePath}" -DestinationPath "${destDir}"`,
    ], { stdio: 'inherit' });
    if (result.error || result.status !== 0) {
      throw new Error('Failed to extract zip with PowerShell', { cause: result.error });
    }
  } else {
    const result = spawnSync('tar', ['-xzf', archivePath, '-C', destDir], { stdio: 'inherit' });
    if (result.error || result.status !== 0) {
      throw new Error('Failed to extract tar.gz', { cause: result.error });
    }
  }
}
212
+
213
/**
 * Restore the executable bit (0o755) on the extracted browser binaries.
 *
 * Only relevant on POSIX platforms; each platform's executables are listed as
 * path segments relative to the install directory. Missing files are skipped
 * silently, and an unknown platform key is a no-op.
 *
 * @param {string} installDir  Directory the archive was extracted into.
 * @param {string} platformKey `${process.platform}-${process.arch}`.
 */
function setExecutablePermissions(installDir, platformKey) {
  const relativeExecutables = {
    'linux-x64': [
      ['linux-x64', 'chrome'],
      ['linux-x64', 'chrome_sandbox'],
    ],
    'darwin-arm64': [
      ['darwin-arm64', 'Chromium.app', 'Contents', 'MacOS', 'Chromium'],
    ],
    'darwin-x64': [
      ['darwin-x64', 'Chromium.app', 'Contents', 'MacOS', 'Chromium'],
    ],
  };

  for (const segments of relativeExecutables[platformKey] || []) {
    const exePath = path.join(installDir, ...segments);
    if (!fs.existsSync(exePath)) {
      continue;
    }
    fs.chmodSync(exePath, 0o755);
    console.log(`[cdp-crawler] Set executable: ${exePath}`);
  }
}
235
+
236
+ // ─── Entry ────────────────────────────────────────────────────────────────────
237
+
238
// Kick off the install. Errors are logged but never propagated: the process
// always exits 0, so `npm install` cannot fail on browser download issues.
(async () => {
  try {
    await main();
  } catch (err) {
    console.error('[cdp-crawler] postinstall error:', err);
    process.exit(0);
  }
})();