registry-sync 8.0.0 → 8.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/sync +1 -1
- package/package.json +11 -10
- package/src/client.js +27 -0
- package/src/download.js +89 -0
- package/src/index.js +46 -0
- package/src/integrity.js +20 -0
- package/src/metadata.js +59 -0
- package/src/{normalize-yarn-pattern.ts → normalize-yarn-pattern.js} +26 -35
- package/src/pregyp.js +84 -0
- package/src/resolve.js +205 -0
- package/src/sync.js +17 -0
- package/src/client.ts +0 -35
- package/src/download.ts +0 -141
- package/src/index.ts +0 -75
- package/src/integrity.ts +0 -27
- package/src/metadata.ts +0 -78
- package/src/pregyp.ts +0 -170
- package/src/resolve.ts +0 -276
- package/src/sync.ts +0 -21
- package/src/types.d.ts +0 -99
package/bin/sync
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "registry-sync",
|
|
3
|
-
"version": "8.
|
|
3
|
+
"version": "8.2.0",
|
|
4
4
|
"description": "synchronize a remote npm registry for private use",
|
|
5
5
|
"repository": "https://github.com/heikkipora/registry-sync",
|
|
6
6
|
"type": "module",
|
|
@@ -13,24 +13,25 @@
|
|
|
13
13
|
"fix-eslint": "eslint --fix src test release-test/server/src",
|
|
14
14
|
"test": "mocha --timeout 120000 test/*.ts",
|
|
15
15
|
"typecheck": "tsc",
|
|
16
|
-
"release-test": "cd release-test && ./run-sync-install-cycle.sh"
|
|
16
|
+
"release-test": "cd release-test && ./run-sync-install-cycle.sh",
|
|
17
|
+
"refresh-manifests": "cd test/manifests && ./refresh-manifests.sh"
|
|
17
18
|
},
|
|
18
19
|
"author": "Heikki Pora",
|
|
19
20
|
"license": "MIT",
|
|
20
21
|
"dependencies": {
|
|
21
22
|
"@yarnpkg/lockfile": "1.1.0",
|
|
22
|
-
"axios": "1.13.
|
|
23
|
-
"commander": "14.0.
|
|
24
|
-
"lru-cache": "11.2.
|
|
25
|
-
"semver": "7.7.
|
|
23
|
+
"axios": "1.13.4",
|
|
24
|
+
"commander": "14.0.3",
|
|
25
|
+
"lru-cache": "11.2.5",
|
|
26
|
+
"semver": "7.7.4",
|
|
26
27
|
"ssri": "13.0.0",
|
|
27
28
|
"tar-fs": "3.1.1"
|
|
28
29
|
},
|
|
29
30
|
"devDependencies": {
|
|
30
31
|
"@types/chai": "5.2.3",
|
|
31
|
-
"@types/lodash": "4.17.
|
|
32
|
+
"@types/lodash": "4.17.23",
|
|
32
33
|
"@types/mocha": "10.0.10",
|
|
33
|
-
"@types/node": "
|
|
34
|
+
"@types/node": "22.18.0",
|
|
34
35
|
"@types/semver": "7.7.1",
|
|
35
36
|
"@types/ssri": "7.1.5",
|
|
36
37
|
"@types/tar-fs": "2.0.4",
|
|
@@ -39,10 +40,10 @@
|
|
|
39
40
|
"eslint": "9.39.2",
|
|
40
41
|
"eslint-plugin-mocha": "11.2.0",
|
|
41
42
|
"express": "5.2.1",
|
|
42
|
-
"globals": "17.
|
|
43
|
+
"globals": "17.3.0",
|
|
43
44
|
"mocha": "11.7.5",
|
|
44
45
|
"typescript": "5.9.3",
|
|
45
|
-
"typescript-eslint": "8.
|
|
46
|
+
"typescript-eslint": "8.54.0"
|
|
46
47
|
},
|
|
47
48
|
"keywords": [
|
|
48
49
|
"registry",
|
package/src/client.js
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import * as https from 'https';
|
|
2
|
+
import axios from 'axios';
|
|
3
|
+
import { LRUCache } from 'lru-cache';
|
|
4
|
+
const metadataCache = new LRUCache({ max: 100 });
|
|
5
|
+
const client = axios.create({
|
|
6
|
+
httpsAgent: new https.Agent({ keepAlive: true }),
|
|
7
|
+
timeout: 30 * 1000
|
|
8
|
+
});
|
|
9
|
+
// Fetch JSON from `url`, serving repeated requests from the in-memory LRU
// cache. A structured clone is always returned so callers may freely mutate
// the result without corrupting the cached value.
export async function fetchJsonWithCacheCloned(url, token) {
  const hit = metadataCache.get(url);
  if (hit) {
    return structuredClone(hit);
  }
  const fresh = await fetch(url, 'json', token);
  metadataCache.set(url, fresh);
  return structuredClone(fresh);
}
|
|
18
|
+
// Fetch raw binary content from `url` (never cached).
export function fetchBinaryData(url, token) {
  const responseType = 'arraybuffer';
  return fetch(url, responseType, token);
}
|
|
21
|
+
// Shared GET helper over the keep-alive axios client.
// Note: this module-local `fetch` shadows the global fetch on purpose.
// An Authorization header is attached only when a non-empty token is given.
async function fetch(url, responseType, token) {
  const config = token === ''
    ? { responseType }
    : { responseType, headers: { authorization: `Bearer ${token}` } };
  const response = await client.get(url, config);
  return response.data;
}
|
package/src/download.js
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import * as semver from 'semver';
|
|
4
|
+
import * as url from 'url';
|
|
5
|
+
import assert from 'assert';
|
|
6
|
+
import { downloadPrebuiltBinaries, hasPrebuiltBinaries } from "./pregyp.js";
|
|
7
|
+
import { fetchBinaryData, fetchJsonWithCacheCloned } from "./client.js";
|
|
8
|
+
import { rewriteMetadataInTarball, rewriteVersionMetadata, tarballFilename } from "./metadata.js";
|
|
9
|
+
import { verifyIntegrity } from "./integrity.js";
|
|
10
|
+
// Download every package in `packages`, one at a time (each iteration is
// awaited before the next starts).
export async function downloadAll(packages, { localUrl, prebuiltBinaryProperties, registryUrl, registryToken, rootFolder, enforceTarballsOverHttps }) {
  for (const pkg of packages) {
    await download(registryUrl, registryToken, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, pkg);
  }
}
|
|
16
|
+
// Mirror a single package version: fetch registry metadata, download and
// verify the tarball, optionally fetch node-pre-gyp binaries, then rewrite
// the metadata to point at the local mirror and persist everything.
async function download(registryUrl, registryToken, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, { name, version }) {
  const registryMetadata = await fetchMetadataCloned(name, registryUrl, registryToken);
  const versionMetadata = registryMetadata.versions[version];
  if (!versionMetadata) {
    throw new Error(`Unknown package version ${name}@${version}`);
  }
  const localFolder = await ensureLocalFolderExists(name, rootFolder);
  let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps, registryToken);
  if (hasPrebuiltBinaries(versionMetadata)) {
    // Binaries are stored under <package>/<version>/ next to the tarball
    const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
    await downloadPrebuiltBinaries(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
    // Re-pack the tarball so its embedded package.json points at the mirror
    data = await rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder);
  }
  await saveTarball(versionMetadata, data, localFolder);
  // Mutates versionMetadata in place before it is written to index.json
  rewriteVersionMetadata(versionMetadata, data, localUrl);
  await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
}
|
|
33
|
+
// Fetch the package tarball and verify it against the registry-provided
// integrity values before returning the raw bytes.
async function downloadTarball({ _id: id, dist }, enforceTarballsOverHttps, registryToken) {
  let tarballUrl = dist.tarball;
  if (enforceTarballsOverHttps) {
    tarballUrl = tarballUrl.replace('http://', 'https://');
  }
  const data = await fetchBinaryData(tarballUrl, registryToken);
  verifyIntegrity(data, id, dist);
  return data;
}
|
|
39
|
+
// Persist the tarball bytes into the package's local folder.
function saveTarball({ name, version }, data, localFolder) {
  const filePath = tarballPath(name, version, localFolder);
  return fs.promises.writeFile(filePath, data);
}
|
|
42
|
+
// Merge a freshly downloaded version into the package's local index.json:
// record the version's metadata and publish time, then recompute dist-tags.
// NOTE(review): the registryUrl parameter is currently unused.
async function updateMetadata(versionMetadata, defaultMetadata, registryUrl, localFolder) {
  const { version } = versionMetadata;
  const localMetadataPath = path.join(localFolder, 'index.json');
  const localMetadata = await loadMetadata(localMetadataPath, defaultMetadata);
  localMetadata.versions[version] = versionMetadata;
  localMetadata.time[version] = defaultMetadata.time[version];
  localMetadata['dist-tags'] = collectDistTags(localMetadata, defaultMetadata);
  await saveMetadata(localMetadataPath, localMetadata);
}
|
|
51
|
+
// Collect dist-tags entries (name -> version) from registry metadata,
// keeping only tags that point to versions available locally.
// 'latest' is always overridden with the highest locally available version,
// since the registry's own 'latest' may not have been mirrored.
function collectDistTags(localMetadata, defaultMetadata) {
  const availableVersions = Object.keys(localMetadata.versions);
  const distTags = {};
  for (const [tag, version] of Object.entries(defaultMetadata['dist-tags'])) {
    if (availableVersions.includes(version)) {
      distTags[tag] = version;
    }
  }
  const latest = availableVersions.sort(semver.compare).pop();
  assert(latest, 'At least one version should be locally available to determine "latest" dist-tag');
  distTags.latest = latest;
  return distTags;
}
|
|
65
|
+
// Read previously stored package metadata from disk. Any failure (typically
// the file not existing yet on first sync, or unparsable JSON) falls back to
// a copy of the registry metadata with empty tag/time/version maps.
async function loadMetadata(path, defaultMetadata) {
  try {
    return JSON.parse(await fs.promises.readFile(path, 'utf8'));
  }
  catch {
    return { ...defaultMetadata, 'dist-tags': {}, time: {}, versions: {} };
  }
}
|
|
74
|
+
// Serialize metadata as pretty-printed (2-space) JSON and write it to `path`.
function saveMetadata(path, metadata) {
  return fs.promises.writeFile(path, JSON.stringify(metadata, null, 2), 'utf8');
}
|
|
78
|
+
// Absolute path of the locally stored tarball for name@version.
function tarballPath(name, version, localFolder) {
  const filename = tarballFilename(name, version);
  return path.join(localFolder, filename);
}
|
|
81
|
+
// Create (if necessary) the local folder for `name` under rootFolder and
// return its absolute path.
async function ensureLocalFolderExists(name, rootFolder) {
  const target = path.resolve(rootFolder, name);
  await fs.promises.mkdir(target, { recursive: true });
  return target;
}
|
|
86
|
+
// Fetch full registry metadata for a package (cached, returned as a clone).
// Scoped names ("@scope/pkg") need their slash percent-encoded for the
// registry URL path.
// NOTE(review): url.resolve is a legacy API (deprecated in the Node docs);
// migrating to `new URL(...)` would need behavior verification first.
function fetchMetadataCloned(name, registryUrl, registryToken) {
  const urlSafeName = name.replace(/\//g, '%2f');
  return fetchJsonWithCacheCloned(url.resolve(registryUrl, urlSafeName), registryToken);
}
|
package/src/index.js
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import * as fs from 'fs';
import * as path from 'path';
import { Command } from 'commander';
import { synchronize } from "./sync.js";
import { URL } from 'url';
// CLI entry point: parse arguments and run a one-shot synchronization.
// Own version is read from package.json so --version stays accurate.
const { version } = JSON.parse(fs.readFileSync(path.join(import.meta.dirname, '..', 'package.json'), 'utf-8'));
const program = new Command();
program
  .version(version)
  .requiredOption('--root <path>', 'Path to save NPM package tarballs and metadata to')
  .requiredOption('--manifest <file>', 'Path to a package-lock.json or yarn.lock file to use as catalog for mirrored NPM packages.')
  .requiredOption('--localUrl <url>', 'URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)')
  .option('--binaryAbi <list>', 'Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/')
  .option('--binaryArch <list>', 'Comma-separated list of CPU architectures to download pre-built binaries for. Valid values: arm, ia32, and x64')
  .option('--binaryPlatform <list>', 'Comma-separated list of OS platforms to download pre-built binaries for. Valid values: linux, darwin, win32, sunos, freebsd, openbsd, and aix')
  .option('--registryUrl [url]', 'Optional URL to use as NPM registry when fetching packages. Default value is https://registry.npmjs.org')
  .option('--registryToken [string]', 'Optional Bearer token for the registry.')
  .option('--dontEnforceHttps', 'Disable the default behavior of downloading tarballs over HTTPS (will use whichever protocol is defined in the registry metadata)')
  .option('--includeDev', 'Include also packages found from devDependencies section of the --manifest')
  .option('--dryRun', 'Print packages that would be downloaded but do not download them')
  .parse(process.argv);
const rawOptions = program.opts();
// use current (abi,arch,platform) triplet as default if none is specified
// so the user doesn't have to look them up if build is always done on the
// same kind of machine
const binaryAbi = rawOptions.binaryAbi || process.versions.modules;
const binaryArch = rawOptions.binaryArch || process.arch;
const binaryPlatform = rawOptions.binaryPlatform || process.platform;
const abis = binaryAbi.split(',').map(Number);
const architectures = binaryArch.split(',');
const platforms = binaryPlatform.split(',');
// Cartesian product of abi x arch x platform
const prebuiltBinaryProperties = abis
  .map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat())
  .flat();
const options = {
  localUrl: new URL(rawOptions.localUrl),
  manifest: rawOptions.manifest,
  prebuiltBinaryProperties,
  registryUrl: rawOptions.registryUrl || 'https://registry.npmjs.org',
  registryToken: rawOptions.registryToken || '',
  rootFolder: rawOptions.root,
  enforceTarballsOverHttps: Boolean(!rawOptions.dontEnforceHttps),
  includeDevDependencies: Boolean(rawOptions.includeDev),
  dryRun: Boolean(rawOptions.dryRun)
};
// NOTE(review): the promise returned by synchronize is neither awaited nor
// caught here, so a failure surfaces as an unhandled rejection.
synchronize(options);
|
package/src/integrity.js
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import * as ssri from 'ssri';
|
|
2
|
+
// Verify tarball bytes against registry-provided integrity metadata.
// Prefers the SRI `integrity` string; falls back to the legacy SHA1 `shasum`.
// Throws if neither value is present or if verification fails.
export function verifyIntegrity(data, id, { integrity, shasum }) {
  if (!integrity && !shasum) {
    throw new Error(`Integrity values not present in metadata for ${id}`);
  }
  if (integrity) {
    if (!ssri.checkData(data, integrity)) {
      throw new Error(`Integrity check failed for ${id}`);
    }
  }
  // Strict comparison; also fixes the duplicated wording of the original
  // error message ("failed for failed for").
  else if (sha1(data) !== shasum) {
    throw new Error(`Integrity check with SHA1 failed for ${id}`);
  }
}
|
|
15
|
+
// Hex-encoded SHA-1 digest of `data` (legacy npm shasum format).
export function sha1(data) {
  const integrity = ssri.fromData(data, { algorithms: ['sha1'] });
  return integrity.hexDigest();
}
|
|
18
|
+
// SRI string ("sha512-...") for `data`.
export function sha512(data) {
  const integrity = ssri.fromData(data, { algorithms: ['sha512'] });
  return integrity.toString();
}
|
package/src/metadata.js
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import * as tar from 'tar-fs';
|
|
4
|
+
import * as zlib from 'zlib';
|
|
5
|
+
import { hasPrebuiltBinaries } from "./pregyp.js";
|
|
6
|
+
import { Readable } from 'stream';
|
|
7
|
+
import { sha1, sha512 } from "./integrity.js";
|
|
8
|
+
// Point the version's metadata at the local mirror: rewrite the tarball URL
// and, for packages with node-pre-gyp binaries, the binary host/path.
// Mutates versionMetadata in place.
export function rewriteVersionMetadata(versionMetadata, data, localUrl) {
  versionMetadata.dist.tarball = localTarballUrl(versionMetadata, localUrl);
  if (hasPrebuiltBinaries(versionMetadata)) {
    versionMetadata.binary.host = localUrl.origin;
    versionMetadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
    // The tarball was re-packed with an edited package.json, so the original
    // registry integrity/shasum no longer match — recompute from `data`.
    versionMetadata.dist.integrity = sha512(data);
    versionMetadata.dist.shasum = sha1(data);
  }
}
|
|
17
|
+
// Re-pack the tarball with its embedded package.json updated so pre-built
// binary downloads point at the local mirror. Returns the new tarball bytes.
// The temporary extraction folder is now removed even when extraction,
// manifest rewriting or re-compression throws (previously it leaked).
export async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder) {
  const tmpFolder = path.join(localFolder, '.tmp');
  await fs.promises.mkdir(tmpFolder, { recursive: true });
  try {
    await extractTgz(data, tmpFolder);
    const manifestPath = path.join(tmpFolder, 'package', 'package.json');
    const json = await fs.promises.readFile(manifestPath, 'utf8');
    const metadata = JSON.parse(json);
    metadata.binary.host = localUrl.origin;
    metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
    await fs.promises.writeFile(manifestPath, JSON.stringify(metadata, null, 2));
    return await compressTgz(tmpFolder);
  }
  finally {
    await fs.promises.rm(tmpFolder, { recursive: true });
  }
}
|
|
31
|
+
// URL path prefix (with trailing slash) under which a version's pre-built
// binaries are served from the local mirror.
function createPrebuiltBinaryRemotePath(url, versionMetadata) {
  const { name, version } = versionMetadata;
  return `${removeTrailingSlash(url.pathname)}/${name}/${version}/`;
}
|
|
34
|
+
// Gunzip + untar `data` into `folder`.
// Error handlers are attached to each stream in the pipeline: .pipe() does
// not forward 'error' events, so the original only observed tar extraction
// errors and a corrupt gzip stream crashed with an unhandled 'error' event.
export function extractTgz(data, folder) {
  return new Promise((resolve, reject) => {
    const gunzip = zlib.createGunzip();
    const extract = tar.extract(folder);
    Readable.from(data).pipe(gunzip).pipe(extract);
    gunzip.on('error', reject);
    extract.on('finish', resolve);
    extract.on('error', reject);
  });
}
|
|
41
|
+
// Tar + gzip the contents of `folder`, resolving with the whole archive as a
// single Buffer. Errors from the tar packer are now forwarded as well, since
// .pipe() does not propagate upstream 'error' events to the gzip stream.
function compressTgz(folder) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    const pack = tar.pack(folder);
    const tgz = pack.pipe(zlib.createGzip());
    pack.on('error', reject);
    tgz.on('data', (chunk) => chunks.push(chunk));
    tgz.on('end', () => resolve(Buffer.concat(chunks)));
    tgz.on('error', reject);
  });
}
|
|
50
|
+
// Full URL at which the mirrored tarball for name@version will be served.
function localTarballUrl({ name, version }, localUrl) {
  const basePath = removeTrailingSlash(localUrl.pathname);
  return `${localUrl.origin}${basePath}/${name}/${tarballFilename(name, version)}`;
}
|
|
53
|
+
// File name for a package tarball; slashes in scoped names become dashes
// ("@scope/pkg" -> "@scope-pkg-<version>.tgz").
export function tarballFilename(name, version) {
  const normalized = name.split('/').join('-');
  return `${normalized}-${version}.tgz`;
}
|
|
57
|
+
// Strip a single trailing '/' if present.
function removeTrailingSlash(str) {
  return str.endsWith('/') ? str.slice(0, -1) : str;
}
|
|
@@ -27,40 +27,31 @@ ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
|
27
27
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
28
28
|
*/
|
|
29
29
|
// From https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/util/normalize-pattern.js#L2
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
// if we're a scope then remove the @ and add it back later
|
|
41
|
-
let isScoped = false
|
|
42
|
-
if (name[0] === '@') {
|
|
43
|
-
isScoped = true
|
|
44
|
-
name = name.slice(1)
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
// take first part as the name
|
|
48
|
-
const parts = name.split('@')
|
|
49
|
-
if (parts.length > 1) {
|
|
50
|
-
name = parts.shift()!
|
|
51
|
-
range = parts.join('@')
|
|
52
|
-
|
|
53
|
-
if (range) {
|
|
54
|
-
hasVersion = true
|
|
55
|
-
} else {
|
|
56
|
-
range = '*'
|
|
30
|
+
// Split a yarn package pattern ("name@range", possibly "@scope/name@range")
// into its name and range parts.
// Ported from https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/util/normalize-pattern.js#L2
export function normalizeYarnPackagePattern(pattern) {
  let hasVersion = false;
  let range = 'latest';
  let name = pattern;
  // strip a scope's leading '@' so the '@' split below only sees separators
  const isScoped = name[0] === '@';
  if (isScoped) {
    name = name.slice(1);
  }
  // take first part as the name
  const parts = name.split('@');
  if (parts.length > 1) {
    name = parts.shift();
    range = parts.join('@');
    if (range) {
      hasVersion = true;
    }
    else {
      // pattern ended in a bare '@' — treat as any version
      range = '*';
    }
  }
  // add back @ scope prefix
  if (isScoped) {
    name = `@${name}`;
  }
  return { name, range, hasVersion };
}
|
package/src/pregyp.js
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import * as semver from 'semver';
|
|
4
|
+
import * as url from 'url';
|
|
5
|
+
import { fetchBinaryData } from "./client.js";
|
|
6
|
+
// True when the version metadata declares a complete node-pre-gyp `binary`
// section (host, module_name, package_name and remote_path all present).
export function hasPrebuiltBinaries(metadata) {
  const binary = metadata.binary;
  if (!binary) {
    return false;
  }
  return Boolean(binary.host && binary.module_name && binary.package_name && binary.remote_path);
}
|
|
13
|
+
// Download every pre-built binary variant for a version, sequentially.
// Packages without N-API get one binary per (abi, arch, platform) triple;
// N-API packages get one per napi_version per triple.
export async function downloadPrebuiltBinaries(versionMetadata, localFolder, prebuiltBinaryProperties) {
  const { binary, name, version } = versionMetadata;
  if (!binary.napi_versions) {
    for (const { abi, arch, platform } of prebuiltBinaryProperties) {
      await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch);
    }
    return;
  }
  for (const napiVersion of binary.napi_versions) {
    for (const { abi, arch, platform } of prebuiltBinaryProperties) {
      await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion);
    }
  }
}
|
|
27
|
+
// Fetch one pre-built binary and store it locally.
// 403/404 responses are deliberately ignored: pre-built binaries are
// commonly not published for every platform (and S3 answers 403 for a
// non-existent file). Any other error is logged and rethrown.
async function downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
  try {
    const data = await fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion);
    await fs.promises.writeFile(prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion), data);
  }
  catch (err) {
    const status = err.response?.status;
    const fileNotFoundError = status === 403 || status === 404;
    if (!fileNotFoundError) {
      console.error(`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`);
      throw err;
    }
  }
}
|
|
42
|
+
// Fetch the binary bytes from the upstream binary host (no auth token).
function fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion) {
  const binaryUrl = prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion);
  return fetchBinaryData(binaryUrl, '');
}
|
|
45
|
+
// Local file path for one pre-built binary variant.
function prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
  const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion);
  return path.join(localFolder, fileName);
}
|
|
48
|
+
// Remote URL of one pre-built binary variant on the upstream binary host.
// NOTE(review): url.resolve is a legacy (deprecated) Node API.
function prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion) {
  const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion).replace(/\/$/, '');
  const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion);
  return url.resolve(binary.host, `${remotePath}/${fileName}`);
}
|
|
53
|
+
// Expand the node-pre-gyp remote_path template for one binary variant.
function prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion) {
  const template = binary.remote_path;
  return formatPrebuilt(template, name, version, binary.module_name, abi, platform, arch, napiVersion);
}
|
|
56
|
+
// Expand the node-pre-gyp package_name template for one binary variant.
function prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion) {
  const template = binary.package_name;
  return formatPrebuilt(template, name, version, binary.module_name, abi, platform, arch, napiVersion);
}
|
|
59
|
+
// Expand a node-pre-gyp template string ({name}, {version}, {platform}, ...)
// into a concrete path or file name.
// see node-pre-gyp: /lib/util/versioning.js for documentation of possible values
// NOTE(review): String.replace with a string pattern substitutes only the
// FIRST occurrence of each placeholder.
function formatPrebuilt(formatString, name, version, moduleName, abi, platform, arch, napiVersion) {
  const moduleVersion = semver.parse(version, false, true);
  const prerelease = (moduleVersion.prerelease || []).join('.');
  const build = (moduleVersion.build || []).join('.');
  const formatted = formatString
    .replace('{name}', name)
    .replace('{version}', version)
    .replace('{major}', moduleVersion.major.toString())
    .replace('{minor}', moduleVersion.minor.toString())
    .replace('{patch}', moduleVersion.patch.toString())
    .replace('{prerelease}', prerelease)
    .replace('{build}', build)
    .replace('{module_name}', moduleName)
    .replace('{node_abi}', `node-v${abi}`)
    .replace('{platform}', platform)
    .replace('{arch}', arch)
    .replace('{libc}', libc(platform))
    .replace('{configuration}', 'Release')
    .replace('{toolset}', '')
    // collapse duplicate slashes left behind by empty substitutions
    .replace(/[/]+/g, '/');
  // {napi_build_version} is only substituted for N-API builds
  return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString()) : formatted;
}
|
|
82
|
+
// node-pre-gyp {libc} substitution: glibc is assumed on linux, 'unknown'
// everywhere else.
function libc(platform) {
  if (platform === 'linux') {
    return 'glibc';
  }
  return 'unknown';
}
|
package/src/resolve.js
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as pathLib from 'path';
|
|
3
|
+
import * as readline from 'readline';
|
|
4
|
+
import * as url from 'url';
|
|
5
|
+
import assert, { deepStrictEqual } from 'assert';
|
|
6
|
+
import yarnLockfile from '@yarnpkg/lockfile';
|
|
7
|
+
import { normalizeYarnPackagePattern } from "./normalize-yarn-pattern.js";
|
|
8
|
+
const YARN_LOCK_FILENAME = 'yarn.lock';
|
|
9
|
+
// Merge newly synchronized dependencies into the cache file, de-duplicated
// and sorted by id, recording the binary properties they were fetched with.
export async function updateDependenciesCache(newDependencies, cacheFilePath, prebuiltBinaryProperties) {
  const { dependencies: cachedDependencies } = await loadCache(cacheFilePath);
  const dependencies = cachedDependencies
    .concat(newDependencies)
    .sort(sortById)
    .filter(uniqueById);
  const data = {
    dependencies,
    prebuiltBinaryProperties,
    prebuiltBinaryNApiSupport: true
  };
  return fs.promises.writeFile(cacheFilePath, JSON.stringify(data), 'utf8');
}
|
|
19
|
+
// Return the subset of `dependencies` that still needs to be downloaded.
// If the cached binary properties differ from the current ones (or the
// cache predates N-API support), everything is re-downloaded.
export async function dependenciesNotInCache(dependencies, cacheFilePath, prebuiltBinaryProperties) {
  const { dependencies: cachedDependencies, prebuiltBinaryProperties: cachedPrebuiltBinaryProperties, prebuiltBinaryNApiSupport } = await loadCache(cacheFilePath);
  if (cachedDependencies.length > 0 &&
    (!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)) {
    console.log(`Pre-built binary properties changed, re-downloading all current packages`);
    return dependencies;
  }
  // Otherwise only packages whose id is not yet cached are returned
  const packageIdsInCache = cachedDependencies.map(pkg => pkg.id);
  return dependencies.filter(pkg => !packageIdsInCache.includes(pkg.id));
}
|
|
29
|
+
// Load the sync cache file. Legacy V1 caches (a bare array) are migrated to
// the V2 shape; a missing or unreadable file yields an empty V2 cache.
async function loadCache(cacheFilePath) {
  let data;
  try {
    data = JSON.parse(await fs.promises.readFile(cacheFilePath, 'utf8'));
  }
  catch {
    // empty V2 cache
    return {
      dependencies: [],
      prebuiltBinaryProperties: [],
      prebuiltBinaryNApiSupport: true
    };
  }
  // Migrate V1 legacy cache file schema to V2
  if (Array.isArray(data)) {
    return {
      dependencies: data,
      prebuiltBinaryProperties: [],
      prebuiltBinaryNApiSupport: false
    };
  }
  return data;
}
|
|
51
|
+
// Heuristically detect yarn patterns that do NOT resolve to the npm registry
// (links, local files/tarballs, git hosts, shorthand "user/repo", raw URLs).
// Each test mirrors a specific yarn exotic resolver; see the inline links.
function isNonRegistryYarnPackagePattern(packagePattern) {
  if (
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/link-resolver.js#L14
    packagePattern.startsWith('link:') ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/file-resolver.js#L18
    packagePattern.startsWith('file:') ||
    /^\.{1,2}\//.test(packagePattern) ||
    pathLib.isAbsolute(packagePattern) ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/tarball-resolver.js#L15
    packagePattern.startsWith('http://') ||
    packagePattern.startsWith('https://') ||
    (packagePattern.indexOf('@') < 0 && (packagePattern.endsWith('.tgz') || packagePattern.endsWith('.tar.gz'))) ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/github-resolver.js#L6
    packagePattern.startsWith('github:') ||
    /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(packagePattern) ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gitlab-resolver.js#L6
    packagePattern.startsWith('gitlab:') ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/bitbucket-resolver.js#L6
    packagePattern.startsWith('bitbucket:') ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gist-resolver.js#L26
    packagePattern.startsWith('gist:') ||
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
    /^git:|^git\+.+:|^ssh:|^https?:.+\.git$|^https?:.+\.git#.+/.test(packagePattern)) {
    return true;
  }
  else {
    // Fall back to hostname matching for full git-host URLs of the form
    // <host>/<owner>/<repo>
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
    const { hostname, path } = url.parse(packagePattern);
    if (hostname && path && ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'].indexOf(hostname) >= 0) {
      return path.split('/').filter((p) => !!p).length === 2;
    }
  }
  return false;
}
|
|
85
|
+
// Extract the package name from an "npm:<name>[@<range>]" / "yarn:<...>"
// registry pattern.
// Regex from https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/registry-resolver.js#L12
function resolvePackageNameFromRegistryYarnPackagePattern(packagePattern) {
  const match = packagePattern.match(/^(\S+):(@?.*?)(@(.*?)|)$/);
  if (!match) {
    throw new Error(`Failed to resolve yarn package pattern ${packagePattern}, unrecognized format`);
  }
  return match[2];
}
|
|
95
|
+
// Map yarn.lock (pattern -> locked version) entries to registry packages
// ({id, name, version}), dropping non-registry patterns (git, file, link,
// tarball, ...).
function resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies) {
  const packages = yarnLockDependencies.reduce((filterMappedDependencies, { packagePattern, version }) => {
    if (isNonRegistryYarnPackagePattern(packagePattern)) {
      return filterMappedDependencies;
    }
    let packageName;
    if (packagePattern.startsWith('npm:') || packagePattern.startsWith('yarn:')) {
      packageName = resolvePackageNameFromRegistryYarnPackagePattern(packagePattern);
    }
    else {
      // Package pattern not yet recognized, continue with parsing logic from
      // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L99
      const { name: namePart, range: rangePart } = normalizeYarnPackagePattern(packagePattern);
      // The range part may itself be an exotic (non-registry) reference
      if (isNonRegistryYarnPackagePattern(rangePart)) {
        return filterMappedDependencies;
      }
      if (rangePart.startsWith('npm:') || rangePart.startsWith('yarn:')) {
        packageName = resolvePackageNameFromRegistryYarnPackagePattern(rangePart);
      }
      else {
        // Finally, we just assume that the pattern is a registry pattern,
        // see https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L119
        packageName = namePart;
      }
    }
    filterMappedDependencies.push({ id: `${packageName}@${version}`, name: packageName, version });
    return filterMappedDependencies;
  }, []);
  return packages;
}
|
|
125
|
+
// Parse dependencies from a package-lock.json. Only lockfile versions 2 and
// 3 are accepted (both carry the flat "packages" map this relies on).
async function parseDependenciesFromNpmLockFile(lockFilepath, includeDevDependencies) {
  const packageLock = JSON.parse(await fs.promises.readFile(lockFilepath, 'utf8'));
  // Absent lockfileVersion is treated as the (unsupported) v1 format
  const fileVersion = packageLock.lockfileVersion || 1;
  if (![2, 3].includes(fileVersion)) {
    throw new Error(`Unsupported package-lock.json version ${fileVersion}`);
  }
  const dependencies = collectNpmLockfileDependencies(packageLock, includeDevDependencies);
  return dependencies.map(({ name, version }) => ({ id: `${name}@${version}`, name, version }));
}
|
|
134
|
+
// Parse dependencies from a yarn.lock file. Only the v1 format is supported:
// the header is sniffed line-by-line first, then the whole file is handed to
// @yarnpkg/lockfile for parsing.
async function parseDependenciesFromYarnLockFile(lockFilepath) {
  const lockFileStream = fs.createReadStream(lockFilepath);
  const lockFileReadlineInterface = readline.createInterface({
    input: lockFileStream,
    crlfDelay: Infinity
  });
  for await (const line of lockFileReadlineInterface) {
    // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/lockfile/stringify.js#L111
    if (/# yarn lockfile v1\s*$/.test(line)) {
      // lockfile version 1 recognized
      break;
    }
    if (/^\s*$/.test(line) || /^\s*#/.test(line)) {
      // skip empty or comment lines
      continue;
    }
    // Any other line before the version marker means an unsupported format
    throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, unrecognized format, only version 1 is supported`);
  }
  lockFileStream.destroy();
  const lockfileContents = await fs.promises.readFile(lockFilepath, 'utf8');
  const { type: lockfileParseStatus, object: packagePatternToLockedVersion } = yarnLockfile.parse(lockfileContents);
  if (lockfileParseStatus !== 'success') {
    throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, parse status ${lockfileParseStatus}`);
  }
  const yarnLockDependencies = Object.entries(packagePatternToLockedVersion).map(([packagePattern, { version }]) => ({ packagePattern, version }));
  return resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies);
}
|
|
161
|
+
// Parse a lockfile (yarn.lock or package-lock.json, chosen by file name)
// into a sorted, de-duplicated list of registry dependencies, excluding
// local 'file:' packages.
export async function dependenciesFromPackageLock(path, includeDevDependencies) {
  const isYarnLock = pathLib.basename(path) === YARN_LOCK_FILENAME;
  const dependencies = isYarnLock
    ? await parseDependenciesFromYarnLockFile(path)
    : await parseDependenciesFromNpmLockFile(path, includeDevDependencies);
  return dependencies.sort(sortById).filter(uniqueById).filter(isNotLocal);
}
|
|
168
|
+
// Comparator ordering dependencies alphabetically by id.
function sortById({ id: leftId }, { id: rightId }) {
  return leftId.localeCompare(rightId);
}
|
|
171
|
+
// Array.prototype.filter predicate keeping only the first occurrence of
// each id.
function uniqueById(value, index, values) {
  const firstIndex = values.findIndex((candidate) => candidate.id === value.id);
  return firstIndex === index;
}
|
|
174
|
+
// A version starting with the 'file:' url scheme means the package is
// fetched from the local filesystem relative to the lockfile (e.g. a git
// submodule). Such packages are never served through the mirror's web
// server, so they are excluded from synchronization.
function isNotLocal(dependency) {
  const isLocal = dependency.version.startsWith('file:');
  return !isLocal;
}
|
|
182
|
+
// Flatten a package-lock "packages" map into {name, version} entries,
// skipping the root entry (empty key) and, unless requested, dev-only
// dependencies. Falls back to deriving the name from the lockfile path.
function collectNpmLockfileDependencies({ packages }, includeDevDependencies) {
  const entries = [];
  for (const [lockPath, props] of Object.entries(packages)) {
    if (lockPath.length === 0) {
      continue;
    }
    if (props.dev && !includeDevDependencies) {
      continue;
    }
    entries.push({ name: props.name || pathToName(lockPath), version: props.version });
  }
  return entries;
}
|
|
190
|
+
// "node_modules/lodash" -> "lodash"
|
|
191
|
+
// "node_modules/make-dir/node_modules/semver" -> "semver"
|
|
192
|
+
function pathToName(path) {
|
|
193
|
+
const name = path.split('node_modules/').pop();
|
|
194
|
+
assert(name, `Failed to extract package name from path ${path}`);
|
|
195
|
+
return name;
|
|
196
|
+
}
|
|
197
|
+
// Structural equality via assert.deepStrictEqual, reported as a boolean
// instead of an exception.
function isDeepEqual(a, b) {
  try {
    deepStrictEqual(a, b);
  }
  catch {
    return false;
  }
  return true;
}
|