registry-sync 7.1.0 → 8.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.js DELETED
@@ -1,48 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const fs = require("fs");
- const path = require("path");
- const commander_1 = require("commander");
- const sync_1 = require("./sync");
- const url_1 = require("url");
- const { version } = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf-8'));
- const program = new commander_1.Command();
- program
-     .version(version)
-     .requiredOption('--root <path>', 'Path to save NPM package tarballs and metadata to')
-     .requiredOption('--manifest <file>', 'Path to a package-lock.json or yarn.lock file to use as catalog for mirrored NPM packages.')
-     .requiredOption('--localUrl <url>', 'URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)')
-     .option('--binaryAbi <list>', 'Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/')
-     .option('--binaryArch <list>', 'Comma-separated list of CPU architectures to download pre-built binaries for. Valid values: arm, ia32, and x64')
-     .option('--binaryPlatform <list>', 'Comma-separated list of OS platforms to download pre-built binaries for. Valid values: linux, darwin, win32, sunos, freebsd, openbsd, and aix')
-     .option('--registryUrl [url]', 'Optional URL to use as NPM registry when fetching packages. Default value is https://registry.npmjs.org')
-     .option('--registryToken [string]', 'Optional Bearer token for the registry.')
-     .option('--dontEnforceHttps', 'Disable the default behavior of downloading tarballs over HTTPS (will use whichever protocol is defined in the registry metadata)')
-     .option('--includeDev', 'Include also packages found from devDependencies section of the --manifest')
-     .option('--dryRun', 'Print packages that would be downloaded but do not download them')
-     .parse(process.argv);
- const rawOptions = program.opts();
- // use current (abi,arch,platform) triplet as default if none is specified
- // so the user doesn't have to look them up if build is always done on the
- // same kind of machine
- const binaryAbi = rawOptions.binaryAbi || process.versions.modules;
- const binaryArch = rawOptions.binaryArch || process.arch;
- const binaryPlatform = rawOptions.binaryPlatform || process.platform;
- const abis = binaryAbi.split(',').map(Number);
- const architectures = binaryArch.split(',');
- const platforms = binaryPlatform.split(',');
- const prebuiltBinaryProperties = abis
-     .map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat())
-     .flat();
- const options = {
-     localUrl: new url_1.URL(rawOptions.localUrl),
-     manifest: rawOptions.manifest,
-     prebuiltBinaryProperties,
-     registryUrl: rawOptions.registryUrl || 'https://registry.npmjs.org',
-     registryToken: rawOptions.registryToken || '',
-     rootFolder: rawOptions.root,
-     enforceTarballsOverHttps: Boolean(!rawOptions.dontEnforceHttps),
-     includeDevDependencies: Boolean(rawOptions.includeDev),
-     dryRun: Boolean(rawOptions.dryRun)
- };
- (0, sync_1.synchronize)(options);
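Note: the deleted entry point above expands the --binaryAbi, --binaryArch and --binaryPlatform lists into a Cartesian product of (abi, arch, platform) triplets before handing the options to synchronize. A minimal sketch of that expansion, assuming the hypothetical flags --binaryAbi 108,115 --binaryArch x64 --binaryPlatform linux,darwin:

const abis = '108,115'.split(',').map(Number);
const architectures = 'x64'.split(',');
const platforms = 'linux,darwin'.split(',');
// Same nested map + flat as in the deleted code above
const prebuiltBinaryProperties = abis
    .map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat())
    .flat();
// -> [ { abi: 108, arch: 'x64', platform: 'linux' },
//      { abi: 108, arch: 'x64', platform: 'darwin' },
//      { abi: 115, arch: 'x64', platform: 'linux' },
//      { abi: 115, arch: 'x64', platform: 'darwin' } ]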
package/src/integrity.js DELETED
@@ -1,25 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.verifyIntegrity = verifyIntegrity;
- exports.sha1 = sha1;
- exports.sha512 = sha512;
- const ssri = require("ssri");
- function verifyIntegrity(data, id, { integrity, shasum }) {
-     if (!integrity && !shasum) {
-         throw new Error(`Integrity values not present in metadata for ${id}`);
-     }
-     if (integrity) {
-         if (!ssri.checkData(data, integrity)) {
-             throw new Error(`Integrity check failed for ${id}`);
-         }
-     }
-     else if (sha1(data) != shasum) {
-         throw new Error(`Integrity check with SHA1 failed for ${id}`);
-     }
- }
- function sha1(data) {
-     return ssri.fromData(data, { algorithms: ['sha1'] }).hexDigest();
- }
- function sha512(data) {
-     return ssri.fromData(data, { algorithms: ['sha512'] }).toString();
- }
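Note: the deleted integrity helpers prefer the SSRI integrity field and fall back to comparing a SHA-1 shasum. A minimal usage sketch, with made-up data and package id:

const { verifyIntegrity, sha1, sha512 } = require('./integrity');

const data = Buffer.from('example tarball contents');
const dist = { integrity: sha512(data), shasum: sha1(data) };

// Passes silently; throws if both fields are missing or if a digest does not match.
verifyIntegrity(data, 'example-package@1.0.0', dist);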
package/src/metadata.js DELETED
@@ -1,65 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.rewriteVersionMetadata = rewriteVersionMetadata;
- exports.rewriteMetadataInTarball = rewriteMetadataInTarball;
- exports.extractTgz = extractTgz;
- exports.tarballFilename = tarballFilename;
- const fs = require("fs");
- const path = require("path");
- const tar = require("tar-fs");
- const zlib = require("zlib");
- const pregyp_1 = require("./pregyp");
- const stream_1 = require("stream");
- const integrity_1 = require("./integrity");
- function rewriteVersionMetadata(versionMetadata, data, localUrl) {
-     versionMetadata.dist.tarball = localTarballUrl(versionMetadata, localUrl);
-     if ((0, pregyp_1.hasPrebuiltBinaries)(versionMetadata)) {
-         versionMetadata.binary.host = localUrl.origin;
-         versionMetadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
-         versionMetadata.dist.integrity = (0, integrity_1.sha512)(data);
-         versionMetadata.dist.shasum = (0, integrity_1.sha1)(data);
-     }
- }
- async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder) {
-     const tmpFolder = path.join(localFolder, '.tmp');
-     await fs.promises.mkdir(tmpFolder, { recursive: true });
-     await extractTgz(data, tmpFolder);
-     const manifestPath = path.join(tmpFolder, 'package', 'package.json');
-     const json = await fs.promises.readFile(manifestPath, 'utf8');
-     const metadata = JSON.parse(json);
-     metadata.binary.host = localUrl.origin;
-     metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
-     await fs.promises.writeFile(manifestPath, JSON.stringify(metadata, null, 2));
-     const updatedData = await compressTgz(tmpFolder);
-     await fs.promises.rm(tmpFolder, { recursive: true });
-     return updatedData;
- }
- function createPrebuiltBinaryRemotePath(url, versionMetadata) {
-     return `${removeTrailingSlash(url.pathname)}/${versionMetadata.name}/${versionMetadata.version}/`;
- }
- function extractTgz(data, folder) {
-     return new Promise((resolve, reject) => {
-         const tgz = stream_1.Readable.from(data).pipe(zlib.createGunzip()).pipe(tar.extract(folder));
-         tgz.on('finish', resolve);
-         tgz.on('error', reject);
-     });
- }
- function compressTgz(folder) {
-     return new Promise((resolve, reject) => {
-         const chunks = [];
-         const tgz = tar.pack(folder).pipe(zlib.createGzip());
-         tgz.on('data', (chunk) => chunks.push(chunk));
-         tgz.on('end', () => resolve(Buffer.concat(chunks)));
-         tgz.on('error', reject);
-     });
- }
- function localTarballUrl({ name, version }, localUrl) {
-     return `${localUrl.origin}${removeTrailingSlash(localUrl.pathname)}/${name}/${tarballFilename(name, version)}`;
- }
- function tarballFilename(name, version) {
-     const normalized = name.replace(/\//g, '-');
-     return `${normalized}-${version}.tgz`;
- }
- function removeTrailingSlash(str) {
-     return str.replace(/\/$/, '');
- }
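Note: tarballFilename above flattens scoped package names by replacing slashes with dashes, and localTarballUrl builds the rewritten dist.tarball from the --localUrl origin and path. A small sketch with a hypothetical mirror URL:

const { tarballFilename } = require('./metadata');

tarballFilename('@scope/pkg', '1.2.3');
// -> '@scope-pkg-1.2.3.tgz'

// With localUrl = new URL('https://npm.example.com/mirror/'), the rewritten tarball URL
// produced by rewriteVersionMetadata would be
//   'https://npm.example.com/mirror/@scope/pkg/@scope-pkg-1.2.3.tgz'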
package/src/pregyp.js DELETED
@@ -1,83 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.hasPrebuiltBinaries = hasPrebuiltBinaries;
- exports.downloadPrebuiltBinaries = downloadPrebuiltBinaries;
- const fs = require("fs");
- const path = require("path");
- const semver = require("semver");
- const url = require("url");
- const client_1 = require("./client");
- function hasPrebuiltBinaries({ binary }) {
-     return Boolean(binary && binary.module_name);
- }
- async function downloadPrebuiltBinaries(versionMetadata, localFolder, prebuiltBinaryProperties) {
-     const { binary, name, version } = versionMetadata;
-     if (!binary.napi_versions) {
-         for (const { abi, arch, platform } of prebuiltBinaryProperties) {
-             await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch);
-         }
-         return;
-     }
-     for (const napiVersion of binary.napi_versions) {
-         for (const { abi, arch, platform } of prebuiltBinaryProperties) {
-             await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion);
-         }
-     }
- }
- async function downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
-     try {
-         const data = await fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion);
-         await fs.promises.writeFile(prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion), data);
-     }
-     catch (err) {
-         // pre-built binaries are commonly not available on all platforms (and S3 will commonly respond with 403 for a non-existent file)
-         const fileNotFoundError = err.response && (err.response.status == 403 || err.response.status == 404);
-         if (!fileNotFoundError) {
-             console.error(`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`);
-             throw err;
-         }
-     }
- }
- function fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion) {
-     return (0, client_1.fetchBinaryData)(prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion), '');
- }
- function prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
-     return path.join(localFolder, prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion));
- }
- function prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion) {
-     const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion).replace(/\/$/, '');
-     const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion);
-     return url.resolve(binary.host, `${remotePath}/${fileName}`);
- }
- function prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion) {
-     return formatPrebuilt(binary.remote_path, name, version, binary.module_name, abi, platform, arch, napiVersion);
- }
- function prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion) {
-     return formatPrebuilt(binary.package_name, name, version, binary.module_name, abi, platform, arch, napiVersion);
- }
- // see node-pre-gyp: /lib/util/versioning.js for documentation of possible values
- function formatPrebuilt(formatString, name, version, moduleName, abi, platform, arch, napiVersion) {
-     const moduleVersion = semver.parse(version);
-     const prerelease = (moduleVersion.prerelease || []).join('.');
-     const build = (moduleVersion.build || []).join('.');
-     const formatted = formatString
-         .replace('{name}', name)
-         .replace('{version}', version)
-         .replace('{major}', moduleVersion.major.toString())
-         .replace('{minor}', moduleVersion.minor.toString())
-         .replace('{patch}', moduleVersion.patch.toString())
-         .replace('{prerelease}', prerelease)
-         .replace('{build}', build)
-         .replace('{module_name}', moduleName)
-         .replace('{node_abi}', `node-v${abi}`)
-         .replace('{platform}', platform)
-         .replace('{arch}', arch)
-         .replace('{libc}', libc(platform))
-         .replace('{configuration}', 'Release')
-         .replace('{toolset}', '')
-         .replace(/[/]+/g, '/');
-     return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString()) : formatted;
- }
- function libc(platform) {
-     return platform === 'linux' ? 'glibc' : 'unknown';
- }
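Note: formatPrebuilt above fills in the placeholder tokens used in node-pre-gyp binary.remote_path and binary.package_name templates. As an illustration only (formatPrebuilt is not exported, and the template below is a typical example rather than one taken from this diff):

// A hypothetical package_name template and inputs:
const template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
// module_name: 'example', version: '1.2.3', abi: 108, platform: 'linux', arch: 'x64'
// formatPrebuilt would return:
//   'example-v1.2.3-node-v108-linux-x64.tar.gz'
// because {node_abi} is rendered as `node-v${abi}` and the unused tokens
// ({libc}, {configuration}, {toolset}, ...) simply do not occur in this template.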
package/src/resolve.js DELETED
@@ -1,207 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.updateDependenciesCache = updateDependenciesCache;
- exports.dependenciesNotInCache = dependenciesNotInCache;
- exports.dependenciesFromPackageLock = dependenciesFromPackageLock;
- const fs = require("fs");
- const pathLib = require("path");
- const readline = require("readline");
- const url = require("url");
- const assert_1 = require("assert");
- const yarnLockfile = require("@yarnpkg/lockfile");
- const normalize_yarn_pattern_1 = require("./normalize-yarn-pattern");
- const YARN_LOCK_FILENAME = 'yarn.lock';
- async function updateDependenciesCache(newDependencies, cacheFilePath, prebuiltBinaryProperties) {
-     const { dependencies: cachedDependencies } = await loadCache(cacheFilePath);
-     const dependencies = cachedDependencies.concat(newDependencies).sort(sortById).filter(uniqueById);
-     const data = {
-         dependencies,
-         prebuiltBinaryProperties,
-         prebuiltBinaryNApiSupport: true
-     };
-     return fs.promises.writeFile(cacheFilePath, JSON.stringify(data), 'utf8');
- }
- async function dependenciesNotInCache(dependencies, cacheFilePath, prebuiltBinaryProperties) {
-     const { dependencies: cachedDependencies, prebuiltBinaryProperties: cachedPrebuiltBinaryProperties, prebuiltBinaryNApiSupport } = await loadCache(cacheFilePath);
-     if (cachedDependencies.length > 0 &&
-         (!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)) {
-         console.log(`Pre-built binary properties changed, re-downloading all current packages`);
-         return dependencies;
-     }
-     const packageIdsInCache = cachedDependencies.map(pkg => pkg.id);
-     return dependencies.filter(pkg => !packageIdsInCache.includes(pkg.id));
- }
- async function loadCache(cacheFilePath) {
-     try {
-         const data = JSON.parse(await fs.promises.readFile(cacheFilePath, 'utf8'));
-         // Migrate V1 legacy cache file schema to V2
-         if (Array.isArray(data)) {
-             return {
-                 dependencies: data,
-                 prebuiltBinaryProperties: [],
-                 prebuiltBinaryNApiSupport: false
-             };
-         }
-         return data;
-     }
-     catch {
-         // empty V2 cache
-         return {
-             dependencies: [],
-             prebuiltBinaryProperties: [],
-             prebuiltBinaryNApiSupport: true
-         };
-     }
- }
- function isNonRegistryYarnPackagePattern(packagePattern) {
-     if (
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/link-resolver.js#L14
-         packagePattern.startsWith('link:') ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/file-resolver.js#L18
-         packagePattern.startsWith('file:') ||
-         /^\.{1,2}\//.test(packagePattern) ||
-         pathLib.isAbsolute(packagePattern) ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/tarball-resolver.js#L15
-         packagePattern.startsWith('http://') ||
-         packagePattern.startsWith('https://') ||
-         (packagePattern.indexOf('@') < 0 && (packagePattern.endsWith('.tgz') || packagePattern.endsWith('.tar.gz'))) ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/github-resolver.js#L6
-         packagePattern.startsWith('github:') ||
-         /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(packagePattern) ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gitlab-resolver.js#L6
-         packagePattern.startsWith('gitlab:') ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/bitbucket-resolver.js#L6
-         packagePattern.startsWith('bitbucket:') ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gist-resolver.js#L26
-         packagePattern.startsWith('gist:') ||
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
-         /^git:|^git\+.+:|^ssh:|^https?:.+\.git$|^https?:.+\.git#.+/.test(packagePattern)) {
-         return true;
-     }
-     else {
-         // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
-         const { hostname, path } = url.parse(packagePattern);
-         if (hostname && path && ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'].indexOf(hostname) >= 0) {
-             return path.split('/').filter((p) => !!p).length === 2;
-         }
-     }
- }
- function resolvePackageNameFromRegistryYarnPackagePattern(packagePattern) {
-     // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/registry-resolver.js#L12
-     const match = packagePattern.match(/^(\S+):(@?.*?)(@(.*?)|)$/);
-     if (match) {
-         return match[2];
-     }
-     else {
-         throw new Error(`Failed to resolve yarn package pattern ${packagePattern}, unrecognized format`);
-     }
- }
- function resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies) {
-     const packages = yarnLockDependencies.reduce((filterMappedDependencies, { packagePattern, version }) => {
-         if (isNonRegistryYarnPackagePattern(packagePattern)) {
-             return filterMappedDependencies;
-         }
-         let packageName;
-         if (packagePattern.startsWith('npm:') || packagePattern.startsWith('yarn:')) {
-             packageName = resolvePackageNameFromRegistryYarnPackagePattern(packagePattern);
-         }
-         else {
-             // Package pattern not yet recognized, continue with parsing logic from
-             // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L99
-             const { name: namePart, range: rangePart } = (0, normalize_yarn_pattern_1.normalizeYarnPackagePattern)(packagePattern);
-             if (isNonRegistryYarnPackagePattern(rangePart)) {
-                 return filterMappedDependencies;
-             }
-             if (rangePart.startsWith('npm:') || rangePart.startsWith('yarn:')) {
-                 packageName = resolvePackageNameFromRegistryYarnPackagePattern(rangePart);
-             }
-             else {
-                 // Finally, we just assume that the pattern is a registry pattern,
-                 // see https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L119
-                 packageName = namePart;
-             }
-         }
-         filterMappedDependencies.push({ id: `${packageName}@${version}`, name: packageName, version });
-         return filterMappedDependencies;
-     }, []);
-     return packages;
- }
- async function parseDependenciesFromNpmLockFile(lockFilepath, includeDevDependencies) {
-     const packageLock = JSON.parse(await fs.promises.readFile(lockFilepath, 'utf8'));
-     const fileVersion = packageLock.lockfileVersion || 1;
-     if (![2, 3].includes(packageLock.lockfileVersion)) {
-         throw new Error(`Unsupported package-lock.json version ${fileVersion}`);
-     }
-     const dependencies = collectNpmLockfileDependencies(packageLock, includeDevDependencies);
-     return dependencies.map(({ name, version }) => ({ id: `${name}@${version}`, name, version }));
- }
- async function parseDependenciesFromYarnLockFile(lockFilepath) {
-     const lockFileStream = fs.createReadStream(lockFilepath);
-     const lockFileReadlineInterface = readline.createInterface({
-         input: lockFileStream,
-         crlfDelay: Infinity
-     });
-     for await (const line of lockFileReadlineInterface) {
-         // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/lockfile/stringify.js#L111
-         if (/# yarn lockfile v1\s*$/.test(line)) {
-             // lockfile version 1 recognized
-             break;
-         }
-         if (/^\s*$/.test(line) || /^\s*#/.test(line)) {
-             // skip empty or comment lines
-             continue;
-         }
-         throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, unrecognized format, only version 1 is supported`);
-     }
-     lockFileStream.destroy();
-     const lockfileContents = await fs.promises.readFile(lockFilepath, 'utf8');
-     const { type: lockfileParseStatus, object: packagePatternToLockedVersion } = yarnLockfile.parse(lockfileContents);
-     if (lockfileParseStatus !== 'success') {
-         throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, parse status ${lockfileParseStatus}`);
-     }
-     const yarnLockDependencies = Object.entries(packagePatternToLockedVersion).map(([packagePattern, { version }]) => ({ packagePattern, version }));
-     return resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies);
- }
- async function dependenciesFromPackageLock(path, includeDevDependencies) {
-     const filename = pathLib.basename(path);
-     const dependencies = filename === YARN_LOCK_FILENAME
-         ? await parseDependenciesFromYarnLockFile(path)
-         : await parseDependenciesFromNpmLockFile(path, includeDevDependencies);
-     return dependencies.sort(sortById).filter(uniqueById).filter(isNotLocal);
- }
- function sortById(a, b) {
-     return a.id.localeCompare(b.id);
- }
- function uniqueById(value, index, values) {
-     return values.findIndex(v => v.id === value.id) === index;
- }
- function isNotLocal(dependency) {
-     // if the version starts with the url scheme 'file:' that means that
-     // the package is fetched from the local filesystem relative to the
-     // package-lock that we were passed; it could for instance be a git
-     // submodule. this package will not be fetched through the web server
-     // that we set up anyway, so don't attempt to synchronize it
-     return !dependency.version.startsWith('file:');
- }
- function collectNpmLockfileDependencies({ packages }, includeDevDependencies) {
-     return Object.entries(packages)
-         .filter(([name, props]) => name.length > 0 && (includeDevDependencies || !props.dev))
-         .map(([name, props]) => ({
-             name: props.name || pathToName(name),
-             version: props.version
-         }));
- }
- // "node_modules/lodash" -> "lodash"
- // "node_modules/make-dir/node_modules/semver" -> "semver"
- function pathToName(path) {
-     return path.split('node_modules/').pop();
- }
- function isDeepEqual(a, b) {
-     try {
-         (0, assert_1.deepStrictEqual)(a, b);
-         return true;
-     }
-     catch {
-         return false;
-     }
- }
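Note: dependenciesFromPackageLock above dispatches on the lockfile name: a file named yarn.lock goes through the yarn v1 parser, anything else is treated as a package-lock.json (lockfileVersion 2 or 3 only). A minimal usage sketch with a placeholder path:

const { dependenciesFromPackageLock } = require('./resolve');

async function listDependencies() {
    // Second argument controls whether devDependencies are included.
    const packages = await dependenciesFromPackageLock('./package-lock.json', false);
    // Each entry looks like { id: 'lodash@4.17.21', name: 'lodash', version: '4.17.21' },
    // sorted by id, deduplicated, and with 'file:' versions filtered out.
    console.log(packages);
}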
package/src/sync.js DELETED
@@ -1,20 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.synchronize = synchronize;
- const download_1 = require("./download");
- const resolve_1 = require("./resolve");
- async function synchronize(options) {
-     const cacheFilePath = `${options.rootFolder}/.registry-sync-cache.json`;
-     const packages = await (0, resolve_1.dependenciesFromPackageLock)(options.manifest, options.includeDevDependencies);
-     const newPackages = await (0, resolve_1.dependenciesNotInCache)(packages, cacheFilePath, options.prebuiltBinaryProperties);
-     if (options.dryRun) {
-         console.log(newPackages.map(({ name, version }) => `${name}@${version}`).join('\n'));
-         console.log(`\nWould download ${newPackages.length} packages.`);
-     }
-     else {
-         await (0, download_1.downloadAll)(newPackages, options);
-         await (0, resolve_1.updateDependenciesCache)(newPackages, cacheFilePath, options.prebuiltBinaryProperties);
-         console.log(`Downloaded ${newPackages.length} packages`);
-     }
-     return newPackages;
- }
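Note: synchronize above is the function the deleted CLI entry point ultimately calls; it keeps a cache file at <rootFolder>/.registry-sync-cache.json so only packages not seen before are downloaded. A dry-run sketch with placeholder values (only the fields read on the dry-run path are shown):

const { synchronize } = require('./sync');

synchronize({
    rootFolder: './mirror',              // --root; also determines the cache file location
    manifest: './package-lock.json',     // --manifest
    includeDevDependencies: false,
    prebuiltBinaryProperties: [],
    dryRun: true                         // list new packages without downloading them
}).then(newPackages => {
    console.log(`${newPackages.length} packages would be downloaded`);
});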