registry-sync 7.0.2 → 8.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/bin/sync +1 -2
- package/package.json +23 -33
- package/src/client.ts +35 -0
- package/src/download.ts +141 -0
- package/src/index.ts +75 -0
- package/src/integrity.ts +27 -0
- package/src/metadata.ts +78 -0
- package/src/{normalize-yarn-pattern.js → normalize-yarn-pattern.ts} +35 -29
- package/src/pregyp.ts +170 -0
- package/src/resolve.ts +276 -0
- package/src/sync.ts +21 -0
- package/src/types.d.ts +99 -0
- package/src/client.js +0 -30
- package/src/download.js +0 -89
- package/src/index.js +0 -48
- package/src/integrity.js +0 -25
- package/src/metadata.js +0 -65
- package/src/pregyp.js +0 -83
- package/src/resolve.js +0 -207
- package/src/sync.js +0 -20
package/src/pregyp.ts
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import * as fs from 'fs'
|
|
2
|
+
import * as path from 'path'
|
|
3
|
+
import * as semver from 'semver'
|
|
4
|
+
import * as url from 'url'
|
|
5
|
+
import {fetchBinaryData} from './client.ts'
|
|
6
|
+
import type {PlatformVariant, PrebuiltBinaryMetadata, VersionMetadata, VersionMetadataWithBinary} from './types.d.ts'
|
|
7
|
+
|
|
8
|
+
export function hasPrebuiltBinaries(metadata: VersionMetadata): metadata is VersionMetadataWithBinary {
|
|
9
|
+
return Boolean(metadata.binary
|
|
10
|
+
&& metadata.binary.host
|
|
11
|
+
&& metadata.binary.module_name
|
|
12
|
+
&& metadata.binary.package_name
|
|
13
|
+
&& metadata.binary.remote_path
|
|
14
|
+
)
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export async function downloadPrebuiltBinaries(
|
|
18
|
+
versionMetadata: VersionMetadataWithBinary,
|
|
19
|
+
localFolder: string,
|
|
20
|
+
prebuiltBinaryProperties: PlatformVariant[]
|
|
21
|
+
): Promise<void> {
|
|
22
|
+
const {binary, name, version} = versionMetadata
|
|
23
|
+
|
|
24
|
+
if (!binary.napi_versions) {
|
|
25
|
+
for (const {abi, arch, platform} of prebuiltBinaryProperties) {
|
|
26
|
+
await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch)
|
|
27
|
+
}
|
|
28
|
+
return
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
for (const napiVersion of binary.napi_versions) {
|
|
32
|
+
for (const {abi, arch, platform} of prebuiltBinaryProperties) {
|
|
33
|
+
await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion)
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
async function downloadPrebuiltBinary(
|
|
39
|
+
localFolder: string,
|
|
40
|
+
name: string,
|
|
41
|
+
version: string,
|
|
42
|
+
binary: PrebuiltBinaryMetadata,
|
|
43
|
+
abi: number,
|
|
44
|
+
platform: string,
|
|
45
|
+
arch: string,
|
|
46
|
+
napiVersion?: number
|
|
47
|
+
): Promise<void> {
|
|
48
|
+
try {
|
|
49
|
+
const data = await fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion)
|
|
50
|
+
await fs.promises.writeFile(
|
|
51
|
+
prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion),
|
|
52
|
+
data
|
|
53
|
+
)
|
|
54
|
+
} catch (err: unknown) {
|
|
55
|
+
// pre-built binaries are commonly not available on all platforms (and S3 will commonly respond with 403 for a non-existent file)
|
|
56
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
57
|
+
const fileNotFoundError = (err as any).response && ((err as any).response.status == 403 || (err as any).response.status == 404)
|
|
58
|
+
if (!fileNotFoundError) {
|
|
59
|
+
console.error(
|
|
60
|
+
`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`
|
|
61
|
+
)
|
|
62
|
+
throw err
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function fetchPrebuiltBinary(
|
|
68
|
+
name: string,
|
|
69
|
+
version: string,
|
|
70
|
+
binary: PrebuiltBinaryMetadata,
|
|
71
|
+
abi: number,
|
|
72
|
+
platform: string,
|
|
73
|
+
arch: string,
|
|
74
|
+
napiVersion?: number
|
|
75
|
+
): Promise<Buffer> {
|
|
76
|
+
return fetchBinaryData(prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion), '')
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
function prebuiltBinaryFilePath(
|
|
80
|
+
localFolder: string,
|
|
81
|
+
name: string,
|
|
82
|
+
version: string,
|
|
83
|
+
binary: PrebuiltBinaryMetadata,
|
|
84
|
+
abi: number,
|
|
85
|
+
platform: string,
|
|
86
|
+
arch: string,
|
|
87
|
+
napiVersion?: number
|
|
88
|
+
): string {
|
|
89
|
+
return path.join(localFolder, prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion))
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
function prebuiltBinaryUrl(
|
|
93
|
+
name: string,
|
|
94
|
+
version: string,
|
|
95
|
+
binary: PrebuiltBinaryMetadata,
|
|
96
|
+
abi: number,
|
|
97
|
+
platform: string,
|
|
98
|
+
arch: string,
|
|
99
|
+
napiVersion?: number
|
|
100
|
+
): string {
|
|
101
|
+
const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion).replace(
|
|
102
|
+
/\/$/,
|
|
103
|
+
''
|
|
104
|
+
)
|
|
105
|
+
const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion)
|
|
106
|
+
return url.resolve(binary.host, `${remotePath}/${fileName}`)
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
function prebuiltBinaryRemotePath(
|
|
110
|
+
name: string,
|
|
111
|
+
version: string,
|
|
112
|
+
binary: PrebuiltBinaryMetadata,
|
|
113
|
+
abi: number,
|
|
114
|
+
platform: string,
|
|
115
|
+
arch: string,
|
|
116
|
+
napiVersion?: number
|
|
117
|
+
): string {
|
|
118
|
+
return formatPrebuilt(binary.remote_path, name, version, binary.module_name, abi, platform, arch, napiVersion)
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
function prebuiltBinaryFileName(
|
|
122
|
+
name: string,
|
|
123
|
+
version: string,
|
|
124
|
+
binary: PrebuiltBinaryMetadata,
|
|
125
|
+
abi: number,
|
|
126
|
+
platform: string,
|
|
127
|
+
arch: string,
|
|
128
|
+
napiVersion?: number
|
|
129
|
+
): string {
|
|
130
|
+
return formatPrebuilt(binary.package_name, name, version, binary.module_name, abi, platform, arch, napiVersion)
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
// see node-pre-gyp: /lib/util/versioning.js for documentation of possible values
|
|
134
|
+
function formatPrebuilt(
|
|
135
|
+
formatString: string,
|
|
136
|
+
name: string,
|
|
137
|
+
version: string,
|
|
138
|
+
moduleName: string,
|
|
139
|
+
abi: number,
|
|
140
|
+
platform: string,
|
|
141
|
+
arch: string,
|
|
142
|
+
napiVersion?: number
|
|
143
|
+
): string {
|
|
144
|
+
const moduleVersion = semver.parse(version, false, true)
|
|
145
|
+
const prerelease = (moduleVersion.prerelease || []).join('.')
|
|
146
|
+
const build = (moduleVersion.build || []).join('.')
|
|
147
|
+
|
|
148
|
+
const formatted = formatString
|
|
149
|
+
.replace('{name}', name)
|
|
150
|
+
.replace('{version}', version)
|
|
151
|
+
.replace('{major}', moduleVersion.major.toString())
|
|
152
|
+
.replace('{minor}', moduleVersion.minor.toString())
|
|
153
|
+
.replace('{patch}', moduleVersion.patch.toString())
|
|
154
|
+
.replace('{prerelease}', prerelease)
|
|
155
|
+
.replace('{build}', build)
|
|
156
|
+
.replace('{module_name}', moduleName)
|
|
157
|
+
.replace('{node_abi}', `node-v${abi}`)
|
|
158
|
+
.replace('{platform}', platform)
|
|
159
|
+
.replace('{arch}', arch)
|
|
160
|
+
.replace('{libc}', libc(platform))
|
|
161
|
+
.replace('{configuration}', 'Release')
|
|
162
|
+
.replace('{toolset}', '')
|
|
163
|
+
.replace(/[/]+/g, '/')
|
|
164
|
+
|
|
165
|
+
return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString()) : formatted
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
function libc(platform: string): string {
|
|
169
|
+
return platform === 'linux' ? 'glibc' : 'unknown'
|
|
170
|
+
}
|
package/src/resolve.ts
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
import * as fs from 'fs'
|
|
2
|
+
import * as pathLib from 'path'
|
|
3
|
+
import * as readline from 'readline'
|
|
4
|
+
import * as url from 'url'
|
|
5
|
+
import assert, {deepStrictEqual} from 'assert'
|
|
6
|
+
import yarnLockfile from '@yarnpkg/lockfile'
|
|
7
|
+
import {normalizeYarnPackagePattern} from './normalize-yarn-pattern.ts'
|
|
8
|
+
import type {
|
|
9
|
+
CacheSchemaV1,
|
|
10
|
+
CacheSchemaV2,
|
|
11
|
+
Package,
|
|
12
|
+
PackageLock,
|
|
13
|
+
PackageWithId,
|
|
14
|
+
PlatformVariant,
|
|
15
|
+
YarnLockDependency
|
|
16
|
+
} from './types.d.ts'
|
|
17
|
+
|
|
18
|
+
const YARN_LOCK_FILENAME = 'yarn.lock'
|
|
19
|
+
|
|
20
|
+
export async function updateDependenciesCache(
|
|
21
|
+
newDependencies: PackageWithId[],
|
|
22
|
+
cacheFilePath: string,
|
|
23
|
+
prebuiltBinaryProperties: PlatformVariant[]
|
|
24
|
+
): Promise<void> {
|
|
25
|
+
const {dependencies: cachedDependencies} = await loadCache(cacheFilePath)
|
|
26
|
+
const dependencies = cachedDependencies.concat(newDependencies).sort(sortById).filter(uniqueById)
|
|
27
|
+
|
|
28
|
+
const data: CacheSchemaV2 = {
|
|
29
|
+
dependencies,
|
|
30
|
+
prebuiltBinaryProperties,
|
|
31
|
+
prebuiltBinaryNApiSupport: true
|
|
32
|
+
}
|
|
33
|
+
return fs.promises.writeFile(cacheFilePath, JSON.stringify(data), 'utf8')
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export async function dependenciesNotInCache(
|
|
37
|
+
dependencies: PackageWithId[],
|
|
38
|
+
cacheFilePath: string,
|
|
39
|
+
prebuiltBinaryProperties: PlatformVariant[]
|
|
40
|
+
): Promise<PackageWithId[]> {
|
|
41
|
+
const {
|
|
42
|
+
dependencies: cachedDependencies,
|
|
43
|
+
prebuiltBinaryProperties: cachedPrebuiltBinaryProperties,
|
|
44
|
+
prebuiltBinaryNApiSupport
|
|
45
|
+
} = await loadCache(cacheFilePath)
|
|
46
|
+
if (
|
|
47
|
+
cachedDependencies.length > 0 &&
|
|
48
|
+
(!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)
|
|
49
|
+
) {
|
|
50
|
+
console.log(`Pre-built binary properties changed, re-downloading all current packages`)
|
|
51
|
+
return dependencies
|
|
52
|
+
}
|
|
53
|
+
const packageIdsInCache = cachedDependencies.map(pkg => pkg.id)
|
|
54
|
+
return dependencies.filter(pkg => !packageIdsInCache.includes(pkg.id))
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async function loadCache(cacheFilePath: string): Promise<CacheSchemaV2> {
|
|
58
|
+
try {
|
|
59
|
+
const data: CacheSchemaV1 | CacheSchemaV2 = JSON.parse(await fs.promises.readFile(cacheFilePath, 'utf8'))
|
|
60
|
+
// Migrate V1 legacy cache file schema to V2
|
|
61
|
+
if (Array.isArray(data)) {
|
|
62
|
+
return {
|
|
63
|
+
dependencies: data,
|
|
64
|
+
prebuiltBinaryProperties: [],
|
|
65
|
+
prebuiltBinaryNApiSupport: false
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
return data
|
|
69
|
+
} catch {
|
|
70
|
+
// empty V2 cache
|
|
71
|
+
return {
|
|
72
|
+
dependencies: [],
|
|
73
|
+
prebuiltBinaryProperties: [],
|
|
74
|
+
prebuiltBinaryNApiSupport: true
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
function isNonRegistryYarnPackagePattern(packagePattern: string): boolean {
|
|
80
|
+
if (
|
|
81
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/link-resolver.js#L14
|
|
82
|
+
packagePattern.startsWith('link:') ||
|
|
83
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/file-resolver.js#L18
|
|
84
|
+
packagePattern.startsWith('file:') ||
|
|
85
|
+
/^\.{1,2}\//.test(packagePattern) ||
|
|
86
|
+
pathLib.isAbsolute(packagePattern) ||
|
|
87
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/tarball-resolver.js#L15
|
|
88
|
+
packagePattern.startsWith('http://') ||
|
|
89
|
+
packagePattern.startsWith('https://') ||
|
|
90
|
+
(packagePattern.indexOf('@') < 0 && (packagePattern.endsWith('.tgz') || packagePattern.endsWith('.tar.gz'))) ||
|
|
91
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/github-resolver.js#L6
|
|
92
|
+
packagePattern.startsWith('github:') ||
|
|
93
|
+
/^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(packagePattern) ||
|
|
94
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gitlab-resolver.js#L6
|
|
95
|
+
packagePattern.startsWith('gitlab:') ||
|
|
96
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/bitbucket-resolver.js#L6
|
|
97
|
+
packagePattern.startsWith('bitbucket:') ||
|
|
98
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gist-resolver.js#L26
|
|
99
|
+
packagePattern.startsWith('gist:') ||
|
|
100
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
|
|
101
|
+
/^git:|^git\+.+:|^ssh:|^https?:.+\.git$|^https?:.+\.git#.+/.test(packagePattern)
|
|
102
|
+
) {
|
|
103
|
+
return true
|
|
104
|
+
} else {
|
|
105
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
|
|
106
|
+
const {hostname, path} = url.parse(packagePattern)
|
|
107
|
+
if (hostname && path && ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'].indexOf(hostname) >= 0) {
|
|
108
|
+
return path.split('/').filter((p): boolean => !!p).length === 2
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
return false
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
function resolvePackageNameFromRegistryYarnPackagePattern(packagePattern: string): string {
|
|
115
|
+
// See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/registry-resolver.js#L12
|
|
116
|
+
const match = packagePattern.match(/^(\S+):(@?.*?)(@(.*?)|)$/)
|
|
117
|
+
if (match) {
|
|
118
|
+
return match[2]
|
|
119
|
+
} else {
|
|
120
|
+
throw new Error(`Failed to resolve yarn package pattern ${packagePattern}, unrecognized format`)
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
function resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies: YarnLockDependency[]): PackageWithId[] {
|
|
125
|
+
const packages: PackageWithId[] = yarnLockDependencies.reduce(
|
|
126
|
+
(filterMappedDependencies: PackageWithId[], {packagePattern, version}) => {
|
|
127
|
+
if (isNonRegistryYarnPackagePattern(packagePattern)) {
|
|
128
|
+
return filterMappedDependencies
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
let packageName
|
|
132
|
+
|
|
133
|
+
if (packagePattern.startsWith('npm:') || packagePattern.startsWith('yarn:')) {
|
|
134
|
+
packageName = resolvePackageNameFromRegistryYarnPackagePattern(packagePattern)
|
|
135
|
+
} else {
|
|
136
|
+
// Package pattern not yet recognized, continue with parsing logic from
|
|
137
|
+
// https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L99
|
|
138
|
+
const {name: namePart, range: rangePart} = normalizeYarnPackagePattern(packagePattern)
|
|
139
|
+
|
|
140
|
+
if (isNonRegistryYarnPackagePattern(rangePart)) {
|
|
141
|
+
return filterMappedDependencies
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
if (rangePart.startsWith('npm:') || rangePart.startsWith('yarn:')) {
|
|
145
|
+
packageName = resolvePackageNameFromRegistryYarnPackagePattern(rangePart)
|
|
146
|
+
} else {
|
|
147
|
+
// Finally, we just assume that the pattern is a registry pattern,
|
|
148
|
+
// see https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L119
|
|
149
|
+
packageName = namePart
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
filterMappedDependencies.push({id: `${packageName}@${version}`, name: packageName, version})
|
|
154
|
+
|
|
155
|
+
return filterMappedDependencies
|
|
156
|
+
},
|
|
157
|
+
[]
|
|
158
|
+
)
|
|
159
|
+
|
|
160
|
+
return packages
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
async function parseDependenciesFromNpmLockFile(
|
|
164
|
+
lockFilepath: string,
|
|
165
|
+
includeDevDependencies: boolean
|
|
166
|
+
): Promise<PackageWithId[]> {
|
|
167
|
+
const packageLock: PackageLock = JSON.parse(await fs.promises.readFile(lockFilepath, 'utf8'))
|
|
168
|
+
const fileVersion = packageLock.lockfileVersion || 1
|
|
169
|
+
if (![2, 3].includes(fileVersion)) {
|
|
170
|
+
throw new Error(`Unsupported package-lock.json version ${fileVersion}`)
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
const dependencies = collectNpmLockfileDependencies(packageLock, includeDevDependencies)
|
|
174
|
+
return dependencies.map(({name, version}) => ({id: `${name}@${version}`, name, version}))
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
/**
 * Parses a yarn.lock file into a list of npm registry packages.
 *
 * Only yarn lockfile format v1 is supported. The file header is first scanned
 * line-by-line (streamed, so a huge non-lockfile fails fast) for the
 * "# yarn lockfile v1" marker; only then is the whole file read and parsed
 * with @yarnpkg/lockfile.
 *
 * @throws Error when the version marker is missing or parsing fails
 */
async function parseDependenciesFromYarnLockFile(lockFilepath: string): Promise<PackageWithId[]> {
  const lockFileStream = fs.createReadStream(lockFilepath)
  const lockFileReadlineInterface = readline.createInterface({
    input: lockFileStream,
    // treat \r\n as a single line break so CRLF files scan correctly
    crlfDelay: Infinity
  })

  for await (const line of lockFileReadlineInterface) {
    // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/lockfile/stringify.js#L111
    if (/# yarn lockfile v1\s*$/.test(line)) {
      // lockfile version 1 recognized
      break
    }

    if (/^\s*$/.test(line) || /^\s*#/.test(line)) {
      // skip empty or comment lines
      continue
    }

    // a non-comment line before the version marker: not a v1 yarn lockfile
    throw new Error(
      `Failed to parse file ${lockFilepath} as yarn lockfile, unrecognized format, only version 1 is supported`
    )
  }
  // stop streaming; the full contents are re-read below for the actual parse
  lockFileStream.destroy()

  const lockfileContents = await fs.promises.readFile(lockFilepath, 'utf8')
  const {
    type: lockfileParseStatus,
    object: packagePatternToLockedVersion
  }: {
    type: 'success' | 'merge' | 'conflict'
    object: {[packagePattern: string]: {version: string}}
  } = yarnLockfile.parse(lockfileContents)

  if (lockfileParseStatus !== 'success') {
    throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, parse status ${lockfileParseStatus}`)
  }

  // lockfile entries are keyed by the requested pattern (e.g. "lodash@^4.0.0")
  const yarnLockDependencies: YarnLockDependency[] = Object.entries(packagePatternToLockedVersion).map(
    ([packagePattern, {version}]) => ({packagePattern, version})
  )

  return resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies)
}
|
|
221
|
+
|
|
222
|
+
export async function dependenciesFromPackageLock(
|
|
223
|
+
path: string,
|
|
224
|
+
includeDevDependencies: boolean
|
|
225
|
+
): Promise<PackageWithId[]> {
|
|
226
|
+
const filename = pathLib.basename(path)
|
|
227
|
+
const dependencies =
|
|
228
|
+
filename === YARN_LOCK_FILENAME
|
|
229
|
+
? await parseDependenciesFromYarnLockFile(path)
|
|
230
|
+
: await parseDependenciesFromNpmLockFile(path, includeDevDependencies)
|
|
231
|
+
|
|
232
|
+
return dependencies.sort(sortById).filter(uniqueById).filter(isNotLocal)
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
function sortById(a: PackageWithId, b: PackageWithId): number {
|
|
236
|
+
return a.id.localeCompare(b.id)
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
function uniqueById(value: PackageWithId, index: number, values: PackageWithId[]): boolean {
|
|
240
|
+
return values.findIndex(v => v.id === value.id) === index
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
function isNotLocal(dependency: PackageWithId): boolean {
|
|
244
|
+
// if the version starts with the url scheme 'file:' that means that
|
|
245
|
+
// the package is fetched from the local filesystem relative to the
|
|
246
|
+
// package-lock that we were passed; it could for instance be a git
|
|
247
|
+
// submodule. this package will not be fetched through the web server
|
|
248
|
+
// that we set up anyway, so don't attempt to synchronize it
|
|
249
|
+
return !dependency.version.startsWith('file:')
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
function collectNpmLockfileDependencies({packages}: PackageLock, includeDevDependencies: boolean): Package[] {
|
|
253
|
+
return Object.entries(packages)
|
|
254
|
+
.filter(([name, props]) => name.length > 0 && (includeDevDependencies || !props.dev))
|
|
255
|
+
.map(([name, props]) => ({
|
|
256
|
+
name: props.name || pathToName(name),
|
|
257
|
+
version: props.version
|
|
258
|
+
}))
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
// "node_modules/lodash" -> "lodash"
|
|
262
|
+
// "node_modules/make-dir/node_modules/semver" -> "semver"
|
|
263
|
+
function pathToName(path: string) {
|
|
264
|
+
const name = path.split('node_modules/').pop()
|
|
265
|
+
assert(name, `Failed to extract package name from path ${path}`)
|
|
266
|
+
return name
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
function isDeepEqual(a: PlatformVariant[], b: PlatformVariant[]): boolean {
|
|
270
|
+
try {
|
|
271
|
+
deepStrictEqual(a, b)
|
|
272
|
+
return true
|
|
273
|
+
} catch {
|
|
274
|
+
return false
|
|
275
|
+
}
|
|
276
|
+
}
|
package/src/sync.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import {dependenciesFromPackageLock, dependenciesNotInCache, updateDependenciesCache} from './resolve.ts'
|
|
2
|
+
import {downloadAll} from './download.ts'
|
|
3
|
+
import type {CommandLineOptions, PackageWithId} from './types.d.ts'
|
|
4
|
+
|
|
5
|
+
export async function synchronize(options: CommandLineOptions): Promise<PackageWithId[]> {
|
|
6
|
+
const cacheFilePath = `${options.rootFolder}/.registry-sync-cache.json`
|
|
7
|
+
|
|
8
|
+
const packages = await dependenciesFromPackageLock(options.manifest, options.includeDevDependencies)
|
|
9
|
+
const newPackages = await dependenciesNotInCache(packages, cacheFilePath, options.prebuiltBinaryProperties)
|
|
10
|
+
|
|
11
|
+
if (options.dryRun) {
|
|
12
|
+
console.log(newPackages.map(({name, version}) => `${name}@${version}`).join('\n'))
|
|
13
|
+
console.log(`\nWould download ${newPackages.length} packages.`)
|
|
14
|
+
} else {
|
|
15
|
+
await downloadAll(newPackages, options)
|
|
16
|
+
await updateDependenciesCache(newPackages, cacheFilePath, options.prebuiltBinaryProperties)
|
|
17
|
+
console.log(`Downloaded ${newPackages.length} packages`)
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
return newPackages
|
|
21
|
+
}
|
package/src/types.d.ts
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import type {URL} from 'url'
|
|
2
|
+
|
|
3
|
+
/** Options controlling a registry-sync run, parsed from the command line. */
export interface CommandLineOptions {
  localUrl: URL
  manifest: string
  prebuiltBinaryProperties: PlatformVariant[]
  registryUrl: string
  registryToken: string
  rootFolder: string
  enforceTarballsOverHttps: boolean
  includeDevDependencies: boolean
  dryRun: boolean
}

/** Shape of an npm package-lock.json (lockfileVersion 2/3 "packages" map). */
export interface PackageLock {
  name: string
  version: string
  lockfileVersion?: number
  packages: {
    [path: string]: {
      version: string
      // only aliased packages have the name property
      name?: string
      dev?: true
    }
  }
}

/** One yarn.lock entry: the requested pattern and the locked version. */
export interface YarnLockDependency {
  packagePattern: string
  version: string
}

/** A package plus its unique "<name>@<version>" id. */
export interface PackageWithId extends Package {
  id: string
}

/** A package name/version pair. */
export interface Package {
  name: string
  version: string
}

/** Current on-disk cache file schema (V2). */
export interface CacheSchemaV2 {
  dependencies: PackageWithId[]
  prebuiltBinaryProperties: PlatformVariant[]
  prebuiltBinaryNApiSupport: boolean
}

/** Legacy cache file schema (V1): a bare dependency array. */
export type CacheSchemaV1 = PackageWithId[]

/** One platform/arch/ABI combination to fetch pre-built binaries for. */
export interface PlatformVariant {
  abi: number
  arch: string
  platform: string
}

/** npm registry packument: all versions and metadata of one package. */
export interface RegistryMetadata {
  _id: string
  versions: {
    [name: string]: VersionMetadata
  }
  time: {
    modified?: string
    created?: string
    [version: string]: string
  }
  'dist-tags': {
    latest?: string
    [tag: string]: string
  }
}

/** Registry metadata of a single published package version. */
export interface VersionMetadata {
  _id: string
  name: string
  version: string
  binary?: PrebuiltBinaryMetadata
  dist: {
    shasum?: string
    integrity?: string
    tarball: string
  }
}

// The following properties may be present but are ignored by registry-sync
// module_path?: string
// remote_uri?: string
/** node-pre-gyp "binary" section of a package.json. */
export interface PrebuiltBinaryMetadata {
  host: string
  module_name: string
  napi_versions?: number[]
  package_name: string
  remote_path: string
  template: string
}

/** Version metadata whose node-pre-gyp "binary" section is known to exist. */
export interface VersionMetadataWithBinary extends VersionMetadata {
  binary: PrebuiltBinaryMetadata
}
|
package/src/client.js
DELETED
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.fetchJsonWithCacheCloned = fetchJsonWithCacheCloned;
|
|
4
|
-
exports.fetchBinaryData = fetchBinaryData;
|
|
5
|
-
const https = require("https");
|
|
6
|
-
const axios_1 = require("axios");
|
|
7
|
-
const lru_cache_1 = require("lru-cache");
|
|
8
|
-
const metadataCache = new lru_cache_1.LRUCache({ max: 100 });
|
|
9
|
-
const client = axios_1.default.create({
|
|
10
|
-
httpsAgent: new https.Agent({ keepAlive: true }),
|
|
11
|
-
timeout: 30 * 1000
|
|
12
|
-
});
|
|
13
|
-
async function fetchJsonWithCacheCloned(url, token) {
|
|
14
|
-
if (metadataCache.has(url)) {
|
|
15
|
-
return structuredClone(metadataCache.get(url));
|
|
16
|
-
}
|
|
17
|
-
const value = await fetch(url, 'json', token);
|
|
18
|
-
metadataCache.set(url, value);
|
|
19
|
-
return structuredClone(value);
|
|
20
|
-
}
|
|
21
|
-
function fetchBinaryData(url, token) {
|
|
22
|
-
return fetch(url, 'arraybuffer', token);
|
|
23
|
-
}
|
|
24
|
-
async function fetch(url, responseType, token) {
|
|
25
|
-
const config = { responseType };
|
|
26
|
-
if (token !== '') {
|
|
27
|
-
config.headers = { authorization: 'Bearer ' + token };
|
|
28
|
-
}
|
|
29
|
-
return (await client.get(url, config)).data;
|
|
30
|
-
}
|
package/src/download.js
DELETED
|
@@ -1,89 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.downloadAll = downloadAll;
|
|
4
|
-
const fs = require("fs");
|
|
5
|
-
const path = require("path");
|
|
6
|
-
const semver = require("semver");
|
|
7
|
-
const url = require("url");
|
|
8
|
-
const integrity_1 = require("./integrity");
|
|
9
|
-
const pregyp_1 = require("./pregyp");
|
|
10
|
-
const client_1 = require("./client");
|
|
11
|
-
const metadata_1 = require("./metadata");
|
|
12
|
-
async function downloadAll(packages, { localUrl, prebuiltBinaryProperties, registryUrl, registryToken, rootFolder, enforceTarballsOverHttps }) {
|
|
13
|
-
const downloadFromRegistry = download.bind(null, registryUrl, registryToken, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps);
|
|
14
|
-
for (const pkg of packages) {
|
|
15
|
-
await downloadFromRegistry(pkg);
|
|
16
|
-
}
|
|
17
|
-
}
|
|
18
|
-
async function download(registryUrl, registryToken, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, { name, version }) {
|
|
19
|
-
const registryMetadata = await fetchMetadataCloned(name, registryUrl, registryToken);
|
|
20
|
-
const versionMetadata = registryMetadata.versions[version];
|
|
21
|
-
if (!versionMetadata) {
|
|
22
|
-
throw new Error(`Unknown package version ${name}@${version}`);
|
|
23
|
-
}
|
|
24
|
-
const localFolder = await ensureLocalFolderExists(name, rootFolder);
|
|
25
|
-
let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps, registryToken);
|
|
26
|
-
if ((0, pregyp_1.hasPrebuiltBinaries)(versionMetadata)) {
|
|
27
|
-
const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
|
|
28
|
-
await (0, pregyp_1.downloadPrebuiltBinaries)(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
|
|
29
|
-
data = await (0, metadata_1.rewriteMetadataInTarball)(data, versionMetadata, localUrl, localFolder);
|
|
30
|
-
}
|
|
31
|
-
await saveTarball(versionMetadata, data, localFolder);
|
|
32
|
-
(0, metadata_1.rewriteVersionMetadata)(versionMetadata, data, localUrl);
|
|
33
|
-
await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
|
|
34
|
-
}
|
|
35
|
-
async function downloadTarball({ _id: id, dist }, enforceTarballsOverHttps, registryToken) {
|
|
36
|
-
const tarballUrl = enforceTarballsOverHttps ? dist.tarball.replace('http://', 'https://') : dist.tarball;
|
|
37
|
-
const data = await (0, client_1.fetchBinaryData)(tarballUrl, registryToken);
|
|
38
|
-
(0, integrity_1.verifyIntegrity)(data, id, dist);
|
|
39
|
-
return data;
|
|
40
|
-
}
|
|
41
|
-
function saveTarball({ name, version }, data, localFolder) {
|
|
42
|
-
return fs.promises.writeFile(tarballPath(name, version, localFolder), data);
|
|
43
|
-
}
|
|
44
|
-
async function updateMetadata(versionMetadata, defaultMetadata, registryUrl, localFolder) {
|
|
45
|
-
const { version } = versionMetadata;
|
|
46
|
-
const localMetadataPath = path.join(localFolder, 'index.json');
|
|
47
|
-
const localMetadata = await loadMetadata(localMetadataPath, defaultMetadata);
|
|
48
|
-
localMetadata.versions[version] = versionMetadata;
|
|
49
|
-
localMetadata.time[version] = defaultMetadata.time[version];
|
|
50
|
-
localMetadata['dist-tags'] = collectDistTags(localMetadata, defaultMetadata);
|
|
51
|
-
await saveMetadata(localMetadataPath, localMetadata);
|
|
52
|
-
}
|
|
53
|
-
// Collect thise dist-tags entries (name -> version) from registry metadata,
|
|
54
|
-
// which point to versions we have locally available.
|
|
55
|
-
// Override 'latest' tag to ensure its validity as we might not have the version
|
|
56
|
-
// that is tagged latest in registry
|
|
57
|
-
function collectDistTags(localMetadata, defaultMetadata) {
|
|
58
|
-
const availableVersions = Object.keys(localMetadata.versions);
|
|
59
|
-
const validDistTags = Object.entries(defaultMetadata['dist-tags']).filter(([, version]) => availableVersions.includes(version));
|
|
60
|
-
return {
|
|
61
|
-
...Object.fromEntries(validDistTags),
|
|
62
|
-
latest: availableVersions.sort(semver.compare).pop()
|
|
63
|
-
};
|
|
64
|
-
}
|
|
65
|
-
async function loadMetadata(path, defaultMetadata) {
|
|
66
|
-
try {
|
|
67
|
-
const json = await fs.promises.readFile(path, 'utf8');
|
|
68
|
-
return JSON.parse(json);
|
|
69
|
-
}
|
|
70
|
-
catch {
|
|
71
|
-
return { ...defaultMetadata, 'dist-tags': {}, time: {}, versions: {} };
|
|
72
|
-
}
|
|
73
|
-
}
|
|
74
|
-
function saveMetadata(path, metadata) {
|
|
75
|
-
const json = JSON.stringify(metadata, null, 2);
|
|
76
|
-
return fs.promises.writeFile(path, json, 'utf8');
|
|
77
|
-
}
|
|
78
|
-
function tarballPath(name, version, localFolder) {
|
|
79
|
-
return path.join(localFolder, (0, metadata_1.tarballFilename)(name, version));
|
|
80
|
-
}
|
|
81
|
-
async function ensureLocalFolderExists(name, rootFolder) {
|
|
82
|
-
const localFolder = path.resolve(rootFolder, name);
|
|
83
|
-
await fs.promises.mkdir(localFolder, { recursive: true });
|
|
84
|
-
return localFolder;
|
|
85
|
-
}
|
|
86
|
-
function fetchMetadataCloned(name, registryUrl, registryToken) {
|
|
87
|
-
const urlSafeName = name.replace(/\//g, '%2f');
|
|
88
|
-
return (0, client_1.fetchJsonWithCacheCloned)(url.resolve(registryUrl, urlSafeName), registryToken);
|
|
89
|
-
}
|