@definitelytyped/definitions-parser 0.0.177 → 0.0.179
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/allowedPackageJsonDependencies.txt +5 -0
- package/dist/get-affected-packages.d.ts +7 -8
- package/dist/get-affected-packages.js +35 -71
- package/dist/get-affected-packages.js.map +1 -1
- package/dist/git.d.ts +10 -7
- package/dist/git.js +80 -68
- package/dist/git.js.map +1 -1
- package/dist/lib/definition-parser-worker.js +1 -1
- package/dist/lib/definition-parser-worker.js.map +1 -1
- package/dist/lib/definition-parser.d.ts +4 -11
- package/dist/lib/definition-parser.js +137 -352
- package/dist/lib/definition-parser.js.map +1 -1
- package/dist/lib/module-info.d.ts +1 -13
- package/dist/lib/module-info.js +8 -170
- package/dist/lib/module-info.js.map +1 -1
- package/dist/lib/settings.d.ts +1 -0
- package/dist/lib/settings.js +2 -1
- package/dist/lib/settings.js.map +1 -1
- package/dist/lib/utils.d.ts +1 -0
- package/dist/lib/utils.js +8 -1
- package/dist/lib/utils.js.map +1 -1
- package/dist/mocks.d.ts +1 -1
- package/dist/mocks.js +51 -42
- package/dist/mocks.js.map +1 -1
- package/dist/packages.d.ts +63 -131
- package/dist/packages.js +104 -126
- package/dist/packages.js.map +1 -1
- package/dist/parse-definitions.d.ts +0 -1
- package/dist/parse-definitions.js +23 -9
- package/dist/parse-definitions.js.map +1 -1
- package/package.json +12 -12
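
Most of the churn in package/dist/lib/definition-parser.js (shown in full below) comes from one refactor: instead of throwing on the first problem, each parsing step now appends messages to an errors array and returns either its data or a string[] of errors; callers test the result with Array.isArray, and getTypingInfo ultimately returns { errors } when anything failed. Header data is also now taken from package.json (via header-parser's validatePackageJson) rather than the old comment-header fields, so reads like typingsDataRaw.libraryMajorVersion become typingsDataRaw.header.libraryMajorVersion. The TypeScript sketch below only illustrates that error-collection shape; the type and function names (ParseResult, Parsed, parseStep, parseAll) are invented for the example and are not part of the package:

    // Illustration only: mirrors the "return data or string[] of errors" convention
    // visible in the diff below, with invented names.
    type ParseResult<T> = T | string[]; // success value, or a list of error messages

    interface Parsed {
      value: number;
    }

    // A single step reports problems by returning string[] instead of throwing.
    function parseStep(input: string): ParseResult<Parsed> {
      const n = Number(input);
      return Number.isNaN(n) ? [`'${input}' is not a number`] : { value: n };
    }

    // The caller accumulates errors from every step and reports them all at once.
    function parseAll(inputs: string[]): { errors: string[] } | { values: Parsed[] } {
      const errors: string[] = [];
      const values: Parsed[] = [];
      for (const input of inputs) {
        const result = parseStep(input);
        if (Array.isArray(result)) {
          errors.push(...result); // collect and keep going instead of throwing
        } else {
          values.push(result);
        }
      }
      return errors.length ? { errors } : { values };
    }

    // parseAll(["1", "x"]) -> { errors: ["'x' is not a number"] }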
package/dist/lib/definition-parser.js
@@ -26,26 +26,29 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.readFileAndThrowOnBOM = exports.
-const ts = __importStar(require("typescript"));
+exports.readFileAndThrowOnBOM = exports.parseVersionFromDirectoryName = exports.getTypingInfo = void 0;
 const header_parser_1 = require("@definitelytyped/header-parser");
-const module_info_1 = require("./module-info");
-const packages_1 = require("../packages");
-const settings_1 = require("./settings");
 const utils_1 = require("@definitelytyped/utils");
+const assert_1 = __importDefault(require("assert"));
 const path_1 = __importDefault(require("path"));
+const ts = __importStar(require("typescript"));
+const packages_1 = require("../packages");
+const module_info_1 = require("./module-info");
+const settings_1 = require("./settings");
+const utils_2 = require("./utils");
 function matchesVersion(typingsDataRaw, version, considerLibraryMinorVersion) {
-    return (typingsDataRaw.libraryMajorVersion === version.major &&
+    return (typingsDataRaw.header.libraryMajorVersion === version.major &&
         (considerLibraryMinorVersion
-            ? version.minor === undefined || typingsDataRaw.libraryMinorVersion === version.minor
+            ? version.minor === undefined || typingsDataRaw.header.libraryMinorVersion === version.minor
             : true));
 }
 function formattedLibraryVersion(typingsDataRaw) {
-    return `${typingsDataRaw.libraryMajorVersion}.${typingsDataRaw.libraryMinorVersion}`;
+    return `${typingsDataRaw.header.libraryMajorVersion}.${typingsDataRaw.header.libraryMinorVersion}`;
 }
 async function getTypingInfo(packageName, dt) {
+    const errors = [];
     if (packageName !== packageName.toLowerCase()) {
-
+        errors.push(`Package name \`${packageName}\` should be strictly lowercase`);
     }
     const fs = dt.subDir("types").subDir((0, packages_1.getMangledNameForScopedPackage)(packageName));
     const [rootDirectoryLs, olderVersionDirectories] = (0, utils_1.split)(fs.readdir(), (fileOrDirectoryName) => {
@@ -54,37 +57,47 @@ async function getTypingInfo(packageName, dt) {
     });
     const moduleResolutionHost = (0, utils_1.createModuleResolutionHost)(dt, dt.debugPath());
     const considerLibraryMinorVersion = olderVersionDirectories.some(({ version }) => version.minor !== undefined);
-    const
-
-
-    }
+    const latestDataResult = await combineDataForAllTypesVersions(packageName, rootDirectoryLs, fs, moduleResolutionHost);
+    if (Array.isArray(latestDataResult)) {
+        return { errors: [...errors, ...latestDataResult] };
+    }
+    const latestData = { libraryVersionDirectoryName: undefined, ...latestDataResult };
     const older = await Promise.all(olderVersionDirectories.map(async ({ directoryName, version: directoryVersion }) => {
+        var _a;
         if (matchesVersion(latestData, directoryVersion, considerLibraryMinorVersion)) {
-            const latest = `${latestData.libraryMajorVersion}.${latestData.libraryMinorVersion}`;
-
+            const latest = `${latestData.header.libraryMajorVersion}.${latestData.header.libraryMinorVersion}`;
+            errors.push(`The latest version of the '${packageName}' package is ${latest}, so the subdirectory '${directoryName}' is not allowed` +
                 (`v${latest}` === directoryName
                     ? "."
-                    : `; since it applies to any ${latestData.libraryMajorVersion}.* version, up to and including ${latest}.`));
+                    : `; since it applies to any ${latestData.header.libraryMajorVersion}.* version, up to and including ${latest}.`));
         }
         // tslint:disable-next-line:non-literal-fs-path -- Not a reference to the fs package
         const ls = fs.readdir(directoryName);
-        const
-
-
-
+        const result = await combineDataForAllTypesVersions(packageName, ls, fs.subDir(directoryName), moduleResolutionHost);
+        if (Array.isArray(result)) {
+            errors.push(...result);
+            return result;
+        }
+        const data = { libraryVersionDirectoryName: (0, packages_1.formatTypingVersion)(directoryVersion), ...result };
        if (!matchesVersion(data, directoryVersion, considerLibraryMinorVersion)) {
            if (considerLibraryMinorVersion) {
-
-                `but
+                errors.push(`Directory ${directoryName} indicates major.minor version ${directoryVersion.major}.${(_a = directoryVersion.minor) !== null && _a !== void 0 ? _a : "*"}, ` +
+                    `but package.json indicates major.minor version ${data.header.libraryMajorVersion}.${data.header.libraryMinorVersion}`);
+            }
+            else {
+                errors.push(`Directory ${directoryName} indicates major version ${directoryVersion.major}, but package.json indicates major version ` +
+                    data.header.libraryMajorVersion.toString());
            }
-            throw new Error(`Directory ${directoryName} indicates major version ${directoryVersion.major}, but header indicates major version ` +
-                data.libraryMajorVersion.toString());
        }
        return data;
    }));
+    if (errors.length) {
+        return { errors };
+    }
     const res = {};
     res[formattedLibraryVersion(latestData)] = latestData;
     for (const o of older) {
+        (0, assert_1.default)(!Array.isArray(o));
         res[formattedLibraryVersion(o)] = o;
     }
     return res;
@@ -92,6 +105,7 @@ async function getTypingInfo(packageName, dt) {
 exports.getTypingInfo = getTypingInfo;
 const packageJsonName = "package.json";
 function getTypesVersionsAndPackageJson(ls) {
+    const errors = [];
     const withoutPackageJson = ls.filter((name) => name !== packageJsonName);
     const [remainingLs, typesVersions] = (0, utils_1.split)(withoutPackageJson, (fileOrDirectoryName) => {
         const match = /^ts(\d+\.\d+)$/.exec(fileOrDirectoryName);
@@ -100,11 +114,14 @@ function getTypesVersionsAndPackageJson(ls) {
         }
         const version = match[1];
         if (parseInt(version, 10) < 3) {
-
+            errors.push(`Directory name starting with 'ts' should be a TypeScript version newer than 3.0. Got: ${version}`);
         }
         return version;
     });
-
+    if (errors.length) {
+        return errors;
+    }
+    return { remainingLs, typesVersions };
 }
 /**
  * Parses a directory name into a version that either holds a single major version or a major and minor version.
@@ -127,84 +144,80 @@ function parseVersionFromDirectoryName(directoryName) {
     };
 }
 exports.parseVersionFromDirectoryName = parseVersionFromDirectoryName;
-
-
-
-
-
-
-
-
+async function combineDataForAllTypesVersions(typingsPackageName, ls, fs, moduleResolutionHost) {
+    const errors = [];
+    const typesVersionAndPackageJson = getTypesVersionsAndPackageJson(ls);
+    if (Array.isArray(typesVersionAndPackageJson)) {
+        errors.push(...typesVersionAndPackageJson);
+    }
+    const { remainingLs, typesVersions } = Array.isArray(typesVersionAndPackageJson)
+        ? { remainingLs: [], typesVersions: [] }
+        : typesVersionAndPackageJson;
+    const packageJson = fs.readJson(packageJsonName);
+    const dataForRoot = getTypingDataForSingleTypesVersion(undefined, typingsPackageName, remainingLs, fs, moduleResolutionHost);
+    if (Array.isArray(dataForRoot)) {
+        errors.push(...dataForRoot);
     }
-    return {
-        major: Number(match[1]),
-        minor: match[3] !== undefined ? Number(match[3]) : undefined, // tslint:disable-line strict-type-predicates (false positive)
-    };
-}
-exports.tryParsePackageVersion = tryParsePackageVersion;
-/**
- * Like `tryParsePackageVersion`, but throws if the input format is not parseable.
- */
-function parsePackageVersion(versionString) {
-    const version = tryParsePackageVersion(versionString);
-    if (version === "*") {
-        throw new Error(`Version string '${versionString}' is not a valid format.`);
-    }
-    return version;
-}
-exports.parsePackageVersion = parsePackageVersion;
-async function combineDataForAllTypesVersions(typingsPackageName, ls, fs, directoryVersion, moduleResolutionHost) {
-    const { remainingLs, typesVersions, hasPackageJson } = getTypesVersionsAndPackageJson(ls);
-    const packageJson = hasPackageJson
-        ? fs.readJson(packageJsonName)
-        : {};
-    const packageJsonType = checkPackageJsonType(packageJson.type, packageJsonName);
-    // Every typesVersion has an index.d.ts, but only the root index.d.ts should have a header.
-    const { contributors, libraryMajorVersion, libraryMinorVersion, typeScriptVersion: minTsVersion, libraryName, projects, } = (0, header_parser_1.parseHeaderOrFail)(`${typingsPackageName} > index.d.ts`, readFileAndThrowOnBOM("index.d.ts", fs));
-    const dataForRoot = getTypingDataForSingleTypesVersion(undefined, typingsPackageName, remainingLs, fs, directoryVersion, moduleResolutionHost);
     const dataForOtherTypesVersions = typesVersions.map((tsVersion) => {
         const subFs = fs.subDir(`ts${tsVersion}`);
-
+        const data = getTypingDataForSingleTypesVersion(tsVersion, typingsPackageName, subFs.readdir(), subFs, moduleResolutionHost);
+        if (Array.isArray(data)) {
+            errors.push(...data);
+        }
+        return data;
     });
+    const packageJsonType = (0, header_parser_1.checkPackageJsonType)(packageJson.type, packageJsonName);
+    if (Array.isArray(packageJsonType)) {
+        errors.push(...packageJsonType);
+    }
+    const packageJsonResult = (0, header_parser_1.validatePackageJson)(typingsPackageName, packageJson, typesVersions);
+    if (Array.isArray(packageJsonResult)) {
+        errors.push(...packageJsonResult);
+    }
+    const header = Array.isArray(packageJsonResult) ? undefined : packageJsonResult;
+    const allowedDependencies = await (0, settings_1.getAllowedPackageJsonDependencies)();
+    errors.push(...(0, header_parser_1.checkPackageJsonDependencies)(packageJson.dependencies, packageJsonName, allowedDependencies));
+    errors.push(...(0, header_parser_1.checkPackageJsonDependencies)(packageJson.devDependencies, packageJsonName, allowedDependencies, `@${settings_1.scopeName}/${typingsPackageName}`));
+    const imports = (0, header_parser_1.checkPackageJsonImports)(packageJson.imports, packageJsonName);
+    if (Array.isArray(imports)) {
+        errors.push(...imports);
+    }
+    const exports = (0, header_parser_1.checkPackageJsonExportsAndAddPJsonEntry)(packageJson.exports, packageJsonName);
+    if (Array.isArray(exports)) {
+        errors.push(...exports);
+    }
+    const license = (0, header_parser_1.getLicenseFromPackageJson)(packageJson.license);
+    if (Array.isArray(license)) {
+        errors.push(...license);
+    }
+    if (errors.length) {
+        return errors;
+    }
     const allTypesVersions = [dataForRoot, ...dataForOtherTypesVersions];
-    const license = (0, packages_1.getLicenseFromPackageJson)(packageJson.license);
-    const packageJsonDependencies = await checkPackageJsonDependencies(packageJson.dependencies, packageJsonName);
     const files = Array.from((0, utils_1.flatMap)(allTypesVersions, ({ typescriptVersion, declFiles }) => declFiles.map((file) => (typescriptVersion === undefined ? file : `ts${typescriptVersion}/${file}`))));
     // Note that only the first project is collected right now
     return {
-
-        typingsPackageName,
-        projectName: projects[0],
-        contributors,
-        libraryMajorVersion,
-        libraryMinorVersion,
-        minTsVersion,
+        header: (0, utils_1.assertDefined)(header),
         typesVersions,
         files,
-        license,
-        dependencies:
-
-
-
-
-        globals: getAllUniqueValues(allTypesVersions, "globals"),
-        declaredModules: getAllUniqueValues(allTypesVersions, "declaredModules"),
-        imports: checkPackageJsonImports(packageJson.imports, packageJsonName),
-        exports: checkPackageJsonExportsAndAddPJsonEntry(packageJson.exports, packageJsonName),
+        license: license,
+        dependencies: packageJson.dependencies,
+        devDependencies: packageJson.devDependencies,
+        contentHash: hash([...files, packageJsonName], (0, utils_1.mapDefined)(allTypesVersions, (a) => a.tsconfigPathsForHash), fs),
+        imports: imports,
+        exports: exports,
         type: packageJsonType,
     };
 }
-function getAllUniqueValues(records, key) {
-    return (0, utils_1.unique)((0, utils_1.flatMap)(records, (x) => x[key]));
-}
 /**
  * @param typescriptVersion Set if this is in e.g. a `ts3.1` directory.
  * @param packageName Name of the outermost directory; e.g. for "node/v4" this is just "node".
  * @param ls All file/directory names in `directory`.
  * @param fs FS rooted at the directory for this particular TS version, e.g. `types/abs/ts3.1` or `types/abs` when typescriptVersion is undefined.
  */
-function getTypingDataForSingleTypesVersion(typescriptVersion, packageName, ls, fs,
+function getTypingDataForSingleTypesVersion(typescriptVersion, packageName, ls, fs, moduleResolutionHost) {
     var _a;
+    const errors = [];
     const tsconfig = fs.readJson("tsconfig.json");
     const configHost = {
         ...moduleResolutionHost,
@@ -212,9 +225,9 @@ function getTypingDataForSingleTypesVersion(typescriptVersion, packageName, ls,
         useCaseSensitiveFileNames: true,
     };
     const compilerOptions = ts.parseJsonConfigFileContent(tsconfig, configHost, path_1.default.resolve("/", fs.debugPath())).options;
-    checkFilesFromTsConfig(packageName, tsconfig, fs.debugPath());
-    const { types, tests
-    const usedFiles = new Set([...types.keys(), ...tests
+    errors.push(...checkFilesFromTsConfig(packageName, tsconfig, fs.debugPath()));
+    const { types, tests } = (0, module_info_1.allReferencedFiles)((_a = tsconfig.files) !== null && _a !== void 0 ? _a : [], fs, packageName, moduleResolutionHost, compilerOptions);
+    const usedFiles = new Set([...types.keys(), ...tests, "tsconfig.json", "tslint.json"].map((f) => (0, utils_2.slicePrefixes)(f, "node_modules/@types/" + packageName + "/")));
     const otherFiles = ls.includes(unusedFilesName)
         ? fs
             // tslint:disable-next-line:non-literal-fs-path -- Not a reference to the fs package
@@ -223,162 +236,53 @@ function getTypingDataForSingleTypesVersion(typescriptVersion, packageName, ls,
             .filter(Boolean)
         : [];
     if (ls.includes(unusedFilesName) && !otherFiles.length) {
-
+        errors.push(`In ${packageName}: OTHER_FILES.txt is empty.`);
     }
     for (const file of otherFiles) {
         if (!isRelativePath(file)) {
-
+            errors.push(`In ${packageName}: A path segment is empty or all dots ${file}`);
         }
     }
-
+    // Note: findAllUnusedFiles also modifies usedFiles and otherFiles and errors
+    const unusedFiles = findAllUnusedFiles(ls, usedFiles, otherFiles, errors, packageName, fs);
+    if (unusedFiles.length) {
+        errors.push("\n\t* " +
+            unusedFiles.map((unused) => `Unused file ${unused}`).join("\n\t* ") +
+            `\n\t(used files: ${JSON.stringify(Array.from(usedFiles))})`);
+    }
     for (const untestedTypeFile of (0, utils_1.filter)(otherFiles, (name) => name.endsWith(".d.ts") || name.endsWith(".d.mts") || name.endsWith(".d.cts"))) {
         // add d.ts files from OTHER_FILES.txt in order get their dependencies
         // tslint:disable-next-line:non-literal-fs-path -- Not a reference to the fs package
         types.set(untestedTypeFile, (0, module_info_1.createSourceFile)(untestedTypeFile, fs.readFile(untestedTypeFile), moduleResolutionHost, compilerOptions));
     }
-
-
-    // Don't count an import of "x" as a dependency if we saw `declare module "x"` somewhere.
-    const dependenciesSet = new Set([...dependenciesWithDeclaredModules]
-        .filter((m) => !declaredModulesSet.has(m))
-        .map((m) => rootName(m, types, packageName))
-        .filter((dependency) => dependency !== packageName));
-    const testDependencies = [
-        ...(0, module_info_1.getTestDependencies)(packageName, tests.keys(), dependenciesSet, fs, moduleResolutionHost, compilerOptions),
-    ]
-        .filter((m) => !declaredModulesSet.has(m))
-        .map((m) => rootName(m, types, packageName))
-        .filter((dependency) => dependency !== packageName);
-    const { paths } = tsconfig.compilerOptions;
-    const hydratedPackageName = (_a = (0, utils_1.unmangleScopedPackage)(packageName)) !== null && _a !== void 0 ? _a : packageName;
-    if (directoryVersion && hasNonRelativeImports && !(paths && `${hydratedPackageName}/*` in paths)) {
-        const mapping = JSON.stringify([`${packageName}/v${(0, packages_1.formatTypingVersion)(directoryVersion)}/*`]);
-        throw new Error(`${hydratedPackageName}: Older version ${(0, packages_1.formatTypingVersion)(directoryVersion)} must have a "paths" entry of "${hydratedPackageName}/*": ${mapping}`);
-    }
-    const { dependencies, pathMappings } = calculateDependencies(packageName, tsconfig, dependenciesSet, directoryVersion);
-    const tsconfigPathsForHash = JSON.stringify(tsconfig.compilerOptions.paths);
+    if (errors.length)
+        return errors;
     return {
         typescriptVersion,
-        dependencies,
-        testDependencies,
-        pathMappings,
-        globals,
-        declaredModules,
         declFiles: (0, utils_1.sort)(types.keys()),
-        tsconfigPathsForHash,
+        tsconfigPathsForHash: JSON.stringify(tsconfig.compilerOptions.paths),
     };
 }
-/**
- * "foo/bar/baz" -> "foo"; "@foo/bar/baz" -> "@foo/bar"
- * Note: Throws an error for references like
- * "bar/v3" because referencing old versions of *other* packages is illegal;
- * those directories won't exist in the published @types package.
- */
-function rootName(importText, typeFiles, packageName) {
-    let slash = importText.indexOf("/");
-    // Root of `@foo/bar/baz` is `@foo/bar`
-    if (importText.startsWith("@")) {
-        // Use second "/"
-        slash = importText.indexOf("/", slash + 1);
-    }
-    const root = importText.slice(0, slash);
-    const postImport = importText.slice(slash + 1);
-    if (slash > -1 && postImport.match(/v\d+$/) && !typeFiles.has(postImport + ".d.ts") && root !== packageName) {
-        throw new Error(`${importText}: do not directly import specific versions of another types package.
-You should work with the latest version of ${root} instead.`);
-    }
-    return slash === -1 ? importText : root;
-}
-function checkPackageJsonExportsAndAddPJsonEntry(exports, path) {
-    if (exports === undefined)
-        return exports;
-    if (typeof exports === "string") {
-        return exports;
-    }
-    if (typeof exports !== "object") {
-        throw new Error(`Package exports at path ${path} should be an object or string.`);
-    }
-    if (exports === null) {
-        throw new Error(`Package exports at path ${path} should not be null.`);
-    }
-    if (!exports["./package.json"]) {
-        exports["./package.json"] = "./package.json";
-    }
-    return exports;
-}
-function checkPackageJsonImports(imports, path) {
-    if (imports === undefined)
-        return imports;
-    if (typeof imports !== "object") {
-        throw new Error(`Package imports at path ${path} should be an object or string.`);
-    }
-    if (imports === null) {
-        throw new Error(`Package imports at path ${path} should not be null.`);
-    }
-    return imports;
-}
-function checkPackageJsonType(type, path) {
-    if (type === undefined)
-        return type;
-    if (type !== "module") {
-        throw new Error(`Package type at path ${path} can only be 'module'.`);
-    }
-    return type;
-}
-async function checkPackageJsonDependencies(dependencies, path) {
-    if (dependencies === undefined) {
-        // tslint:disable-line strict-type-predicates (false positive)
-        return [];
-    }
-    if (dependencies === null || typeof dependencies !== "object") {
-        // tslint:disable-line strict-type-predicates
-        throw new Error(`${path} should contain "dependencies" or not exist.`);
-    }
-    const deps = [];
-    for (const dependencyName of Object.keys(dependencies)) {
-        // `dependencies` cannot be null because of check above.
-        if (!(await (0, settings_1.getAllowedPackageJsonDependencies)()).has(dependencyName)) {
-            const msg = dependencyName.startsWith("@types/")
-                ? `Dependency ${dependencyName} not in the allowed dependencies list.
-Don't use a 'package.json' for @types dependencies unless this package relies on
-an old version of types that have since been moved to the source repo.
-For example, if package *P* used to have types on Definitely Typed at @types/P,
-but now has its own types, a dependent package *D* will need to use package.json
-to refer to @types/P if it relies on old versions of P's types.
-In this case, please make a pull request to microsoft/DefinitelyTyped-tools adding @types/P to \`packages/definitions-parser/allowedPackageJsonDependencies.txt\`.`
-                : `Dependency ${dependencyName} not in the allowed dependencies list.
-If you are depending on another \`@types\` package, do *not* add it to a \`package.json\`. Path mapping should make the import work.
-For namespaced dependencies you then have to add a \`paths\` mapping from \`@namespace/*\` to \`namespace__*\` in \`tsconfig.json\`.
-If this is an external library that provides typings, please make a pull request to microsoft/DefinitelyTyped-tools adding it to \`packages/definitions-parser/allowedPackageJsonDependencies.txt\`.`;
-            throw new Error(`In ${path}: ${msg}`);
-        }
-        const version = dependencies[dependencyName];
-        if (typeof version !== "string") {
-            // tslint:disable-line strict-type-predicates
-            throw new Error(`In ${path}: Dependency version for ${dependencyName} should be a string.`);
-        }
-        deps.push({ name: dependencyName, version });
-    }
-    return deps;
-}
 function checkFilesFromTsConfig(packageName, tsconfig, directoryPath) {
+    const errors = [];
     const tsconfigPath = `${directoryPath}/tsconfig.json`;
     if (tsconfig.include) {
-
+        errors.push(`In tsconfig, don't use "include", must use "files"`);
     }
     const files = tsconfig.files;
     if (!files) {
-
+        errors.push(`${tsconfigPath} needs to specify "files"`);
+        return errors;
     }
     for (const file of files) {
         if (file.startsWith("./")) {
-
+            errors.push(`In ${tsconfigPath}: Unnecessary "./" at the start of ${file}`);
         }
         if (!isRelativePath(file)) {
-
+            errors.push(`In ${tsconfigPath}: A path segment is empty or all dots ${file}`);
         }
         if (file.endsWith(".d.ts") && file !== "index.d.ts") {
-
+            errors.push(`${packageName}: Only index.d.ts may be listed explicitly in tsconfig's "files" entry.
 Other d.ts files must either be referenced through index.d.ts, tests, or added to OTHER_FILES.txt.`);
         }
         if (!file.endsWith(".d.ts") && !file.startsWith("test/")) {
@@ -388,136 +292,15 @@ Other d.ts files must either be referenced through index.d.ts, tests, or added t
                 ? `Expected file '${file}' to be named '${expectedName}' or to be inside a '${directoryPath}/test/' directory`
                 : `Unexpected file extension for '${file}' -- expected '.ts' or '.tsx' (maybe this should not be in "files", but ` +
                     "OTHER_FILES.txt)";
-
+            errors.push(message);
            }
        }
    }
+    return errors;
 }
 function isRelativePath(path) {
     return path.split(/\//).every((part) => part.length > 0 && !part.match(/^\.+$|[\\\n\r]/));
 }
-function calculateDependencies(packageName, tsconfig, dependencyNames, directoryVersion) {
-    var _a;
-    const paths = (tsconfig.compilerOptions && tsconfig.compilerOptions.paths) || {};
-    const dependencies = {};
-    const pathMappings = {};
-    const scopedPackageName = (_a = (0, utils_1.unmangleScopedPackage)(packageName)) !== null && _a !== void 0 ? _a : packageName;
-    for (const dependencyName of Object.keys(paths)) {
-        const pathMappingList = paths[dependencyName];
-        if (pathMappingList.length !== 1) {
-            throw new Error(`In ${packageName}: Path mapping for ${dependencyName} may only have 1 entry.`);
-        }
-        const pathMapping = pathMappingList[0];
-        if (pathMapping === "./node_modules/" + dependencyName) {
-            // allow passthrough remappings for packages like webpack that have shipped their own types,
-            // but have some dependents on DT that depend on the new types and some that depend on the old types
-            continue;
-        }
-        // Path mapping may be for "@foo/*" -> "foo__*".
-        const unversionedScopedPackageName = (0, utils_1.removeVersionFromPackageName)((0, utils_1.unmangleScopedPackage)(pathMapping));
-        if (unversionedScopedPackageName !== undefined) {
-            if (dependencyName !== unversionedScopedPackageName) {
-                throw new Error(`Expected directory ${pathMapping} to be the path mapping for ${dependencyName}`);
-            }
-            if (!(0, utils_1.hasVersionNumberInMapping)(pathMapping)) {
-                continue;
-            }
-        }
-        // Might have a path mapping for "foo/*" to support subdirectories
-        const rootDirectory = withoutEnd(dependencyName, "/*");
-        if (rootDirectory !== undefined) {
-            if (!(rootDirectory in paths)) {
-                throw new Error(`In ${packageName}: found path mapping for ${dependencyName} but not for ${rootDirectory}`);
-            }
-            continue;
-        }
-        // buffer -> node/buffer may be required because of the non-node 'buffer' package on npm
-        // which DT infrastructure depends on, and which resolves before node's ambient module 'buffer'
-        if (dependencyName === "buffer" && pathMapping === "node/buffer") {
-            dependencies.node = "*";
-            continue;
-        }
-        const pathMappingVersion = parseDependencyVersionFromPath(dependencyName, dependencyName, pathMapping);
-        if (dependencyName === packageName) {
-            if (directoryVersion === undefined) {
-                throw new Error(`In ${packageName}: Latest version of a package should not have a path mapping for itself.`);
-            }
-            if (directoryVersion.major !== pathMappingVersion.major || directoryVersion.minor !== pathMappingVersion.minor) {
-                const correctPathMapping = [`${dependencyName}/v${(0, packages_1.formatTypingVersion)(directoryVersion)}`];
-                throw new Error(`In ${packageName}: Must have a "paths" entry of "${dependencyName}": ${JSON.stringify(correctPathMapping)}`);
-            }
-        }
-        else {
-            if (dependencyNames.has(dependencyName)) {
-                dependencies[dependencyName] = pathMappingVersion;
-            }
-        }
-        // Else, the path mapping may be necessary if it is for a transitive dependency. We will check this in check-parse-results.
-        pathMappings[dependencyName] = pathMappingVersion;
-    }
-    if (directoryVersion !== undefined && !(paths && scopedPackageName in paths)) {
-        const mapping = JSON.stringify([`${packageName}/v${(0, packages_1.formatTypingVersion)(directoryVersion)}`]);
-        throw new Error(`${scopedPackageName}: Older version ${(0, packages_1.formatTypingVersion)(directoryVersion)} must have a "paths" entry of "${scopedPackageName}": ${mapping}`);
-    }
-    for (const dependency of dependencyNames) {
-        if (!dependencies[dependency] && !nodeBuiltins.has(dependency)) {
-            dependencies[dependency] = "*";
-        }
-    }
-    return { dependencies, pathMappings };
-}
-const nodeBuiltins = new Set([
-    "assert",
-    "async_hooks",
-    "buffer",
-    "child_process",
-    "cluster",
-    "console",
-    "constants",
-    "crypto",
-    "dgram",
-    "dns",
-    "domain",
-    "events",
-    "fs",
-    "http",
-    "http2",
-    "https",
-    "module",
-    "net",
-    "os",
-    "path",
-    "perf_hooks",
-    "process",
-    "punycode",
-    "querystring",
-    "readline",
-    "repl",
-    "stream",
-    "string_decoder",
-    "timers",
-    "tls",
-    "tty",
-    "url",
-    "util",
-    "v8",
-    "vm",
-    "zlib",
-]);
-function parseDependencyVersionFromPath(packageName, dependencyName, dependencyPath) {
-    const versionString = (0, utils_1.withoutStart)(dependencyPath, `${(0, utils_1.mangleScopedPackage)(dependencyName)}/`);
-    const version = versionString === undefined ? undefined : parseVersionFromDirectoryName(versionString);
-    if (version === undefined) {
-        throw new Error(`In ${packageName}, unexpected path mapping for ${dependencyName}: '${dependencyPath}'`);
-    }
-    return version;
-}
-function withoutEnd(s, end) {
-    if (s.endsWith(end)) {
-        return s.slice(0, s.length - end.length);
-    }
-    return undefined;
-}
 function hash(files, tsconfigPathsForHash, fs) {
     const fileContents = files.map((f) => `${f}**${readFileAndThrowOnBOM(f, fs)}`);
     let allContent = fileContents.join("||");
@@ -537,22 +320,24 @@ function readFileAndThrowOnBOM(fileName, fs) {
 }
 exports.readFileAndThrowOnBOM = readFileAndThrowOnBOM;
 const unusedFilesName = "OTHER_FILES.txt";
-
+/** Modifies usedFiles and otherFiles and errors */
+function findAllUnusedFiles(ls, usedFiles, otherFiles, errors, packageName, fs) {
     // Double-check that no windows "\\" broke in.
     for (const fileName of usedFiles) {
         if ((0, utils_1.hasWindowsSlashes)(fileName)) {
-
+            errors.push(`In ${packageName}: windows slash detected in ${fileName}`);
         }
     }
-
+    return findAllUnusedRecur(new Set(ls), usedFiles, new Set(otherFiles), errors, fs);
 }
-function
+function findAllUnusedRecur(ls, usedFiles, otherFiles, errors, fs) {
+    const unused = [];
     for (const lsEntry of ls) {
         if (usedFiles.has(lsEntry)) {
             continue;
         }
-        if (
-
+        if (otherFiles.has(lsEntry)) {
+            otherFiles.delete(lsEntry);
             continue;
         }
         if (fs.isDirectory(lsEntry)) {
@@ -563,8 +348,7 @@ function checkAllUsedRecur(ls, usedFiles, unusedFiles, fs) {
            }
            const lssubdir = subdir.readdir();
            if (lssubdir.length === 0) {
-
-                throw new Error(`Empty directory ${subdir.debugPath()} (${(0, utils_1.join)(usedFiles)})`);
+                errors.push(`Empty directory ${subdir.debugPath()} (${(0, utils_1.join)(usedFiles)})`);
            }
            function takeSubdirectoryOutOfSet(originalSet) {
                const subdirSet = new Set();
@@ -577,7 +361,7 @@ function checkAllUsedRecur(ls, usedFiles, unusedFiles, fs) {
                }
                return subdirSet;
            }
-
+            findAllUnusedRecur(lssubdir, takeSubdirectoryOutOfSet(usedFiles), takeSubdirectoryOutOfSet(otherFiles), errors, subdir);
        }
        else {
            if (lsEntry.toLowerCase() !== "readme.md" &&
@@ -585,15 +369,16 @@ function checkAllUsedRecur(ls, usedFiles, unusedFiles, fs) {
                lsEntry !== ".editorconfig" &&
                lsEntry !== ".eslintrc.json" &&
                lsEntry !== unusedFilesName) {
-
+                unused.push(`${fs.debugPath()}/${lsEntry}`);
            }
        }
    }
-    for (const
-        if (usedFiles.has(
-
+    for (const otherFile of otherFiles) {
+        if (usedFiles.has(otherFile)) {
+            errors.push(`File ${fs.debugPath()}${otherFile} listed in ${unusedFilesName} is already reachable from tsconfig.json.`);
        }
-
+        errors.push(`File ${fs.debugPath()}/${otherFile} listed in ${unusedFilesName} does not exist.`);
    }
+    return unused;
 }
 //# sourceMappingURL=definition-parser.js.map