rev-dep 1.5.4 → 2.0.0-alpha-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Readme.md +5 -0
- package/bin.js +66 -1
- package/package.json +6 -49
- package/README.md +0 -558
- package/babel.js +0 -1
- package/dist/babel/babelParsingOptions.d.ts +0 -5
- package/dist/babel/babelParsingOptions.js +0 -15
- package/dist/babel/groupBy.d.ts +0 -1
- package/dist/babel/groupBy.js +0 -14
- package/dist/babel/index.d.ts +0 -1
- package/dist/babel/index.js +0 -449
- package/dist/babel/processCodeTextModificationsArray.d.ts +0 -30
- package/dist/babel/processCodeTextModificationsArray.js +0 -55
- package/dist/babel/template.d.ts +0 -1
- package/dist/babel/template.js +0 -13
- package/dist/babel/transform.d.ts +0 -6
- package/dist/babel/transform.js +0 -66
- package/dist/babel/transformCli.d.ts +0 -1
- package/dist/babel/transformCli.js +0 -18
- package/dist/cli/commonOptions.d.ts +0 -27
- package/dist/cli/commonOptions.js +0 -29
- package/dist/cli/createCommands.d.ts +0 -2
- package/dist/cli/createCommands.js +0 -19
- package/dist/cli/docs/generate.d.ts +0 -20
- package/dist/cli/docs/generate.js +0 -90
- package/dist/cli/docs/index.d.ts +0 -3
- package/dist/cli/docs/index.js +0 -18
- package/dist/cli/docs/template.d.ts +0 -3
- package/dist/cli/docs/template.js +0 -51
- package/dist/cli/entryPoints/index.d.ts +0 -2
- package/dist/cli/entryPoints/index.js +0 -52
- package/dist/cli/entryPoints/types.d.ts +0 -4
- package/dist/cli/entryPoints/types.js +0 -2
- package/dist/cli/files/index.d.ts +0 -2
- package/dist/cli/files/index.js +0 -36
- package/dist/cli/files/types.d.ts +0 -3
- package/dist/cli/files/types.js +0 -2
- package/dist/cli/index.d.ts +0 -1
- package/dist/cli/index.js +0 -10
- package/dist/cli/nodeModules/index.d.ts +0 -2
- package/dist/cli/nodeModules/index.js +0 -36
- package/dist/cli/nodeModules/types.d.ts +0 -3
- package/dist/cli/nodeModules/types.js +0 -2
- package/dist/cli/resolve/formatResults.d.ts +0 -9
- package/dist/cli/resolve/formatResults.js +0 -67
- package/dist/cli/resolve/index.d.ts +0 -2
- package/dist/cli/resolve/index.js +0 -47
- package/dist/cli/resolve/types.d.ts +0 -11
- package/dist/cli/resolve/types.js +0 -2
- package/dist/lib/buildDepsGraph.d.ts +0 -2
- package/dist/lib/buildDepsGraph.js +0 -49
- package/dist/lib/cleanupDpdmDeps.d.ts +0 -3
- package/dist/lib/cleanupDpdmDeps.js +0 -39
- package/dist/lib/getDepsSetWebpack.d.ts +0 -6
- package/dist/lib/getDepsSetWebpack.js +0 -44
- package/dist/lib/getDepsTree.d.ts +0 -1
- package/dist/lib/getDepsTree.js +0 -24
- package/dist/lib/getEntryPoints.d.ts +0 -18
- package/dist/lib/getEntryPoints.js +0 -83
- package/dist/lib/getFilesForEntryPoint.d.ts +0 -8
- package/dist/lib/getFilesForEntryPoint.js +0 -12
- package/dist/lib/getMaxDepthInGraph.d.ts +0 -4
- package/dist/lib/getMaxDepthInGraph.js +0 -21
- package/dist/lib/getNodeModulesForEntryPoint.d.ts +0 -8
- package/dist/lib/getNodeModulesForEntryPoint.js +0 -18
- package/dist/lib/resolve.d.ts +0 -15
- package/dist/lib/resolve.js +0 -60
- package/dist/lib/types.d.ts +0 -12
- package/dist/lib/types.js +0 -2
- package/dist/lib/utils.d.ts +0 -6
- package/dist/lib/utils.js +0 -44
- package/dist/module.d.ts +0 -5
- package/dist/module.js +0 -28

package/dist/babel/babelParsingOptions.js
DELETED
@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.babelParsingOptions = void 0;
-exports.babelParsingOptions = {
-    errorRecovery: true,
-    sourceType: 'module',
-    plugins: [
-        'jsx',
-        'typescript',
-        'objectRestSpread',
-        'classProperties',
-        'asyncGenerators',
-        'decorators-legacy'
-    ]
-};

package/dist/babel/groupBy.d.ts
DELETED
@@ -1 +0,0 @@
-export function groupBy(arr: any, property: any): any;

package/dist/babel/groupBy.js
DELETED
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.groupBy = void 0;
-function groupBy(arr, property) {
-    return arr.reduce((result, obj) => {
-        const key = obj[property];
-        if (!result[key]) {
-            result[key] = [];
-        }
-        result[key].push(obj);
-        return result;
-    }, {});
-}
-exports.groupBy = groupBy;

package/dist/babel/index.d.ts
DELETED
@@ -1 +0,0 @@
-export {};

package/dist/babel/index.js
DELETED
@@ -1,449 +0,0 @@
-"use strict";
-/*eslint-disable @typescript-eslint/no-var-requires */
-Object.defineProperty(exports, "__esModule", { value: true });
-const node_path = require('path');
-const fs = require('fs');
-const parser = require('@babel/parser');
-const utils_1 = require("../lib/utils");
-const template_1 = require("./template");
-const SKIP = Symbol('SKIP');
-const babelParsingOptions_1 = require("./babelParsingOptions");
-const groupBy_1 = require("./groupBy");
-/**
- *
- * TODO
- * + If that has to be used as a codemod, we have to refactor to make sure we don't change structure of other parts of the code and we preserve imports order
- * +- group named imports from the same file
- * + support imports from baseUrl from TS config -> relative | baseUrl | alias
- * + persist the original import alias
- * + allow for a list of files to rewire
- * + use cache for not resolved modules as well
- * + handle type imports properly - we don't preserve the import was a type import
- * + do not touch imports that don't need changes
- */
-module.exports = function plugin({ types }, { tsConfigPath = (0, utils_1.findTsConfig)(), cache = new Map(), includeBarrelExportFiles, excludeBarrelExportFiles = [] }) {
-    const root = tsConfigPath.replace('/tsconfig.json', '');
-    const tsConfigContent = fs.readFileSync(tsConfigPath).toString();
-    const tsConfigContentCleaned = tsConfigContent
-        // remove comments
-        .replace(/^(\s)*\/\//gm, '')
-        .replace(/\/\*.+?\*\//gm, '');
-    const tsConfig = JSON.parse(tsConfigContentCleaned);
-    const aliases = tsConfig.compilerOptions.paths;
-    const aliasesKeys = Object.keys(aliases);
-    const makeRegExpFromAliasExpression = (aliasExpression) => {
-        return new RegExp(`^${aliasExpression.replace('*', '(.+)')}$`);
-    };
-    const aliasesRegexes = Object.keys(aliases).map(makeRegExpFromAliasExpression);
-    // TODO we assume that only one aliased path can exist
-    const aliasedPathRegExps = Object.values(aliases).map(([fistAliasedPath]) => makeRegExpFromAliasExpression(fistAliasedPath));
-    const interpolateAliasWithPath = (aliasKey, aliasedPathRegExp, resolvedSourcePathRelativeToBaseUrl) => {
-        const [_, ...groups] = aliasedPathRegExp.exec(resolvedSourcePathRelativeToBaseUrl);
-        const aliasParts = aliasKey.split('*');
-        const interpolatedAlias = aliasParts.reduce((mergedPath, aliasPart, idx) => {
-            var _a;
-            return `${mergedPath}${aliasPart}${(_a = groups[idx]) !== null && _a !== void 0 ? _a : ''}`;
-        }, '');
-        return interpolatedAlias;
-    };
-    let baseUrlDirs = [];
-    const baseUrl = tsConfig.compilerOptions.baseUrl;
-    if (baseUrl) {
-        const baseDirPath = node_path.join(root, baseUrl);
-        const dirNames = fs
-            .readdirSync(baseDirPath, { withFileTypes: true })
-            .filter((dirent) => dirent.isDirectory())
-            .map((dirent) => dirent.name + '/');
-        baseUrlDirs = dirNames;
-    }
-    const getFile = (original, paths) => {
-        if (paths.length === 0) {
-            console.warn('Cannot resolve import ' + original);
-            return null;
-        }
-        const path = node_path.normalize(paths[0]);
-        try {
-            return [path, fs.readFileSync(path).toString()];
-        }
-        catch (e) {
-            return getFile(original, paths.slice(1));
-        }
-    };
-    const shouldPathBeAnalyzed = (path) => {
-        const aliasRegexIdx = aliasesRegexes.findIndex((aliasRegex) => aliasRegex.test(path));
-        const isRelative = path.startsWith('.');
-        const isAbsolute = path.startsWith('/');
-        const isBaseUrlPath = baseUrlDirs.some((dir) => path.startsWith(dir));
-        return aliasRegexIdx > -1 || isRelative || isAbsolute || isBaseUrlPath;
-    };
-    const getCacheKey = (identifier, filePath) => `${identifier}-${filePath}`;
-    const lookup = (identifier, filePath, cwd) => {
-        const cached = cache.get(getCacheKey(identifier, filePath));
-        if (cached) {
-            return { ...cached, isCached: true };
-        }
-        const withExtension = /(\.ts|\.tsx)$/.test(filePath)
-            ? [filePath]
-            : [
-                `${filePath}.ts`,
-                `${filePath}.tsx`,
-                `${filePath}/index.ts`,
-                `${filePath}/index.tsx`,
-                `${filePath}.js`,
-                `${filePath}.jsx`,
-                `${filePath}/index.js`,
-                `${filePath}/index.jsx`
-            ];
-        const fileInfo = getFile(filePath, withExtension);
-        if (!fileInfo) {
-            return { resolvedAs: null, visitedFiles: [] };
-        }
-        const [resolvedFilePath, file] = fileInfo;
-        try {
-            const ast = parser.parse(file, babelParsingOptions_1.babelParsingOptions);
-            /**
-             * {
-             * identifier?: string,
-             * source: string
-             * }
-             */
-            const toLookup = [];
-            let resolvedAs = null;
-            ast.program.body.forEach((declaration) => {
-                var _a, _b, _c;
-                if (resolvedAs === null) {
-                    if (types.isExportNamedDeclaration(declaration)) {
-                        if (((_a = declaration.declaration) === null || _a === void 0 ? void 0 : _a.type.startsWith('TS')) &&
-                            ((_b = declaration.declaration) === null || _b === void 0 ? void 0 : _b.type.endsWith('Declaration'))) {
-                            const typeName = declaration.declaration.id.name;
-                            if (typeName === identifier) {
-                                resolvedAs = {
-                                    // This should be 'type' of something else, but ESLint would handle that
-                                    type: 'named',
-                                    identifier,
-                                    source: filePath
-                                };
-                            }
-                        }
-                        else if (types.isVariableDeclaration(declaration.declaration)) {
-                            const hasIdentifier = declaration.declaration.declarations.find((declarator) => {
-                                return declarator.id.name === identifier;
-                            });
-                            if (hasIdentifier) {
-                                resolvedAs = {
-                                    type: 'named',
-                                    identifier,
-                                    source: filePath
-                                };
-                            }
-                        }
-                        else if (types.isFunctionDeclaration(declaration.declaration) ||
-                            types.isClassDeclaration(declaration.declaration)) {
-                            if (declaration.declaration.id.name === identifier) {
-                                resolvedAs = {
-                                    type: 'named',
-                                    identifier,
-                                    source: filePath
-                                };
-                            }
-                        }
-                        else {
-                            const source = (_c = declaration.source) === null || _c === void 0 ? void 0 : _c.value;
-                            declaration.specifiers.forEach((specifier) => {
-                                if (types.isExportSpecifier(specifier)) {
-                                    if (specifier.exported.name === identifier) {
-                                        if (specifier.local.name === 'default' && source) {
-                                            resolvedAs = {
-                                                type: 'default',
-                                                identifier,
-                                                source: getModulePath(source, resolvedFilePath, cwd)
-                                            };
-                                        }
-                                        else if (source === undefined) {
-                                            // Here we could check if identifier comes from import statement, and if so, lookup deeper
-                                            resolvedAs = {
-                                                type: 'named',
-                                                identifier,
-                                                source: filePath
-                                            };
-                                        }
-                                        else if (shouldPathBeAnalyzed(source)) {
-                                            toLookup.push({
-                                                identifier: specifier.local.name,
-                                                source: getModulePath(source, resolvedFilePath, cwd)
-                                            });
-                                        }
-                                    }
-                                }
-                            });
-                        }
-                    }
-                    else if (types.isExportAllDeclaration(declaration) &&
-                        shouldPathBeAnalyzed(declaration.source.value)) {
-                        toLookup.push({
-                            identifier,
-                            source: getModulePath(declaration.source.value, resolvedFilePath, cwd)
-                        });
-                    }
-                }
-            });
-            if (resolvedAs) {
-                return { resolvedAs, visitedFiles: [resolvedAs.source] };
-            }
-            const nestedResult = toLookup
-                .map(({ identifier, source }) => lookup(identifier, source, cwd))
-                .filter((lookUpResult) => lookUpResult.resolvedAs !== null);
-            if (nestedResult[0]) {
-                return {
-                    resolvedAs: nestedResult[0].resolvedAs,
-                    visitedFiles: [resolvedFilePath, ...nestedResult[0].visitedFiles]
-                };
-            }
-            return { resolvedAs: null, visitedFiles: [] };
-        }
-        catch (e) {
-            console.log('Lookup parse error', filePath, e);
-            process.exit(0);
-        }
-    };
-    const getModulePath = (sourcePath, fileName, cwd) => {
-        var _a;
-        const aliasRegexIdx = aliasesRegexes.findIndex((aliasRegex) => aliasRegex.test(sourcePath));
-        const relativeFileName = node_path.relative(cwd, fileName);
-        const aliasKey = aliasesKeys[aliasRegexIdx];
-        const alias = (_a = aliases[aliasKey]) === null || _a === void 0 ? void 0 : _a[0]; // TODO we assume that only one aliased path can exist in config
-        const isAbsoluteToBaseDir = baseUrlDirs.some((baseUrlDir) => sourcePath.startsWith(baseUrlDir));
-        let modulePath = '';
-        if (alias) {
-            let relative = alias;
-            if (aliasKey.endsWith('*')) {
-                const aliasKeyPrefix = aliasKey.replace('*', '');
-                relative = alias.replace('*', sourcePath.replace(aliasKeyPrefix, ''));
-            }
-            modulePath = node_path.resolve(cwd, relative);
-        }
-        else if (isAbsoluteToBaseDir) {
-            modulePath = node_path.join(cwd, sourcePath);
-        }
-        else {
-            // we need ../ to skip current file name
-            modulePath = node_path.join(cwd, relativeFileName, '../' + sourcePath);
-        }
-        return node_path.normalize(modulePath);
-    };
-    const getImportKind = (sourcePath) => {
-        const aliasRegexIdx = aliasesRegexes.findIndex((aliasRegex) => aliasRegex.test(sourcePath));
-        const isRelative = sourcePath.startsWith('.');
-        const isBaseUrlPath = baseUrlDirs.some((dir) => sourcePath.startsWith(dir));
-        if (aliasRegexIdx > -1) {
-            return 'aliased';
-        }
-        if (isRelative) {
-            return 'relative';
-        }
-        if (isBaseUrlPath) {
-            return 'baseUrl';
-        }
-        throw new Error('Could not determine import kind');
-    };
-    return {
-        visitor: {
-            Program() {
-                // console.log('Cache size', cache.size)
-            },
-            ImportDeclaration(path, state) {
-                const filename = state.filename;
-                const getImportSourceFormatted = (resolvedSourcePath, importKind) => {
-                    const baseDirPath = node_path.join(root, baseUrl);
-                    if (importKind === 'baseUrl') {
-                        const relativeToBaseUrl = node_path.relative(baseDirPath, resolvedSourcePath);
-                        return relativeToBaseUrl;
-                    }
-                    if (importKind === 'aliased') {
-                        const originalSource = path.node.source.value;
-                        const currentAliasIdx = aliasesRegexes.findIndex((aliasRegex) => aliasRegex.test(originalSource));
-                        const resolvedSourcePathRelativeToBaseUrl = resolvedSourcePath
-                            .replace(baseDirPath, '')
-                            .replace(/^\//, '');
-                        // Try to use current alias if it matches new path
-                        if (currentAliasIdx > -1) {
-                            const aliasKey = aliasesKeys[currentAliasIdx];
-                            const aliasedPathRegExp = aliasedPathRegExps[currentAliasIdx];
-                            if (aliasedPathRegExp.test(resolvedSourcePathRelativeToBaseUrl)) {
-                                return interpolateAliasWithPath(aliasKey, aliasedPathRegExp, resolvedSourcePathRelativeToBaseUrl);
-                            }
-                        }
-                        // Try finding matching alias
-                        const newMatchingAliasIndex = aliasedPathRegExps.findIndex((aliasedPathRegexp) => aliasedPathRegexp.test(resolvedSourcePathRelativeToBaseUrl));
-                        if (newMatchingAliasIndex > -1) {
-                            const aliasKey = aliasesKeys[newMatchingAliasIndex];
-                            const aliasedPathRegExp = aliasedPathRegExps[newMatchingAliasIndex];
-                            return interpolateAliasWithPath(aliasKey, aliasedPathRegExp, resolvedSourcePathRelativeToBaseUrl);
-                        }
-                    }
-                    const rel = node_path.relative(node_path.dirname(filename), resolvedSourcePath);
-                    const whatever = rel.startsWith('.') ? rel : './' + rel;
-                    // remove file extension
-                    return whatever.replace(/\.(ts|js|tsx|jsx|cjs|mjs)$/, '');
-                };
-                const node = path.node;
-                const isTypeImport = node.importKind === 'type';
-                const source = node.source;
-                if (source.type !== 'StringLiteral') {
-                    return;
-                }
-                if (node.specifiers.length === 0) {
-                    // Skip imports without 'from'
-                    return;
-                }
-                const shouldSkip = node[SKIP] || !shouldPathBeAnalyzed(source.value);
-                if (shouldSkip) {
-                    return;
-                }
-                const importKind = getImportKind(source.value);
-                const modulePath = getModulePath(source.value, filename, root);
-                const defaultSpecifier = node.specifiers.find((specifier) => specifier.type === 'ImportDefaultSpecifier' // import $$ from '$$'
-                );
-                const namespaceSpecifier = node.specifiers.find((specifier) => specifier.type === 'ImportNamespaceSpecifier' // import * as $$ from '$$'
-                );
-                const specifiers = node.specifiers.filter((specifier) => specifier.type === 'ImportSpecifier' // import { $$ } from '$$'
-                );
-                const results = specifiers.map((specifier) => {
-                    const importedName = specifier.imported.name;
-                    const result = lookup(importedName, modulePath, root);
-                    if (!(result === null || result === void 0 ? void 0 : result.isCached)) {
-                        const cacheKey = getCacheKey(importedName, modulePath);
-                        // console.log('resolved not cached', cacheKey, result)
-                        const originalImport = {
-                            identifier: importedName,
-                            local: specifier.local.name,
-                            source: source.value // cannot cache non absolute path
-                        };
-                        const originalImportToCache = {
-                            identifier: importedName,
-                            local: specifier.local.name,
-                            source: modulePath
-                        };
-                        const originalResolution = {
-                            resolvedAs: originalImportToCache,
-                            visitedFiles: []
-                        };
-                        if (!result.resolvedAs) {
-                            cache.set(cacheKey, originalResolution);
-                            return originalImport;
-                        }
-                        if (includeBarrelExportFiles &&
-                            !includeBarrelExportFiles.some((fileThatHasToBeVisited) => result.visitedFiles.includes(fileThatHasToBeVisited))) {
-                            cache.set(cacheKey, originalResolution);
-                            return originalImport;
-                        }
-                        if (excludeBarrelExportFiles.some((fileThatCannotBeVisited) => result.visitedFiles.includes(fileThatCannotBeVisited))) {
-                            cache.set(cacheKey, originalResolution);
-                            return originalImport;
-                        }
-                        cache.set(cacheKey, result);
-                    }
-                    return {
-                        ...result.resolvedAs,
-                        source: getImportSourceFormatted(result.resolvedAs.source, importKind),
-                        local: specifier.local.name
-                    };
-                });
-                const defaultResult = defaultSpecifier
-                    ? lookup('default', modulePath, root)
-                    : null;
-                if (defaultResult && !defaultResult.isCached) {
-                    const cacheKey = getCacheKey('default', modulePath);
-                    const originalImportToCache = {
-                        source: modulePath
-                    };
-                    const originalResolution = {
-                        resolvedAs: originalImportToCache,
-                        visitedFiles: []
-                    };
-                    if (!defaultResult.resolvedAs) {
-                        cache.set(cacheKey, originalResolution);
-                    }
-                    else if (includeBarrelExportFiles &&
-                        !includeBarrelExportFiles.some((fileThatHasToBeVisited) => defaultResult.visitedFiles.includes(fileThatHasToBeVisited))) {
-                        cache.set(cacheKey, originalResolution);
-                    }
-                    else if (excludeBarrelExportFiles.some((fileThatCannotBeVisited) => defaultResult.visitedFiles.includes(fileThatCannotBeVisited))) {
-                        cache.set(cacheKey, originalResolution);
-                    }
-                    else {
-                        cache.set(cacheKey, defaultResult);
-                    }
-                }
-                const buildNamed = (0, template_1.template)(`
-                    import { %%IMPORT_NAME%% } from '%%SOURCE%%';
-                `);
-                const buildNamedWithAlias = (0, template_1.template)(`
-                    import { %%IMPORTED_NAME%% as %%LOCAL_NAME%% } from '%%SOURCE%%';
-                `);
-                const buildDefault = (0, template_1.template)(`
-                    import %%IMPORT_NAME%% from '%%SOURCE%%';
-                `);
-                const buildNamespace = (0, template_1.template)(`
-                    import * as %%IMPORT_NAME%% from '%%SOURCE%%';
-                `);
-                const defaultImport = (defaultResult === null || defaultResult === void 0 ? void 0 : defaultResult.resolvedAs)
-                    ? [
-                        buildDefault({
-                            IMPORT_NAME: defaultSpecifier.local.name,
-                            SOURCE: getImportSourceFormatted(defaultResult.resolvedAs.source, importKind)
-                        })
-                    ]
-                    : defaultSpecifier
-                        ? [
-                            buildDefault({
-                                IMPORT_NAME: defaultSpecifier.local.name,
-                                SOURCE: source.value
-                            })
-                        ]
-                        : [];
-                const namespaceImport = namespaceSpecifier
-                    ? [
-                        buildNamespace({
-                            IMPORT_NAME: namespaceSpecifier.local.name,
-                            SOURCE: source.value
-                        })
-                    ]
-                    : [];
-                const importsFromNamedGroupedBySource = Object.values((0, groupBy_1.groupBy)(results, 'source'));
-                const named = importsFromNamedGroupedBySource.map((imports) => {
-                    const source = imports[0].source;
-                    const defaultImport = imports.find(({ type }) => type === 'default');
-                    const nonDefault = imports.filter(({ type }) => type !== 'default');
-                    const defaultPart = defaultImport
-                        ? `${defaultImport.identifier}`
-                        : null;
-                    const nonDefaultPart = nonDefault.length > 0
-                        ? nonDefault
-                            .map(({ identifier, local }) => identifier !== local
-                                ? `${identifier} as ${local}`
-                                : identifier)
-                            .join(', ')
-                        : null;
-                    return `import ${isTypeImport ? 'type ' : ''}${defaultPart ? `${defaultPart}${nonDefaultPart ? ', ' : ''}` : ''}${nonDefaultPart ? `{ ${nonDefaultPart} }` : ''} from '${source}';`;
-                });
-                const newImports = [...namespaceImport, ...defaultImport, ...named].map((node) => {
-                    return node;
-                });
-                if (!state.file.metadata) {
-                    state.file.metadata = {};
-                }
-                if (!state.file.metadata[filename]) {
-                    state.file.metadata[filename] = [];
-                }
-                const modification = {
-                    modificationCode: newImports.join('\n'),
-                    start: path.node.start,
-                    end: path.node.end,
-                    loc: path.node.loc
-                };
-                state.file.metadata[filename].push(modification);
-            }
-        }
-    };
-};

package/dist/babel/processCodeTextModificationsArray.d.ts
DELETED
@@ -1,30 +0,0 @@
-export declare type CodeModification = {
-    modificationCode: string;
-};
-export declare type CodeChange = CodeModification;
-export declare type CodeChangeWithLocation = CodeModification & MatchPosition;
-export declare type MatchPosition = {
-    start: number;
-    end: number;
-    loc: Location;
-};
-declare type ModifyCodeAsTextParams = {
-    code: string;
-    modificationCode?: string;
-    alreadyChangedCodes?: string[];
-    location: Pick<MatchPosition, 'start' | 'end'>;
-};
-declare type ProcessCodeModificationsArrayParams = {
-    code: string;
-    changes: CodeChangeWithLocation[];
-};
-export declare const regExpTest: (regExp: RegExp, text: string) => boolean;
-export declare function modifyCodeAsText({ code, modificationCode, location }: ModifyCodeAsTextParams): {
-    fileCode: string;
-    locationsChange: {
-        from: number;
-        to: number;
-    };
-};
-export declare function processTextCodeModificationsArray({ code, changes }: ProcessCodeModificationsArrayParams): string;
-export {};

package/dist/babel/processCodeTextModificationsArray.js
DELETED
@@ -1,55 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.processTextCodeModificationsArray = exports.modifyCodeAsText = exports.regExpTest = void 0;
-const regExpTest = (regExp, text) => {
-    if (!text) {
-        return false;
-    }
-    const matches = text.match(regExp);
-    return matches !== null && matches.length > 0;
-};
-exports.regExpTest = regExpTest;
-function modifyCodeAsText({ code, modificationCode, location }) {
-    let fileCode = code;
-    const codeBeforeMatch = fileCode.slice(0, location.start);
-    const codeAfterMatch = fileCode.slice(location.end);
-    const replacedCodeLength = location.end - location.start;
-    const replacementCodeLength = modificationCode.length;
-    const locationsChange = {
-        from: location.end,
-        to: location.end + replacementCodeLength - replacedCodeLength
-    };
-    fileCode = `${codeBeforeMatch}${modificationCode}${codeAfterMatch}`;
-    return { fileCode, locationsChange };
-}
-exports.modifyCodeAsText = modifyCodeAsText;
-function processTextCodeModificationsArray({ code, changes }) {
-    let modifiedCode = code;
-    /**
-     * Include only changes that are unique by it's location.
-     * Remove changes that are inside range of other changes
-     */
-    const pendingChanges = changes.filter((change, changeIdx) => !changes.some((otherChange, otherChangeIdx) => otherChangeIdx !== changeIdx &&
-        otherChange.start <= change.start &&
-        otherChange.end >= change.end &&
-        // insert changes has the same start and end to distinguish them from anchor node, that might have other changes attached
-        change.start !== change.end));
-    while (pendingChanges.length > 0) {
-        const change = pendingChanges.shift();
-        const { locationsChange, fileCode } = modifyCodeAsText({
-            code: modifiedCode,
-            modificationCode: change.modificationCode,
-            location: { start: change.start, end: change.end }
-        });
-        modifiedCode = fileCode;
-        pendingChanges.forEach((pendingChange) => {
-            if (pendingChange.start >= locationsChange.from) {
-                const diff = locationsChange.to - locationsChange.from;
-                pendingChange.end += diff;
-                pendingChange.start += diff;
-            }
-        });
-    }
-    return modifiedCode;
-}
-exports.processTextCodeModificationsArray = processTextCodeModificationsArray;

package/dist/babel/template.d.ts
DELETED
@@ -1 +0,0 @@
-export declare function template(template: string): (params: Record<string, string>) => string;

package/dist/babel/template.js
DELETED
@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.template = void 0;
-function template(template) {
-    return (params) => {
-        let code = template.trim();
-        Object.entries(params).forEach(([key, value]) => {
-            code = code.replace(new RegExp(`%%${key}%%`, 'g'), value);
-        });
-        return code;
-    };
-}
-exports.template = template;

package/dist/babel/transform.js
DELETED
@@ -1,66 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.transform = void 0;
-/*eslint-disable @typescript-eslint/no-var-requires */
-const { getFilesList } = require('@codeque/core');
-const babelCore = require('@babel/core');
-const parser = require('@babel/parser');
-const fs = require('fs');
-const path = require('path');
-const babelParsingOptions_1 = require("./babelParsingOptions");
-const processCodeTextModificationsArray_1 = require("./processCodeTextModificationsArray");
-const transform = async ({ rootPath, inputFilePath, includeBarrelExportFiles, excludeBarrelExportFiles }) => {
-    const root = path.resolve(rootPath);
-    const resolvedInputFilePath = inputFilePath
-        ? path.join(root, inputFilePath)
-        : undefined;
-    console.log('root', root);
-    const filesList = resolvedInputFilePath
-        ? [path.resolve(resolvedInputFilePath)]
-        : await getFilesList({
-            searchRoot: root,
-            extensionTester: /\.(ts|tsx)$/
-        });
-    const errors = [];
-    let progressCount = 0;
-    let cache = new Map();
-    for (const filePath of filesList) {
-        try {
-            const fileName = path.parse(filePath).name;
-            const fileContent = fs.readFileSync(filePath).toString();
-            const result = babelCore.transformFileSync(filePath, {
-                plugins: [
-                    [
-                        __dirname + '/index.js',
-                        {
-                            tsConfigPath: path.join(root, 'tsconfig.json'),
-                            cache,
-                            includeBarrelExportFiles,
-                            excludeBarrelExportFiles
-                        }
-                    ]
-                ],
-                parserOpts: babelParsingOptions_1.babelParsingOptions,
-                filename: fileName
-            });
-            const changes = result.metadata[filePath];
-            if ((changes === null || changes === void 0 ? void 0 : changes.length) > 0) {
-                const resultCode = (0, processCodeTextModificationsArray_1.processTextCodeModificationsArray)({
-                    code: fileContent,
-                    changes
-                });
-                fs.writeFileSync(filePath, resultCode);
-            }
-            progressCount++;
-            if (progressCount % 100 === 0) {
-                console.log(`${progressCount}+${errors.length}/${filesList.length}`);
-            }
-        }
-        catch (e) {
-            errors.push(e);
-        }
-    }
-    console.log(errors);
-    console.log(`Done: ${progressCount}/${filesList.length}; Failed: ${errors.length}`);
-};
-exports.transform = transform;