@lage-run/hasher 0.2.2 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.json +37 -1
- package/CHANGELOG.md +11 -2
- package/lib/FileHasher.d.ts +13 -0
- package/lib/FileHasher.js +181 -0
- package/lib/PackageTree.d.ts +20 -0
- package/lib/PackageTree.js +178 -0
- package/lib/TargetHasher.d.ts +47 -0
- package/lib/TargetHasher.js +218 -0
- package/lib/__tests__/TargetHasher.test.js +128 -0
- package/lib/__tests__/getPackageDeps.test.js +70 -67
- package/lib/__tests__/resolveDependenciesHelper.js +19 -13
- package/lib/__tests__/resolveExternalDependencies.test.js +16 -16
- package/lib/__tests__/resolveInternalDependencies.test.js +12 -12
- package/lib/getPackageDeps.js +25 -15
- package/lib/hashStrings.d.ts +1 -0
- package/lib/hashStrings.js +28 -0
- package/lib/index.d.ts +1 -14
- package/lib/index.js +6 -70
- package/lib/nameAtVersion.d.ts +1 -0
- package/lib/nameAtVersion.js +13 -0
- package/lib/resolveExternalDependencies.js +18 -12
- package/lib/resolveInternalDependencies.js +8 -4
- package/package.json +9 -5
- package/lib/__tests__/createPackageHashes.test.js +0 -44
- package/lib/__tests__/getRepoDeps.test.d.ts +0 -1
- package/lib/__tests__/getRepoDeps.test.js +0 -253
- package/lib/__tests__/getRepoState.test.d.ts +0 -1
- package/lib/__tests__/getRepoState.test.js +0 -104
- package/lib/__tests__/hashOfFiles.test.d.ts +0 -1
- package/lib/__tests__/hashOfFiles.test.js +0 -103
- package/lib/__tests__/helpers.test.d.ts +0 -1
- package/lib/__tests__/helpers.test.js +0 -28
- package/lib/__tests__/index.test.d.ts +0 -1
- package/lib/__tests__/index.test.js +0 -99
- package/lib/createPackageHashes.d.ts +0 -4
- package/lib/createPackageHashes.js +0 -48
- package/lib/getRepoState.d.ts +0 -76
- package/lib/getRepoState.js +0 -256
- package/lib/hashOfFiles.d.ts +0 -14
- package/lib/hashOfFiles.js +0 -71
- package/lib/hashOfPackage.d.ts +0 -9
- package/lib/hashOfPackage.js +0 -65
- package/lib/helpers.d.ts +0 -3
- package/lib/helpers.js +0 -47
- package/lib/repoInfo.d.ts +0 -26
- package/lib/repoInfo.js +0 -65
- /package/lib/__tests__/{createPackageHashes.test.d.ts → TargetHasher.test.d.ts} +0 -0
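In summary: this major release replaces the previous hashing modules (getRepoState, hashOfFiles, hashOfPackage, createPackageHashes, helpers, repoInfo) with three new classes, FileHasher, PackageTree, and TargetHasher, plus small hashStrings and nameAtVersion utilities, and migrates the tests from mutating checked-in fixture directories to temporary fixtures from @lage-run/monorepo-fixture.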
package/lib/TargetHasher.js (new file)
@@ -0,0 +1,218 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+Object.defineProperty(exports, "TargetHasher", {
+    enumerable: true,
+    get: function() {
+        return TargetHasher;
+    }
+});
+const _globhasher = require("glob-hasher");
+const _fastglob = /*#__PURE__*/ _interop_require_default(require("fast-glob"));
+const _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
+const _path = /*#__PURE__*/ _interop_require_default(require("path"));
+const _workspacetools = require("workspace-tools");
+const _infoFromPackageJson = require("workspace-tools/lib/infoFromPackageJson.js");
+const _hashStrings = require("./hashStrings.js");
+const _resolveInternalDependencies = require("./resolveInternalDependencies.js");
+const _resolveExternalDependencies = require("./resolveExternalDependencies.js");
+const _FileHasher = require("./FileHasher.js");
+const _PackageTree = require("./PackageTree.js");
+function _define_property(obj, key, value) {
+    if (key in obj) {
+        Object.defineProperty(obj, key, {
+            value: value,
+            enumerable: true,
+            configurable: true,
+            writable: true
+        });
+    } else {
+        obj[key] = value;
+    }
+    return obj;
+}
+function _interop_require_default(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+class TargetHasher {
+    getPackageInfos(workspacePackages) {
+        const { root } = this.options;
+        const packageInfos = {};
+        if (workspacePackages.length) {
+            for (const pkg of workspacePackages){
+                packageInfos[pkg.name] = pkg.packageJson;
+            }
+        } else {
+            const packageJsonPath = _path.default.join(root, "package.json");
+            if (_fs.default.existsSync(packageJsonPath)) {
+                try {
+                    const packageJson = JSON.parse(_fs.default.readFileSync(packageJsonPath, "utf-8"));
+                    const rootInfo = (0, _infoFromPackageJson.infoFromPackageJson)(packageJson, packageJsonPath);
+                    if (rootInfo) {
+                        packageInfos[rootInfo.name] = rootInfo;
+                    }
+                } catch (e) {
+                    throw new Error(`Invalid package.json file detected ${packageJsonPath}: ${e?.message || e}`);
+                }
+            }
+        }
+        return packageInfos;
+    }
+    expandInputPatterns(patterns, target) {
+        const expandedPatterns = {};
+        for (const pattern of patterns){
+            if (pattern.startsWith("^") || pattern.startsWith("!^")) {
+                const matchPattern = pattern.replace("^", "");
+                // get all the packages that are transitive deps and add them to the list
+                const queue = [
+                    target.packageName
+                ];
+                const visited = new Set();
+                while(queue.length > 0){
+                    const pkg = queue.pop();
+                    if (visited.has(pkg)) {
+                        continue;
+                    }
+                    visited.add(pkg);
+                    if (pkg !== target.packageName) {
+                        expandedPatterns[pkg] = expandedPatterns[pkg] ?? [];
+                        expandedPatterns[pkg].push(matchPattern);
+                    }
+                    if (this.dependencyMap.dependencies.has(pkg)) {
+                        const deps = this.dependencyMap.dependencies.get(pkg);
+                        if (deps) {
+                            for (const dep of deps){
+                                queue.push(dep);
+                            }
+                        }
+                    }
+                }
+            } else {
+                const pkg = target.packageName;
+                expandedPatterns[pkg] = expandedPatterns[pkg] ?? [];
+                expandedPatterns[pkg].push(pattern);
+            }
+        }
+        return expandedPatterns;
+    }
+    ensureInitialized() {
+        if (!this.initializedPromise) {
+            throw new Error("TargetHasher is not initialized");
+        }
+    }
+    async initialize() {
+        const { environmentGlob , root } = this.options;
+        if (this.initializedPromise) {
+            await this.initializedPromise;
+            return;
+        }
+        this.initializedPromise = Promise.all([
+            this.fileHasher.readManifest().then(()=>(0, _fastglob.default)(environmentGlob, {
+                cwd: root
+            })).then((files)=>this.fileHasher.hash(files)).then((hash)=>this.globalInputsHash = hash),
+            (0, _workspacetools.getWorkspacesAsync)(root).then((workspaceInfo)=>this.workspaceInfo = workspaceInfo).then(()=>{
+                this.packageInfos = this.getPackageInfos(this.workspaceInfo);
+                this.dependencyMap = (0, _workspacetools.createDependencyMap)(this.packageInfos, {
+                    withDevDependencies: true,
+                    withPeerDependencies: false
+                });
+                this.packageTree = new _PackageTree.PackageTree({
+                    root,
+                    packageInfos: this.packageInfos,
+                    // TODO: (optimization) false if process.env.TF_BUILD || process.env.CI
+                    includeUntracked: true
+                });
+                return this.packageTree.initialize();
+            }),
+            (0, _workspacetools.parseLockFile)(root).then((lockInfo)=>this.lockInfo = lockInfo)
+        ]);
+        await this.initializedPromise;
+    }
+    async hash(target) {
+        this.ensureInitialized();
+        const { root } = this.options;
+        if (target.cwd === root && target.cache) {
+            if (!target.inputs) {
+                throw new Error("Root-level targets must have `inputs` defined if it has cache enabled.");
+            }
+            const files = await (0, _fastglob.default)(target.inputs, {
+                cwd: root
+            });
+            const fileFashes = (0, _globhasher.hash)(files, {
+                cwd: root
+            }) ?? {};
+            const hashes = Object.values(fileFashes);
+            return (0, _hashStrings.hashStrings)(hashes);
+        }
+        // 1. add hash of target's inputs
+        // 2. add hash of target packages' internal and external deps
+        const { dependencies , devDependencies } = this.packageInfos[target.packageName];
+        const workspaceInfo = this.workspaceInfo;
+        const parsedLock = this.lockInfo;
+        const allDependencies = {
+            ...dependencies,
+            ...devDependencies
+        };
+        const internalDeps = (0, _resolveInternalDependencies.resolveInternalDependencies)(allDependencies, workspaceInfo);
+        const externalDeps = (0, _resolveExternalDependencies.resolveExternalDependencies)(allDependencies, workspaceInfo, parsedLock);
+        const resolvedDependencies = [
+            ...internalDeps,
+            ...externalDeps
+        ].sort();
+        const inputs = target.inputs ?? [
+            "**/*"
+        ];
+        const packagePatterns = this.expandInputPatterns(inputs, target);
+        const files = [];
+        for (const [pkg, patterns] of Object.entries(packagePatterns)){
+            const packageFiles = this.packageTree.getPackageFiles(pkg, patterns);
+            files.push(...packageFiles);
+        }
+        const fileHashes = this.fileHasher.hash(files) ?? {}; // this list is sorted by file name
+        // get target hashes
+        const targetDepHashes = target.dependencies?.sort().map((targetDep)=>this.targetHashes[targetDep]);
+        const combinedHashes = [
+            // Environmental hashes
+            ...Object.values(this.globalInputsHash ?? {}),
+            `${target.id}|${JSON.stringify(this.options.cliArgs)}`,
+            this.options.cacheKey || "",
+            // File content hashes based on target.inputs
+            ...Object.values(fileHashes),
+            // Dependency hashes
+            ...resolvedDependencies,
+            ...targetDepHashes
+        ].filter(Boolean);
+        const hashString = (0, _hashStrings.hashStrings)(combinedHashes);
+        this.targetHashes[target.id] = hashString;
+        return hashString;
+    }
+    async cleanup() {
+        await this.fileHasher.writeManifest();
+    }
+    constructor(options){
+        _define_property(this, "options", void 0);
+        _define_property(this, "fileHasher", void 0);
+        _define_property(this, "packageTree", void 0);
+        _define_property(this, "initializedPromise", void 0);
+        _define_property(this, "packageInfos", void 0);
+        _define_property(this, "workspaceInfo", void 0);
+        _define_property(this, "globalInputsHash", void 0);
+        _define_property(this, "lockInfo", void 0);
+        _define_property(this, "targetHashes", void 0);
+        _define_property(this, "dependencyMap", void 0);
+        this.options = options;
+        this.packageInfos = {};
+        this.targetHashes = {};
+        this.dependencyMap = {
+            dependencies: new Map(),
+            dependents: new Map()
+        };
+        const { root } = options;
+        this.fileHasher = new _FileHasher.FileHasher({
+            root
+        });
+    }
+}
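For orientation, here is a minimal sketch of the TargetHasher lifecycle implied by the code above: initialize once, hash each target, then cleanup. The root path and target values are illustrative placeholders, not values from this diff; the target shape follows the createTarget helper in the test file below.

const { TargetHasher } = require("@lage-run/hasher");

async function hashBuildTarget() {
    // Hypothetical workspace root and target; see createTarget() in the tests below.
    const hasher = new TargetHasher({
        root: "/repos/my-monorepo",
        environmentGlob: ["lage.config.js"]
    });
    await hasher.initialize(); // hashes global inputs, reads workspaces and the lockfile
    const hash = await hasher.hash({
        id: "package-a#build",
        label: "package-a#build",
        packageName: "package-a",
        task: "build",
        cwd: "/repos/my-monorepo/packages/package-a",
        dependencies: [],
        dependents: [],
        depSpecs: []
    });
    await hasher.cleanup(); // writes the file-hash manifest back to disk
    return hash;
}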
package/lib/__tests__/TargetHasher.test.js (new file)
@@ -0,0 +1,128 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+const _path = /*#__PURE__*/ _interop_require_default(require("path"));
+const _index = require("../index");
+const _monorepofixture = require("@lage-run/monorepo-fixture");
+function _interop_require_default(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+const fixturesPath = _path.default.join(__dirname, "..", "__fixtures__");
+describe("The main Hasher class", ()=>{
+    async function setupFixture(fixture = "monorepo") {
+        const monorepo = new _monorepofixture.Monorepo("fixture");
+        await monorepo.init(_path.default.join(fixturesPath, fixture));
+        return monorepo;
+    }
+    async function getHash(hasher, target) {
+        await hasher.initialize();
+        const hash = await hasher.hash(target);
+        await hasher.cleanup();
+        return hash;
+    }
+    function createTarget(root, packageName, task) {
+        return {
+            cwd: _path.default.join(root, "packages", packageName),
+            dependencies: [],
+            dependents: [],
+            depSpecs: [],
+            id: `${packageName}#${task}`,
+            label: `${packageName}#${task}`,
+            packageName,
+            task
+        };
+    }
+    it("creates different hashes given different fixtures", async ()=>{
+        const monorepo1 = await setupFixture("monorepo");
+        const hasher = new _index.TargetHasher({
+            root: monorepo1.root,
+            environmentGlob: []
+        });
+        const target = createTarget(monorepo1.root, "package-a", "build");
+        const hash = await getHash(hasher, target);
+        const monorepo2 = await setupFixture("monorepo-different");
+        const target2 = createTarget(monorepo2.root, "package-a", "build");
+        const hasher2 = new _index.TargetHasher({
+            root: monorepo2.root,
+            environmentGlob: []
+        });
+        const hash2 = await getHash(hasher2, target2);
+        expect(hash).not.toEqual(hash2);
+        monorepo1.cleanup();
+        monorepo2.cleanup();
+    });
+    it("creates the same hash given the same fixture, with different target hasher instances", async ()=>{
+        const monorepo1 = await setupFixture("monorepo");
+        const hasher = new _index.TargetHasher({
+            root: monorepo1.root,
+            environmentGlob: []
+        });
+        const target = createTarget(monorepo1.root, "package-a", "build");
+        const hash = await getHash(hasher, target);
+        const monorepo2 = await setupFixture("monorepo");
+        const hasher2 = new _index.TargetHasher({
+            root: monorepo2.root,
+            environmentGlob: []
+        });
+        const target2 = createTarget(monorepo2.root, "package-a", "build");
+        const hash2 = await getHash(hasher2, target2);
+        expect(hash).toEqual(hash2);
+        monorepo1.cleanup();
+        monorepo2.cleanup();
+    });
+    it("creates different hashes when a src file has changed", async ()=>{
+        const monorepo1 = await setupFixture("monorepo");
+        const hasher = new _index.TargetHasher({
+            root: monorepo1.root,
+            environmentGlob: []
+        });
+        const target = createTarget(monorepo1.root, "package-a", "build");
+        const hash = await getHash(hasher, target);
+        const monorepo2 = await setupFixture("monorepo");
+        const hasher2 = new _index.TargetHasher({
+            root: monorepo2.root,
+            environmentGlob: []
+        });
+        const target2 = createTarget(monorepo2.root, "package-a", "build");
+        await monorepo2.commitFiles({
+            "packages/package-a/src/index.ts": "console.log('hello world');"
+        });
+        const hash2 = await getHash(hasher2, target2);
+        expect(hash).not.toEqual(hash2);
+        monorepo1.cleanup();
+        monorepo2.cleanup();
+    });
+    it("creates different hashes when a src file has changed for a dependency", async ()=>{
+        const monorepo1 = await setupFixture("monorepo-with-deps");
+        const hasher = new _index.TargetHasher({
+            root: monorepo1.root,
+            environmentGlob: []
+        });
+        const target = createTarget(monorepo1.root, "package-a", "build");
+        target.inputs = [
+            "**/*",
+            "^**/*"
+        ];
+        const hash = await getHash(hasher, target);
+        const monorepo2 = await setupFixture("monorepo-with-deps");
+        await monorepo2.commitFiles({
+            "packages/package-b/src/index.ts": "console.log('hello world');"
+        });
+        const hasher2 = new _index.TargetHasher({
+            root: monorepo2.root,
+            environmentGlob: []
+        });
+        const target2 = createTarget(monorepo2.root, "package-a", "build");
+        target2.inputs = [
+            "**/*",
+            "^**/*"
+        ];
+        const hash2 = await getHash(hasher2, target2);
+        expect(hash).not.toEqual(hash2);
+        monorepo1.cleanup();
+        monorepo2.cleanup();
+    });
+});
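The last test above exercises the `^` input-pattern convention from expandInputPatterns: a pattern prefixed with `^` is applied to the target's transitive internal dependencies instead of (not in addition to) the target's own package. A rough illustration, assuming package-a depends on package-b:

// Hypothetical expansion for target "package-a#build" with inputs ["**/*", "^**/*"],
// assuming package-a depends on package-b (directly or transitively):
//
//   expandInputPatterns(["**/*", "^**/*"], target)
//   // => { "package-a": ["**/*"], "package-b": ["**/*"] }
//
// This is why committing a change under packages/package-b/src above
// produces a different hash for package-a#build.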
package/lib/__tests__/getPackageDeps.test.js
@@ -2,10 +2,10 @@
 Object.defineProperty(exports, "__esModule", {
     value: true
 });
-const _path = /*#__PURE__*/
-const _fs = /*#__PURE__*/
-const _childProcess = require("child_process");
+const _path = /*#__PURE__*/ _interop_require_wildcard(require("path"));
+const _fs = /*#__PURE__*/ _interop_require_wildcard(require("fs"));
 const _getPackageDeps = require("../getPackageDeps");
+const _monorepofixture = require("@lage-run/monorepo-fixture");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();
@@ -14,7 +14,7 @@ function _getRequireWildcardCache(nodeInterop) {
         return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
     })(nodeInterop);
 }
-function
+function _interop_require_wildcard(obj, nodeInterop) {
     if (!nodeInterop && obj && obj.__esModule) {
         return obj;
     }
@@ -45,9 +45,9 @@ function _interopRequireWildcard(obj, nodeInterop) {
     }
     return newObj;
 }
-const SOURCE_PATH = _path.join(__dirname
-const TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "
-const NESTED_TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "
+const SOURCE_PATH = _path.join(__dirname, "..", "__fixtures__");
+const TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "test-project");
+const NESTED_TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "nested-test-project");
 const FileSystem = {
     writeFile: _fs.writeFileSync,
     deleteFile: _fs.rmSync
@@ -97,8 +97,10 @@ describe(_getPackageDeps.parseGitLsTree.name, ()=>{
     });
 });
 describe(_getPackageDeps.getPackageDeps.name, ()=>{
-    it("can parse committed file", ()=>{
-        const
+    it("can parse committed file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("parse-commited-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         const expectedFiles = {
             "file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
             "file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
@@ -108,19 +110,24 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
         const filePaths = Array.from(results.keys()).sort();
         filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
     });
-    it("can handle files in subfolders", ()=>{
-        const
+    it("can handle files in subfolders", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("files-in-subfolders");
+        await monorepo.init(NESTED_TEST_PROJECT_PATH);
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         const expectedFiles = {
             "src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
             "package.json": "18a1e415e56220fa5122428a4ef8eb8874756576"
         };
         const filePaths = Array.from(results.keys()).sort();
         filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
+        await monorepo.cleanup();
     });
-    it("can handle adding one file", ()=>{
-        const
+    it("can handle adding one file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("add-one-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath = _path.join(monorepo.root, "a.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "a.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
@@ -133,14 +140,17 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
             FileSystem.deleteFile(tempFilePath);
+            await monorepo.cleanup();
         }
     });
-    it("can handle adding two files", ()=>{
-        const
-
+    it("can handle adding two files", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("add-two-files");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath1 = _path.join(monorepo.root, "a.txt");
+        const tempFilePath2 = _path.join(monorepo.root, "b.txt");
         FileSystem.writeFile(tempFilePath1, "a");
         FileSystem.writeFile(tempFilePath2, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "a.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
@@ -155,12 +165,15 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
         } finally{
             FileSystem.deleteFile(tempFilePath1);
             FileSystem.deleteFile(tempFilePath2);
+            await monorepo.cleanup();
         }
     });
-    it("can handle removing one file", ()=>{
-        const
+    it("can handle removing one file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("remove-one-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const testFilePath = _path.join(monorepo.root, "file1.txt");
         FileSystem.deleteFile(testFilePath);
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
@@ -170,15 +183,15 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             const filePaths = Array.from(results.keys()).sort();
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
-            stdio: "ignore"
-            });
+            await monorepo.cleanup();
         }
     });
-    it("can handle changing one file", ()=>{
-        const
+    it("can handle changing one file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("change-one-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const testFilePath = _path.join(monorepo.root, "file1.txt");
         FileSystem.writeFile(testFilePath, "abc");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "file1.txt": "f2ba8f84ab5c1bce84a7b441cb1959cfc7093b7f",
@@ -189,13 +202,13 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             const filePaths = Array.from(results.keys()).sort();
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
-            stdio: "ignore"
-            });
+            await monorepo.cleanup();
         }
     });
-    it("can exclude a committed file", ()=>{
-        const
+    it("can exclude a committed file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("exclude-comitted-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root, [
             "file1.txt",
             "file 2.txt",
             "file蝴蝶.txt"
@@ -205,11 +218,14 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
         };
         const filePaths = Array.from(results.keys()).sort();
         filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
+        await monorepo.cleanup();
     });
-    it("can exclude an added file", ()=>{
-        const
+    it("can exclude an added file", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("exclude-added-file");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath = _path.join(monorepo.root, "a.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root, [
             "a.txt"
         ]);
         try {
@@ -223,13 +239,15 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             expect(filePaths).toHaveLength(Object.keys(expectedFiles).length);
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
+            await monorepo.cleanup();
         }
     });
-    it("can handle a filename with spaces", ()=>{
-        const
+    it("can handle a filename with spaces", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("filename-with-spaces");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath = _path.join(monorepo.root, "a file.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
@@ -242,13 +260,15 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             expect(filePaths).toHaveLength(Object.keys(expectedFiles).length);
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
+            await monorepo.cleanup();
         }
     });
-    it("can handle a filename with multiple spaces", ()=>{
-        const
+    it("can handle a filename with multiple spaces", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("filename-multiple-spaces");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath = _path.join(monorepo.root, "a file name.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
@@ -261,32 +281,15 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             expect(filePaths).toHaveLength(Object.keys(expectedFiles).length);
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
-        }
-    });
-    it("can handle a filename with non-standard characters", ()=>{
-        const tempFilePath = _path.join(TEST_PROJECT_PATH, "newFile批把.txt");
-        FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(TEST_PROJECT_PATH);
-        try {
-            const expectedFiles = {
-                "file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
-                "file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
-                "file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
-                "newFile批把.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
-                "package.json": "18a1e415e56220fa5122428a4ef8eb8874756576"
-            };
-            const filePaths = Array.from(results.keys()).sort();
-            expect(filePaths).toHaveLength(Object.keys(expectedFiles).length);
-            filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
-        } finally{
-            FileSystem.deleteFile(tempFilePath);
+            await monorepo.cleanup();
         }
     });
-    it("can handle a filename with non-standard characters", ()=>{
-        const
+    it("can handle a filename with non-standard characters", async ()=>{
+        const monorepo = new _monorepofixture.Monorepo("non-standard-characters");
+        await monorepo.init(TEST_PROJECT_PATH);
+        const tempFilePath = _path.join(monorepo.root, "newFile批把.txt");
         FileSystem.writeFile(tempFilePath, "a");
-        const results = (0, _getPackageDeps.getPackageDeps)(
+        const results = (0, _getPackageDeps.getPackageDeps)(monorepo.root);
         try {
             const expectedFiles = {
                 "file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
@@ -299,7 +302,7 @@ describe(_getPackageDeps.getPackageDeps.name, ()=>{
             expect(filePaths).toHaveLength(Object.keys(expectedFiles).length);
             filePaths.forEach((filePath)=>expect(results.get(filePath)).toEqual(expectedFiles[filePath]));
         } finally{
-
+            await monorepo.cleanup();
        }
     });
 });
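The migrated getPackageDeps tests all follow the same fixture lifecycle from @lage-run/monorepo-fixture, visible in the hunks above. A condensed sketch (the fixture name is illustrative; init() appears to copy the fixture into a temporary git repository, since getPackageDeps reads git state):

const path = require("path");
const { Monorepo } = require("@lage-run/monorepo-fixture");
const { getPackageDeps } = require("../getPackageDeps"); // as required by the tests above

async function run() {
    const monorepo = new Monorepo("example"); // names the temporary fixture
    await monorepo.init(path.join(__dirname, "..", "__fixtures__", "test-project"));
    const results = getPackageDeps(monorepo.root); // Map of file path -> git object hash
    for (const [filePath, hash] of results) {
        console.log(filePath, hash);
    }
    await monorepo.cleanup(); // removes the temporary fixture
}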