@lage-run/hasher 0.2.1 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/CHANGELOG.json +52 -1
  2. package/CHANGELOG.md +19 -2
  3. package/lib/FileHasher.d.ts +13 -0
  4. package/lib/FileHasher.js +181 -0
  5. package/lib/PackageTree.d.ts +20 -0
  6. package/lib/PackageTree.js +178 -0
  7. package/lib/TargetHasher.d.ts +47 -0
  8. package/lib/TargetHasher.js +218 -0
  9. package/lib/__tests__/TargetHasher.test.js +128 -0
  10. package/lib/__tests__/getPackageDeps.test.js +70 -67
  11. package/lib/__tests__/resolveDependenciesHelper.js +19 -13
  12. package/lib/__tests__/resolveExternalDependencies.test.js +16 -16
  13. package/lib/__tests__/resolveInternalDependencies.test.js +12 -12
  14. package/lib/getPackageDeps.js +25 -15
  15. package/lib/hashStrings.d.ts +1 -0
  16. package/lib/hashStrings.js +28 -0
  17. package/lib/index.d.ts +1 -13
  18. package/lib/index.js +6 -70
  19. package/lib/nameAtVersion.d.ts +1 -0
  20. package/lib/nameAtVersion.js +13 -0
  21. package/lib/resolveExternalDependencies.js +18 -12
  22. package/lib/resolveInternalDependencies.js +8 -4
  23. package/package.json +9 -5
  24. package/lib/__tests__/createPackageHashes.test.js +0 -44
  25. package/lib/__tests__/getRepoDeps.test.d.ts +0 -1
  26. package/lib/__tests__/getRepoDeps.test.js +0 -253
  27. package/lib/__tests__/getRepoState.test.d.ts +0 -1
  28. package/lib/__tests__/getRepoState.test.js +0 -104
  29. package/lib/__tests__/hashOfFiles.test.d.ts +0 -1
  30. package/lib/__tests__/hashOfFiles.test.js +0 -103
  31. package/lib/__tests__/helpers.test.d.ts +0 -1
  32. package/lib/__tests__/helpers.test.js +0 -28
  33. package/lib/__tests__/index.test.d.ts +0 -1
  34. package/lib/__tests__/index.test.js +0 -98
  35. package/lib/createPackageHashes.d.ts +0 -4
  36. package/lib/createPackageHashes.js +0 -48
  37. package/lib/getRepoState.d.ts +0 -76
  38. package/lib/getRepoState.js +0 -256
  39. package/lib/hashOfFiles.d.ts +0 -14
  40. package/lib/hashOfFiles.js +0 -71
  41. package/lib/hashOfPackage.d.ts +0 -9
  42. package/lib/hashOfPackage.js +0 -65
  43. package/lib/helpers.d.ts +0 -3
  44. package/lib/helpers.js +0 -47
  45. package/lib/repoInfo.d.ts +0 -26
  46. package/lib/repoInfo.js +0 -65
  47. /package/lib/__tests__/{createPackageHashes.test.d.ts → TargetHasher.test.d.ts} +0 -0
@@ -1,253 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- const _path = /*#__PURE__*/ _interopRequireWildcard(require("path"));
6
- const _fs = /*#__PURE__*/ _interopRequireWildcard(require("fs"));
7
- const _childProcess = require("child_process");
8
- const _getRepoState = require("../getRepoState");
9
- function _getRequireWildcardCache(nodeInterop) {
10
- if (typeof WeakMap !== "function") return null;
11
- var cacheBabelInterop = new WeakMap();
12
- var cacheNodeInterop = new WeakMap();
13
- return (_getRequireWildcardCache = function(nodeInterop) {
14
- return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
15
- })(nodeInterop);
16
- }
17
- function _interopRequireWildcard(obj, nodeInterop) {
18
- if (!nodeInterop && obj && obj.__esModule) {
19
- return obj;
20
- }
21
- if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
22
- return {
23
- default: obj
24
- };
25
- }
26
- var cache = _getRequireWildcardCache(nodeInterop);
27
- if (cache && cache.has(obj)) {
28
- return cache.get(obj);
29
- }
30
- var newObj = {};
31
- var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
32
- for(var key in obj){
33
- if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
34
- var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
35
- if (desc && (desc.get || desc.set)) {
36
- Object.defineProperty(newObj, key, desc);
37
- } else {
38
- newObj[key] = obj[key];
39
- }
40
- }
41
- }
42
- newObj.default = obj;
43
- if (cache) {
44
- cache.set(obj, newObj);
45
- }
46
- return newObj;
47
- }
48
- const SOURCE_PATH = _path.join(__dirname).replace(_path.join("lib", "__tests__"), _path.join("src", "__tests__"));
49
- const TEST_PREFIX = `packages/hasher/src/__tests__/`;
50
- const TEST_PROJECT_PATH = _path.join(SOURCE_PATH, "testProject");
51
- const FILTERS = [
52
- `testProject/`,
53
- `nestedTestProject/`
54
- ];
55
- const FileSystem = {
56
- writeFile: _fs.writeFileSync,
57
- deleteFile: _fs.rmSync
58
- };
59
- function getRelevantEntries(results) {
60
- const relevantResults = new Map();
61
- for (const [key, hash] of results){
62
- if (key.startsWith(TEST_PREFIX)) {
63
- const partialKey = key.slice(TEST_PREFIX.length);
64
- for (const filter of FILTERS){
65
- if (partialKey.startsWith(filter)) {
66
- relevantResults.set(partialKey, hash);
67
- }
68
- }
69
- }
70
- }
71
- return relevantResults;
72
- }
73
- describe(_getRepoState.getRepoRoot.name, ()=>{
74
- it(`returns the correct directory`, ()=>{
75
- const root = (0, _getRepoState.getRepoRoot)(__dirname);
76
- const expectedRoot = _path.resolve(__dirname, "../../../..").replace(/\\/g, "/");
77
- expect(root).toEqual(expectedRoot);
78
- });
79
- });
80
- describe(_getRepoState.parseGitLsTree.name, ()=>{
81
- it("can handle a blob", ()=>{
82
- const filename = "src/typings/tsd.d.ts";
83
- const hash = "3451bccdc831cb43d7a70ed8e628dcf9c7f888c8";
84
- const output = `100644 blob ${hash}\t${filename}\x00`;
85
- const changes = (0, _getRepoState.parseGitLsTree)(output);
86
- expect(changes.size).toEqual(1); // Expect there to be exactly 1 change
87
- expect(changes.get(filename)).toEqual(hash); // Expect the hash to be ${hash}
88
- });
89
- it("can handle a submodule", ()=>{
90
- const filename = "rushstack";
91
- const hash = "c5880bf5b0c6c1f2e2c43c95beeb8f0a808e8bac";
92
- const output = `160000 commit ${hash}\t${filename}\x00`;
93
- const changes = (0, _getRepoState.parseGitLsTree)(output);
94
- expect(changes.size).toEqual(1); // Expect there to be exactly 1 change
95
- expect(changes.get(filename)).toEqual(hash); // Expect the hash to be ${hash}
96
- });
97
- it("can handle multiple lines", ()=>{
98
- const filename1 = "src/typings/tsd.d.ts";
99
- const hash1 = "3451bccdc831cb43d7a70ed8e628dcf9c7f888c8";
100
- const filename2 = "src/foo bar/tsd.d.ts";
101
- const hash2 = "0123456789abcdef1234567890abcdef01234567";
102
- const output = `100644 blob ${hash1}\t${filename1}\x00100666 blob ${hash2}\t${filename2}\0`;
103
- const changes = (0, _getRepoState.parseGitLsTree)(output);
104
- expect(changes.size).toEqual(2); // Expect there to be exactly 2 changes
105
- expect(changes.get(filename1)).toEqual(hash1); // Expect the hash to be ${hash1}
106
- expect(changes.get(filename2)).toEqual(hash2); // Expect the hash to be ${hash2}
107
- });
108
- });
109
- describe(_getRepoState.getRepoState.name, ()=>{
110
- it("can parse committed files", ()=>{
111
- const results = (0, _getRepoState.getRepoState)(__dirname);
112
- const filteredResults = getRelevantEntries(results);
113
- const expectedFiles = new Map(Object.entries({
114
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
115
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
116
- "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
117
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
118
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
119
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
120
- }));
121
- for (const [filePath, hash] of expectedFiles){
122
- expect(filteredResults.get(filePath)).toEqual(hash);
123
- }
124
- expect(filteredResults.size).toEqual(expectedFiles.size);
125
- });
126
- it("can handle adding one file", ()=>{
127
- const tempFilePath = _path.join(TEST_PROJECT_PATH, "a.txt");
128
- FileSystem.writeFile(tempFilePath, "a");
129
- const results = (0, _getRepoState.getRepoState)(__dirname);
130
- const filteredResults = getRelevantEntries(results);
131
- try {
132
- const expectedFiles = new Map(Object.entries({
133
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
134
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
135
- "testProject/a.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
136
- "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
137
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
138
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
139
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
140
- }));
141
- for (const [filePath, hash] of expectedFiles){
142
- expect(filteredResults.get(filePath)).toEqual(hash);
143
- }
144
- expect(filteredResults.size).toEqual(expectedFiles.size);
145
- } finally{
146
- FileSystem.deleteFile(tempFilePath);
147
- }
148
- });
149
- it("can handle adding two files", ()=>{
150
- const tempFilePath1 = _path.join(TEST_PROJECT_PATH, "a.txt");
151
- const tempFilePath2 = _path.join(TEST_PROJECT_PATH, "b.txt");
152
- FileSystem.writeFile(tempFilePath1, "a");
153
- FileSystem.writeFile(tempFilePath2, "a");
154
- const results = (0, _getRepoState.getRepoState)(__dirname);
155
- const filteredResults = getRelevantEntries(results);
156
- try {
157
- const expectedFiles = new Map(Object.entries({
158
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
159
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
160
- "testProject/a.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
161
- "testProject/b.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
162
- "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
163
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
164
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
165
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
166
- }));
167
- for (const [filePath, hash] of expectedFiles){
168
- expect(filteredResults.get(filePath)).toEqual(hash);
169
- }
170
- expect(filteredResults.size).toEqual(expectedFiles.size);
171
- } finally{
172
- FileSystem.deleteFile(tempFilePath1);
173
- FileSystem.deleteFile(tempFilePath2);
174
- }
175
- });
176
- it("can handle removing one file", ()=>{
177
- const testFilePath = _path.join(TEST_PROJECT_PATH, "file1.txt");
178
- FileSystem.deleteFile(testFilePath);
179
- const results = (0, _getRepoState.getRepoState)(__dirname);
180
- const filteredResults = getRelevantEntries(results);
181
- try {
182
- const expectedFiles = new Map(Object.entries({
183
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
184
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
185
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
186
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
187
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
188
- }));
189
- for (const [filePath, hash] of expectedFiles){
190
- expect(filteredResults.get(filePath)).toEqual(hash);
191
- }
192
- expect(filteredResults.size).toEqual(expectedFiles.size);
193
- } finally{
194
- (0, _childProcess.execSync)(`git checkout --force HEAD -- ${TEST_PREFIX}testProject/file1.txt`, {
195
- stdio: "ignore",
196
- cwd: (0, _getRepoState.getRepoRoot)(__dirname)
197
- });
198
- }
199
- });
200
- it("can handle changing one file", ()=>{
201
- const testFilePath = _path.join(TEST_PROJECT_PATH, "file1.txt");
202
- FileSystem.writeFile(testFilePath, "abc");
203
- const results = (0, _getRepoState.getRepoState)(__dirname);
204
- const filteredResults = getRelevantEntries(results);
205
- try {
206
- const expectedFiles = new Map(Object.entries({
207
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
208
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
209
- "testProject/file1.txt": "f2ba8f84ab5c1bce84a7b441cb1959cfc7093b7f",
210
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
211
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
212
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
213
- }));
214
- for (const [filePath, hash] of expectedFiles){
215
- expect(filteredResults.get(filePath)).toEqual(hash);
216
- }
217
- expect(filteredResults.size).toEqual(expectedFiles.size);
218
- } finally{
219
- FileSystem.writeFile(testFilePath, "file1.");
220
- }
221
- });
222
- it("can handle uncommitted filenames with spaces and non-ASCII characters", ()=>{
223
- const tempFilePath1 = _path.join(TEST_PROJECT_PATH, "a file.txt");
224
- const tempFilePath2 = _path.join(TEST_PROJECT_PATH, "a file name.txt");
225
- const tempFilePath3 = _path.join(TEST_PROJECT_PATH, "newFile批把.txt");
226
- FileSystem.writeFile(tempFilePath1, "a");
227
- FileSystem.writeFile(tempFilePath2, "a");
228
- FileSystem.writeFile(tempFilePath3, "a");
229
- const results = (0, _getRepoState.getRepoState)(__dirname);
230
- const filteredResults = getRelevantEntries(results);
231
- try {
232
- const expectedFiles = new Map(Object.entries({
233
- "nestedTestProject/src/file 1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
234
- [`nestedTestProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576",
235
- "testProject/a file.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
236
- "testProject/a file name.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
237
- "testProject/file1.txt": "c7b2f707ac99ca522f965210a7b6b0b109863f34",
238
- "testProject/file 2.txt": "a385f754ec4fede884a4864d090064d9aeef8ccb",
239
- "testProject/file蝴蝶.txt": "ae814af81e16cb2ae8c57503c77e2cab6b5462ba",
240
- "testProject/newFile批把.txt": "2e65efe2a145dda7ee51d1741299f848e5bf752e",
241
- [`testProject/package.json`]: "18a1e415e56220fa5122428a4ef8eb8874756576"
242
- }));
243
- for (const [filePath, hash] of expectedFiles){
244
- expect(filteredResults.get(filePath)).toEqual(hash);
245
- }
246
- expect(filteredResults.size).toEqual(expectedFiles.size);
247
- } finally{
248
- FileSystem.deleteFile(tempFilePath1);
249
- FileSystem.deleteFile(tempFilePath2);
250
- FileSystem.deleteFile(tempFilePath3);
251
- }
252
- });
253
- });
@@ -1 +0,0 @@
1
- export {};
@@ -1,104 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- const _getRepoState = require("../getRepoState");
6
- describe(_getRepoState.parseGitVersion.name, ()=>{
7
- it("Can parse valid git version responses", ()=>{
8
- expect((0, _getRepoState.parseGitVersion)("git version 2.30.2.windows.1")).toEqual({
9
- major: 2,
10
- minor: 30,
11
- patch: 2
12
- });
13
- expect((0, _getRepoState.parseGitVersion)("git version 2.30.2.windows.1.g8b8f8e")).toEqual({
14
- major: 2,
15
- minor: 30,
16
- patch: 2
17
- });
18
- expect((0, _getRepoState.parseGitVersion)("git version 2.30.2")).toEqual({
19
- major: 2,
20
- minor: 30,
21
- patch: 2
22
- });
23
- });
24
- it("Rejects invalid git version responses", ()=>{
25
- expect(()=>(0, _getRepoState.parseGitVersion)("2.22.0.windows.1")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "2.22.0.windows.1""`);
26
- expect(()=>(0, _getRepoState.parseGitVersion)("git version 2.30.A")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30.A""`);
27
- expect(()=>(0, _getRepoState.parseGitVersion)("git version 2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version 2.30""`);
28
- expect(()=>(0, _getRepoState.parseGitVersion)("git version .2.30")).toThrowErrorMatchingInlineSnapshot(`"While validating the Git installation, the "git version" command produced unexpected output: "git version .2.30""`);
29
- });
30
- });
31
- describe(_getRepoState.parseGitStatus.name, ()=>{
32
- it("Finds index entries", ()=>{
33
- const files = [
34
- `A.ts`,
35
- `B.ts`,
36
- `C.ts`
37
- ];
38
- const input = [
39
- `A ${files[0]}`,
40
- `D ${files[1]}`,
41
- `M ${files[2]}`,
42
- ""
43
- ].join("\0");
44
- const result = (0, _getRepoState.parseGitStatus)(input);
45
- expect(result.size).toEqual(3);
46
- expect(result.get(files[0])).toEqual(true);
47
- expect(result.get(files[1])).toEqual(false);
48
- expect(result.get(files[2])).toEqual(true);
49
- });
50
- it("Finds working tree entries", ()=>{
51
- const files = [
52
- `A.ts`,
53
- `B.ts`,
54
- `C.ts`
55
- ];
56
- const input = [
57
- ` A ${files[0]}`,
58
- ` D ${files[1]}`,
59
- ` M ${files[2]}`,
60
- ""
61
- ].join("\0");
62
- const result = (0, _getRepoState.parseGitStatus)(input);
63
- expect(result.size).toEqual(3);
64
- expect(result.get(files[0])).toEqual(true);
65
- expect(result.get(files[1])).toEqual(false);
66
- expect(result.get(files[2])).toEqual(true);
67
- });
68
- it("Can handle untracked files", ()=>{
69
- const files = [
70
- `A.ts`,
71
- `B.ts`,
72
- `C.ts`
73
- ];
74
- const input = [
75
- `?? ${files[0]}`,
76
- `?? ${files[1]}`,
77
- `?? ${files[2]}`,
78
- ""
79
- ].join("\0");
80
- const result = (0, _getRepoState.parseGitStatus)(input);
81
- expect(result.size).toEqual(3);
82
- expect(result.get(files[0])).toEqual(true);
83
- expect(result.get(files[1])).toEqual(true);
84
- expect(result.get(files[2])).toEqual(true);
85
- });
86
- it("Can handle files modified in both index and working tree", ()=>{
87
- const files = [
88
- `A.ts`,
89
- `B.ts`,
90
- `C.ts`
91
- ];
92
- const input = [
93
- `D ${files[0]}`,
94
- `AD ${files[1]}`,
95
- `DA ${files[2]}`,
96
- ""
97
- ].join("\0");
98
- const result = (0, _getRepoState.parseGitStatus)(input);
99
- expect(result.size).toEqual(3);
100
- expect(result.get(files[0])).toEqual(false);
101
- expect(result.get(files[1])).toEqual(false);
102
- expect(result.get(files[2])).toEqual(true);
103
- });
104
- });
@@ -1 +0,0 @@
1
- export {};
@@ -1,103 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- const _path = /*#__PURE__*/ _interopRequireDefault(require("path"));
6
- const _fsExtra = /*#__PURE__*/ _interopRequireDefault(require("fs-extra"));
7
- const _monorepoFixture = require("@lage-run/monorepo-fixture");
8
- const _hashOfFiles = require("../hashOfFiles");
9
- const _repoInfo = require("../repoInfo");
10
- function _interopRequireDefault(obj) {
11
- return obj && obj.__esModule ? obj : {
12
- default: obj
13
- };
14
- }
15
- const fixturesPath = _path.default.join(__dirname, "..", "__fixtures__");
16
- describe("generateHashOfFiles()", ()=>{
17
- it("creates different hashes for different hashes", async ()=>{
18
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
19
- await monorepo.init(_path.default.join(fixturesPath, "monorepo"));
20
- const packageRoot = monorepo.root;
21
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
22
- const hashOfPackage = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
23
- _fsExtra.default.writeFileSync(_path.default.join(packageRoot, "foo.txt"), "bar");
24
- repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
25
- const hashOfPackageWithFoo = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
26
- expect(hashOfPackage).not.toEqual(hashOfPackageWithFoo);
27
- _fsExtra.default.writeFileSync(_path.default.join(packageRoot, "foo.txt"), "foo");
28
- repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
29
- const hashOfPackageWithFoo2 = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
30
- expect(hashOfPackageWithFoo).not.toEqual(hashOfPackageWithFoo2);
31
- _fsExtra.default.unlinkSync(_path.default.join(packageRoot, "foo.txt"));
32
- repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
33
- const hashOfPackageWithoutFoo = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
34
- expect(hashOfPackage).toEqual(hashOfPackageWithoutFoo);
35
- await monorepo.cleanup();
36
- });
37
- it("is not confused by package names being substring of other packages", async ()=>{
38
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
39
- await monorepo.init(_path.default.join(fixturesPath, "monorepo"));
40
- const packageRoot = monorepo.root;
41
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
42
- const hashOfPackageA = await (0, _hashOfFiles.generateHashOfFiles)(_path.default.join(packageRoot, "packages", "package-a"), repoInfo);
43
- await _fsExtra.default.mkdir(_path.default.join(packageRoot, "packages", "package-abc"));
44
- await _fsExtra.default.writeFile(_path.default.join(packageRoot, "packages", "package-abc", "foo"), "bar");
45
- repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
46
- const newHashOfPackageA = await (0, _hashOfFiles.generateHashOfFiles)(_path.default.join(packageRoot, "packages", "package-a"), repoInfo);
47
- expect(hashOfPackageA).toEqual(newHashOfPackageA);
48
- await monorepo.cleanup();
49
- });
50
- it("file paths are included in hash", async ()=>{
51
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
52
- await monorepo.init(_path.default.join(fixturesPath, "empty"));
53
- const packageRoot = monorepo.root;
54
- _fsExtra.default.writeFileSync(_path.default.join(packageRoot, "foo.txt"), "bar");
55
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
56
- const hashOfPackageWithFoo = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
57
- _fsExtra.default.unlinkSync(_path.default.join(packageRoot, "foo.txt"));
58
- _fsExtra.default.writeFileSync(_path.default.join(packageRoot, "bar.txt"), "bar");
59
- repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
60
- const hashOfPackageWithBar = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
61
- expect(hashOfPackageWithFoo).not.toEqual(hashOfPackageWithBar);
62
- await monorepo.cleanup();
63
- });
64
- // This test will be run on Windows and on Linux on the CI
65
- it("file paths are consistent across platforms", async ()=>{
66
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
67
- await monorepo.init(_path.default.join(fixturesPath, "empty"));
68
- const packageRoot = monorepo.root;
69
- // Create a folder to make sure we get folder separators as part of the file name
70
- const folder = _path.default.join(packageRoot, "foo");
71
- _fsExtra.default.mkdirpSync(folder);
72
- _fsExtra.default.writeFileSync(_path.default.join(folder, "foo.txt"), "bar");
73
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(packageRoot);
74
- const hashOfPackage = await (0, _hashOfFiles.generateHashOfFiles)(packageRoot, repoInfo);
75
- expect(hashOfPackage).toEqual("4d4ca2ecc436e1198554f5d03236ea8f956ac0c4");
76
- await monorepo.cleanup();
77
- });
78
- // This test will be run on Windows and on Linux on the CI
79
- it("file paths in a package not defined in a workspace (malformed monorepo) are consistent across platforms (uses slow path)", async ()=>{
80
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
81
- await monorepo.init(_path.default.join(fixturesPath, "empty"));
82
- const workspaceRoot = monorepo.root;
83
- // Create a folder to make sure we get folder separators as part of the file name
84
- const folder = _path.default.join(workspaceRoot, "packages", "foo");
85
- _fsExtra.default.mkdirpSync(folder);
86
- _fsExtra.default.writeFileSync(_path.default.join(folder, "foo.txt"), "bar");
87
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(workspaceRoot);
88
- const hashOfPackage = await (0, _hashOfFiles.generateHashOfFiles)(folder, repoInfo);
89
- expect(hashOfPackage).toEqual("438b5f734e6de1ef0eb9114a28ef230a9ff83f54");
90
- await monorepo.cleanup();
91
- });
92
- it("file paths in a monorepo are consistent across platforms (uses fast path)", async ()=>{
93
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
94
- await monorepo.init(_path.default.join(fixturesPath, "monorepo"));
95
- const workspaceRoot = monorepo.root;
96
- const folder = _path.default.join(workspaceRoot, "packages", "package-a");
97
- _fsExtra.default.writeFileSync(_path.default.join(folder, "foo.txt"), "bar");
98
- let repoInfo = await (0, _repoInfo.getRepoInfoNoCache)(workspaceRoot);
99
- const hashOfPackage = await (0, _hashOfFiles.generateHashOfFiles)(folder, repoInfo);
100
- expect(hashOfPackage).toEqual("b91634233c6a3768136391c804967bf0e0a6578d");
101
- await monorepo.cleanup();
102
- });
103
- });
@@ -1 +0,0 @@
1
- export {};
@@ -1,28 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- const _helpers = require("../helpers");
6
- describe("hashStrings()", ()=>{
7
- it("creates different hashes given different lists", ()=>{
8
- const list = [];
9
- list.push("foo");
10
- list.push("bar");
11
- const hash = (0, _helpers.hashStrings)(list);
12
- list.push("baz");
13
- const hashWithBaz = (0, _helpers.hashStrings)(list);
14
- expect(hash).not.toEqual(hashWithBaz);
15
- list.pop();
16
- const hashWithoutBaz = (0, _helpers.hashStrings)(list);
17
- expect(hash).toEqual(hashWithoutBaz);
18
- });
19
- it("lists of different order produce the same hash", ()=>{
20
- const list = [];
21
- list.push("foo");
22
- list.push("bar");
23
- const hash = (0, _helpers.hashStrings)(list);
24
- list.reverse();
25
- const hashReverse = (0, _helpers.hashStrings)(list);
26
- expect(hash).toEqual(hashReverse);
27
- });
28
- });
@@ -1 +0,0 @@
1
- export {};
@@ -1,98 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- const _path = /*#__PURE__*/ _interopRequireDefault(require("path"));
6
- const _index = require("../index");
7
- const _monorepoFixture = require("@lage-run/monorepo-fixture");
8
- function _interopRequireDefault(obj) {
9
- return obj && obj.__esModule ? obj : {
10
- default: obj
11
- };
12
- }
13
- const fixturesPath = _path.default.join(__dirname, "..", "__fixtures__");
14
- describe("addToQueue", ()=>{
15
- const setupAddToQueue = async ()=>{
16
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
17
- await monorepo.init(_path.default.join(fixturesPath, "monorepo"));
18
- const packageRoot = monorepo.root;
19
- const packageToAdd = "package-a";
20
- const packagePath = _path.default.join(packageRoot, "packages", packageToAdd);
21
- const workspaces = [
22
- {
23
- name: packageToAdd,
24
- path: packagePath,
25
- packageJson: {
26
- name: "",
27
- packageJsonPath: "",
28
- version: ""
29
- }
30
- }
31
- ];
32
- const internalDependencies = [
33
- packageToAdd
34
- ];
35
- const queue = [];
36
- const done = [];
37
- return {
38
- internalDependencies,
39
- queue,
40
- done,
41
- workspaces,
42
- packageToAdd,
43
- packagePath
44
- };
45
- };
46
- it("adds internal dependencies to the queue", async ()=>{
47
- const { internalDependencies , queue , done , workspaces , packagePath } = await setupAddToQueue();
48
- (0, _index.addToQueue)(internalDependencies, queue, done, workspaces);
49
- const expectedQueue = [
50
- packagePath
51
- ];
52
- expect(queue).toEqual(expectedQueue);
53
- });
54
- it("doesn't add to the queue if the package has been evaluated", async ()=>{
55
- let { internalDependencies , queue , done , workspaces , packageToAdd } = await setupAddToQueue();
56
- // Override
57
- done = [
58
- {
59
- name: packageToAdd,
60
- filesHash: "",
61
- dependenciesHash: "",
62
- internalDependencies: []
63
- }
64
- ];
65
- (0, _index.addToQueue)(internalDependencies, queue, done, workspaces);
66
- expect(queue).toEqual([]);
67
- });
68
- it("doesn't add to the queue if the package is already in the queue", async ()=>{
69
- let { internalDependencies , queue , done , workspaces , packagePath } = await setupAddToQueue();
70
- // Override
71
- queue = [
72
- packagePath
73
- ];
74
- (0, _index.addToQueue)(internalDependencies, queue, done, workspaces);
75
- const expectedQueue = [
76
- packagePath
77
- ];
78
- expect(queue).toEqual(expectedQueue);
79
- });
80
- });
81
- describe("The main Hasher class", ()=>{
82
- const setupFixtureAndReturnHash = async (fixture = "monorepo")=>{
83
- const monorepo = new _monorepoFixture.Monorepo("monorepo");
84
- await monorepo.init(_path.default.join(fixturesPath, fixture));
85
- const packageRoot = monorepo.root;
86
- const buildSignature = "yarn build";
87
- const hasher = new _index.Hasher(packageRoot);
88
- const hash = await hasher.createPackageHash(buildSignature);
89
- return hash;
90
- };
91
- it("creates different hashes given different fixtures", async ()=>{
92
- const hash = await setupFixtureAndReturnHash();
93
- const hashOfBasic = await setupFixtureAndReturnHash("basic");
94
- expect(hash).not.toEqual(hashOfBasic);
95
- const hashOfMonorepoAgain = await setupFixtureAndReturnHash();
96
- expect(hash).toEqual(hashOfMonorepoAgain);
97
- });
98
- });
@@ -1,4 +0,0 @@
1
- import type { WorkspaceInfo } from "workspace-tools";
2
- export declare function createPackageHashes(root: string, workspaceInfo: WorkspaceInfo, repoHashes: {
3
- [key: string]: string;
4
- }): Record<string, [string, string][]>;
@@ -1,48 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", {
3
- value: true
4
- });
5
- Object.defineProperty(exports, "createPackageHashes", {
6
- enumerable: true,
7
- get: ()=>createPackageHashes
8
- });
9
- const _path = /*#__PURE__*/ _interopRequireDefault(require("path"));
10
- function _interopRequireDefault(obj) {
11
- return obj && obj.__esModule ? obj : {
12
- default: obj
13
- };
14
- }
15
- function createPackageHashes(root, workspaceInfo, repoHashes) {
16
- const pathTree = {};
17
- // Generate path tree of all packages in workspace (scale: ~2000 * ~3)
18
- for (const workspace of workspaceInfo){
19
- const pathParts = _path.default.relative(root, workspace.path).split(/[\\/]/);
20
- let currentNode = pathTree;
21
- for (const part of pathParts){
22
- currentNode[part] = currentNode[part] || {};
23
- currentNode = currentNode[part];
24
- }
25
- }
26
- // key: path/to/package (packageRoot), value: array of a tuple of [file, hash]
27
- const packageHashes = {};
28
- for (const [entry, value] of Object.entries(repoHashes)){
29
- const pathParts = entry.split(/[\\/]/);
30
- let node = pathTree;
31
- const packagePathParts = [];
32
- for (const part of pathParts){
33
- if (node[part]) {
34
- node = node[part];
35
- packagePathParts.push(part);
36
- } else {
37
- break;
38
- }
39
- }
40
- const packageRoot = packagePathParts.join("/");
41
- packageHashes[packageRoot] = packageHashes[packageRoot] || [];
42
- packageHashes[packageRoot].push([
43
- entry,
44
- value
45
- ]);
46
- }
47
- return packageHashes;
48
- }