@monorepolint/utils 0.6.0-alpha.2 → 0.6.0-alpha.4

Files changed (56)
  1. package/.turbo/turbo-clean.log +1 -1
  2. package/.turbo/turbo-compile-typescript.log +1 -1
  3. package/.turbo/turbo-lint.log +7 -7
  4. package/.turbo/turbo-test.log +11 -10
  5. package/.turbo/turbo-transpile-typescript.log +4 -4
  6. package/CHANGELOG.md +4 -0
  7. package/build/js/index.js +489 -443
  8. package/build/js/index.js.map +1 -1
  9. package/build/tsconfig.tsbuildinfo +1 -1
  10. package/build/types/CachingHost.d.ts.map +1 -1
  11. package/build/types/Host.d.ts.map +1 -1
  12. package/build/types/PackageJson.d.ts.map +1 -1
  13. package/build/types/SimpleHost.d.ts.map +1 -1
  14. package/build/types/Table.d.ts.map +1 -1
  15. package/build/types/findWorkspaceDir.d.ts.map +1 -1
  16. package/build/types/getPackageNameToDir.d.ts.map +1 -1
  17. package/build/types/getWorkspacePackageDirs.d.ts.map +1 -1
  18. package/build/types/index.d.ts +7 -7
  19. package/build/types/index.d.ts.map +1 -1
  20. package/build/types/matchesAnyGlob.d.ts.map +1 -1
  21. package/build/types/mutateJson.d.ts.map +1 -1
  22. package/build/types/nanosecondsToSanity.d.ts.map +1 -1
  23. package/coverage/AggregateTiming.ts.html +2 -2
  24. package/coverage/CachingHost.ts.html +252 -42
  25. package/coverage/Host.ts.html +15 -3
  26. package/coverage/PackageJson.ts.html +4 -4
  27. package/coverage/SimpleHost.ts.html +40 -7
  28. package/coverage/Table.ts.html +137 -29
  29. package/coverage/Timing.ts.html +1 -1
  30. package/coverage/clover.xml +527 -396
  31. package/coverage/coverage-final.json +13 -13
  32. package/coverage/findWorkspaceDir.ts.html +4 -4
  33. package/coverage/getPackageNameToDir.ts.html +18 -6
  34. package/coverage/getWorkspacePackageDirs.ts.html +168 -138
  35. package/coverage/index.html +40 -40
  36. package/coverage/index.ts.html +30 -30
  37. package/coverage/matchesAnyGlob.ts.html +37 -10
  38. package/coverage/mutateJson.ts.html +17 -5
  39. package/coverage/nanosecondsToSanity.ts.html +11 -5
  40. package/package.json +1 -2
  41. package/src/AggregateTiming.ts +1 -1
  42. package/src/CachingHost.ts +104 -34
  43. package/src/Host.ts +5 -1
  44. package/src/PackageJson.ts +3 -3
  45. package/src/SimpleHost.ts +14 -3
  46. package/src/Table.ts +58 -22
  47. package/src/__tests__/CachingHost.spec.ts +203 -166
  48. package/src/findWorkspaceDir.ts +3 -3
  49. package/src/getPackageNameToDir.ts +6 -2
  50. package/src/getWorkspacePackageDirs.ts +19 -9
  51. package/src/index.ts +7 -7
  52. package/src/matchesAnyGlob.ts +12 -3
  53. package/src/mutateJson.ts +5 -1
  54. package/src/nanosecondsToSanity.ts +3 -1
  55. package/vitest.config.mjs +6 -7
  56. package/vitest.config.mjs.timestamp-1736878329730-aa478e2241542.mjs +2 -2
package/src/__tests__/CachingHost.spec.ts CHANGED
@@ -4,11 +4,11 @@
  * Licensed under the MIT license. See LICENSE file in the project root for details.
  *
  */
- import { describe, expect, it, beforeEach } from "vitest";
- import { CachingHost } from "../CachingHost.js";
  import * as realfs from "node:fs";
- import * as path from "node:path";
  import * as os from "node:os";
+ import * as path from "node:path";
+ import { beforeEach, describe, expect, it } from "vitest";
+ import { CachingHost } from "../CachingHost.js";

  interface TestCase<T> {
  getFs: () => T;
@@ -25,220 +25,257 @@ class RealFsTestCase implements TestCase<typeof realfs> {
  }

  describe(CachingHost, () => {
- describe.each([["fs", new RealFsTestCase()]])("%s", (_testCaseName, testCase) => {
- let baseDir: string;
- let fs: ReturnType<typeof testCase.getFs>;
-
- let SYMLINK_JSON_PATH: string;
- let SYMLINK_TXT_PATH: string;
- let FILE_JSON_PATH: string;
- let FILE_TXT_PATH: string;
-
- beforeEach(() => {
- fs = testCase.getFs();
- baseDir = testCase.createTmpDir();
-
- SYMLINK_JSON_PATH = path.resolve(baseDir, "symlink.json");
- SYMLINK_TXT_PATH = path.resolve(baseDir, "symlink.txt");
- FILE_TXT_PATH = path.resolve(baseDir, "file.txt");
- FILE_JSON_PATH = path.resolve(baseDir, "file.json");
+ describe.each([["fs", new RealFsTestCase()]])(
+ "%s",
+ (_testCaseName, testCase) => {
+ let baseDir: string;
+ let fs: ReturnType<typeof testCase.getFs>;
+
+ let SYMLINK_JSON_PATH: string;
+ let SYMLINK_TXT_PATH: string;
+ let FILE_JSON_PATH: string;
+ let FILE_TXT_PATH: string;
+
+ beforeEach(() => {
+ fs = testCase.getFs();
+ baseDir = testCase.createTmpDir();
+
+ SYMLINK_JSON_PATH = path.resolve(baseDir, "symlink.json");
+ SYMLINK_TXT_PATH = path.resolve(baseDir, "symlink.txt");
+ FILE_TXT_PATH = path.resolve(baseDir, "file.txt");
+ FILE_JSON_PATH = path.resolve(baseDir, "file.json");
+
+ fs.writeFileSync(FILE_JSON_PATH, JSON.stringify({ hi: "mom" }), {
+ encoding: "utf-8",
+ });
+ fs.symlinkSync(FILE_JSON_PATH, SYMLINK_JSON_PATH);
+
+ fs.writeFileSync(FILE_TXT_PATH, "hi dad", { encoding: "utf-8" });
+ fs.symlinkSync(FILE_TXT_PATH, SYMLINK_TXT_PATH);
+ });
+
+ function expectFileToExist(file: string) {
+ return expect(fs.existsSync(file));
+ }
+
+ function expectFileContents(file: string) {
+ return expect(fs.readFileSync(file, { encoding: "utf-8" }));
+ }
+
+ function expectSymlinkTarget(src: string, target: string) {
+ const stat = fs.lstatSync(src);
+ expect(stat.isSymbolicLink() && fs.readlinkSync(src)).toEqual(target);
+ }
+
+ it("Answers exists() properly", async () => {
+ expect.assertions(2);
+ await realfs.promises.writeFile(path.join(baseDir, "b.txt"), "hi", {
+ encoding: "utf-8",
+ });
+ const host = new CachingHost(fs as any);
+ expect(host.exists(path.join(baseDir, "b.txt"))).toBe(true);
+ expect(host.exists(path.join(baseDir, "nosuchfile.txt"))).toBe(false);
+ });
+
+ it("properly handles deletes", async () => {
+ expect.assertions(2);
+ const host = new CachingHost(fs as any);
+
+ host.writeFile(path.join(baseDir, "b.txt"), "hi", {
+ encoding: "utf-8",
+ });
+ host.deleteFile(path.join(baseDir, "b.txt"));
+ host.deleteFile(path.join(baseDir, "a.json"));

- fs.writeFileSync(FILE_JSON_PATH, JSON.stringify({ hi: "mom" }), { encoding: "utf-8" });
- fs.symlinkSync(FILE_JSON_PATH, SYMLINK_JSON_PATH);
+ await host.flush();

- fs.writeFileSync(FILE_TXT_PATH, "hi dad", { encoding: "utf-8" });
- fs.symlinkSync(FILE_TXT_PATH, SYMLINK_TXT_PATH);
- });
+ expectFileToExist(path.join(baseDir, "b.txt")).toBeFalsy();
+ expectFileToExist(path.join(baseDir, "a.txt")).toBeFalsy();
+ });

- function expectFileToExist(file: string) {
- return expect(fs.existsSync(file));
- }
+ it("handles simple read/write workflow", async () => {
+ expect.assertions(1);

- function expectFileContents(file: string) {
- return expect(fs.readFileSync(file, { encoding: "utf-8" }));
- }
+ const host = new CachingHost(fs as any);
+ host.writeFile(FILE_JSON_PATH, "cow", { encoding: "utf-8" });

- function expectSymlinkTarget(src: string, target: string) {
- const stat = fs.lstatSync(src);
- expect(stat.isSymbolicLink() && fs.readlinkSync(src)).toEqual(target);
- }
+ expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "cow",
+ );
+ });

- it("Answers exists() properly", async () => {
- expect.assertions(2);
- await realfs.promises.writeFile(path.join(baseDir, "b.txt"), "hi", { encoding: "utf-8" });
- const host = new CachingHost(fs as any);
- expect(host.exists(path.join(baseDir, "b.txt"))).toBe(true);
- expect(host.exists(path.join(baseDir, "nosuchfile.txt"))).toBe(false);
- });
+ it("handles target symlink changing", async () => {
+ expect.assertions(1);

- it("properly handles deletes", async () => {
- expect.assertions(2);
- const host = new CachingHost(fs as any);
+ const host = new CachingHost(fs as any);
+ host.writeFile(FILE_JSON_PATH, "cow", { encoding: "utf-8" });

- host.writeFile(path.join(baseDir, "b.txt"), "hi", { encoding: "utf-8" });
- host.deleteFile(path.join(baseDir, "b.txt"));
- host.deleteFile(path.join(baseDir, "a.json"));
+ expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "cow",
+ );
+ });

- await host.flush();
+ it("handles writing symlinks properly", async () => {
+ expect.assertions(8);

- expectFileToExist(path.join(baseDir, "b.txt")).toBeFalsy();
- expectFileToExist(path.join(baseDir, "a.txt")).toBeFalsy();
- });
+ const host = new CachingHost(fs as any);

- it("handles simple read/write workflow", async () => {
- expect.assertions(1);
+ // file.json should now hold "hmm"
+ host.writeFile(SYMLINK_JSON_PATH, "hmm", { encoding: "utf-8" });

- const host = new CachingHost(fs as any);
- host.writeFile(FILE_JSON_PATH, "cow", { encoding: "utf-8" });
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );

- expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual("cow");
- });
+ // Write it out so we can verify disk is right
+ await host.flush();

- it("handles target symlink changing", async () => {
- expect.assertions(1);
+ expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
+ expectFileToExist(FILE_TXT_PATH).toBeTruthy();

- const host = new CachingHost(fs as any);
- host.writeFile(FILE_JSON_PATH, "cow", { encoding: "utf-8" });
+ expectFileContents(FILE_JSON_PATH).toBe("hmm");
+ expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");

- expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual("cow");
- });
+ expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);

- it("handles writing symlinks properly", async () => {
- expect.assertions(8);
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ });

- const host = new CachingHost(fs as any);
+ it("handles writing symlinks properly if you read it first", async () => {
+ expect.assertions(8);

- // file.json should now hold "hmm"
- host.writeFile(SYMLINK_JSON_PATH, "hmm", { encoding: "utf-8" });
+ const host = new CachingHost(fs as any);
+ host.readFile(SYMLINK_JSON_PATH);

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
+ // file.json should now hold "hmm"
+ host.writeFile(path.join(baseDir, "symlink.json"), "hmm", {
+ encoding: "utf-8",
+ });

- // Write it out so we can verify disk is right
- await host.flush();
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );

- expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
- expectFileToExist(FILE_TXT_PATH).toBeTruthy();
+ // Write it out so we can verify disk is right
+ await host.flush();

- expectFileContents(FILE_JSON_PATH).toBe("hmm");
- expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");
+ expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
+ expectFileToExist(FILE_TXT_PATH).toBeTruthy();

- expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);
+ expectFileContents(FILE_JSON_PATH).toBe("hmm");
+ expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- });
+ expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);

- it("handles writing symlinks properly if you read it first", async () => {
- expect.assertions(8);
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ });

- const host = new CachingHost(fs as any);
- host.readFile(SYMLINK_JSON_PATH);
+ it("handles creating new symlinks", async () => {
+ expect.assertions(8);

- // file.json should now hold "hmm"
- host.writeFile(path.join(baseDir, "symlink.json"), "hmm", { encoding: "utf-8" });
+ const host = new CachingHost(fs as any);

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
+ host.readFile(SYMLINK_JSON_PATH);

- // Write it out so we can verify disk is right
- await host.flush();
+ // file.json should now hold "hmm"
+ host.writeFile(path.join(baseDir, "symlink.json"), "hmm", {
+ encoding: "utf-8",
+ });

- expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
- expectFileToExist(FILE_TXT_PATH).toBeTruthy();
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );

- expectFileContents(FILE_JSON_PATH).toBe("hmm");
- expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");
+ // Write it out so we can verify disk is right
+ await host.flush();

- expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);
+ expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
+ expectFileToExist(FILE_TXT_PATH).toBeTruthy();

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- });
+ expectFileContents(FILE_JSON_PATH).toBe("hmm");
+ expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");

- it("handles creating new symlinks", async () => {
- expect.assertions(8);
+ expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);

- const host = new CachingHost(fs as any);
+ expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual(
+ "hmm",
+ );
+ });

- host.readFile(SYMLINK_JSON_PATH);
+ it("makes directories", async () => {
+ expect.assertions(3);

- // file.json should now hold "hmm"
- host.writeFile(path.join(baseDir, "symlink.json"), "hmm", { encoding: "utf-8" });
+ const host = new CachingHost(fs as any);

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- expect(host.readFile(FILE_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
+ host.mkdir(path.join(baseDir, "foo", "bar", "baz"), {
+ recursive: true,
+ });

- // Write it out so we can verify disk is right
- await host.flush();
+ // Write it out so we can verify disk is right
+ await host.flush();

- expectFileToExist(SYMLINK_JSON_PATH).toBeTruthy();
- expectFileToExist(FILE_TXT_PATH).toBeTruthy();
+ expectFileToExist(path.join(baseDir, "foo")).toBeTruthy();
+ expectFileToExist(path.join(baseDir, "foo", "bar")).toBeTruthy();
+ expectFileToExist(path.join(baseDir, "foo", "bar", "baz")).toBeTruthy();
+ });

- expectFileContents(FILE_JSON_PATH).toBe("hmm");
- expectFileContents(SYMLINK_JSON_PATH).toBe("hmm");
+ it("can unlink empty dirs", async () => {
+ expect.assertions(1);

- expectSymlinkTarget(SYMLINK_JSON_PATH, FILE_JSON_PATH);
+ // base setup
+ const fooDirPath = path.join(baseDir, "foo");
+ fs.mkdirSync(fooDirPath, { recursive: true });

- expect(host.readFile(SYMLINK_JSON_PATH, { encoding: "utf-8" })).toEqual("hmm");
- });
-
- it("makes directories", async () => {
- expect.assertions(3);
-
- const host = new CachingHost(fs as any);
-
- host.mkdir(path.join(baseDir, "foo", "bar", "baz"), { recursive: true });
-
- // Write it out so we can verify disk is right
- await host.flush();
-
- expectFileToExist(path.join(baseDir, "foo")).toBeTruthy();
- expectFileToExist(path.join(baseDir, "foo", "bar")).toBeTruthy();
- expectFileToExist(path.join(baseDir, "foo", "bar", "baz")).toBeTruthy();
- });
+ // prep obj
+ const host = new CachingHost(fs as any);
+ host.rmdir(fooDirPath);

- it("can unlink empty dirs", async () => {
- expect.assertions(1);
+ // Write it out so we can verify disk is right
+ await host.flush();

- // base setup
- const fooDirPath = path.join(baseDir, "foo");
- fs.mkdirSync(fooDirPath, { recursive: true });
+ expectFileToExist(fooDirPath).toBeFalsy();
+ });

- // prep obj
- const host = new CachingHost(fs as any);
- host.rmdir(fooDirPath);
+ it("doesnt let you delete a directory with files", async () => {
+ expect.assertions(2);

- // Write it out so we can verify disk is right
- await host.flush();
+ const fooDirPath = path.join(baseDir, "foo");
+ const barFilePath = path.join(fooDirPath, "bar.txt");

- expectFileToExist(fooDirPath).toBeFalsy();
- });
+ const host = new CachingHost(fs as any);
+ host.mkdir(fooDirPath, { recursive: true });
+ host.writeJson(barFilePath, { hi: 5 });

- it("doesnt let you delete a directory with files", async () => {
- expect.assertions(2);
+ expect(() => {
+ host.rmdir(fooDirPath);
+ }).toThrow();

- const fooDirPath = path.join(baseDir, "foo");
- const barFilePath = path.join(fooDirPath, "bar.txt");
+ // Write it out so we can verify disk is right
+ await host.flush();

- const host = new CachingHost(fs as any);
- host.mkdir(fooDirPath, { recursive: true });
- host.writeJson(barFilePath, { hi: 5 });
+ expectFileToExist(fooDirPath).toBeTruthy();
+ });

- expect(() => {
- host.rmdir(fooDirPath);
- }).toThrow();
-
- // Write it out so we can verify disk is right
- await host.flush();
-
- expectFileToExist(fooDirPath).toBeTruthy();
- });
-
- it("doesn't let you rmdir() a file", () => {
- expect.assertions(1);
- const host = new CachingHost(fs as any);
- expect(() => {
- host.rmdir(FILE_JSON_PATH);
- }).toThrow();
- });
- });
+ it("doesn't let you rmdir() a file", () => {
+ expect.assertions(1);
+ const host = new CachingHost(fs as any);
+ expect(() => {
+ host.rmdir(FILE_JSON_PATH);
+ }).toThrow();
+ });
+ },
+ );
  });
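For orientation, the reworked spec above exercises CachingHost's buffered write-then-flush model. The sketch below strings the same calls together outside of vitest (writeFile, readFile, mkdir, writeJson, exists, flush); the `@monorepolint/utils` import specifier and the temp-directory setup are assumptions, not taken from this diff.

```ts
// Minimal sketch, not part of the package: mutations are buffered on the host
// and written to the real filesystem when flush() resolves.
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { CachingHost } from "@monorepolint/utils"; // assumed import specifier

async function main() {
  const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), "caching-host-"));
  const host = new CachingHost(fs as any);

  // Buffered mutations.
  host.mkdir(path.join(baseDir, "packages", "foo"), { recursive: true });
  host.writeJson(path.join(baseDir, "packages", "foo", "package.json"), { name: "foo" });
  host.writeFile(path.join(baseDir, "notes.txt"), "hi", { encoding: "utf-8" });

  // Reads observe the buffered state before any flush.
  console.log(host.readFile(path.join(baseDir, "notes.txt"), { encoding: "utf-8" })); // "hi"

  // Persist everything in one pass, as the tests do before checking disk.
  await host.flush();
  console.log(fs.existsSync(path.join(baseDir, "packages", "foo", "package.json"))); // true
}

main().catch(console.error);
```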
package/src/findWorkspaceDir.ts CHANGED
@@ -5,11 +5,11 @@
  *
  */

+ import { findUp } from "find-up";
+ import * as fs from "fs";
  import * as path from "path";
  import { Host } from "./Host.js";
  import { PackageJson } from "./PackageJson.js";
- import * as fs from "fs";
- import { findUp } from "find-up";

  export async function findPnpmWorkspaceDir(cwd: string) {
  const workspaceManifestLocation = await findUp("pnpm-workspace.yaml", {
@@ -20,7 +20,7 @@ export async function findPnpmWorkspaceDir(cwd: string) {

  export async function findWorkspaceDir(
  host: Pick<Host, "readJson" | "exists">,
- dir: string
+ dir: string,
  ): Promise<string | undefined> {
  // Defining workspaces in package.json is not necessary in PNPM
  const maybePnpmWorkspaceDir = await findPnpmWorkspaceDir(dir);
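As a usage sketch only: findWorkspaceDir needs nothing more than the `Pick<Host, "readJson" | "exists">` shape in the signature above, so a plain object over node:fs is enough. The `@monorepolint/utils` import specifier and the exact host member signatures are assumptions inferred from how the functions above use them.

```ts
import * as fs from "node:fs";
import { findWorkspaceDir } from "@monorepolint/utils"; // assumed import specifier

// A minimal host: just the two members the signature above asks for.
const host = {
  readJson: (p: string) => JSON.parse(fs.readFileSync(p, { encoding: "utf-8" })),
  exists: (p: string) => fs.existsSync(p),
};

// Resolves the workspace root (pnpm-workspace.yaml found via find-up, or a
// package.json "workspaces" field), or undefined when neither applies.
findWorkspaceDir(host, process.cwd()).then((dir) => {
  console.log(dir ?? "no workspace found");
});
```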
package/src/getPackageNameToDir.ts CHANGED
@@ -17,11 +17,15 @@ import { PackageJson } from "./PackageJson.js";
  export async function getPackageNameToDir(
  host: Pick<Host, "readJson" | "exists">,
  workspaceDir: string,
- resolvePaths: boolean = false
+ resolvePaths: boolean = false,
  ) {
  const ret = new Map<string, string>();

- const workspacePackages = await getWorkspacePackageDirs(host, workspaceDir, resolvePaths);
+ const workspacePackages = await getWorkspacePackageDirs(
+ host,
+ workspaceDir,
+ resolvePaths,
+ );
  for (const packageDir of workspacePackages) {
  const packagePath = pathJoin(packageDir, "package.json");
  const { name } = host.readJson(packagePath) as PackageJson;
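A hedged usage sketch of the reshaped call above: getPackageNameToDir resolves a Map from package name (as read from each package.json) to its directory. The import specifier is assumed; the host parameter reuses the `Pick<Host, ...>` shape from the signature.

```ts
import { getPackageNameToDir, Host } from "@monorepolint/utils"; // assumed import specifier

async function printPackageDirs(host: Pick<Host, "readJson" | "exists">, workspaceDir: string) {
  // resolvePaths defaults to false, mirroring the signature above.
  const nameToDir = await getPackageNameToDir(host, workspaceDir);
  for (const [name, dir] of nameToDir) {
    console.log(`${name} -> ${dir}`);
  }
}
```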
package/src/getWorkspacePackageDirs.ts CHANGED
@@ -5,19 +5,19 @@
  *
  */

+ import { findPackages } from "find-packages";
  import { existsSync } from "fs";
  import * as glob from "glob";
- import * as path from "node:path";
  import * as fs from "node:fs";
+ import * as path from "node:path";
+ import readYamlFile from "read-yaml-file";
  import { Host } from "./Host.js";
  import { PackageJson } from "./PackageJson.js";
- import readYamlFile from "read-yaml-file";
- import { findPackages } from "find-packages";

  async function findPNPMWorkspacePackages(workspaceRoot: string) {
  workspaceRoot = fs.realpathSync(workspaceRoot);
  const workspaceManifest = await readYamlFile.default<{ packages?: string[] }>(
- path.join(workspaceRoot, "pnpm-workspace.yaml")
+ path.join(workspaceRoot, "pnpm-workspace.yaml"),
  );

  return findPackages(workspaceRoot, {
@@ -30,11 +30,15 @@ async function findPNPMWorkspacePackages(workspaceRoot: string) {
  export async function getWorkspacePackageDirs(
  host: Pick<Host, "readJson" | "exists">,
  workspaceDir: string,
- resolvePaths: boolean = false
+ resolvePaths: boolean = false,
  ) {
- const packageJson = host.readJson(path.join(workspaceDir, "package.json")) as PackageJson;
+ const packageJson = host.readJson(
+ path.join(workspaceDir, "package.json"),
+ ) as PackageJson;

- const isPnpmWorkspace = host.exists(path.join(workspaceDir, "pnpm-workspace.yaml"));
+ const isPnpmWorkspace = host.exists(
+ path.join(workspaceDir, "pnpm-workspace.yaml"),
+ );
  if (isPnpmWorkspace) {
  const workspacePackages = await findPNPMWorkspacePackages(workspaceDir);
  if (workspacePackages.length === 0) {
@@ -44,7 +48,9 @@ export async function getWorkspacePackageDirs(
  }

  if (!packageJson.workspaces) {
- throw new Error("Unsupported! Monorepo is not backed by either pnpm nor yarn workspaces.");
+ throw new Error(
+ "Unsupported! Monorepo is not backed by either pnpm nor yarn workspaces.",
+ );
  }

  const ret: string[] = [];
@@ -54,7 +60,11 @@ export async function getWorkspacePackageDirs(

  for (const pattern of packageGlobs) {
  for (const packagePath of glob.sync(pattern, { cwd: workspaceDir })) {
- const packageJsonPath = path.join(workspaceDir, packagePath, "package.json");
+ const packageJsonPath = path.join(
+ workspaceDir,
+ packagePath,
+ "package.json",
+ );

  if (existsSync(packageJsonPath)) {
  if (resolvePaths === true) {
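For context, a small sketch of how the function above is typically driven: pnpm workspaces go through pnpm-workspace.yaml plus find-packages, while everything else expands the package.json "workspaces" globs relative to the workspace directory. The import specifier is assumed.

```ts
import { getWorkspacePackageDirs, Host } from "@monorepolint/utils"; // assumed import specifier

async function listWorkspacePackages(host: Pick<Host, "readJson" | "exists">, workspaceDir: string) {
  // Passing true takes the `resolvePaths === true` branch shown above.
  const dirs = await getWorkspacePackageDirs(host, workspaceDir, true);
  for (const dir of dirs) {
    console.log(dir);
  }
}
```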
package/src/index.ts CHANGED
@@ -5,16 +5,16 @@
  *
  */

- export { getWorkspacePackageDirs } from "./getWorkspacePackageDirs.js";
- export { mutateJson } from "./mutateJson.js";
- export { PackageJson } from "./PackageJson.js";
+ export { AggregateTiming } from "./AggregateTiming.js";
+ export { CachingHost } from "./CachingHost.js";
  export { findWorkspaceDir } from "./findWorkspaceDir.js";
  export { getPackageNameToDir } from "./getPackageNameToDir.js";
+ export { getWorkspacePackageDirs } from "./getWorkspacePackageDirs.js";
  export { Host } from "./Host.js";
- export { SimpleHost } from "./SimpleHost.js";
- export { CachingHost } from "./CachingHost.js";
  export { matchesAnyGlob } from "./matchesAnyGlob.js";
+ export { mutateJson } from "./mutateJson.js";
  export { nanosecondsToSanity } from "./nanosecondsToSanity.js";
- export { AggregateTiming } from "./AggregateTiming.js";
- export { Timing } from "./Timing.js";
+ export { PackageJson } from "./PackageJson.js";
+ export { SimpleHost } from "./SimpleHost.js";
  export { Table } from "./Table.js";
+ export { Timing } from "./Timing.js";
package/src/matchesAnyGlob.ts CHANGED
@@ -14,12 +14,18 @@ import { Table } from "./Table.js";
  * Multimap cache of whether a needle was found in the glob haystack. Short circuits many
  * individual checks against the globs.
  */
- const cache = new Map</* haystack */ readonly string[], Map</* needle */ string, /* result */ boolean>>();
+ const cache = new Map<
+ /* haystack */ readonly string[],
+ Map</* needle */ string, /* result */ boolean>
+ >();

  /**
  * Multimap cache of whether a needle matches a glob. Allows us to avoid regexp's.
  */
- const singleMatcherCache = new Map</* glob */ string, Map</* needle */ string, /* result*/ boolean>>();
+ const singleMatcherCache = new Map<
+ /* glob */ string,
+ Map</* needle */ string, /* result*/ boolean>
+ >();

  /**
  * Cache of glob to regular expression. Compiling the regular expression is expensive.
@@ -39,7 +45,10 @@ interface MatchesAnyGlob {
  (needle: string, haystack: readonly string[]): boolean | undefined;
  printStats?: () => void;
  }
- export const matchesAnyGlob: MatchesAnyGlob = function matchesAnyGlobFunc(needle: string, haystack: readonly string[]) {
+ export const matchesAnyGlob: MatchesAnyGlob = function matchesAnyGlobFunc(
+ needle: string,
+ haystack: readonly string[],
+ ) {
  matchTime -= process.hrtime.bigint();

  let cacheForHaystack = cache.get(haystack);
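The reshaped declarations above are two-level memoization maps: an outer Map keyed by the haystack array reference and an inner Map keyed by the needle. The sketch below illustrates that pattern generically; it is not the package's implementation, and `match` stands in for whatever single-glob matcher is used.

```ts
// Generic sketch of the cache layout shown above, not the package's code.
function makeCachedMatcher(match: (needle: string, glob: string) => boolean) {
  const cache = new Map</* haystack */ readonly string[], Map</* needle */ string, boolean>>();

  return function matchesAny(needle: string, haystack: readonly string[]): boolean {
    let forHaystack = cache.get(haystack);
    if (forHaystack === undefined) {
      forHaystack = new Map();
      cache.set(haystack, forHaystack);
    }

    const hit = forHaystack.get(needle);
    if (hit !== undefined) {
      return hit; // short circuit: this needle was already checked against this haystack
    }

    const result = haystack.some((glob) => match(needle, glob));
    forHaystack.set(needle, result);
    return result;
  };
}
```

Because the outer key is the array reference, the cache only helps when callers pass the same haystack instance repeatedly, which is the short-circuiting the source comment describes.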
package/src/mutateJson.ts CHANGED
@@ -6,7 +6,11 @@
  */

  import { Host } from "./Host.js";
- export function mutateJson<T extends object>(path: string, host: Host, mutator: (f: T) => T) {
+ export function mutateJson<T extends object>(
+ path: string,
+ host: Host,
+ mutator: (f: T) => T,
+ ) {
  let file = host.readJson(path) as T;
  file = mutator(file);
  host.writeJson(path, file);
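As the body above shows, mutateJson is a read-modify-write helper over a Host: readJson, apply the mutator, writeJson. A hedged usage sketch follows; the import specifier, the `PkgLike` shape, and the script being added are all assumptions for illustration.

```ts
import { mutateJson, Host } from "@monorepolint/utils"; // assumed import specifier

// Hypothetical shape for the JSON being edited; T only has to extend object.
interface PkgLike {
  name?: string;
  scripts?: Record<string, string>;
}

function addCleanScript(host: Host, packageJsonPath: string) {
  mutateJson<PkgLike>(packageJsonPath, host, (pkg) => ({
    ...pkg,
    scripts: { ...pkg.scripts, clean: "rm -rf build" },
  }));
}
```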
package/src/nanosecondsToSanity.ts CHANGED
@@ -6,5 +6,7 @@
  */

  export function nanosecondsToSanity(n: bigint, precision: number = 9) {
- return n / BigInt(1000000000) + "." + ("" + (n % BigInt(1000000000))).padStart(9, "0").substring(0, precision) + "s";
+ return n / BigInt(1000000000) + "."
+ + ("" + (n % BigInt(1000000000))).padStart(9, "0").substring(0, precision)
+ + "s";
  }
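The reflowed expression above splits a bigint nanosecond count into whole seconds (integer division by 1e9) and a fractional part (the remainder, left-padded to nine digits and truncated to `precision`), then appends "s". A worked example, with only the package import specifier assumed:

```ts
import { nanosecondsToSanity } from "@monorepolint/utils"; // assumed import specifier

// 1_234_567_890n ns -> quotient 1n, remainder 234_567_890n
console.log(nanosecondsToSanity(1234567890n));    // "1.234567890s"
console.log(nanosecondsToSanity(1234567890n, 3)); // "1.234s"
console.log(nanosecondsToSanity(5n));             // "0.000000005s"
```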
package/vitest.config.mjs CHANGED
@@ -1,14 +1,13 @@
-
- import { coverageConfigDefaults, defineProject, defaultExclude } from 'vitest/config'
+ import { coverageConfigDefaults, defaultExclude, defineProject } from "vitest/config";

  export default defineProject({
  test: {
- exclude: [...defaultExclude, "**/build/**"],
+ exclude: [...defaultExclude, "**/build/**"],
  coverage: {
  provider: "v8",
  enabled: true,
- exclude: [...coverageConfigDefaults.exclude, "vitest.config.*"]
- }
+ pool: "forks",
+ exclude: [...coverageConfigDefaults.exclude, "vitest.config.*"],
+ },
  },
- })
-
+ });