@lage-run/cache 0.2.5 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.json CHANGED
@@ -2,7 +2,49 @@
   "name": "@lage-run/cache",
   "entries": [
     {
-      "date": "Wed, 15 Feb 2023 16:50:52 GMT",
+      "date": "Wed, 08 Mar 2023 00:05:07 GMT",
+      "tag": "@lage-run/cache_v0.4.0",
+      "version": "0.4.0",
+      "comments": {
+        "minor": [
+          {
+            "author": "kchau@microsoft.com",
+            "package": "@lage-run/cache",
+            "commit": "e2eb2c00d1b23e2c65943e9c64134c14e04f985f",
+            "comment": "allows global script cache"
+          },
+          {
+            "author": "beachball",
+            "package": "@lage-run/cache",
+            "comment": "Bump @lage-run/hasher to v0.2.0",
+            "commit": "e2eb2c00d1b23e2c65943e9c64134c14e04f985f"
+          },
+          {
+            "author": "beachball",
+            "package": "@lage-run/cache",
+            "comment": "Bump @lage-run/target-graph to v0.7.0",
+            "commit": "e2eb2c00d1b23e2c65943e9c64134c14e04f985f"
+          }
+        ]
+      }
+    },
+    {
+      "date": "Tue, 21 Feb 2023 21:30:37 GMT",
+      "tag": "@lage-run/cache_v0.3.0",
+      "version": "0.3.0",
+      "comments": {
+        "minor": [
+          {
+            "author": "kchau@microsoft.com",
+            "package": "@lage-run/cache",
+            "commit": "d7a8a3fa2bcf434c59bea26d5964dd7235998ad2",
+            "comment": "cache directory to be centralized"
+          }
+        ]
+      }
+    },
+    {
+      "date": "Wed, 15 Feb 2023 16:51:15 GMT",
       "tag": "@lage-run/cache_v0.2.5",
       "version": "0.2.5",
       "comments": {
package/CHANGELOG.md CHANGED
@@ -1,12 +1,30 @@
 # Change Log - @lage-run/cache
 
-This log was last generated on Wed, 15 Feb 2023 16:50:52 GMT and should not be manually modified.
+This log was last generated on Wed, 08 Mar 2023 00:05:07 GMT and should not be manually modified.
 
 <!-- Start content -->
 
+## 0.4.0
+
+Wed, 08 Mar 2023 00:05:07 GMT
+
+### Minor changes
+
+- allows global script cache (kchau@microsoft.com)
+- Bump @lage-run/hasher to v0.2.0
+- Bump @lage-run/target-graph to v0.7.0
+
+## 0.3.0
+
+Tue, 21 Feb 2023 21:30:37 GMT
+
+### Minor changes
+
+- cache directory to be centralized (kchau@microsoft.com)
+
 ## 0.2.5
 
-Wed, 15 Feb 2023 16:50:52 GMT
+Wed, 15 Feb 2023 16:51:15 GMT
 
 ### Patches
 
package/lib/TargetHasher.js CHANGED
@@ -8,11 +8,33 @@ Object.defineProperty(exports, "TargetHasher", {
 });
 const _hasher = require("@lage-run/hasher");
 const _saltJs = require("./salt.js");
+const _globHasher = require("glob-hasher");
+const _hashStringsJs = require("./hashStrings.js");
+function sortObject(unordered) {
+    return Object.keys(unordered).sort((a, b)=>a.localeCompare(b)).reduce((obj, key)=>{
+        obj[key] = unordered[key];
+        return obj;
+    }, {});
+}
 class TargetHasher {
     async hash(target) {
+        const { root } = this.options;
         const hashKey = await (0, _saltJs.salt)(target.environmentGlob ?? this.options.environmentGlob ?? [
             "lage.config.js"
         ], `${target.id}|${JSON.stringify(this.options.cliArgs)}`, this.options.root, this.options.cacheKey || "");
+        if (target.cwd === root && target.cache) {
+            if (!target.inputs) {
+                throw new Error("Root-level targets must have `inputs` defined if it has cache enabled.");
+            }
+            const hashes = (0, _globHasher.hashGlobGit)(target.inputs, {
+                cwd: root,
+                gitignore: false
+            }) ?? {};
+            const sortedHashMap = sortObject(hashes);
+            const sortedHashes = Object.values(sortedHashMap);
+            sortedHashes.push(hashKey);
+            return (0, _hashStringsJs.hashStrings)(sortedHashes);
+        }
        const hasher = new _hasher.Hasher(target.cwd);
        return hasher.createPackageHash(hashKey);
    }
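Note: this is the change behind the "allows global script cache" entry above. When a target's cwd is the workspace root and caching is enabled, hash() now requires target.inputs, hashes those globs with glob-hasher, sorts the per-file hashes, appends the salt, and digests the list with hashStrings. A minimal sketch of calling it (the target fields shown are only the ones hash() reads in this hunk, and passing the options object through the constructor is an assumption, since the constructor is outside this hunk):

    import { TargetHasher } from "@lage-run/cache";

    const hasher = new TargetHasher({
      root: "/repo",                       // workspace root (assumed constructor option)
      environmentGlob: ["lage.config.js"],
      cliArgs: [],
      cacheKey: "",
    });

    // Root-level ("global") target: cwd === root and cache enabled, so `inputs` is mandatory;
    // omitting it makes hash() throw. The id and globs below are illustrative.
    const hash = await hasher.hash({
      id: "#build",
      cwd: "/repo",
      cache: true,
      inputs: ["packages/*/src/**", "package.json"],
    });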
package/lib/chunkPromise.d.ts ADDED
@@ -0,0 +1,3 @@
+type PromiseFn = () => Promise<unknown>;
+export declare function chunkPromise(promises: (Promise<unknown> | PromiseFn)[], limit?: number): Promise<void>;
+export {};
package/lib/chunkPromise.js ADDED
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+Object.defineProperty(exports, "chunkPromise", {
+    enumerable: true,
+    get: ()=>chunkPromise
+});
+async function chunkPromise(promises, limit = 5) {
+    for(let i = 0; i < promises.length; i += limit){
+        await Promise.all(promises.slice(i, i + limit).map((p)=>typeof p === "function" ? p() : p));
+    }
+}
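Note: chunkPromise awaits the input in slices of limit items, accepting either promises or promise factories. A small illustration (the task list is hypothetical, and the relative import assumes the snippet sits next to lib/chunkPromise.js, since the helper is not re-exported from the package index):

    import { chunkPromise } from "./chunkPromise.js";

    // Factories start their work only when invoked, so at most two tasks run at a time here.
    const tasks = ["a", "b", "c", "d", "e", "f"].map(
      (name) => async () => console.log(`cleaning ${name}`)
    );

    await chunkPromise(tasks, 2); // batches of 2; the default limit is 5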
package/lib/getCacheDirectory.d.ts ADDED
@@ -0,0 +1,3 @@
+export declare function getCacheDirectoryRoot(root: string): string;
+export declare function getCacheDirectory(root: string, hash: string): string;
+export declare function getLogsCacheDirectory(root: string, hash: string): string;
package/lib/getCacheDirectory.js ADDED
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+function _export(target, all) {
+    for(var name in all)Object.defineProperty(target, name, {
+        enumerable: true,
+        get: all[name]
+    });
+}
+_export(exports, {
+    getCacheDirectoryRoot: ()=>getCacheDirectoryRoot,
+    getCacheDirectory: ()=>getCacheDirectory,
+    getLogsCacheDirectory: ()=>getLogsCacheDirectory
+});
+const _path = /*#__PURE__*/ _interopRequireDefault(require("path"));
+function _interopRequireDefault(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+function getCacheDirectoryRoot(root) {
+    return _path.default.join(root, "node_modules", ".cache", "lage");
+}
+function getCacheDirectory(root, hash) {
+    return _path.default.join(getCacheDirectoryRoot(root), "cache", hash.substring(0, 4));
+}
+function getLogsCacheDirectory(root, hash) {
+    return _path.default.join(getCacheDirectoryRoot(root), "logs", hash.substring(0, 4));
+}
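Note: these helpers centralize the cache under a single per-repo root, sharded by the first four characters of the hash. Illustrative values (the functions are re-exported from the package index further down in this diff):

    import { getCacheDirectoryRoot, getCacheDirectory, getLogsCacheDirectory } from "@lage-run/cache";

    getCacheDirectoryRoot("/repo");               // "/repo/node_modules/.cache/lage"
    getCacheDirectory("/repo", "abcd1234ef");     // "/repo/node_modules/.cache/lage/cache/abcd"
    getLogsCacheDirectory("/repo", "abcd1234ef"); // "/repo/node_modules/.cache/lage/logs/abcd"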
package/lib/hashStrings.d.ts ADDED
@@ -0,0 +1 @@
+export declare function hashStrings(strings: string | string[]): string;
package/lib/hashStrings.js ADDED
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+    value: true
+});
+Object.defineProperty(exports, "hashStrings", {
+    enumerable: true,
+    get: ()=>hashStrings
+});
+const _crypto = /*#__PURE__*/ _interopRequireDefault(require("crypto"));
+function _interopRequireDefault(obj) {
+    return obj && obj.__esModule ? obj : {
+        default: obj
+    };
+}
+function hashStrings(strings) {
+    const hasher = _crypto.default.createHash("sha1");
+    const anArray = typeof strings === "string" ? [
+        strings
+    ] : strings;
+    const elements = [
+        ...anArray
+    ];
+    elements.sort((a, b)=>a.localeCompare(b));
+    elements.forEach((element)=>hasher.update(element));
+    return hasher.digest("hex");
+}
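Note: hashStrings sorts its inputs before hashing, so the digest is order-insensitive, and a bare string is treated as a one-element array. For example (values arbitrary; the helper is internal and not re-exported from the package index):

    import { hashStrings } from "./hashStrings.js";

    hashStrings(["b", "a"]) === hashStrings(["a", "b"]); // true: inputs are sorted first
    hashStrings("only") === hashStrings(["only"]);       // true: a string is wrapped in an array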
package/lib/index.d.ts CHANGED
@@ -3,3 +3,4 @@ export { RemoteFallbackCacheProvider } from "./providers/RemoteFallbackCacheProv
 export { TargetHasher } from "./TargetHasher.js";
 export type { CacheOptions } from "./types/CacheOptions.js";
 export type { CacheProvider } from "./types/CacheProvider.js";
+export { getCacheDirectory, getLogsCacheDirectory, getCacheDirectoryRoot } from "./getCacheDirectory.js";
package/lib/index.js CHANGED
@@ -11,8 +11,12 @@ function _export(target, all) {
 _export(exports, {
     BackfillCacheProvider: ()=>_backfillCacheProviderJs.BackfillCacheProvider,
     RemoteFallbackCacheProvider: ()=>_remoteFallbackCacheProviderJs.RemoteFallbackCacheProvider,
-    TargetHasher: ()=>_targetHasherJs.TargetHasher
+    TargetHasher: ()=>_targetHasherJs.TargetHasher,
+    getCacheDirectory: ()=>_getCacheDirectoryJs.getCacheDirectory,
+    getLogsCacheDirectory: ()=>_getCacheDirectoryJs.getLogsCacheDirectory,
+    getCacheDirectoryRoot: ()=>_getCacheDirectoryJs.getCacheDirectoryRoot
 });
 const _backfillCacheProviderJs = require("./providers/BackfillCacheProvider.js");
 const _remoteFallbackCacheProviderJs = require("./providers/RemoteFallbackCacheProvider.js");
 const _targetHasherJs = require("./TargetHasher.js");
+const _getCacheDirectoryJs = require("./getCacheDirectory.js");
package/lib/providers/BackfillCacheProvider.d.ts CHANGED
@@ -16,6 +16,7 @@ export declare class BackfillCacheProvider implements CacheProvider {
     constructor(options: BackfillCacheProviderOptions);
     fetch(hash: string, target: Target): Promise<boolean>;
     put(hash: string, target: Target): Promise<void>;
-    clear(): Promise<void>;
-    purge(sinceDays: number): Promise<void>;
+    clear(concurrency?: number): Promise<void>;
+    purge(prunePeriod?: number, concurrency?: number): Promise<void>;
+    getCachePath(packagePath: string, hash: string): string;
 }
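Note: the new getCachePath is what points backfill's storage provider at the centralized directory; it returns the cache location relative to the package. Illustrative call (paths are hypothetical):

    import { BackfillCacheProvider } from "@lage-run/cache";

    declare const provider: BackfillCacheProvider; // construction elided; its options are outside this hunk

    provider.getCachePath("/repo/packages/foo", "abcd1234");
    // => "../../node_modules/.cache/lage/cache/abcd" (relative to the package)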
package/lib/providers/BackfillCacheProvider.js CHANGED
@@ -8,10 +8,11 @@ Object.defineProperty(exports, "BackfillCacheProvider", {
 });
 const _backfillWrapperJs = require("../backfillWrapper.js");
 const _backfillCache = require("backfill-cache");
-const _workspaceTools = require("workspace-tools");
 const _util = require("util");
 const _fs = /*#__PURE__*/ _interopRequireWildcard(require("fs"));
 const _path = /*#__PURE__*/ _interopRequireWildcard(require("path"));
+const _getCacheDirectoryJs = require("../getCacheDirectory.js");
+const _chunkPromiseJs = require("../chunkPromise.js");
 function _getRequireWildcardCache(nodeInterop) {
     if (typeof WeakMap !== "function") return null;
     var cacheBabelInterop = new WeakMap();
@@ -51,25 +52,25 @@ function _interopRequireWildcard(obj, nodeInterop) {
     }
     return newObj;
 }
-const rmdir = (0, _util.promisify)(_fs.rmdir);
-const rm = (0, _util.promisify)(_fs.unlink);
+const rm = (0, _util.promisify)(_fs.rm);
 const readdir = (0, _util.promisify)(_fs.readdir);
 const stat = (0, _util.promisify)(_fs.stat);
 const MS_IN_A_DAY = 1000 * 60 * 60 * 24;
 class BackfillCacheProvider {
-    getTargetCacheStorageProvider(cwd) {
+    getTargetCacheStorageProvider(cwd, hash) {
         const { cacheOptions } = this.options;
-        const { cacheStorageConfig , internalCacheFolder , incrementalCaching } = (0, _backfillWrapperJs.createBackfillCacheConfig)(cwd, cacheOptions, this.backfillLogger);
+        const { cacheStorageConfig , incrementalCaching } = (0, _backfillWrapperJs.createBackfillCacheConfig)(cwd, cacheOptions, this.backfillLogger);
+        const cachePath = this.getCachePath(cwd, hash);
         return (0, _backfillCache.getCacheStorageProvider)(cacheStorageConfig ?? {
             provider: "local"
-        }, internalCacheFolder, this.backfillLogger, cwd, incrementalCaching);
+        }, cachePath, this.backfillLogger, cwd, incrementalCaching);
     }
     async fetch(hash, target) {
        const { logger } = this.options;
        if (!hash) {
            return false;
        }
-        const cacheStorage = this.getTargetCacheStorageProvider(target.cwd);
+        const cacheStorage = this.getTargetCacheStorageProvider(target.cwd, hash);
        try {
            return await cacheStorage.fetch(hash);
        } catch (error) {
@@ -89,7 +90,7 @@ class BackfillCacheProvider {
        if (!hash) {
            return;
        }
-        const cacheStorage = this.getTargetCacheStorageProvider(target.cwd);
+        const cacheStorage = this.getTargetCacheStorageProvider(target.cwd, hash);
        try {
            await cacheStorage.put(hash, target.outputs ?? this.options.cacheOptions.outputGlob ?? [
                "**/*"
@@ -105,52 +106,50 @@ class BackfillCacheProvider {
        // backfill throws an error if outputGlob doesn't match any files, we will skip this error
        }
    }
-    async clear() {
-        const allPackages = (0, _workspaceTools.getPackageInfos)(this.options.root);
-        for (const info of Object.values(allPackages)){
-            const cachePath = getCachePath(info, this.options.cacheOptions.internalCacheFolder);
-            if (_fs.existsSync(cachePath)) {
-                const entries = await readdir(cachePath);
-                for (const entry of entries){
-                    const entryPath = _path.join(cachePath, entry);
-                    const entryStat = await stat(entryPath);
-                    await removeCache(entryPath, entryStat);
-                }
-            }
-        }
+    async clear(concurrency = 10) {
+        return this.purge(0, concurrency);
    }
-    async purge(sinceDays) {
-        const prunePeriod = sinceDays || 30;
+    async purge(prunePeriod = 30, concurrency = 10) {
        const now = new Date();
-        const allPackages = (0, _workspaceTools.getPackageInfos)(this.options.root);
-        for (const info of Object.values(allPackages)){
-            const cachePath = getCachePath(info, this.options.cacheOptions.internalCacheFolder);
-            if (_fs.existsSync(cachePath)) {
-                const entries = await readdir(cachePath);
-                for (const entry of entries){
-                    const entryPath = _path.join(cachePath, entry);
-                    const entryStat = await stat(entryPath);
-                    if (now.getTime() - entryStat.mtime.getTime() > prunePeriod * MS_IN_A_DAY) {
-                        await removeCache(entryPath, entryStat);
-                    }
+        const cacheTypes = [
+            "cache",
+            "logs"
+        ];
+        const entries = [];
+        for (const cacheType of cacheTypes){
+            const cacheTypeDirectory = _path.join((0, _getCacheDirectoryJs.getCacheDirectoryRoot)(this.options.root), cacheType);
+            if (_fs.existsSync(cacheTypeDirectory)) {
+                const hashPrefixes = await readdir(cacheTypeDirectory);
+                for (const prefix of hashPrefixes){
+                    const cachePath = _path.join(cacheTypeDirectory, prefix);
+                    entries.push(cachePath);
                }
            }
        }
+        await (0, _chunkPromiseJs.chunkPromise)(entries.map((entry)=>{
+            return async ()=>{
+                const entryPath = entry;
+                const entryStat = await stat(entryPath);
+                if (now.getTime() - entryStat.mtime.getTime() > prunePeriod * MS_IN_A_DAY) {
+                    await removeCache(entryPath, entryStat);
+                }
+            };
+        }), concurrency);
+    }
+    getCachePath(packagePath, hash) {
+        return _path.relative(packagePath, (0, _getCacheDirectoryJs.getCacheDirectory)(this.options.root, hash));
    }
    constructor(options){
        this.options = options;
        this.backfillLogger = (0, _backfillWrapperJs.createBackfillLogger)();
    }
 }
-function getCachePath(info, internalCacheFolder) {
-    return _path.resolve(_path.dirname(info.packageJsonPath), internalCacheFolder ?? "node_modules/.cache/backfill");
-}
 async function removeCache(cachePath, entryStat) {
    if (entryStat.isDirectory()) {
-        rmdir(cachePath, {
+        return rm(cachePath, {
            recursive: true
        });
    } else {
-        rm(cachePath);
+        return rm(cachePath);
    }
 }
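Note: with the centralized layout, clear() is now just purge() with a zero-day prune period, and both walk the cache and logs directories in concurrency-limited batches via chunkPromise. A sketch of the new call shapes (provider construction elided, since its options are outside this diff):

    import { BackfillCacheProvider } from "@lage-run/cache";

    declare const provider: BackfillCacheProvider;

    await provider.purge();      // defaults: prune entries older than 30 days, 10 removals at a time
    await provider.purge(7, 4);  // custom prune period (days) and concurrency
    await provider.clear();      // same as purge(0, 10): removes entries regardless of age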
package/lib/salt.js CHANGED
@@ -12,49 +12,8 @@ _export(exports, {
     "_testResetEnvHash": ()=>_testResetEnvHash,
     salt: ()=>salt
 });
-const _path = /*#__PURE__*/ _interopRequireWildcard(require("path"));
-const _crypto = /*#__PURE__*/ _interopRequireWildcard(require("crypto"));
-const _fastGlob = /*#__PURE__*/ _interopRequireWildcard(require("fast-glob"));
-const _promises = /*#__PURE__*/ _interopRequireWildcard(require("fs/promises"));
-function _getRequireWildcardCache(nodeInterop) {
-    if (typeof WeakMap !== "function") return null;
-    var cacheBabelInterop = new WeakMap();
-    var cacheNodeInterop = new WeakMap();
-    return (_getRequireWildcardCache = function(nodeInterop) {
-        return nodeInterop ? cacheNodeInterop : cacheBabelInterop;
-    })(nodeInterop);
-}
-function _interopRequireWildcard(obj, nodeInterop) {
-    if (!nodeInterop && obj && obj.__esModule) {
-        return obj;
-    }
-    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
-        return {
-            default: obj
-        };
-    }
-    var cache = _getRequireWildcardCache(nodeInterop);
-    if (cache && cache.has(obj)) {
-        return cache.get(obj);
-    }
-    var newObj = {};
-    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
-    for(var key in obj){
-        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
-            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
-            if (desc && (desc.get || desc.set)) {
-                Object.defineProperty(newObj, key, desc);
-            } else {
-                newObj[key] = obj[key];
-            }
-        }
-    }
-    newObj.default = obj;
-    if (cache) {
-        cache.set(obj, newObj);
-    }
-    return newObj;
-}
+const _globHasher = require("glob-hasher");
+const _hashStringsJs = require("./hashStrings.js");
 let envHashes = {};
 // A promise to guarantee the getEnvHashes is done one at a time
 let oneAtATime = Promise.resolve();
@@ -63,7 +22,7 @@ function _testResetEnvHash() {
 }
 async function salt(environmentGlobFiles, command, repoRoot, customKey = "") {
     const envHash = await getEnvHash(environmentGlobFiles, repoRoot);
-    return hashStrings([
+    return (0, _hashStringsJs.hashStrings)([
         ...envHash,
         command,
         customKey
@@ -72,6 +31,12 @@ async function salt(environmentGlobFiles, command, repoRoot, customKey = "") {
 function envHashKey(environmentGlobFiles) {
     return environmentGlobFiles.sort().join("|");
 }
+function sortObject(unordered) {
+    return Object.keys(unordered).sort((a, b)=>a.localeCompare(b)).reduce((obj, key)=>{
+        obj[key] = unordered[key];
+        return obj;
+    }, {});
+}
 async function getEnvHash(environmentGlobFiles, repoRoot) {
     const key = envHashKey(environmentGlobFiles);
     // We want to make sure that we only call getEnvHashOneAtTime one at a time
@@ -85,35 +50,17 @@ async function getEnvHash(environmentGlobFiles, repoRoot) {
     });
     return oneAtATime;
 }
-async function getEnvHashOneAtTime(environmentGlobFiles, repoRoot) {
-    const envHash = [];
-    const newline = /\r\n|\r|\n/g;
-    const LF = "\n";
-    const files = _fastGlob.sync(environmentGlobFiles, {
-        cwd: repoRoot
-    });
-    files.sort((a, b)=>a.localeCompare(b));
-    for (const file of files){
-        const hasher = _crypto.createHash("sha1");
-        hasher.update(file);
-        const fileBuffer = await _promises.readFile(_path.join(repoRoot, file), "utf-8");
-        const data = fileBuffer.replace(newline, LF);
-        hasher.update(data);
-        envHash.push(hasher.digest("hex"));
-    }
+function getEnvHashOneAtTime(environmentGlobFiles, repoRoot) {
     const key = envHashKey(environmentGlobFiles);
-    envHashes[key] = envHash;
-    return envHash;
-}
-function hashStrings(strings) {
-    const hasher = _crypto.createHash("sha1");
-    const anArray = typeof strings === "string" ? [
-        strings
-    ] : strings;
-    const elements = [
-        ...anArray
-    ];
-    elements.sort((a, b)=>a.localeCompare(b));
-    elements.forEach((element)=>hasher.update(element));
-    return hasher.digest("hex");
+    if (environmentGlobFiles.length === 0) {
+        envHashes[key] = [];
+        return envHashes[key];
+    }
+    const hashes = (0, _globHasher.hashGlobGit)(environmentGlobFiles, {
+        cwd: repoRoot,
+        gitignore: false
+    });
+    const sortedHashes = sortObject(hashes);
+    envHashes[key] = Object.values(sortedHashes);
+    return envHashes[key];
 }
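Note: the environment hash is now produced by glob-hasher in a single pass and memoized per sorted glob set (envHashes), so repeated salt() calls with the same globs reuse the cached file hashes. A hedged sketch (repo path, globs, and commands are illustrative; salt is an internal module, imported here relative to lib/):

    import { salt } from "./salt.js";

    // First call hashes the matched files via glob-hasher's hashGlobGit.
    const key1 = await salt(["lage.config.js"], "build#pkg-a", "/repo");

    // Same glob set, so the memoized env hashes are reused; only the command/customKey
    // portion of the salted digest changes.
    const key2 = await salt(["lage.config.js"], "test#pkg-a", "/repo", "v2");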
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lage-run/cache",
-  "version": "0.2.5",
+  "version": "0.4.0",
   "description": "Cache for Lage",
   "repository": {
     "url": "https://github.com/microsoft/lage"
@@ -15,20 +15,17 @@
     "lint": "monorepo-scripts lint"
   },
   "dependencies": {
-    "@lage-run/hasher": "^0.1.3",
-    "@lage-run/target-graph": "^0.6.2",
+    "@lage-run/hasher": "^0.2.0",
+    "@lage-run/target-graph": "^0.7.0",
     "@lage-run/logger": "^1.2.2",
    "backfill-config": "^6.3.0",
    "backfill-cache": "^5.6.1",
    "backfill-logger": "^5.1.3",
-    "fast-glob": "^3.2.11",
-    "workspace-tools": "^0.30.0"
+    "glob-hasher": "1.1.1"
  },
  "devDependencies": {
    "@lage-run/monorepo-fixture": "*",
-    "@types/mock-fs": "4.13.1",
-    "monorepo-scripts": "*",
-    "mock-fs": "5.2.0"
+    "monorepo-scripts": "*"
  },
  "publishConfig": {
    "access": "public"