@atlaspack/cache 3.1.1-canary.32 → 3.1.1-canary.323

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,223 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.LMDBLiteCache = exports.LmdbWrapper = void 0;
+ exports.open = open;
+ const build_cache_1 = require("@atlaspack/build-cache");
+ const feature_flags_1 = require("@atlaspack/feature-flags");
+ const rust_1 = require("@atlaspack/rust");
+ // @ts-expect-error TS7016
+ const ncp_1 = __importDefault(require("ncp"));
+ const util_1 = require("util");
+ const stream_1 = __importDefault(require("stream"));
+ const path_1 = __importDefault(require("path"));
+ const fs_1 = require("@atlaspack/fs");
+ const package_json_1 = __importDefault(require("../package.json"));
+ const FSCache_1 = require("./FSCache");
+ const logger_1 = require("@atlaspack/logger");
+ const ncpAsync = (0, util_1.promisify)(ncp_1.default);
+ class LmdbWrapper {
+ constructor(lmdb) {
+ this.lmdb = lmdb;
+ }
+ has(key) {
+ return this.lmdb.hasSync(key);
+ }
+ async delete(key) {
+ await this.lmdb.delete(key);
+ }
+ get(key) {
+ return this.lmdb.getSync(key);
+ }
+ async put(key, value) {
+ const buffer = typeof value === 'string' ? Buffer.from(value) : value;
+ await this.lmdb.put(key, buffer);
+ }
+ *keys() {
+ const PAGE_SIZE = 10000000;
+ let currentKeys = this.lmdb.keysSync(0, PAGE_SIZE);
+ while (currentKeys.length > 0) {
+ for (const key of currentKeys) {
+ yield key;
+ }
+ currentKeys = this.lmdb.keysSync(currentKeys.length, PAGE_SIZE);
+ }
+ }
+ compact(targetPath) {
+ this.lmdb.compact(targetPath);
+ }
+ }
+ exports.LmdbWrapper = LmdbWrapper;
+ function open(directory,
+ // eslint-disable-next-line no-unused-vars
+ openOptions) {
+ return new LmdbWrapper(new rust_1.Lmdb({
+ path: directory,
+ asyncWrites: true,
+ mapSize: process.env.ATLASPACK_BUILD_ENV === 'test'
+ ? 1024 * 1024 * 1024
+ : 1024 * 1024 * 1024 * 15,
+ }));
+ }
+ const pipeline = (0, util_1.promisify)(stream_1.default.pipeline);
+ class LMDBLiteCache {
+ constructor(cacheDir) {
+ this.fs = new fs_1.NodeFS();
+ this.dir = cacheDir;
+ this.cacheFilesDirectory = path_1.default.join(cacheDir, 'files');
+ this.fsCache = new FSCache_1.FSCache(this.fs, cacheDir);
+ this.store = open(cacheDir, {
+ name: 'parcel-cache',
+ encoding: 'binary',
+ compression: true,
+ });
+ }
+ /**
+ * Use this to pass the native LMDB instance back to Rust.
+ */
+ getNativeRef() {
+ return this.store.lmdb;
+ }
+ async ensure() {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ await this.fsCache.ensure();
+ }
+ await this.fs.mkdirp(this.cacheFilesDirectory);
+ return Promise.resolve();
+ }
+ serialize() {
+ return {
+ dir: this.dir,
+ };
+ }
+ static deserialize(cache) {
+ return new LMDBLiteCache(cache.dir);
+ }
+ has(key) {
+ return Promise.resolve(this.store.has(key));
+ }
+ get(key) {
+ let data = this.store.get(key);
+ if (data == null) {
+ return Promise.resolve(null);
+ }
+ return Promise.resolve((0, build_cache_1.deserialize)(data));
+ }
+ async set(key, value) {
+ await this.setBlob(key, (0, build_cache_1.serialize)(value));
+ }
+ getStream(key) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fs.createReadStream(path_1.default.join(this.dir, key));
+ }
+ return this.fs.createReadStream(this.getFileKey(key));
+ }
+ async setStream(key, stream) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return pipeline(stream, this.fs.createWriteStream(path_1.default.join(this.dir, key)));
+ }
+ const filePath = this.getFileKey(key);
+ await this.fs.mkdirp(path_1.default.dirname(filePath));
+ return pipeline(stream, this.fs.createWriteStream(filePath));
+ }
+ // eslint-disable-next-line require-await
+ async getBlob(key) {
+ return this.getBlobSync(key);
+ }
+ getBlobSync(key) {
+ const buffer = this.store.get(key);
+ if (buffer == null) {
+ throw new Error(`Key ${key} not found in cache`);
+ }
+ return buffer;
+ }
+ async setBlob(key, contents) {
+ await this.store.put(key, contents);
+ }
+ getBuffer(key) {
+ return Promise.resolve(this.store.get(key));
+ }
+ hasLargeBlob(key) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fsCache.hasLargeBlob(key);
+ }
+ return this.fs.exists(this.getFileKey(key));
+ }
+ getLargeBlob(key) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fsCache.getLargeBlob(key);
+ }
+ return this.fs.readFile(this.getFileKey(key));
+ }
+ async setLargeBlob(key, contents, options) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fsCache.setLargeBlob(key, contents, options);
+ }
+ const targetPath = this.getFileKey(key);
+ await this.fs.mkdirp(path_1.default.dirname(targetPath));
+ return this.fs.writeFile(targetPath, contents);
+ }
+ /**
+ * @deprecated Use store.delete instead.
+ */
+ deleteLargeBlob(key) {
+ if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fsCache.deleteLargeBlob(key);
+ }
+ return this.store.delete(key);
+ }
+ keys() {
+ return this.store.keys();
+ }
+ async compact(targetPath) {
+ await this.fs.mkdirp(targetPath);
+ const files = await this.fs.readdir(this.dir);
+ // copy all files except data.mdb and lock.mdb to the target path (recursive)
+ for (const file of files) {
+ const filePath = path_1.default.join(this.dir, file);
+ if (file === 'data.mdb' || file === 'lock.mdb') {
+ continue;
+ }
+ await ncpAsync(filePath, path_1.default.join(targetPath, file));
+ }
+ this.store.compact(path_1.default.join(targetPath, 'data.mdb'));
+ }
+ refresh() { }
+ /**
+ * Streams, packages are stored in files instead of LMDB.
+ *
+ * On this case, if a cache key happens to have a parent traversal, ../..
+ * it is treated specially
+ *
+ * That is, something/../something and something are meant to be different
+ * keys.
+ *
+ * Plus we do not want to store values outside of the cache directory.
+ */
+ getFileKey(key) {
+ const cleanKey = key
+ .split('/')
+ .map((part) => {
+ if (part === '..') {
+ return '$$__parent_dir$$';
+ }
+ return part;
+ })
+ .join('/');
+ return path_1.default.join(this.cacheFilesDirectory, cleanKey);
+ }
+ async clear() {
+ await (0, logger_1.instrumentAsync)('LMDBLiteCache::clear', async () => {
+ const keys = await this.keys();
+ for (const key of keys) {
+ await this.store.delete(key);
+ }
+ await this.fs.rimraf(this.cacheFilesDirectory);
+ await this.fs.mkdirp(this.cacheFilesDirectory);
+ });
+ }
+ }
+ exports.LMDBLiteCache = LMDBLiteCache;
+ (0, build_cache_1.registerSerializableClass)(`${package_json_1.default.version}:LMDBLiteCache`, LMDBLiteCache);
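
Editor's note: the module above exposes LMDBLiteCache as the package's main cache implementation. Below is a minimal usage sketch based only on the methods visible in this diff; the cache directory name and keys are illustrative, and actual Atlaspack wiring may differ.

    import {LMDBLiteCache} from '@atlaspack/cache';

    async function example() {
      // Hypothetical cache directory; any writable path works.
      const cache = new LMDBLiteCache('.atlaspack-cache');
      // ensure() creates the files/ directory (and the legacy FSCache layout when the feature flag is off).
      await cache.ensure();
      // Small values go straight into LMDB via setBlob/getBlob.
      await cache.setBlob('example-key', Buffer.from('hello'));
      if (await cache.has('example-key')) {
        const buffer = await cache.getBlob('example-key'); // throws if the key is missing
        console.log(buffer.toString());
      }
    }

    example().catch(console.error);
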
@@ -0,0 +1,5 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.WRITE_LIMIT_CHUNK = void 0;
+ // Node has a file size limit of 2 GB
+ exports.WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;
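
Editor's note: this 2 GiB constant is what FSCache uses further down in this diff to split large blobs across multiple chunk files. A rough worked example, assuming the chunk count is a ceiling division by WRITE_LIMIT_CHUNK (which is what the slicing loop in setLargeBlob implies):

    // Mirrors exports.WRITE_LIMIT_CHUNK above: 2 GiB, Node's single-write limit.
    const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

    // Illustrative 5 GiB payload -> Math.ceil(5 / 2) = 3 chunk files,
    // stored as `${key}-0`, `${key}-1`, `${key}-2` by FSCache's #getFilePath
    // (or a hashed variant behind the cachePerformanceImprovements flag).
    const payloadBytes = 5 * 1024 ** 3;
    const chunks = Math.ceil(payloadBytes / WRITE_LIMIT_CHUNK);
    console.log(chunks); // 3
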
package/dist/index.js ADDED
@@ -0,0 +1,19 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./FSCache"), exports);
+ __exportStar(require("./IDBCache"), exports);
+ __exportStar(require("./LMDBLiteCache"), exports);
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/lib/FSCache.js CHANGED
@@ -25,6 +25,20 @@ function _util() {
  };
  return data;
  }
+ function _rust() {
+ const data = require("@atlaspack/rust");
+ _rust = function () {
+ return data;
+ };
+ return data;
+ }
+ function _featureFlags() {
+ const data = require("@atlaspack/feature-flags");
+ _featureFlags = function () {
+ return data;
+ };
+ return data;
+ }
  function _logger() {
  const data = _interopRequireDefault(require("@atlaspack/logger"));
  _logger = function () {
@@ -43,6 +57,7 @@ var _package = _interopRequireDefault(require("../package.json"));
  var _constants = require("./constants");
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
  // flowlint-next-line untyped-import:off
+
  const pipeline = (0, _util().promisify)(_stream().default.pipeline);
  class FSCache {
  constructor(fs, cacheDir) {
@@ -57,11 +72,17 @@ class FSCache {
  // This speeds up large caches on many file systems since there are fewer files in a single directory.
  let dirPromises = [];
  for (let i = 0; i < 256; i++) {
- dirPromises.push(this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
+ dirPromises.push(
+ // @ts-expect-error TS2345
+ this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
  }
  await Promise.all(dirPromises);
  }
  _getCachePath(cacheId) {
+ if ((0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
+ const cleanId = (0, _rust().hashString)(cacheId);
+ return _path().default.join(this.dir, cleanId.slice(0, 2), cleanId.slice(2));
+ }
  return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
  }
  getStream(key) {
@@ -91,6 +112,9 @@ class FSCache {
  }
  }
  #getFilePath(key, index) {
+ if ((0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
+ return _path().default.join(this.dir, `${(0, _rust().hashString)(key)}-${index}`);
+ }
  return _path().default.join(this.dir, `${key}-${index}`);
  }
  async #unlinkChunks(key, index) {
@@ -117,12 +141,19 @@ class FSCache {
  const writePromises = [];
  if (chunks === 1) {
  // If there's one chunk, don't slice the content
- writePromises.push(this.fs.writeFile(this.#getFilePath(key, 0), contents, {
+ writePromises.push(
+ // @ts-expect-error TS2345
+ this.fs.writeFile(this.#getFilePath(key, 0), contents, {
+ // @ts-expect-error TS2353
  signal: options === null || options === void 0 ? void 0 : options.signal
  }));
  } else {
  for (let i = 0; i < chunks; i += 1) {
- writePromises.push(this.fs.writeFile(this.#getFilePath(key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK), {
+ writePromises.push(
+ // @ts-expect-error TS2345
+ this.fs.writeFile(this.#getFilePath(key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK),
+ // @ts-expect-error TS2353
+ {
  signal: options === null || options === void 0 ? void 0 : options.signal
  }));
  }
@@ -137,6 +168,7 @@ class FSCache {
  let i = 0;
  let filePath = this.#getFilePath(key, i);
  while (await this.fs.exists(filePath)) {
+ // @ts-expect-error TS2345
  deletePromises.push(this.fs.rimraf(filePath));
  i += 1;
  filePath = this.#getFilePath(key, i);
@@ -34,12 +34,8 @@ function _idb() {
  }
  var _package = _interopRequireDefault(require("../package.json"));
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- // $FlowFixMe[untyped-import]
- // $FlowFixMe[untyped-import]
  const STORE_NAME = 'cache';
  class IDBCache {
- // $FlowFixMe
-
  constructor() {
  this.store = (0, _idb().openDB)('REPL-parcel-cache', 1, {
  upgrade(db) {
@@ -75,9 +71,14 @@ class IDBCache {
  await (await this.store).put(STORE_NAME, (0, _buildCache().serialize)(value), key);
  }
  getStream(key) {
- let dataPromise = this.store.then(s => s.get(STORE_NAME, key)).then(d => Buffer.from(d)).catch(e => e);
+ let dataPromise = this.store
+ // @ts-expect-error TS7006
+ .then(s => s.get(STORE_NAME, key))
+ // @ts-expect-error TS7006
+ .then(d => Buffer.from(d))
+ // @ts-expect-error TS7006
+ .catch(e => e);
  const stream = new (_stream().Readable)({
- // $FlowFixMe(incompatible-call)
  async read() {
  let data = await dataPromise;
  if (data instanceof Error) {
package/lib/IDBCache.js CHANGED
@@ -4,7 +4,7 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.IDBCache = void 0;
- // $FlowFixMe
+ // @ts-expect-error TS2420
  class IDBCache {
  constructor() {
  throw new Error('IDBCache is only supported in the browser');
@@ -26,13 +26,6 @@ function _rust() {
  };
  return data;
  }
- function _fs() {
- const data = _interopRequireDefault(require("fs"));
- _fs = function () {
- return data;
- };
- return data;
- }
  function _ncp() {
  const data = _interopRequireDefault(require("ncp"));
  _ncp = function () {
@@ -61,26 +54,29 @@ function _path() {
  };
  return data;
  }
- function _fs2() {
+ function _fs() {
  const data = require("@atlaspack/fs");
- _fs2 = function () {
+ _fs = function () {
  return data;
  };
  return data;
  }
  var _package = _interopRequireDefault(require("../package.json"));
  var _FSCache = require("./FSCache");
+ function _logger() {
+ const data = require("@atlaspack/logger");
+ _logger = function () {
+ return data;
+ };
+ return data;
+ }
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- // $FlowFixMe
+ // @ts-expect-error TS7016
+
  const ncpAsync = (0, _util().promisify)(_ncp().default);
  class LmdbWrapper {
  constructor(lmdb) {
  this.lmdb = lmdb;
-
- // $FlowFixMe
- this[Symbol.dispose] = () => {
- this.lmdb.close();
- };
  }
  has(key) {
  return this.lmdb.hasSync(key);
@@ -110,20 +106,25 @@ class LmdbWrapper {
  }
  }
  exports.LmdbWrapper = LmdbWrapper;
- function open(directory
+ function open(directory,
  // eslint-disable-next-line no-unused-vars
- ) {
+ openOptions) {
  return new LmdbWrapper(new (_rust().Lmdb)({
  path: directory,
  asyncWrites: true,
- mapSize: 1024 * 1024 * 1024 * 15
+ mapSize: process.env.ATLASPACK_BUILD_ENV === 'test' ? 1024 * 1024 * 1024 : 1024 * 1024 * 1024 * 15
  }));
  }
  const pipeline = (0, _util().promisify)(_stream().default.pipeline);
  class LMDBLiteCache {
+ /**
+ * Directory where we store raw files.
+ */
+
  constructor(cacheDir) {
- this.fs = new (_fs2().NodeFS)();
+ this.fs = new (_fs().NodeFS)();
  this.dir = cacheDir;
+ this.cacheFilesDirectory = _path().default.join(cacheDir, 'files');
  this.fsCache = new _FSCache.FSCache(this.fs, cacheDir);
  this.store = open(cacheDir, {
  name: 'parcel-cache',
@@ -142,6 +143,7 @@ class LMDBLiteCache {
  if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
  await this.fsCache.ensure();
  }
+ await this.fs.mkdirp(this.cacheFilesDirectory);
  return Promise.resolve();
  }
  serialize() {
@@ -166,10 +168,18 @@ class LMDBLiteCache {
  await this.setBlob(key, (0, _buildCache().serialize)(value));
  }
  getStream(key) {
- return this.fs.createReadStream(_path().default.join(this.dir, key));
+ if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
+ return this.fs.createReadStream(_path().default.join(this.dir, key));
+ }
+ return this.fs.createReadStream(this.getFileKey(key));
  }
- setStream(key, stream) {
- return pipeline(stream, this.fs.createWriteStream(_path().default.join(this.dir, key)));
+ async setStream(key, stream) {
+ if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
+ return pipeline(stream, this.fs.createWriteStream(_path().default.join(this.dir, key)));
+ }
+ const filePath = this.getFileKey(key);
+ await this.fs.mkdirp(_path().default.dirname(filePath));
+ return pipeline(stream, this.fs.createWriteStream(filePath));
  }

  // eslint-disable-next-line require-await
@@ -189,34 +199,25 @@ class LMDBLiteCache {
  getBuffer(key) {
  return Promise.resolve(this.store.get(key));
  }
- #getFilePath(key, index) {
- return _path().default.join(this.dir, `${key}-${index}`);
- }
  hasLargeBlob(key) {
  if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
  return this.fsCache.hasLargeBlob(key);
  }
- return this.has(key);
+ return this.fs.exists(this.getFileKey(key));
  }
-
- /**
- * @deprecated Use getBlob instead.
- */
  getLargeBlob(key) {
  if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
  return this.fsCache.getLargeBlob(key);
  }
- return Promise.resolve(this.getBlobSync(key));
+ return this.fs.readFile(this.getFileKey(key));
  }
-
- /**
- * @deprecated Use setBlob instead.
- */
- setLargeBlob(key, contents, options) {
+ async setLargeBlob(key, contents, options) {
  if (!(0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
  return this.fsCache.setLargeBlob(key, contents, options);
  }
- return this.setBlob(key, contents);
+ const targetPath = this.getFileKey(key);
+ await this.fs.mkdirp(_path().default.dirname(targetPath));
+ return this.fs.writeFile(targetPath, contents);
  }

  /**
@@ -232,10 +233,8 @@ class LMDBLiteCache {
  return this.store.keys();
  }
  async compact(targetPath) {
- await _fs().default.promises.mkdir(targetPath, {
- recursive: true
- });
- const files = await _fs().default.promises.readdir(this.dir);
+ await this.fs.mkdirp(targetPath);
+ const files = await this.fs.readdir(this.dir);
  // copy all files except data.mdb and lock.mdb to the target path (recursive)
  for (const file of files) {
  const filePath = _path().default.join(this.dir, file);
@@ -247,6 +246,37 @@ class LMDBLiteCache {
  this.store.compact(_path().default.join(targetPath, 'data.mdb'));
  }
  refresh() {}
+
+ /**
+ * Streams, packages are stored in files instead of LMDB.
+ *
+ * On this case, if a cache key happens to have a parent traversal, ../..
+ * it is treated specially
+ *
+ * That is, something/../something and something are meant to be different
+ * keys.
+ *
+ * Plus we do not want to store values outside of the cache directory.
+ */
+ getFileKey(key) {
+ const cleanKey = key.split('/').map(part => {
+ if (part === '..') {
+ return '$$__parent_dir$$';
+ }
+ return part;
+ }).join('/');
+ return _path().default.join(this.cacheFilesDirectory, cleanKey);
+ }
+ async clear() {
+ await (0, _logger().instrumentAsync)('LMDBLiteCache::clear', async () => {
+ const keys = await this.keys();
+ for (const key of keys) {
+ await this.store.delete(key);
+ }
+ await this.fs.rimraf(this.cacheFilesDirectory);
+ await this.fs.mkdirp(this.cacheFilesDirectory);
+ });
+ }
  }
  exports.LMDBLiteCache = LMDBLiteCache;
  (0, _buildCache().registerSerializableClass)(`${_package.default.version}:LMDBLiteCache`, LMDBLiteCache);
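
Editor's note: to make the getFileKey() doc comment above concrete, here is a small standalone sketch of the same mapping. A '..' segment becomes the literal '$$__parent_dir$$' directory, so keys containing parent traversal stay inside the files/ directory and remain distinct from their collapsed forms. The function name and example paths are illustrative only.

    import * as path from 'path';

    // Re-implements the mapping from getFileKey() above, for illustration.
    function fileKeyFor(cacheFilesDirectory: string, key: string): string {
      const cleanKey = key
        .split('/')
        .map((part) => (part === '..' ? '$$__parent_dir$$' : part))
        .join('/');
      return path.join(cacheFilesDirectory, cleanKey);
    }

    console.log(fileKeyFor('/cache/files', 'something/../something'));
    // -> /cache/files/something/$$__parent_dir$$/something
    console.log(fileKeyFor('/cache/files', 'something'));
    // -> /cache/files/something
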
@@ -0,0 +1,27 @@
+ import type { Readable } from 'stream';
+ import type { FilePath } from '@atlaspack/types';
+ import type { FileSystem } from '@atlaspack/fs';
+ import type { Cache } from './types';
+ export declare class FSCache implements Cache {
+ #private;
+ fs: FileSystem;
+ dir: FilePath;
+ constructor(fs: FileSystem, cacheDir: FilePath);
+ ensure(): Promise<void>;
+ _getCachePath(cacheId: string): FilePath;
+ getStream(key: string): Readable;
+ setStream(key: string, stream: Readable): Promise<void>;
+ has(key: string): Promise<boolean>;
+ getBlob(key: string): Promise<Buffer>;
+ setBlob(key: string, contents: Buffer | string): Promise<void>;
+ getBuffer(key: string): Promise<Buffer | null | undefined>;
+ hasLargeBlob(key: string): Promise<boolean>;
+ getLargeBlob(key: string): Promise<Buffer>;
+ setLargeBlob(key: string, contents: Buffer | string, options?: {
+ signal?: AbortSignal;
+ }): Promise<void>;
+ deleteLargeBlob(key: string): Promise<void>;
+ get<T>(key: string): Promise<T | null | undefined>;
+ set(key: string, value: unknown): Promise<void>;
+ refresh(): void;
+ }
@@ -0,0 +1,22 @@
+ import type { Cache } from './types';
+ import { Readable } from 'stream';
+ export declare class IDBCache implements Cache {
+ store: any;
+ constructor();
+ ensure(): Promise<void>;
+ serialize(): Record<any, any>;
+ static deserialize(): IDBCache;
+ has(key: string): Promise<boolean>;
+ get<T>(key: string): Promise<T | null | undefined>;
+ set(key: string, value: unknown): Promise<void>;
+ getStream(key: string): Readable;
+ setStream(key: string, stream: Readable): Promise<void>;
+ getBlob(key: string): Promise<Buffer>;
+ setBlob(key: string, contents: Buffer | string): Promise<void>;
+ getBuffer(key: string): Promise<Buffer | null | undefined>;
+ hasLargeBlob(key: string): Promise<boolean>;
+ getLargeBlob(key: string): Promise<Buffer>;
+ setLargeBlob(key: string, contents: Buffer | string): Promise<void>;
+ deleteLargeBlob(key: string): Promise<void>;
+ refresh(): void;
+ }
@@ -0,0 +1,4 @@
+ import type { Cache } from './types';
+ export declare class IDBCache implements Cache {
+ constructor();
+ }