@atlaspack/cache 3.1.1-canary.36 → 3.1.1-canary.360
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +426 -0
- package/dist/FSCache.js +171 -0
- package/dist/IDBCache.browser.js +123 -0
- package/dist/IDBCache.js +10 -0
- package/dist/LMDBLiteCache.js +223 -0
- package/dist/constants.js +5 -0
- package/dist/index.js +19 -0
- package/dist/types.js +2 -0
- package/lib/FSCache.js +35 -3
- package/lib/IDBCache.browser.js +7 -6
- package/lib/IDBCache.js +1 -1
- package/lib/LMDBLiteCache.js +71 -41
- package/lib/types/FSCache.d.ts +27 -0
- package/lib/types/IDBCache.browser.d.ts +22 -0
- package/lib/types/IDBCache.d.ts +4 -0
- package/lib/types/LMDBLiteCache.d.ts +78 -0
- package/lib/types/constants.d.ts +1 -0
- package/lib/types/index.d.ts +4 -0
- package/lib/types/types.d.ts +2 -0
- package/package.json +13 -14
- package/src/{FSCache.js → FSCache.ts} +30 -15
- package/src/{IDBCache.browser.js → IDBCache.browser.ts} +8 -10
- package/src/{IDBCache.js → IDBCache.ts} +1 -2
- package/src/{LMDBLiteCache.js → LMDBLiteCache.ts} +80 -39
- package/src/{constants.js → constants.ts} +0 -2
- package/src/{index.js → index.ts} +0 -2
- package/src/{types.js → types.ts} +0 -1
- package/test/LMDBLiteCache.test.ts +241 -0
- package/test/workerThreadsTest.js +42 -0
- package/tsconfig.json +27 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/index.d.ts +0 -12
- package/lib/types.d.ts +0 -2
- package/test/LMDBLiteCache.test.js +0 -69
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.IDBCache = void 0;
|
|
7
|
+
const stream_1 = require("stream");
|
|
8
|
+
const build_cache_1 = require("@atlaspack/build-cache");
|
|
9
|
+
const utils_1 = require("@atlaspack/utils");
|
|
10
|
+
const idb_1 = require("idb");
|
|
11
|
+
const package_json_1 = __importDefault(require("../package.json"));
|
|
12
|
+
const STORE_NAME = 'cache';
|
|
13
|
+
class IDBCache {
|
|
14
|
+
constructor() {
|
|
15
|
+
this.store = (0, idb_1.openDB)('REPL-parcel-cache', 1, {
|
|
16
|
+
upgrade(db) {
|
|
17
|
+
db.createObjectStore(STORE_NAME);
|
|
18
|
+
},
|
|
19
|
+
blocked() { },
|
|
20
|
+
blocking() { },
|
|
21
|
+
terminated() { },
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
ensure() {
|
|
25
|
+
return Promise.resolve();
|
|
26
|
+
}
|
|
27
|
+
serialize() {
|
|
28
|
+
return {
|
|
29
|
+
/*::...null*/
|
|
30
|
+
};
|
|
31
|
+
}
|
|
32
|
+
static deserialize() {
|
|
33
|
+
return new IDBCache();
|
|
34
|
+
}
|
|
35
|
+
has(key) {
|
|
36
|
+
return Promise.resolve(this.store.get(key) != null);
|
|
37
|
+
}
|
|
38
|
+
async get(key) {
|
|
39
|
+
let data = await (await this.store).get(STORE_NAME, key);
|
|
40
|
+
if (data == null) {
|
|
41
|
+
return null;
|
|
42
|
+
}
|
|
43
|
+
return Promise.resolve((0, build_cache_1.deserialize)(data));
|
|
44
|
+
}
|
|
45
|
+
async set(key, value) {
|
|
46
|
+
await (await this.store).put(STORE_NAME, (0, build_cache_1.serialize)(value), key);
|
|
47
|
+
}
|
|
48
|
+
getStream(key) {
|
|
49
|
+
let dataPromise = this.store
|
|
50
|
+
// @ts-expect-error TS7006
|
|
51
|
+
.then((s) => s.get(STORE_NAME, key))
|
|
52
|
+
// @ts-expect-error TS7006
|
|
53
|
+
.then((d) => Buffer.from(d))
|
|
54
|
+
// @ts-expect-error TS7006
|
|
55
|
+
.catch((e) => e);
|
|
56
|
+
const stream = new stream_1.Readable({
|
|
57
|
+
async read() {
|
|
58
|
+
let data = await dataPromise;
|
|
59
|
+
if (data instanceof Error) {
|
|
60
|
+
stream.emit('error', data);
|
|
61
|
+
}
|
|
62
|
+
else {
|
|
63
|
+
stream.push(Buffer.from(data));
|
|
64
|
+
stream.push(null);
|
|
65
|
+
}
|
|
66
|
+
},
|
|
67
|
+
});
|
|
68
|
+
return stream;
|
|
69
|
+
}
|
|
70
|
+
async setStream(key, stream) {
|
|
71
|
+
let buf = await (0, utils_1.bufferStream)(stream);
|
|
72
|
+
await (await this.store).put(STORE_NAME, buf, key);
|
|
73
|
+
}
|
|
74
|
+
async getBlob(key) {
|
|
75
|
+
let data = await (await this.store).get(STORE_NAME, key);
|
|
76
|
+
if (data == null) {
|
|
77
|
+
return Promise.reject(new Error(`Key ${key} not found in cache`));
|
|
78
|
+
}
|
|
79
|
+
return Buffer.from(data.buffer);
|
|
80
|
+
}
|
|
81
|
+
async setBlob(key, contents) {
|
|
82
|
+
let data = contents instanceof Uint8Array ? contents : Buffer.from(contents);
|
|
83
|
+
await (await this.store).put(STORE_NAME, data, key);
|
|
84
|
+
}
|
|
85
|
+
// async setBlobs(
|
|
86
|
+
// entries: $ReadOnlyArray<[string, Buffer | string]>,
|
|
87
|
+
// ): Promise<void> {
|
|
88
|
+
// const tx = (await this.store).transaction(STORE_NAME, 'readwrite');
|
|
89
|
+
// await Promise.all([
|
|
90
|
+
// ...entries.map(([key, value]) =>
|
|
91
|
+
// tx.store.put(
|
|
92
|
+
// value instanceof Uint8Array ? value : Buffer.from(value),
|
|
93
|
+
// key,
|
|
94
|
+
// ),
|
|
95
|
+
// ),
|
|
96
|
+
// tx.done,
|
|
97
|
+
// ]);
|
|
98
|
+
// }
|
|
99
|
+
async getBuffer(key) {
|
|
100
|
+
let data = await (await this.store).get(STORE_NAME, key);
|
|
101
|
+
if (data == null) {
|
|
102
|
+
return null;
|
|
103
|
+
}
|
|
104
|
+
return Buffer.from(data.buffer);
|
|
105
|
+
}
|
|
106
|
+
hasLargeBlob(key) {
|
|
107
|
+
return this.has(key);
|
|
108
|
+
}
|
|
109
|
+
getLargeBlob(key) {
|
|
110
|
+
return this.getBlob(key);
|
|
111
|
+
}
|
|
112
|
+
setLargeBlob(key, contents) {
|
|
113
|
+
return this.setBlob(key, contents);
|
|
114
|
+
}
|
|
115
|
+
async deleteLargeBlob(key) {
|
|
116
|
+
await (await this.store).delete(STORE_NAME, key);
|
|
117
|
+
}
|
|
118
|
+
refresh() {
|
|
119
|
+
// NOOP
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
exports.IDBCache = IDBCache;
|
|
123
|
+
(0, build_cache_1.registerSerializableClass)(`${package_json_1.default.version}:IDBCache`, IDBCache);
|
package/dist/IDBCache.js
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.IDBCache = void 0;
|
|
4
|
+
// @ts-expect-error TS2420
|
|
5
|
+
/**
 * Non-browser stub for the IndexedDB cache backend.
 * Constructing it always fails immediately; the working implementation
 * lives in IDBCache.browser.js (presumably selected by the bundler's
 * browser build — confirm against package.json).
 */
class IDBCache {
    constructor() {
        const reason = 'IDBCache is only supported in the browser';
        throw new Error(reason);
    }
}
|
|
10
|
+
// CommonJS named export of the stub class.
exports.IDBCache = IDBCache;
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.LMDBLiteCache = exports.LmdbWrapper = void 0;
|
|
7
|
+
exports.open = open;
|
|
8
|
+
const build_cache_1 = require("@atlaspack/build-cache");
|
|
9
|
+
const feature_flags_1 = require("@atlaspack/feature-flags");
|
|
10
|
+
const rust_1 = require("@atlaspack/rust");
|
|
11
|
+
// @ts-expect-error TS7016
|
|
12
|
+
const ncp_1 = __importDefault(require("ncp"));
|
|
13
|
+
const util_1 = require("util");
|
|
14
|
+
const stream_1 = __importDefault(require("stream"));
|
|
15
|
+
const path_1 = __importDefault(require("path"));
|
|
16
|
+
const fs_1 = require("@atlaspack/fs");
|
|
17
|
+
const package_json_1 = __importDefault(require("../package.json"));
|
|
18
|
+
const FSCache_1 = require("./FSCache");
|
|
19
|
+
const logger_1 = require("@atlaspack/logger");
|
|
20
|
+
const ncpAsync = (0, util_1.promisify)(ncp_1.default);
|
|
21
|
+
/**
 * Thin wrapper around the native Rust Lmdb handle.
 * Reads are synchronous (hasSync/getSync); writes return promises.
 */
class LmdbWrapper {
    constructor(lmdb) {
        this.lmdb = lmdb;
    }
    has(key) {
        return this.lmdb.hasSync(key);
    }
    async delete(key) {
        await this.lmdb.delete(key);
    }
    get(key) {
        return this.lmdb.getSync(key);
    }
    async put(key, value) {
        // LMDB stores raw bytes, so strings are normalised to Buffers up front.
        const buffer = typeof value === 'string' ? Buffer.from(value) : value;
        await this.lmdb.put(key, buffer);
    }
    /**
     * Yields every key in the store, fetching them from the native side in
     * pages.
     *
     * BUG FIX: the offset passed to keysSync must accumulate across pages.
     * The previous code passed the length of the *previous* page as the
     * offset, which re-read the second page (or looped forever) whenever
     * more than one page of keys existed.
     * (Assumes keysSync(skip, limit) semantics — confirm against
     * @atlaspack/rust if they differ.)
     */
    *keys() {
        const PAGE_SIZE = 10000000;
        let offset = 0;
        let page = this.lmdb.keysSync(offset, PAGE_SIZE);
        while (page.length > 0) {
            yield* page;
            offset += page.length;
            page = this.lmdb.keysSync(offset, PAGE_SIZE);
        }
    }
    compact(targetPath) {
        this.lmdb.compact(targetPath);
    }
}
|
|
52
|
+
// CommonJS named export of the wrapper class.
exports.LmdbWrapper = LmdbWrapper;
|
|
53
|
+
/**
 * Opens the native LMDB environment at `directory` and returns it wrapped
 * in an LmdbWrapper. `openOptions` is accepted for API compatibility but
 * not used by this implementation.
 */
function open(directory, 
// eslint-disable-next-line no-unused-vars
openOptions) {
    const GIGABYTE = 1024 * 1024 * 1024;
    // Tests get a smaller map size than production builds.
    const mapSize = process.env.ATLASPACK_BUILD_ENV === 'test'
        ? GIGABYTE
        : GIGABYTE * 15;
    const env = new rust_1.Lmdb({
        path: directory,
        asyncWrites: true,
        mapSize,
    });
    return new LmdbWrapper(env);
}
|
|
64
|
+
const pipeline = (0, util_1.promisify)(stream_1.default.pipeline);
/**
 * Cache backend that stores entries in LMDB (via the Rust binding) and
 * stream/large-blob entries as plain files under `<cacheDir>/files`.
 *
 * Behaviour is gated on the 'cachePerformanceImprovements' feature flag:
 * when the flag is OFF, large-blob operations delegate to the legacy
 * FSCache and streams are written directly under `cacheDir`.
 */
class LMDBLiteCache {
    constructor(cacheDir) {
        this.fs = new fs_1.NodeFS();
        this.dir = cacheDir;
        // Directory for file-backed entries (streams / large blobs).
        this.cacheFilesDirectory = path_1.default.join(cacheDir, 'files');
        this.fsCache = new FSCache_1.FSCache(this.fs, cacheDir);
        this.store = open(cacheDir, {
            name: 'parcel-cache',
            encoding: 'binary',
            compression: true,
        });
    }
    /**
     * Use this to pass the native LMDB instance back to Rust.
     */
    getNativeRef() {
        return this.store.lmdb;
    }
    // Creates the on-disk layout. With the feature flag ON the legacy
    // FSCache shard directories are skipped.
    async ensure() {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            await this.fsCache.ensure();
        }
        await this.fs.mkdirp(this.cacheFilesDirectory);
        return Promise.resolve();
    }
    // Only the directory is needed to reconstruct the cache elsewhere.
    serialize() {
        return {
            dir: this.dir,
        };
    }
    static deserialize(cache) {
        return new LMDBLiteCache(cache.dir);
    }
    has(key) {
        return Promise.resolve(this.store.has(key));
    }
    get(key) {
        let data = this.store.get(key);
        if (data == null) {
            return Promise.resolve(null);
        }
        return Promise.resolve((0, build_cache_1.deserialize)(data));
    }
    async set(key, value) {
        await this.setBlob(key, (0, build_cache_1.serialize)(value));
    }
    getStream(key) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            // Legacy layout: streams live directly under the cache dir.
            return this.fs.createReadStream(path_1.default.join(this.dir, key));
        }
        return this.fs.createReadStream(this.getFileKey(key));
    }
    async setStream(key, stream) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            return pipeline(stream, this.fs.createWriteStream(path_1.default.join(this.dir, key)));
        }
        const filePath = this.getFileKey(key);
        // Keys may contain '/', so ensure the parent directory exists first.
        await this.fs.mkdirp(path_1.default.dirname(filePath));
        return pipeline(stream, this.fs.createWriteStream(filePath));
    }
    // eslint-disable-next-line require-await
    async getBlob(key) {
        return this.getBlobSync(key);
    }
    // Throws (rather than returning null) when the key is missing.
    getBlobSync(key) {
        const buffer = this.store.get(key);
        if (buffer == null) {
            throw new Error(`Key ${key} not found in cache`);
        }
        return buffer;
    }
    async setBlob(key, contents) {
        await this.store.put(key, contents);
    }
    getBuffer(key) {
        return Promise.resolve(this.store.get(key));
    }
    hasLargeBlob(key) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            return this.fsCache.hasLargeBlob(key);
        }
        return this.fs.exists(this.getFileKey(key));
    }
    getLargeBlob(key) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            return this.fsCache.getLargeBlob(key);
        }
        return this.fs.readFile(this.getFileKey(key));
    }
    async setLargeBlob(key, contents, options) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            return this.fsCache.setLargeBlob(key, contents, options);
        }
        const targetPath = this.getFileKey(key);
        await this.fs.mkdirp(path_1.default.dirname(targetPath));
        return this.fs.writeFile(targetPath, contents);
    }
    /**
     * @deprecated Use store.delete instead.
     *
     * NOTE(review): with the feature flag ON this deletes the LMDB entry,
     * while set/get/hasLargeBlob operate on files under getFileKey(key) —
     * looks asymmetric; confirm the intended storage location.
     */
    deleteLargeBlob(key) {
        if (!(0, feature_flags_1.getFeatureFlag)('cachePerformanceImprovements')) {
            return this.fsCache.deleteLargeBlob(key);
        }
        return this.store.delete(key);
    }
    keys() {
        return this.store.keys();
    }
    // Copies auxiliary files and compacts the LMDB data file into targetPath.
    async compact(targetPath) {
        await this.fs.mkdirp(targetPath);
        const files = await this.fs.readdir(this.dir);
        // copy all files except data.mdb and lock.mdb to the target path (recursive)
        for (const file of files) {
            const filePath = path_1.default.join(this.dir, file);
            if (file === 'data.mdb' || file === 'lock.mdb') {
                continue;
            }
            await ncpAsync(filePath, path_1.default.join(targetPath, file));
        }
        this.store.compact(path_1.default.join(targetPath, 'data.mdb'));
    }
    refresh() { }
    /**
     * Streams, packages are stored in files instead of LMDB.
     *
     * On this case, if a cache key happens to have a parent traversal, ../..
     * it is treated specially
     *
     * That is, something/../something and something are meant to be different
     * keys.
     *
     * Plus we do not want to store values outside of the cache directory.
     */
    getFileKey(key) {
        const cleanKey = key
            .split('/')
            .map((part) => {
            if (part === '..') {
                return '$$__parent_dir$$';
            }
            return part;
        })
            .join('/');
        return path_1.default.join(this.cacheFilesDirectory, cleanKey);
    }
    // Deletes every LMDB entry and resets the file-backed storage directory.
    async clear() {
        await (0, logger_1.instrumentAsync)('LMDBLiteCache::clear', async () => {
            const keys = await this.keys();
            for (const key of keys) {
                await this.store.delete(key);
            }
            await this.fs.rimraf(this.cacheFilesDirectory);
            await this.fs.mkdirp(this.cacheFilesDirectory);
        });
    }
}
exports.LMDBLiteCache = LMDBLiteCache;
// Allow instances to round-trip through the serializer across workers,
// versioned by the package version.
(0, build_cache_1.registerSerializableClass)(`${package_json_1.default.version}:LMDBLiteCache`, LMDBLiteCache);
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
__exportStar(require("./FSCache"), exports);
|
|
18
|
+
__exportStar(require("./IDBCache"), exports);
|
|
19
|
+
__exportStar(require("./LMDBLiteCache"), exports);
|
package/dist/types.js
ADDED
package/lib/FSCache.js
CHANGED
|
@@ -25,6 +25,20 @@ function _util() {
|
|
|
25
25
|
};
|
|
26
26
|
return data;
|
|
27
27
|
}
|
|
28
|
+
function _rust() {
|
|
29
|
+
const data = require("@atlaspack/rust");
|
|
30
|
+
_rust = function () {
|
|
31
|
+
return data;
|
|
32
|
+
};
|
|
33
|
+
return data;
|
|
34
|
+
}
|
|
35
|
+
function _featureFlags() {
|
|
36
|
+
const data = require("@atlaspack/feature-flags");
|
|
37
|
+
_featureFlags = function () {
|
|
38
|
+
return data;
|
|
39
|
+
};
|
|
40
|
+
return data;
|
|
41
|
+
}
|
|
28
42
|
function _logger() {
|
|
29
43
|
const data = _interopRequireDefault(require("@atlaspack/logger"));
|
|
30
44
|
_logger = function () {
|
|
@@ -43,6 +57,7 @@ var _package = _interopRequireDefault(require("../package.json"));
|
|
|
43
57
|
var _constants = require("./constants");
|
|
44
58
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
45
59
|
// flowlint-next-line untyped-import:off
|
|
60
|
+
|
|
46
61
|
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
|
|
47
62
|
class FSCache {
|
|
48
63
|
constructor(fs, cacheDir) {
|
|
@@ -57,11 +72,17 @@ class FSCache {
|
|
|
57
72
|
// This speeds up large caches on many file systems since there are fewer files in a single directory.
|
|
58
73
|
let dirPromises = [];
|
|
59
74
|
for (let i = 0; i < 256; i++) {
|
|
60
|
-
dirPromises.push(
|
|
75
|
+
dirPromises.push(
|
|
76
|
+
// @ts-expect-error TS2345
|
|
77
|
+
this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
|
|
61
78
|
}
|
|
62
79
|
await Promise.all(dirPromises);
|
|
63
80
|
}
|
|
64
81
|
_getCachePath(cacheId) {
|
|
82
|
+
if ((0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
|
|
83
|
+
const cleanId = (0, _rust().hashString)(cacheId);
|
|
84
|
+
return _path().default.join(this.dir, cleanId.slice(0, 2), cleanId.slice(2));
|
|
85
|
+
}
|
|
65
86
|
return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
|
|
66
87
|
}
|
|
67
88
|
getStream(key) {
|
|
@@ -91,6 +112,9 @@ class FSCache {
|
|
|
91
112
|
}
|
|
92
113
|
}
|
|
93
114
|
#getFilePath(key, index) {
|
|
115
|
+
if ((0, _featureFlags().getFeatureFlag)('cachePerformanceImprovements')) {
|
|
116
|
+
return _path().default.join(this.dir, `${(0, _rust().hashString)(key)}-${index}`);
|
|
117
|
+
}
|
|
94
118
|
return _path().default.join(this.dir, `${key}-${index}`);
|
|
95
119
|
}
|
|
96
120
|
async #unlinkChunks(key, index) {
|
|
@@ -117,12 +141,19 @@ class FSCache {
|
|
|
117
141
|
const writePromises = [];
|
|
118
142
|
if (chunks === 1) {
|
|
119
143
|
// If there's one chunk, don't slice the content
|
|
120
|
-
writePromises.push(
|
|
144
|
+
writePromises.push(
|
|
145
|
+
// @ts-expect-error TS2345
|
|
146
|
+
this.fs.writeFile(this.#getFilePath(key, 0), contents, {
|
|
147
|
+
// @ts-expect-error TS2353
|
|
121
148
|
signal: options === null || options === void 0 ? void 0 : options.signal
|
|
122
149
|
}));
|
|
123
150
|
} else {
|
|
124
151
|
for (let i = 0; i < chunks; i += 1) {
|
|
125
|
-
writePromises.push(
|
|
152
|
+
writePromises.push(
|
|
153
|
+
// @ts-expect-error TS2345
|
|
154
|
+
this.fs.writeFile(this.#getFilePath(key, i), typeof contents === 'string' ? contents.slice(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK) : contents.subarray(i * _constants.WRITE_LIMIT_CHUNK, (i + 1) * _constants.WRITE_LIMIT_CHUNK),
|
|
155
|
+
// @ts-expect-error TS2353
|
|
156
|
+
{
|
|
126
157
|
signal: options === null || options === void 0 ? void 0 : options.signal
|
|
127
158
|
}));
|
|
128
159
|
}
|
|
@@ -137,6 +168,7 @@ class FSCache {
|
|
|
137
168
|
let i = 0;
|
|
138
169
|
let filePath = this.#getFilePath(key, i);
|
|
139
170
|
while (await this.fs.exists(filePath)) {
|
|
171
|
+
// @ts-expect-error TS2345
|
|
140
172
|
deletePromises.push(this.fs.rimraf(filePath));
|
|
141
173
|
i += 1;
|
|
142
174
|
filePath = this.#getFilePath(key, i);
|
package/lib/IDBCache.browser.js
CHANGED
|
@@ -34,12 +34,8 @@ function _idb() {
|
|
|
34
34
|
}
|
|
35
35
|
var _package = _interopRequireDefault(require("../package.json"));
|
|
36
36
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
37
|
-
// $FlowFixMe[untyped-import]
|
|
38
|
-
// $FlowFixMe[untyped-import]
|
|
39
37
|
const STORE_NAME = 'cache';
|
|
40
38
|
class IDBCache {
|
|
41
|
-
// $FlowFixMe
|
|
42
|
-
|
|
43
39
|
constructor() {
|
|
44
40
|
this.store = (0, _idb().openDB)('REPL-parcel-cache', 1, {
|
|
45
41
|
upgrade(db) {
|
|
@@ -75,9 +71,14 @@ class IDBCache {
|
|
|
75
71
|
await (await this.store).put(STORE_NAME, (0, _buildCache().serialize)(value), key);
|
|
76
72
|
}
|
|
77
73
|
getStream(key) {
|
|
78
|
-
let dataPromise = this.store
|
|
74
|
+
let dataPromise = this.store
|
|
75
|
+
// @ts-expect-error TS7006
|
|
76
|
+
.then(s => s.get(STORE_NAME, key))
|
|
77
|
+
// @ts-expect-error TS7006
|
|
78
|
+
.then(d => Buffer.from(d))
|
|
79
|
+
// @ts-expect-error TS7006
|
|
80
|
+
.catch(e => e);
|
|
79
81
|
const stream = new (_stream().Readable)({
|
|
80
|
-
// $FlowFixMe(incompatible-call)
|
|
81
82
|
async read() {
|
|
82
83
|
let data = await dataPromise;
|
|
83
84
|
if (data instanceof Error) {
|
package/lib/IDBCache.js
CHANGED