@atlaspack/cache 3.1.1-canary.14 → 3.1.1-canary.141

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@atlaspack/cache",
   "description": "Interface for defining caches and file-system, IDB and LMDB implementations.",
-  "version": "3.1.1-canary.14+267b0d591",
+  "version": "3.1.1-canary.141+2f29297bd",
   "license": "(MIT OR Apache-2.0)",
   "type": "commonjs",
   "publishConfig": {
@@ -11,24 +11,23 @@
     "type": "git",
     "url": "https://github.com/atlassian-labs/atlaspack.git"
   },
-  "main": "lib/index.js",
-  "source": "src/index.js",
-  "types": "index.d.ts",
+  "main": "./lib/index.js",
+  "source": "./src/index.ts",
+  "types": "./lib/index.d.ts",
   "engines": {
     "node": ">= 16.0.0"
   },
   "scripts": {
     "test": "mocha",
-    "build-ts": "mkdir -p lib && flow-to-ts src/types.js > lib/types.d.ts",
-    "check-ts": "tsc --noEmit index.d.ts"
+    "check-ts": "tsc --emitDeclarationOnly --rootDir src"
   },
   "dependencies": {
-    "@atlaspack/build-cache": "2.13.3-canary.82+267b0d591",
-    "@atlaspack/feature-flags": "2.14.1-canary.82+267b0d591",
-    "@atlaspack/fs": "2.14.5-canary.14+267b0d591",
-    "@atlaspack/logger": "2.14.5-canary.14+267b0d591",
-    "@atlaspack/rust": "3.2.1-canary.14+267b0d591",
-    "@atlaspack/utils": "2.14.5-canary.14+267b0d591",
+    "@atlaspack/build-cache": "2.13.3-canary.209+2f29297bd",
+    "@atlaspack/feature-flags": "2.14.1-canary.209+2f29297bd",
+    "@atlaspack/fs": "2.14.5-canary.141+2f29297bd",
+    "@atlaspack/logger": "2.14.5-canary.141+2f29297bd",
+    "@atlaspack/rust": "3.2.1-canary.141+2f29297bd",
+    "@atlaspack/utils": "2.14.5-canary.141+2f29297bd",
     "ncp": "^2.0.0"
   },
   "devDependencies": {
@@ -37,5 +36,5 @@
   "browser": {
     "./src/IDBCache.js": "./src/IDBCache.browser.js"
   },
-  "gitHead": "267b0d59131c00e4cd1526bbf87aa84b9c8fd411"
-}
+  "gitHead": "2f29297bd550840c9840871559349ef773bcbf6d"
+}
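The entry points now reference the compiled output and TypeScript source ("main": "./lib/index.js", "source": "./src/index.ts", "types": "./lib/index.d.ts"), and the old flow-to-ts build step is replaced by declaration emit via tsc. Consumption does not change; as a rough usage sketch (the cache directory below is made up, everything else appears in the code in this diff), the implementations are still imported from the package root:

import {LMDBLiteCache} from '@atlaspack/cache';

async function main() {
  // Example directory; any writable path works.
  const cache = new LMDBLiteCache('/tmp/atlaspack-cache-example');
  await cache.ensure();
  await cache.set('greeting', {hello: 'world'});
  console.log(await cache.get('greeting'));
}

main();

The remaining hunks below track the Flow-to-TypeScript migration of the source files (FSCache, IDBCache, the browser IDBCache stub, LMDBLiteCache, constants, index, types).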
@@ -1,5 +1,3 @@
-// @flow strict-local
-
 import type {Readable, Writable} from 'stream';
 import type {FilePath} from '@atlaspack/types';
 import type {FileSystem} from '@atlaspack/fs';
@@ -9,6 +7,8 @@ import stream from 'stream';
 import path from 'path';
 import {promisify} from 'util';
 
+import {hashString} from '@atlaspack/rust';
+import {getFeatureFlag} from '@atlaspack/feature-flags';
 import logger from '@atlaspack/logger';
 import {
   deserialize,
@@ -21,7 +21,7 @@ import packageJson from '../package.json';
 
 import {WRITE_LIMIT_CHUNK} from './constants';
 
-const pipeline: (Readable, Writable) => Promise<void> = promisify(
+const pipeline: (arg1: Readable, arg2: Writable) => Promise<void> = promisify(
   stream.pipeline,
 );
 
@@ -40,9 +40,10 @@ export class FSCache implements Cache {
 
     // In parallel, create sub-directories for every possible hex value
    // This speeds up large caches on many file systems since there are fewer files in a single directory.
-    let dirPromises = [];
+    let dirPromises: Array<Promise<undefined>> = [];
     for (let i = 0; i < 256; i++) {
       dirPromises.push(
+        // @ts-expect-error TS2345
         this.fs.mkdirp(path.join(this.dir, ('00' + i.toString(16)).slice(-2))),
       );
     }
@@ -51,6 +52,10 @@ export class FSCache implements Cache {
   }
 
   _getCachePath(cacheId: string): FilePath {
+    if (getFeatureFlag('cachePerformanceImprovements')) {
+      const cleanId = hashString(cacheId);
+      return path.join(this.dir, cleanId.slice(0, 2), cleanId.slice(2));
+    }
     return path.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
   }
 
@@ -77,10 +82,10 @@ export class FSCache implements Cache {
     await this.fs.writeFile(this._getCachePath(key), contents);
   }
 
-  async getBuffer(key: string): Promise<?Buffer> {
+  async getBuffer(key: string): Promise<Buffer | null | undefined> {
     try {
       return await this.fs.readFile(this._getCachePath(key));
-    } catch (err) {
+    } catch (err: any) {
       if (err.code === 'ENOENT') {
         return null;
       } else {
@@ -90,14 +95,17 @@ export class FSCache implements Cache {
   }
 
   #getFilePath(key: string, index: number): string {
+    if (getFeatureFlag('cachePerformanceImprovements')) {
+      return path.join(this.dir, `${hashString(key)}-${index}`);
+    }
     return path.join(this.dir, `${key}-${index}`);
   }
 
-  async #unlinkChunks(key: string, index: number): Promise<void> {
+  async #unlinkChunks(key: string, index: number): Promise<undefined> {
     try {
       await this.fs.unlink(this.#getFilePath(key, index));
       await this.#unlinkChunks(key, index + 1);
-    } catch (err) {
+    } catch (err: any) {
       // If there's an error, no more chunks are left to delete
     }
   }
@@ -120,21 +128,26 @@ export class FSCache implements Cache {
   async setLargeBlob(
     key: string,
     contents: Buffer | string,
-    options?: {|signal?: AbortSignal|},
+    options?: {
+      signal?: AbortSignal;
+    },
   ): Promise<void> {
     const chunks = Math.ceil(contents.length / WRITE_LIMIT_CHUNK);
 
-    const writePromises: Promise<void>[] = [];
+    const writePromises: Promise<undefined>[] = [];
     if (chunks === 1) {
       // If there's one chunk, don't slice the content
       writePromises.push(
+        // @ts-expect-error TS2345
         this.fs.writeFile(this.#getFilePath(key, 0), contents, {
+          // @ts-expect-error TS2353
           signal: options?.signal,
         }),
       );
     } else {
       for (let i = 0; i < chunks; i += 1) {
         writePromises.push(
+          // @ts-expect-error TS2345
           this.fs.writeFile(
             this.#getFilePath(key, i),
             typeof contents === 'string'
@@ -146,6 +159,7 @@ export class FSCache implements Cache {
               i * WRITE_LIMIT_CHUNK,
               (i + 1) * WRITE_LIMIT_CHUNK,
             ),
+            // @ts-expect-error TS2353
             {signal: options?.signal},
           ),
         );
@@ -159,12 +173,13 @@ export class FSCache implements Cache {
   }
 
   async deleteLargeBlob(key: string): Promise<void> {
-    const deletePromises: Promise<void>[] = [];
+    const deletePromises: Promise<undefined>[] = [];
 
     let i = 0;
     let filePath = this.#getFilePath(key, i);
 
     while (await this.fs.exists(filePath)) {
+      // @ts-expect-error TS2345
       deletePromises.push(this.fs.rimraf(filePath));
       i += 1;
       filePath = this.#getFilePath(key, i);
@@ -173,11 +188,11 @@ export class FSCache implements Cache {
     await Promise.all(deletePromises);
   }
 
-  async get<T>(key: string): Promise<?T> {
+  async get<T>(key: string): Promise<T | null | undefined> {
     try {
       let data = await this.fs.readFile(this._getCachePath(key));
       return deserialize(data);
-    } catch (err) {
+    } catch (err: any) {
       if (err.code === 'ENOENT') {
         return null;
       } else {
@@ -186,13 +201,13 @@ export class FSCache implements Cache {
     }
   }
 
-  async set(key: string, value: mixed): Promise<void> {
+  async set(key: string, value: unknown): Promise<void> {
     try {
       let blobPath = this._getCachePath(key);
       let data = serialize(value);
 
       await this.fs.writeFile(blobPath, data);
-    } catch (err) {
+    } catch (err: any) {
       logger.error(err, '@atlaspack/cache');
     }
   }
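With the cachePerformanceImprovements feature flag enabled, _getCachePath now hashes the incoming cache id with hashString from @atlaspack/rust before sharding it across the 256 two-hex-character subdirectories that ensure() pre-creates, and #getFilePath similarly hashes the key used for chunk files. A minimal sketch of that layout, using Node's built-in crypto as a stand-in for the native hashString (the real hash values will differ):

import {createHash} from 'crypto';
import path from 'path';

// Stand-in for hashString from @atlaspack/rust; only the shape of the
// result matters for this sketch.
function hashString(input: string): string {
  return createHash('sha256').update(input).digest('hex');
}

// Mirrors FSCache._getCachePath with the flag on: the first two hex
// characters pick one of the 256 pre-created subdirectories and the
// remainder becomes the file name.
function getCachePath(dir: string, cacheId: string): string {
  const cleanId = hashString(cacheId);
  return path.join(dir, cleanId.slice(0, 2), cleanId.slice(2));
}

Hashing first also means arbitrary cache ids never appear verbatim in the on-disk layout.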
@@ -1,4 +1,3 @@
-// @flow strict-local
 import type {Cache} from './types';
 
 import {Readable} from 'stream';
@@ -9,21 +8,18 @@ import {
   serialize,
 } from '@atlaspack/build-cache';
 import {bufferStream} from '@atlaspack/utils';
-// $FlowFixMe[untyped-import]
 import {openDB} from 'idb';
 
-// $FlowFixMe[untyped-import]
 import packageJson from '../package.json';
 
 const STORE_NAME = 'cache';
 
 export class IDBCache implements Cache {
-  // $FlowFixMe
   store: any;
 
   constructor() {
     this.store = openDB('REPL-parcel-cache', 1, {
-      upgrade(db) {
+      upgrade(db: any) {
         db.createObjectStore(STORE_NAME);
       },
       blocked() {},
@@ -36,7 +32,7 @@ export class IDBCache implements Cache {
     return Promise.resolve();
   }
 
-  serialize(): {||} {
+  serialize(): Record<any, any> {
     return {
       /*::...null*/
     };
@@ -50,7 +46,7 @@ export class IDBCache implements Cache {
     return Promise.resolve(this.store.get(key) != null);
   }
 
-  async get<T>(key: string): Promise<?T> {
+  async get<T>(key: string): Promise<T | null | undefined> {
     let data = await (await this.store).get(STORE_NAME, key);
     if (data == null) {
       return null;
@@ -59,17 +55,19 @@ export class IDBCache implements Cache {
     return Promise.resolve(deserialize(data));
   }
 
-  async set(key: string, value: mixed): Promise<void> {
+  async set(key: string, value: unknown): Promise<void> {
     await (await this.store).put(STORE_NAME, serialize(value), key);
   }
 
   getStream(key: string): Readable {
     let dataPromise = this.store
+      // @ts-expect-error TS7006
       .then((s) => s.get(STORE_NAME, key))
+      // @ts-expect-error TS7006
      .then((d) => Buffer.from(d))
+      // @ts-expect-error TS7006
      .catch((e) => e);
     const stream = new Readable({
-      // $FlowFixMe(incompatible-call)
       async read() {
         let data = await dataPromise;
         if (data instanceof Error) {
@@ -118,7 +116,7 @@ export class IDBCache implements Cache {
   //   ]);
   // }
 
-  async getBuffer(key: string): Promise<?Buffer> {
+  async getBuffer(key: string): Promise<Buffer | null | undefined> {
     let data = await (await this.store).get(STORE_NAME, key);
     if (data == null) {
       return null;
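Most of the changes above are mechanical Flow-to-TypeScript conversions: Promise<?T> becomes Promise<T | null | undefined>, mixed becomes unknown, and blanket $FlowFixMe suppressions are dropped in favour of targeted @ts-expect-error comments. The one piece of real logic, getStream, adapts a promise-resolved value into a Readable; a small self-contained sketch of that pattern (hypothetical helper name, not part of the package):

import {Readable} from 'stream';

// Turn a promise for a Buffer into a Readable, surfacing rejections as
// stream errors, the same shape as IDBCache.getStream.
function streamFromPromise(dataPromise: Promise<Buffer>): Readable {
  // Swallow the rejection here and re-check the settled value in read().
  const settled = dataPromise.catch((e: Error) => e);
  return new Readable({
    async read() {
      const data = await settled;
      if (data instanceof Error) {
        this.emit('error', data);
        return;
      }
      this.push(Buffer.from(data));
      this.push(null); // single-chunk stream: signal end of data
    },
  });
}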
@@ -1,7 +1,6 @@
-// @flow strict-local
 import type {Cache} from './types';
 
-// $FlowFixMe
+// @ts-expect-error TS2420
 export class IDBCache implements Cache {
   constructor() {
     throw new Error('IDBCache is only supported in the browser');
@@ -1,5 +1,3 @@
-// @flow strict-local
-
 import {
   deserialize,
   registerSerializableClass,
@@ -10,15 +8,15 @@ import {Lmdb} from '@atlaspack/rust';
 import type {FilePath} from '@atlaspack/types';
 import type {Cache} from './types';
 import type {Readable, Writable} from 'stream';
-import fs from 'fs';
+// @ts-expect-error TS7016
 import ncp from 'ncp';
 import {promisify} from 'util';
 import stream from 'stream';
 import path from 'path';
 import {NodeFS} from '@atlaspack/fs';
-// $FlowFixMe
 import packageJson from '../package.json';
 import {FSCache} from './FSCache';
+import {instrumentAsync} from '@atlaspack/logger';
 
 const ncpAsync = promisify(ncp);
 
@@ -35,11 +33,6 @@ export class LmdbWrapper {
 
   constructor(lmdb: Lmdb) {
     this.lmdb = lmdb;
-
-    // $FlowFixMe
-    this[Symbol.dispose] = () => {
-      this.lmdb.close();
-    };
   }
 
   has(key: string): boolean {
@@ -94,23 +87,28 @@ export function open(
   );
 }
 
-const pipeline: (Readable, Writable) => Promise<void> = promisify(
+const pipeline: (arg1: Readable, arg2: Writable) => Promise<void> = promisify(
   stream.pipeline,
 );
 
-export type SerLMDBLiteCache = {|
-  dir: FilePath,
-|};
+export type SerLMDBLiteCache = {
+  dir: FilePath;
+};
 
 export class LMDBLiteCache implements Cache {
   fs: NodeFS;
   dir: FilePath;
   store: LmdbWrapper;
   fsCache: FSCache;
+  /**
+   * Directory where we store raw files.
+   */
+  cacheFilesDirectory: FilePath;
 
   constructor(cacheDir: FilePath) {
     this.fs = new NodeFS();
     this.dir = cacheDir;
+    this.cacheFilesDirectory = path.join(cacheDir, 'files');
     this.fsCache = new FSCache(this.fs, cacheDir);
 
     this.store = open(cacheDir, {
@@ -131,6 +129,7 @@ export class LMDBLiteCache implements Cache {
     if (!getFeatureFlag('cachePerformanceImprovements')) {
       await this.fsCache.ensure();
     }
+    await this.fs.mkdirp(this.cacheFilesDirectory);
     return Promise.resolve();
   }
 
@@ -148,7 +147,7 @@ export class LMDBLiteCache implements Cache {
     return Promise.resolve(this.store.has(key));
   }
 
-  get<T>(key: string): Promise<?T> {
+  get<T>(key: string): Promise<T | null | undefined> {
     let data = this.store.get(key);
     if (data == null) {
       return Promise.resolve(null);
@@ -157,19 +156,29 @@ export class LMDBLiteCache implements Cache {
     return Promise.resolve(deserialize(data));
   }
 
-  async set(key: string, value: mixed): Promise<void> {
+  async set(key: string, value: unknown): Promise<void> {
     await this.setBlob(key, serialize(value));
   }
 
   getStream(key: string): Readable {
-    return this.fs.createReadStream(path.join(this.dir, key));
+    if (!getFeatureFlag('cachePerformanceImprovements')) {
+      return this.fs.createReadStream(path.join(this.dir, key));
+    }
+
+    return this.fs.createReadStream(this.getFileKey(key));
   }
 
-  setStream(key: string, stream: Readable): Promise<void> {
-    return pipeline(
-      stream,
-      this.fs.createWriteStream(path.join(this.dir, key)),
-    );
+  async setStream(key: string, stream: Readable): Promise<void> {
+    if (!getFeatureFlag('cachePerformanceImprovements')) {
+      return pipeline(
+        stream,
+        this.fs.createWriteStream(path.join(this.dir, key)),
+      );
+    }
+
+    const filePath = this.getFileKey(key);
+    await this.fs.mkdirp(path.dirname(filePath));
+    return pipeline(stream, this.fs.createWriteStream(filePath));
   }
 
   // eslint-disable-next-line require-await
@@ -189,43 +198,39 @@ export class LMDBLiteCache implements Cache {
     await this.store.put(key, contents);
   }
 
-  getBuffer(key: string): Promise<?Buffer> {
+  getBuffer(key: string): Promise<Buffer | null | undefined> {
     return Promise.resolve(this.store.get(key));
   }
 
-  #getFilePath(key: string, index: number): string {
-    return path.join(this.dir, `${key}-${index}`);
-  }
-
   hasLargeBlob(key: string): Promise<boolean> {
     if (!getFeatureFlag('cachePerformanceImprovements')) {
       return this.fsCache.hasLargeBlob(key);
     }
-    return this.has(key);
+
+    return this.fs.exists(this.getFileKey(key));
   }
 
-  /**
-   * @deprecated Use getBlob instead.
-   */
   getLargeBlob(key: string): Promise<Buffer> {
     if (!getFeatureFlag('cachePerformanceImprovements')) {
       return this.fsCache.getLargeBlob(key);
     }
-    return Promise.resolve(this.getBlobSync(key));
+    return this.fs.readFile(this.getFileKey(key));
  }
 
-  /**
-   * @deprecated Use setBlob instead.
-   */
-  setLargeBlob(
+  async setLargeBlob(
     key: string,
     contents: Buffer | string,
-    options?: {|signal?: AbortSignal|},
+    options?: {
+      signal?: AbortSignal;
+    },
   ): Promise<void> {
     if (!getFeatureFlag('cachePerformanceImprovements')) {
       return this.fsCache.setLargeBlob(key, contents, options);
     }
-    return this.setBlob(key, contents);
+
+    const targetPath = this.getFileKey(key);
+    await this.fs.mkdirp(path.dirname(targetPath));
+    return this.fs.writeFile(targetPath, contents);
   }
 
   /**
@@ -244,9 +249,9 @@ export class LMDBLiteCache implements Cache {
   }
 
   async compact(targetPath: string): Promise<void> {
-    await fs.promises.mkdir(targetPath, {recursive: true});
+    await this.fs.mkdirp(targetPath);
 
-    const files = await fs.promises.readdir(this.dir);
+    const files = await this.fs.readdir(this.dir);
     // copy all files except data.mdb and lock.mdb to the target path (recursive)
     for (const file of files) {
       const filePath = path.join(this.dir, file);
@@ -262,6 +267,42 @@ export class LMDBLiteCache implements Cache {
   }
 
   refresh(): void {}
+
+  /**
+   * Streams, packages are stored in files instead of LMDB.
+   *
+   * On this case, if a cache key happens to have a parent traversal, ../..
+   * it is treated specially
+   *
+   * That is, something/../something and something are meant to be different
+   * keys.
+   *
+   * Plus we do not want to store values outside of the cache directory.
+   */
+  getFileKey(key: string): string {
+    const cleanKey = key
+      .split('/')
+      .map((part) => {
+        if (part === '..') {
+          return '$$__parent_dir$$';
+        }
+        return part;
+      })
+      .join('/');
+    return path.join(this.cacheFilesDirectory, cleanKey);
+  }
+
+  async clear(): Promise<void> {
+    await instrumentAsync('LMDBLiteCache::clear', async () => {
+      const keys = await this.keys();
+      for (const key of keys) {
+        await this.store.delete(key);
+      }
+
+      await this.fs.rimraf(this.cacheFilesDirectory);
+      await this.fs.mkdirp(this.cacheFilesDirectory);
    });
+  }
 }
 
 registerSerializableClass(
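As the new doc comment on getFileKey notes, streams and large blobs are now stored as plain files under the cache's files directory, so keys have to be mapped to paths that cannot escape that directory while keeping 'something/../something' and 'something' distinct. A standalone sketch of the same mapping (hypothetical free function, same logic as the method above):

import path from 'path';

// Mirrors LMDBLiteCache.getFileKey: '..' segments are replaced with a
// sentinel, so parent traversals neither escape the files directory nor
// collide with other keys.
function getFileKey(cacheFilesDirectory: string, key: string): string {
  const cleanKey = key
    .split('/')
    .map((part) => (part === '..' ? '$$__parent_dir$$' : part))
    .join('/');
  return path.join(cacheFilesDirectory, cleanKey);
}

// getFileKey('/cache/files', 'something/../something')
//   -> '/cache/files/something/$$__parent_dir$$/something'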
@@ -1,4 +1,2 @@
-// @flow strict-local
-
 // Node has a file size limit of 2 GB
 export const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;
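WRITE_LIMIT_CHUNK is what FSCache.setLargeBlob uses to split contents larger than Node's 2 GB write limit into numbered chunk files (key-0, key-1, ...). A quick sketch of the chunk arithmetic (hypothetical helper, matching the effective slicing in setLargeBlob):

// Same value as the constant above: 2 GiB.
const WRITE_LIMIT_CHUNK = 2 * 1024 ** 3;

// [start, end) byte ranges of the chunk files for a blob of `length` bytes.
function chunkRanges(length: number): Array<[number, number]> {
  const chunks = Math.ceil(length / WRITE_LIMIT_CHUNK);
  const ranges: Array<[number, number]> = [];
  for (let i = 0; i < chunks; i += 1) {
    ranges.push([
      i * WRITE_LIMIT_CHUNK,
      Math.min((i + 1) * WRITE_LIMIT_CHUNK, length),
    ]);
  }
  return ranges;
}

// e.g. a 5 GiB blob maps to three chunk files:
// chunkRanges(5 * 1024 ** 3) -> [[0, 2 GiB], [2 GiB, 4 GiB], [4 GiB, 5 GiB]]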
@@ -1,5 +1,3 @@
-// @flow
-
 export * from './FSCache';
 export * from './IDBCache';
 export * from './LMDBLiteCache';
@@ -1,4 +1,3 @@
-// @flow
 import type {Cache} from '@atlaspack/types';
 
 export type {Cache};