boltdocs 1.3.0 → 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{cache-EHR7SXRU.mjs → cache-GQHF6BXI.mjs} +1 -1
- package/dist/{chunk-GSYECEZY.mjs → chunk-CYBWLFOG.mjs} +5 -1
- package/dist/node/index.js +36 -20
- package/dist/node/index.mjs +34 -22
- package/package.json +1 -1
- package/src/client/app/index.tsx +344 -344
- package/src/client/app/preload.tsx +56 -56
- package/src/client/index.ts +40 -40
- package/src/client/ssr.tsx +51 -51
- package/src/client/theme/components/CodeBlock/CodeBlock.tsx +76 -76
- package/src/client/theme/components/CodeBlock/index.ts +1 -1
- package/src/client/theme/components/PackageManagerTabs/PackageManagerTabs.tsx +154 -154
- package/src/client/theme/components/PackageManagerTabs/index.ts +1 -1
- package/src/client/theme/components/PackageManagerTabs/pkg-tabs.css +64 -64
- package/src/client/theme/components/Playground/Playground.tsx +124 -124
- package/src/client/theme/components/Playground/index.ts +1 -1
- package/src/client/theme/components/Playground/playground.css +168 -168
- package/src/client/theme/components/Video/Video.tsx +84 -84
- package/src/client/theme/components/Video/index.ts +1 -1
- package/src/client/theme/components/Video/video.css +41 -41
- package/src/client/theme/components/mdx/Admonition.tsx +80 -80
- package/src/client/theme/components/mdx/Badge.tsx +31 -31
- package/src/client/theme/components/mdx/Button.tsx +50 -50
- package/src/client/theme/components/mdx/Card.tsx +80 -80
- package/src/client/theme/components/mdx/List.tsx +57 -57
- package/src/client/theme/components/mdx/Tabs.tsx +94 -94
- package/src/client/theme/components/mdx/index.ts +18 -18
- package/src/client/theme/components/mdx/mdx-components.css +424 -424
- package/src/client/theme/icons/bun.tsx +62 -62
- package/src/client/theme/icons/deno.tsx +20 -20
- package/src/client/theme/icons/discord.tsx +12 -12
- package/src/client/theme/icons/github.tsx +15 -15
- package/src/client/theme/icons/npm.tsx +13 -13
- package/src/client/theme/icons/pnpm.tsx +72 -72
- package/src/client/theme/icons/twitter.tsx +12 -12
- package/src/client/theme/styles/markdown.css +343 -343
- package/src/client/theme/styles/variables.css +162 -162
- package/src/client/theme/styles.css +37 -37
- package/src/client/theme/ui/BackgroundGradient/BackgroundGradient.tsx +10 -10
- package/src/client/theme/ui/BackgroundGradient/index.ts +1 -1
- package/src/client/theme/ui/Breadcrumbs/Breadcrumbs.tsx +68 -68
- package/src/client/theme/ui/Breadcrumbs/index.ts +1 -1
- package/src/client/theme/ui/Footer/footer.css +32 -32
- package/src/client/theme/ui/Head/Head.tsx +69 -69
- package/src/client/theme/ui/Head/index.ts +1 -1
- package/src/client/theme/ui/LanguageSwitcher/LanguageSwitcher.tsx +125 -125
- package/src/client/theme/ui/LanguageSwitcher/index.ts +1 -1
- package/src/client/theme/ui/LanguageSwitcher/language-switcher.css +98 -98
- package/src/client/theme/ui/Layout/Layout.tsx +202 -202
- package/src/client/theme/ui/Layout/base.css +76 -76
- package/src/client/theme/ui/Layout/index.ts +2 -2
- package/src/client/theme/ui/Layout/pagination.css +72 -72
- package/src/client/theme/ui/Layout/responsive.css +36 -36
- package/src/client/theme/ui/Link/Link.tsx +254 -254
- package/src/client/theme/ui/Link/index.ts +2 -2
- package/src/client/theme/ui/Loading/Loading.tsx +10 -10
- package/src/client/theme/ui/Loading/index.ts +1 -1
- package/src/client/theme/ui/Loading/loading.css +30 -30
- package/src/client/theme/ui/Navbar/GithubStars.tsx +27 -27
- package/src/client/theme/ui/Navbar/Navbar.tsx +145 -145
- package/src/client/theme/ui/Navbar/index.ts +2 -2
- package/src/client/theme/ui/Navbar/navbar.css +233 -233
- package/src/client/theme/ui/NotFound/NotFound.tsx +19 -19
- package/src/client/theme/ui/NotFound/index.ts +1 -1
- package/src/client/theme/ui/NotFound/not-found.css +64 -64
- package/src/client/theme/ui/OnThisPage/OnThisPage.tsx +235 -235
- package/src/client/theme/ui/OnThisPage/index.ts +1 -1
- package/src/client/theme/ui/OnThisPage/toc.css +132 -132
- package/src/client/theme/ui/PoweredBy/PoweredBy.tsx +18 -18
- package/src/client/theme/ui/PoweredBy/index.ts +1 -1
- package/src/client/theme/ui/PoweredBy/powered-by.css +76 -76
- package/src/client/theme/ui/SearchDialog/SearchDialog.tsx +199 -199
- package/src/client/theme/ui/SearchDialog/index.ts +1 -1
- package/src/client/theme/ui/SearchDialog/search.css +152 -152
- package/src/client/theme/ui/Sidebar/Sidebar.tsx +204 -204
- package/src/client/theme/ui/Sidebar/index.ts +1 -1
- package/src/client/theme/ui/Sidebar/sidebar.css +236 -236
- package/src/client/theme/ui/ThemeToggle/ThemeToggle.tsx +69 -69
- package/src/client/theme/ui/ThemeToggle/index.ts +1 -1
- package/src/client/theme/ui/VersionSwitcher/VersionSwitcher.tsx +136 -136
- package/src/client/theme/ui/VersionSwitcher/index.ts +1 -1
- package/src/client/types.ts +50 -50
- package/src/client/utils.ts +26 -26
- package/src/node/cache.ts +408 -408
- package/src/node/config.ts +192 -192
- package/src/node/index.ts +21 -21
- package/src/node/mdx.ts +120 -120
- package/src/node/plugin/entry.ts +58 -58
- package/src/node/plugin/html.ts +55 -55
- package/src/node/plugin/index.ts +193 -193
- package/src/node/plugin/types.ts +11 -11
- package/src/node/routes/cache.ts +28 -28
- package/src/node/routes/index.ts +167 -167
- package/src/node/routes/parser.ts +153 -127
- package/src/node/routes/sorter.ts +42 -42
- package/src/node/routes/types.ts +49 -49
- package/src/node/ssg/index.ts +114 -114
- package/src/node/ssg/meta.ts +33 -34
- package/src/node/ssg/options.ts +13 -13
- package/src/node/ssg/sitemap.ts +55 -54
- package/src/node/utils.ts +145 -134
- package/tsconfig.json +20 -20
- package/tsup.config.ts +22 -22
package/src/node/cache.ts
CHANGED
|
@@ -1,408 +1,408 @@
|
|
|
1
|
-
import fs from "fs";
|
|
2
|
-
import path from "path";
|
|
3
|
-
import crypto from "crypto";
|
|
4
|
-
import zlib from "zlib";
|
|
5
|
-
import { promisify } from "util";
|
|
6
|
-
import { getFileMtime } from "./utils";
|
|
7
|
-
|
|
8
|
-
const writeFile = promisify(fs.writeFile);
|
|
9
|
-
const readFile = promisify(fs.readFile);
|
|
10
|
-
const mkdir = promisify(fs.mkdir);
|
|
11
|
-
const rename = promisify(fs.rename);
|
|
12
|
-
const unlink = promisify(fs.unlink);
|
|
13
|
-
|
|
14
|
-
/**
|
|
15
|
-
* Configuration constants for the caching system.
|
|
16
|
-
*/
|
|
17
|
-
const CACHE_DIR = process.env.BOLTDOCS_CACHE_DIR || ".boltdocs";
|
|
18
|
-
const ASSETS_DIR = "assets";
|
|
19
|
-
const SHARDS_DIR = "shards";
|
|
20
|
-
|
|
21
|
-
/**
|
|
22
|
-
* Default limits for the caching system.
|
|
23
|
-
*/
|
|
24
|
-
const DEFAULT_LRU_LIMIT = parseInt(
|
|
25
|
-
process.env.BOLTDOCS_CACHE_LRU_LIMIT || "2000",
|
|
26
|
-
10,
|
|
27
|
-
);
|
|
28
|
-
const DEFAULT_COMPRESS = process.env.BOLTDOCS_CACHE_COMPRESS !== "0";
|
|
29
|
-
|
|
30
|
-
/**
|
|
31
|
-
* Simple LRU cache implementation to prevent memory leaks.
|
|
32
|
-
*/
|
|
33
|
-
class LRUCache<K, V> {
|
|
34
|
-
private cache = new Map<K, V>();
|
|
35
|
-
constructor(private limit: number) {}
|
|
36
|
-
|
|
37
|
-
get(key: K): V | undefined {
|
|
38
|
-
const val = this.cache.get(key);
|
|
39
|
-
if (val !== undefined) {
|
|
40
|
-
this.cache.delete(key);
|
|
41
|
-
this.cache.set(key, val);
|
|
42
|
-
}
|
|
43
|
-
return val;
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
set(key: K, value: V): void {
|
|
47
|
-
if (this.cache.has(key)) {
|
|
48
|
-
this.cache.delete(key);
|
|
49
|
-
} else if (this.cache.size >= this.limit) {
|
|
50
|
-
const firstKey = this.cache.keys().next().value;
|
|
51
|
-
if (firstKey !== undefined) {
|
|
52
|
-
this.cache.delete(firstKey);
|
|
53
|
-
}
|
|
54
|
-
}
|
|
55
|
-
this.cache.set(key, value);
|
|
56
|
-
}
|
|
57
|
-
|
|
58
|
-
get size() {
|
|
59
|
-
return this.cache.size;
|
|
60
|
-
}
|
|
61
|
-
clear() {
|
|
62
|
-
this.cache.clear();
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
|
|
66
|
-
/**
|
|
67
|
-
* Simple background task queue to prevent blocking the main thread during IO.
|
|
68
|
-
*/
|
|
69
|
-
class BackgroundQueue {
|
|
70
|
-
private queue: Promise<any> = Promise.resolve();
|
|
71
|
-
private pendingCount = 0;
|
|
72
|
-
|
|
73
|
-
add(task: () => Promise<any>) {
|
|
74
|
-
this.pendingCount++;
|
|
75
|
-
this.queue = this.queue.then(task).finally(() => {
|
|
76
|
-
this.pendingCount--;
|
|
77
|
-
});
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
async flush() {
|
|
81
|
-
await this.queue;
|
|
82
|
-
}
|
|
83
|
-
|
|
84
|
-
get pending() {
|
|
85
|
-
return this.pendingCount;
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
const backgroundQueue = new BackgroundQueue();
|
|
90
|
-
|
|
91
|
-
/**
|
|
92
|
-
* Generic file-based cache with per-file granularity and asynchronous persistence.
|
|
93
|
-
*/
|
|
94
|
-
export class FileCache<T> {
|
|
95
|
-
private entries = new Map<string, { data: T; mtime: number }>();
|
|
96
|
-
private readonly cachePath: string | null = null;
|
|
97
|
-
private readonly compress: boolean;
|
|
98
|
-
|
|
99
|
-
constructor(
|
|
100
|
-
options: { name?: string; root?: string; compress?: boolean } = {},
|
|
101
|
-
) {
|
|
102
|
-
this.compress =
|
|
103
|
-
options.compress !== undefined ? options.compress : DEFAULT_COMPRESS;
|
|
104
|
-
if (options.name) {
|
|
105
|
-
const root = options.root || process.cwd();
|
|
106
|
-
const ext = this.compress ? "json.gz" : "json";
|
|
107
|
-
this.cachePath = path.resolve(root, CACHE_DIR, `${options.name}.${ext}`);
|
|
108
|
-
}
|
|
109
|
-
}
|
|
110
|
-
|
|
111
|
-
/**
|
|
112
|
-
* Loads the cache. Synchronous for startup simplicity but uses fast I/O.
|
|
113
|
-
*/
|
|
114
|
-
load(): void {
|
|
115
|
-
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
116
|
-
if (!this.cachePath || !fs.existsSync(this.cachePath)) return;
|
|
117
|
-
|
|
118
|
-
try {
|
|
119
|
-
let raw = fs.readFileSync(this.cachePath);
|
|
120
|
-
if (this.cachePath.endsWith(".gz")) {
|
|
121
|
-
raw = zlib.gunzipSync(raw);
|
|
122
|
-
}
|
|
123
|
-
const data = JSON.parse(raw.toString("utf-8"));
|
|
124
|
-
this.entries = new Map(Object.entries(data));
|
|
125
|
-
} catch (e) {
|
|
126
|
-
// Fallback: ignore cache errors
|
|
127
|
-
}
|
|
128
|
-
}
|
|
129
|
-
|
|
130
|
-
/**
|
|
131
|
-
* Saves the cache in the background.
|
|
132
|
-
*/
|
|
133
|
-
save(): void {
|
|
134
|
-
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
135
|
-
if (!this.cachePath) return;
|
|
136
|
-
|
|
137
|
-
const data = Object.fromEntries(this.entries);
|
|
138
|
-
const content = JSON.stringify(data);
|
|
139
|
-
const target = this.cachePath;
|
|
140
|
-
const useCompress = this.compress;
|
|
141
|
-
|
|
142
|
-
backgroundQueue.add(async () => {
|
|
143
|
-
try {
|
|
144
|
-
await mkdir(path.dirname(target), { recursive: true });
|
|
145
|
-
let buffer = Buffer.from(content);
|
|
146
|
-
if (useCompress) {
|
|
147
|
-
buffer = zlib.gzipSync(buffer);
|
|
148
|
-
}
|
|
149
|
-
const tempPath = `${target}.${crypto.randomBytes(4).toString("hex")}.tmp`;
|
|
150
|
-
await writeFile(tempPath, buffer);
|
|
151
|
-
await rename(tempPath, target);
|
|
152
|
-
} catch (e) {
|
|
153
|
-
// Fallback: critical error logging skipped for performance
|
|
154
|
-
}
|
|
155
|
-
});
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
get(filePath: string): T | null {
|
|
159
|
-
const entry = this.entries.get(filePath);
|
|
160
|
-
if (!entry) return null;
|
|
161
|
-
if (getFileMtime(filePath) !== entry.mtime) return null;
|
|
162
|
-
return entry.data;
|
|
163
|
-
}
|
|
164
|
-
|
|
165
|
-
set(filePath: string, data: T): void {
|
|
166
|
-
this.entries.set(filePath, {
|
|
167
|
-
data,
|
|
168
|
-
mtime: getFileMtime(filePath),
|
|
169
|
-
});
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
isValid(filePath: string): boolean {
|
|
173
|
-
const entry = this.entries.get(filePath);
|
|
174
|
-
if (!entry) return false;
|
|
175
|
-
return getFileMtime(filePath) === entry.mtime;
|
|
176
|
-
}
|
|
177
|
-
|
|
178
|
-
invalidate(filePath: string): void {
|
|
179
|
-
this.entries.delete(filePath);
|
|
180
|
-
}
|
|
181
|
-
|
|
182
|
-
invalidateAll(): void {
|
|
183
|
-
this.entries.clear();
|
|
184
|
-
}
|
|
185
|
-
|
|
186
|
-
pruneStale(currentFiles: Set<string>): void {
|
|
187
|
-
for (const key of this.entries.keys()) {
|
|
188
|
-
if (!currentFiles.has(key)) {
|
|
189
|
-
this.entries.delete(key);
|
|
190
|
-
}
|
|
191
|
-
}
|
|
192
|
-
}
|
|
193
|
-
|
|
194
|
-
get size(): number {
|
|
195
|
-
return this.entries.size;
|
|
196
|
-
}
|
|
197
|
-
|
|
198
|
-
async flush() {
|
|
199
|
-
await backgroundQueue.flush();
|
|
200
|
-
}
|
|
201
|
-
}
|
|
202
|
-
|
|
203
|
-
/**
|
|
204
|
-
* Sharded Cache: Optimized for large-scale data (like MDX transformations).
|
|
205
|
-
* Uses a memory index and individual files for each entry to avoid massive JSON parsing.
|
|
206
|
-
*/
|
|
207
|
-
export class TransformCache {
|
|
208
|
-
private index = new Map<string, string>(); // key -> hash
|
|
209
|
-
private memoryCache = new LRUCache<string, string>(DEFAULT_LRU_LIMIT);
|
|
210
|
-
private readonly baseDir: string;
|
|
211
|
-
private readonly shardsDir: string;
|
|
212
|
-
private readonly indexPath: string;
|
|
213
|
-
|
|
214
|
-
constructor(name: string, root: string = process.cwd()) {
|
|
215
|
-
this.baseDir = path.resolve(root, CACHE_DIR, `transform-${name}`);
|
|
216
|
-
this.shardsDir = path.resolve(this.baseDir, SHARDS_DIR);
|
|
217
|
-
this.indexPath = path.resolve(this.baseDir, "index.json");
|
|
218
|
-
}
|
|
219
|
-
|
|
220
|
-
/**
|
|
221
|
-
* Loads the index into memory.
|
|
222
|
-
*/
|
|
223
|
-
load(): void {
|
|
224
|
-
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
225
|
-
if (!fs.existsSync(this.indexPath)) return;
|
|
226
|
-
|
|
227
|
-
try {
|
|
228
|
-
const data = fs.readFileSync(this.indexPath, "utf-8");
|
|
229
|
-
this.index = new Map(Object.entries(JSON.parse(data)));
|
|
230
|
-
} catch (e) {
|
|
231
|
-
// Index might be corrupt, ignore
|
|
232
|
-
}
|
|
233
|
-
}
|
|
234
|
-
|
|
235
|
-
/**
|
|
236
|
-
* Persists the index in background.
|
|
237
|
-
*/
|
|
238
|
-
save(): void {
|
|
239
|
-
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
240
|
-
const data = JSON.stringify(Object.fromEntries(this.index));
|
|
241
|
-
const target = this.indexPath;
|
|
242
|
-
|
|
243
|
-
backgroundQueue.add(async () => {
|
|
244
|
-
await mkdir(path.dirname(target), { recursive: true });
|
|
245
|
-
await writeFile(target, data);
|
|
246
|
-
});
|
|
247
|
-
}
|
|
248
|
-
|
|
249
|
-
/**
|
|
250
|
-
* Batch Read: Retrieves multiple transformation results concurrently.
|
|
251
|
-
*/
|
|
252
|
-
async getMany(keys: string[]): Promise<Map<string, string>> {
|
|
253
|
-
const results = new Map<string, string>();
|
|
254
|
-
const toLoad: string[] = [];
|
|
255
|
-
|
|
256
|
-
for (const key of keys) {
|
|
257
|
-
const mem = this.memoryCache.get(key);
|
|
258
|
-
if (mem) results.set(key, mem);
|
|
259
|
-
else if (this.index.has(key)) toLoad.push(key);
|
|
260
|
-
}
|
|
261
|
-
|
|
262
|
-
if (toLoad.length > 0) {
|
|
263
|
-
const shards = await Promise.all(
|
|
264
|
-
toLoad.map(async (key) => {
|
|
265
|
-
const hash = this.index.get(key)!;
|
|
266
|
-
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
267
|
-
try {
|
|
268
|
-
const compressed = await readFile(shardPath);
|
|
269
|
-
const decompressed = zlib.gunzipSync(compressed).toString("utf-8");
|
|
270
|
-
this.memoryCache.set(key, decompressed);
|
|
271
|
-
return { key, val: decompressed };
|
|
272
|
-
} catch (e) {
|
|
273
|
-
return null;
|
|
274
|
-
}
|
|
275
|
-
}),
|
|
276
|
-
);
|
|
277
|
-
|
|
278
|
-
for (const s of shards) {
|
|
279
|
-
if (s) results.set(s.key, s.val);
|
|
280
|
-
}
|
|
281
|
-
}
|
|
282
|
-
|
|
283
|
-
return results;
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
/**
|
|
287
|
-
* Retrieves a cached transformation. Fast lookup via index, lazy loading from disk.
|
|
288
|
-
*/
|
|
289
|
-
get(key: string): string | null {
|
|
290
|
-
// 1. Check memory first (LRU)
|
|
291
|
-
const mem = this.memoryCache.get(key);
|
|
292
|
-
if (mem) return mem;
|
|
293
|
-
|
|
294
|
-
// 2. Check index
|
|
295
|
-
const hash = this.index.get(key);
|
|
296
|
-
if (!hash) return null;
|
|
297
|
-
|
|
298
|
-
// 3. Load from shard (synchronous read for Vite's transform hook compatibility)
|
|
299
|
-
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
300
|
-
if (!fs.existsSync(shardPath)) return null;
|
|
301
|
-
|
|
302
|
-
try {
|
|
303
|
-
const compressed = fs.readFileSync(shardPath);
|
|
304
|
-
const decompressed = zlib.gunzipSync(compressed).toString("utf-8");
|
|
305
|
-
this.memoryCache.set(key, decompressed);
|
|
306
|
-
return decompressed;
|
|
307
|
-
} catch (e) {
|
|
308
|
-
return null;
|
|
309
|
-
}
|
|
310
|
-
}
|
|
311
|
-
|
|
312
|
-
/**
|
|
313
|
-
* Stores a transformation result.
|
|
314
|
-
*/
|
|
315
|
-
set(key: string, result: string): void {
|
|
316
|
-
const hash = crypto.createHash("md5").update(result).digest("hex");
|
|
317
|
-
this.index.set(key, hash);
|
|
318
|
-
this.memoryCache.set(key, result);
|
|
319
|
-
|
|
320
|
-
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
321
|
-
|
|
322
|
-
// Background write shard
|
|
323
|
-
backgroundQueue.add(async () => {
|
|
324
|
-
if (fs.existsSync(shardPath)) return; // Already exists
|
|
325
|
-
await mkdir(this.shardsDir, { recursive: true });
|
|
326
|
-
|
|
327
|
-
const compressed = zlib.gzipSync(Buffer.from(result));
|
|
328
|
-
const tempPath = `${shardPath}.${crypto.randomBytes(4).toString("hex")}.tmp`;
|
|
329
|
-
await writeFile(tempPath, compressed);
|
|
330
|
-
await rename(tempPath, shardPath);
|
|
331
|
-
});
|
|
332
|
-
}
|
|
333
|
-
|
|
334
|
-
get size() {
|
|
335
|
-
return this.index.size;
|
|
336
|
-
}
|
|
337
|
-
|
|
338
|
-
async flush() {
|
|
339
|
-
await backgroundQueue.flush();
|
|
340
|
-
}
|
|
341
|
-
}
|
|
342
|
-
|
|
343
|
-
/**
|
|
344
|
-
* Specialized cache for processed assets (e.g., optimized images).
|
|
345
|
-
*/
|
|
346
|
-
export class AssetCache {
|
|
347
|
-
private readonly assetsDir: string;
|
|
348
|
-
|
|
349
|
-
constructor(root: string = process.cwd()) {
|
|
350
|
-
this.assetsDir = path.resolve(root, CACHE_DIR, ASSETS_DIR);
|
|
351
|
-
}
|
|
352
|
-
|
|
353
|
-
private getFileHash(filePath: string): string {
|
|
354
|
-
return crypto
|
|
355
|
-
.createHash("md5")
|
|
356
|
-
.update(fs.readFileSync(filePath))
|
|
357
|
-
.digest("hex");
|
|
358
|
-
}
|
|
359
|
-
|
|
360
|
-
get(sourcePath: string, cacheKey: string): string | null {
|
|
361
|
-
if (!fs.existsSync(sourcePath)) return null;
|
|
362
|
-
const sourceHash = this.getFileHash(sourcePath);
|
|
363
|
-
const cachedPath = this.getCachedPath(
|
|
364
|
-
sourcePath,
|
|
365
|
-
`${cacheKey}-${sourceHash}`,
|
|
366
|
-
);
|
|
367
|
-
return fs.existsSync(cachedPath) ? cachedPath : null;
|
|
368
|
-
}
|
|
369
|
-
|
|
370
|
-
set(sourcePath: string, cacheKey: string, content: Buffer | string): void {
|
|
371
|
-
const sourceHash = this.getFileHash(sourcePath);
|
|
372
|
-
const cachedPath = this.getCachedPath(
|
|
373
|
-
sourcePath,
|
|
374
|
-
`${cacheKey}-${sourceHash}`,
|
|
375
|
-
);
|
|
376
|
-
|
|
377
|
-
backgroundQueue.add(async () => {
|
|
378
|
-
await mkdir(this.assetsDir, { recursive: true });
|
|
379
|
-
const tempPath = `${cachedPath}.${crypto.randomBytes(4).toString("hex")}.tmp`;
|
|
380
|
-
await writeFile(tempPath, content);
|
|
381
|
-
await rename(tempPath, cachedPath);
|
|
382
|
-
});
|
|
383
|
-
}
|
|
384
|
-
|
|
385
|
-
private getCachedPath(sourcePath: string, cacheKey: string): string {
|
|
386
|
-
const ext = path.extname(sourcePath);
|
|
387
|
-
const name = path.basename(sourcePath, ext);
|
|
388
|
-
const safeKey = cacheKey.replace(/[^a-z0-9]/gi, "-").toLowerCase();
|
|
389
|
-
return path.join(this.assetsDir, `${name}.${safeKey}${ext}`);
|
|
390
|
-
}
|
|
391
|
-
|
|
392
|
-
clear(): void {
|
|
393
|
-
if (fs.existsSync(this.assetsDir)) {
|
|
394
|
-
fs.rmSync(this.assetsDir, { recursive: true, force: true });
|
|
395
|
-
}
|
|
396
|
-
}
|
|
397
|
-
|
|
398
|
-
async flush() {
|
|
399
|
-
await backgroundQueue.flush();
|
|
400
|
-
}
|
|
401
|
-
}
|
|
402
|
-
|
|
403
|
-
/**
|
|
404
|
-
* Flushes all pending background cache operations.
|
|
405
|
-
*/
|
|
406
|
-
export async function flushCache() {
|
|
407
|
-
await backgroundQueue.flush();
|
|
408
|
-
}
|
|
1
|
+
import fs from "fs";
import path from "path";
import crypto from "crypto";
import zlib from "zlib";
import { promisify } from "util";
import { getFileMtime } from "./utils";

// Promisified fs primitives used by the background persistence tasks below.
// NOTE(review): fs.promises exposes these directly; promisify kept as-is.
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdir = promisify(fs.mkdir);
const rename = promisify(fs.rename);
// NOTE(review): `unlink` is promisified but never used in this file — candidate for removal.
const unlink = promisify(fs.unlink);

/**
 * Configuration constants for the caching system.
 */
// Root cache directory, overridable via BOLTDOCS_CACHE_DIR.
const CACHE_DIR = process.env.BOLTDOCS_CACHE_DIR || ".boltdocs";
// Subdirectory for processed assets (used by AssetCache).
const ASSETS_DIR = "assets";
// Subdirectory for per-entry shard files (used by TransformCache).
const SHARDS_DIR = "shards";

/**
 * Default limits for the caching system.
 */
// Max entries held in the in-memory LRU.
// NOTE(review): parseInt yields NaN if the env var is non-numeric — confirm
// downstream size comparisons tolerate that.
const DEFAULT_LRU_LIMIT = parseInt(
  process.env.BOLTDOCS_CACHE_LRU_LIMIT || "2000",
  10,
);
// Gzip persistence is on unless explicitly disabled with BOLTDOCS_CACHE_COMPRESS=0.
const DEFAULT_COMPRESS = process.env.BOLTDOCS_CACHE_COMPRESS !== "0";
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Simple LRU cache implementation to prevent memory leaks.
|
|
32
|
+
*/
|
|
33
|
+
class LRUCache<K, V> {
|
|
34
|
+
private cache = new Map<K, V>();
|
|
35
|
+
constructor(private limit: number) {}
|
|
36
|
+
|
|
37
|
+
get(key: K): V | undefined {
|
|
38
|
+
const val = this.cache.get(key);
|
|
39
|
+
if (val !== undefined) {
|
|
40
|
+
this.cache.delete(key);
|
|
41
|
+
this.cache.set(key, val);
|
|
42
|
+
}
|
|
43
|
+
return val;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
set(key: K, value: V): void {
|
|
47
|
+
if (this.cache.has(key)) {
|
|
48
|
+
this.cache.delete(key);
|
|
49
|
+
} else if (this.cache.size >= this.limit) {
|
|
50
|
+
const firstKey = this.cache.keys().next().value;
|
|
51
|
+
if (firstKey !== undefined) {
|
|
52
|
+
this.cache.delete(firstKey);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
this.cache.set(key, value);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
get size() {
|
|
59
|
+
return this.cache.size;
|
|
60
|
+
}
|
|
61
|
+
clear() {
|
|
62
|
+
this.cache.clear();
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Simple background task queue to prevent blocking the main thread during IO.
|
|
68
|
+
*/
|
|
69
|
+
class BackgroundQueue {
|
|
70
|
+
private queue: Promise<any> = Promise.resolve();
|
|
71
|
+
private pendingCount = 0;
|
|
72
|
+
|
|
73
|
+
add(task: () => Promise<any>) {
|
|
74
|
+
this.pendingCount++;
|
|
75
|
+
this.queue = this.queue.then(task).finally(() => {
|
|
76
|
+
this.pendingCount--;
|
|
77
|
+
});
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
async flush() {
|
|
81
|
+
await this.queue;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
get pending() {
|
|
85
|
+
return this.pendingCount;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Module-wide singleton queue shared by every cache instance, so all
// background disk writes across FileCache/TransformCache/AssetCache are
// serialized in FIFO order and drained together by flushCache().
const backgroundQueue = new BackgroundQueue();
|
|
90
|
+
|
|
91
|
+
/**
 * Generic file-based cache with per-file granularity and asynchronous persistence.
 *
 * Entries are keyed by source file path and validated by mtime: an entry is
 * stale as soon as getFileMtime(filePath) no longer matches the value
 * recorded at set() time.
 */
export class FileCache<T> {
  // filePath -> { cached payload, source mtime captured at set() time }
  private entries = new Map<string, { data: T; mtime: number }>();
  // Snapshot location on disk; stays null (memory-only) unless a name is given.
  private readonly cachePath: string | null = null;
  // Whether the on-disk snapshot is gzipped (also selects the file extension).
  private readonly compress: boolean;

  /**
   * @param options.name     Basename for the on-disk snapshot; omit for a memory-only cache.
   * @param options.root     Directory the cache dir is resolved against (defaults to process.cwd()).
   * @param options.compress Overrides gzip persistence (defaults to DEFAULT_COMPRESS).
   */
  constructor(
    options: { name?: string; root?: string; compress?: boolean } = {},
  ) {
    this.compress =
      options.compress !== undefined ? options.compress : DEFAULT_COMPRESS;
    if (options.name) {
      const root = options.root || process.cwd();
      const ext = this.compress ? "json.gz" : "json";
      this.cachePath = path.resolve(root, CACHE_DIR, `${options.name}.${ext}`);
    }
  }

  /**
   * Loads the cache. Synchronous for startup simplicity but uses fast I/O.
   * Corrupt or unreadable snapshots are silently treated as a cold start.
   */
  load(): void {
    if (process.env.BOLTDOCS_NO_CACHE === "1") return;
    if (!this.cachePath || !fs.existsSync(this.cachePath)) return;

    try {
      let raw = fs.readFileSync(this.cachePath);
      // Compression is detected from the extension, not this.compress, so a
      // snapshot written under a different setting still loads.
      if (this.cachePath.endsWith(".gz")) {
        raw = zlib.gunzipSync(raw);
      }
      const data = JSON.parse(raw.toString("utf-8"));
      this.entries = new Map(Object.entries(data));
    } catch (e) {
      // Fallback: ignore cache errors
    }
  }

  /**
   * Saves the cache in the background.
   * The snapshot is serialized synchronously, so mutations made after save()
   * returns are not included; the disk write is atomic (temp file + rename)
   * and errors are swallowed inside the queued task.
   */
  save(): void {
    if (process.env.BOLTDOCS_NO_CACHE === "1") return;
    if (!this.cachePath) return;

    // Snapshot now so later mutations don't leak into this write.
    const data = Object.fromEntries(this.entries);
    const content = JSON.stringify(data);
    const target = this.cachePath;
    const useCompress = this.compress;

    backgroundQueue.add(async () => {
      try {
        await mkdir(path.dirname(target), { recursive: true });
        let buffer = Buffer.from(content);
        if (useCompress) {
          buffer = zlib.gzipSync(buffer);
        }
        // Atomic: write to a random temp name, then rename over the target.
        const tempPath = `${target}.${crypto.randomBytes(4).toString("hex")}.tmp`;
        await writeFile(tempPath, buffer);
        await rename(tempPath, target);
      } catch (e) {
        // Fallback: critical error logging skipped for performance
      }
    });
  }

  /** Returns the cached payload, or null when missing or the file changed. */
  get(filePath: string): T | null {
    const entry = this.entries.get(filePath);
    if (!entry) return null;
    if (getFileMtime(filePath) !== entry.mtime) return null;
    return entry.data;
  }

  /** Records the payload together with the file's current mtime. */
  set(filePath: string, data: T): void {
    this.entries.set(filePath, {
      data,
      mtime: getFileMtime(filePath),
    });
  }

  /** True when an entry exists and its recorded mtime still matches. */
  isValid(filePath: string): boolean {
    const entry = this.entries.get(filePath);
    if (!entry) return false;
    return getFileMtime(filePath) === entry.mtime;
  }

  /** Drops a single entry (memory only until the next save()). */
  invalidate(filePath: string): void {
    this.entries.delete(filePath);
  }

  /** Drops every entry (memory only; the on-disk snapshot is untouched). */
  invalidateAll(): void {
    this.entries.clear();
  }

  /** Removes entries whose source file is no longer part of the project. */
  pruneStale(currentFiles: Set<string>): void {
    for (const key of this.entries.keys()) {
      if (!currentFiles.has(key)) {
        this.entries.delete(key);
      }
    }
  }

  get size(): number {
    return this.entries.size;
  }

  /** Waits for all queued background writes (the queue is shared module-wide). */
  async flush() {
    await backgroundQueue.flush();
  }
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Sharded Cache: Optimized for large-scale data (like MDX transformations).
|
|
205
|
+
* Uses a memory index and individual files for each entry to avoid massive JSON parsing.
|
|
206
|
+
*/
|
|
207
|
+
export class TransformCache {
|
|
208
|
+
private index = new Map<string, string>(); // key -> hash
|
|
209
|
+
private memoryCache = new LRUCache<string, string>(DEFAULT_LRU_LIMIT);
|
|
210
|
+
private readonly baseDir: string;
|
|
211
|
+
private readonly shardsDir: string;
|
|
212
|
+
private readonly indexPath: string;
|
|
213
|
+
|
|
214
|
+
constructor(name: string, root: string = process.cwd()) {
|
|
215
|
+
this.baseDir = path.resolve(root, CACHE_DIR, `transform-${name}`);
|
|
216
|
+
this.shardsDir = path.resolve(this.baseDir, SHARDS_DIR);
|
|
217
|
+
this.indexPath = path.resolve(this.baseDir, "index.json");
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
/**
|
|
221
|
+
* Loads the index into memory.
|
|
222
|
+
*/
|
|
223
|
+
load(): void {
|
|
224
|
+
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
225
|
+
if (!fs.existsSync(this.indexPath)) return;
|
|
226
|
+
|
|
227
|
+
try {
|
|
228
|
+
const data = fs.readFileSync(this.indexPath, "utf-8");
|
|
229
|
+
this.index = new Map(Object.entries(JSON.parse(data)));
|
|
230
|
+
} catch (e) {
|
|
231
|
+
// Index might be corrupt, ignore
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
/**
|
|
236
|
+
* Persists the index in background.
|
|
237
|
+
*/
|
|
238
|
+
save(): void {
|
|
239
|
+
if (process.env.BOLTDOCS_NO_CACHE === "1") return;
|
|
240
|
+
const data = JSON.stringify(Object.fromEntries(this.index));
|
|
241
|
+
const target = this.indexPath;
|
|
242
|
+
|
|
243
|
+
backgroundQueue.add(async () => {
|
|
244
|
+
await mkdir(path.dirname(target), { recursive: true });
|
|
245
|
+
await writeFile(target, data);
|
|
246
|
+
});
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
/**
|
|
250
|
+
* Batch Read: Retrieves multiple transformation results concurrently.
|
|
251
|
+
*/
|
|
252
|
+
async getMany(keys: string[]): Promise<Map<string, string>> {
|
|
253
|
+
const results = new Map<string, string>();
|
|
254
|
+
const toLoad: string[] = [];
|
|
255
|
+
|
|
256
|
+
for (const key of keys) {
|
|
257
|
+
const mem = this.memoryCache.get(key);
|
|
258
|
+
if (mem) results.set(key, mem);
|
|
259
|
+
else if (this.index.has(key)) toLoad.push(key);
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
if (toLoad.length > 0) {
|
|
263
|
+
const shards = await Promise.all(
|
|
264
|
+
toLoad.map(async (key) => {
|
|
265
|
+
const hash = this.index.get(key)!;
|
|
266
|
+
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
267
|
+
try {
|
|
268
|
+
const compressed = await readFile(shardPath);
|
|
269
|
+
const decompressed = zlib.gunzipSync(compressed).toString("utf-8");
|
|
270
|
+
this.memoryCache.set(key, decompressed);
|
|
271
|
+
return { key, val: decompressed };
|
|
272
|
+
} catch (e) {
|
|
273
|
+
return null;
|
|
274
|
+
}
|
|
275
|
+
}),
|
|
276
|
+
);
|
|
277
|
+
|
|
278
|
+
for (const s of shards) {
|
|
279
|
+
if (s) results.set(s.key, s.val);
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
return results;
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
/**
|
|
287
|
+
* Retrieves a cached transformation. Fast lookup via index, lazy loading from disk.
|
|
288
|
+
*/
|
|
289
|
+
get(key: string): string | null {
|
|
290
|
+
// 1. Check memory first (LRU)
|
|
291
|
+
const mem = this.memoryCache.get(key);
|
|
292
|
+
if (mem) return mem;
|
|
293
|
+
|
|
294
|
+
// 2. Check index
|
|
295
|
+
const hash = this.index.get(key);
|
|
296
|
+
if (!hash) return null;
|
|
297
|
+
|
|
298
|
+
// 3. Load from shard (synchronous read for Vite's transform hook compatibility)
|
|
299
|
+
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
300
|
+
if (!fs.existsSync(shardPath)) return null;
|
|
301
|
+
|
|
302
|
+
try {
|
|
303
|
+
const compressed = fs.readFileSync(shardPath);
|
|
304
|
+
const decompressed = zlib.gunzipSync(compressed).toString("utf-8");
|
|
305
|
+
this.memoryCache.set(key, decompressed);
|
|
306
|
+
return decompressed;
|
|
307
|
+
} catch (e) {
|
|
308
|
+
return null;
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
/**
|
|
313
|
+
* Stores a transformation result.
|
|
314
|
+
*/
|
|
315
|
+
set(key: string, result: string): void {
|
|
316
|
+
const hash = crypto.createHash("md5").update(result).digest("hex");
|
|
317
|
+
this.index.set(key, hash);
|
|
318
|
+
this.memoryCache.set(key, result);
|
|
319
|
+
|
|
320
|
+
const shardPath = path.resolve(this.shardsDir, `${hash}.gz`);
|
|
321
|
+
|
|
322
|
+
// Background write shard
|
|
323
|
+
backgroundQueue.add(async () => {
|
|
324
|
+
if (fs.existsSync(shardPath)) return; // Already exists
|
|
325
|
+
await mkdir(this.shardsDir, { recursive: true });
|
|
326
|
+
|
|
327
|
+
const compressed = zlib.gzipSync(Buffer.from(result));
|
|
328
|
+
const tempPath = `${shardPath}.${crypto.randomBytes(4).toString("hex")}.tmp`;
|
|
329
|
+
await writeFile(tempPath, compressed);
|
|
330
|
+
await rename(tempPath, shardPath);
|
|
331
|
+
});
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
get size() {
|
|
335
|
+
return this.index.size;
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
async flush() {
|
|
339
|
+
await backgroundQueue.flush();
|
|
340
|
+
}
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
/**
 * Specialized cache for processed assets (e.g., optimized images).
 *
 * Entries are content-addressed: the cached filename embeds the caller's
 * cacheKey plus an md5 of the source file contents, so a changed source can
 * never match a stale artifact.
 */
export class AssetCache {
  // Directory holding every cached asset file.
  private readonly assetsDir: string;

  constructor(root: string = process.cwd()) {
    this.assetsDir = path.resolve(root, CACHE_DIR, ASSETS_DIR);
  }

  // md5 of the file contents; reads the whole file synchronously.
  private getFileHash(filePath: string): string {
    return crypto
      .createHash("md5")
      .update(fs.readFileSync(filePath))
      .digest("hex");
  }

  /**
   * Returns the path of the cached artifact for (sourcePath, cacheKey),
   * or null when the source is missing or no up-to-date artifact exists.
   */
  get(sourcePath: string, cacheKey: string): string | null {
    if (!fs.existsSync(sourcePath)) return null;
    const sourceHash = this.getFileHash(sourcePath);
    const cachedPath = this.getCachedPath(
      sourcePath,
      `${cacheKey}-${sourceHash}`,
    );
    return fs.existsSync(cachedPath) ? cachedPath : null;
  }

  /**
   * Stores processed content for (sourcePath, cacheKey) in the background.
   * The write is atomic (temp file + rename).
   * NOTE(review): unlike FileCache.save, the queued task here has no
   * try/catch — a disk error surfaces as a rejection inside the shared
   * background queue; confirm that is intended.
   */
  set(sourcePath: string, cacheKey: string, content: Buffer | string): void {
    const sourceHash = this.getFileHash(sourcePath);
    const cachedPath = this.getCachedPath(
      sourcePath,
      `${cacheKey}-${sourceHash}`,
    );

    backgroundQueue.add(async () => {
      await mkdir(this.assetsDir, { recursive: true });
      const tempPath = `${cachedPath}.${crypto.randomBytes(4).toString("hex")}.tmp`;
      await writeFile(tempPath, content);
      await rename(tempPath, cachedPath);
    });
  }

  // Builds "<name>.<sanitized key><ext>" under assetsDir; each character of
  // the key that is not alphanumeric is replaced with "-", then lowercased.
  private getCachedPath(sourcePath: string, cacheKey: string): string {
    const ext = path.extname(sourcePath);
    const name = path.basename(sourcePath, ext);
    const safeKey = cacheKey.replace(/[^a-z0-9]/gi, "-").toLowerCase();
    return path.join(this.assetsDir, `${name}.${safeKey}${ext}`);
  }

  /** Deletes the entire assets cache directory. Synchronous. */
  clear(): void {
    if (fs.existsSync(this.assetsDir)) {
      fs.rmSync(this.assetsDir, { recursive: true, force: true });
    }
  }

  /** Waits for all queued background writes (the queue is shared module-wide). */
  async flush() {
    await backgroundQueue.flush();
  }
}
|
|
402
|
+
|
|
403
|
+
/**
 * Flushes all pending background cache operations.
 *
 * Await this before process exit (e.g. at the end of a build) to guarantee
 * every queued disk write — index snapshots, shards, assets — has settled.
 */
export async function flushCache() {
  await backgroundQueue.flush();
}
|