mark-deco 0.28.0 → 0.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/browser.cjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
package/dist/browser.d.ts CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  /**
package/dist/browser.mjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { createMutex } from "async-primitives";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  /**
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { f as fetchOEmbedData, b as generateHtml, C as CORSError } from "./html-generator-CClcgkAK.js";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { f as fetchText, a as isCORSError, c as createTimeoutSignal, b as combineAbortSignals, d as fetchJson, i as isBrowser } from "./utils-B06SsBEd.js";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import * as cheerio from "cheerio";
package/dist/index.cjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
package/dist/index.d.ts CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { HTMLBeautifyOptions } from 'js-beautify';
package/dist/index.mjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import __screwUpDefaultImportModule0$2 from "js-beautify";
package/dist/internal.cjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import * as cheerio_2 from 'cheerio';
package/dist/internal.mjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { c, e, a, b, f, d, g, h, i, r, t } from "./html-generator-DOlAM9Ff.js";
package/dist/misc.cjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
package/dist/misc.d.ts CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  import { load } from 'cheerio';
package/dist/misc.mjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  const resolveUrl = (url, baseUrl) => {
package/dist/node.cjs CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  "use strict";
@@ -47,13 +47,12 @@ const generateFileHash = async (input) => {
  throw new Error(`Failed to generate hash: ${error}`);
  }
  };
- const createFileSystemCacheStorage = (cacheDir, options = {}) => {
+ const createFileSystemCacheStorage = (cacheDir) => {
  if (utils.isBrowser()) {
  throw new Error(
  "File system cache is only available in Node.js environment, not in browsers"
  );
  }
- const { enableCompression = true } = options;
  const mutex = asyncPrimitives.createMutex();
  let gzipAsync = null;
  let gunzipAsync = null;
@@ -69,17 +68,12 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  const hash = await generateFileHash(key);
  return hash;
  };
- const getPlainFileName = (baseName) => `${baseName}.json`;
  const getCompressedFileName = (baseName) => `${baseName}.json.gz`;
- const readCacheEntry = async (filePath, compressed) => {
- if (compressed) {
- await ensureCompression();
- const buffer = await promises.readFile(filePath);
- const unzipped = await gunzipAsync(buffer);
- return JSON.parse(unzipped.toString("utf-8"));
- }
- const content = await promises.readFile(filePath, "utf-8");
- return JSON.parse(content);
+ const readCacheEntry = async (filePath) => {
+ await ensureCompression();
+ const buffer = await promises.readFile(filePath);
+ const unzipped = await gunzipAsync(buffer);
+ return JSON.parse(unzipped.toString("utf-8"));
  };
  const isMissingFileError = (error) => typeof error === "object" && error !== null && "code" in error && error.code === "ENOENT";
  const ensureCacheDir = async () => {
@@ -93,7 +87,6 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  };
  const get = async (key) => {
  const baseName = await generateFileBaseName(key);
- const plainFilePath = path.join(cacheDir, getPlainFileName(baseName));
  const compressedFilePath = path.join(cacheDir, getCompressedFileName(baseName));
  try {
  await ensureCacheDir();
@@ -101,45 +94,16 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  throw new Error(`Failed to ensure cache directory: ${error}`);
  }
  let entry;
- let filePath;
- if (enableCompression) {
- try {
- entry = await readCacheEntry(compressedFilePath, true);
- filePath = compressedFilePath;
- } catch (error) {
- if (!isMissingFileError(error)) {
- try {
- await promises.unlink(compressedFilePath);
- } catch (e) {
- }
- return null;
- }
+ try {
+ entry = await readCacheEntry(compressedFilePath);
+ } catch (error) {
+ if (!isMissingFileError(error)) {
  try {
- entry = await readCacheEntry(plainFilePath, false);
- filePath = plainFilePath;
- } catch (plainError) {
- if (!isMissingFileError(plainError)) {
- try {
- await promises.unlink(plainFilePath);
- } catch (e) {
- }
- }
- return null;
- }
- }
- } else {
- try {
- entry = await readCacheEntry(plainFilePath, false);
- filePath = plainFilePath;
- } catch (error) {
- if (!isMissingFileError(error)) {
- try {
- await promises.unlink(plainFilePath);
- } catch (e) {
- }
+ await promises.unlink(compressedFilePath);
+ } catch (e) {
  }
- return null;
  }
+ return null;
  }
  if (entry.ttl !== void 0) {
  const isExpired = entry.ttl === 0 || Date.now() > entry.timestamp + entry.ttl;
@@ -149,18 +113,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  const currentTime = Date.now();
  const stillExpired = entry.ttl === 0 || currentTime > entry.timestamp + entry.ttl;
  if (stillExpired) {
- await promises.unlink(filePath);
- if (filePath === compressedFilePath) {
- try {
- await promises.unlink(plainFilePath);
- } catch (e) {
- }
- } else if (filePath === plainFilePath) {
- try {
- await promises.unlink(compressedFilePath);
- } catch (e) {
- }
- }
+ await promises.unlink(compressedFilePath);
  }
  return null;
  } catch (e) {
@@ -174,7 +127,6 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  };
  const set = async (key, value, ttl) => {
  const baseName = await generateFileBaseName(key);
- const plainFilePath = path.join(cacheDir, getPlainFileName(baseName));
  const compressedFilePath = path.join(cacheDir, getCompressedFileName(baseName));
  const entry = {
  data: value,
@@ -184,32 +136,32 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  entry.ttl = ttl;
  }
  const serialized = JSON.stringify(entry, null, 2);
- let payload;
- if (enableCompression) {
- await ensureCompression();
- payload = await gzipAsync(serialized);
- } else {
- payload = serialized;
- }
+ await ensureCompression();
+ const payload = await gzipAsync(serialized);
  const lockHandle = await mutex.lock();
  try {
  await ensureCacheDir();
- const filePath = enableCompression ? compressedFilePath : plainFilePath;
+ const tempSuffix = `${process.pid}-${Date.now()}-${crypto.randomBytes(6).toString("hex")}`;
+ const tempFilePath = path.join(
+ cacheDir,
+ `${baseName}.json.gz.tmp-${tempSuffix}`
+ );
  try {
- if (enableCompression) {
- await promises.writeFile(filePath, payload);
- try {
- await promises.unlink(plainFilePath);
- } catch (e) {
- }
- } else {
- await promises.writeFile(filePath, payload, "utf-8");
+ await promises.writeFile(tempFilePath, payload);
+ try {
+ await promises.rename(tempFilePath, compressedFilePath);
+ } catch (e) {
  try {
  await promises.unlink(compressedFilePath);
- } catch (e) {
+ } catch (e2) {
  }
+ await promises.rename(tempFilePath, compressedFilePath);
  }
  } catch (error) {
+ try {
+ await promises.unlink(tempFilePath);
+ } catch (e) {
+ }
  throw new Error(`Failed to write cache entry: ${error}`);
  }
  } catch (importError) {
@@ -220,15 +172,10 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  };
  const deleteEntry = async (key) => {
  const baseName = await generateFileBaseName(key);
- const plainFilePath = path.join(cacheDir, getPlainFileName(baseName));
  const compressedFilePath = path.join(cacheDir, getCompressedFileName(baseName));
  const lockHandle = await mutex.lock();
  try {
  await ensureCacheDir();
- try {
- await promises.unlink(plainFilePath);
- } catch (e) {
- }
  try {
  await promises.unlink(compressedFilePath);
  } catch (e) {
@@ -245,7 +192,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  await ensureCacheDir();
  const files = await promises.readdir(cacheDir);
  const cacheFiles = files.filter(
- (file) => file.endsWith(".json") || file.endsWith(".json.gz")
+ (file) => file.endsWith(".json.gz")
  );
  for (const file of cacheFiles) {
  const filePath = path.join(cacheDir, file);
@@ -264,7 +211,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  await ensureCacheDir();
  const files = await promises.readdir(cacheDir);
  const cacheFiles = files.filter(
- (file) => file.endsWith(".json") || file.endsWith(".json.gz")
+ (file) => file.endsWith(".json.gz")
  );
  if (cacheFiles.length === 0) {
  return 0;
@@ -275,9 +222,8 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  let validCount = 0;
  for (const file of cacheFiles) {
  const filePath = path.join(cacheDir, file);
- const isCompressed = file.endsWith(".json.gz");
  try {
- const entry = await readCacheEntry(filePath, isCompressed);
+ const entry = await readCacheEntry(filePath);
  if (entry.ttl !== void 0) {
  const isExpired = entry.ttl === 0 || now > entry.timestamp + entry.ttl;
  if (isExpired) {
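
Note: beyond the version banner, the node.cjs hunks above change the file system cache in two ways. Cache entries are now always written as gzip-compressed .json.gz files (the plain .json format and the enableCompression branch are gone), and set() now writes to a uniquely named temporary file and renames it into place, so readers do not see a partially written entry. A minimal standalone sketch of that temp-file-then-rename pattern, using only the Node fs/promises and crypto calls visible in the diff (the helper name is illustrative, not part of mark-deco's API):

```typescript
// Sketch: write-to-temp-then-rename, as introduced in set() above.
// "atomicWriteFile" is an illustrative name, not a mark-deco export.
import { writeFile, rename, unlink } from "fs/promises";
import { randomBytes } from "crypto";

const atomicWriteFile = async (targetPath: string, payload: Buffer): Promise<void> => {
  // Unique temp name so concurrent writers never collide on the same file.
  const tempPath = `${targetPath}.tmp-${process.pid}-${Date.now()}-${randomBytes(6).toString("hex")}`;
  await writeFile(tempPath, payload);
  try {
    await rename(tempPath, targetPath);
  } catch {
    // Renaming over an existing file can fail on some platforms;
    // mirror the diff's fallback: unlink the target, then rename again.
    try {
      await unlink(targetPath);
    } catch {
      // Ignore: the target may simply not exist.
    }
    await rename(tempPath, targetPath);
  }
};
```

The shipped implementation additionally removes the temporary file and rethrows if the initial write fails, and serializes writers behind a mutex, as the hunks above show.
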
package/dist/node.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"node.cjs","sources":["../src/cache/filesystem.ts"],"sourcesContent":["// mark-deco - Flexible Markdown to HTML conversion library\n// Copyright (c) Kouji Matsui. (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/mark-deco\n\nimport { mkdir, readdir, readFile, unlink, writeFile } from 'fs/promises';\nimport { join } from 'path';\nimport { createHash } from 'crypto';\nimport { createMutex } from 'async-primitives';\nimport { promisify } from 'util';\n\nimport type { CacheStorage, CacheEntry } from './index';\nimport { isBrowser } from '../utils';\n\n/**\n * Generate SHA-256 hash for cache key using Node.js crypto module\n * Provides better collision resistance than simple hash algorithms\n * @param input - Input string to hash\n * @returns Promise resolving to hexadecimal hash string\n */\nconst generateFileHash = async (input: string): Promise<string> => {\n try {\n const hash = createHash('sha256');\n hash.update(input, 'utf8');\n return hash.digest('hex');\n } catch (error) {\n throw new Error(`Failed to generate hash: ${error}`);\n }\n};\n\nexport interface FileSystemCacheOptions {\n /** Enable gzip compression for cache files (default: true) */\n enableCompression?: boolean;\n}\n\n/**\n * Create file system-based cache storage instance\n * Uses Node.js file system to store cache entries as JSON files\n * @param cacheDir - Directory path to store cache files (will be created if it doesn't exist)\n * @param options - File system cache options (optional)\n * @returns FileSystemCache instance that uses file system\n * @throws Error if file system operations fail or if not running in Node.js environment\n */\nexport const createFileSystemCacheStorage = (\n cacheDir: string,\n options: FileSystemCacheOptions = {}\n): CacheStorage => {\n // Check if we're in a browser environment\n if (isBrowser()) {\n throw new Error(\n 'File system cache is only available in Node.js environment, not in browsers'\n );\n }\n\n const { enableCompression = true } = options;\n\n const mutex = createMutex();\n let gzipAsync: ((input: string | Buffer) => Promise<Buffer>) | null = null;\n let gunzipAsync: ((input: Buffer) => Promise<Buffer>) | null = null;\n\n const ensureCompression = async (): Promise<void> => {\n if (gzipAsync && gunzipAsync) {\n return;\n }\n\n const { gzip, gunzip } = await import('zlib');\n gzipAsync = promisify(gzip);\n gunzipAsync = promisify(gunzip);\n };\n\n /**\n * Generate safe file name from cache key using hash\n */\n const generateFileBaseName = async (key: string): Promise<string> => {\n const hash = await generateFileHash(key);\n return hash;\n };\n\n const getPlainFileName = (baseName: string): string => `${baseName}.json`;\n const getCompressedFileName = (baseName: string): string =>\n `${baseName}.json.gz`;\n\n const readCacheEntry = async (\n filePath: string,\n compressed: boolean\n ): Promise<CacheEntry> => {\n if (compressed) {\n await ensureCompression();\n const buffer = await readFile(filePath);\n const unzipped = await gunzipAsync!(buffer);\n return JSON.parse(unzipped.toString('utf-8'));\n }\n\n const content = await readFile(filePath, 'utf-8');\n return JSON.parse(content);\n };\n\n const isMissingFileError = (error: unknown): boolean =>\n typeof error === 'object' &&\n error !== null &&\n 'code' in error &&\n (error as { code?: string }).code === 'ENOENT';\n\n /**\n * Ensure cache directory exists\n * This operation is idempotent and safe to call concurrently\n */\n const ensureCacheDir = async (): Promise<void> => {\n try {\n 
await mkdir(cacheDir, { recursive: true });\n } catch (error: unknown) {\n // Ignore EEXIST errors since directory already exists\n if ((error as { code?: string }).code !== 'EEXIST') {\n throw new Error(`Failed to create cache directory: ${error}`);\n }\n }\n };\n\n const get = async (key: string): Promise<string | null> => {\n // Pre-compute file name outside of lock (pure function)\n const baseName = await generateFileBaseName(key);\n const plainFilePath = join(cacheDir, getPlainFileName(baseName));\n const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n\n // Ensure cache directory exists (idempotent operation)\n try {\n await ensureCacheDir();\n } catch (error) {\n throw new Error(`Failed to ensure cache directory: ${error}`);\n }\n\n let entry: CacheEntry;\n let filePath: string;\n\n if (enableCompression) {\n try {\n entry = await readCacheEntry(compressedFilePath, true);\n filePath = compressedFilePath;\n } catch (error) {\n if (!isMissingFileError(error)) {\n try {\n await unlink(compressedFilePath);\n } catch {\n // Ignore cleanup errors\n }\n return null;\n }\n\n try {\n entry = await readCacheEntry(plainFilePath, false);\n filePath = plainFilePath;\n } catch (plainError) {\n if (!isMissingFileError(plainError)) {\n try {\n await unlink(plainFilePath);\n } catch {\n // Ignore cleanup errors\n }\n }\n return null;\n }\n }\n } else {\n try {\n entry = await readCacheEntry(plainFilePath, false);\n filePath = plainFilePath;\n } catch (error) {\n if (!isMissingFileError(error)) {\n try {\n await unlink(plainFilePath);\n } catch {\n // Ignore cleanup errors\n }\n }\n return null;\n }\n }\n\n // Check TTL expiration - only lock if we need to delete expired file\n if (entry.ttl !== undefined) {\n const isExpired =\n entry.ttl === 0 || Date.now() > entry.timestamp + entry.ttl;\n if (isExpired) {\n // Lock only for the deletion operation\n const lockHandle = await mutex.lock();\n try {\n // Double-check expiration under lock to avoid race conditions\n const currentTime = Date.now();\n const stillExpired =\n entry.ttl === 0 || currentTime > entry.timestamp + entry.ttl;\n if (stillExpired) {\n await unlink(filePath);\n if (filePath === compressedFilePath) {\n try {\n await unlink(plainFilePath);\n } catch {\n // Ignore cleanup errors\n }\n } else if (filePath === plainFilePath) {\n try {\n await unlink(compressedFilePath);\n } catch {\n // Ignore cleanup errors\n }\n }\n }\n return null;\n } catch {\n // File might have been deleted by another process, ignore\n return null;\n } finally {\n lockHandle.release();\n }\n }\n }\n\n return entry.data;\n };\n\n const set = async (\n key: string,\n value: string,\n ttl?: number\n ): Promise<void> => {\n // Pre-compute everything possible outside of lock\n const baseName = await generateFileBaseName(key);\n const plainFilePath = join(cacheDir, getPlainFileName(baseName));\n const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n const entry: CacheEntry = {\n data: value,\n timestamp: Date.now(),\n };\n\n if (ttl !== undefined) {\n entry.ttl = ttl;\n }\n\n const serialized = JSON.stringify(entry, null, 2);\n let payload: string | Buffer;\n if (enableCompression) {\n await ensureCompression();\n payload = await gzipAsync!(serialized);\n } else {\n payload = serialized;\n }\n\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n\n const filePath = enableCompression ? 
compressedFilePath : plainFilePath;\n\n try {\n // For cache systems, last-write-wins is often acceptable\n // We minimize lock time by only protecting the actual write operation\n if (enableCompression) {\n await writeFile(filePath, payload);\n try {\n await unlink(plainFilePath);\n } catch {\n // Ignore cleanup errors\n }\n } else {\n await writeFile(filePath, payload, 'utf-8');\n try {\n await unlink(compressedFilePath);\n } catch {\n // Ignore cleanup errors\n }\n }\n } catch (error) {\n throw new Error(`Failed to write cache entry: ${error}`);\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const deleteEntry = async (key: string): Promise<void> => {\n // Pre-compute file name outside of lock\n const baseName = await generateFileBaseName(key);\n const plainFilePath = join(cacheDir, getPlainFileName(baseName));\n const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n\n try {\n await unlink(plainFilePath);\n } catch {\n // File doesn't exist, ignore the error\n }\n\n try {\n await unlink(compressedFilePath);\n } catch {\n // File doesn't exist, ignore the error\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const clear = async (): Promise<void> => {\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n const files = await readdir(cacheDir);\n\n // Filter files outside the deletion loop for better performance\n const cacheFiles = files.filter(\n (file: string) => file.endsWith('.json') || file.endsWith('.json.gz')\n );\n\n for (const file of cacheFiles) {\n const filePath = join(cacheDir, file);\n try {\n await unlink(filePath);\n } catch {\n // Ignore individual file deletion errors\n }\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const size = async (): Promise<number> => {\n await ensureCacheDir();\n\n // Get file list without lock first\n const files = await readdir(cacheDir);\n const cacheFiles = files.filter(\n (file: string) => file.endsWith('.json') || file.endsWith('.json.gz')\n );\n\n if (cacheFiles.length === 0) {\n return 0;\n }\n\n // Clean up expired entries with lock\n const lockHandle = await mutex.lock();\n try {\n const now = Date.now();\n let validCount = 0;\n\n for (const file of cacheFiles) {\n const filePath = join(cacheDir, file);\n const isCompressed = file.endsWith('.json.gz');\n try {\n const entry = await readCacheEntry(filePath, isCompressed);\n\n // Check if entry is expired\n if (entry.ttl !== undefined) {\n const isExpired =\n entry.ttl === 0 || now > entry.timestamp + entry.ttl;\n if (isExpired) {\n await unlink(filePath);\n continue; // Don't count this file\n }\n }\n validCount++;\n } catch {\n // If we can't read or parse the file, delete it\n try {\n await unlink(filePath);\n } catch {\n // Ignore unlink errors\n }\n }\n }\n\n return validCount;\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n return {\n get,\n set,\n delete: deleteEntry,\n clear,\n size,\n 
};\n};\n"],"names":["createHash","isBrowser","createMutex","promisify","readFile","mkdir","join","unlink","writeFile","readdir","e"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoBA,MAAM,mBAAmB,OAAO,UAAmC;AACjE,MAAI;AACF,UAAM,OAAOA,OAAAA,WAAW,QAAQ;AAChC,SAAK,OAAO,OAAO,MAAM;AACzB,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,4BAA4B,KAAK,EAAE;AAAA,EACrD;AACF;AAeO,MAAM,+BAA+B,CAC1C,UACA,UAAkC,OACjB;AAEjB,MAAIC,MAAAA,aAAa;AACf,UAAM,IAAI;AAAA,MACR;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,EAAE,oBAAoB,KAAA,IAAS;AAErC,QAAM,QAAQC,gBAAAA,YAAA;AACd,MAAI,YAAkE;AACtE,MAAI,cAA2D;AAE/D,QAAM,oBAAoB,YAA2B;AACnD,QAAI,aAAa,aAAa;AAC5B;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,WAAW,MAAM,OAAO,MAAM;AAC5C,gBAAYC,KAAAA,UAAU,IAAI;AAC1B,kBAAcA,KAAAA,UAAU,MAAM;AAAA,EAChC;AAKA,QAAM,uBAAuB,OAAO,QAAiC;AACnE,UAAM,OAAO,MAAM,iBAAiB,GAAG;AACvC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAmB,CAAC,aAA6B,GAAG,QAAQ;AAClE,QAAM,wBAAwB,CAAC,aAC7B,GAAG,QAAQ;AAEb,QAAM,iBAAiB,OACrB,UACA,eACwB;AACxB,QAAI,YAAY;AACd,YAAM,kBAAA;AACN,YAAM,SAAS,MAAMC,SAAAA,SAAS,QAAQ;AACtC,YAAM,WAAW,MAAM,YAAa,MAAM;AAC1C,aAAO,KAAK,MAAM,SAAS,SAAS,OAAO,CAAC;AAAA,IAC9C;AAEA,UAAM,UAAU,MAAMA,kBAAS,UAAU,OAAO;AAChD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B;AAEA,QAAM,qBAAqB,CAAC,UAC1B,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAA4B,SAAS;AAMxC,QAAM,iBAAiB,YAA2B;AAChD,QAAI;AACF,YAAMC,SAAAA,MAAM,UAAU,EAAE,WAAW,MAAM;AAAA,IAC3C,SAAS,OAAgB;AAEvB,UAAK,MAA4B,SAAS,UAAU;AAClD,cAAM,IAAI,MAAM,qCAAqC,KAAK,EAAE;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,OAAO,QAAwC;AAEzD,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,gBAAgBC,KAAAA,KAAK,UAAU,iBAAiB,QAAQ,CAAC;AAC/D,UAAM,qBAAqBA,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AAGzE,QAAI;AACF,YAAM,eAAA;AAAA,IACR,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,qCAAqC,KAAK,EAAE;AAAA,IAC9D;AAEA,QAAI;AACJ,QAAI;AAEJ,QAAI,mBAAmB;AACrB,UAAI;AACF,gBAAQ,MAAM,eAAe,oBAAoB,IAAI;AACrD,mBAAW;AAAA,MACb,SAAS,OAAO;AACd,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAC9B,cAAI;AACF,kBAAMC,SAAAA,OAAO,kBAAkB;AAAA,UACjC,SAAQ;AAAA,UAER;AACA,iBAAO;AAAA,QACT;AAEA,YAAI;AACF,kBAAQ,MAAM,eAAe,eAAe,KAAK;AACjD,qBAAW;AAAA,QACb,SAAS,YAAY;AACnB,cAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,gBAAI;AACF,oBAAMA,SAAAA,OAAO,aAAa;AAAA,YAC5B,SAAQ;AAAA,YAER;AAAA,UACF;AACA,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF,OAAO;AACL,UAAI;AACF,gBAAQ,MAAM,eAAe,eAAe,KAAK;AACjD,mBAAW;AAAA,MACb,SAAS,OAAO;AACd,YAAI,CAAC,mBAAmB,KAAK,GAAG;AAC9B,cAAI;AACF,kBAAMA,SAAAA,OAAO,aAAa;AAAA,UAC5B,SAAQ;AAAA,UAER;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,MAAM,QAAQ,QAAW;AAC3B,YAAM,YACJ,MAAM,QAAQ,KAAK,KAAK,IAAA,IAAQ,MAAM,YAAY,MAAM;AAC1D,UAAI,WAAW;AAEb,cAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,YAAI;AAEF,gBAAM,cAAc,KAAK,IAAA;AACzB,gBAAM,eACJ,MAAM,QAAQ,KAAK,cAAc,MAAM,YAAY,MAAM;AAC3D,cAAI,cAAc;AAChB,kBAAMA,SAAAA,OAAO,QAAQ;AACrB,gBAAI,aAAa,oBAAoB;AACnC,kBAAI;AACF,sBAAMA,SAAAA,OAAO,aAAa;AAAA,cAC5B,SAAQ;AAAA,cAER;AAAA,YACF,WAAW,aAAa,eAAe;AACrC,kBAAI;AACF,sBAAMA,SAAAA,OAAO,kBAAkB;AAAA,cACjC,SAAQ;AAAA,cAER;AAAA,YACF;AAAA,UACF;AACA,iBAAO;AAAA,QACT,SAAQ;AAEN,iBAAO;AAAA,QACT,UAAA;AACE,qBAAW,QAAA;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM;AAAA,EACf;AAEA,QAAM,MAAM,OACV,KACA,OACA,QACkB;AAElB,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,gBAAgBD,KAAAA,KAAK,UAAU,iBAAiB,QAAQ,CAAC;AAC/D,UAAM,qBAAqBA,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AACzE,UAAM,QAAoB;AAAA,MACxB,MAAM;AAAA,MACN,WAAW,KAAK,IAAA;AAAA,IAAI;AAGtB,QAAI,QAAQ,QAAW;AACrB,YAAM,MAAM;AAAA,IACd;AAEA,UAAM,aAAa,KAAK,UAAU,OAAO,MAAM,CAAC;AAChD,QAAI;AACJ,QAAI,mBAAmB;AACrB,YAAM,kBAAA;AACN,gBAAU,MAAM,UAAW,UAAU;AAAA,IACvC,OAAO;AACL,gBAAU;AAAA,IACZ;AAEA,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AAEN,YAAM,WAAW,oBAAoB,qBAAqB;AAE1D,UAAI;AAGF,YAAI,mBAAmB;AACrB,gBAAME,SAAAA,UAAU,UAAU,OAAO;AACjC,cAAI;AACF,kB
AAMD,SAAAA,OAAO,aAAa;AAAA,UAC5B,SAAQ;AAAA,UAER;AAAA,QACF,OAAO;AACL,gBAAMC,mBAAU,UAAU,SAAS,OAAO;AAC1C,cAAI;AACF,kBAAMD,SAAAA,OAAO,kBAAkB;AAAA,UACjC,SAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,cAAc,OAAO,QAA+B;AAExD,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,gBAAgBD,KAAAA,KAAK,UAAU,iBAAiB,QAAQ,CAAC;AAC/D,UAAM,qBAAqBA,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AAEzE,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AAEN,UAAI;AACF,cAAMC,SAAAA,OAAO,aAAa;AAAA,MAC5B,SAAQ;AAAA,MAER;AAEA,UAAI;AACF,cAAMA,SAAAA,OAAO,kBAAkB;AAAA,MACjC,SAAQ;AAAA,MAER;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,QAAQ,YAA2B;AACvC,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AACN,YAAM,QAAQ,MAAME,SAAAA,QAAQ,QAAQ;AAGpC,YAAM,aAAa,MAAM;AAAA,QACvB,CAAC,SAAiB,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,UAAU;AAAA,MAAA;AAGtE,iBAAW,QAAQ,YAAY;AAC7B,cAAM,WAAWH,KAAAA,KAAK,UAAU,IAAI;AACpC,YAAI;AACF,gBAAMC,SAAAA,OAAO,QAAQ;AAAA,QACvB,SAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,OAAO,YAA6B;AACxC,UAAM,eAAA;AAGN,UAAM,QAAQ,MAAME,SAAAA,QAAQ,QAAQ;AACpC,UAAM,aAAa,MAAM;AAAA,MACvB,CAAC,SAAiB,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,UAAU;AAAA,IAAA;AAGtE,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,MAAM,KAAK,IAAA;AACjB,UAAI,aAAa;AAEjB,iBAAW,QAAQ,YAAY;AAC7B,cAAM,WAAWH,KAAAA,KAAK,UAAU,IAAI;AACpC,cAAM,eAAe,KAAK,SAAS,UAAU;AAC7C,YAAI;AACF,gBAAM,QAAQ,MAAM,eAAe,UAAU,YAAY;AAGzD,cAAI,MAAM,QAAQ,QAAW;AAC3B,kBAAM,YACJ,MAAM,QAAQ,KAAK,MAAM,MAAM,YAAY,MAAM;AACnD,gBAAI,WAAW;AACb,oBAAMC,SAAAA,OAAO,QAAQ;AACrB;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF,SAAQ;AAEN,cAAI;AACF,kBAAMA,SAAAA,OAAO,QAAQ;AAAA,UACvB,SAAQG,IAAA;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
+ {"version":3,"file":"node.cjs","sources":["../src/cache/filesystem.ts"],"sourcesContent":["// mark-deco - Flexible Markdown to HTML conversion library\n// Copyright (c) Kouji Matsui. (@kekyo@mi.kekyo.net)\n// Under MIT.\n// https://github.com/kekyo/mark-deco\n\nimport {\n mkdir,\n readdir,\n readFile,\n rename,\n unlink,\n writeFile,\n} from 'fs/promises';\nimport { join } from 'path';\nimport { createHash, randomBytes } from 'crypto';\nimport { createMutex } from 'async-primitives';\nimport { promisify } from 'util';\n\nimport type { CacheStorage, CacheEntry } from './index';\nimport { isBrowser } from '../utils';\n\n/**\n * Generate SHA-256 hash for cache key using Node.js crypto module\n * Provides better collision resistance than simple hash algorithms\n * @param input - Input string to hash\n * @returns Promise resolving to hexadecimal hash string\n */\nconst generateFileHash = async (input: string): Promise<string> => {\n try {\n const hash = createHash('sha256');\n hash.update(input, 'utf8');\n return hash.digest('hex');\n } catch (error) {\n throw new Error(`Failed to generate hash: ${error}`);\n }\n};\n\n/**\n * Create file system-based cache storage instance\n * Uses Node.js file system to store cache entries as gzip-compressed JSON files\n * @param cacheDir - Directory path to store cache files (will be created if it doesn't exist)\n * @returns FileSystemCache instance that uses file system\n * @throws Error if file system operations fail or if not running in Node.js environment\n */\nexport const createFileSystemCacheStorage = (\n cacheDir: string\n): CacheStorage => {\n // Check if we're in a browser environment\n if (isBrowser()) {\n throw new Error(\n 'File system cache is only available in Node.js environment, not in browsers'\n );\n }\n\n const mutex = createMutex();\n let gzipAsync: ((input: string | Buffer) => Promise<Buffer>) | null = null;\n let gunzipAsync: ((input: Buffer) => Promise<Buffer>) | null = null;\n\n const ensureCompression = async (): Promise<void> => {\n if (gzipAsync && gunzipAsync) {\n return;\n }\n\n const { gzip, gunzip } = await import('zlib');\n gzipAsync = promisify(gzip);\n gunzipAsync = promisify(gunzip);\n };\n\n /**\n * Generate safe file name from cache key using hash\n */\n const generateFileBaseName = async (key: string): Promise<string> => {\n const hash = await generateFileHash(key);\n return hash;\n };\n\n const getCompressedFileName = (baseName: string): string =>\n `${baseName}.json.gz`;\n\n const readCacheEntry = async (filePath: string): Promise<CacheEntry> => {\n await ensureCompression();\n const buffer = await readFile(filePath);\n const unzipped = await gunzipAsync!(buffer);\n return JSON.parse(unzipped.toString('utf-8'));\n };\n\n const isMissingFileError = (error: unknown): boolean =>\n typeof error === 'object' &&\n error !== null &&\n 'code' in error &&\n (error as { code?: string }).code === 'ENOENT';\n\n /**\n * Ensure cache directory exists\n * This operation is idempotent and safe to call concurrently\n */\n const ensureCacheDir = async (): Promise<void> => {\n try {\n await mkdir(cacheDir, { recursive: true });\n } catch (error: unknown) {\n // Ignore EEXIST errors since directory already exists\n if ((error as { code?: string }).code !== 'EEXIST') {\n throw new Error(`Failed to create cache directory: ${error}`);\n }\n }\n };\n\n const get = async (key: string): Promise<string | null> => {\n // Pre-compute file name outside of lock (pure function)\n const baseName = await generateFileBaseName(key);\n const 
compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n\n // Ensure cache directory exists (idempotent operation)\n try {\n await ensureCacheDir();\n } catch (error) {\n throw new Error(`Failed to ensure cache directory: ${error}`);\n }\n\n let entry: CacheEntry;\n\n try {\n entry = await readCacheEntry(compressedFilePath);\n } catch (error) {\n if (!isMissingFileError(error)) {\n try {\n await unlink(compressedFilePath);\n } catch {\n // Ignore cleanup errors\n }\n }\n return null;\n }\n\n // Check TTL expiration - only lock if we need to delete expired file\n if (entry.ttl !== undefined) {\n const isExpired =\n entry.ttl === 0 || Date.now() > entry.timestamp + entry.ttl;\n if (isExpired) {\n // Lock only for the deletion operation\n const lockHandle = await mutex.lock();\n try {\n // Double-check expiration under lock to avoid race conditions\n const currentTime = Date.now();\n const stillExpired =\n entry.ttl === 0 || currentTime > entry.timestamp + entry.ttl;\n if (stillExpired) {\n await unlink(compressedFilePath);\n }\n return null;\n } catch {\n // File might have been deleted by another process, ignore\n return null;\n } finally {\n lockHandle.release();\n }\n }\n }\n\n return entry.data;\n };\n\n const set = async (\n key: string,\n value: string,\n ttl?: number\n ): Promise<void> => {\n // Pre-compute everything possible outside of lock\n const baseName = await generateFileBaseName(key);\n const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n const entry: CacheEntry = {\n data: value,\n timestamp: Date.now(),\n };\n\n if (ttl !== undefined) {\n entry.ttl = ttl;\n }\n\n const serialized = JSON.stringify(entry, null, 2);\n await ensureCompression();\n const payload = await gzipAsync!(serialized);\n\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n\n const tempSuffix = `${process.pid}-${Date.now()}-${randomBytes(6).toString('hex')}`;\n const tempFilePath = join(\n cacheDir,\n `${baseName}.json.gz.tmp-${tempSuffix}`\n );\n\n try {\n // Write to a temp file then rename to avoid partial reads.\n await writeFile(tempFilePath, payload);\n try {\n await rename(tempFilePath, compressedFilePath);\n } catch {\n // Fallback: remove existing file then rename (Windows-friendly).\n try {\n await unlink(compressedFilePath);\n } catch {\n // Ignore cleanup errors\n }\n await rename(tempFilePath, compressedFilePath);\n }\n } catch (error) {\n try {\n // Best-effort cleanup of temp file\n await unlink(tempFilePath);\n } catch {\n // Ignore cleanup errors\n }\n throw new Error(`Failed to write cache entry: ${error}`);\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const deleteEntry = async (key: string): Promise<void> => {\n // Pre-compute file name outside of lock\n const baseName = await generateFileBaseName(key);\n const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));\n\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n\n try {\n await unlink(compressedFilePath);\n } catch {\n // File doesn't exist, ignore the error\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const clear = async (): Promise<void> => {\n const lockHandle = await mutex.lock();\n try {\n await ensureCacheDir();\n const files = await readdir(cacheDir);\n\n // Filter files outside the deletion loop for better 
performance\n const cacheFiles = files.filter((file: string) =>\n file.endsWith('.json.gz')\n );\n\n for (const file of cacheFiles) {\n const filePath = join(cacheDir, file);\n try {\n await unlink(filePath);\n } catch {\n // Ignore individual file deletion errors\n }\n }\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n const size = async (): Promise<number> => {\n await ensureCacheDir();\n\n // Get file list without lock first\n const files = await readdir(cacheDir);\n const cacheFiles = files.filter((file: string) =>\n file.endsWith('.json.gz')\n );\n\n if (cacheFiles.length === 0) {\n return 0;\n }\n\n // Clean up expired entries with lock\n const lockHandle = await mutex.lock();\n try {\n const now = Date.now();\n let validCount = 0;\n\n for (const file of cacheFiles) {\n const filePath = join(cacheDir, file);\n try {\n const entry = await readCacheEntry(filePath);\n\n // Check if entry is expired\n if (entry.ttl !== undefined) {\n const isExpired =\n entry.ttl === 0 || now > entry.timestamp + entry.ttl;\n if (isExpired) {\n await unlink(filePath);\n continue; // Don't count this file\n }\n }\n validCount++;\n } catch {\n // If we can't read or parse the file, delete it\n try {\n await unlink(filePath);\n } catch {\n // Ignore unlink errors\n }\n }\n }\n\n return validCount;\n } catch (importError) {\n throw new Error(`Failed to import required modules: ${importError}`);\n } finally {\n lockHandle.release();\n }\n };\n\n return {\n get,\n set,\n delete: deleteEntry,\n clear,\n size,\n };\n};\n"],"names":["createHash","isBrowser","createMutex","promisify","readFile","mkdir","join","unlink","randomBytes","writeFile","rename","e","readdir"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BA,MAAM,mBAAmB,OAAO,UAAmC;AACjE,MAAI;AACF,UAAM,OAAOA,OAAAA,WAAW,QAAQ;AAChC,SAAK,OAAO,OAAO,MAAM;AACzB,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,4BAA4B,KAAK,EAAE;AAAA,EACrD;AACF;AASO,MAAM,+BAA+B,CAC1C,aACiB;AAEjB,MAAIC,MAAAA,aAAa;AACf,UAAM,IAAI;AAAA,MACR;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,QAAQC,gBAAAA,YAAA;AACd,MAAI,YAAkE;AACtE,MAAI,cAA2D;AAE/D,QAAM,oBAAoB,YAA2B;AACnD,QAAI,aAAa,aAAa;AAC5B;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,WAAW,MAAM,OAAO,MAAM;AAC5C,gBAAYC,KAAAA,UAAU,IAAI;AAC1B,kBAAcA,KAAAA,UAAU,MAAM;AAAA,EAChC;AAKA,QAAM,uBAAuB,OAAO,QAAiC;AACnE,UAAM,OAAO,MAAM,iBAAiB,GAAG;AACvC,WAAO;AAAA,EACT;AAEA,QAAM,wBAAwB,CAAC,aAC7B,GAAG,QAAQ;AAEb,QAAM,iBAAiB,OAAO,aAA0C;AACtE,UAAM,kBAAA;AACN,UAAM,SAAS,MAAMC,SAAAA,SAAS,QAAQ;AACtC,UAAM,WAAW,MAAM,YAAa,MAAM;AAC1C,WAAO,KAAK,MAAM,SAAS,SAAS,OAAO,CAAC;AAAA,EAC9C;AAEA,QAAM,qBAAqB,CAAC,UAC1B,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAA4B,SAAS;AAMxC,QAAM,iBAAiB,YAA2B;AAChD,QAAI;AACF,YAAMC,SAAAA,MAAM,UAAU,EAAE,WAAW,MAAM;AAAA,IAC3C,SAAS,OAAgB;AAEvB,UAAK,MAA4B,SAAS,UAAU;AAClD,cAAM,IAAI,MAAM,qCAAqC,KAAK,EAAE;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,OAAO,QAAwC;AAEzD,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,qBAAqBC,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AAGzE,QAAI;AACF,YAAM,eAAA;AAAA,IACR,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,qCAAqC,KAAK,EAAE;AAAA,IAC9D;AAEA,QAAI;AAEJ,QAAI;AACF,cAAQ,MAAM,eAAe,kBAAkB;AAAA,IACjD,SAAS,OAAO;AACd,UAAI,CAAC,mBAAmB,KAAK,GAAG;AAC9B,YAAI;AACF,gBAAMC,SAAAA,OAAO,kBAAkB;AAAA,QACjC,SAAQ;AAAA,QAER;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAGA,QAAI,MAAM,QAAQ,QAAW;AAC3B,YAAM,YACJ,MAAM,QAAQ,KAAK,KAAK,IAAA,IAAQ,MAAM,YAAY,MAAM;AAC1D,UAAI,WAAW;AAEb,cAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,YAAI;AAEF,gBAAM,cAAc,KAAK,IAAA;AACzB,gBAAM,eACJ,MAAM,QAAQ,KAAK,cAAc,MAAM,YAAY,MAAM;AAC3D,cAAI,cAAc;AAChB,kBAAMA,SAAAA,OAAO
,kBAAkB;AAAA,UACjC;AACA,iBAAO;AAAA,QACT,SAAQ;AAEN,iBAAO;AAAA,QACT,UAAA;AACE,qBAAW,QAAA;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM;AAAA,EACf;AAEA,QAAM,MAAM,OACV,KACA,OACA,QACkB;AAElB,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,qBAAqBD,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AACzE,UAAM,QAAoB;AAAA,MACxB,MAAM;AAAA,MACN,WAAW,KAAK,IAAA;AAAA,IAAI;AAGtB,QAAI,QAAQ,QAAW;AACrB,YAAM,MAAM;AAAA,IACd;AAEA,UAAM,aAAa,KAAK,UAAU,OAAO,MAAM,CAAC;AAChD,UAAM,kBAAA;AACN,UAAM,UAAU,MAAM,UAAW,UAAU;AAE3C,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AAEN,YAAM,aAAa,GAAG,QAAQ,GAAG,IAAI,KAAK,IAAA,CAAK,IAAIE,OAAAA,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC;AACjF,YAAM,eAAeF,KAAAA;AAAAA,QACnB;AAAA,QACA,GAAG,QAAQ,gBAAgB,UAAU;AAAA,MAAA;AAGvC,UAAI;AAEF,cAAMG,SAAAA,UAAU,cAAc,OAAO;AACrC,YAAI;AACF,gBAAMC,SAAAA,OAAO,cAAc,kBAAkB;AAAA,QAC/C,SAAQ;AAEN,cAAI;AACF,kBAAMH,SAAAA,OAAO,kBAAkB;AAAA,UACjC,SAAQI,IAAA;AAAA,UAER;AACA,gBAAMD,SAAAA,OAAO,cAAc,kBAAkB;AAAA,QAC/C;AAAA,MACF,SAAS,OAAO;AACd,YAAI;AAEF,gBAAMH,SAAAA,OAAO,YAAY;AAAA,QAC3B,SAAQ;AAAA,QAER;AACA,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,cAAc,OAAO,QAA+B;AAExD,UAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,UAAM,qBAAqBD,KAAAA,KAAK,UAAU,sBAAsB,QAAQ,CAAC;AAEzE,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AAEN,UAAI;AACF,cAAMC,SAAAA,OAAO,kBAAkB;AAAA,MACjC,SAAQ;AAAA,MAER;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,QAAQ,YAA2B;AACvC,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,eAAA;AACN,YAAM,QAAQ,MAAMK,SAAAA,QAAQ,QAAQ;AAGpC,YAAM,aAAa,MAAM;AAAA,QAAO,CAAC,SAC/B,KAAK,SAAS,UAAU;AAAA,MAAA;AAG1B,iBAAW,QAAQ,YAAY;AAC7B,cAAM,WAAWN,KAAAA,KAAK,UAAU,IAAI;AACpC,YAAI;AACF,gBAAMC,SAAAA,OAAO,QAAQ;AAAA,QACvB,SAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,QAAM,OAAO,YAA6B;AACxC,UAAM,eAAA;AAGN,UAAM,QAAQ,MAAMK,SAAAA,QAAQ,QAAQ;AACpC,UAAM,aAAa,MAAM;AAAA,MAAO,CAAC,SAC/B,KAAK,SAAS,UAAU;AAAA,IAAA;AAG1B,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,MAAM,MAAM,KAAA;AAC/B,QAAI;AACF,YAAM,MAAM,KAAK,IAAA;AACjB,UAAI,aAAa;AAEjB,iBAAW,QAAQ,YAAY;AAC7B,cAAM,WAAWN,KAAAA,KAAK,UAAU,IAAI;AACpC,YAAI;AACF,gBAAM,QAAQ,MAAM,eAAe,QAAQ;AAG3C,cAAI,MAAM,QAAQ,QAAW;AAC3B,kBAAM,YACJ,MAAM,QAAQ,KAAK,MAAM,MAAM,YAAY,MAAM;AACnD,gBAAI,WAAW;AACb,oBAAMC,SAAAA,OAAO,QAAQ;AACrB;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF,SAAQ;AAEN,cAAI;AACF,kBAAMA,SAAAA,OAAO,QAAQ;AAAA,UACvB,SAAQI,IAAA;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,aAAa;AACpB,YAAM,IAAI,MAAM,sCAAsC,WAAW,EAAE;AAAA,IACrE,UAAA;AACE,iBAAW,QAAA;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
package/dist/node.d.ts CHANGED
@@ -1,11 +1,11 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
  /**
@@ -26,17 +26,11 @@ declare interface CacheStorage_2 {
 
  /**
  * Create file system-based cache storage instance
- * Uses Node.js file system to store cache entries as JSON files
+ * Uses Node.js file system to store cache entries as gzip-compressed JSON files
  * @param cacheDir - Directory path to store cache files (will be created if it doesn't exist)
- * @param options - File system cache options (optional)
  * @returns FileSystemCache instance that uses file system
  * @throws Error if file system operations fail or if not running in Node.js environment
  */
- export declare const createFileSystemCacheStorage: (cacheDir: string, options?: FileSystemCacheOptions) => CacheStorage_2;
-
- export declare interface FileSystemCacheOptions {
- /** Enable gzip compression for cache files (default: true) */
- enableCompression?: boolean;
- }
+ export declare const createFileSystemCacheStorage: (cacheDir: string) => CacheStorage_2;
 
  export { }
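
The node.d.ts hunk above is the public API change in this release: the FileSystemCacheOptions interface and its enableCompression flag are removed, and createFileSystemCacheStorage now takes only the cache directory; entries are always stored as gzip-compressed .json.gz files, and 0.29.0 no longer reads or clears old plain .json cache files. A brief usage sketch against the 0.29.0 declaration (the import path and the cache keys are assumptions for illustration; adjust the import to however your build resolves the package's Node entry point):

```typescript
// Sketch: calling the 0.29.0 signature declared in node.d.ts above.
// Assumption: createFileSystemCacheStorage is reachable from the package's Node entry.
import { createFileSystemCacheStorage } from "mark-deco";

const main = async (): Promise<void> => {
  // 0.29.0: only the directory; there is no options argument anymore.
  const storage = createFileSystemCacheStorage("./.cache/mark-deco");

  // Keys are hashed to <sha256>.json.gz files; the ttl is in milliseconds
  // (the diffed source compares Date.now() against timestamp + ttl).
  await storage.set("oembed:https://example.com/", JSON.stringify({ html: "<p>embed</p>" }), 60_000);
  const cached = await storage.get("oembed:https://example.com/"); // string | null
  console.log(cached !== null, await storage.size());
};

main().catch(console.error);
```

Callers that previously passed { enableCompression: false } can simply drop the second argument; the resulting cache files will now always be compressed.
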
package/dist/node.mjs CHANGED
@@ -1,16 +1,16 @@
  /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
- import { readdir, unlink, writeFile, mkdir, readFile } from "fs/promises";
+ import { readdir, unlink, writeFile, rename, mkdir, readFile } from "fs/promises";
  import { join } from "path";
- import { createHash } from "crypto";
+ import { randomBytes, createHash } from "crypto";
  import { createMutex } from "async-primitives";
  import { promisify } from "util";
  import { i as isBrowser } from "./utils-B06SsBEd.js";
@@ -23,13 +23,12 @@ const generateFileHash = async (input) => {
  throw new Error(`Failed to generate hash: ${error}`);
  }
  };
- const createFileSystemCacheStorage = (cacheDir, options = {}) => {
+ const createFileSystemCacheStorage = (cacheDir) => {
  if (isBrowser()) {
  throw new Error(
  "File system cache is only available in Node.js environment, not in browsers"
  );
  }
- const { enableCompression = true } = options;
  const mutex = createMutex();
  let gzipAsync = null;
  let gunzipAsync = null;
@@ -45,17 +44,12 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  const hash = await generateFileHash(key);
  return hash;
  };
- const getPlainFileName = (baseName) => `${baseName}.json`;
  const getCompressedFileName = (baseName) => `${baseName}.json.gz`;
- const readCacheEntry = async (filePath, compressed) => {
- if (compressed) {
- await ensureCompression();
- const buffer = await readFile(filePath);
- const unzipped = await gunzipAsync(buffer);
- return JSON.parse(unzipped.toString("utf-8"));
- }
- const content = await readFile(filePath, "utf-8");
- return JSON.parse(content);
+ const readCacheEntry = async (filePath) => {
+ await ensureCompression();
+ const buffer = await readFile(filePath);
+ const unzipped = await gunzipAsync(buffer);
+ return JSON.parse(unzipped.toString("utf-8"));
  };
  const isMissingFileError = (error) => typeof error === "object" && error !== null && "code" in error && error.code === "ENOENT";
  const ensureCacheDir = async () => {
@@ -69,7 +63,6 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  };
  const get = async (key) => {
  const baseName = await generateFileBaseName(key);
- const plainFilePath = join(cacheDir, getPlainFileName(baseName));
  const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));
  try {
  await ensureCacheDir();
@@ -77,45 +70,16 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  throw new Error(`Failed to ensure cache directory: ${error}`);
  }
  let entry;
- let filePath;
- if (enableCompression) {
- try {
- entry = await readCacheEntry(compressedFilePath, true);
- filePath = compressedFilePath;
- } catch (error) {
- if (!isMissingFileError(error)) {
- try {
- await unlink(compressedFilePath);
- } catch (e) {
- }
- return null;
- }
+ try {
+ entry = await readCacheEntry(compressedFilePath);
+ } catch (error) {
+ if (!isMissingFileError(error)) {
  try {
- entry = await readCacheEntry(plainFilePath, false);
- filePath = plainFilePath;
- } catch (plainError) {
- if (!isMissingFileError(plainError)) {
- try {
- await unlink(plainFilePath);
- } catch (e) {
- }
- }
- return null;
- }
- }
- } else {
- try {
- entry = await readCacheEntry(plainFilePath, false);
- filePath = plainFilePath;
- } catch (error) {
- if (!isMissingFileError(error)) {
- try {
- await unlink(plainFilePath);
- } catch (e) {
- }
+ await unlink(compressedFilePath);
+ } catch (e) {
  }
- return null;
  }
+ return null;
  }
  if (entry.ttl !== void 0) {
  const isExpired = entry.ttl === 0 || Date.now() > entry.timestamp + entry.ttl;
@@ -125,18 +89,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  const currentTime = Date.now();
  const stillExpired = entry.ttl === 0 || currentTime > entry.timestamp + entry.ttl;
  if (stillExpired) {
- await unlink(filePath);
- if (filePath === compressedFilePath) {
- try {
- await unlink(plainFilePath);
- } catch (e) {
- }
- } else if (filePath === plainFilePath) {
- try {
- await unlink(compressedFilePath);
- } catch (e) {
- }
- }
+ await unlink(compressedFilePath);
  }
  return null;
  } catch (e) {
@@ -150,7 +103,6 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  };
  const set = async (key, value, ttl) => {
  const baseName = await generateFileBaseName(key);
- const plainFilePath = join(cacheDir, getPlainFileName(baseName));
  const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));
  const entry = {
  data: value,
@@ -160,32 +112,32 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
  entry.ttl = ttl;
  }
  const serialized = JSON.stringify(entry, null, 2);
- let payload;
- if (enableCompression) {
- await ensureCompression();
- payload = await gzipAsync(serialized);
- } else {
- payload = serialized;
- }
+ await ensureCompression();
+ const payload = await gzipAsync(serialized);
  const lockHandle = await mutex.lock();
  try {
  await ensureCacheDir();
- const filePath = enableCompression ? compressedFilePath : plainFilePath;
+ const tempSuffix = `${process.pid}-${Date.now()}-${randomBytes(6).toString("hex")}`;
+ const tempFilePath = join(
+ cacheDir,
+ `${baseName}.json.gz.tmp-${tempSuffix}`
+ );
  try {
- if (enableCompression) {
- await writeFile(filePath, payload);
- try {
- await unlink(plainFilePath);
- } catch (e) {
- }
- } else {
- await writeFile(filePath, payload, "utf-8");
+ await writeFile(tempFilePath, payload);
+ try {
+ await rename(tempFilePath, compressedFilePath);
+ } catch (e) {
  try {
  await unlink(compressedFilePath);
- } catch (e) {
+ } catch (e2) {
  }
+ await rename(tempFilePath, compressedFilePath);
  }
  } catch (error) {
+ try {
+ await unlink(tempFilePath);
+ } catch (e) {
+ }
  throw new Error(`Failed to write cache entry: ${error}`);
  }
  } catch (importError) {
@@ -196,15 +148,10 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
196
148
  };
197
149
  const deleteEntry = async (key) => {
198
150
  const baseName = await generateFileBaseName(key);
199
- const plainFilePath = join(cacheDir, getPlainFileName(baseName));
200
151
  const compressedFilePath = join(cacheDir, getCompressedFileName(baseName));
201
152
  const lockHandle = await mutex.lock();
202
153
  try {
203
154
  await ensureCacheDir();
204
- try {
205
- await unlink(plainFilePath);
206
- } catch (e) {
207
- }
208
155
  try {
209
156
  await unlink(compressedFilePath);
210
157
  } catch (e) {
@@ -221,7 +168,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
221
168
  await ensureCacheDir();
222
169
  const files = await readdir(cacheDir);
223
170
  const cacheFiles = files.filter(
224
- (file) => file.endsWith(".json") || file.endsWith(".json.gz")
171
+ (file) => file.endsWith(".json.gz")
225
172
  );
226
173
  for (const file of cacheFiles) {
227
174
  const filePath = join(cacheDir, file);
@@ -240,7 +187,7 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
240
187
  await ensureCacheDir();
241
188
  const files = await readdir(cacheDir);
242
189
  const cacheFiles = files.filter(
243
- (file) => file.endsWith(".json") || file.endsWith(".json.gz")
190
+ (file) => file.endsWith(".json.gz")
244
191
  );
245
192
  if (cacheFiles.length === 0) {
246
193
  return 0;
@@ -251,9 +198,8 @@ const createFileSystemCacheStorage = (cacheDir, options = {}) => {
251
198
  let validCount = 0;
252
199
  for (const file of cacheFiles) {
253
200
  const filePath = join(cacheDir, file);
254
- const isCompressed = file.endsWith(".json.gz");
255
201
  try {
256
- const entry = await readCacheEntry(filePath, isCompressed);
202
+ const entry = await readCacheEntry(filePath);
257
203
  if (entry.ttl !== void 0) {
258
204
  const isExpired = entry.ttl === 0 || now > entry.timestamp + entry.ttl;
259
205
  if (isExpired) {
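
The set() rewrite above moves from writing the destination file in place to writing a uniquely named temp file and renaming it over <hash>.json.gz. A minimal standalone sketch of that pattern in TypeScript, assuming Node.js and the serialization shown in the diff; writeCacheEntryAtomically and its parameters are illustrative names, not part of the package API:

// Sketch: atomic cache write via temp file + rename, mirroring the set() diff.
import { writeFile, rename, unlink } from 'fs/promises';
import { randomBytes } from 'crypto';
import { join } from 'path';
import { promisify } from 'util';
import { gzip } from 'zlib';

const gzipAsync = promisify(gzip);

const writeCacheEntryAtomically = async (
  cacheDir: string,
  baseName: string,
  serialized: string
): Promise<void> => {
  // Compress before taking any lock, as the diff does.
  const payload = await gzipAsync(serialized);
  const finalPath = join(cacheDir, `${baseName}.json.gz`);
  // pid + timestamp + random bytes: concurrent writers never collide on a temp name.
  const tempSuffix = `${process.pid}-${Date.now()}-${randomBytes(6).toString('hex')}`;
  const tempPath = join(cacheDir, `${baseName}.json.gz.tmp-${tempSuffix}`);
  try {
    await writeFile(tempPath, payload);
    try {
      // rename() atomically replaces the destination on POSIX.
      await rename(tempPath, finalPath);
    } catch {
      // Fallback: remove the existing file and retry once (rename on some
      // platforms, e.g. Windows, can fail when the destination exists).
      await unlink(finalPath).catch(() => {});
      await rename(tempPath, finalPath);
    }
  } catch (error) {
    // Best-effort cleanup so a failed write doesn't strand the temp file.
    await unlink(tempPath).catch(() => {});
    throw new Error(`Failed to write cache entry: ${error}`);
  }
};

Because the rename happens within one directory, a concurrent get() observes either the previous complete file or the new one, never a truncated gzip stream.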
package/dist/node.mjs.map CHANGED
@@ -1 +1 @@
[single-line source map regenerated; the embedded filesystem.ts sources mirror the dist changes above: the FileSystemCacheOptions/enableCompression parameter and the plain .json code path are removed, gzip compression is always enabled, and set() writes through a temp file (pid/timestamp/random suffix) that is renamed into place; rename and randomBytes join the fs/promises and crypto imports]
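
As the regenerated sources indicate, createFileSystemCacheStorage now takes only the cache directory: the options argument is gone and every entry is gzip-compressed. A hedged usage sketch; the import specifier below is an assumption, so check the package README for the actual entry point:

// The factory is defined in src/cache/filesystem.ts; the re-export path is assumed.
import { createFileSystemCacheStorage } from 'mark-deco';

const demo = async (): Promise<void> => {
  // 0.28.0: createFileSystemCacheStorage('./cache', { enableCompression: false });
  // 0.29.0: no options object; entries are always stored as
  // <sha256(key)>.json.gz under the given directory.
  const storage = createFileSystemCacheStorage('./cache');

  await storage.set('page:home', '<html>...</html>', 60_000); // optional TTL, milliseconds
  const hit = await storage.get('page:home'); // null on miss or after expiry
  console.log(hit !== null, await storage.size()); // size() counts non-expired .json.gz entries
  await storage.delete('page:home');
};

demo().catch(console.error);

One migration consequence visible in the diff: plain .json files written by 0.28.0 with enableCompression: false are no longer read, deleted per key, or swept by clear(); they simply behave as cache misses, so a cache directory carried over from 0.28.0 may warrant manual cleanup.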
@@ -1,11 +1,11 @@
 /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
 const isCORSError = (error) => {
@@ -1,11 +1,11 @@
 /*!
  * name: mark-deco
- * version: 0.28.0
+ * version: 0.29.0
  * description: Flexible Markdown to HTML conversion library
  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
  * license: MIT
  * repository.url: https://github.com/kekyo/mark-deco
- * git.commit.hash: 124aac654302073ffbbd202ac2794d6c9cdc56ea
+ * git.commit.hash: 671fab29f293a6a2bb863f3b1d1d00cb1c3112d5
  */
 
 "use strict";
package/package.json CHANGED
@@ -1,20 +1,20 @@
 {
   "git": {
     "tags": [
-      "0.28.0"
+      "0.29.0"
     ],
     "branches": [
       "main"
     ],
-    "version": "0.28.0",
+    "version": "0.29.0",
     "commit": {
-      "hash": "124aac654302073ffbbd202ac2794d6c9cdc56ea",
-      "shortHash": "124aac6",
-      "date": "2026-01-24T23:29:49+09:00",
+      "hash": "671fab29f293a6a2bb863f3b1d1d00cb1c3112d5",
+      "shortHash": "671fab2",
+      "date": "2026-01-25T16:54:44+09:00",
       "message": "Merge branch 'develop'"
     }
   },
-  "version": "0.28.0",
+  "version": "0.29.0",
   "description": "Flexible Markdown to HTML conversion library",
   "name": "mark-deco",
   "keywords": [
@@ -103,5 +103,5 @@
     "vite-plugin-dts": ">=4.5.4",
     "vitest": ">=1.0.0"
   },
-  "buildDate": "2026-01-24T23:33:32+09:00"
+  "buildDate": "2026-01-25T16:56:53+09:00"
 }