fuma-content 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only, and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -13,8 +13,7 @@ function document(entryPoint, options) {
13
13
  ...rest
14
14
  } = item;
15
15
  const result = schema.safeParse(frontmatter);
16
- if (!result.success)
17
- throw createError(file, result.error);
16
+ if (!result.success) throw createError(file, result.error);
18
17
  return {
19
18
  file,
20
19
  info: result.data,
@@ -28,8 +27,7 @@ function json(entryPoint, options) {
28
27
  return read(entryPoint, ["json"], include).map(
29
28
  ({ file, default: data }) => {
30
29
  const result = schema.safeParse(data);
31
- if (!result.success)
32
- throw createError(file, result.error);
30
+ if (!result.success) throw createError(file, result.error);
33
31
  return {
34
32
  file,
35
33
  info: result.data
@@ -39,7 +37,7 @@ function json(entryPoint, options) {
39
37
  }
40
38
  function createError(file, err) {
41
39
  const message = `${file}:
42
- ${Object.entries(err.flatten().fieldErrors).map(([k, v]) => `${k}: ${v?.join(", ")}`).join("\n")}`;
40
+ ${Object.entries(err.flatten().fieldErrors).map(([k, v]) => `${k}: ${v?.join(", ") ?? ""}`).join("\n")}`;
43
41
  return new Error(message);
44
42
  }
45
43
  function read(entryPoint, format, include) {
@@ -3,7 +3,21 @@ import { ProcessorOptions } from '@mdx-js/mdx';
3
3
  import { VFile } from '@mdx-js/mdx/internal-create-format-aware-processors';
4
4
  import { FSWatcher } from 'chokidar';
5
5
 
6
+ interface OutputEntry extends Output {
7
+ /**
8
+ * extension of file, like: `md`
9
+ */
10
+ format: string;
11
+ file: string;
12
+ }
13
+ interface CompilerWithCache$1 extends Compiler {
14
+ _compileCache?: Map<string, OutputEntry>;
15
+ }
16
+ declare function compile(this: Compiler): Promise<OutputEntry[]>;
17
+ declare function compileFile(this: CompilerWithCache$1, file: string): Promise<OutputEntry>;
18
+
6
19
  interface Output {
20
+ dependencies?: OutputEntry[];
7
21
  content: string;
8
22
  _entryPoint?: unknown;
9
23
  _mdx?: {
@@ -34,16 +48,6 @@ interface Options extends ProcessorOptions {
34
48
  */
35
49
  declare const loadMDX: ({ lastModifiedTime, format: forceFormat, remarkExports, enableAbsoluteImport, ...rest }?: Options) => Transformer;
36
50
 
37
- interface OutputEntry extends Output {
38
- /**
39
- * extension of file, like: `md`
40
- */
41
- format: string;
42
- file: string;
43
- }
44
- declare function compile(this: Compiler): Promise<OutputEntry[]>;
45
- declare function compileFile(this: Compiler, file: string): Promise<OutputEntry>;
46
-
47
51
  interface EntryPointOptions {
48
52
  /**
49
53
  * Notice that `lazy` mode is not supported by `source` function
@@ -63,8 +67,11 @@ declare function loadEntryPoint(this: Compiler, entries: OutputEntry[]): OutputE
63
67
  interface EmitEntry extends OutputEntry {
64
68
  outputPath: string;
65
69
  }
70
+ interface CompilerWithCache extends Compiler {
71
+ _emitCache?: WeakMap<OutputEntry, EmitEntry>;
72
+ }
66
73
  declare function emit(this: Compiler): Promise<void>;
67
- declare function emitEntry(this: Compiler, entry: OutputEntry): Promise<EmitEntry>;
74
+ declare function emitEntry(this: CompilerWithCache, entry: OutputEntry): Promise<EmitEntry>;
68
75
 
69
76
  declare function watch(this: Compiler): FSWatcher;
70
77
 
@@ -92,7 +99,6 @@ interface Compiler {
92
99
  loaders: Record<string, Transformer>;
93
100
  _output?: OutputEntry[];
94
101
  _emit?: EmitEntry[];
95
- _cache: Map<string, OutputEntry>;
96
102
  }
97
103
 
98
104
  declare const defaultOptions: {
package/dist/internal.js CHANGED
@@ -57,8 +57,7 @@ import { spawn } from "cross-spawn";
57
57
  var cache = /* @__PURE__ */ new Map();
58
58
  function getGitTimestamp(file) {
59
59
  const cachedTimestamp = cache.get(file);
60
- if (cachedTimestamp)
61
- return Promise.resolve(cachedTimestamp);
60
+ if (cachedTimestamp) return Promise.resolve(cachedTimestamp);
62
61
  return new Promise((resolve, reject) => {
63
62
  const cwd = path2.dirname(file);
64
63
  if (!fs.existsSync(cwd)) {
@@ -72,8 +71,7 @@ function getGitTimestamp(file) {
72
71
  let output;
73
72
  child.stdout.on("data", (d) => output = new Date(String(d)));
74
73
  child.on("close", () => {
75
- if (output)
76
- cache.set(file, output);
74
+ if (output) cache.set(file, output);
77
75
  resolve(output);
78
76
  });
79
77
  child.on("error", reject);
@@ -120,8 +118,7 @@ function getMdastExport(name, value) {
120
118
  function remarkMdxExport({ values }) {
121
119
  return (tree, vfile) => {
122
120
  for (const name of values) {
123
- if (!(name in vfile.data))
124
- return;
121
+ if (!(name in vfile.data)) return;
125
122
  tree.children.unshift(getMdastExport(name, vfile.data[name]));
126
123
  }
127
124
  };
@@ -130,32 +127,54 @@ function remarkMdxExport({ values }) {
130
127
  // src/remark-plugins/remark-absolute-import.ts
131
128
  import * as path3 from "node:path";
132
129
  import { visit } from "unist-util-visit";
133
- function remarkAbsoluteImport({ enabled }) {
134
- return (tree, vfile) => {
135
- if (!enabled)
136
- return;
137
- visit(tree, ["mdxjsEsm"], (node) => {
138
- if (node.type !== "mdxjsEsm")
139
- return;
130
+ function remarkAbsoluteImport({
131
+ compiler,
132
+ transformFormats
133
+ }) {
134
+ return async (tree, vfile) => {
135
+ const transforms = [];
136
+ visit(tree, "mdxjsEsm", (node) => {
140
137
  const body = node.data?.estree?.body ?? [];
141
- body.forEach((statement) => {
142
- if (statement.type === "ImportDeclaration" && typeof statement.source.value === "string") {
143
- const value = statement.source.value;
144
- if (value.startsWith("./") || value.startsWith("../")) {
145
- const replace = getImportPath(
146
- path3.join(path3.dirname(vfile.path), value)
138
+ for (const statement of body) {
139
+ if (statement.type !== "ImportDeclaration" || typeof statement.source.value !== "string")
140
+ continue;
141
+ const value = statement.source.value;
142
+ if (!value.startsWith("./") && !value.startsWith("../")) continue;
143
+ const file = path3.join(path3.dirname(vfile.path), value);
144
+ if (transformFormats.includes(path3.extname(file).slice(1))) {
145
+ const transform = compiler.compileFile(file).then((entry) => {
146
+ statement.source.value = getImportPath(
147
+ getOutputPath(compiler, entry)
147
148
  );
148
- statement.source.value = replace;
149
- statement.source.raw = JSON.stringify(replace);
150
- }
149
+ delete statement.source.raw;
150
+ return entry;
151
+ });
152
+ transforms.push(transform);
153
+ continue;
151
154
  }
152
- });
155
+ const replace = getImportPath(file);
156
+ statement.source.value = replace;
157
+ delete statement.source.raw;
158
+ }
153
159
  });
160
+ vfile.data.ctx = { dependencies: await Promise.all(transforms) };
154
161
  };
155
162
  }
156
163
 
157
164
  // src/loader/mdx.ts
158
- var cache2 = /* @__PURE__ */ new Map();
165
+ function pluggable(enable, value) {
166
+ return enable ? [value] : [];
167
+ }
168
+ function getProcessor(compiler, options) {
169
+ if (!options.format) throw new Error("format is required");
170
+ compiler._mdxCache ||= /* @__PURE__ */ new Map();
171
+ let processor = compiler._mdxCache.get(options.format);
172
+ if (!processor) {
173
+ processor = createProcessor(options);
174
+ compiler._mdxCache.set(options.format, processor);
175
+ }
176
+ return processor;
177
+ }
159
178
  var loadMDX = ({
160
179
  lastModifiedTime,
161
180
  format: forceFormat,
@@ -163,25 +182,27 @@ var loadMDX = ({
163
182
  enableAbsoluteImport = true,
164
183
  ...rest
165
184
  } = {}) => {
166
- return async (file, source) => {
185
+ return async function transform(file, source) {
167
186
  const { content, data: frontmatter } = grayMatter(source);
168
187
  const detectedFormat = file.endsWith(".mdx") ? "mdx" : "md";
169
188
  const format = forceFormat ?? detectedFormat;
170
189
  let timestamp;
171
- let processor = cache2.get(format);
172
- if (processor === void 0) {
173
- processor = createProcessor({
174
- format,
175
- development: process.env.NODE_ENV === "development",
176
- ...rest,
177
- remarkPlugins: [
178
- ...rest.remarkPlugins ?? [],
179
- [remarkAbsoluteImport, { enabled: enableAbsoluteImport }],
180
- [remarkMdxExport, { values: remarkExports }]
181
- ]
182
- });
183
- cache2.set(format, processor);
184
- }
190
+ const processor = getProcessor(this, {
191
+ format,
192
+ development: process.env.NODE_ENV === "development",
193
+ ...rest,
194
+ remarkPlugins: [
195
+ ...rest.remarkPlugins ?? [],
196
+ ...pluggable(enableAbsoluteImport, [
197
+ remarkAbsoluteImport,
198
+ {
199
+ compiler: this,
200
+ transformFormats: Object.keys(this.loaders)
201
+ }
202
+ ]),
203
+ [remarkMdxExport, { values: remarkExports }]
204
+ ]
205
+ });
185
206
  if (lastModifiedTime === "git")
186
207
  timestamp = (await getGitTimestamp(file))?.getTime();
187
208
  const vfile = await processor.process({
@@ -193,6 +214,7 @@ var loadMDX = ({
193
214
  }
194
215
  });
195
216
  return {
217
+ dependencies: vfile.data.ctx.dependencies,
196
218
  content: String(vfile),
197
219
  _mdx: {
198
220
  vfile
@@ -220,13 +242,21 @@ async function emit() {
220
242
  this._emit = await Promise.all(emits);
221
243
  }
222
244
  async function emitEntry(entry) {
245
+ this._emitCache ||= /* @__PURE__ */ new WeakMap();
246
+ const cached = this._emitCache.get(entry);
247
+ if (cached) return cached;
223
248
  const outputPath = getOutputPath(this, entry);
224
249
  await fs2.mkdir(path4.dirname(outputPath), { recursive: true });
225
250
  await fs2.writeFile(outputPath, entry.content);
226
- return {
251
+ if (entry.dependencies) {
252
+ await Promise.all(entry.dependencies.map((dep) => this.emitEntry(dep)));
253
+ }
254
+ const output = {
227
255
  ...entry,
228
256
  outputPath
229
257
  };
258
+ this._emitCache.set(entry, output);
259
+ return output;
230
260
  }
231
261
 
232
262
  // src/compiler/compile.ts
@@ -260,11 +290,10 @@ function generateImport(compiler, output) {
260
290
  formats.set(entry.format, b);
261
291
  const importPath = getImportPath(getOutputPath(compiler, entry));
262
292
  const file = fullPath ? entry.file : getRelativePath(compiler.options.cwd, entry.file);
263
- const name = `p_${i}`;
293
+ const name = `p_${i.toString()}`;
264
294
  b.imports.push(`import * as ${name} from ${JSON.stringify(importPath)};`);
265
295
  b.entries.push(`{
266
296
  ...${name},
267
- format: ${JSON.stringify(entry.format)},
268
297
  file: ${JSON.stringify(file)},
269
298
  }`);
270
299
  });
@@ -290,16 +319,21 @@ load: () => import(${JSON.stringify(importPath)})
290
319
 
291
320
  // src/compiler/compile.ts
292
321
  async function compile() {
293
- this._output = await Promise.all(
294
- this.files.map((file) => this.compileFile(file))
322
+ const output = [];
323
+ await Promise.all(
324
+ this.files.map(async (file) => {
325
+ const entry = await this.compileFile(file);
326
+ output.push(entry);
327
+ })
295
328
  );
296
- this._output.push(loadEntryPoint.call(this, this._output));
297
- return this._output;
329
+ output.push(loadEntryPoint.call(this, output));
330
+ this._output = output;
331
+ return output;
298
332
  }
299
333
  async function compileFile(file) {
300
- const cache3 = this._cache.get(file);
301
- if (cache3)
302
- return cache3;
334
+ this._compileCache ||= /* @__PURE__ */ new Map();
335
+ const cache2 = this._compileCache.get(file);
336
+ if (cache2) return cache2;
303
337
  const format = path5.extname(file).slice(1);
304
338
  const content = (await fs3.readFile(file)).toString();
305
339
  const loader = this.loaders[format];
@@ -312,9 +346,12 @@ async function compileFile(file) {
312
346
  format,
313
347
  ...output
314
348
  };
315
- this._cache.set(file, entry);
349
+ this._compileCache.set(file, entry);
316
350
  return entry;
317
351
  }
352
+ function removeCache(compiler, file) {
353
+ compiler._compileCache?.delete(file);
354
+ }
318
355
 
319
356
  // src/compiler/watch.ts
320
357
  import { watch as watchFn } from "chokidar";
@@ -329,12 +366,12 @@ function watch() {
329
366
  }
330
367
  if (eventName === "unlink") {
331
368
  this.files = this.files.filter((file) => file !== absolutePath);
332
- this._cache.delete(absolutePath);
369
+ removeCache(this, absolutePath);
333
370
  void this.emit();
334
371
  }
335
372
  if (eventName === "change") {
336
373
  console.log("update", relativePath);
337
- this._cache.delete(absolutePath);
374
+ removeCache(this, absolutePath);
338
375
  void this.compileFile(absolutePath).then(async (entry) => {
339
376
  await this.emitEntry(entry);
340
377
  });
@@ -363,8 +400,7 @@ async function createCompiler(options) {
363
400
  watch,
364
401
  emit,
365
402
  compileFile,
366
- loaders: createLoaders(compilerOptions),
367
- _cache: /* @__PURE__ */ new Map()
403
+ loaders: createLoaders(compilerOptions)
368
404
  };
369
405
  }
370
406
  function createLoaders(options) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "fuma-content",
3
- "version": "0.0.1",
3
+ "version": "0.0.2",
4
4
  "description": "Write content for web apps",
5
5
  "keywords": [
6
6
  "Content",