@storyteller-platform/align 0.1.9 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. package/dist/align/__tests__/align.test.cjs +6 -5
  2. package/dist/align/__tests__/align.test.js +6 -5
  3. package/dist/align/align.cjs +133 -81
  4. package/dist/align/align.d.cts +1 -0
  5. package/dist/align/align.d.ts +1 -0
  6. package/dist/align/align.js +133 -81
  7. package/dist/align/getSentenceRanges.cjs +78 -149
  8. package/dist/align/getSentenceRanges.d.cts +1 -1
  9. package/dist/align/getSentenceRanges.d.ts +1 -1
  10. package/dist/align/getSentenceRanges.js +78 -149
  11. package/dist/align/slugify.cjs +16 -8
  12. package/dist/align/slugify.js +16 -8
  13. package/dist/errorAlign/__tests__/errorAlign.test.cjs +100 -0
  14. package/dist/errorAlign/__tests__/errorAlign.test.d.cts +2 -0
  15. package/dist/errorAlign/__tests__/errorAlign.test.d.ts +2 -0
  16. package/dist/errorAlign/__tests__/errorAlign.test.js +77 -0
  17. package/dist/errorAlign/__tests__/native.test.cjs +118 -0
  18. package/dist/errorAlign/__tests__/native.test.d.cts +2 -0
  19. package/dist/errorAlign/__tests__/native.test.d.ts +2 -0
  20. package/dist/errorAlign/__tests__/native.test.js +107 -0
  21. package/dist/errorAlign/backtraceGraph.cjs +298 -0
  22. package/dist/errorAlign/backtraceGraph.d.cts +103 -0
  23. package/dist/errorAlign/backtraceGraph.d.ts +103 -0
  24. package/dist/errorAlign/backtraceGraph.js +270 -0
  25. package/dist/errorAlign/beamSearch.cjs +302 -0
  26. package/dist/errorAlign/beamSearch.d.cts +53 -0
  27. package/dist/errorAlign/beamSearch.d.ts +53 -0
  28. package/dist/errorAlign/beamSearch.js +268 -0
  29. package/dist/errorAlign/core.cjs +33 -0
  30. package/dist/errorAlign/core.d.cts +5 -0
  31. package/dist/errorAlign/core.d.ts +5 -0
  32. package/dist/errorAlign/core.js +11 -0
  33. package/dist/errorAlign/editDistance.cjs +115 -0
  34. package/dist/errorAlign/editDistance.d.cts +46 -0
  35. package/dist/errorAlign/editDistance.d.ts +46 -0
  36. package/dist/errorAlign/editDistance.js +90 -0
  37. package/dist/errorAlign/errorAlign.cjs +159 -0
  38. package/dist/errorAlign/errorAlign.d.cts +15 -0
  39. package/dist/errorAlign/errorAlign.d.ts +15 -0
  40. package/dist/errorAlign/errorAlign.js +145 -0
  41. package/dist/errorAlign/graphMetadata.cjs +97 -0
  42. package/dist/errorAlign/graphMetadata.d.cts +44 -0
  43. package/dist/errorAlign/graphMetadata.d.ts +44 -0
  44. package/dist/errorAlign/graphMetadata.js +64 -0
  45. package/dist/errorAlign/hash.cjs +173 -0
  46. package/dist/errorAlign/hash.d.cts +28 -0
  47. package/dist/errorAlign/hash.d.ts +28 -0
  48. package/dist/errorAlign/hash.js +150 -0
  49. package/dist/errorAlign/native.cjs +60 -0
  50. package/dist/errorAlign/native.d.cts +18 -0
  51. package/dist/errorAlign/native.d.ts +18 -0
  52. package/dist/errorAlign/native.js +24 -0
  53. package/dist/errorAlign/node-gyp-build.d.cjs +1 -0
  54. package/dist/errorAlign/node-gyp-build.d.d.cts +3 -0
  55. package/dist/errorAlign/node-gyp-build.d.d.ts +3 -0
  56. package/dist/errorAlign/node-gyp-build.d.js +0 -0
  57. package/dist/errorAlign/pathToAlignment.cjs +122 -0
  58. package/dist/errorAlign/pathToAlignment.d.cts +11 -0
  59. package/dist/errorAlign/pathToAlignment.d.ts +11 -0
  60. package/dist/errorAlign/pathToAlignment.js +89 -0
  61. package/dist/errorAlign/utils.cjs +301 -0
  62. package/dist/errorAlign/utils.d.cts +107 -0
  63. package/dist/errorAlign/utils.d.ts +107 -0
  64. package/dist/errorAlign/utils.js +248 -0
  65. package/dist/index.d.cts +1 -0
  66. package/dist/index.d.ts +1 -0
  67. package/dist/markup/__tests__/markup.test.cjs +108 -81
  68. package/dist/markup/__tests__/markup.test.js +109 -82
  69. package/dist/markup/__tests__/parseDom.test.cjs +112 -0
  70. package/dist/markup/__tests__/parseDom.test.d.cts +2 -0
  71. package/dist/markup/__tests__/parseDom.test.d.ts +2 -0
  72. package/dist/markup/__tests__/parseDom.test.js +89 -0
  73. package/dist/markup/__tests__/serializeDom.test.cjs +120 -0
  74. package/dist/markup/__tests__/serializeDom.test.d.cts +2 -0
  75. package/dist/markup/__tests__/serializeDom.test.d.ts +2 -0
  76. package/dist/markup/__tests__/serializeDom.test.js +97 -0
  77. package/dist/markup/__tests__/transform.test.cjs +122 -0
  78. package/dist/markup/__tests__/transform.test.d.cts +2 -0
  79. package/dist/markup/__tests__/transform.test.d.ts +2 -0
  80. package/dist/markup/__tests__/transform.test.js +99 -0
  81. package/dist/markup/map.cjs +261 -0
  82. package/dist/markup/map.d.cts +50 -0
  83. package/dist/markup/map.d.ts +50 -0
  84. package/dist/markup/map.js +236 -0
  85. package/dist/markup/markup.cjs +23 -201
  86. package/dist/markup/markup.d.cts +5 -9
  87. package/dist/markup/markup.d.ts +5 -9
  88. package/dist/markup/markup.js +24 -203
  89. package/dist/markup/model.cjs +172 -0
  90. package/dist/markup/model.d.cts +57 -0
  91. package/dist/markup/model.d.ts +57 -0
  92. package/dist/markup/model.js +145 -0
  93. package/dist/markup/parseDom.cjs +59 -0
  94. package/dist/markup/parseDom.d.cts +7 -0
  95. package/dist/markup/parseDom.d.ts +7 -0
  96. package/dist/markup/parseDom.js +35 -0
  97. package/dist/markup/segmentation.cjs +11 -57
  98. package/dist/markup/segmentation.d.cts +6 -2
  99. package/dist/markup/segmentation.d.ts +6 -2
  100. package/dist/markup/segmentation.js +11 -58
  101. package/dist/markup/serializeDom.cjs +87 -0
  102. package/dist/markup/serializeDom.d.cts +7 -0
  103. package/dist/markup/serializeDom.d.ts +7 -0
  104. package/dist/markup/serializeDom.js +63 -0
  105. package/dist/markup/transform.cjs +92 -0
  106. package/dist/markup/transform.d.cts +11 -0
  107. package/dist/markup/transform.d.ts +11 -0
  108. package/dist/markup/transform.js +71 -0
  109. package/dist/types/node-gyp-build.d.cjs +1 -0
  110. package/dist/types/node-gyp-build.d.d.cts +3 -0
  111. package/dist/types/node-gyp-build.d.d.ts +3 -0
  112. package/dist/types/node-gyp-build.d.js +0 -0
  113. package/package.json +11 -4
@@ -1,53 +1,68 @@
1
1
  import assert from "node:assert";
2
+ import { mkdir, readFile, writeFile } from "node:fs/promises";
3
+ import { dirname, join } from "node:path";
2
4
  import { describe, it } from "node:test";
3
5
  import { Epub } from "@storyteller-platform/epub";
4
- import { appendTextNode, markupChapter } from "../markup.js";
6
+ import { markupChapter } from "../markup.js";
5
7
  import { getXhtmlSegmentation } from "../segmentation.js";
6
- void describe("appendTextNode", () => {
7
- void it("can append text nodes to empty parents", () => {
8
- const input = [];
9
- appendTextNode("chapter_one", input, "test", [], /* @__PURE__ */ new Set());
10
- assert.deepStrictEqual(input, [{ "#text": "test" }]);
11
- });
12
- void it("can append text nodes with marks", () => {
13
- const input = [];
14
- appendTextNode(
15
- "chapter_one",
16
- input,
17
- "test",
18
- [{ elementName: "a", attributes: { "@_href": "#" } }],
19
- /* @__PURE__ */ new Set()
20
- );
21
- assert.deepStrictEqual(input, [
22
- { a: [{ "#text": "test" }], ":@": { "@_href": "#" } }
23
- ]);
24
- });
25
- void it("can wrap text nodes with sentence spans", () => {
26
- const input = [];
27
- appendTextNode("chapter_one", input, "test", [], /* @__PURE__ */ new Set(), 0);
28
- assert.deepStrictEqual(input, [
29
- {
30
- span: [{ "#text": "test" }],
31
- ":@": { "@_id": "chapter_one-s0" }
8
+ function sanitizeFilename(title) {
9
+ return title.replace(/[/\\:*?"<>|]/g, "-").replace(/\s+/g, " ").trim().replace(/[.]+$/, "");
10
+ }
11
+ function truncate(input, byteLimit, suffix = "") {
12
+ const normalized = input.normalize("NFC");
13
+ const encoder = new TextEncoder();
14
+ let result = "";
15
+ for (const char of normalized) {
16
+ const withSuffix = result + char + suffix;
17
+ const byteLength = encoder.encode(withSuffix).length;
18
+ if (byteLength > byteLimit) break;
19
+ result += char;
20
+ }
21
+ return result + suffix;
22
+ }
23
+ function getSafeFilepathSegment(name, suffix = "") {
24
+ return truncate(sanitizeFilename(name), 150, suffix);
25
+ }
26
+ async function assertMarkupSnapshot(context, output) {
27
+ const snapshotFilename = getSafeFilepathSegment(context.fullName, ".snapshot");
28
+ const snapshotFilepath = join(
29
+ "src",
30
+ "markup",
31
+ "__snapshots__",
32
+ snapshotFilename
33
+ );
34
+ if (process.env["UPDATE_SNAPSHOTS"]) {
35
+ await mkdir(dirname(snapshotFilepath), { recursive: true });
36
+ await writeFile(snapshotFilepath, output, { encoding: "utf-8" });
37
+ return;
38
+ }
39
+ try {
40
+ const existingSnapshot = await readFile(snapshotFilepath, {
41
+ encoding: "utf-8"
42
+ });
43
+ const existingLines = existingSnapshot.split("\n");
44
+ const newLines = output.split("\n");
45
+ for (let i = 0; i < existingLines.length; i++) {
46
+ const existingLine = existingLines[i];
47
+ const newLine = newLines[i];
48
+ if (existingLine !== newLine) {
49
+ assert.strictEqual(
50
+ newLines.slice(Math.max(0, i - 5), i + 5),
51
+ existingLines.slice(Math.max(0, i - 5), i + 5)
52
+ );
32
53
  }
33
- ]);
34
- });
35
- void it("can join text nodes with the same sentence ids", () => {
36
- const input = [
37
- {
38
- span: [{ "#text": "test" }],
39
- ":@": { "@_id": "chapter_one-s0" }
40
- }
41
- ];
42
- appendTextNode("chapter_one", input, "test", [], /* @__PURE__ */ new Set(), 0);
43
- assert.deepStrictEqual(input, [
44
- {
45
- span: [{ "#text": "test" }, { "#text": "test" }],
46
- ":@": { "@_id": "chapter_one-s0" }
47
- }
48
- ]);
49
- });
50
- });
54
+ }
55
+ } catch (e) {
56
+ if (e instanceof assert.AssertionError) {
57
+ throw e;
58
+ }
59
+ throw new assert.AssertionError({
60
+ actual: output,
61
+ expected: "",
62
+ diff: "simple"
63
+ });
64
+ }
65
+ }
51
66
  void describe("markupChapter", () => {
52
67
  void it("can tag sentences", async (t) => {
53
68
  const input = Epub.xhtmlParser.parse(
@@ -86,16 +101,17 @@ void describe("markupChapter", () => {
86
101
  </html>
87
102
  `
88
103
  );
89
- const segmentation = await getXhtmlSegmentation(
104
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
90
105
  Epub.getXhtmlBody(input),
91
106
  {}
92
107
  );
93
108
  const { markedUp: output } = markupChapter(
94
109
  "chapter_one",
95
110
  input,
96
- segmentation
111
+ segmentation,
112
+ mapping
97
113
  );
98
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
114
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
99
115
  });
100
116
  void it("can tag sentences with formatting marks", async (t) => {
101
117
  const input = Epub.xhtmlParser.parse(
@@ -119,16 +135,17 @@ void describe("markupChapter", () => {
119
135
  </html>
120
136
  `
121
137
  );
122
- const segmentation = await getXhtmlSegmentation(
138
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
123
139
  Epub.getXhtmlBody(input),
124
140
  {}
125
141
  );
126
142
  const { markedUp: output } = markupChapter(
127
143
  "chapter_one",
128
144
  input,
129
- segmentation
145
+ segmentation,
146
+ mapping
130
147
  );
131
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
148
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
132
149
  });
133
150
  void it("can tag sentences with formatting marks that overlap sentence boundaries", async (t) => {
134
151
  const input = Epub.xhtmlParser.parse(
@@ -152,16 +169,17 @@ void describe("markupChapter", () => {
152
169
  </html>
153
170
  `
154
171
  );
155
- const segmentation = await getXhtmlSegmentation(
172
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
156
173
  Epub.getXhtmlBody(input),
157
174
  {}
158
175
  );
159
176
  const { markedUp: output } = markupChapter(
160
177
  "chapter_one",
161
178
  input,
162
- segmentation
179
+ segmentation,
180
+ mapping
163
181
  );
164
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
182
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
165
183
  });
166
184
  void it("can tag sentences with nested formatting marks", async (t) => {
167
185
  const input = Epub.xhtmlParser.parse(
@@ -185,16 +203,17 @@ void describe("markupChapter", () => {
185
203
  </html>
186
204
  `
187
205
  );
188
- const segmentation = await getXhtmlSegmentation(
206
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
189
207
  Epub.getXhtmlBody(input),
190
208
  {}
191
209
  );
192
210
  const { markedUp: output } = markupChapter(
193
211
  "chapter_one",
194
212
  input,
195
- segmentation
213
+ segmentation,
214
+ mapping
196
215
  );
197
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
216
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
198
217
  });
199
218
  void it("can tag sentences with atoms", async (t) => {
200
219
  const input = Epub.xhtmlParser.parse(
@@ -218,16 +237,17 @@ void describe("markupChapter", () => {
218
237
  </html>
219
238
  `
220
239
  );
221
- const segmentation = await getXhtmlSegmentation(
240
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
222
241
  Epub.getXhtmlBody(input),
223
242
  {}
224
243
  );
225
244
  const { markedUp: output } = markupChapter(
226
245
  "chapter_one",
227
246
  input,
228
- segmentation
247
+ segmentation,
248
+ mapping
229
249
  );
230
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
250
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
231
251
  });
232
252
  void it("can tag sentences in nested textblocks", async (t) => {
233
253
  const input = Epub.xhtmlParser.parse(
@@ -261,16 +281,17 @@ void describe("markupChapter", () => {
261
281
  </html>
262
282
  `
263
283
  );
264
- const segmentation = await getXhtmlSegmentation(
284
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
265
285
  Epub.getXhtmlBody(input),
266
286
  {}
267
287
  );
268
288
  const { markedUp: output } = markupChapter(
269
289
  "chapter_one",
270
290
  input,
271
- segmentation
291
+ segmentation,
292
+ mapping
272
293
  );
273
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
294
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
274
295
  });
275
296
  void it("can tag sentences that cross textblock boundaries", async (t) => {
276
297
  const input = Epub.xhtmlParser.parse(
@@ -297,18 +318,19 @@ void describe("markupChapter", () => {
297
318
  </html>
298
319
  `
299
320
  );
300
- const segmentation = await getXhtmlSegmentation(
321
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
301
322
  Epub.getXhtmlBody(input),
302
323
  {}
303
324
  );
304
325
  const { markedUp: output } = markupChapter(
305
326
  "chapter_one",
306
327
  input,
307
- segmentation
328
+ segmentation,
329
+ mapping
308
330
  );
309
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
331
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
310
332
  });
311
- void it("can handle soft page breaks", async (t) => {
333
+ void it.only("can handle soft page breaks", async (t) => {
312
334
  const input = Epub.xhtmlParser.parse(
313
335
  /* xml */
314
336
  `
@@ -340,16 +362,17 @@ void describe("markupChapter", () => {
340
362
  </body>
341
363
  </html>`
342
364
  );
343
- const segmentation = await getXhtmlSegmentation(
365
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
344
366
  Epub.getXhtmlBody(input),
345
367
  {}
346
368
  );
347
369
  const { markedUp: output } = markupChapter(
348
370
  "chapter_one",
349
371
  input,
350
- segmentation
372
+ segmentation,
373
+ mapping
351
374
  );
352
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
375
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
353
376
  });
354
377
  void it("can handle boolean-like text values", async (t) => {
355
378
  const input = Epub.xhtmlParser.parse(`
@@ -361,16 +384,17 @@ void describe("markupChapter", () => {
361
384
  </body>
362
385
  </html>
363
386
  `);
364
- const segmentation = await getXhtmlSegmentation(
387
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
365
388
  Epub.getXhtmlBody(input),
366
389
  {}
367
390
  );
368
391
  const { markedUp: output } = markupChapter(
369
392
  "chapter_one",
370
393
  input,
371
- segmentation
394
+ segmentation,
395
+ mapping
372
396
  );
373
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
397
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
374
398
  });
375
399
  void it("can handle number-like text values", async (t) => {
376
400
  const input = Epub.xhtmlParser.parse(`
@@ -382,16 +406,17 @@ void describe("markupChapter", () => {
382
406
  </body>
383
407
  </html>
384
408
  `);
385
- const segmentation = await getXhtmlSegmentation(
409
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
386
410
  Epub.getXhtmlBody(input),
387
411
  {}
388
412
  );
389
413
  const { markedUp: output } = markupChapter(
390
414
  "chapter_one",
391
415
  input,
392
- segmentation
416
+ segmentation,
417
+ mapping
393
418
  );
394
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
419
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
395
420
  });
396
421
  void it("can handle null-like text values", async (t) => {
397
422
  const input = Epub.xhtmlParser.parse(`
@@ -403,16 +428,17 @@ void describe("markupChapter", () => {
403
428
  </body>
404
429
  </html>
405
430
  `);
406
- const segmentation = await getXhtmlSegmentation(
431
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
407
432
  Epub.getXhtmlBody(input),
408
433
  {}
409
434
  );
410
435
  const { markedUp: output } = markupChapter(
411
436
  "chapter_one",
412
437
  input,
413
- segmentation
438
+ segmentation,
439
+ mapping
414
440
  );
415
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
441
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
416
442
  });
417
443
  void it("can preserve nbsp entities", async (t) => {
418
444
  const input = Epub.xhtmlParser.parse(`
@@ -427,15 +453,16 @@ void describe("markupChapter", () => {
427
453
  </body>
428
454
  </html>
429
455
  `);
430
- const segmentation = await getXhtmlSegmentation(
456
+ const { result: segmentation, mapping } = await getXhtmlSegmentation(
431
457
  Epub.getXhtmlBody(input),
432
458
  {}
433
459
  );
434
460
  const { markedUp: output } = markupChapter(
435
461
  "chapter_one",
436
462
  input,
437
- segmentation
463
+ segmentation,
464
+ mapping
438
465
  );
439
- t.assert.snapshot(Epub.xhtmlBuilder.build(output).split("\n"));
466
+ await assertMarkupSnapshot(t, Epub.xhtmlBuilder.build(output));
440
467
  });
441
468
  });
@@ -0,0 +1,112 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __copyProps = (to, from, except, desc) => {
9
+ if (from && typeof from === "object" || typeof from === "function") {
10
+ for (let key of __getOwnPropNames(from))
11
+ if (!__hasOwnProp.call(to, key) && key !== except)
12
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
13
+ }
14
+ return to;
15
+ };
16
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
17
+ // If the importer is in node compatibility mode or this is not an ESM
18
+ // file that has been converted to a CommonJS file using a Babel-
19
+ // compatible transform (i.e. "__esModule" has not been set), then set
20
+ // "default" to the CommonJS "module.exports" for node compatibility.
21
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
22
+ mod
23
+ ));
24
+ var import_node_assert = __toESM(require("node:assert"), 1);
25
+ var import_node_test = require("node:test");
26
+ var import_epub = require("@storyteller-platform/epub");
27
+ var import_model = require("../model.cjs");
28
+ var import_parseDom = require("../parseDom.cjs");
29
+ void (0, import_node_test.describe)("parseDom", () => {
30
+ void (0, import_node_test.it)("should parse hierarchical XML", () => {
31
+ const result = (0, import_parseDom.parseDom)([
32
+ import_epub.Epub.createXmlElement("p", {}, [
33
+ import_epub.Epub.createXmlElement("span", {}, [
34
+ import_epub.Epub.createXmlTextNode("Hello, world!")
35
+ ])
36
+ ])
37
+ ]);
38
+ import_node_assert.default.deepStrictEqual(
39
+ result,
40
+ new import_model.Root([
41
+ new import_model.Node("p", {}, [new import_model.TextNode("Hello, world!", [new import_model.Mark("span")])])
42
+ ])
43
+ );
44
+ });
45
+ void (0, import_node_test.it)("should parse nested marks", () => {
46
+ const result = (0, import_parseDom.parseDom)([
47
+ import_epub.Epub.createXmlElement("p", {}, [
48
+ import_epub.Epub.createXmlElement("strong", {}, [
49
+ import_epub.Epub.createXmlElement("em", {}, [
50
+ import_epub.Epub.createXmlTextNode("Hello, world!")
51
+ ])
52
+ ])
53
+ ])
54
+ ]);
55
+ import_node_assert.default.deepStrictEqual(
56
+ result,
57
+ new import_model.Root([
58
+ new import_model.Node("p", {}, [
59
+ new import_model.TextNode("Hello, world!", [new import_model.Mark("strong"), new import_model.Mark("em")])
60
+ ])
61
+ ])
62
+ );
63
+ });
64
+ void (0, import_node_test.it)("should preserve attributes", () => {
65
+ const result = (0, import_parseDom.parseDom)([
66
+ import_epub.Epub.createXmlElement("p", { id: "p1" }, [
67
+ import_epub.Epub.createXmlElement("span", { class: "red" }, [
68
+ import_epub.Epub.createXmlTextNode("Hello, world!")
69
+ ])
70
+ ])
71
+ ]);
72
+ import_node_assert.default.deepStrictEqual(
73
+ result,
74
+ new import_model.Root([
75
+ new import_model.Node("p", { id: "p1" }, [
76
+ new import_model.TextNode("Hello, world!", [new import_model.Mark("span", { class: "red" })])
77
+ ])
78
+ ])
79
+ );
80
+ });
81
+ void (0, import_node_test.it)("should preserve leaf nodes", () => {
82
+ const result = (0, import_parseDom.parseDom)([
83
+ import_epub.Epub.createXmlElement("p", {}, [
84
+ import_epub.Epub.createXmlElement("span", {}, [
85
+ import_epub.Epub.createXmlTextNode("Hello,"),
86
+ import_epub.Epub.createXmlElement("span", { class: "x-ebookmaker-pageno" }, [
87
+ import_epub.Epub.createXmlElement("a", { id: "Page_v" })
88
+ ]),
89
+ import_epub.Epub.createXmlTextNode(" world!")
90
+ ])
91
+ ])
92
+ ]);
93
+ import_node_assert.default.deepStrictEqual(
94
+ result,
95
+ new import_model.Root([
96
+ new import_model.Node("p", {}, [
97
+ new import_model.TextNode("Hello,", [new import_model.Mark("span")]),
98
+ new import_model.Node(
99
+ "a",
100
+ { id: "Page_v" },
101
+ [],
102
+ [
103
+ new import_model.Mark("span"),
104
+ new import_model.Mark("span", { class: "x-ebookmaker-pageno" })
105
+ ]
106
+ ),
107
+ new import_model.TextNode(" world!", [new import_model.Mark("span")])
108
+ ])
109
+ ])
110
+ );
111
+ });
112
+ });
@@ -0,0 +1,2 @@
1
+
2
+ export { }
@@ -0,0 +1,2 @@
1
+
2
+ export { }
@@ -0,0 +1,89 @@
1
+ import assert from "node:assert";
2
+ import { describe, it } from "node:test";
3
+ import { Epub } from "@storyteller-platform/epub";
4
+ import { Mark, Node, Root, TextNode } from "../model.js";
5
+ import { parseDom } from "../parseDom.js";
6
+ void describe("parseDom", () => {
7
+ void it("should parse hierarchical XML", () => {
8
+ const result = parseDom([
9
+ Epub.createXmlElement("p", {}, [
10
+ Epub.createXmlElement("span", {}, [
11
+ Epub.createXmlTextNode("Hello, world!")
12
+ ])
13
+ ])
14
+ ]);
15
+ assert.deepStrictEqual(
16
+ result,
17
+ new Root([
18
+ new Node("p", {}, [new TextNode("Hello, world!", [new Mark("span")])])
19
+ ])
20
+ );
21
+ });
22
+ void it("should parse nested marks", () => {
23
+ const result = parseDom([
24
+ Epub.createXmlElement("p", {}, [
25
+ Epub.createXmlElement("strong", {}, [
26
+ Epub.createXmlElement("em", {}, [
27
+ Epub.createXmlTextNode("Hello, world!")
28
+ ])
29
+ ])
30
+ ])
31
+ ]);
32
+ assert.deepStrictEqual(
33
+ result,
34
+ new Root([
35
+ new Node("p", {}, [
36
+ new TextNode("Hello, world!", [new Mark("strong"), new Mark("em")])
37
+ ])
38
+ ])
39
+ );
40
+ });
41
+ void it("should preserve attributes", () => {
42
+ const result = parseDom([
43
+ Epub.createXmlElement("p", { id: "p1" }, [
44
+ Epub.createXmlElement("span", { class: "red" }, [
45
+ Epub.createXmlTextNode("Hello, world!")
46
+ ])
47
+ ])
48
+ ]);
49
+ assert.deepStrictEqual(
50
+ result,
51
+ new Root([
52
+ new Node("p", { id: "p1" }, [
53
+ new TextNode("Hello, world!", [new Mark("span", { class: "red" })])
54
+ ])
55
+ ])
56
+ );
57
+ });
58
+ void it("should preserve leaf nodes", () => {
59
+ const result = parseDom([
60
+ Epub.createXmlElement("p", {}, [
61
+ Epub.createXmlElement("span", {}, [
62
+ Epub.createXmlTextNode("Hello,"),
63
+ Epub.createXmlElement("span", { class: "x-ebookmaker-pageno" }, [
64
+ Epub.createXmlElement("a", { id: "Page_v" })
65
+ ]),
66
+ Epub.createXmlTextNode(" world!")
67
+ ])
68
+ ])
69
+ ]);
70
+ assert.deepStrictEqual(
71
+ result,
72
+ new Root([
73
+ new Node("p", {}, [
74
+ new TextNode("Hello,", [new Mark("span")]),
75
+ new Node(
76
+ "a",
77
+ { id: "Page_v" },
78
+ [],
79
+ [
80
+ new Mark("span"),
81
+ new Mark("span", { class: "x-ebookmaker-pageno" })
82
+ ]
83
+ ),
84
+ new TextNode(" world!", [new Mark("span")])
85
+ ])
86
+ ])
87
+ );
88
+ });
89
+ });