@hcengineering/text-markdown 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/parser.js ADDED
@@ -0,0 +1,724 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
9
+ var __export = (target, all) => {
10
+ for (var name in all)
11
+ __defProp(target, name, { get: all[name], enumerable: true });
12
+ };
13
+ var __copyProps = (to, from, except, desc) => {
14
+ if (from && typeof from === "object" || typeof from === "function") {
15
+ for (let key of __getOwnPropNames(from))
16
+ if (!__hasOwnProp.call(to, key) && key !== except)
17
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
18
+ }
19
+ return to;
20
+ };
21
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
22
+ // If the importer is in node compatibility mode or this is not an ESM
23
+ // file that has been converted to a CommonJS file using a Babel-
24
+ // compatible transform (i.e. "__esModule" has not been set), then set
25
+ // "default" to the CommonJS "module.exports" for node compatibility.
26
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
27
+ mod
28
+ ));
29
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
30
+ var parser_exports = {};
31
+ __export(parser_exports, {
32
+ MarkdownParser: () => MarkdownParser,
33
+ isInlineToken: () => isInlineToken,
34
+ isListItemToken: () => isListItemToken,
35
+ isParagraphToken: () => isParagraphToken
36
+ });
37
+ module.exports = __toCommonJS(parser_exports);
38
+ var import_text_core = require("@hcengineering/text-core");
39
+ var import_text_html = require("@hcengineering/text-html");
40
+ var import_markdown_it = __toESM(require("markdown-it"));
41
+ var import_marks = require("./marks");
42
+ var import_node = require("./node");
43
function readSpec(rule, tok, state) {
  // A spec entry is either a constant node type or a resolver function of
  // (token, state); normalize both shapes to the concrete value.
  return typeof rule === "function" ? rule(tok, state) : rule;
}
49
+ __name(readSpec, "readSpec");
50
+ function isText(a, b) {
51
+ return (a.type === import_text_core.MarkupNodeType.text || a.type === import_text_core.MarkupNodeType.reference) && b.type === import_text_core.MarkupNodeType.text;
52
+ }
53
+ __name(isText, "isText");
54
+ function maybeMerge(a, b) {
55
+ if (isText(a, b) && ((0, import_marks.sameSet)(a.marks, b.marks) || a.text === "" && (a.marks?.length ?? 0) === 0)) {
56
+ if (a.text === "" && (a.marks?.length ?? 0) === 0) {
57
+ return { ...b };
58
+ }
59
+ return { ...a, text: (a.text ?? "") + (b.text ?? "") };
60
+ }
61
+ return void 0;
62
+ }
63
+ __name(maybeMerge, "maybeMerge");
64
+ class MarkdownParseState {
65
+ constructor(tokenHandlers2, refUrl, imageUrl) {
66
+ this.refUrl = refUrl;
67
+ this.imageUrl = imageUrl;
68
+ this.stack = [{ type: import_text_core.MarkupNodeType.doc, attrs: {}, content: [] }];
69
+ this.marks = [];
70
+ this.tokenHandlers = tokenHandlers2;
71
+ }
72
+ static {
73
+ __name(this, "MarkdownParseState");
74
+ }
75
+ stack;
76
+ marks;
77
+ tokenHandlers;
78
+ top() {
79
+ return this.stack[this.stack.length - 1];
80
+ }
81
+ push(elt) {
82
+ if (this.stack.length > 0) {
83
+ const tt = this.top();
84
+ tt?.content.push(elt);
85
+ }
86
+ }
87
+ mergeWithLast(nodes, node) {
88
+ const last = nodes[nodes.length - 1];
89
+ let merged;
90
+ if (last !== void 0 && (merged = maybeMerge(last, node)) !== void 0) {
91
+ nodes[nodes.length - 1] = merged;
92
+ return true;
93
+ }
94
+ return false;
95
+ }
96
+ // Adds the given text to the current position in the document,
97
+ // using the current marks as styling.
98
+ addText(text) {
99
+ const top = this.top();
100
+ if (text === void 0 || top === void 0 || text.length === 0) {
101
+ return;
102
+ }
103
+ const node = {
104
+ type: import_text_core.MarkupNodeType.text,
105
+ text
106
+ };
107
+ if (this.marks !== void 0) {
108
+ node.marks = this.marks;
109
+ }
110
+ const nodes = top.content;
111
+ if (!this.mergeWithLast(nodes, node)) {
112
+ nodes.push(node);
113
+ }
114
+ }
115
+ // Adds the given mark to the set of active marks.
116
+ openMark(mark) {
117
+ this.marks = (0, import_marks.addToSet)(mark, this.marks);
118
+ }
119
+ // Removes the given mark from the set of active marks.
120
+ closeMark(mark) {
121
+ this.marks = (0, import_marks.removeFromSet)(mark, this.marks);
122
+ }
123
+ parseTokens(toks) {
124
+ const _toks = [...toks ?? []];
125
+ while (_toks.length > 0) {
126
+ const tok = _toks.shift();
127
+ if (tok === void 0) {
128
+ break;
129
+ }
130
+ if (tok.type === "html_inline" && tok.content.trim() === "<sub>") {
131
+ while (_toks.length > 0) {
132
+ const _tok = _toks.shift();
133
+ if (_tok !== void 0) {
134
+ tok.content += _tok.content;
135
+ if (_tok.type === "html_inline" && _tok.content.trim() === "</sub>") {
136
+ break;
137
+ }
138
+ }
139
+ }
140
+ }
141
+ const handler = this.tokenHandlers[tok.type];
142
+ if (handler === void 0) {
143
+ throw new Error(`Token type '${String(tok.type)} not supported by Markdown parser`);
144
+ }
145
+ handler(this, tok);
146
+ }
147
+ }
148
+ // Add a node at the current position.
149
+ addNode(type, attrs2, content = []) {
150
+ const node = { type, content };
151
+ if (Object.keys(attrs2 ?? {}).length > 0) {
152
+ node.attrs = attrs2;
153
+ }
154
+ if (this.marks.length > 0) {
155
+ node.marks = this.marks;
156
+ }
157
+ this.push(node);
158
+ return node;
159
+ }
160
+ // Wrap subsequent content in a node of the given type.
161
+ openNode(type, attrs2) {
162
+ this.stack.push({ type, attrs: attrs2, content: [] });
163
+ }
164
+ // Close and return the node that is currently on top of the stack.
165
+ closeNode() {
166
+ if (this.marks.length > 0) this.marks = [];
167
+ const info = this.stack.pop();
168
+ if (info !== void 0) {
169
+ return this.addNode(info.type, info.attrs, info.content);
170
+ }
171
+ return { type: import_text_core.MarkupNodeType.doc };
172
+ }
173
+ }
174
function attrs(spec, token, state) {
  // Resolve a node/mark attribute object via the spec's optional getAttrs
  // hook; a missing hook or a nullish result yields an empty attrs object.
  const computed = spec.getAttrs?.(token, state);
  return computed ?? {};
}
177
+ __name(attrs, "attrs");
178
function noCloseToken(spec, type) {
  // A token is self-contained (has no matching *_close token) when the spec
  // says so, or when it is one of markdown-it's code tokens.
  // Fix: the original used `.indexOf(type) > 0`, which silently excluded
  // "code_inline" at index 0; membership needs `includes` (i.e. `>= 0`).
  return (spec.noCloseToken ?? false) || ["code_inline", "code_block", "fence"].includes(type);
}
181
+ __name(noCloseToken, "noCloseToken");
182
function withoutTrailingNewline(str) {
  // Strip at most one trailing newline; any other string is returned as-is.
  return str.endsWith("\n") ? str.slice(0, -1) : str;
}
185
+ __name(withoutTrailingNewline, "withoutTrailingNewline");
186
+ function addSpecBlock(handlers, spec, type, specBlock) {
187
+ if (noCloseToken(spec, type)) {
188
+ handlers[type] = newSimpleBlockHandler(specBlock, spec);
189
+ } else {
190
+ handlers[type + "_open"] = (state, tok) => {
191
+ state.openNode(readSpec(specBlock, tok, state), attrs(spec, tok, state));
192
+ if (spec.wrapContent === true) {
193
+ state.openNode(import_text_core.MarkupNodeType.paragraph, {});
194
+ }
195
+ };
196
+ handlers[type + "_close"] = (state) => {
197
+ if (spec.wrapContent === true) {
198
+ state.closeNode();
199
+ }
200
+ state.closeNode();
201
+ };
202
+ }
203
+ }
204
+ __name(addSpecBlock, "addSpecBlock");
205
+ function newSimpleBlockHandler(specBlock, spec) {
206
+ return (state, tok) => {
207
+ state.openNode(readSpec(specBlock, tok, state), attrs(spec, tok, state));
208
+ state.addText(withoutTrailingNewline(tok.content));
209
+ state.closeNode();
210
+ };
211
+ }
212
+ __name(newSimpleBlockHandler, "newSimpleBlockHandler");
213
+ function addSpecMark(handlers, spec, type, specMark) {
214
+ if (noCloseToken(spec, type)) {
215
+ handlers[type] = newSimpleMarkHandler(spec, specMark);
216
+ } else {
217
+ handlers[type + "_open"] = (state, tok) => {
218
+ state.openMark({ type: specMark, attrs: attrs(spec, tok, state) });
219
+ };
220
+ handlers[type + "_close"] = (state) => {
221
+ state.closeMark(specMark);
222
+ };
223
+ }
224
+ }
225
+ __name(addSpecMark, "addSpecMark");
226
+ function addSpecialRule(handlers, spec, type) {
227
+ handlers[type + "_open"] = (state, tok) => {
228
+ const type2 = spec.type(state, tok);
229
+ if (type2.node) {
230
+ state.openNode(type2.type, spec.getAttrs?.(tok, state) ?? {});
231
+ } else {
232
+ state.openMark({ type: type2.type, attrs: spec.getAttrs?.(tok, state) ?? {} });
233
+ }
234
+ };
235
+ handlers[type + "_close"] = (state, tok) => {
236
+ const type2 = spec.type(state, tok);
237
+ if (type2.node) {
238
+ state.closeNode();
239
+ } else {
240
+ state.closeMark(type2.type);
241
+ }
242
+ };
243
+ }
244
+ __name(addSpecialRule, "addSpecialRule");
245
function addIgnoreRule(handlers, spec, type) {
  // Register no-op handlers so these structural tokens (e.g. thead/tbody)
  // are consumed without emitting any markup nodes.
  const noop = () => {};
  handlers[type + "_open"] = noop;
  handlers[type + "_close"] = noop;
}
251
+ __name(addIgnoreRule, "addIgnoreRule");
252
+ function newSimpleMarkHandler(spec, specMark) {
253
+ return (state, tok) => {
254
+ state.openMark({ attrs: attrs(spec, tok, state), type: specMark });
255
+ state.addText(withoutTrailingNewline(tok.content));
256
+ state.closeMark(specMark);
257
+ };
258
+ }
259
+ __name(newSimpleMarkHandler, "newSimpleMarkHandler");
260
+ function tokenHandlers(tokensBlock2, tokensNode2, tokensMark2, specialRules, ignoreRules, htmlParser) {
261
+ const handlers = {};
262
+ Object.entries(tokensBlock2).forEach(([type, spec]) => {
263
+ addSpecBlock(handlers, spec, type, spec.block);
264
+ });
265
+ Object.entries(tokensNode2).forEach(([type, spec]) => {
266
+ addSpecNode(handlers, type, spec);
267
+ });
268
+ Object.entries(tokensMark2).forEach(([type, spec]) => {
269
+ addSpecMark(handlers, spec, type, spec.mark);
270
+ });
271
+ Object.entries(specialRules).forEach(([type, spec]) => {
272
+ addSpecialRule(handlers, spec, type);
273
+ });
274
+ Object.entries(ignoreRules).forEach(([type, spec]) => {
275
+ addIgnoreRule(handlers, spec, type);
276
+ });
277
+ handlers.html_inline = (state, tok) => {
278
+ try {
279
+ const top = state.top();
280
+ if (tok.content.trim() === "</a>" && top?.type === import_text_core.MarkupNodeType.embed) {
281
+ top.content = [];
282
+ state.closeNode();
283
+ return;
284
+ }
285
+ const markup = htmlParser(tok.content);
286
+ if (markup.content !== void 0) {
287
+ const shouldUnwrap = markup.content.length === 1 && markup.content[0].type === import_text_core.MarkupNodeType.paragraph && top?.type === import_text_core.MarkupNodeType.paragraph;
288
+ const content = (0, import_node.nodeContent)(shouldUnwrap ? markup.content[0] : markup);
289
+ for (const c of content) {
290
+ if (c.type === import_text_core.MarkupNodeType.embed) {
291
+ state.openNode(import_text_core.MarkupNodeType.embed, c.attrs ?? {});
292
+ continue;
293
+ }
294
+ state.push(c);
295
+ }
296
+ }
297
+ } catch (err) {
298
+ console.error(err);
299
+ state.addText(tok.content);
300
+ }
301
+ };
302
+ handlers.html_block = (state, tok) => {
303
+ try {
304
+ const model = htmlParser(tok.content);
305
+ const content = (0, import_node.nodeContent)(model);
306
+ for (const c of content) {
307
+ state.push(c);
308
+ }
309
+ } catch (err) {
310
+ console.error(err);
311
+ state.addText(tok.content);
312
+ }
313
+ };
314
+ addTextHandlers(handlers);
315
+ return handlers;
316
+ }
317
+ __name(tokenHandlers, "tokenHandlers");
318
+ function addTextHandlers(handlers) {
319
+ handlers.text = (state, tok) => {
320
+ state.addText(tok.content);
321
+ };
322
+ handlers.inline = (state, tok) => {
323
+ state.parseTokens(tok.children);
324
+ };
325
+ handlers.softbreak = (state) => {
326
+ state.addText("\n");
327
+ };
328
+ }
329
+ __name(addTextHandlers, "addTextHandlers");
330
+ function addSpecNode(handlers, type, spec) {
331
+ handlers[type] = (state, tok) => state.addNode(spec.node, attrs(spec, tok, state));
332
+ }
333
+ __name(addSpecNode, "addSpecNode");
334
function tokAttrGet(token, name) {
  // Prefer markdown-it's own lookup; fall back to scanning the raw attrs
  // array for attributes attrGet does not surface. Returns undefined when
  // the attribute is absent.
  const direct = token.attrGet(name);
  if (direct != null) {
    return direct;
  }
  const pair = (token.attrs ?? []).find(([key]) => key === name);
  return pair?.[1];
}
345
+ __name(tokAttrGet, "tokAttrGet");
346
function tokToAttrs(token, ...names) {
  // Collect the requested attributes that are actually present on the
  // token; absent attributes (attrGet returns null) are omitted entirely.
  const result = {};
  names.forEach((name) => {
    const value = token.attrGet(name);
    if (value !== null) {
      result[name] = value;
    }
  });
  return result;
}
356
+ __name(tokToAttrs, "tokToAttrs");
357
function todoItemMetaAttrsGet(tok) {
  // Copy the optional todo metadata attributes onto a fresh object,
  // skipping whichever of the two is not present on the token.
  const result = {};
  for (const name of ["userid", "todoid"]) {
    const value = tokAttrGet(tok, name);
    if (value !== void 0) {
      result[name] = value;
    }
  }
  return result;
}
369
+ __name(todoItemMetaAttrsGet, "todoItemMetaAttrsGet");
370
+ const tokensBlock = {
371
+ blockquote: { block: import_text_core.MarkupNodeType.blockquote },
372
+ paragraph: { block: import_text_core.MarkupNodeType.paragraph },
373
+ list_item: { block: import_text_core.MarkupNodeType.list_item },
374
+ task_item: { block: import_text_core.MarkupNodeType.taskItem, getAttrs: /* @__PURE__ */ __name((tok) => ({ "data-type": "taskItem" }), "getAttrs") },
375
+ bullet_list: {
376
+ block: import_text_core.MarkupNodeType.bullet_list,
377
+ getAttrs: /* @__PURE__ */ __name((tok) => ({
378
+ bullet: tok.markup
379
+ }), "getAttrs")
380
+ },
381
+ todo_list: {
382
+ block: import_text_core.MarkupNodeType.todoList,
383
+ getAttrs: /* @__PURE__ */ __name((tok) => ({
384
+ bullet: tok.markup
385
+ }), "getAttrs")
386
+ },
387
+ todo_item: {
388
+ block: import_text_core.MarkupNodeType.todoItem,
389
+ getAttrs: /* @__PURE__ */ __name((tok) => ({
390
+ checked: tokAttrGet(tok, "checked") === "true",
391
+ ...todoItemMetaAttrsGet(tok)
392
+ }), "getAttrs")
393
+ },
394
+ ordered_list: {
395
+ block: import_text_core.MarkupNodeType.ordered_list,
396
+ getAttrs: /* @__PURE__ */ __name((tok) => ({ order: tokAttrGet(tok, "start") ?? "1" }), "getAttrs")
397
+ },
398
+ task_list: {
399
+ block: import_text_core.MarkupNodeType.taskList,
400
+ getAttrs: /* @__PURE__ */ __name((tok) => ({ order: tokAttrGet(tok, "start") ?? "1", "data-type": "taskList" }), "getAttrs")
401
+ },
402
+ heading: {
403
+ block: import_text_core.MarkupNodeType.heading,
404
+ getAttrs: /* @__PURE__ */ __name((tok) => ({ level: Number(tok.tag.slice(1)), marker: tok.markup }), "getAttrs")
405
+ },
406
+ code_block: {
407
+ block: /* @__PURE__ */ __name((tok) => {
408
+ if (tok.info === "mermaid") {
409
+ return import_text_core.MarkupNodeType.mermaid;
410
+ }
411
+ return import_text_core.MarkupNodeType.code_block;
412
+ }, "block"),
413
+ getAttrs: /* @__PURE__ */ __name((tok) => {
414
+ return { language: tok.info ?? "" };
415
+ }, "getAttrs"),
416
+ noCloseToken: true
417
+ },
418
+ fence: {
419
+ block: /* @__PURE__ */ __name((tok) => {
420
+ if (tok.info === "mermaid") {
421
+ return import_text_core.MarkupNodeType.mermaid;
422
+ }
423
+ return import_text_core.MarkupNodeType.code_block;
424
+ }, "block"),
425
+ getAttrs: /* @__PURE__ */ __name((tok) => {
426
+ return { language: tok.info ?? "" };
427
+ }, "getAttrs"),
428
+ noCloseToken: true
429
+ },
430
+ sub: {
431
+ block: import_text_core.MarkupNodeType.subLink,
432
+ getAttrs: /* @__PURE__ */ __name((tok) => {
433
+ return { language: tok.info ?? "" };
434
+ }, "getAttrs"),
435
+ noCloseToken: false
436
+ },
437
+ table: {
438
+ block: import_text_core.MarkupNodeType.table,
439
+ noCloseToken: false
440
+ },
441
+ th: {
442
+ block: import_text_core.MarkupNodeType.table_header,
443
+ getAttrs: /* @__PURE__ */ __name((tok) => {
444
+ return {
445
+ colspan: Number(tok.attrGet("colspan") ?? "1"),
446
+ rowspan: Number(tok.attrGet("rowspan") ?? "1")
447
+ };
448
+ }, "getAttrs"),
449
+ wrapContent: true,
450
+ noCloseToken: false
451
+ },
452
+ tr: {
453
+ block: import_text_core.MarkupNodeType.table_row,
454
+ noCloseToken: false
455
+ },
456
+ td: {
457
+ block: import_text_core.MarkupNodeType.table_cell,
458
+ getAttrs: /* @__PURE__ */ __name((tok) => {
459
+ return {
460
+ colspan: Number(tok.attrGet("colspan") ?? "1"),
461
+ rowspan: Number(tok.attrGet("rowspan") ?? "1")
462
+ };
463
+ }, "getAttrs"),
464
+ wrapContent: true,
465
+ noCloseToken: false
466
+ }
467
+ };
468
const tokensNode = {
  hr: { node: import_text_core.MarkupNodeType.horizontal_rule },
  image: {
    node: import_text_core.MarkupNodeType.image,
    getAttrs: /* @__PURE__ */ __name((tok, state) => {
      const result = tokToAttrs(tok, "src", "title", "alt", "data");
      result.alt = convertStringLikeToken(tok, result.alt);
      // Fix: tokToAttrs omits absent attributes, so "src" may be undefined
      // on malformed tokens; calling startsWith on undefined would throw.
      // Only platform-hosted images (imageUrl prefix) carry metadata in
      // their query parameters.
      if (typeof result.src === "string" && typeof state.imageUrl === "string" && result.src.startsWith(state.imageUrl)) {
        const url = new URL(result.src);
        result["data-type"] = "image";
        const file = url.searchParams.get("file");
        if (file != null) {
          result["file-id"] = file;
        }
        const width = url.searchParams.get("width");
        if (width != null) {
          result.width = width;
        }
        const height = url.searchParams.get("height");
        if (height != null) {
          result.height = height;
        }
      }
      return result;
    }, "getAttrs")
  },
  hardbreak: { node: import_text_core.MarkupNodeType.hard_break }
};
496
+ const tokensMark = {
497
+ em: {
498
+ mark: import_text_core.MarkupMarkType.em,
499
+ getAttrs: /* @__PURE__ */ __name((tok, state) => {
500
+ return { marker: tok.markup };
501
+ }, "getAttrs")
502
+ },
503
+ bold: {
504
+ mark: import_text_core.MarkupMarkType.bold,
505
+ getAttrs: /* @__PURE__ */ __name((tok, state) => {
506
+ return { marker: tok.markup };
507
+ }, "getAttrs")
508
+ },
509
+ strong: {
510
+ mark: import_text_core.MarkupMarkType.bold,
511
+ getAttrs: /* @__PURE__ */ __name((tok, state) => {
512
+ return { marker: tok.markup };
513
+ }, "getAttrs")
514
+ },
515
+ s: { mark: import_text_core.MarkupMarkType.strike },
516
+ u: { mark: import_text_core.MarkupMarkType.underline },
517
+ code_inline: {
518
+ mark: import_text_core.MarkupMarkType.code,
519
+ noCloseToken: true
520
+ }
521
+ };
522
+ const specialRule = {
523
+ link: {
524
+ type: /* @__PURE__ */ __name((state, tok) => {
525
+ const href = tok.attrGet("href");
526
+ if ((href?.startsWith(state.refUrl) ?? false) || state.stack[state.stack.length - 1]?.type === "reference") {
527
+ return { type: import_text_core.MarkupNodeType.reference, node: true };
528
+ }
529
+ return { type: import_text_core.MarkupMarkType.link, node: false, close: true };
530
+ }, "type"),
531
+ getAttrs: /* @__PURE__ */ __name((tok, state) => {
532
+ const attrs2 = tokToAttrs(tok, "href", "title");
533
+ if (attrs2.href !== void 0) {
534
+ try {
535
+ const url = new URL(attrs2.href);
536
+ if (attrs2.href.startsWith(state.refUrl) ?? false) {
537
+ return {
538
+ label: url.searchParams?.get("label") ?? "",
539
+ id: url.searchParams?.get("_id") ?? "",
540
+ objectclass: url.searchParams?.get("_class") ?? ""
541
+ };
542
+ }
543
+ } catch (err) {
544
+ }
545
+ }
546
+ return attrs2;
547
+ }, "getAttrs")
548
+ }
549
+ };
550
+ const ignoreRule = {
551
+ thead: {},
552
+ tbody: {}
553
+ };
554
+ const isInlineToken = /* @__PURE__ */ __name((token) => token?.type === "inline", "isInlineToken");
555
+ const isParagraphToken = /* @__PURE__ */ __name((token) => token?.type === "paragraph_open", "isParagraphToken");
556
+ const isListItemToken = /* @__PURE__ */ __name((token) => token?.type === "list_item_open", "isListItemToken");
557
+ const startsWithTodoMarkdown = /* @__PURE__ */ __name((token) => /^\[[xX \u00A0]\][ \u00A0]/.test(token.content), "startsWithTodoMarkdown");
558
+ const isCheckedTodoItem = /* @__PURE__ */ __name((token) => /^\[[xX]\][ \u00A0]/.test(token.content), "isCheckedTodoItem");
559
+ class MarkdownParser {
560
+ constructor(options) {
561
+ this.options = options;
562
+ this.tokenizer = (0, import_markdown_it.default)("default", {
563
+ html: true
564
+ });
565
+ this.tokenizer.core.ruler.after("inline", "task_list", this.listRule);
566
+ this.tokenizer.core.ruler.after("inline", "html_comment", this.htmlCommentRule);
567
+ this.htmlParser = options.htmlParser ?? import_text_html.htmlToMarkup;
568
+ this.tokenHandlers = tokenHandlers(tokensBlock, tokensNode, tokensMark, specialRule, ignoreRule, this.htmlParser);
569
+ }
570
+ static {
571
+ __name(this, "MarkdownParser");
572
+ }
573
+ tokenizer;
574
+ tokenHandlers;
575
+ htmlParser;
576
+ parse(text) {
577
+ const state = new MarkdownParseState(this.tokenHandlers, this.options.refUrl, this.options.imageUrl);
578
+ let doc;
579
+ const tokens = this.tokenizer.parse(text, {});
580
+ state.parseTokens(tokens);
581
+ do {
582
+ doc = state.closeNode();
583
+ } while (state.stack.length > 0);
584
+ return doc;
585
+ }
586
+ htmlCommentRule = /* @__PURE__ */ __name((state) => {
587
+ const tokens = state.tokens;
588
+ for (let i = 0; i < tokens.length; i++) {
589
+ if (tokens[i].type === "html_block" || tokens[i].type === "html_inline") {
590
+ const content = tokens[i].content.replaceAll("<!--", "<comment>").replaceAll("-->", "</comment>");
591
+ tokens[i].content = content;
592
+ }
593
+ }
594
+ return true;
595
+ }, "htmlCommentRule");
596
+ listRule = /* @__PURE__ */ __name((state) => {
597
+ const tokens = state.tokens;
598
+ const states = [];
599
+ for (let open = 0; open < tokens.length; open++) {
600
+ if (isTodoListItem(tokens, open)) {
601
+ convertTodoItem(tokens, open);
602
+ }
603
+ }
604
+ let listCloseIdx = -1;
605
+ let itemCloseIdx = -1;
606
+ for (let i = tokens.length - 1; i >= 0; i--) {
607
+ if (tokens[i].type === "bullet_list_close") {
608
+ states.push({ closeIdx: listCloseIdx, lastItemIdx: itemCloseIdx });
609
+ listCloseIdx = tokens.length - i;
610
+ itemCloseIdx = -1;
611
+ } else if (tokens[i].type === "list_item_close" || tokens[i].type === "todo_item_close") {
612
+ if (itemCloseIdx === -1) {
613
+ itemCloseIdx = tokens.length - i;
614
+ } else if (tokens[i].type !== tokens[tokens.length - itemCloseIdx].type) {
615
+ const bulletListOpen = new state.Token("bullet_list_open", "ul", 1);
616
+ bulletListOpen.markup = tokens[i + 1].markup;
617
+ tokens.splice(i + 1, 0, bulletListOpen);
618
+ tokens.splice(i + 1, 0, new state.Token("bullet_list_close", "ul", -1));
619
+ convertTodoList(tokens, i + 2, tokens.length - listCloseIdx, tokens.length - itemCloseIdx);
620
+ listCloseIdx = tokens.length - i - 1;
621
+ itemCloseIdx = tokens.length - i;
622
+ }
623
+ } else if (tokens[i].type === "bullet_list_open") {
624
+ if (itemCloseIdx !== -1) {
625
+ convertTodoList(tokens, i, tokens.length - listCloseIdx, tokens.length - itemCloseIdx);
626
+ }
627
+ const prevState = states.pop() ?? { closeIdx: -1, lastItemIdx: -1 };
628
+ listCloseIdx = prevState.closeIdx;
629
+ itemCloseIdx = prevState.lastItemIdx;
630
+ }
631
+ }
632
+ return true;
633
+ }, "listRule");
634
+ }
635
function convertTodoList(tokens, open, close, item) {
  // Validate the bracketing tokens before rewriting them in place.
  if (tokens[open].type !== "bullet_list_open") {
    throw new Error("bullet_list_open token expected");
  }
  if (tokens[close].type !== "bullet_list_close") {
    throw new Error("bullet_list_close token expected");
  }
  // Only a bullet list whose last item closed as a todo item is promoted
  // to a todo list; otherwise the tokens are left untouched.
  if (tokens[item].type !== "todo_item_close") {
    return;
  }
  tokens[open].type = "todo_list_open";
  tokens[close].type = "todo_list_close";
}
647
+ __name(convertTodoList, "convertTodoList");
648
+ function convertTodoItem(tokens, open) {
649
+ const close = findListItemCloseToken(tokens, open);
650
+ if (close !== -1) {
651
+ tokens[open].type = "todo_item_open";
652
+ tokens[close].type = "todo_item_close";
653
+ const inline = tokens[open + 2];
654
+ if (tokens[open].attrs == null) {
655
+ tokens[open].attrs = [];
656
+ }
657
+ ;
658
+ tokens[open].attrs.push(["checked", isCheckedTodoItem(inline) ? "true" : "false"]);
659
+ if (inline.children !== null) {
660
+ const newContent = inline.children[0].content.slice(4);
661
+ if (newContent.length > 0) {
662
+ inline.children[0].content = newContent;
663
+ } else {
664
+ inline.children = inline.children.slice(1);
665
+ }
666
+ const metaTok = inline.children.find(
667
+ (tok) => tok.type === "html_inline" && tok.content.startsWith("<!--") && tok.content.endsWith("-->")
668
+ );
669
+ if (metaTok !== void 0) {
670
+ const metaValues = metaTok.content.slice(5, -4).split(",");
671
+ for (const mv of metaValues) {
672
+ if (mv.startsWith("todoid")) {
673
+ ;
674
+ tokens[open].attrs.push(["todoid", mv.slice(7)]);
675
+ }
676
+ if (mv.startsWith("userid")) {
677
+ ;
678
+ tokens[open].attrs.push(["userid", mv.slice(7)]);
679
+ }
680
+ }
681
+ }
682
+ }
683
+ return true;
684
+ }
685
+ return false;
686
+ }
687
+ __name(convertTodoItem, "convertTodoItem");
688
function findListItemCloseToken(tokens, open) {
  // Locate the list_item_close matching the given list_item_open by
  // scanning forward for a close token at the same nesting level.
  // Returns -1 when no match exists.
  if (tokens[open].type !== "list_item_open") {
    throw new Error("list_item_open token expected");
  }
  const { level } = tokens[open];
  for (let idx = open + 1; idx < tokens.length; idx++) {
    if (tokens[idx].type === "list_item_close" && tokens[idx].level === level) {
      return idx;
    }
  }
  return -1;
}
700
+ __name(findListItemCloseToken, "findListItemCloseToken");
701
function isTodoListItem(tokens, pos) {
  // A todo item renders as list_item_open → paragraph_open → inline whose
  // text begins with a "[x] " / "[ ] " style marker.
  return (
    isListItemToken(tokens[pos]) &&
    isParagraphToken(tokens[pos + 1]) &&
    isInlineToken(tokens[pos + 2]) &&
    startsWithTodoMarkdown(tokens[pos + 2])
  );
}
704
+ __name(isTodoListItem, "isTodoListItem");
705
function convertStringLikeToken(tok, attrValue) {
  // Keep an explicit non-empty string attribute as-is.
  if (typeof attrValue === "string" && attrValue !== "") {
    return attrValue;
  }
  // Otherwise rebuild plain text from the token's inline children: text
  // nodes contribute their content, hardbreaks become newlines, and every
  // other child type is ignored.
  let text = "";
  for (const child of tok.children ?? []) {
    if (child.type === "text") {
      text += child.content;
    } else if (child.type === "hardbreak") {
      text += "\n";
    }
  }
  return text;
}
723
+ __name(convertStringLikeToken, "convertStringLikeToken");
724
+ //# sourceMappingURL=parser.js.map