@e-llm-studio/instant-learning 0.0.104 → 0.0.105

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (461)
  1. package/dist/cjs/features/IL-OTJ/ILOTJ.js +1 -1
  2. package/dist/cjs/features/IL-OTJ/_components/ChatComponent.js +1 -1
  3. package/dist/cjs/features/IL-OTJ/_components/CitationLink.js +1 -1
  4. package/dist/cjs/features/IL-OTJ/_components/CognitiveDecisioningCard.js +1 -1
  5. package/dist/cjs/features/IL-OTJ/_components/ILPopup.styles.js +1 -1
  6. package/dist/cjs/features/IL-OTJ/_components/InputDataReasoningCard.js +2 -0
  7. package/dist/cjs/features/IL-OTJ/_components/InputDataReasoningCard.js.map +1 -0
  8. package/dist/cjs/features/IL-OTJ/_components/MessageRendering.js +1 -1
  9. package/dist/cjs/features/IL-OTJ/_components/MinimizedThinkingSteps.js +1 -1
  10. package/dist/cjs/features/IL-OTJ/_components/molecules/Condition/ConditionList.js +1 -1
  11. package/dist/cjs/features/IL-OTJ/_components/organisms/ConditionSection/ConditionSection.js +1 -1
  12. package/dist/cjs/features/IL-OTJ/_components/templates/TrafficManager/CreateRule.js +1 -1
  13. package/dist/cjs/node_modules/@e-llm-studio/citation/dist/features/Bookemon/Bookemon.js.map +1 -1
  14. package/dist/cjs/node_modules/@e-llm-studio/citation/dist/features/CitationRenderer/MarkdownRenderer.js +1 -1
  15. package/dist/cjs/node_modules/@e-llm-studio/citation/dist/features/CitationRenderer/MarkdownRenderer.js.map +1 -1
  16. package/dist/cjs/node_modules/mdast-util-find-and-replace/lib/index.js +1 -1
  17. package/dist/cjs/node_modules/mdast-util-find-and-replace/lib/index.js.map +1 -1
  18. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js +2 -0
  19. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js.map +1 -0
  20. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  21. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  22. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  23. package/dist/cjs/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  24. package/dist/cjs/node_modules/mdast-util-from-markdown/lib/index.js +2 -0
  25. package/dist/cjs/node_modules/mdast-util-from-markdown/lib/index.js.map +1 -0
  26. package/dist/cjs/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  27. package/dist/cjs/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  28. package/dist/cjs/node_modules/mdast-util-gfm/lib/index.js +1 -1
  29. package/dist/cjs/node_modules/mdast-util-gfm/lib/index.js.map +1 -1
  30. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/lib/index.js +1 -1
  31. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/lib/index.js.map +1 -1
  32. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/index.js +2 -0
  33. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/index.js.map +1 -0
  34. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  35. package/dist/cjs/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  36. package/dist/cjs/node_modules/mdast-util-gfm-footnote/lib/index.js +1 -1
  37. package/dist/cjs/node_modules/mdast-util-gfm-footnote/lib/index.js.map +1 -1
  38. package/dist/cjs/node_modules/mdast-util-gfm-strikethrough/lib/index.js +1 -1
  39. package/dist/cjs/node_modules/mdast-util-gfm-strikethrough/lib/index.js.map +1 -1
  40. package/dist/cjs/node_modules/mdast-util-gfm-table/lib/index.js +1 -1
  41. package/dist/cjs/node_modules/mdast-util-gfm-table/lib/index.js.map +1 -1
  42. package/dist/cjs/node_modules/mdast-util-gfm-task-list-item/lib/index.js +1 -1
  43. package/dist/cjs/node_modules/mdast-util-gfm-task-list-item/lib/index.js.map +1 -1
  44. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js +1 -1
  45. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js.map +1 -1
  46. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/handle/list-item.js.map +1 -1
  47. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/association.js +2 -0
  48. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/association.js.map +1 -0
  49. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/check-bullet.js.map +1 -1
  50. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js +1 -1
  51. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js.map +1 -1
  52. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/container-flow.js +2 -0
  53. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/container-flow.js.map +1 -0
  54. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js +2 -0
  55. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js.map +1 -0
  56. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js +2 -0
  57. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js.map +1 -0
  58. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js +2 -0
  59. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js.map +1 -0
  60. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js.map +1 -1
  61. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/safe.js +2 -0
  62. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/safe.js.map +1 -0
  63. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/track.js +2 -0
  64. package/dist/cjs/node_modules/mdast-util-to-markdown/lib/util/track.js.map +1 -0
  65. package/dist/cjs/node_modules/mdast-util-to-string/lib/index.js.map +1 -1
  66. package/dist/cjs/node_modules/micromark/lib/constructs.js +2 -0
  67. package/dist/cjs/node_modules/micromark/lib/constructs.js.map +1 -0
  68. package/dist/cjs/node_modules/micromark/lib/create-tokenizer.js +2 -0
  69. package/dist/cjs/node_modules/micromark/lib/create-tokenizer.js.map +1 -0
  70. package/dist/cjs/node_modules/micromark/lib/initialize/content.js +2 -0
  71. package/dist/cjs/node_modules/micromark/lib/initialize/content.js.map +1 -0
  72. package/dist/cjs/node_modules/micromark/lib/initialize/document.js +2 -0
  73. package/dist/cjs/node_modules/micromark/lib/initialize/document.js.map +1 -0
  74. package/dist/cjs/node_modules/micromark/lib/initialize/flow.js +2 -0
  75. package/dist/cjs/node_modules/micromark/lib/initialize/flow.js.map +1 -0
  76. package/dist/cjs/node_modules/micromark/lib/initialize/text.js +2 -0
  77. package/dist/cjs/node_modules/micromark/lib/initialize/text.js.map +1 -0
  78. package/dist/cjs/node_modules/micromark/lib/parse.js +2 -0
  79. package/dist/cjs/node_modules/micromark/lib/parse.js.map +1 -0
  80. package/dist/cjs/node_modules/micromark/lib/postprocess.js +2 -0
  81. package/dist/cjs/node_modules/micromark/lib/postprocess.js.map +1 -0
  82. package/dist/cjs/node_modules/micromark/lib/preprocess.js +2 -0
  83. package/dist/cjs/node_modules/micromark/lib/preprocess.js.map +1 -0
  84. package/dist/cjs/node_modules/micromark/node_modules/micromark-util-character/index.js +2 -0
  85. package/dist/cjs/node_modules/micromark/node_modules/micromark-util-character/index.js.map +1 -0
  86. package/dist/cjs/node_modules/micromark-core-commonmark/lib/attention.js +2 -0
  87. package/dist/cjs/node_modules/micromark-core-commonmark/lib/attention.js.map +1 -0
  88. package/dist/cjs/node_modules/micromark-core-commonmark/lib/autolink.js +2 -0
  89. package/dist/cjs/node_modules/micromark-core-commonmark/lib/autolink.js.map +1 -0
  90. package/dist/cjs/node_modules/micromark-core-commonmark/lib/blank-line.js +1 -1
  91. package/dist/cjs/node_modules/micromark-core-commonmark/lib/blank-line.js.map +1 -1
  92. package/dist/cjs/node_modules/micromark-core-commonmark/lib/block-quote.js +2 -0
  93. package/dist/cjs/node_modules/micromark-core-commonmark/lib/block-quote.js.map +1 -0
  94. package/dist/cjs/node_modules/micromark-core-commonmark/lib/character-escape.js +2 -0
  95. package/dist/cjs/node_modules/micromark-core-commonmark/lib/character-escape.js.map +1 -0
  96. package/dist/cjs/node_modules/micromark-core-commonmark/lib/character-reference.js +2 -0
  97. package/dist/cjs/node_modules/micromark-core-commonmark/lib/character-reference.js.map +1 -0
  98. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-fenced.js +2 -0
  99. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +1 -0
  100. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-indented.js +2 -0
  101. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-indented.js.map +1 -0
  102. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-text.js +2 -0
  103. package/dist/cjs/node_modules/micromark-core-commonmark/lib/code-text.js.map +1 -0
  104. package/dist/cjs/node_modules/micromark-core-commonmark/lib/content.js +2 -0
  105. package/dist/cjs/node_modules/micromark-core-commonmark/lib/content.js.map +1 -0
  106. package/dist/cjs/node_modules/micromark-core-commonmark/lib/definition.js +2 -0
  107. package/dist/cjs/node_modules/micromark-core-commonmark/lib/definition.js.map +1 -0
  108. package/dist/cjs/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +2 -0
  109. package/dist/cjs/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +1 -0
  110. package/dist/cjs/node_modules/micromark-core-commonmark/lib/heading-atx.js +2 -0
  111. package/dist/cjs/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +1 -0
  112. package/dist/cjs/node_modules/micromark-core-commonmark/lib/html-flow.js +2 -0
  113. package/dist/cjs/node_modules/micromark-core-commonmark/lib/html-flow.js.map +1 -0
  114. package/dist/cjs/node_modules/micromark-core-commonmark/lib/html-text.js +2 -0
  115. package/dist/cjs/node_modules/micromark-core-commonmark/lib/html-text.js.map +1 -0
  116. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-end.js +2 -0
  117. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-end.js.map +1 -0
  118. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-start-image.js +2 -0
  119. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +1 -0
  120. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-start-link.js +2 -0
  121. package/dist/cjs/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +1 -0
  122. package/dist/cjs/node_modules/micromark-core-commonmark/lib/line-ending.js +2 -0
  123. package/dist/cjs/node_modules/micromark-core-commonmark/lib/line-ending.js.map +1 -0
  124. package/dist/cjs/node_modules/micromark-core-commonmark/lib/list.js +2 -0
  125. package/dist/cjs/node_modules/micromark-core-commonmark/lib/list.js.map +1 -0
  126. package/dist/cjs/node_modules/micromark-core-commonmark/lib/setext-underline.js +2 -0
  127. package/dist/cjs/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +1 -0
  128. package/dist/cjs/node_modules/micromark-core-commonmark/lib/thematic-break.js +2 -0
  129. package/dist/cjs/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +1 -0
  130. package/dist/cjs/node_modules/micromark-core-commonmark/node_modules/micromark-util-character/index.js +2 -0
  131. package/dist/cjs/node_modules/micromark-core-commonmark/node_modules/micromark-util-character/index.js.map +1 -0
  132. package/dist/cjs/node_modules/micromark-extension-gfm/index.js +1 -1
  133. package/dist/cjs/node_modules/micromark-extension-gfm/index.js.map +1 -1
  134. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/lib/syntax.js +1 -1
  135. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/lib/syntax.js.map +1 -1
  136. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/index.js +2 -0
  137. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/index.js.map +1 -0
  138. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  139. package/dist/cjs/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  140. package/dist/cjs/node_modules/micromark-extension-gfm-footnote/lib/syntax.js +1 -1
  141. package/dist/cjs/node_modules/micromark-extension-gfm-footnote/lib/syntax.js.map +1 -1
  142. package/dist/cjs/node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character/index.js +2 -0
  143. package/dist/cjs/node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character/index.js.map +1 -0
  144. package/dist/cjs/node_modules/micromark-extension-gfm-strikethrough/lib/syntax.js +1 -1
  145. package/dist/cjs/node_modules/micromark-extension-gfm-strikethrough/lib/syntax.js.map +1 -1
  146. package/dist/cjs/node_modules/micromark-extension-gfm-table/lib/edit-map.js +1 -1
  147. package/dist/cjs/node_modules/micromark-extension-gfm-table/lib/edit-map.js.map +1 -1
  148. package/dist/cjs/node_modules/micromark-extension-gfm-table/lib/infer.js.map +1 -1
  149. package/dist/cjs/node_modules/micromark-extension-gfm-table/lib/syntax.js +1 -1
  150. package/dist/cjs/node_modules/micromark-extension-gfm-table/lib/syntax.js.map +1 -1
  151. package/dist/cjs/node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character/index.js +2 -0
  152. package/dist/cjs/node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character/index.js.map +1 -0
  153. package/dist/cjs/node_modules/micromark-extension-gfm-task-list-item/lib/syntax.js +1 -1
  154. package/dist/cjs/node_modules/micromark-extension-gfm-task-list-item/lib/syntax.js.map +1 -1
  155. package/dist/cjs/node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character/index.js +2 -0
  156. package/dist/cjs/node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character/index.js.map +1 -0
  157. package/dist/cjs/node_modules/micromark-factory-destination/index.js +2 -0
  158. package/dist/cjs/node_modules/micromark-factory-destination/index.js.map +1 -0
  159. package/dist/cjs/node_modules/micromark-factory-destination/node_modules/micromark-util-character/index.js +2 -0
  160. package/dist/cjs/node_modules/micromark-factory-destination/node_modules/micromark-util-character/index.js.map +1 -0
  161. package/dist/cjs/node_modules/micromark-factory-label/index.js +2 -0
  162. package/dist/cjs/node_modules/micromark-factory-label/index.js.map +1 -0
  163. package/dist/cjs/node_modules/micromark-factory-label/node_modules/micromark-util-character/index.js +2 -0
  164. package/dist/cjs/node_modules/micromark-factory-label/node_modules/micromark-util-character/index.js.map +1 -0
  165. package/dist/cjs/node_modules/micromark-factory-space/index.js +1 -1
  166. package/dist/cjs/node_modules/micromark-factory-space/index.js.map +1 -1
  167. package/dist/cjs/node_modules/micromark-factory-space/node_modules/micromark-util-character/index.js +2 -0
  168. package/dist/cjs/node_modules/micromark-factory-space/node_modules/micromark-util-character/index.js.map +1 -0
  169. package/dist/cjs/node_modules/micromark-factory-title/index.js +2 -0
  170. package/dist/cjs/node_modules/micromark-factory-title/index.js.map +1 -0
  171. package/dist/cjs/node_modules/micromark-factory-title/node_modules/micromark-util-character/index.js +2 -0
  172. package/dist/cjs/node_modules/micromark-factory-title/node_modules/micromark-util-character/index.js.map +1 -0
  173. package/dist/cjs/node_modules/micromark-factory-whitespace/index.js +2 -0
  174. package/dist/cjs/node_modules/micromark-factory-whitespace/index.js.map +1 -0
  175. package/dist/cjs/node_modules/micromark-factory-whitespace/node_modules/micromark-util-character/index.js +2 -0
  176. package/dist/cjs/node_modules/micromark-factory-whitespace/node_modules/micromark-util-character/index.js.map +1 -0
  177. package/dist/cjs/node_modules/micromark-util-chunked/index.js +1 -1
  178. package/dist/cjs/node_modules/micromark-util-chunked/index.js.map +1 -1
  179. package/dist/cjs/node_modules/micromark-util-classify-character/index.js +1 -1
  180. package/dist/cjs/node_modules/micromark-util-classify-character/index.js.map +1 -1
  181. package/dist/cjs/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/index.js +2 -0
  182. package/dist/cjs/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/index.js.map +1 -0
  183. package/dist/cjs/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  184. package/dist/cjs/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  185. package/dist/cjs/node_modules/micromark-util-combine-extensions/index.js.map +1 -1
  186. package/dist/cjs/node_modules/micromark-util-decode-numeric-character-reference/index.js +2 -0
  187. package/dist/cjs/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +1 -0
  188. package/dist/cjs/node_modules/micromark-util-decode-string/index.js +2 -0
  189. package/dist/cjs/node_modules/micromark-util-decode-string/index.js.map +1 -0
  190. package/dist/cjs/node_modules/micromark-util-html-tag-name/index.js +2 -0
  191. package/dist/cjs/node_modules/micromark-util-html-tag-name/index.js.map +1 -0
  192. package/dist/cjs/node_modules/micromark-util-normalize-identifier/index.js.map +1 -1
  193. package/dist/cjs/node_modules/micromark-util-resolve-all/index.js.map +1 -1
  194. package/dist/cjs/node_modules/micromark-util-subtokenize/index.js +2 -0
  195. package/dist/cjs/node_modules/micromark-util-subtokenize/index.js.map +1 -0
  196. package/dist/cjs/node_modules/react-markdown/lib/react-markdown.js +1 -1
  197. package/dist/cjs/node_modules/react-markdown/lib/react-markdown.js.map +1 -1
  198. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +1 -1
  199. package/dist/cjs/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +1 -1
  200. package/dist/cjs/node_modules/remark-gfm/index.js +2 -0
  201. package/dist/cjs/node_modules/remark-gfm/index.js.map +1 -0
  202. package/dist/cjs/node_modules/unified/lib/index.js +2 -0
  203. package/dist/cjs/node_modules/unified/lib/index.js.map +1 -0
  204. package/dist/cjs/node_modules/unified/node_modules/is-plain-obj/index.js +2 -0
  205. package/dist/cjs/node_modules/unified/node_modules/is-plain-obj/index.js.map +1 -0
  206. package/dist/cjs/node_modules/unified/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  207. package/dist/cjs/node_modules/unified/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  208. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/index.js +2 -0
  209. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/index.js.map +1 -0
  210. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minpath.browser.js +2 -0
  211. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minpath.browser.js.map +1 -0
  212. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minproc.browser.js +2 -0
  213. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minproc.browser.js.map +1 -0
  214. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minurl.browser.js +2 -0
  215. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minurl.browser.js.map +1 -0
  216. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minurl.shared.js +2 -0
  217. package/dist/cjs/node_modules/unified/node_modules/vfile/lib/minurl.shared.js.map +1 -0
  218. package/dist/cjs/node_modules/unified/node_modules/vfile-message/lib/index.js +2 -0
  219. package/dist/cjs/node_modules/unified/node_modules/vfile-message/lib/index.js.map +1 -0
  220. package/dist/features/IL-OTJ/ILOTJ.js +1 -1
  221. package/dist/features/IL-OTJ/_components/ChatComponent.js +1 -1
  222. package/dist/features/IL-OTJ/_components/CitationLink.js +1 -1
  223. package/dist/features/IL-OTJ/_components/CognitiveDecisioningCard.js +1 -1
  224. package/dist/features/IL-OTJ/_components/ILPopup.styles.js +1 -1
  225. package/dist/features/IL-OTJ/_components/InputDataReasoningCard.js +2 -0
  226. package/dist/features/IL-OTJ/_components/InputDataReasoningCard.js.map +1 -0
  227. package/dist/features/IL-OTJ/_components/MessageRendering.js +1 -1
  228. package/dist/features/IL-OTJ/_components/MinimizedThinkingSteps.js +1 -1
  229. package/dist/features/IL-OTJ/_components/molecules/Condition/ConditionList.js +1 -1
  230. package/dist/features/IL-OTJ/_components/organisms/ConditionSection/ConditionSection.js +1 -1
  231. package/dist/features/IL-OTJ/_components/templates/TrafficManager/CreateRule.js +1 -1
  232. package/dist/node_modules/@e-llm-studio/citation/dist/features/Bookemon/Bookemon.js.map +1 -1
  233. package/dist/node_modules/@e-llm-studio/citation/dist/features/CitationRenderer/MarkdownRenderer.js +1 -1
  234. package/dist/node_modules/@e-llm-studio/citation/dist/features/CitationRenderer/MarkdownRenderer.js.map +1 -1
  235. package/dist/node_modules/mdast-util-find-and-replace/lib/index.js +1 -1
  236. package/dist/node_modules/mdast-util-find-and-replace/lib/index.js.map +1 -1
  237. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js +2 -0
  238. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-is/lib/index.js.map +1 -0
  239. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js +2 -0
  240. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/color.browser.js.map +1 -0
  241. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js +2 -0
  242. package/dist/node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents/lib/index.js.map +1 -0
  243. package/dist/node_modules/mdast-util-from-markdown/lib/index.js +2 -0
  244. package/dist/node_modules/mdast-util-from-markdown/lib/index.js.map +1 -0
  245. package/dist/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  246. package/dist/node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  247. package/dist/node_modules/mdast-util-gfm/lib/index.js +1 -1
  248. package/dist/node_modules/mdast-util-gfm/lib/index.js.map +1 -1
  249. package/dist/node_modules/mdast-util-gfm-autolink-literal/lib/index.js +1 -1
  250. package/dist/node_modules/mdast-util-gfm-autolink-literal/lib/index.js.map +1 -1
  251. package/dist/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/index.js +2 -0
  252. package/dist/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/index.js.map +1 -0
  253. package/dist/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  254. package/dist/node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  255. package/dist/node_modules/mdast-util-gfm-footnote/lib/index.js +1 -1
  256. package/dist/node_modules/mdast-util-gfm-footnote/lib/index.js.map +1 -1
  257. package/dist/node_modules/mdast-util-gfm-strikethrough/lib/index.js +1 -1
  258. package/dist/node_modules/mdast-util-gfm-strikethrough/lib/index.js.map +1 -1
  259. package/dist/node_modules/mdast-util-gfm-table/lib/index.js +1 -1
  260. package/dist/node_modules/mdast-util-gfm-table/lib/index.js.map +1 -1
  261. package/dist/node_modules/mdast-util-gfm-task-list-item/lib/index.js +1 -1
  262. package/dist/node_modules/mdast-util-gfm-task-list-item/lib/index.js.map +1 -1
  263. package/dist/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js +1 -1
  264. package/dist/node_modules/mdast-util-to-markdown/lib/handle/inline-code.js.map +1 -1
  265. package/dist/node_modules/mdast-util-to-markdown/lib/handle/list-item.js.map +1 -1
  266. package/dist/node_modules/mdast-util-to-markdown/lib/util/association.js +2 -0
  267. package/dist/node_modules/mdast-util-to-markdown/lib/util/association.js.map +1 -0
  268. package/dist/node_modules/mdast-util-to-markdown/lib/util/check-bullet.js.map +1 -1
  269. package/dist/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js +1 -1
  270. package/dist/node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js.map +1 -1
  271. package/dist/node_modules/mdast-util-to-markdown/lib/util/container-flow.js +2 -0
  272. package/dist/node_modules/mdast-util-to-markdown/lib/util/container-flow.js.map +1 -0
  273. package/dist/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js +2 -0
  274. package/dist/node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js.map +1 -0
  275. package/dist/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js +2 -0
  276. package/dist/node_modules/mdast-util-to-markdown/lib/util/indent-lines.js.map +1 -0
  277. package/dist/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js +2 -0
  278. package/dist/node_modules/mdast-util-to-markdown/lib/util/pattern-compile.js.map +1 -0
  279. package/dist/node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js.map +1 -1
  280. package/dist/node_modules/mdast-util-to-markdown/lib/util/safe.js +2 -0
  281. package/dist/node_modules/mdast-util-to-markdown/lib/util/safe.js.map +1 -0
  282. package/dist/node_modules/mdast-util-to-markdown/lib/util/track.js +2 -0
  283. package/dist/node_modules/mdast-util-to-markdown/lib/util/track.js.map +1 -0
  284. package/dist/node_modules/mdast-util-to-string/lib/index.js.map +1 -1
  285. package/dist/node_modules/micromark/lib/constructs.js +2 -0
  286. package/dist/node_modules/micromark/lib/constructs.js.map +1 -0
  287. package/dist/node_modules/micromark/lib/create-tokenizer.js +2 -0
  288. package/dist/node_modules/micromark/lib/create-tokenizer.js.map +1 -0
  289. package/dist/node_modules/micromark/lib/initialize/content.js +2 -0
  290. package/dist/node_modules/micromark/lib/initialize/content.js.map +1 -0
  291. package/dist/node_modules/micromark/lib/initialize/document.js +2 -0
  292. package/dist/node_modules/micromark/lib/initialize/document.js.map +1 -0
  293. package/dist/node_modules/micromark/lib/initialize/flow.js +2 -0
  294. package/dist/node_modules/micromark/lib/initialize/flow.js.map +1 -0
  295. package/dist/node_modules/micromark/lib/initialize/text.js +2 -0
  296. package/dist/node_modules/micromark/lib/initialize/text.js.map +1 -0
  297. package/dist/node_modules/micromark/lib/parse.js +2 -0
  298. package/dist/node_modules/micromark/lib/parse.js.map +1 -0
  299. package/dist/node_modules/micromark/lib/postprocess.js +2 -0
  300. package/dist/node_modules/micromark/lib/postprocess.js.map +1 -0
  301. package/dist/node_modules/micromark/lib/preprocess.js +2 -0
  302. package/dist/node_modules/micromark/lib/preprocess.js.map +1 -0
  303. package/dist/node_modules/micromark/node_modules/micromark-util-character/index.js +2 -0
  304. package/dist/node_modules/micromark/node_modules/micromark-util-character/index.js.map +1 -0
  305. package/dist/node_modules/micromark-core-commonmark/lib/attention.js +2 -0
  306. package/dist/node_modules/micromark-core-commonmark/lib/attention.js.map +1 -0
  307. package/dist/node_modules/micromark-core-commonmark/lib/autolink.js +2 -0
  308. package/dist/node_modules/micromark-core-commonmark/lib/autolink.js.map +1 -0
  309. package/dist/node_modules/micromark-core-commonmark/lib/blank-line.js +1 -1
  310. package/dist/node_modules/micromark-core-commonmark/lib/blank-line.js.map +1 -1
  311. package/dist/node_modules/micromark-core-commonmark/lib/block-quote.js +2 -0
  312. package/dist/node_modules/micromark-core-commonmark/lib/block-quote.js.map +1 -0
  313. package/dist/node_modules/micromark-core-commonmark/lib/character-escape.js +2 -0
  314. package/dist/node_modules/micromark-core-commonmark/lib/character-escape.js.map +1 -0
  315. package/dist/node_modules/micromark-core-commonmark/lib/character-reference.js +2 -0
  316. package/dist/node_modules/micromark-core-commonmark/lib/character-reference.js.map +1 -0
  317. package/dist/node_modules/micromark-core-commonmark/lib/code-fenced.js +2 -0
  318. package/dist/node_modules/micromark-core-commonmark/lib/code-fenced.js.map +1 -0
  319. package/dist/node_modules/micromark-core-commonmark/lib/code-indented.js +2 -0
  320. package/dist/node_modules/micromark-core-commonmark/lib/code-indented.js.map +1 -0
  321. package/dist/node_modules/micromark-core-commonmark/lib/code-text.js +2 -0
  322. package/dist/node_modules/micromark-core-commonmark/lib/code-text.js.map +1 -0
  323. package/dist/node_modules/micromark-core-commonmark/lib/content.js +2 -0
  324. package/dist/node_modules/micromark-core-commonmark/lib/content.js.map +1 -0
  325. package/dist/node_modules/micromark-core-commonmark/lib/definition.js +2 -0
  326. package/dist/node_modules/micromark-core-commonmark/lib/definition.js.map +1 -0
  327. package/dist/node_modules/micromark-core-commonmark/lib/hard-break-escape.js +2 -0
  328. package/dist/node_modules/micromark-core-commonmark/lib/hard-break-escape.js.map +1 -0
  329. package/dist/node_modules/micromark-core-commonmark/lib/heading-atx.js +2 -0
  330. package/dist/node_modules/micromark-core-commonmark/lib/heading-atx.js.map +1 -0
  331. package/dist/node_modules/micromark-core-commonmark/lib/html-flow.js +2 -0
  332. package/dist/node_modules/micromark-core-commonmark/lib/html-flow.js.map +1 -0
  333. package/dist/node_modules/micromark-core-commonmark/lib/html-text.js +2 -0
  334. package/dist/node_modules/micromark-core-commonmark/lib/html-text.js.map +1 -0
  335. package/dist/node_modules/micromark-core-commonmark/lib/label-end.js +2 -0
  336. package/dist/node_modules/micromark-core-commonmark/lib/label-end.js.map +1 -0
  337. package/dist/node_modules/micromark-core-commonmark/lib/label-start-image.js +2 -0
  338. package/dist/node_modules/micromark-core-commonmark/lib/label-start-image.js.map +1 -0
  339. package/dist/node_modules/micromark-core-commonmark/lib/label-start-link.js +2 -0
  340. package/dist/node_modules/micromark-core-commonmark/lib/label-start-link.js.map +1 -0
  341. package/dist/node_modules/micromark-core-commonmark/lib/line-ending.js +2 -0
  342. package/dist/node_modules/micromark-core-commonmark/lib/line-ending.js.map +1 -0
  343. package/dist/node_modules/micromark-core-commonmark/lib/list.js +2 -0
  344. package/dist/node_modules/micromark-core-commonmark/lib/list.js.map +1 -0
  345. package/dist/node_modules/micromark-core-commonmark/lib/setext-underline.js +2 -0
  346. package/dist/node_modules/micromark-core-commonmark/lib/setext-underline.js.map +1 -0
  347. package/dist/node_modules/micromark-core-commonmark/lib/thematic-break.js +2 -0
  348. package/dist/node_modules/micromark-core-commonmark/lib/thematic-break.js.map +1 -0
  349. package/dist/node_modules/micromark-core-commonmark/node_modules/micromark-util-character/index.js +2 -0
  350. package/dist/node_modules/micromark-core-commonmark/node_modules/micromark-util-character/index.js.map +1 -0
  351. package/dist/node_modules/micromark-extension-gfm/index.js +1 -1
  352. package/dist/node_modules/micromark-extension-gfm/index.js.map +1 -1
  353. package/dist/node_modules/micromark-extension-gfm-autolink-literal/lib/syntax.js +1 -1
  354. package/dist/node_modules/micromark-extension-gfm-autolink-literal/lib/syntax.js.map +1 -1
  355. package/dist/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/index.js +2 -0
  356. package/dist/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/index.js.map +1 -0
  357. package/dist/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  358. package/dist/node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  359. package/dist/node_modules/micromark-extension-gfm-footnote/lib/syntax.js +1 -1
  360. package/dist/node_modules/micromark-extension-gfm-footnote/lib/syntax.js.map +1 -1
  361. package/dist/node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character/index.js +2 -0
  362. package/dist/node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character/index.js.map +1 -0
  363. package/dist/node_modules/micromark-extension-gfm-strikethrough/lib/syntax.js +1 -1
  364. package/dist/node_modules/micromark-extension-gfm-strikethrough/lib/syntax.js.map +1 -1
  365. package/dist/node_modules/micromark-extension-gfm-table/lib/edit-map.js +1 -1
  366. package/dist/node_modules/micromark-extension-gfm-table/lib/edit-map.js.map +1 -1
  367. package/dist/node_modules/micromark-extension-gfm-table/lib/infer.js.map +1 -1
  368. package/dist/node_modules/micromark-extension-gfm-table/lib/syntax.js +1 -1
  369. package/dist/node_modules/micromark-extension-gfm-table/lib/syntax.js.map +1 -1
  370. package/dist/node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character/index.js +2 -0
  371. package/dist/node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character/index.js.map +1 -0
  372. package/dist/node_modules/micromark-extension-gfm-task-list-item/lib/syntax.js +1 -1
  373. package/dist/node_modules/micromark-extension-gfm-task-list-item/lib/syntax.js.map +1 -1
  374. package/dist/node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character/index.js +2 -0
  375. package/dist/node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character/index.js.map +1 -0
  376. package/dist/node_modules/micromark-factory-destination/index.js +2 -0
  377. package/dist/node_modules/micromark-factory-destination/index.js.map +1 -0
  378. package/dist/node_modules/micromark-factory-destination/node_modules/micromark-util-character/index.js +2 -0
  379. package/dist/node_modules/micromark-factory-destination/node_modules/micromark-util-character/index.js.map +1 -0
  380. package/dist/node_modules/micromark-factory-label/index.js +2 -0
  381. package/dist/node_modules/micromark-factory-label/index.js.map +1 -0
  382. package/dist/node_modules/micromark-factory-label/node_modules/micromark-util-character/index.js +2 -0
  383. package/dist/node_modules/micromark-factory-label/node_modules/micromark-util-character/index.js.map +1 -0
  384. package/dist/node_modules/micromark-factory-space/index.js +1 -1
  385. package/dist/node_modules/micromark-factory-space/index.js.map +1 -1
  386. package/dist/node_modules/micromark-factory-space/node_modules/micromark-util-character/index.js +2 -0
  387. package/dist/node_modules/micromark-factory-space/node_modules/micromark-util-character/index.js.map +1 -0
  388. package/dist/node_modules/micromark-factory-title/index.js +2 -0
  389. package/dist/node_modules/micromark-factory-title/index.js.map +1 -0
  390. package/dist/node_modules/micromark-factory-title/node_modules/micromark-util-character/index.js +2 -0
  391. package/dist/node_modules/micromark-factory-title/node_modules/micromark-util-character/index.js.map +1 -0
  392. package/dist/node_modules/micromark-factory-whitespace/index.js +2 -0
  393. package/dist/node_modules/micromark-factory-whitespace/index.js.map +1 -0
  394. package/dist/node_modules/micromark-factory-whitespace/node_modules/micromark-util-character/index.js +2 -0
  395. package/dist/node_modules/micromark-factory-whitespace/node_modules/micromark-util-character/index.js.map +1 -0
  396. package/dist/node_modules/micromark-util-chunked/index.js +1 -1
  397. package/dist/node_modules/micromark-util-chunked/index.js.map +1 -1
  398. package/dist/node_modules/micromark-util-classify-character/index.js +1 -1
  399. package/dist/node_modules/micromark-util-classify-character/index.js.map +1 -1
  400. package/dist/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/index.js +2 -0
  401. package/dist/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/index.js.map +1 -0
  402. package/dist/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js +2 -0
  403. package/dist/node_modules/micromark-util-classify-character/node_modules/micromark-util-character/lib/unicode-punctuation-regex.js.map +1 -0
  404. package/dist/node_modules/micromark-util-combine-extensions/index.js.map +1 -1
  405. package/dist/node_modules/micromark-util-decode-numeric-character-reference/index.js +2 -0
  406. package/dist/node_modules/micromark-util-decode-numeric-character-reference/index.js.map +1 -0
  407. package/dist/node_modules/micromark-util-decode-string/index.js +2 -0
  408. package/dist/node_modules/micromark-util-decode-string/index.js.map +1 -0
  409. package/dist/node_modules/micromark-util-html-tag-name/index.js +2 -0
  410. package/dist/node_modules/micromark-util-html-tag-name/index.js.map +1 -0
  411. package/dist/node_modules/micromark-util-normalize-identifier/index.js.map +1 -1
  412. package/dist/node_modules/micromark-util-resolve-all/index.js.map +1 -1
  413. package/dist/node_modules/micromark-util-subtokenize/index.js +2 -0
  414. package/dist/node_modules/micromark-util-subtokenize/index.js.map +1 -0
  415. package/dist/node_modules/react-markdown/lib/react-markdown.js +1 -1
  416. package/dist/node_modules/react-markdown/lib/react-markdown.js.map +1 -1
  417. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js +1 -1
  418. package/dist/node_modules/react-markdown/node_modules/remark-parse/lib/index.js.map +1 -1
  419. package/dist/node_modules/remark-gfm/index.js +2 -0
  420. package/dist/node_modules/remark-gfm/index.js.map +1 -0
  421. package/dist/node_modules/unified/lib/index.js +2 -0
  422. package/dist/node_modules/unified/lib/index.js.map +1 -0
  423. package/dist/node_modules/unified/node_modules/is-plain-obj/index.js +2 -0
  424. package/dist/node_modules/unified/node_modules/is-plain-obj/index.js.map +1 -0
  425. package/dist/node_modules/unified/node_modules/unist-util-stringify-position/lib/index.js +2 -0
  426. package/dist/node_modules/unified/node_modules/unist-util-stringify-position/lib/index.js.map +1 -0
  427. package/dist/node_modules/unified/node_modules/vfile/lib/index.js +2 -0
  428. package/dist/node_modules/unified/node_modules/vfile/lib/index.js.map +1 -0
  429. package/dist/node_modules/unified/node_modules/vfile/lib/minpath.browser.js +2 -0
  430. package/dist/node_modules/unified/node_modules/vfile/lib/minpath.browser.js.map +1 -0
  431. package/dist/node_modules/unified/node_modules/vfile/lib/minproc.browser.js +2 -0
  432. package/dist/node_modules/unified/node_modules/vfile/lib/minproc.browser.js.map +1 -0
  433. package/dist/node_modules/unified/node_modules/vfile/lib/minurl.browser.js +2 -0
  434. package/dist/node_modules/unified/node_modules/vfile/lib/minurl.browser.js.map +1 -0
  435. package/dist/node_modules/unified/node_modules/vfile/lib/minurl.shared.js +2 -0
  436. package/dist/node_modules/unified/node_modules/vfile/lib/minurl.shared.js.map +1 -0
  437. package/dist/node_modules/unified/node_modules/vfile-message/lib/index.js +2 -0
  438. package/dist/node_modules/unified/node_modules/vfile-message/lib/index.js.map +1 -0
  439. package/dist/types/src/features/IL-OTJ/ILOTJ.d.ts +1 -0
  440. package/dist/types/src/features/IL-OTJ/ILOTJ.d.ts.map +1 -1
  441. package/dist/types/src/features/IL-OTJ/ILOTJTest.d.ts.map +1 -1
  442. package/dist/types/src/features/IL-OTJ/_components/ChatComponent.d.ts +1 -0
  443. package/dist/types/src/features/IL-OTJ/_components/ChatComponent.d.ts.map +1 -1
  444. package/dist/types/src/features/IL-OTJ/_components/CitationLink.d.ts +13 -0
  445. package/dist/types/src/features/IL-OTJ/_components/CitationLink.d.ts.map +1 -1
  446. package/dist/types/src/features/IL-OTJ/_components/CognitiveDecisioningCard.d.ts +3 -0
  447. package/dist/types/src/features/IL-OTJ/_components/CognitiveDecisioningCard.d.ts.map +1 -1
  448. package/dist/types/src/features/IL-OTJ/_components/ILPopup.styles.d.ts +1 -1
  449. package/dist/types/src/features/IL-OTJ/_components/ILPopup.styles.d.ts.map +1 -1
  450. package/dist/types/src/features/IL-OTJ/_components/InputDataReasoningCard.d.ts +10 -0
  451. package/dist/types/src/features/IL-OTJ/_components/InputDataReasoningCard.d.ts.map +1 -0
  452. package/dist/types/src/features/IL-OTJ/_components/MessageRendering.d.ts +3 -1
  453. package/dist/types/src/features/IL-OTJ/_components/MessageRendering.d.ts.map +1 -1
  454. package/dist/types/src/features/IL-OTJ/_components/_svg/RightSideIcons.d.ts +1 -0
  455. package/dist/types/src/features/IL-OTJ/_components/_svg/RightSideIcons.d.ts.map +1 -1
  456. package/dist/types/src/features/IL-OTJ/_components/molecules/Condition/ConditionList.d.ts +3 -1
  457. package/dist/types/src/features/IL-OTJ/_components/molecules/Condition/ConditionList.d.ts.map +1 -1
  458. package/dist/types/src/features/IL-OTJ/_components/organisms/ConditionSection/ConditionSection.d.ts +1 -1
  459. package/dist/types/src/features/IL-OTJ/_components/organisms/ConditionSection/ConditionSection.d.ts.map +1 -1
  460. package/dist/types/src/features/IL-OTJ/_components/templates/TrafficManager/CreateRule.d.ts.map +1 -1
  461. package/package.json +2 -2
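
Most of the churn in this release comes from newly vendored copies of react-markdown, remark-gfm, and the micromark/mdast toolchain under dist/**/node_modules, alongside the new InputDataReasoningCard component and its type declarations. For orientation only, the sketch below shows how that bundled stack is typically consumed: react-markdown rendering GitHub-flavored Markdown through the remark-gfm plugin. It illustrates the public API of the vendored dependencies, not an export of @e-llm-studio/instant-learning itself.

// A minimal usage sketch of the stack vendored into dist/ by this release:
// react-markdown with the remark-gfm plugin (which in turn pulls in the
// micromark-extension-gfm / mdast-util-gfm packages listed above).
// Illustrative only; not part of this package's own API.
import React from "react";
import ReactMarkdown from "react-markdown";
import remarkGfm from "remark-gfm";

const markdown = [
  "| Feature | Status |",
  "| ------- | ------ |",
  "| ~~strikethrough~~ | rendered |",
].join("\n");

export function GfmPreview(): JSX.Element {
  // remarkPlugins accepts unified plugins; remark-gfm enables tables,
  // task lists, strikethrough, footnotes, and autolink literals.
  return <ReactMarkdown remarkPlugins={[remarkGfm]}>{markdown}</ReactMarkdown>;
}
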
@@ -0,0 +1 @@
+ {"version":3,"file":"code-indented.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/code-indented.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeIndented = {\n name: 'codeIndented',\n tokenize: tokenizeCodeIndented\n}\n\n/** @type {Construct} */\nconst furtherStart = {\n tokenize: tokenizeFurtherStart,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeIndented(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of code (indented).\n *\n * > **Parsing note**: it is not needed to check if this first line is a\n * > filled line (that it has a non-whitespace character), because blank lines\n * > are parsed already, so we never run into that.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: manually check if interrupting like `markdown-rs`.\n\n effects.enter('codeIndented')\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? 
atBreak(code)\n : nok(code)\n }\n\n /**\n * At a break.\n *\n * ```markdown\n * > | aaa\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === null) {\n return after(code)\n }\n if (markdownLineEnding(code)) {\n return effects.attempt(furtherStart, atBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return inside(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * > | aaa\n * ^^^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return atBreak(code)\n }\n effects.consume(code)\n return inside\n }\n\n /** @type {State} */\n function after(code) {\n effects.exit('codeIndented')\n // To do: allow interrupting like `markdown-rs`.\n // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeFurtherStart(effects, ok, nok) {\n const self = this\n return furtherStart\n\n /**\n * At eol, trying to parse another indent.\n *\n * ```markdown\n * > | aaa\n * ^\n * | bbb\n * ```\n *\n * @type {State}\n */\n function furtherStart(code) {\n // To do: improve `lazy` / `pierce` handling.\n // If this is a lazy line, it can’t be code.\n if (self.parser.lazy[self.now().line]) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return furtherStart\n }\n\n // To do: the code here in `micromark-js` is a bit different from\n // `markdown-rs` because there it can attempt spaces.\n // We can’t yet.\n //\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? ok(code)\n : markdownLineEnding(code)\n ? 
furtherStart(code)\n : nok(code)\n }\n}\n"],"names":["codeIndented","name","tokenize","effects","ok","nok","self","this","code","enter","factorySpace","afterPrefix","tail","events","length","type","sliceSerialize","atBreak","after","markdownLineEnding","attempt","furtherStart","inside","exit","consume","parser","lazy","now","line","partial"],"mappings":"6JAUY,MAACA,EAAe,CAC1BC,KAAM,eACNC,SAaF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAgBA,SAAeC,GAMb,OAHAL,EAAQM,MAAM,gBAGPC,EAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,EAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CG,EAAQT,GACRH,EAAIG,EACT,CAYD,SAASS,EAAQT,GACf,OAAa,OAATA,EACKU,EAAMV,GAEXW,EAAmBX,GACdL,EAAQiB,QAAQC,EAAcJ,EAASC,EAAvCf,CAA8CK,IAEvDL,EAAQM,MAAM,iBACPa,EAAOd,GACf,CAYD,SAASc,EAAOd,GACd,OAAa,OAATA,GAAiBW,EAAmBX,IACtCL,EAAQoB,KAAK,iBACNN,EAAQT,KAEjBL,EAAQqB,QAAQhB,GACTc,EACR,CAGD,SAASJ,EAAMV,GAKb,OAJAL,EAAQoB,KAAK,gBAINnB,EAAGI,EACX,CACH,GAvGMa,EAAe,CACnBnB,SA4GF,SAA8BC,EAASC,EAAIC,GACzC,MAAMC,EAAOC,KACb,OAAOc,EAaP,SAASA,EAAab,GAGpB,OAAIF,EAAKmB,OAAOC,KAAKpB,EAAKqB,MAAMC,MACvBvB,EAAIG,GAETW,EAAmBX,IACrBL,EAAQM,MAAM,cACdN,EAAQqB,QAAQhB,GAChBL,EAAQoB,KAAK,cACNF,GASFX,EAAaP,EAASQ,EAAa,aAAc,EAAjDD,CAAwDF,EAChE,CAYD,SAASG,EAAYH,GACnB,MAAMI,EAAON,EAAKO,OAAOP,EAAKO,OAAOC,OAAS,GAC9C,OAAOF,GACY,eAAjBA,EAAK,GAAGG,MACRH,EAAK,GAAGI,eAAeJ,EAAK,IAAI,GAAME,QAAU,EAC9CV,EAAGI,GACHW,EAAmBX,GACnBa,EAAab,GACbH,EAAIG,EACT,CACH,EApKEqB,SAAS"}
@@ -0,0 +1,2 @@
+ import{markdownLineEnding as e}from"../node_modules/micromark-util-character/index.js";const t={name:"codeText",tokenize:function(t,n,c){let i,o,r=0;return function(e){return t.enter("codeText"),t.enter("codeTextSequence"),d(e)};function d(e){return 96===e?(t.consume(e),r++,d):(t.exit("codeTextSequence"),u(e))}function u(n){return null===n?c(n):32===n?(t.enter("space"),t.consume(n),t.exit("space"),u):96===n?(o=t.enter("codeTextSequence"),i=0,x(n)):e(n)?(t.enter("lineEnding"),t.consume(n),t.exit("lineEnding"),u):(t.enter("codeTextData"),a(n))}function a(n){return null===n||32===n||96===n||e(n)?(t.exit("codeTextData"),u(n)):(t.consume(n),a)}function x(e){return 96===e?(t.consume(e),i++,x):i===r?(t.exit("codeTextSequence"),t.exit("codeText"),n(e)):(o.type="codeTextData",a(e))}},resolve:function(e){let t,n,c=e.length-4,i=3;if(!("lineEnding"!==e[i][1].type&&"space"!==e[i][1].type||"lineEnding"!==e[c][1].type&&"space"!==e[c][1].type))for(t=i;++t<c;)if("codeTextData"===e[t][1].type){e[i][1].type="codeTextPadding",e[c][1].type="codeTextPadding",i+=2,c-=2;break}t=i-1,c++;for(;++t<=c;)void 0===n?t!==c&&"lineEnding"!==e[t][1].type&&(n=t):t!==c&&"lineEnding"!==e[t][1].type||(e[n][1].type="codeTextData",t!==n+2&&(e[n][1].end=e[t-1][1].end,e.splice(n+2,t-n-2),c-=t-n-2,t=n+2),n=void 0);return e},previous:function(e){return 96!==e||"characterEscape"===this.events[this.events.length-1][1].type}};export{t as codeText};
+ //# sourceMappingURL=code-text.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"code-text.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/code-text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeText = {\n name: 'codeText',\n tokenize: tokenizeCodeText,\n resolve: resolveCodeText,\n previous\n}\n\n// To do: next major: don’t resolve, like `markdown-rs`.\n/** @type {Resolver} */\nfunction resolveCodeText(events) {\n let tailExitIndex = events.length - 4\n let headEnterIndex = 3\n /** @type {number} */\n let index\n /** @type {number | undefined} */\n let enter\n\n // If we start and end with an EOL or a space.\n if (\n (events[headEnterIndex][1].type === 'lineEnding' ||\n events[headEnterIndex][1].type === 'space') &&\n (events[tailExitIndex][1].type === 'lineEnding' ||\n events[tailExitIndex][1].type === 'space')\n ) {\n index = headEnterIndex\n\n // And we have data.\n while (++index < tailExitIndex) {\n if (events[index][1].type === 'codeTextData') {\n // Then we have padding.\n events[headEnterIndex][1].type = 'codeTextPadding'\n events[tailExitIndex][1].type = 'codeTextPadding'\n headEnterIndex += 2\n tailExitIndex -= 2\n break\n }\n }\n }\n\n // Merge adjacent spaces and data.\n index = headEnterIndex - 1\n tailExitIndex++\n while (++index <= tailExitIndex) {\n if (enter === undefined) {\n if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {\n enter = index\n }\n } else if (\n index === tailExitIndex ||\n events[index][1].type === 'lineEnding'\n ) {\n events[enter][1].type = 'codeTextData'\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n tailExitIndex -= index - enter - 2\n index = enter + 2\n }\n enter = undefined\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Previous}\n */\nfunction previous(code) {\n // If there is a previous code, there will always be a tail.\n return (\n code !== 96 ||\n this.events[this.events.length - 1][1].type === 'characterEscape'\n )\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeText(effects, ok, nok) {\n const self = this\n let sizeOpen = 0\n /** @type {number} */\n let size\n /** @type {Token} */\n let token\n return start\n\n /**\n * Start of code (text).\n *\n * ```markdown\n * > | `a`\n * ^\n * > | \\`a`\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('codeText')\n effects.enter('codeTextSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 96) {\n effects.consume(code)\n sizeOpen++\n return sequenceOpen\n }\n effects.exit('codeTextSequence')\n return between(code)\n }\n\n /**\n * Between something and something else.\n *\n * ```markdown\n * > | `a`\n * ^^\n * ```\n *\n * @type {State}\n */\n function between(code) {\n // EOF.\n if (code === null) {\n return nok(code)\n }\n\n // To do: next major: don’t do spaces 
in resolve, but when compiling,\n // like `markdown-rs`.\n // Tabs don’t work, and virtual spaces don’t make sense.\n if (code === 32) {\n effects.enter('space')\n effects.consume(code)\n effects.exit('space')\n return between\n }\n\n // Closing fence? Could also be data.\n if (code === 96) {\n token = effects.enter('codeTextSequence')\n size = 0\n return sequenceClose(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return between\n }\n\n // Data.\n effects.enter('codeTextData')\n return data(code)\n }\n\n /**\n * In data.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (\n code === null ||\n code === 32 ||\n code === 96 ||\n markdownLineEnding(code)\n ) {\n effects.exit('codeTextData')\n return between(code)\n }\n effects.consume(code)\n return data\n }\n\n /**\n * In closing sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n // More.\n if (code === 96) {\n effects.consume(code)\n size++\n return sequenceClose\n }\n\n // Done!\n if (size === sizeOpen) {\n effects.exit('codeTextSequence')\n effects.exit('codeText')\n return ok(code)\n }\n\n // More or less accents: mark as data.\n token.type = 'codeTextData'\n return data(code)\n }\n}\n"],"names":["codeText","name","tokenize","effects","ok","nok","size","token","sizeOpen","code","enter","sequenceOpen","consume","exit","between","sequenceClose","markdownLineEnding","data","type","resolve","events","index","tailExitIndex","length","headEnterIndex","undefined","end","splice","previous","this"],"mappings":"uFAYY,MAACA,EAAW,CACtBC,KAAM,WACNC,SA8EF,SAA0BC,EAASC,EAAIC,GAErC,IAEIC,EAEAC,EAJAC,EAAW,EAKf,OAcA,SAAeC,GAGb,OAFAN,EAAQO,MAAM,YACdP,EAAQO,MAAM,oBACPC,EAAaF,EACrB,EAYD,SAASE,EAAaF,GACpB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBD,IACOG,IAETR,EAAQU,KAAK,oBACNC,EAAQL,GAChB,CAYD,SAASK,EAAQL,GAEf,OAAa,OAATA,EACKJ,EAAII,GAMA,KAATA,GACFN,EAAQO,MAAM,SACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,SACNC,GAII,KAATL,GACFF,EAAQJ,EAAQO,MAAM,oBACtBJ,EAAO,EACAS,EAAcN,IAEnBO,EAAmBP,IACrBN,EAAQO,MAAM,cACdP,EAAQS,QAAQH,GAChBN,EAAQU,KAAK,cACNC,IAITX,EAAQO,MAAM,gBACPO,EAAKR,GACb,CAYD,SAASQ,EAAKR,GACZ,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACAO,EAAmBP,IAEnBN,EAAQU,KAAK,gBACNC,EAAQL,KAEjBN,EAAQS,QAAQH,GACTQ,EACR,CAYD,SAASF,EAAcN,GAErB,OAAa,KAATA,GACFN,EAAQS,QAAQH,GAChBH,IACOS,GAILT,IAASE,GACXL,EAAQU,KAAK,oBACbV,EAAQU,KAAK,YACNT,EAAGK,KAIZF,EAAMW,KAAO,eACND,EAAKR,GACb,CACH,EA7NEU,QAMF,SAAyBC,GACvB,IAGIC,EAEAX,EALAY,EAAgBF,EAAOG,OAAS,EAChCC,EAAiB,EAOrB,KACsC,eAAnCJ,EAAOI,GAAgB,GAAGN,MACU,UAAnCE,EAAOI,GAAgB,GAAGN,MACO,eAAlCE,EAAOE,GAAe,GAAGJ,MACU,UAAlCE,EAAOE,GAAe,GAAGJ,MAK3B,IAHAG,EAAQG,IAGCH,EAAQC,GACf,GAA8B,iBAA1BF,EAAOC,GAAO,GAAGH,KAAyB,CAE5CE,EAAOI,GAAgB,GAAGN,KAAO,kBACjCE,EAAOE,GAAe,GAAGJ,KAAO,kBAChCM,GAAkB,EAClBF,GAAiB,EACjB,KACD,CAKLD,EAAQG,EAAiB,EACzBF,IACA,OAASD,GAASC,QACFG,IAAVf,EACEW,IAAUC,GAA2C,eAA1BF,EAAOC,GAAO,GAAGH,OAC9CR,EAAQW,GAGVA,IAAUC,GACgB,eAA1BF,EAAOC,GAAO,GAAGH,OAEjBE,EAAOV,GAAO,GAAGQ,KAAO,eACpBG,IAAUX,EAAQ,IACpBU,EAAOV,GAAO,GAAGgB,IAAMN,EAAOC,EAAQ,GAAG,GAAGK,IAC5CN,EAAOO,OAAOjB,EAAQ,EAAGW,EAAQX,EAAQ,GACzCY,GAAiBD,EAAQX,EAAQ,EACjCW,EAAQX,EAAQ,GAElBA,OAAQe,GAGZ,OAAOL,CACT,EA1DEQ,SAgEF,SAAkBnB,GAEhB,OACW,KAATA,GACgD,oBAAhDoB,KAAKT,OAAOS,KAAKT,OAAOG,OAAS,GAAG,GAAGL,IAE3C"}
@@ -0,0 +1,2 @@
+ import{factorySpace as n}from"../../micromark-factory-space/index.js";import{markdownLineEnding as e}from"../node_modules/micromark-util-character/index.js";import{subtokenize as t}from"../../micromark-util-subtokenize/index.js";const r={tokenize:function(n,t){let r;return function(e){return n.enter("content"),r=n.enter("chunkContent",{contentType:"content"}),i(e)};function i(t){return null===t?c(t):e(t)?n.check(o,u,c)(t):(n.consume(t),i)}function c(e){return n.exit("chunkContent"),n.exit("content"),t(e)}function u(e){return n.consume(e),n.exit("chunkContent"),r.next=n.enter("chunkContent",{contentType:"content",previous:r}),r=r.next,i}},resolve:function(n){return t(n),n}},o={tokenize:function(t,r,o){const i=this;return function(e){return t.exit("chunkContent"),t.enter("lineEnding"),t.consume(e),t.exit("lineEnding"),n(t,c,"linePrefix")};function c(n){if(null===n||e(n))return o(n);const c=i.events[i.events.length-1];return!i.parser.constructs.disable.null.includes("codeIndented")&&c&&"linePrefix"===c[1].type&&c[2].sliceSerialize(c[1],!0).length>=4?r(n):t.interrupt(i.parser.constructs.flow,o,r)(n)}},partial:!0};export{r as content};
+ //# sourceMappingURL=content.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"content.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/content.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {subtokenize} from 'micromark-util-subtokenize'\n/**\n * No name because it must not be turned off.\n * @type {Construct}\n */\nexport const content = {\n tokenize: tokenizeContent,\n resolve: resolveContent\n}\n\n/** @type {Construct} */\nconst continuationConstruct = {\n tokenize: tokenizeContinuation,\n partial: true\n}\n\n/**\n * Content is transparent: it’s parsed right now. That way, definitions are also\n * parsed right now: before text in paragraphs (specifically, media) are parsed.\n *\n * @type {Resolver}\n */\nfunction resolveContent(events) {\n subtokenize(events)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContent(effects, ok) {\n /** @type {Token | undefined} */\n let previous\n return chunkStart\n\n /**\n * Before a content chunk.\n *\n * ```markdown\n * > | abc\n * ^\n * ```\n *\n * @type {State}\n */\n function chunkStart(code) {\n effects.enter('content')\n previous = effects.enter('chunkContent', {\n contentType: 'content'\n })\n return chunkInside(code)\n }\n\n /**\n * In a content chunk.\n *\n * ```markdown\n * > | abc\n * ^^^\n * ```\n *\n * @type {State}\n */\n function chunkInside(code) {\n if (code === null) {\n return contentEnd(code)\n }\n\n // To do: in `markdown-rs`, each line is parsed on its own, and everything\n // is stitched together resolving.\n if (markdownLineEnding(code)) {\n return effects.check(\n continuationConstruct,\n contentContinue,\n contentEnd\n )(code)\n }\n\n // Data.\n effects.consume(code)\n return chunkInside\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentEnd(code) {\n effects.exit('chunkContent')\n effects.exit('content')\n return ok(code)\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentContinue(code) {\n effects.consume(code)\n effects.exit('chunkContent')\n previous.next = effects.enter('chunkContent', {\n contentType: 'content',\n previous\n })\n previous = previous.next\n return chunkInside\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContinuation(effects, ok, nok) {\n const self = this\n return startLookahead\n\n /**\n *\n *\n * @type {State}\n */\n function startLookahead(code) {\n effects.exit('chunkContent')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, prefixed, 'linePrefix')\n }\n\n /**\n *\n *\n * @type {State}\n */\n function prefixed(code) {\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n\n // Always populated by defaults.\n\n const tail = self.events[self.events.length - 1]\n if (\n !self.parser.constructs.disable.null.includes('codeIndented') &&\n tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ) {\n return ok(code)\n }\n return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)\n 
}\n}\n"],"names":["content","tokenize","effects","ok","previous","code","enter","contentType","chunkInside","contentEnd","markdownLineEnding","check","continuationConstruct","contentContinue","consume","exit","next","resolve","events","subtokenize","nok","self","this","factorySpace","prefixed","tail","length","parser","constructs","disable","null","includes","type","sliceSerialize","interrupt","flow","partial"],"mappings":"qOAgBY,MAACA,EAAU,CACrBC,SAyBF,SAAyBC,EAASC,GAEhC,IAAIC,EACJ,OAYA,SAAoBC,GAKlB,OAJAH,EAAQI,MAAM,WACdF,EAAWF,EAAQI,MAAM,eAAgB,CACvCC,YAAa,YAERC,EAAYH,EACpB,EAYD,SAASG,EAAYH,GACnB,OAAa,OAATA,EACKI,EAAWJ,GAKhBK,EAAmBL,GACdH,EAAQS,MACbC,EACAC,EACAJ,EAHKP,CAILG,IAIJH,EAAQY,QAAQT,GACTG,EACR,CAOD,SAASC,EAAWJ,GAGlB,OAFAH,EAAQa,KAAK,gBACbb,EAAQa,KAAK,WACNZ,EAAGE,EACX,CAOD,SAASQ,EAAgBR,GAQvB,OAPAH,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,gBACbX,EAASY,KAAOd,EAAQI,MAAM,eAAgB,CAC5CC,YAAa,UACbH,aAEFA,EAAWA,EAASY,KACbR,CACR,CACH,EAvGES,QAeF,SAAwBC,GAEtB,OADAC,EAAYD,GACLA,CACT,GAdMN,EAAwB,CAC5BX,SAwGF,SAA8BC,EAASC,EAAIiB,GACzC,MAAMC,EAAOC,KACb,OAOA,SAAwBjB,GAKtB,OAJAH,EAAQa,KAAK,gBACbb,EAAQI,MAAM,cACdJ,EAAQY,QAAQT,GAChBH,EAAQa,KAAK,cACNQ,EAAarB,EAASsB,EAAU,aACxC,EAOD,SAASA,EAASnB,GAChB,GAAa,OAATA,GAAiBK,EAAmBL,GACtC,OAAOe,EAAIf,GAKb,MAAMoB,EAAOJ,EAAKH,OAAOG,EAAKH,OAAOQ,OAAS,GAC9C,OACGL,EAAKM,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,iBAC9CN,GACiB,eAAjBA,EAAK,GAAGO,MACRP,EAAK,GAAGQ,eAAeR,EAAK,IAAI,GAAMC,QAAU,EAEzCvB,EAAGE,GAELH,EAAQgC,UAAUb,EAAKM,OAAOC,WAAWO,KAAMf,EAAKjB,EAApDD,CAAwDG,EAChE,CACH,EA/IE+B,SAAS"}
@@ -0,0 +1,2 @@
+ import{factoryDestination as i}from"../../micromark-factory-destination/index.js";import{factoryLabel as n}from"../../micromark-factory-label/index.js";import{factorySpace as t}from"../../micromark-factory-space/index.js";import{factoryTitle as e}from"../../micromark-factory-title/index.js";import{factoryWhitespace as r}from"../../micromark-factory-whitespace/index.js";import{markdownLineEndingOrSpace as o,markdownSpace as a,markdownLineEnding as f}from"../node_modules/micromark-util-character/index.js";import{normalizeIdentifier as c}from"../../micromark-util-normalize-identifier/index.js";const u={name:"definition",tokenize:function(e,u,d){const s=this;let l;return function(i){return e.enter("definition"),function(i){return n.call(s,e,p,d,"definitionLabel","definitionLabelMarker","definitionLabelString")(i)}(i)};function p(i){return l=c(s.sliceSerialize(s.events[s.events.length-1][1]).slice(1,-1)),58===i?(e.enter("definitionMarker"),e.consume(i),e.exit("definitionMarker"),k):d(i)}function k(i){return o(i)?r(e,x)(i):x(i)}function x(n){return i(e,h,d,"definitionDestination","definitionDestinationLiteral","definitionDestinationLiteralMarker","definitionDestinationRaw","definitionDestinationString")(n)}function h(i){return e.attempt(m,j,j)(i)}function j(i){return a(i)?t(e,y,"whitespace")(i):y(i)}function y(i){return null===i||f(i)?(e.exit("definition"),s.parser.defined.push(l),u(i)):d(i)}}},m={tokenize:function(i,n,c){return function(n){return o(n)?r(i,u)(n):c(n)};function u(n){return e(i,m,c,"definitionTitle","definitionTitleMarker","definitionTitleString")(n)}function m(n){return a(n)?t(i,d,"whitespace")(n):d(n)}function d(i){return null===i||f(i)?n(i):c(i)}},partial:!0};export{u as definition};
+ //# sourceMappingURL=definition.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"definition.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/definition.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factorySpace} from 'micromark-factory-space'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\n/** @type {Construct} */\nexport const definition = {\n name: 'definition',\n tokenize: tokenizeDefinition\n}\n\n/** @type {Construct} */\nconst titleBefore = {\n tokenize: tokenizeTitleBefore,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeDefinition(effects, ok, nok) {\n const self = this\n /** @type {string} */\n let identifier\n return start\n\n /**\n * At start of a definition.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Do not interrupt paragraphs (but do follow definitions).\n // To do: do `interrupt` the way `markdown-rs` does.\n // To do: parse whitespace the way `markdown-rs` does.\n effects.enter('definition')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `[`.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n // To do: parse whitespace the way `markdown-rs` does.\n\n return factoryLabel.call(\n self,\n effects,\n labelAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionLabel',\n 'definitionLabelMarker',\n 'definitionLabelString'\n )(code)\n }\n\n /**\n * After label.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function labelAfter(code) {\n identifier = normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n if (code === 58) {\n effects.enter('definitionMarker')\n effects.consume(code)\n effects.exit('definitionMarker')\n return markerAfter\n }\n return nok(code)\n }\n\n /**\n * After marker.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function markerAfter(code) {\n // Note: whitespace is optional.\n return markdownLineEndingOrSpace(code)\n ? 
factoryWhitespace(effects, destinationBefore)(code)\n : destinationBefore(code)\n }\n\n /**\n * Before destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationBefore(code) {\n return factoryDestination(\n effects,\n destinationAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionDestination',\n 'definitionDestinationLiteral',\n 'definitionDestinationLiteralMarker',\n 'definitionDestinationRaw',\n 'definitionDestinationString'\n )(code)\n }\n\n /**\n * After destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationAfter(code) {\n return effects.attempt(titleBefore, after, after)(code)\n }\n\n /**\n * After definition.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return markdownSpace(code)\n ? factorySpace(effects, afterWhitespace, 'whitespace')(code)\n : afterWhitespace(code)\n }\n\n /**\n * After definition, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function afterWhitespace(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('definition')\n\n // Note: we don’t care about uniqueness.\n // It’s likely that that doesn’t happen very frequently.\n // It is more likely that it wastes precious time.\n self.parser.defined.push(identifier)\n\n // To do: `markdown-rs` interrupt.\n // // You’d be interrupting.\n // tokenizer.interrupt = true\n return ok(code)\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeTitleBefore(effects, ok, nok) {\n return titleBefore\n\n /**\n * After destination, at whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, beforeMarker)(code)\n : nok(code)\n }\n\n /**\n * At title.\n *\n * ```markdown\n * | [a]: b\n * > | \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeMarker(code) {\n return factoryTitle(\n effects,\n titleAfter,\n nok,\n 'definitionTitle',\n 'definitionTitleMarker',\n 'definitionTitleString'\n )(code)\n }\n\n /**\n * After title.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfter(code) {\n return markdownSpace(code)\n ? factorySpace(effects, titleAfterOptionalWhitespace, 'whitespace')(code)\n : titleAfterOptionalWhitespace(code)\n }\n\n /**\n * After title, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfterOptionalWhitespace(code) {\n return code === null || markdownLineEnding(code) ? 
ok(code) : nok(code)\n }\n}\n"],"names":["definition","name","tokenize","effects","ok","nok","self","this","identifier","code","enter","factoryLabel","call","labelAfter","before","normalizeIdentifier","sliceSerialize","events","length","slice","consume","exit","markerAfter","markdownLineEndingOrSpace","factoryWhitespace","destinationBefore","factoryDestination","destinationAfter","attempt","titleBefore","after","markdownSpace","factorySpace","afterWhitespace","markdownLineEnding","parser","defined","push","beforeMarker","factoryTitle","titleAfter","titleAfterOptionalWhitespace","partial"],"mappings":"slBAmBY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAaF,SAA4BC,EAASC,EAAIC,GACvC,MAAMC,EAAOC,KAEb,IAAIC,EACJ,OAYA,SAAeC,GAKb,OADAN,EAAQO,MAAM,cAchB,SAAgBD,GAGd,OAAOE,EAAaC,KAClBN,EACAH,EACAU,EAEAR,EACA,kBACA,wBACA,wBARKM,CASLF,EACH,CA1BQK,CAAOL,EACf,EAqCD,SAASI,EAAWJ,GAIlB,OAHAD,EAAaO,EACXT,EAAKU,eAAeV,EAAKW,OAAOX,EAAKW,OAAOC,OAAS,GAAG,IAAIC,MAAM,GAAI,IAE3D,KAATV,GACFN,EAAQO,MAAM,oBACdP,EAAQiB,QAAQX,GAChBN,EAAQkB,KAAK,oBACNC,GAEFjB,EAAII,EACZ,CAYD,SAASa,EAAYb,GAEnB,OAAOc,EAA0Bd,GAC7Be,EAAkBrB,EAASsB,EAA3BD,CAA8Cf,GAC9CgB,EAAkBhB,EACvB,CAYD,SAASgB,EAAkBhB,GACzB,OAAOiB,EACLvB,EACAwB,EAEAtB,EACA,wBACA,+BACA,qCACA,2BACA,8BATKqB,CAULjB,EACH,CAYD,SAASkB,EAAiBlB,GACxB,OAAON,EAAQyB,QAAQC,EAAaC,EAAOA,EAApC3B,CAA2CM,EACnD,CAcD,SAASqB,EAAMrB,GACb,OAAOsB,EAActB,GACjBuB,EAAa7B,EAAS8B,EAAiB,aAAvCD,CAAqDvB,GACrDwB,EAAgBxB,EACrB,CAcD,SAASwB,EAAgBxB,GACvB,OAAa,OAATA,GAAiByB,EAAmBzB,IACtCN,EAAQkB,KAAK,cAKbf,EAAK6B,OAAOC,QAAQC,KAAK7B,GAKlBJ,EAAGK,IAELJ,EAAII,EACZ,CACH,GAtLMoB,EAAc,CAClB3B,SA2LF,SAA6BC,EAASC,EAAIC,GACxC,OAcA,SAAqBI,GACnB,OAAOc,EAA0Bd,GAC7Be,EAAkBrB,EAASmC,EAA3Bd,CAAyCf,GACzCJ,EAAII,EACT,EAaD,SAAS6B,EAAa7B,GACpB,OAAO8B,EACLpC,EACAqC,EACAnC,EACA,kBACA,wBACA,wBANKkC,CAOL9B,EACH,CAYD,SAAS+B,EAAW/B,GAClB,OAAOsB,EAActB,GACjBuB,EAAa7B,EAASsC,EAA8B,aAApDT,CAAkEvB,GAClEgC,EAA6BhC,EAClC,CAYD,SAASgC,EAA6BhC,GACpC,OAAgB,OAATA,GAAiByB,EAAmBzB,GAAQL,EAAGK,GAAQJ,EAAII,EACnE,CACH,EAlQEiC,SAAS"}
@@ -0,0 +1,2 @@
+ import{markdownLineEnding as e}from"../node_modules/micromark-util-character/index.js";const r={name:"hardBreakEscape",tokenize:function(r,n,t){return function(e){return r.enter("hardBreakEscape"),r.consume(e),a};function a(a){return e(a)?(r.exit("hardBreakEscape"),n(a)):t(a)}}};export{r as hardBreakEscape};
+ //# sourceMappingURL=hard-break-escape.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"hard-break-escape.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/hard-break-escape.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const hardBreakEscape = {\n name: 'hardBreakEscape',\n tokenize: tokenizeHardBreakEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHardBreakEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of a hard break (escape).\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('hardBreakEscape')\n effects.consume(code)\n return after\n }\n\n /**\n * After `\\`, at eol.\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownLineEnding(code)) {\n effects.exit('hardBreakEscape')\n return ok(code)\n }\n return nok(code)\n }\n}\n"],"names":["hardBreakEscape","name","tokenize","effects","ok","nok","code","enter","consume","after","markdownLineEnding","exit"],"mappings":"uFASY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAOF,SAAiCC,EAASC,EAAIC,GAC5C,OAaA,SAAeC,GAGb,OAFAH,EAAQI,MAAM,mBACdJ,EAAQK,QAAQF,GACTG,CACR,EAaD,SAASA,EAAMH,GACb,OAAII,EAAmBJ,IACrBH,EAAQQ,KAAK,mBACNP,EAAGE,IAELD,EAAIC,EACZ,CACH"}
@@ -0,0 +1,2 @@
+ import{factorySpace as e}from"../../micromark-factory-space/index.js";import{markdownLineEndingOrSpace as t,markdownLineEnding as n,markdownSpace as r}from"../node_modules/micromark-util-character/index.js";import{splice as i}from"../../micromark-util-chunked/index.js";const a={name:"headingAtx",tokenize:function(i,a,u){let c=0;return function(e){return i.enter("atxHeading"),function(e){return i.enter("atxHeadingSequence"),o(e)}(e)};function o(e){return 35===e&&c++<6?(i.consume(e),o):null===e||t(e)?(i.exit("atxHeadingSequence"),x(e)):u(e)}function x(t){return 35===t?(i.enter("atxHeadingSequence"),d(t)):null===t||n(t)?(i.exit("atxHeading"),a(t)):r(t)?e(i,x,"whitespace")(t):(i.enter("atxHeadingText"),s(t))}function d(e){return 35===e?(i.consume(e),d):(i.exit("atxHeadingSequence"),x(e))}function s(e){return null===e||35===e||t(e)?(i.exit("atxHeadingText"),x(e)):(i.consume(e),s)}},resolve:function(e,t){let n,r,a=e.length-2,u=3;"whitespace"===e[u][1].type&&(u+=2);a-2>u&&"whitespace"===e[a][1].type&&(a-=2);"atxHeadingSequence"===e[a][1].type&&(u===a-1||a-4>u&&"whitespace"===e[a-2][1].type)&&(a-=u+1===a?2:4);a>u&&(n={type:"atxHeadingText",start:e[u][1].start,end:e[a][1].end},r={type:"chunkText",start:e[u][1].start,end:e[a][1].end,contentType:"text"},i(e,u,a-u+1,[["enter",n,t],["enter",r,t],["exit",r,t],["exit",n,t]]));return e}};export{a as headingAtx};
+ //# sourceMappingURL=heading-atx.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"heading-atx.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/heading-atx.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {Construct} */\nexport const headingAtx = {\n name: 'headingAtx',\n tokenize: tokenizeHeadingAtx,\n resolve: resolveHeadingAtx\n}\n\n/** @type {Resolver} */\nfunction resolveHeadingAtx(events, context) {\n let contentEnd = events.length - 2\n let contentStart = 3\n /** @type {Token} */\n let content\n /** @type {Token} */\n let text\n\n // Prefix whitespace, part of the opening.\n if (events[contentStart][1].type === 'whitespace') {\n contentStart += 2\n }\n\n // Suffix whitespace, part of the closing.\n if (\n contentEnd - 2 > contentStart &&\n events[contentEnd][1].type === 'whitespace'\n ) {\n contentEnd -= 2\n }\n if (\n events[contentEnd][1].type === 'atxHeadingSequence' &&\n (contentStart === contentEnd - 1 ||\n (contentEnd - 4 > contentStart &&\n events[contentEnd - 2][1].type === 'whitespace'))\n ) {\n contentEnd -= contentStart + 1 === contentEnd ? 2 : 4\n }\n if (contentEnd > contentStart) {\n content = {\n type: 'atxHeadingText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end\n }\n text = {\n type: 'chunkText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end,\n contentType: 'text'\n }\n splice(events, contentStart, contentEnd - contentStart + 1, [\n ['enter', content, context],\n ['enter', text, context],\n ['exit', text, context],\n ['exit', content, context]\n ])\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHeadingAtx(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of a heading (atx).\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse indent like `markdown-rs`.\n effects.enter('atxHeading')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `#`.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('atxHeadingSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 35 && size++ < 6) {\n effects.consume(code)\n return sequenceOpen\n }\n\n // Always at least one `#`.\n if (code === null || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n return nok(code)\n }\n\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === 35) {\n effects.enter('atxHeadingSequence')\n return sequenceFurther(code)\n }\n if (code === null || markdownLineEnding(code)) {\n effects.exit('atxHeading')\n // To do: interrupt like `markdown-rs`.\n // // Feel free to interrupt.\n // 
tokenizer.interrupt = false\n return ok(code)\n }\n if (markdownSpace(code)) {\n return factorySpace(effects, atBreak, 'whitespace')(code)\n }\n\n // To do: generate `data` tokens, add the `text` token later.\n // Needs edit map, see: `markdown.rs`.\n effects.enter('atxHeadingText')\n return data(code)\n }\n\n /**\n * In further sequence (after whitespace).\n *\n * Could be normal “visible” hashes in the heading or a final sequence.\n *\n * ```markdown\n * > | ## aa ##\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceFurther(code) {\n if (code === 35) {\n effects.consume(code)\n return sequenceFurther\n }\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n\n /**\n * In text.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingText')\n return atBreak(code)\n }\n effects.consume(code)\n return data\n }\n}\n"],"names":["headingAtx","name","tokenize","effects","ok","nok","size","code","enter","sequenceOpen","before","consume","markdownLineEndingOrSpace","exit","atBreak","sequenceFurther","markdownLineEnding","markdownSpace","factorySpace","data","resolve","events","context","content","text","contentEnd","length","contentStart","type","start","end","contentType","splice"],"mappings":"8QAiBY,MAACA,EAAa,CACxBC,KAAM,aACNC,SA2DF,SAA4BC,EAASC,EAAIC,GACvC,IAAIC,EAAO,EACX,OAYA,SAAeC,GAGb,OADAJ,EAAQK,MAAM,cAchB,SAAgBD,GAEd,OADAJ,EAAQK,MAAM,sBACPC,EAAaF,EACrB,CAhBQG,CAAOH,EACf,EA2BD,SAASE,EAAaF,GACpB,OAAa,KAATA,GAAeD,IAAS,GAC1BH,EAAQQ,QAAQJ,GACTE,GAII,OAATF,GAAiBK,EAA0BL,IAC7CJ,EAAQU,KAAK,sBACNC,EAAQP,IAEVF,EAAIE,EACZ,CAYD,SAASO,EAAQP,GACf,OAAa,KAATA,GACFJ,EAAQK,MAAM,sBACPO,EAAgBR,IAEZ,OAATA,GAAiBS,EAAmBT,IACtCJ,EAAQU,KAAK,cAINT,EAAGG,IAERU,EAAcV,GACTW,EAAaf,EAASW,EAAS,aAA/BI,CAA6CX,IAKtDJ,EAAQK,MAAM,kBACPW,EAAKZ,GACb,CAcD,SAASQ,EAAgBR,GACvB,OAAa,KAATA,GACFJ,EAAQQ,QAAQJ,GACTQ,IAETZ,EAAQU,KAAK,sBACNC,EAAQP,GAChB,CAYD,SAASY,EAAKZ,GACZ,OAAa,OAATA,GAA0B,KAATA,GAAeK,EAA0BL,IAC5DJ,EAAQU,KAAK,kBACNC,EAAQP,KAEjBJ,EAAQQ,QAAQJ,GACTY,EACR,CACH,EA5LEC,QAIF,SAA2BC,EAAQC,GACjC,IAGIC,EAEAC,EALAC,EAAaJ,EAAOK,OAAS,EAC7BC,EAAe,EAOkB,eAAjCN,EAAOM,GAAc,GAAGC,OAC1BD,GAAgB,GAKhBF,EAAa,EAAIE,GACc,eAA/BN,EAAOI,GAAY,GAAGG,OAEtBH,GAAc,GAGiB,uBAA/BJ,EAAOI,GAAY,GAAGG,OACrBD,IAAiBF,EAAa,GAC5BA,EAAa,EAAIE,GACmB,eAAnCN,EAAOI,EAAa,GAAG,GAAGG,QAE9BH,GAAcE,EAAe,IAAMF,EAAa,EAAI,GAElDA,EAAaE,IACfJ,EAAU,CACRK,KAAM,iBACNC,MAAOR,EAAOM,GAAc,GAAGE,MAC/BC,IAAKT,EAAOI,GAAY,GAAGK,KAE7BN,EAAO,CACLI,KAAM,YACNC,MAAOR,EAAOM,GAAc,GAAGE,MAC/BC,IAAKT,EAAOI,GAAY,GAAGK,IAC3BC,YAAa,QAEfC,EAAOX,EAAQM,EAAcF,EAAaE,EAAe,EAAG,CAC1D,CAAC,QAASJ,EAASD,GACnB,CAAC,QAASE,EAAMF,GAChB,CAAC,OAAQE,EAAMF,GACf,CAAC,OAAQC,EAASD,MAGtB,OAAOD,CACT"}
@@ -0,0 +1,2 @@
+ import{asciiAlpha as n,markdownLineEndingOrSpace as e,asciiAlphanumeric as t,markdownSpace as r,markdownLineEnding as u}from"../node_modules/micromark-util-character/index.js";import{htmlRawNames as o,htmlBlockNames as c}from"../../micromark-util-html-tag-name/index.js";import{blankLine as i}from"./blank-line.js";const m={name:"htmlFlow",tokenize:function(i,m,f){const a=this;let h,p,d,w,g;return function(n){return function(n){return i.enter("htmlFlow"),i.enter("htmlFlowData"),i.consume(n),C}(n)};function C(e){return 33===e?(i.consume(e),x):47===e?(i.consume(e),p=!0,D):63===e?(i.consume(e),h=3,a.interrupt?m:N):n(e)?(i.consume(e),d=String.fromCharCode(e),E):f(e)}function x(e){return 45===e?(i.consume(e),h=2,F):91===e?(i.consume(e),h=5,w=0,k):n(e)?(i.consume(e),h=4,a.interrupt?m:N):f(e)}function F(n){return 45===n?(i.consume(n),a.interrupt?m:N):f(n)}function k(n){const e="CDATA[";return n===e.charCodeAt(w++)?(i.consume(n),6===w?a.interrupt?m:q:k):f(n)}function D(e){return n(e)?(i.consume(e),d=String.fromCharCode(e),E):f(e)}function E(n){if(null===n||47===n||62===n||e(n)){const e=47===n,t=d.toLowerCase();return e||p||!o.includes(t)?c.includes(d.toLowerCase())?(h=6,e?(i.consume(n),z):a.interrupt?m(n):q(n)):(h=7,a.interrupt&&!a.parser.lazy[a.now().line]?f(n):p?y(n):S(n)):(h=1,a.interrupt?m(n):q(n))}return 45===n||t(n)?(i.consume(n),d+=String.fromCharCode(n),E):f(n)}function z(n){return 62===n?(i.consume(n),a.interrupt?m:q):f(n)}function y(n){return r(n)?(i.consume(n),y):P(n)}function S(e){return 47===e?(i.consume(e),P):58===e||95===e||n(e)?(i.consume(e),j):r(e)?(i.consume(e),S):P(e)}function j(n){return 45===n||46===n||58===n||95===n||t(n)?(i.consume(n),j):A(n)}function A(n){return 61===n?(i.consume(n),L):r(n)?(i.consume(n),A):S(n)}function L(n){return null===n||60===n||61===n||62===n||96===n?f(n):34===n||39===n?(i.consume(n),g=n,T):r(n)?(i.consume(n),L):b(n)}function T(n){return n===g?(i.consume(n),g=null,v):null===n||u(n)?f(n):(i.consume(n),T)}function b(n){return null===n||34===n||39===n||47===n||60===n||61===n||62===n||96===n||e(n)?A(n):(i.consume(n),b)}function v(n){return 47===n||62===n||r(n)?S(n):f(n)}function P(n){return 62===n?(i.consume(n),_):f(n)}function _(n){return null===n||u(n)?q(n):r(n)?(i.consume(n),_):f(n)}function q(n){return 45===n&&2===h?(i.consume(n),I):60===n&&1===h?(i.consume(n),J):62===n&&4===h?(i.consume(n),O):63===n&&3===h?(i.consume(n),N):93===n&&5===h?(i.consume(n),M):!u(n)||6!==h&&7!==h?null===n||u(n)?(i.exit("htmlFlowData"),B(n)):(i.consume(n),q):(i.exit("htmlFlowData"),i.check(s,Q,B)(n))}function B(n){return i.check(l,G,Q)(n)}function G(n){return i.enter("lineEnding"),i.consume(n),i.exit("lineEnding"),H}function H(n){return null===n||u(n)?B(n):(i.enter("htmlFlowData"),q(n))}function I(n){return 45===n?(i.consume(n),N):q(n)}function J(n){return 47===n?(i.consume(n),d="",K):q(n)}function K(e){if(62===e){const n=d.toLowerCase();return o.includes(n)?(i.consume(e),O):q(e)}return n(e)&&d.length<8?(i.consume(e),d+=String.fromCharCode(e),K):q(e)}function M(n){return 93===n?(i.consume(n),N):q(n)}function N(n){return 62===n?(i.consume(n),O):45===n&&2===h?(i.consume(n),N):q(n)}function O(n){return null===n||u(n)?(i.exit("htmlFlowData"),Q(n)):(i.consume(n),O)}function Q(n){return i.exit("htmlFlow"),m(n)}},resolveTo:function(n){let e=n.length;for(;e--&&("enter"!==n[e][0]||"htmlFlow"!==n[e][1].type););e>1&&"linePrefix"===n[e-2][1].type&&(n[e][1].start=n[e-2][1].start,n[e+1][1].start=n[e-2][1].start,n.splice(e-2,2));return n},concrete:!0},s={tokenize:function(n,e,t){return 
function(r){return n.enter("lineEnding"),n.consume(r),n.exit("lineEnding"),n.attempt(i,e,t)}},partial:!0},l={tokenize:function(n,e,t){const r=this;return function(e){if(u(e))return n.enter("lineEnding"),n.consume(e),n.exit("lineEnding"),o;return t(e)};function o(n){return r.parser.lazy[r.now().line]?t(n):e(n)}},partial:!0};export{m as htmlFlow};
+ //# sourceMappingURL=html-flow.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"html-flow.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/html-flow.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {htmlBlockNames, htmlRawNames} from 'micromark-util-html-tag-name'\nimport {blankLine} from './blank-line.js'\n\n/** @type {Construct} */\nexport const htmlFlow = {\n name: 'htmlFlow',\n tokenize: tokenizeHtmlFlow,\n resolveTo: resolveToHtmlFlow,\n concrete: true\n}\n\n/** @type {Construct} */\nconst blankLineBefore = {\n tokenize: tokenizeBlankLineBefore,\n partial: true\n}\nconst nonLazyContinuationStart = {\n tokenize: tokenizeNonLazyContinuationStart,\n partial: true\n}\n\n/** @type {Resolver} */\nfunction resolveToHtmlFlow(events) {\n let index = events.length\n while (index--) {\n if (events[index][0] === 'enter' && events[index][1].type === 'htmlFlow') {\n break\n }\n }\n if (index > 1 && events[index - 2][1].type === 'linePrefix') {\n // Add the prefix start to the HTML token.\n events[index][1].start = events[index - 2][1].start\n // Add the prefix start to the HTML line token.\n events[index + 1][1].start = events[index - 2][1].start\n // Remove the line prefix.\n events.splice(index - 2, 2)\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHtmlFlow(effects, ok, nok) {\n const self = this\n /** @type {number} */\n let marker\n /** @type {boolean} */\n let closingTag\n /** @type {string} */\n let buffer\n /** @type {number} */\n let index\n /** @type {Code} */\n let markerB\n return start\n\n /**\n * Start of HTML (flow).\n *\n * ```markdown\n * > | <x />\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse indent like `markdown-rs`.\n return before(code)\n }\n\n /**\n * At `<`, after optional whitespace.\n *\n * ```markdown\n * > | <x />\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('htmlFlow')\n effects.enter('htmlFlowData')\n effects.consume(code)\n return open\n }\n\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | <x />\n * ^\n * > | <!doctype>\n * ^\n * > | <!--xxx-->\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 33) {\n effects.consume(code)\n return declarationOpen\n }\n if (code === 47) {\n effects.consume(code)\n closingTag = true\n return tagCloseStart\n }\n if (code === 63) {\n effects.consume(code)\n marker = 3\n // To do:\n // tokenizer.concrete = true\n // To do: use `markdown-rs` style interrupt.\n // While we’re in an instruction instead of a declaration, we’re on a `?`\n // right now, so we do need to search for `>`, similar to declarations.\n return self.interrupt ? 
ok : continuationDeclarationInside\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * After `<!`, at declaration, comment, or CDATA.\n *\n * ```markdown\n * > | <!doctype>\n * ^\n * > | <!--xxx-->\n * ^\n * > | <![CDATA[>&<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code)\n marker = 2\n return commentOpenInside\n }\n if (code === 91) {\n effects.consume(code)\n marker = 5\n index = 0\n return cdataOpenInside\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n marker = 4\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuationDeclarationInside\n }\n return nok(code)\n }\n\n /**\n * After `<!-`, inside a comment, at another `-`.\n *\n * ```markdown\n * > | <!--xxx-->\n * ^\n * ```\n *\n * @type {State}\n */\n function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code)\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuationDeclarationInside\n }\n return nok(code)\n }\n\n /**\n * After `<![`, inside CDATA, expecting `CDATA[`.\n *\n * ```markdown\n * > | <![CDATA[>&<]]>\n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function cdataOpenInside(code) {\n const value = 'CDATA['\n if (code === value.charCodeAt(index++)) {\n effects.consume(code)\n if (index === value.length) {\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuation\n }\n return cdataOpenInside\n }\n return nok(code)\n }\n\n /**\n * After `</`, in closing tag, at tag name.\n *\n * ```markdown\n * > | </x>\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseStart(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * In tag name.\n *\n * ```markdown\n * > | <ab>\n * ^^\n * > | </ab>\n * ^^\n * ```\n *\n * @type {State}\n */\n function tagName(code) {\n if (\n code === null ||\n code === 47 ||\n code === 62 ||\n markdownLineEndingOrSpace(code)\n ) {\n const slash = code === 47\n const name = buffer.toLowerCase()\n if (!slash && !closingTag && htmlRawNames.includes(name)) {\n marker = 1\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code)\n }\n if (htmlBlockNames.includes(buffer.toLowerCase())) {\n marker = 6\n if (slash) {\n effects.consume(code)\n return basicSelfClosing\n }\n\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code)\n }\n marker = 7\n // Do not support complete HTML when interrupting.\n return self.interrupt && !self.parser.lazy[self.now().line]\n ? nok(code)\n : closingTag\n ? completeClosingTagAfter(code)\n : completeAttributeNameBefore(code)\n }\n\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n buffer += String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * After closing slash of a basic tag name.\n *\n * ```markdown\n * > | <div/>\n * ^\n * ```\n *\n * @type {State}\n */\n function basicSelfClosing(code) {\n if (code === 62) {\n effects.consume(code)\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? 
ok : continuation\n }\n return nok(code)\n }\n\n /**\n * After closing slash of a complete tag name.\n *\n * ```markdown\n * > | <x/>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeClosingTagAfter(code) {\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeClosingTagAfter\n }\n return completeEnd(code)\n }\n\n /**\n * At an attribute name.\n *\n * At first, this state is used after a complete tag name, after whitespace,\n * where it expects optional attributes or the end of the tag.\n * It is also reused after attributes, when expecting more optional\n * attributes.\n *\n * ```markdown\n * > | <a />\n * ^\n * > | <a :b>\n * ^\n * > | <a _b>\n * ^\n * > | <a b>\n * ^\n * > | <a >\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeNameBefore(code) {\n if (code === 47) {\n effects.consume(code)\n return completeEnd\n }\n\n // ASCII alphanumerical and `:` and `_`.\n if (code === 58 || code === 95 || asciiAlpha(code)) {\n effects.consume(code)\n return completeAttributeName\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeNameBefore\n }\n return completeEnd(code)\n }\n\n /**\n * In attribute name.\n *\n * ```markdown\n * > | <a :b>\n * ^\n * > | <a _b>\n * ^\n * > | <a b>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeName(code) {\n // ASCII alphanumerical and `-`, `.`, `:`, and `_`.\n if (\n code === 45 ||\n code === 46 ||\n code === 58 ||\n code === 95 ||\n asciiAlphanumeric(code)\n ) {\n effects.consume(code)\n return completeAttributeName\n }\n return completeAttributeNameAfter(code)\n }\n\n /**\n * After attribute name, at an optional initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | <a b>\n * ^\n * > | <a b=c>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeNameAfter(code) {\n if (code === 61) {\n effects.consume(code)\n return completeAttributeValueBefore\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeNameAfter\n }\n return completeAttributeNameBefore(code)\n }\n\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | <a b=c>\n * ^\n * > | <a b=\"c\">\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueBefore(code) {\n if (\n code === null ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 34 || code === 39) {\n effects.consume(code)\n markerB = code\n return completeAttributeValueQuoted\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeValueBefore\n }\n return completeAttributeValueUnquoted(code)\n }\n\n /**\n * In double or single quoted attribute value.\n *\n * ```markdown\n * > | <a b=\"c\">\n * ^\n * > | <a b='c'>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueQuoted(code) {\n if (code === markerB) {\n effects.consume(code)\n markerB = null\n return completeAttributeValueQuotedAfter\n }\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n effects.consume(code)\n return completeAttributeValueQuoted\n }\n\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | <a b=c>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueUnquoted(code) {\n if (\n code === null ||\n code === 34 ||\n code === 39 ||\n code === 47 ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96 ||\n markdownLineEndingOrSpace(code)\n ) {\n return 
completeAttributeNameAfter(code)\n }\n effects.consume(code)\n return completeAttributeValueUnquoted\n }\n\n /**\n * After double or single quoted attribute value, before whitespace or the\n * end of the tag.\n *\n * ```markdown\n * > | <a b=\"c\">\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || markdownSpace(code)) {\n return completeAttributeNameBefore(code)\n }\n return nok(code)\n }\n\n /**\n * In certain circumstances of a complete tag where only an `>` is allowed.\n *\n * ```markdown\n * > | <a b=\"c\">\n * ^\n * ```\n *\n * @type {State}\n */\n function completeEnd(code) {\n if (code === 62) {\n effects.consume(code)\n return completeAfter\n }\n return nok(code)\n }\n\n /**\n * After `>` in a complete tag.\n *\n * ```markdown\n * > | <x>\n * ^\n * ```\n *\n * @type {State}\n */\n function completeAfter(code) {\n if (code === null || markdownLineEnding(code)) {\n // // Do not form containers.\n // tokenizer.concrete = true\n return continuation(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAfter\n }\n return nok(code)\n }\n\n /**\n * In continuation of any HTML kind.\n *\n * ```markdown\n * > | <!--xxx-->\n * ^\n * ```\n *\n * @type {State}\n */\n function continuation(code) {\n if (code === 45 && marker === 2) {\n effects.consume(code)\n return continuationCommentInside\n }\n if (code === 60 && marker === 1) {\n effects.consume(code)\n return continuationRawTagOpen\n }\n if (code === 62 && marker === 4) {\n effects.consume(code)\n return continuationClose\n }\n if (code === 63 && marker === 3) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n if (code === 93 && marker === 5) {\n effects.consume(code)\n return continuationCdataInside\n }\n if (markdownLineEnding(code) && (marker === 6 || marker === 7)) {\n effects.exit('htmlFlowData')\n return effects.check(\n blankLineBefore,\n continuationAfter,\n continuationStart\n )(code)\n }\n if (code === null || markdownLineEnding(code)) {\n effects.exit('htmlFlowData')\n return continuationStart(code)\n }\n effects.consume(code)\n return continuation\n }\n\n /**\n * In continuation, at eol.\n *\n * ```markdown\n * > | <x>\n * ^\n * | asd\n * ```\n *\n * @type {State}\n */\n function continuationStart(code) {\n return effects.check(\n nonLazyContinuationStart,\n continuationStartNonLazy,\n continuationAfter\n )(code)\n }\n\n /**\n * In continuation, at eol, before non-lazy content.\n *\n * ```markdown\n * > | <x>\n * ^\n * | asd\n * ```\n *\n * @type {State}\n */\n function continuationStartNonLazy(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return continuationBefore\n }\n\n /**\n * In continuation, before non-lazy content.\n *\n * ```markdown\n * | <x>\n * > | asd\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n return continuationStart(code)\n }\n effects.enter('htmlFlowData')\n return continuation(code)\n }\n\n /**\n * In comment continuation, after one `-`, expecting another.\n *\n * ```markdown\n * > | <!--xxx-->\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationCommentInside(code) {\n if (code === 45) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In raw continuation, after `<`, at `/`.\n *\n * ```markdown\n * > | <script>console.log(1)</script>\n * ^\n * ```\n *\n * @type {State}\n */\n 
function continuationRawTagOpen(code) {\n if (code === 47) {\n effects.consume(code)\n buffer = ''\n return continuationRawEndTag\n }\n return continuation(code)\n }\n\n /**\n * In raw continuation, after `</`, in a raw tag name.\n *\n * ```markdown\n * > | <script>console.log(1)</script>\n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function continuationRawEndTag(code) {\n if (code === 62) {\n const name = buffer.toLowerCase()\n if (htmlRawNames.includes(name)) {\n effects.consume(code)\n return continuationClose\n }\n return continuation(code)\n }\n if (asciiAlpha(code) && buffer.length < 8) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer += String.fromCharCode(code)\n return continuationRawEndTag\n }\n return continuation(code)\n }\n\n /**\n * In cdata continuation, after `]`, expecting `]>`.\n *\n * ```markdown\n * > | <![CDATA[>&<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationCdataInside(code) {\n if (code === 93) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In declaration or instruction continuation, at `>`.\n *\n * ```markdown\n * > | <!-->\n * ^\n * > | <?>\n * ^\n * > | <!q>\n * ^\n * > | <!--ab-->\n * ^\n * > | <![CDATA[>&<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationDeclarationInside(code) {\n if (code === 62) {\n effects.consume(code)\n return continuationClose\n }\n\n // More dashes.\n if (code === 45 && marker === 2) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In closed continuation: everything we get until the eol/eof is part of it.\n *\n * ```markdown\n * > | <!doctype>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationClose(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('htmlFlowData')\n return continuationAfter(code)\n }\n effects.consume(code)\n return continuationClose\n }\n\n /**\n * Done.\n *\n * ```markdown\n * > | <!doctype>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationAfter(code) {\n effects.exit('htmlFlow')\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n // // No longer concrete.\n // tokenizer.concrete = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeNonLazyContinuationStart(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * At eol, before continuation.\n *\n * ```markdown\n * > | * ```js\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return after\n }\n return nok(code)\n }\n\n /**\n * A continuation.\n *\n * ```markdown\n * | * ```js\n * > | b\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return self.parser.lazy[self.now().line] ? 
nok(code) : ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlankLineBefore(effects, ok, nok) {\n return start\n\n /**\n * Before eol, expecting blank line.\n *\n * ```markdown\n * > | <div>\n * ^\n * |\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return effects.attempt(blankLine, ok, nok)\n }\n}\n"],"names":["htmlFlow","name","tokenize","effects","ok","nok","self","this","marker","closingTag","buffer","index","markerB","code","enter","consume","open","before","declarationOpen","tagCloseStart","interrupt","continuationDeclarationInside","asciiAlpha","String","fromCharCode","tagName","commentOpenInside","cdataOpenInside","value","charCodeAt","continuation","markdownLineEndingOrSpace","slash","toLowerCase","htmlRawNames","includes","htmlBlockNames","basicSelfClosing","parser","lazy","now","line","completeClosingTagAfter","completeAttributeNameBefore","asciiAlphanumeric","markdownSpace","completeEnd","completeAttributeName","completeAttributeNameAfter","completeAttributeValueBefore","completeAttributeValueQuoted","completeAttributeValueUnquoted","completeAttributeValueQuotedAfter","markdownLineEnding","completeAfter","continuationCommentInside","continuationRawTagOpen","continuationClose","continuationCdataInside","exit","continuationStart","check","blankLineBefore","continuationAfter","nonLazyContinuationStart","continuationStartNonLazy","continuationBefore","continuationRawEndTag","length","resolveTo","events","type","start","splice","concrete","attempt","blankLine","partial","after"],"mappings":"2TAoBY,MAACA,EAAW,CACtBC,KAAM,WACNC,SAsCF,SAA0BC,EAASC,EAAIC,GACrC,MAAMC,EAAOC,KAEb,IAAIC,EAEAC,EAEAC,EAEAC,EAEAC,EACJ,OAYA,SAAeC,GAEb,OAaF,SAAgBA,GAId,OAHAV,EAAQW,MAAM,YACdX,EAAQW,MAAM,gBACdX,EAAQY,QAAQF,GACTG,CACR,CAlBQC,CAAOJ,EACf,EAiCD,SAASG,EAAKH,GACZ,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACTK,GAEI,KAATL,GACFV,EAAQY,QAAQF,GAChBJ,GAAa,EACNU,GAEI,KAATN,GACFV,EAAQY,QAAQF,GAChBL,EAAS,EAMFF,EAAKc,UAAYhB,EAAKiB,GAI3BC,EAAWT,IACbV,EAAQY,QAAQF,GAEhBH,EAASa,OAAOC,aAAaX,GACtBY,GAEFpB,EAAIQ,EACZ,CAgBD,SAASK,EAAgBL,GACvB,OAAa,KAATA,GACFV,EAAQY,QAAQF,GAChBL,EAAS,EACFkB,GAEI,KAATb,GACFV,EAAQY,QAAQF,GAChBL,EAAS,EACTG,EAAQ,EACDgB,GAILL,EAAWT,IACbV,EAAQY,QAAQF,GAChBL,EAAS,EAGFF,EAAKc,UAAYhB,EAAKiB,GAExBhB,EAAIQ,EACZ,CAYD,SAASa,EAAkBb,GACzB,OAAa,KAATA,GACFV,EAAQY,QAAQF,GAGTP,EAAKc,UAAYhB,EAAKiB,GAExBhB,EAAIQ,EACZ,CAYD,SAASc,EAAgBd,GACvB,MAAMe,EAAQ,SACd,OAAIf,IAASe,EAAMC,WAAWlB,MAC5BR,EAAQY,QAAQF,GACFe,IAAVjB,EAGKL,EAAKc,UAAYhB,EAAK0B,EAExBH,GAEFtB,EAAIQ,EACZ,CAYD,SAASM,EAAcN,GACrB,OAAIS,EAAWT,IACbV,EAAQY,QAAQF,GAEhBH,EAASa,OAAOC,aAAaX,GACtBY,GAEFpB,EAAIQ,EACZ,CAcD,SAASY,EAAQZ,GACf,GACW,OAATA,GACS,KAATA,GACS,KAATA,GACAkB,EAA0BlB,GAC1B,CACA,MAAMmB,EAAiB,KAATnB,EACRZ,EAAOS,EAAOuB,cACpB,OAAKD,GAAUvB,IAAcyB,EAAaC,SAASlC,GAM/CmC,EAAeD,SAASzB,EAAOuB,gBACjCzB,EAAS,EACLwB,GACF7B,EAAQY,QAAQF,GACTwB,GAKF/B,EAAKc,UAAYhB,EAAGS,GAAQiB,EAAajB,KAElDL,EAAS,EAEFF,EAAKc,YAAcd,EAAKgC,OAAOC,KAAKjC,EAAKkC,MAAMC,MAClDpC,EAAIQ,GACJJ,EACAiC,EAAwB7B,GACxB8B,EAA4B9B,KAtB9BL,EAAS,EAGFF,EAAKc,UAAYhB,EAAGS,GAAQiB,EAAajB,GAoBnD,CAGD,OAAa,KAATA,GAAe+B,EAAkB/B,IACnCV,EAAQY,QAAQF,GAChBH,GAAUa,OAAOC,aAAaX,GACvBY,GAEFpB,EAAIQ,EACZ,CAYD,SAASwB,EAAiBxB,GACxB,OAAa,KAATA,GACFV,EAAQY,QAAQF,GAGTP,EAAKc,UAAYhB,EAAK0B,GAExBzB,EAAIQ,EACZ,CAYD,SAAS6B,EAAwB7B,GAC/B,OAAIgC,EAAchC,IAChBV,EAAQY,QAAQF,GACT6B,GAEFI,EAAYjC,EACpB,CAyBD,SAAS8B,EAA4B9B,GACnC,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACTiC,GAII,KAATjC,GAAwB,KAATA,GAAeS,EAAWT,
IAC3CV,EAAQY,QAAQF,GACTkC,GAELF,EAAchC,IAChBV,EAAQY,QAAQF,GACT8B,GAEFG,EAAYjC,EACpB,CAgBD,SAASkC,EAAsBlC,GAE7B,OACW,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACA+B,EAAkB/B,IAElBV,EAAQY,QAAQF,GACTkC,GAEFC,EAA2BnC,EACnC,CAeD,SAASmC,EAA2BnC,GAClC,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACToC,GAELJ,EAAchC,IAChBV,EAAQY,QAAQF,GACTmC,GAEFL,EAA4B9B,EACpC,CAeD,SAASoC,EAA6BpC,GACpC,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,EAEOR,EAAIQ,GAEA,KAATA,GAAwB,KAATA,GACjBV,EAAQY,QAAQF,GAChBD,EAAUC,EACHqC,GAELL,EAAchC,IAChBV,EAAQY,QAAQF,GACToC,GAEFE,EAA+BtC,EACvC,CAcD,SAASqC,EAA6BrC,GACpC,OAAIA,IAASD,GACXT,EAAQY,QAAQF,GAChBD,EAAU,KACHwC,GAEI,OAATvC,GAAiBwC,EAAmBxC,GAC/BR,EAAIQ,IAEbV,EAAQY,QAAQF,GACTqC,EACR,CAYD,SAASC,EAA+BtC,GACtC,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACAkB,EAA0BlB,GAEnBmC,EAA2BnC,IAEpCV,EAAQY,QAAQF,GACTsC,EACR,CAaD,SAASC,EAAkCvC,GACzC,OAAa,KAATA,GAAwB,KAATA,GAAegC,EAAchC,GACvC8B,EAA4B9B,GAE9BR,EAAIQ,EACZ,CAYD,SAASiC,EAAYjC,GACnB,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACTyC,GAEFjD,EAAIQ,EACZ,CAYD,SAASyC,EAAczC,GACrB,OAAa,OAATA,GAAiBwC,EAAmBxC,GAG/BiB,EAAajB,GAElBgC,EAAchC,IAChBV,EAAQY,QAAQF,GACTyC,GAEFjD,EAAIQ,EACZ,CAYD,SAASiB,EAAajB,GACpB,OAAa,KAATA,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACT0C,GAEI,KAAT1C,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACT2C,GAEI,KAAT3C,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACT4C,GAEI,KAAT5C,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACTQ,GAEI,KAATR,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACT6C,IAELL,EAAmBxC,IAAqB,IAAXL,GAA2B,IAAXA,EAQpC,OAATK,GAAiBwC,EAAmBxC,IACtCV,EAAQwD,KAAK,gBACNC,EAAkB/C,KAE3BV,EAAQY,QAAQF,GACTiB,IAZL3B,EAAQwD,KAAK,gBACNxD,EAAQ0D,MACbC,EACAC,EACAH,EAHKzD,CAILU,GAQL,CAaD,SAAS+C,EAAkB/C,GACzB,OAAOV,EAAQ0D,MACbG,EACAC,EACAF,EAHK5D,CAILU,EACH,CAaD,SAASoD,EAAyBpD,GAIhC,OAHAV,EAAQW,MAAM,cACdX,EAAQY,QAAQF,GAChBV,EAAQwD,KAAK,cACNO,CACR,CAaD,SAASA,EAAmBrD,GAC1B,OAAa,OAATA,GAAiBwC,EAAmBxC,GAC/B+C,EAAkB/C,IAE3BV,EAAQW,MAAM,gBACPgB,EAAajB,GACrB,CAYD,SAAS0C,EAA0B1C,GACjC,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACTQ,GAEFS,EAAajB,EACrB,CAYD,SAAS2C,EAAuB3C,GAC9B,OAAa,KAATA,GACFV,EAAQY,QAAQF,GAChBH,EAAS,GACFyD,GAEFrC,EAAajB,EACrB,CAYD,SAASsD,EAAsBtD,GAC7B,GAAa,KAATA,EAAa,CACf,MAAMZ,EAAOS,EAAOuB,cACpB,OAAIC,EAAaC,SAASlC,IACxBE,EAAQY,QAAQF,GACT4C,GAEF3B,EAAajB,EACrB,CACD,OAAIS,EAAWT,IAASH,EAAO0D,OAAS,GACtCjE,EAAQY,QAAQF,GAEhBH,GAAUa,OAAOC,aAAaX,GACvBsD,GAEFrC,EAAajB,EACrB,CAYD,SAAS6C,EAAwB7C,GAC/B,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACTQ,GAEFS,EAAajB,EACrB,CAoBD,SAASQ,EAA8BR,GACrC,OAAa,KAATA,GACFV,EAAQY,QAAQF,GACT4C,GAII,KAAT5C,GAA0B,IAAXL,GACjBL,EAAQY,QAAQF,GACTQ,GAEFS,EAAajB,EACrB,CAYD,SAAS4C,EAAkB5C,GACzB,OAAa,OAATA,GAAiBwC,EAAmBxC,IACtCV,EAAQwD,KAAK,gBACNI,EAAkBlD,KAE3BV,EAAQY,QAAQF,GACT4C,EACR,CAYD,SAASM,EAAkBlD,GAMzB,OALAV,EAAQwD,KAAK,YAKNvD,EAAGS,EACX,CACH,EArzBEwD,UAeF,SAA2BC,GACzB,IAAI3D,EAAQ2D,EAAOF,OACnB,KAAOzD,MACoB,UAArB2D,EAAO3D,GAAO,IAA4C,aAA1B2D,EAAO3D,GAAO,GAAG4D,QAInD5D,EAAQ,GAAmC,eAA9B2D,EAAO3D,EAAQ,GAAG,GAAG4D,OAEpCD,EAAO3D,GAAO,GAAG6D,MAAQF,EAAO3D,EAAQ,GAAG,GAAG6D,MAE9CF,EAAO3D,EAAQ,GAAG,GAAG6D,MAAQF,EAAO3D,EAAQ,GAAG,GAAG6D,MAElDF,EAAOG,OAAO9D,EAAQ,EAAG,IAE3B,OAAO2D,CACT,EA9BEI,UAAU,GAINZ,EAAkB,CACtB5D,SAk2BF,SAAiCC,EAASC,EAAIC,GAC5C,OAaA,SAAeQ,GAIb,OAHAV,EAAQW,MAAM,cACdX,EAAQY,QAAQF,GAChBV,EAAQwD,KAAK,cACNxD,EAAQwE,QAAQC,EAAWxE,EAAIC,EACvC,CACH,EAr3BEwE,SAAS,GAELb,EAA2B,CAC/B9D,SAizBF,SAA0CC,EAASC,EAAIC,GACrD,MAAMC,EAAOC,KACb,OAaA,SAAeM,GACb,GAAIwC,EAAmBxC,GAIrB,OAHAV,EAAQW,MAAM,cACdX,EAAQY,QAAQF,GAChBV,EAAQwD,KAAK,cACNmB,EAET,OAAOzE,EAAIQ,EACZ,EAaD,SAASiE,EAAMjE,GACb,OAAOP,EAAKgC,OAAOC,KAAKjC,EAAKkC,MAAMC,MAAQpC,EAAIQ,GAAQT,EAAGS,EAC3D,CACH,EAv1BEgE,SAAS
"}
@@ -0,0 +1,2 @@
+ import{factorySpace as n}from"../../micromark-factory-space/index.js";import{asciiAlpha as u,markdownLineEnding as e,asciiAlphanumeric as t,markdownSpace as o,markdownLineEndingOrSpace as c}from"../node_modules/micromark-util-character/index.js";const r={name:"htmlText",tokenize:function(r,m,i){const s=this;let f,l,a;return function(n){return r.enter("htmlText"),r.enter("htmlTextData"),r.consume(n),x};function x(n){return 33===n?(r.consume(n),d):47===n?(r.consume(n),b):63===n?(r.consume(n),C):u(n)?(r.consume(n),I):i(n)}function d(n){return 45===n?(r.consume(n),h):91===n?(r.consume(n),l=0,k):u(n)?(r.consume(n),v):i(n)}function h(n){return 45===n?(r.consume(n),D):i(n)}function T(n){return null===n?i(n):45===n?(r.consume(n),p):e(n)?(a=T,J(n)):(r.consume(n),T)}function p(n){return 45===n?(r.consume(n),D):T(n)}function D(n){return 62===n?H(n):45===n?p(n):T(n)}function k(n){const u="CDATA[";return n===u.charCodeAt(l++)?(r.consume(n),6===l?A:k):i(n)}function A(n){return null===n?i(n):93===n?(r.consume(n),g):e(n)?(a=A,J(n)):(r.consume(n),A)}function g(n){return 93===n?(r.consume(n),j):A(n)}function j(n){return 62===n?H(n):93===n?(r.consume(n),j):A(n)}function v(n){return null===n||62===n?H(n):e(n)?(a=v,J(n)):(r.consume(n),v)}function C(n){return null===n?i(n):63===n?(r.consume(n),E):e(n)?(a=C,J(n)):(r.consume(n),C)}function E(n){return 62===n?H(n):C(n)}function b(n){return u(n)?(r.consume(n),y):i(n)}function y(n){return 45===n||t(n)?(r.consume(n),y):z(n)}function z(n){return e(n)?(a=z,J(n)):o(n)?(r.consume(n),z):H(n)}function I(n){return 45===n||t(n)?(r.consume(n),I):47===n||62===n||c(n)?P(n):i(n)}function P(n){return 47===n?(r.consume(n),H):58===n||95===n||u(n)?(r.consume(n),_):e(n)?(a=P,J(n)):o(n)?(r.consume(n),P):H(n)}function _(n){return 45===n||46===n||58===n||95===n||t(n)?(r.consume(n),_):q(n)}function q(n){return 61===n?(r.consume(n),w):e(n)?(a=q,J(n)):o(n)?(r.consume(n),q):P(n)}function w(n){return null===n||60===n||61===n||62===n||96===n?i(n):34===n||39===n?(r.consume(n),f=n,B):e(n)?(a=w,J(n)):o(n)?(r.consume(n),w):(r.consume(n),F)}function B(n){return n===f?(r.consume(n),f=void 0,G):null===n?i(n):e(n)?(a=B,J(n)):(r.consume(n),B)}function F(n){return null===n||34===n||39===n||60===n||61===n||96===n?i(n):47===n||62===n||c(n)?P(n):(r.consume(n),F)}function G(n){return 47===n||62===n||c(n)?P(n):i(n)}function H(n){return 62===n?(r.consume(n),r.exit("htmlTextData"),r.exit("htmlText"),m):i(n)}function J(n){return r.exit("htmlTextData"),r.enter("lineEnding"),r.consume(n),r.exit("lineEnding"),K}function K(u){return o(u)?n(r,L,"linePrefix",s.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(u):L(u)}function L(n){return r.enter("htmlTextData"),a(n)}}};export{r as htmlText};
+ //# sourceMappingURL=html-text.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"html-text.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/html-text.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const htmlText = {\n name: 'htmlText',\n tokenize: tokenizeHtmlText\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHtmlText(effects, ok, nok) {\n const self = this\n /** @type {NonNullable<Code> | undefined} */\n let marker\n /** @type {number} */\n let index\n /** @type {State} */\n let returnState\n return start\n\n /**\n * Start of HTML (text).\n *\n * ```markdown\n * > | a <b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('htmlText')\n effects.enter('htmlTextData')\n effects.consume(code)\n return open\n }\n\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | a <b> c\n * ^\n * > | a <!doctype> c\n * ^\n * > | a <!--b--> c\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 33) {\n effects.consume(code)\n return declarationOpen\n }\n if (code === 47) {\n effects.consume(code)\n return tagCloseStart\n }\n if (code === 63) {\n effects.consume(code)\n return instruction\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n return tagOpen\n }\n return nok(code)\n }\n\n /**\n * After `<!`, at declaration, comment, or CDATA.\n *\n * ```markdown\n * > | a <!doctype> c\n * ^\n * > | a <!--b--> c\n * ^\n * > | a <![CDATA[>&<]]> c\n * ^\n * ```\n *\n * @type {State}\n */\n function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code)\n return commentOpenInside\n }\n if (code === 91) {\n effects.consume(code)\n index = 0\n return cdataOpenInside\n }\n if (asciiAlpha(code)) {\n effects.consume(code)\n return declaration\n }\n return nok(code)\n }\n\n /**\n * In a comment, after `<!-`, at another `-`.\n *\n * ```markdown\n * > | a <!--b--> c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code)\n return commentEnd\n }\n return nok(code)\n }\n\n /**\n * In comment.\n *\n * ```markdown\n * > | a <!--b--> c\n * ^\n * ```\n *\n * @type {State}\n */\n function comment(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 45) {\n effects.consume(code)\n return commentClose\n }\n if (markdownLineEnding(code)) {\n returnState = comment\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return comment\n }\n\n /**\n * In comment, after `-`.\n *\n * ```markdown\n * > | a <!--b--> c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentClose(code) {\n if (code === 45) {\n effects.consume(code)\n return commentEnd\n }\n return comment(code)\n }\n\n /**\n * In comment, after `--`.\n *\n * ```markdown\n * > | a <!--b--> c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentEnd(code) {\n return code === 62\n ? end(code)\n : code === 45\n ? 
commentClose(code)\n : comment(code)\n }\n\n /**\n * After `<![`, in CDATA, expecting `CDATA[`.\n *\n * ```markdown\n * > | a <![CDATA[>&<]]> b\n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function cdataOpenInside(code) {\n const value = 'CDATA['\n if (code === value.charCodeAt(index++)) {\n effects.consume(code)\n return index === value.length ? cdata : cdataOpenInside\n }\n return nok(code)\n }\n\n /**\n * In CDATA.\n *\n * ```markdown\n * > | a <![CDATA[>&<]]> b\n * ^^^\n * ```\n *\n * @type {State}\n */\n function cdata(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 93) {\n effects.consume(code)\n return cdataClose\n }\n if (markdownLineEnding(code)) {\n returnState = cdata\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return cdata\n }\n\n /**\n * In CDATA, after `]`, at another `]`.\n *\n * ```markdown\n * > | a <![CDATA[>&<]]> b\n * ^\n * ```\n *\n * @type {State}\n */\n function cdataClose(code) {\n if (code === 93) {\n effects.consume(code)\n return cdataEnd\n }\n return cdata(code)\n }\n\n /**\n * In CDATA, after `]]`, at `>`.\n *\n * ```markdown\n * > | a <![CDATA[>&<]]> b\n * ^\n * ```\n *\n * @type {State}\n */\n function cdataEnd(code) {\n if (code === 62) {\n return end(code)\n }\n if (code === 93) {\n effects.consume(code)\n return cdataEnd\n }\n return cdata(code)\n }\n\n /**\n * In declaration.\n *\n * ```markdown\n * > | a <!b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function declaration(code) {\n if (code === null || code === 62) {\n return end(code)\n }\n if (markdownLineEnding(code)) {\n returnState = declaration\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return declaration\n }\n\n /**\n * In instruction.\n *\n * ```markdown\n * > | a <?b?> c\n * ^\n * ```\n *\n * @type {State}\n */\n function instruction(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 63) {\n effects.consume(code)\n return instructionClose\n }\n if (markdownLineEnding(code)) {\n returnState = instruction\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return instruction\n }\n\n /**\n * In instruction, after `?`, at `>`.\n *\n * ```markdown\n * > | a <?b?> c\n * ^\n * ```\n *\n * @type {State}\n */\n function instructionClose(code) {\n return code === 62 ? 
end(code) : instruction(code)\n }\n\n /**\n * After `</`, in closing tag, at tag name.\n *\n * ```markdown\n * > | a </b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseStart(code) {\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n return tagClose\n }\n return nok(code)\n }\n\n /**\n * After `</x`, in a tag name.\n *\n * ```markdown\n * > | a </b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagClose(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n return tagClose\n }\n return tagCloseBetween(code)\n }\n\n /**\n * In closing tag, after tag name.\n *\n * ```markdown\n * > | a </b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseBetween(code) {\n if (markdownLineEnding(code)) {\n returnState = tagCloseBetween\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagCloseBetween\n }\n return end(code)\n }\n\n /**\n * After `<x`, in opening tag name.\n *\n * ```markdown\n * > | a <b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpen(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n return tagOpen\n }\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n return nok(code)\n }\n\n /**\n * In opening tag, after tag name.\n *\n * ```markdown\n * > | a <b> c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenBetween(code) {\n if (code === 47) {\n effects.consume(code)\n return end\n }\n\n // ASCII alphabetical and `:` and `_`.\n if (code === 58 || code === 95 || asciiAlpha(code)) {\n effects.consume(code)\n return tagOpenAttributeName\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenBetween\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagOpenBetween\n }\n return end(code)\n }\n\n /**\n * In attribute name.\n *\n * ```markdown\n * > | a <b c> d\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeName(code) {\n // ASCII alphabetical and `-`, `.`, `:`, and `_`.\n if (\n code === 45 ||\n code === 46 ||\n code === 58 ||\n code === 95 ||\n asciiAlphanumeric(code)\n ) {\n effects.consume(code)\n return tagOpenAttributeName\n }\n return tagOpenAttributeNameAfter(code)\n }\n\n /**\n * After attribute name, before initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | a <b c> d\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeNameAfter(code) {\n if (code === 61) {\n effects.consume(code)\n return tagOpenAttributeValueBefore\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeNameAfter\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagOpenAttributeNameAfter\n }\n return tagOpenBetween(code)\n }\n\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | a <b c=d> e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueBefore(code) {\n if (\n code === null ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 34 || code === 39) {\n effects.consume(code)\n marker = code\n return tagOpenAttributeValueQuoted\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeValueBefore\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n 
effects.consume(code)\n return tagOpenAttributeValueBefore\n }\n effects.consume(code)\n return tagOpenAttributeValueUnquoted\n }\n\n /**\n * In double or single quoted attribute value.\n *\n * ```markdown\n * > | a <b c=\"d\"> e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueQuoted(code) {\n if (code === marker) {\n effects.consume(code)\n marker = undefined\n return tagOpenAttributeValueQuotedAfter\n }\n if (code === null) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeValueQuoted\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return tagOpenAttributeValueQuoted\n }\n\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | a <b c=d> e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueUnquoted(code) {\n if (\n code === null ||\n code === 34 ||\n code === 39 ||\n code === 60 ||\n code === 61 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n effects.consume(code)\n return tagOpenAttributeValueUnquoted\n }\n\n /**\n * After double or single quoted attribute value, before whitespace or the end\n * of the tag.\n *\n * ```markdown\n * > | a <b c=\"d\"> e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n return nok(code)\n }\n\n /**\n * In certain circumstances of a tag where only an `>` is allowed.\n *\n * ```markdown\n * > | a <b c=\"d\"> e\n * ^\n * ```\n *\n * @type {State}\n */\n function end(code) {\n if (code === 62) {\n effects.consume(code)\n effects.exit('htmlTextData')\n effects.exit('htmlText')\n return ok\n }\n return nok(code)\n }\n\n /**\n * At eol.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * > | a <!--a\n * ^\n * | b-->\n * ```\n *\n * @type {State}\n */\n function lineEndingBefore(code) {\n effects.exit('htmlTextData')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return lineEndingAfter\n }\n\n /**\n * After eol, at optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a <!--a\n * > | b-->\n * ^\n * ```\n *\n * @type {State}\n */\n function lineEndingAfter(code) {\n // Always populated by defaults.\n\n return markdownSpace(code)\n ? factorySpace(\n effects,\n lineEndingAfterPrefix,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? 
undefined\n : 4\n )(code)\n : lineEndingAfterPrefix(code)\n }\n\n /**\n * After eol, after optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a <!--a\n * > | b-->\n * ^\n * ```\n *\n * @type {State}\n */\n function lineEndingAfterPrefix(code) {\n effects.enter('htmlTextData')\n return returnState(code)\n }\n}\n"],"names":["htmlText","name","tokenize","effects","ok","nok","self","this","marker","index","returnState","code","enter","consume","open","declarationOpen","tagCloseStart","instruction","asciiAlpha","tagOpen","commentOpenInside","cdataOpenInside","declaration","commentEnd","comment","commentClose","markdownLineEnding","lineEndingBefore","end","value","charCodeAt","cdata","cdataClose","cdataEnd","instructionClose","tagClose","asciiAlphanumeric","tagCloseBetween","markdownSpace","markdownLineEndingOrSpace","tagOpenBetween","tagOpenAttributeName","tagOpenAttributeNameAfter","tagOpenAttributeValueBefore","tagOpenAttributeValueQuoted","tagOpenAttributeValueUnquoted","undefined","tagOpenAttributeValueQuotedAfter","exit","lineEndingAfter","factorySpace","lineEndingAfterPrefix","parser","constructs","disable","null","includes"],"mappings":"sPAiBY,MAACA,EAAW,CACtBC,KAAM,WACNC,SAOF,SAA0BC,EAASC,EAAIC,GACrC,MAAMC,EAAOC,KAEb,IAAIC,EAEAC,EAEAC,EACJ,OAYA,SAAeC,GAIb,OAHAR,EAAQS,MAAM,YACdT,EAAQS,MAAM,gBACdT,EAAQU,QAAQF,GACTG,CACR,EAgBD,SAASA,EAAKH,GACZ,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTI,GAEI,KAATJ,GACFR,EAAQU,QAAQF,GACTK,GAEI,KAATL,GACFR,EAAQU,QAAQF,GACTM,GAILC,EAAWP,IACbR,EAAQU,QAAQF,GACTQ,GAEFd,EAAIM,EACZ,CAgBD,SAASI,EAAgBJ,GACvB,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTS,GAEI,KAATT,GACFR,EAAQU,QAAQF,GAChBF,EAAQ,EACDY,GAELH,EAAWP,IACbR,EAAQU,QAAQF,GACTW,GAEFjB,EAAIM,EACZ,CAYD,SAASS,EAAkBT,GACzB,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTY,GAEFlB,EAAIM,EACZ,CAYD,SAASa,EAAQb,GACf,OAAa,OAATA,EACKN,EAAIM,GAEA,KAATA,GACFR,EAAQU,QAAQF,GACTc,GAELC,EAAmBf,IACrBD,EAAcc,EACPG,EAAiBhB,KAE1BR,EAAQU,QAAQF,GACTa,EACR,CAYD,SAASC,EAAad,GACpB,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTY,GAEFC,EAAQb,EAChB,CAYD,SAASY,EAAWZ,GAClB,OAAgB,KAATA,EACHiB,EAAIjB,GACK,KAATA,EACAc,EAAad,GACba,EAAQb,EACb,CAYD,SAASU,EAAgBV,GACvB,MAAMkB,EAAQ,SACd,OAAIlB,IAASkB,EAAMC,WAAWrB,MAC5BN,EAAQU,QAAQF,GACCkB,IAAVpB,EAAyBsB,EAAQV,GAEnChB,EAAIM,EACZ,CAYD,SAASoB,EAAMpB,GACb,OAAa,OAATA,EACKN,EAAIM,GAEA,KAATA,GACFR,EAAQU,QAAQF,GACTqB,GAELN,EAAmBf,IACrBD,EAAcqB,EACPJ,EAAiBhB,KAE1BR,EAAQU,QAAQF,GACToB,EACR,CAYD,SAASC,EAAWrB,GAClB,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTsB,GAEFF,EAAMpB,EACd,CAYD,SAASsB,EAAStB,GAChB,OAAa,KAATA,EACKiB,EAAIjB,GAEA,KAATA,GACFR,EAAQU,QAAQF,GACTsB,GAEFF,EAAMpB,EACd,CAYD,SAASW,EAAYX,GACnB,OAAa,OAATA,GAA0B,KAATA,EACZiB,EAAIjB,GAETe,EAAmBf,IACrBD,EAAcY,EACPK,EAAiBhB,KAE1BR,EAAQU,QAAQF,GACTW,EACR,CAYD,SAASL,EAAYN,GACnB,OAAa,OAATA,EACKN,EAAIM,GAEA,KAATA,GACFR,EAAQU,QAAQF,GACTuB,GAELR,EAAmBf,IACrBD,EAAcO,EACPU,EAAiBhB,KAE1BR,EAAQU,QAAQF,GACTM,EACR,CAYD,SAASiB,EAAiBvB,GACxB,OAAgB,KAATA,EAAciB,EAAIjB,GAAQM,EAAYN,EAC9C,CAYD,SAASK,EAAcL,GAErB,OAAIO,EAAWP,IACbR,EAAQU,QAAQF,GACTwB,GAEF9B,EAAIM,EACZ,CAYD,SAASwB,EAASxB,GAEhB,OAAa,KAATA,GAAeyB,EAAkBzB,IACnCR,EAAQU,QAAQF,GACTwB,GAEFE,EAAgB1B,EACxB,CAYD,SAAS0B,EAAgB1B,GACvB,OAAIe,EAAmBf,IACrBD,EAAc2B,EACPV,EAAiBhB,IAEtB2B,EAAc3B,IAChBR,EAAQU,QAAQF,GACT0B,GAEFT,EAAIjB,EACZ,CAYD,SAASQ,EAAQR,GAEf,OAAa,KAATA,GAAeyB,EAAkBzB,IACnCR,EAAQU,QAAQF,GACTQ,GAEI,KAATR,GAAwB,KAATA,GAAe4B,EAA0B5B,GACnD6B,EAAe7B,GAEjBN,EAAIM,EACZ,CAYD,SAAS6B,EAAe7B,GACtB,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTiB,GAII,KAATjB,GAAwB,KAATA,GAAeO,EAAWP,IAC3CR,EAAQU,Q
AAQF,GACT8B,GAELf,EAAmBf,IACrBD,EAAc8B,EACPb,EAAiBhB,IAEtB2B,EAAc3B,IAChBR,EAAQU,QAAQF,GACT6B,GAEFZ,EAAIjB,EACZ,CAYD,SAAS8B,EAAqB9B,GAE5B,OACW,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACAyB,EAAkBzB,IAElBR,EAAQU,QAAQF,GACT8B,GAEFC,EAA0B/B,EAClC,CAaD,SAAS+B,EAA0B/B,GACjC,OAAa,KAATA,GACFR,EAAQU,QAAQF,GACTgC,GAELjB,EAAmBf,IACrBD,EAAcgC,EACPf,EAAiBhB,IAEtB2B,EAAc3B,IAChBR,EAAQU,QAAQF,GACT+B,GAEFF,EAAe7B,EACvB,CAaD,SAASgC,EAA4BhC,GACnC,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,EAEON,EAAIM,GAEA,KAATA,GAAwB,KAATA,GACjBR,EAAQU,QAAQF,GAChBH,EAASG,EACFiC,GAELlB,EAAmBf,IACrBD,EAAciC,EACPhB,EAAiBhB,IAEtB2B,EAAc3B,IAChBR,EAAQU,QAAQF,GACTgC,IAETxC,EAAQU,QAAQF,GACTkC,EACR,CAYD,SAASD,EAA4BjC,GACnC,OAAIA,IAASH,GACXL,EAAQU,QAAQF,GAChBH,OAASsC,EACFC,GAEI,OAATpC,EACKN,EAAIM,GAETe,EAAmBf,IACrBD,EAAckC,EACPjB,EAAiBhB,KAE1BR,EAAQU,QAAQF,GACTiC,EACR,CAYD,SAASC,EAA8BlC,GACrC,OACW,OAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,GACS,KAATA,EAEON,EAAIM,GAEA,KAATA,GAAwB,KAATA,GAAe4B,EAA0B5B,GACnD6B,EAAe7B,IAExBR,EAAQU,QAAQF,GACTkC,EACR,CAaD,SAASE,EAAiCpC,GACxC,OAAa,KAATA,GAAwB,KAATA,GAAe4B,EAA0B5B,GACnD6B,EAAe7B,GAEjBN,EAAIM,EACZ,CAYD,SAASiB,EAAIjB,GACX,OAAa,KAATA,GACFR,EAAQU,QAAQF,GAChBR,EAAQ6C,KAAK,gBACb7C,EAAQ6C,KAAK,YACN5C,GAEFC,EAAIM,EACZ,CAgBD,SAASgB,EAAiBhB,GAKxB,OAJAR,EAAQ6C,KAAK,gBACb7C,EAAQS,MAAM,cACdT,EAAQU,QAAQF,GAChBR,EAAQ6C,KAAK,cACNC,CACR,CAgBD,SAASA,EAAgBtC,GAGvB,OAAO2B,EAAc3B,GACjBuC,EACE/C,EACAgD,EACA,aACA7C,EAAK8C,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,qBACzCV,EACA,EANNI,CAOEvC,GACFwC,EAAsBxC,EAC3B,CAgBD,SAASwC,EAAsBxC,GAE7B,OADAR,EAAQS,MAAM,gBACPF,EAAYC,EACpB,CACH"}
@@ -0,0 +1,2 @@
+ import{factoryDestination as e}from"../../micromark-factory-destination/index.js";import{factoryLabel as r}from"../../micromark-factory-label/index.js";import{factoryTitle as t}from"../../micromark-factory-title/index.js";import{factoryWhitespace as n}from"../../micromark-factory-whitespace/index.js";import{markdownLineEndingOrSpace as i}from"../node_modules/micromark-util-character/index.js";import{push as a,splice as o}from"../../micromark-util-chunked/index.js";import{normalizeIdentifier as c}from"../../micromark-util-normalize-identifier/index.js";import{resolveAll as l}from"../../micromark-util-resolve-all/index.js";const s={name:"labelEnd",tokenize:function(e,r,t){const n=this;let i,a,o=n.events.length;for(;o--;)if(("labelImage"===n.events[o][1].type||"labelLink"===n.events[o][1].type)&&!n.events[o][1]._balanced){i=n.events[o][1];break}return function(r){if(!i)return t(r);if(i._inactive)return d(r);return a=n.parser.defined.includes(c(n.sliceSerialize({start:i.end,end:n.now()}))),e.enter("labelEnd"),e.enter("labelMarker"),e.consume(r),e.exit("labelMarker"),e.exit("labelEnd"),l};function l(r){return 40===r?e.attempt(u,k,a?k:d)(r):91===r?e.attempt(f,k,a?s:d)(r):a?k(r):d(r)}function s(r){return e.attempt(m,k,d)(r)}function k(e){return r(e)}function d(e){return i._balanced=!0,t(e)}},resolveTo:function(e,r){let t,n,i,c,s=e.length,u=0;for(;s--;)if(t=e[s][1],n){if("link"===t.type||"labelLink"===t.type&&t._inactive)break;"enter"===e[s][0]&&"labelLink"===t.type&&(t._inactive=!0)}else if(i){if("enter"===e[s][0]&&("labelImage"===t.type||"labelLink"===t.type)&&!t._balanced&&(n=s,"labelLink"!==t.type)){u=2;break}}else"labelEnd"===t.type&&(i=s);const f={type:"labelLink"===e[n][1].type?"link":"image",start:Object.assign({},e[n][1].start),end:Object.assign({},e[e.length-1][1].end)},m={type:"label",start:Object.assign({},e[n][1].start),end:Object.assign({},e[i][1].end)},k={type:"labelText",start:Object.assign({},e[n+u+2][1].end),end:Object.assign({},e[i-2][1].start)};return c=[["enter",f,r],["enter",m,r]],c=a(c,e.slice(n+1,n+u+3)),c=a(c,[["enter",k,r]]),c=a(c,l(r.parser.constructs.insideSpan.null,e.slice(n+u+4,i-3),r)),c=a(c,[["exit",k,r],e[i-2],e[i-1],["exit",m,r]]),c=a(c,e.slice(i+1)),c=a(c,[["exit",f,r]]),o(e,n,e.length,c),e},resolveAll:function(e){let r=-1;for(;++r<e.length;){const t=e[r][1];"labelImage"!==t.type&&"labelLink"!==t.type&&"labelEnd"!==t.type||(e.splice(r+1,"labelImage"===t.type?4:2),t.type="data",r++)}return e}},u={tokenize:function(r,a,o){return function(e){return r.enter("resource"),r.enter("resourceMarker"),r.consume(e),r.exit("resourceMarker"),c};function c(e){return i(e)?n(r,l)(e):l(e)}function l(t){return 41===t?k(t):e(r,s,u,"resourceDestination","resourceDestinationLiteral","resourceDestinationLiteralMarker","resourceDestinationRaw","resourceDestinationString",32)(t)}function s(e){return i(e)?n(r,f)(e):k(e)}function u(e){return o(e)}function f(e){return 34===e||39===e||40===e?t(r,m,o,"resourceTitle","resourceTitleMarker","resourceTitleString")(e):k(e)}function m(e){return i(e)?n(r,k)(e):k(e)}function k(e){return 41===e?(r.enter("resourceMarker"),r.consume(e),r.exit("resourceMarker"),r.exit("resource"),a):o(e)}}},f={tokenize:function(e,t,n){const i=this;return function(t){return r.call(i,e,a,o,"reference","referenceMarker","referenceString")(t)};function a(e){return i.parser.defined.includes(c(i.sliceSerialize(i.events[i.events.length-1][1]).slice(1,-1)))?t(e):n(e)}function o(e){return n(e)}}},m={tokenize:function(e,r,t){return function(r){return 
e.enter("reference"),e.enter("referenceMarker"),e.consume(r),e.exit("referenceMarker"),n};function n(n){return 93===n?(e.enter("referenceMarker"),e.consume(n),e.exit("referenceMarker"),e.exit("reference"),r):t(n)}}};export{s as labelEnd};
+ //# sourceMappingURL=label-end.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"label-end.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/label-end.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {markdownLineEndingOrSpace} from 'micromark-util-character'\nimport {push, splice} from 'micromark-util-chunked'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/** @type {Construct} */\nexport const labelEnd = {\n name: 'labelEnd',\n tokenize: tokenizeLabelEnd,\n resolveTo: resolveToLabelEnd,\n resolveAll: resolveAllLabelEnd\n}\n\n/** @type {Construct} */\nconst resourceConstruct = {\n tokenize: tokenizeResource\n}\n/** @type {Construct} */\nconst referenceFullConstruct = {\n tokenize: tokenizeReferenceFull\n}\n/** @type {Construct} */\nconst referenceCollapsedConstruct = {\n tokenize: tokenizeReferenceCollapsed\n}\n\n/** @type {Resolver} */\nfunction resolveAllLabelEnd(events) {\n let index = -1\n while (++index < events.length) {\n const token = events[index][1]\n if (\n token.type === 'labelImage' ||\n token.type === 'labelLink' ||\n token.type === 'labelEnd'\n ) {\n // Remove the marker.\n events.splice(index + 1, token.type === 'labelImage' ? 4 : 2)\n token.type = 'data'\n index++\n }\n }\n return events\n}\n\n/** @type {Resolver} */\nfunction resolveToLabelEnd(events, context) {\n let index = events.length\n let offset = 0\n /** @type {Token} */\n let token\n /** @type {number | undefined} */\n let open\n /** @type {number | undefined} */\n let close\n /** @type {Array<Event>} */\n let media\n\n // Find an opening.\n while (index--) {\n token = events[index][1]\n if (open) {\n // If we see another link, or inactive link label, we’ve been here before.\n if (\n token.type === 'link' ||\n (token.type === 'labelLink' && token._inactive)\n ) {\n break\n }\n\n // Mark other link openings as inactive, as we can’t have links in\n // links.\n if (events[index][0] === 'enter' && token.type === 'labelLink') {\n token._inactive = true\n }\n } else if (close) {\n if (\n events[index][0] === 'enter' &&\n (token.type === 'labelImage' || token.type === 'labelLink') &&\n !token._balanced\n ) {\n open = index\n if (token.type !== 'labelLink') {\n offset = 2\n break\n }\n }\n } else if (token.type === 'labelEnd') {\n close = index\n }\n }\n const group = {\n type: events[open][1].type === 'labelLink' ? 
'link' : 'image',\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n }\n const label = {\n type: 'label',\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[close][1].end)\n }\n const text = {\n type: 'labelText',\n start: Object.assign({}, events[open + offset + 2][1].end),\n end: Object.assign({}, events[close - 2][1].start)\n }\n media = [\n ['enter', group, context],\n ['enter', label, context]\n ]\n\n // Opening marker.\n media = push(media, events.slice(open + 1, open + offset + 3))\n\n // Text open.\n media = push(media, [['enter', text, context]])\n\n // Always populated by defaults.\n\n // Between.\n media = push(\n media,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + offset + 4, close - 3),\n context\n )\n )\n\n // Text close, marker close, label close.\n media = push(media, [\n ['exit', text, context],\n events[close - 2],\n events[close - 1],\n ['exit', label, context]\n ])\n\n // Reference, resource, or so.\n media = push(media, events.slice(close + 1))\n\n // Media close.\n media = push(media, [['exit', group, context]])\n splice(events, open, events.length, media)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelEnd(effects, ok, nok) {\n const self = this\n let index = self.events.length\n /** @type {Token} */\n let labelStart\n /** @type {boolean} */\n let defined\n\n // Find an opening.\n while (index--) {\n if (\n (self.events[index][1].type === 'labelImage' ||\n self.events[index][1].type === 'labelLink') &&\n !self.events[index][1]._balanced\n ) {\n labelStart = self.events[index][1]\n break\n }\n }\n return start\n\n /**\n * Start of label end.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // If there is not an okay opening.\n if (!labelStart) {\n return nok(code)\n }\n\n // If the corresponding label (link) start is marked as inactive,\n // it means we’d be wrapping a link, like this:\n //\n // ```markdown\n // > | a [b [c](d) e](f) g.\n // ^\n // ```\n //\n // We can’t have that, so it’s just balanced brackets.\n if (labelStart._inactive) {\n return labelEndNok(code)\n }\n defined = self.parser.defined.includes(\n normalizeIdentifier(\n self.sliceSerialize({\n start: labelStart.end,\n end: self.now()\n })\n )\n )\n effects.enter('labelEnd')\n effects.enter('labelMarker')\n effects.consume(code)\n effects.exit('labelMarker')\n effects.exit('labelEnd')\n return after\n }\n\n /**\n * After `]`.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n // Note: `markdown-rs` also parses GFM footnotes here, which for us is in\n // an extension.\n\n // Resource (`[asd](fgh)`)?\n if (code === 40) {\n return effects.attempt(\n resourceConstruct,\n labelEndOk,\n defined ? labelEndOk : labelEndNok\n )(code)\n }\n\n // Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?\n if (code === 91) {\n return effects.attempt(\n referenceFullConstruct,\n labelEndOk,\n defined ? referenceNotFull : labelEndNok\n )(code)\n }\n\n // Shortcut (`[asd]`) reference?\n return defined ? 
labelEndOk(code) : labelEndNok(code)\n }\n\n /**\n * After `]`, at `[`, but not at a full reference.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceNotFull(code) {\n return effects.attempt(\n referenceCollapsedConstruct,\n labelEndOk,\n labelEndNok\n )(code)\n }\n\n /**\n * Done, we found something.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function labelEndOk(code) {\n // Note: `markdown-rs` does a bunch of stuff here.\n return ok(code)\n }\n\n /**\n * Done, it’s nothing.\n *\n * There was an okay opening, but we didn’t match anything.\n *\n * ```markdown\n * > | [a](b c\n * ^\n * > | [a][b c\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function labelEndNok(code) {\n labelStart._balanced = true\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeResource(effects, ok, nok) {\n return resourceStart\n\n /**\n * At a resource.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceStart(code) {\n effects.enter('resource')\n effects.enter('resourceMarker')\n effects.consume(code)\n effects.exit('resourceMarker')\n return resourceBefore\n }\n\n /**\n * In resource, after `(`, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, resourceOpen)(code)\n : resourceOpen(code)\n }\n\n /**\n * In resource, after optional whitespace, at `)` or a destination.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceOpen(code) {\n if (code === 41) {\n return resourceEnd(code)\n }\n return factoryDestination(\n effects,\n resourceDestinationAfter,\n resourceDestinationMissing,\n 'resourceDestination',\n 'resourceDestinationLiteral',\n 'resourceDestinationLiteralMarker',\n 'resourceDestinationRaw',\n 'resourceDestinationString',\n 32\n )(code)\n }\n\n /**\n * In resource, after destination, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceDestinationAfter(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, resourceBetween)(code)\n : resourceEnd(code)\n }\n\n /**\n * At invalid destination.\n *\n * ```markdown\n * > | [a](<<) b\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceDestinationMissing(code) {\n return nok(code)\n }\n\n /**\n * In resource, after destination and whitespace, at `(` or title.\n *\n * ```markdown\n * > | [a](b ) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceBetween(code) {\n if (code === 34 || code === 39 || code === 40) {\n return factoryTitle(\n effects,\n resourceTitleAfter,\n nok,\n 'resourceTitle',\n 'resourceTitleMarker',\n 'resourceTitleString'\n )(code)\n }\n return resourceEnd(code)\n }\n\n /**\n * In resource, after title, at optional whitespace.\n *\n * ```markdown\n * > | [a](b \"c\") d\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceTitleAfter(code) {\n return markdownLineEndingOrSpace(code)\n ? 
factoryWhitespace(effects, resourceEnd)(code)\n : resourceEnd(code)\n }\n\n /**\n * In resource, at `)`.\n *\n * ```markdown\n * > | [a](b) d\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceEnd(code) {\n if (code === 41) {\n effects.enter('resourceMarker')\n effects.consume(code)\n effects.exit('resourceMarker')\n effects.exit('resource')\n return ok\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeReferenceFull(effects, ok, nok) {\n const self = this\n return referenceFull\n\n /**\n * In a reference (full), at the `[`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFull(code) {\n return factoryLabel.call(\n self,\n effects,\n referenceFullAfter,\n referenceFullMissing,\n 'reference',\n 'referenceMarker',\n 'referenceString'\n )(code)\n }\n\n /**\n * In a reference (full), after `]`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFullAfter(code) {\n return self.parser.defined.includes(\n normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n )\n ? ok(code)\n : nok(code)\n }\n\n /**\n * In reference (full) that was missing.\n *\n * ```markdown\n * > | [a][b d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFullMissing(code) {\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeReferenceCollapsed(effects, ok, nok) {\n return referenceCollapsedStart\n\n /**\n * In reference (collapsed), at `[`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceCollapsedStart(code) {\n // We only attempt a collapsed label if there’s a `[`.\n\n effects.enter('reference')\n effects.enter('referenceMarker')\n effects.consume(code)\n effects.exit('referenceMarker')\n return referenceCollapsedOpen\n }\n\n /**\n * In reference (collapsed), at `]`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceCollapsedOpen(code) {\n if (code === 93) {\n effects.enter('referenceMarker')\n effects.consume(code)\n effects.exit('referenceMarker')\n effects.exit('reference')\n return ok\n }\n return nok(code)\n 
}\n}\n"],"names":["labelEnd","name","tokenize","effects","ok","nok","self","this","labelStart","defined","index","events","length","type","_balanced","code","_inactive","labelEndNok","parser","includes","normalizeIdentifier","sliceSerialize","start","end","now","enter","consume","exit","after","attempt","resourceConstruct","labelEndOk","referenceFullConstruct","referenceNotFull","referenceCollapsedConstruct","resolveTo","context","token","open","close","media","offset","group","Object","assign","label","text","push","slice","resolveAll","constructs","insideSpan","null","splice","resourceBefore","markdownLineEndingOrSpace","factoryWhitespace","resourceOpen","resourceEnd","factoryDestination","resourceDestinationAfter","resourceDestinationMissing","resourceBetween","factoryTitle","resourceTitleAfter","factoryLabel","call","referenceFullAfter","referenceFullMissing","referenceCollapsedOpen"],"mappings":"qnBAmBY,MAACA,EAAW,CACtBC,KAAM,WACNC,SA8IF,SAA0BC,EAASC,EAAIC,GACrC,MAAMC,EAAOC,KACb,IAEIC,EAEAC,EAJAC,EAAQJ,EAAKK,OAAOC,OAOxB,KAAOF,KACL,IACkC,eAA/BJ,EAAKK,OAAOD,GAAO,GAAGG,MACU,cAA/BP,EAAKK,OAAOD,GAAO,GAAGG,QACvBP,EAAKK,OAAOD,GAAO,GAAGI,UACvB,CACAN,EAAaF,EAAKK,OAAOD,GAAO,GAChC,KACD,CAEH,OAiBA,SAAeK,GAEb,IAAKP,EACH,OAAOH,EAAIU,GAYb,GAAIP,EAAWQ,UACb,OAAOC,EAAYF,GAerB,OAbAN,EAAUH,EAAKY,OAAOT,QAAQU,SAC5BC,EACEd,EAAKe,eAAe,CAClBC,MAAOd,EAAWe,IAClBA,IAAKjB,EAAKkB,UAIhBrB,EAAQsB,MAAM,YACdtB,EAAQsB,MAAM,eACdtB,EAAQuB,QAAQX,GAChBZ,EAAQwB,KAAK,eACbxB,EAAQwB,KAAK,YACNC,CACR,EAkBD,SAASA,EAAMb,GAKb,OAAa,KAATA,EACKZ,EAAQ0B,QACbC,EACAC,EACAtB,EAAUsB,EAAad,EAHlBd,CAILY,GAIS,KAATA,EACKZ,EAAQ0B,QACbG,EACAD,EACAtB,EAAUwB,EAAmBhB,EAHxBd,CAILY,GAIGN,EAAUsB,EAAWhB,GAAQE,EAAYF,EACjD,CAgBD,SAASkB,EAAiBlB,GACxB,OAAOZ,EAAQ0B,QACbK,EACAH,EACAd,EAHKd,CAILY,EACH,CAkBD,SAASgB,EAAWhB,GAElB,OAAOX,EAAGW,EACX,CAkBD,SAASE,EAAYF,GAEnB,OADAP,EAAWM,WAAY,EAChBT,EAAIU,EACZ,CACH,EA5TEoB,UAqCF,SAA2BxB,EAAQyB,GACjC,IAGIC,EAEAC,EAEAC,EAEAC,EATA9B,EAAQC,EAAOC,OACf6B,EAAS,EAWb,KAAO/B,KAEL,GADA2B,EAAQ1B,EAAOD,GAAO,GAClB4B,EAAM,CAER,GACiB,SAAfD,EAAMxB,MACU,cAAfwB,EAAMxB,MAAwBwB,EAAMrB,UAErC,MAKuB,UAArBL,EAAOD,GAAO,IAAiC,cAAf2B,EAAMxB,OACxCwB,EAAMrB,WAAY,EAErB,MAAM,GAAIuB,GACT,GACuB,UAArB5B,EAAOD,GAAO,KACE,eAAf2B,EAAMxB,MAAwC,cAAfwB,EAAMxB,QACrCwB,EAAMvB,YAEPwB,EAAO5B,EACY,cAAf2B,EAAMxB,MAAsB,CAC9B4B,EAAS,EACT,KACD,MAEqB,aAAfJ,EAAMxB,OACf0B,EAAQ7B,GAGZ,MAAMgC,EAAQ,CACZ7B,KAA+B,cAAzBF,EAAO2B,GAAM,GAAGzB,KAAuB,OAAS,QACtDS,MAAOqB,OAAOC,OAAO,CAAE,EAAEjC,EAAO2B,GAAM,GAAGhB,OACzCC,IAAKoB,OAAOC,OAAO,CAAA,EAAIjC,EAAOA,EAAOC,OAAS,GAAG,GAAGW,MAEhDsB,EAAQ,CACZhC,KAAM,QACNS,MAAOqB,OAAOC,OAAO,CAAE,EAAEjC,EAAO2B,GAAM,GAAGhB,OACzCC,IAAKoB,OAAOC,OAAO,CAAE,EAAEjC,EAAO4B,GAAO,GAAGhB,MAEpCuB,EAAO,CACXjC,KAAM,YACNS,MAAOqB,OAAOC,OAAO,CAAA,EAAIjC,EAAO2B,EAAOG,EAAS,GAAG,GAAGlB,KACtDA,IAAKoB,OAAOC,OAAO,GAAIjC,EAAO4B,EAAQ,GAAG,GAAGjB,QAuC9C,OArCAkB,EAAQ,CACN,CAAC,QAASE,EAAON,GACjB,CAAC,QAASS,EAAOT,IAInBI,EAAQO,EAAKP,EAAO7B,EAAOqC,MAAMV,EAAO,EAAGA,EAAOG,EAAS,IAG3DD,EAAQO,EAAKP,EAAO,CAAC,CAAC,QAASM,EAAMV,KAKrCI,EAAQO,EACNP,EACAS,EACEb,EAAQlB,OAAOgC,WAAWC,WAAWC,KACrCzC,EAAOqC,MAAMV,EAAOG,EAAS,EAAGF,EAAQ,GACxCH,IAKJI,EAAQO,EAAKP,EAAO,CAClB,CAAC,OAAQM,EAAMV,GACfzB,EAAO4B,EAAQ,GACf5B,EAAO4B,EAAQ,GACf,CAAC,OAAQM,EAAOT,KAIlBI,EAAQO,EAAKP,EAAO7B,EAAOqC,MAAMT,EAAQ,IAGzCC,EAAQO,EAAKP,EAAO,CAAC,CAAC,OAAQE,EAAON,KACrCiB,EAAO1C,EAAQ2B,EAAM3B,EAAOC,OAAQ4B,GAC7B7B,CACT,EAtIEsC,WAiBF,SAA4BtC,GAC1B,IAAID,GAAS,EACb,OAASA,EAAQC,EAAOC,QAAQ,CAC9B,MAAMyB,EAAQ1B,EAAOD,GAAO,GAEX,eAAf2B,EAAMxB,MACS,cAAfwB,EAAMxB,MACS,aAAfwB,EAAMxB,OAGNF,EAAO0C,OAAO3C,EAAQ,EAAkB,eAAf2B,EAAMxB,KAAwB,EAAI,GAC3
DwB,EAAMxB,KAAO,OACbH,IAEH,CACD,OAAOC,CACT,GA7BMmB,EAAoB,CACxB5B,SA4TF,SAA0BC,EAASC,EAAIC,GACrC,OAYA,SAAuBU,GAKrB,OAJAZ,EAAQsB,MAAM,YACdtB,EAAQsB,MAAM,kBACdtB,EAAQuB,QAAQX,GAChBZ,EAAQwB,KAAK,kBACN2B,CACR,EAYD,SAASA,EAAevC,GACtB,OAAOwC,EAA0BxC,GAC7ByC,EAAkBrD,EAASsD,EAA3BD,CAAyCzC,GACzC0C,EAAa1C,EAClB,CAYD,SAAS0C,EAAa1C,GACpB,OAAa,KAATA,EACK2C,EAAY3C,GAEd4C,EACLxD,EACAyD,EACAC,EACA,sBACA,6BACA,mCACA,yBACA,4BACA,GATKF,CAUL5C,EACH,CAYD,SAAS6C,EAAyB7C,GAChC,OAAOwC,EAA0BxC,GAC7ByC,EAAkBrD,EAAS2D,EAA3BN,CAA4CzC,GAC5C2C,EAAY3C,EACjB,CAYD,SAAS8C,EAA2B9C,GAClC,OAAOV,EAAIU,EACZ,CAYD,SAAS+C,EAAgB/C,GACvB,OAAa,KAATA,GAAwB,KAATA,GAAwB,KAATA,EACzBgD,EACL5D,EACA6D,EACA3D,EACA,gBACA,sBACA,sBANK0D,CAOLhD,GAEG2C,EAAY3C,EACpB,CAYD,SAASiD,EAAmBjD,GAC1B,OAAOwC,EAA0BxC,GAC7ByC,EAAkBrD,EAASuD,EAA3BF,CAAwCzC,GACxC2C,EAAY3C,EACjB,CAYD,SAAS2C,EAAY3C,GACnB,OAAa,KAATA,GACFZ,EAAQsB,MAAM,kBACdtB,EAAQuB,QAAQX,GAChBZ,EAAQwB,KAAK,kBACbxB,EAAQwB,KAAK,YACNvB,GAEFC,EAAIU,EACZ,CACH,GAndMiB,EAAyB,CAC7B9B,SAwdF,SAA+BC,EAASC,EAAIC,GAC1C,MAAMC,EAAOC,KACb,OAYA,SAAuBQ,GACrB,OAAOkD,EAAaC,KAClB5D,EACAH,EACAgE,EACAC,EACA,YACA,kBACA,kBAPKH,CAQLlD,EACH,EAYD,SAASoD,EAAmBpD,GAC1B,OAAOT,EAAKY,OAAOT,QAAQU,SACzBC,EACEd,EAAKe,eAAef,EAAKK,OAAOL,EAAKK,OAAOC,OAAS,GAAG,IAAIoC,MAAM,GAAI,KAGtE5C,EAAGW,GACHV,EAAIU,EACT,CAYD,SAASqD,EAAqBrD,GAC5B,OAAOV,EAAIU,EACZ,CACH,GAhhBMmB,EAA8B,CAClChC,SAqhBF,SAAoCC,EAASC,EAAIC,GAC/C,OAcA,SAAiCU,GAO/B,OAJAZ,EAAQsB,MAAM,aACdtB,EAAQsB,MAAM,mBACdtB,EAAQuB,QAAQX,GAChBZ,EAAQwB,KAAK,mBACN0C,CACR,EAcD,SAASA,EAAuBtD,GAC9B,OAAa,KAATA,GACFZ,EAAQsB,MAAM,mBACdtB,EAAQuB,QAAQX,GAChBZ,EAAQwB,KAAK,mBACbxB,EAAQwB,KAAK,aACNvB,GAEFC,EAAIU,EACZ,CACH"}
@@ -0,0 +1,2 @@
+ import{labelEnd as e}from"./label-end.js";const r={name:"labelStartImage",tokenize:function(e,r,t){const n=this;return function(r){return e.enter("labelImage"),e.enter("labelImageMarker"),e.consume(r),e.exit("labelImageMarker"),l};function l(r){return 91===r?(e.enter("labelMarker"),e.consume(r),e.exit("labelMarker"),e.exit("labelImage"),a):t(r)}function a(e){return 94===e&&"_hiddenFootnoteSupport"in n.parser.constructs?t(e):r(e)}},resolveAll:e.resolveAll};export{r as labelStartImage};
+ //# sourceMappingURL=label-start-image.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"label-start-image.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/label-start-image.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {labelEnd} from './label-end.js'\n\n/** @type {Construct} */\nexport const labelStartImage = {\n name: 'labelStartImage',\n tokenize: tokenizeLabelStartImage,\n resolveAll: labelEnd.resolveAll\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelStartImage(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of label (image) start.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('labelImage')\n effects.enter('labelImageMarker')\n effects.consume(code)\n effects.exit('labelImageMarker')\n return open\n }\n\n /**\n * After `!`, at `[`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 91) {\n effects.enter('labelMarker')\n effects.consume(code)\n effects.exit('labelMarker')\n effects.exit('labelImage')\n return after\n }\n return nok(code)\n }\n\n /**\n * After `![`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * This is needed in because, when GFM footnotes are enabled, images never\n * form when started with a `^`.\n * Instead, links form:\n *\n * ```markdown\n * ![^a](b)\n *\n * ![^a][b]\n *\n * [b]: c\n * ```\n *\n * ```html\n * <p>!<a href=\\\"b\\\">^a</a></p>\n * <p>!<a href=\\\"c\\\">^a</a></p>\n * ```\n *\n * @type {State}\n */\n function after(code) {\n // To do: use a new field to do this, this is still needed for\n // `micromark-extension-gfm-footnote`, but the `label-start-link`\n // behavior isn’t.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */\n return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs\n ? nok(code)\n : ok(code)\n }\n}\n"],"names":["labelStartImage","name","tokenize","effects","ok","nok","self","this","code","enter","consume","exit","open","after","parser","constructs","resolveAll","labelEnd"],"mappings":"0CAUY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAQF,SAAiCC,EAASC,EAAIC,GAC5C,MAAMC,EAAOC,KACb,OAYA,SAAeC,GAKb,OAJAL,EAAQM,MAAM,cACdN,EAAQM,MAAM,oBACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,oBACNC,CACR,EAYD,SAASA,EAAKJ,GACZ,OAAa,KAATA,GACFL,EAAQM,MAAM,eACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,cACNE,GAEFR,EAAIG,EACZ,CA6BD,SAASK,EAAML,GAMb,OAAgB,KAATA,GAAe,2BAA4BF,EAAKQ,OAAOC,WAC1DV,EAAIG,GACJJ,EAAGI,EACR,CACH,EAvFEQ,WAAYC,EAASD"}
@@ -0,0 +1,2 @@
+ import{labelEnd as e}from"./label-end.js";const n={name:"labelStartLink",tokenize:function(e,n,t){const r=this;return function(n){return e.enter("labelLink"),e.enter("labelMarker"),e.consume(n),e.exit("labelMarker"),e.exit("labelLink"),l};function l(e){return 94===e&&"_hiddenFootnoteSupport"in r.parser.constructs?t(e):n(e)}},resolveAll:e.resolveAll};export{n as labelStartLink};
+ //# sourceMappingURL=label-start-link.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"label-start-link.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/label-start-link.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {labelEnd} from './label-end.js'\n\n/** @type {Construct} */\nexport const labelStartLink = {\n name: 'labelStartLink',\n tokenize: tokenizeLabelStartLink,\n resolveAll: labelEnd.resolveAll\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelStartLink(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of label (link) start.\n *\n * ```markdown\n * > | a [b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('labelLink')\n effects.enter('labelMarker')\n effects.consume(code)\n effects.exit('labelMarker')\n effects.exit('labelLink')\n return after\n }\n\n /** @type {State} */\n function after(code) {\n // To do: this isn’t needed in `micromark-extension-gfm-footnote`,\n // remove.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */\n return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs\n ? nok(code)\n : ok(code)\n }\n}\n"],"names":["labelStartLink","name","tokenize","effects","ok","nok","self","this","code","enter","consume","exit","after","parser","constructs","resolveAll","labelEnd"],"mappings":"0CAUY,MAACA,EAAiB,CAC5BC,KAAM,iBACNC,SAQF,SAAgCC,EAASC,EAAIC,GAC3C,MAAMC,EAAOC,KACb,OAYA,SAAeC,GAMb,OALAL,EAAQM,MAAM,aACdN,EAAQM,MAAM,eACdN,EAAQO,QAAQF,GAChBL,EAAQQ,KAAK,eACbR,EAAQQ,KAAK,aACNC,CACR,EAGD,SAASA,EAAMJ,GAKb,OAAgB,KAATA,GAAe,2BAA4BF,EAAKO,OAAOC,WAC1DT,EAAIG,GACJJ,EAAGI,EACR,CACH,EAxCEO,WAAYC,EAASD"}
@@ -0,0 +1,2 @@
+ import{factorySpace as n}from"../../micromark-factory-space/index.js";const e={name:"lineEnding",tokenize:function(e,i){return function(r){return e.enter("lineEnding"),e.consume(r),e.exit("lineEnding"),n(e,i,"linePrefix")}}};export{e as lineEnding};
+ //# sourceMappingURL=line-ending.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"line-ending.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/line-ending.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const lineEnding = {\n name: 'lineEnding',\n tokenize: tokenizeLineEnding\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLineEnding(effects, ok) {\n return start\n\n /** @type {State} */\n function start(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, ok, 'linePrefix')\n }\n}\n"],"names":["lineEnding","name","tokenize","effects","ok","code","enter","consume","exit","factorySpace"],"mappings":"sEAUY,MAACA,EAAa,CACxBC,KAAM,aACNC,SAOF,SAA4BC,EAASC,GACnC,OAGA,SAAeC,GAIb,OAHAF,EAAQG,MAAM,cACdH,EAAQI,QAAQF,GAChBF,EAAQK,KAAK,cACNC,EAAaN,EAASC,EAAI,aAClC,CACH"}
@@ -0,0 +1,2 @@
+ import{factorySpace as t}from"../../micromark-factory-space/index.js";import{asciiDigit as e,markdownSpace as n}from"../node_modules/micromark-util-character/index.js";import{blankLine as i}from"./blank-line.js";import{thematicBreak as r}from"./thematic-break.js";const a={name:"list",tokenize:function(t,a,c){const s=this,l=s.events[s.events.length-1];let u=l&&"linePrefix"===l[1].type?l[2].sliceSerialize(l[1],!0).length:0,m=0;return function(n){const i=s.containerState.type||(42===n||43===n||45===n?"listUnordered":"listOrdered");if("listUnordered"===i?!s.containerState.marker||n===s.containerState.marker:e(n)){if(s.containerState.type||(s.containerState.type=i,t.enter(i,{_container:!0})),"listUnordered"===i)return t.enter("listItemPrefix"),42===n||45===n?t.check(r,c,d)(n):d(n);if(!s.interrupt||49===n)return t.enter("listItemPrefix"),t.enter("listItemValue"),f(n)}return c(n)};function f(n){return e(n)&&++m<10?(t.consume(n),f):(!s.interrupt||m<2)&&(s.containerState.marker?n===s.containerState.marker:41===n||46===n)?(t.exit("listItemValue"),d(n)):c(n)}function d(e){return t.enter("listItemMarker"),t.consume(e),t.exit("listItemMarker"),s.containerState.marker=s.containerState.marker||e,t.check(i,s.interrupt?c:k,t.attempt(o,S,p))}function k(t){return s.containerState.initialBlankLine=!0,u++,S(t)}function p(e){return n(e)?(t.enter("listItemPrefixWhitespace"),t.consume(e),t.exit("listItemPrefixWhitespace"),S):c(e)}function S(e){return s.containerState.size=u+s.sliceSerialize(t.exit("listItemPrefix"),!0).length,a(e)}},continuation:{tokenize:function(e,r,o){const s=this;return s.containerState._closeFlow=void 0,e.check(i,(function(n){return s.containerState.furtherBlankLines=s.containerState.furtherBlankLines||s.containerState.initialBlankLine,t(e,r,"listItemIndent",s.containerState.size+1)(n)}),(function(t){if(s.containerState.furtherBlankLines||!n(t))return s.containerState.furtherBlankLines=void 0,s.containerState.initialBlankLine=void 0,l(t);return s.containerState.furtherBlankLines=void 0,s.containerState.initialBlankLine=void 0,e.attempt(c,r,l)(t)}));function l(n){return s.containerState._closeFlow=!0,s.interrupt=void 0,t(e,e.attempt(a,r,o),"linePrefix",s.parser.constructs.disable.null.includes("codeIndented")?void 0:4)(n)}}},exit:function(t){t.exit(this.containerState.type)}},o={tokenize:function(e,i,r){const a=this;return t(e,(function(t){const e=a.events[a.events.length-1];return!n(t)&&e&&"listItemPrefixWhitespace"===e[1].type?i(t):r(t)}),"listItemPrefixWhitespace",a.parser.constructs.disable.null.includes("codeIndented")?void 0:5)},partial:!0},c={tokenize:function(e,n,i){const r=this;return t(e,(function(t){const e=r.events[r.events.length-1];return e&&"listItemIndent"===e[1].type&&e[2].sliceSerialize(e[1],!0).length===r.containerState.size?n(t):i(t)}),"listItemIndent",r.containerState.size+1)},partial:!0};export{a as list};
+ //# sourceMappingURL=list.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"list.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/list.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {asciiDigit, markdownSpace} from 'micromark-util-character'\nimport {blankLine} from './blank-line.js'\nimport {thematicBreak} from './thematic-break.js'\n\n/** @type {Construct} */\nexport const list = {\n name: 'list',\n tokenize: tokenizeListStart,\n continuation: {\n tokenize: tokenizeListContinuation\n },\n exit: tokenizeListEnd\n}\n\n/** @type {Construct} */\nconst listItemPrefixWhitespaceConstruct = {\n tokenize: tokenizeListItemPrefixWhitespace,\n partial: true\n}\n\n/** @type {Construct} */\nconst indentConstruct = {\n tokenize: tokenizeIndent,\n partial: true\n}\n\n// To do: `markdown-rs` parses list items on their own and later stitches them\n// together.\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListStart(effects, ok, nok) {\n const self = this\n const tail = self.events[self.events.length - 1]\n let initialSize =\n tail && tail[1].type === 'linePrefix'\n ? tail[2].sliceSerialize(tail[1], true).length\n : 0\n let size = 0\n return start\n\n /** @type {State} */\n function start(code) {\n const kind =\n self.containerState.type ||\n (code === 42 || code === 43 || code === 45\n ? 'listUnordered'\n : 'listOrdered')\n if (\n kind === 'listUnordered'\n ? !self.containerState.marker || code === self.containerState.marker\n : asciiDigit(code)\n ) {\n if (!self.containerState.type) {\n self.containerState.type = kind\n effects.enter(kind, {\n _container: true\n })\n }\n if (kind === 'listUnordered') {\n effects.enter('listItemPrefix')\n return code === 42 || code === 45\n ? effects.check(thematicBreak, nok, atMarker)(code)\n : atMarker(code)\n }\n if (!self.interrupt || code === 49) {\n effects.enter('listItemPrefix')\n effects.enter('listItemValue')\n return inside(code)\n }\n }\n return nok(code)\n }\n\n /** @type {State} */\n function inside(code) {\n if (asciiDigit(code) && ++size < 10) {\n effects.consume(code)\n return inside\n }\n if (\n (!self.interrupt || size < 2) &&\n (self.containerState.marker\n ? code === self.containerState.marker\n : code === 41 || code === 46)\n ) {\n effects.exit('listItemValue')\n return atMarker(code)\n }\n return nok(code)\n }\n\n /**\n * @type {State}\n **/\n function atMarker(code) {\n effects.enter('listItemMarker')\n effects.consume(code)\n effects.exit('listItemMarker')\n self.containerState.marker = self.containerState.marker || code\n return effects.check(\n blankLine,\n // Can’t be empty when interrupting.\n self.interrupt ? 
nok : onBlank,\n effects.attempt(\n listItemPrefixWhitespaceConstruct,\n endOfPrefix,\n otherPrefix\n )\n )\n }\n\n /** @type {State} */\n function onBlank(code) {\n self.containerState.initialBlankLine = true\n initialSize++\n return endOfPrefix(code)\n }\n\n /** @type {State} */\n function otherPrefix(code) {\n if (markdownSpace(code)) {\n effects.enter('listItemPrefixWhitespace')\n effects.consume(code)\n effects.exit('listItemPrefixWhitespace')\n return endOfPrefix\n }\n return nok(code)\n }\n\n /** @type {State} */\n function endOfPrefix(code) {\n self.containerState.size =\n initialSize +\n self.sliceSerialize(effects.exit('listItemPrefix'), true).length\n return ok(code)\n }\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListContinuation(effects, ok, nok) {\n const self = this\n self.containerState._closeFlow = undefined\n return effects.check(blankLine, onBlank, notBlank)\n\n /** @type {State} */\n function onBlank(code) {\n self.containerState.furtherBlankLines =\n self.containerState.furtherBlankLines ||\n self.containerState.initialBlankLine\n\n // We have a blank line.\n // Still, try to consume at most the items size.\n return factorySpace(\n effects,\n ok,\n 'listItemIndent',\n self.containerState.size + 1\n )(code)\n }\n\n /** @type {State} */\n function notBlank(code) {\n if (self.containerState.furtherBlankLines || !markdownSpace(code)) {\n self.containerState.furtherBlankLines = undefined\n self.containerState.initialBlankLine = undefined\n return notInCurrentItem(code)\n }\n self.containerState.furtherBlankLines = undefined\n self.containerState.initialBlankLine = undefined\n return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)\n }\n\n /** @type {State} */\n function notInCurrentItem(code) {\n // While we do continue, we signal that the flow should be closed.\n self.containerState._closeFlow = true\n // As we’re closing flow, we’re no longer interrupting.\n self.interrupt = undefined\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n effects.attempt(list, ok, nok),\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4\n )(code)\n }\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeIndent(effects, ok, nok) {\n const self = this\n return factorySpace(\n effects,\n afterPrefix,\n 'listItemIndent',\n self.containerState.size + 1\n )\n\n /** @type {State} */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'listItemIndent' &&\n tail[2].sliceSerialize(tail[1], true).length === self.containerState.size\n ? ok(code)\n : nok(code)\n }\n}\n\n/**\n * @type {Exiter}\n * @this {TokenizeContext}\n */\nfunction tokenizeListEnd(effects) {\n effects.exit(this.containerState.type)\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListItemPrefixWhitespace(effects, ok, nok) {\n const self = this\n\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n afterPrefix,\n 'listItemPrefixWhitespace',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4 + 1\n )\n\n /** @type {State} */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return !markdownSpace(code) &&\n tail &&\n tail[1].type === 'listItemPrefixWhitespace'\n ? 
ok(code)\n : nok(code)\n }\n}\n"],"names":["list","name","tokenize","effects","ok","nok","self","this","tail","events","length","initialSize","type","sliceSerialize","size","code","kind","containerState","marker","asciiDigit","enter","_container","check","thematicBreak","atMarker","interrupt","inside","consume","exit","blankLine","onBlank","attempt","listItemPrefixWhitespaceConstruct","endOfPrefix","otherPrefix","initialBlankLine","markdownSpace","continuation","_closeFlow","undefined","furtherBlankLines","factorySpace","notInCurrentItem","indentConstruct","parser","constructs","disable","null","includes","partial"],"mappings":"wQAgBY,MAACA,EAAO,CAClBC,KAAM,OACNC,SA0BF,SAA2BC,EAASC,EAAIC,GACtC,MAAMC,EAAOC,KACPC,EAAOF,EAAKG,OAAOH,EAAKG,OAAOC,OAAS,GAC9C,IAAIC,EACFH,GAAyB,eAAjBA,EAAK,GAAGI,KACZJ,EAAK,GAAGK,eAAeL,EAAK,IAAI,GAAME,OACtC,EACFI,EAAO,EACX,OAGA,SAAeC,GACb,MAAMC,EACJV,EAAKW,eAAeL,OACV,KAATG,GAAwB,KAATA,GAAwB,KAATA,EAC3B,gBACA,eACN,GACW,kBAATC,GACKV,EAAKW,eAAeC,QAAUH,IAAST,EAAKW,eAAeC,OAC5DC,EAAWJ,GACf,CAOA,GANKT,EAAKW,eAAeL,OACvBN,EAAKW,eAAeL,KAAOI,EAC3Bb,EAAQiB,MAAMJ,EAAM,CAClBK,YAAY,KAGH,kBAATL,EAEF,OADAb,EAAQiB,MAAM,kBACE,KAATL,GAAwB,KAATA,EAClBZ,EAAQmB,MAAMC,EAAelB,EAAKmB,EAAlCrB,CAA4CY,GAC5CS,EAAST,GAEf,IAAKT,EAAKmB,WAAsB,KAATV,EAGrB,OAFAZ,EAAQiB,MAAM,kBACdjB,EAAQiB,MAAM,iBACPM,EAAOX,EAEjB,CACD,OAAOV,EAAIU,EACZ,EAGD,SAASW,EAAOX,GACd,OAAII,EAAWJ,MAAWD,EAAO,IAC/BX,EAAQwB,QAAQZ,GACTW,KAGLpB,EAAKmB,WAAaX,EAAO,KAC1BR,EAAKW,eAAeC,OACjBH,IAAST,EAAKW,eAAeC,OACpB,KAATH,GAAwB,KAATA,IAEnBZ,EAAQyB,KAAK,iBACNJ,EAAST,IAEXV,EAAIU,EACZ,CAKD,SAASS,EAAST,GAKhB,OAJAZ,EAAQiB,MAAM,kBACdjB,EAAQwB,QAAQZ,GAChBZ,EAAQyB,KAAK,kBACbtB,EAAKW,eAAeC,OAASZ,EAAKW,eAAeC,QAAUH,EACpDZ,EAAQmB,MACbO,EAEAvB,EAAKmB,UAAYpB,EAAMyB,EACvB3B,EAAQ4B,QACNC,EACAC,EACAC,GAGL,CAGD,SAASJ,EAAQf,GAGf,OAFAT,EAAKW,eAAekB,kBAAmB,EACvCxB,IACOsB,EAAYlB,EACpB,CAGD,SAASmB,EAAYnB,GACnB,OAAIqB,EAAcrB,IAChBZ,EAAQiB,MAAM,4BACdjB,EAAQwB,QAAQZ,GAChBZ,EAAQyB,KAAK,4BACNK,GAEF5B,EAAIU,EACZ,CAGD,SAASkB,EAAYlB,GAInB,OAHAT,EAAKW,eAAeH,KAClBH,EACAL,EAAKO,eAAeV,EAAQyB,KAAK,mBAAmB,GAAMlB,OACrDN,EAAGW,EACX,CACH,EAnIEsB,aAAc,CACZnC,SAwIJ,SAAkCC,EAASC,EAAIC,GAC7C,MAAMC,EAAOC,KAEb,OADAD,EAAKW,eAAeqB,gBAAaC,EAC1BpC,EAAQmB,MAAMO,GAGrB,SAAiBd,GAOf,OANAT,EAAKW,eAAeuB,kBAClBlC,EAAKW,eAAeuB,mBACpBlC,EAAKW,eAAekB,iBAIfM,EACLtC,EACAC,EACA,iBACAE,EAAKW,eAAeH,KAAO,EAJtB2B,CAKL1B,EACH,IAGD,SAAkBA,GAChB,GAAIT,EAAKW,eAAeuB,oBAAsBJ,EAAcrB,GAG1D,OAFAT,EAAKW,eAAeuB,uBAAoBD,EACxCjC,EAAKW,eAAekB,sBAAmBI,EAChCG,EAAiB3B,GAI1B,OAFAT,EAAKW,eAAeuB,uBAAoBD,EACxCjC,EAAKW,eAAekB,sBAAmBI,EAChCpC,EAAQ4B,QAAQY,EAAiBvC,EAAIsC,EAArCvC,CAAuDY,EAC/D,IAGD,SAAS2B,EAAiB3B,GAOxB,OALAT,EAAKW,eAAeqB,YAAa,EAEjChC,EAAKmB,eAAYc,EAGVE,EACLtC,EACAA,EAAQ4B,QAAQ/B,EAAMI,EAAIC,GAC1B,aACAC,EAAKsC,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,qBACzCT,EACA,EANCE,CAOL1B,EACH,CACH,GAxLEa,KAsNF,SAAyBzB,GACvBA,EAAQyB,KAAKrB,KAAKU,eAAeL,KACnC,GApNMoB,EAAoC,CACxC9B,SAyNF,SAA0CC,EAASC,EAAIC,GACrD,MAAMC,EAAOC,KAIb,OAAOkC,EACLtC,GASF,SAAqBY,GACnB,MAAMP,EAAOF,EAAKG,OAAOH,EAAKG,OAAOC,OAAS,GAC9C,OAAQ0B,EAAcrB,IACpBP,GACiB,6BAAjBA,EAAK,GAAGI,KACNR,EAAGW,GACHV,EAAIU,EACT,GAdC,2BACAT,EAAKsC,OAAOC,WAAWC,QAAQC,KAAKC,SAAS,qBACzCT,EACA,EAYR,EA/OEU,SAAS,GAILN,EAAkB,CACtBzC,SAmLF,SAAwBC,EAASC,EAAIC,GACnC,MAAMC,EAAOC,KACb,OAAOkC,EACLtC,GAOF,SAAqBY,GACnB,MAAMP,EAAOF,EAAKG,OAAOH,EAAKG,OAAOC,OAAS,GAC9C,OAAOF,GACY,mBAAjBA,EAAK,GAAGI,MACRJ,EAAK,GAAGK,eAAeL,EAAK,IAAI,GAAME,SAAWJ,EAAKW,eAAeH,KACnEV,EAAGW,GACHV,EAAIU,EACT,GAZC,iBACAT,EAAKW,eAAeH,KAAO,EAY/B,EApMEmC,SAAS"}
@@ -0,0 +1,2 @@
+ import{factorySpace as e}from"../../micromark-factory-space/index.js";import{markdownSpace as t,markdownLineEnding as n}from"../node_modules/micromark-util-character/index.js";const i={name:"setextUnderline",tokenize:function(i,r,s){const o=this;let a;return function(e){let t,n=o.events.length;for(;n--;)if("lineEnding"!==o.events[n][1].type&&"linePrefix"!==o.events[n][1].type&&"content"!==o.events[n][1].type){t="paragraph"===o.events[n][1].type;break}if(!o.parser.lazy[o.now().line]&&(o.interrupt||t))return i.enter("setextHeadingLine"),a=e,function(e){return i.enter("setextHeadingLineSequence"),c(e)}(e);return s(e)};function c(n){return n===a?(i.consume(n),c):(i.exit("setextHeadingLineSequence"),t(n)?e(i,p,"lineSuffix")(n):p(n))}function p(e){return null===e||n(e)?(i.exit("setextHeadingLine"),r(e)):s(e)}},resolveTo:function(e,t){let n,i,r,s=e.length;for(;s--;)if("enter"===e[s][0]){if("content"===e[s][1].type){n=s;break}"paragraph"===e[s][1].type&&(i=s)}else"content"===e[s][1].type&&e.splice(s,1),r||"definition"!==e[s][1].type||(r=s);const o={type:"setextHeading",start:Object.assign({},e[i][1].start),end:Object.assign({},e[e.length-1][1].end)};e[i][1].type="setextHeadingText",r?(e.splice(i,0,["enter",o,t]),e.splice(r+1,0,["exit",e[n][1],t]),e[n][1].end=Object.assign({},e[r][1].end)):e[n][1]=o;return e.push(["exit",o,t]),e}};export{i as setextUnderline};
+ //# sourceMappingURL=setext-underline.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"setext-underline.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/setext-underline.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const setextUnderline = {\n name: 'setextUnderline',\n tokenize: tokenizeSetextUnderline,\n resolveTo: resolveToSetextUnderline\n}\n\n/** @type {Resolver} */\nfunction resolveToSetextUnderline(events, context) {\n // To do: resolve like `markdown-rs`.\n let index = events.length\n /** @type {number | undefined} */\n let content\n /** @type {number | undefined} */\n let text\n /** @type {number | undefined} */\n let definition\n\n // Find the opening of the content.\n // It’ll always exist: we don’t tokenize if it isn’t there.\n while (index--) {\n if (events[index][0] === 'enter') {\n if (events[index][1].type === 'content') {\n content = index\n break\n }\n if (events[index][1].type === 'paragraph') {\n text = index\n }\n }\n // Exit\n else {\n if (events[index][1].type === 'content') {\n // Remove the content end (if needed we’ll add it later)\n events.splice(index, 1)\n }\n if (!definition && events[index][1].type === 'definition') {\n definition = index\n }\n }\n }\n const heading = {\n type: 'setextHeading',\n start: Object.assign({}, events[text][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n }\n\n // Change the paragraph to setext heading text.\n events[text][1].type = 'setextHeadingText'\n\n // If we have definitions in the content, we’ll keep on having content,\n // but we need move it.\n if (definition) {\n events.splice(text, 0, ['enter', heading, context])\n events.splice(definition + 1, 0, ['exit', events[content][1], context])\n events[content][1].end = Object.assign({}, events[definition][1].end)\n } else {\n events[content][1] = heading\n }\n\n // Add the heading exit at the end.\n events.push(['exit', heading, context])\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeSetextUnderline(effects, ok, nok) {\n const self = this\n /** @type {NonNullable<Code>} */\n let marker\n return start\n\n /**\n * At start of heading (setext) underline.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n let index = self.events.length\n /** @type {boolean | undefined} */\n let paragraph\n // Find an opening.\n while (index--) {\n // Skip enter/exit of line ending, line prefix, and content.\n // We can now either have a definition or a paragraph.\n if (\n self.events[index][1].type !== 'lineEnding' &&\n self.events[index][1].type !== 'linePrefix' &&\n self.events[index][1].type !== 'content'\n ) {\n paragraph = self.events[index][1].type === 'paragraph'\n break\n }\n }\n\n // To do: handle lazy/pierce like `markdown-rs`.\n // To do: parse indent like `markdown-rs`.\n if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph)) {\n effects.enter('setextHeadingLine')\n marker = code\n return before(code)\n }\n return nok(code)\n }\n\n /**\n * After 
optional whitespace, at `-` or `=`.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('setextHeadingLineSequence')\n return inside(code)\n }\n\n /**\n * In sequence.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker) {\n effects.consume(code)\n return inside\n }\n effects.exit('setextHeadingLineSequence')\n return markdownSpace(code)\n ? factorySpace(effects, after, 'lineSuffix')(code)\n : after(code)\n }\n\n /**\n * After sequence, after optional whitespace.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('setextHeadingLine')\n return ok(code)\n }\n return nok(code)\n }\n}\n"],"names":["setextUnderline","name","tokenize","effects","ok","nok","self","this","marker","code","paragraph","index","events","length","type","parser","lazy","now","line","interrupt","enter","inside","before","consume","exit","markdownSpace","factorySpace","after","markdownLineEnding","resolveTo","context","content","text","definition","splice","heading","start","Object","assign","end","push"],"mappings":"gLAYY,MAACA,EAAkB,CAC7BC,KAAM,kBACNC,SAkEF,SAAiCC,EAASC,EAAIC,GAC5C,MAAMC,EAAOC,KAEb,IAAIC,EACJ,OAaA,SAAeC,GACb,IAEIC,EAFAC,EAAQL,EAAKM,OAAOC,OAIxB,KAAOF,KAGL,GACiC,eAA/BL,EAAKM,OAAOD,GAAO,GAAGG,MACS,eAA/BR,EAAKM,OAAOD,GAAO,GAAGG,MACS,YAA/BR,EAAKM,OAAOD,GAAO,GAAGG,KACtB,CACAJ,EAA2C,cAA/BJ,EAAKM,OAAOD,GAAO,GAAGG,KAClC,KACD,CAKH,IAAKR,EAAKS,OAAOC,KAAKV,EAAKW,MAAMC,QAAUZ,EAAKa,WAAaT,GAG3D,OAFAP,EAAQiB,MAAM,qBACdZ,EAASC,EAiBb,SAAgBA,GAEd,OADAN,EAAQiB,MAAM,6BACPC,EAAOZ,EACf,CAnBUa,CAAOb,GAEhB,OAAOJ,EAAII,EACZ,EA6BD,SAASY,EAAOZ,GACd,OAAIA,IAASD,GACXL,EAAQoB,QAAQd,GACTY,IAETlB,EAAQqB,KAAK,6BACNC,EAAchB,GACjBiB,EAAavB,EAASwB,EAAO,aAA7BD,CAA2CjB,GAC3CkB,EAAMlB,GACX,CAaD,SAASkB,EAAMlB,GACb,OAAa,OAATA,GAAiBmB,EAAmBnB,IACtCN,EAAQqB,KAAK,qBACNpB,EAAGK,IAELJ,EAAII,EACZ,CACH,EAtKEoB,UAIF,SAAkCjB,EAAQkB,GAExC,IAEIC,EAEAC,EAEAC,EANAtB,EAAQC,EAAOC,OAUnB,KAAOF,KACL,GAAyB,UAArBC,EAAOD,GAAO,GAAgB,CAChC,GAA8B,YAA1BC,EAAOD,GAAO,GAAGG,KAAoB,CACvCiB,EAAUpB,EACV,KACD,CAC6B,cAA1BC,EAAOD,GAAO,GAAGG,OACnBkB,EAAOrB,EAEV,KAG+B,YAA1BC,EAAOD,GAAO,GAAGG,MAEnBF,EAAOsB,OAAOvB,EAAO,GAElBsB,GAAwC,eAA1BrB,EAAOD,GAAO,GAAGG,OAClCmB,EAAatB,GAInB,MAAMwB,EAAU,CACdrB,KAAM,gBACNsB,MAAOC,OAAOC,OAAO,CAAE,EAAE1B,EAAOoB,GAAM,GAAGI,OACzCG,IAAKF,OAAOC,OAAO,CAAA,EAAI1B,EAAOA,EAAOC,OAAS,GAAG,GAAG0B,MAItD3B,EAAOoB,GAAM,GAAGlB,KAAO,oBAInBmB,GACFrB,EAAOsB,OAAOF,EAAM,EAAG,CAAC,QAASG,EAASL,IAC1ClB,EAAOsB,OAAOD,EAAa,EAAG,EAAG,CAAC,OAAQrB,EAAOmB,GAAS,GAAID,IAC9DlB,EAAOmB,GAAS,GAAGQ,IAAMF,OAAOC,OAAO,CAAA,EAAI1B,EAAOqB,GAAY,GAAGM,MAEjE3B,EAAOmB,GAAS,GAAKI,EAKvB,OADAvB,EAAO4B,KAAK,CAAC,OAAQL,EAASL,IACvBlB,CACT"}
@@ -0,0 +1,2 @@
+ import{factorySpace as e}from"../../micromark-factory-space/index.js";import{markdownSpace as t,markdownLineEnding as r}from"../node_modules/micromark-util-character/index.js";const n={name:"thematicBreak",tokenize:function(n,c,i){let a,o=0;return function(e){return n.enter("thematicBreak"),function(e){return a=e,m(e)}(e)};function m(e){return e===a?(n.enter("thematicBreakSequence"),u(e)):o>=3&&(null===e||r(e))?(n.exit("thematicBreak"),c(e)):i(e)}function u(r){return r===a?(n.consume(r),o++,u):(n.exit("thematicBreakSequence"),t(r)?e(n,m,"whitespace")(r):m(r))}}};export{n as thematicBreak};
+ //# sourceMappingURL=thematic-break.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"thematic-break.js","sources":["../../../../node_modules/micromark-core-commonmark/lib/thematic-break.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const thematicBreak = {\n name: 'thematicBreak',\n tokenize: tokenizeThematicBreak\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeThematicBreak(effects, ok, nok) {\n let size = 0\n /** @type {NonNullable<Code>} */\n let marker\n return start\n\n /**\n * Start of thematic break.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('thematicBreak')\n // To do: parse indent like `markdown-rs`.\n return before(code)\n }\n\n /**\n * After optional whitespace, at marker.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n marker = code\n return atBreak(code)\n }\n\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === marker) {\n effects.enter('thematicBreakSequence')\n return sequence(code)\n }\n if (size >= 3 && (code === null || markdownLineEnding(code))) {\n effects.exit('thematicBreak')\n return ok(code)\n }\n return nok(code)\n }\n\n /**\n * In sequence.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function sequence(code) {\n if (code === marker) {\n effects.consume(code)\n size++\n return sequence\n }\n effects.exit('thematicBreakSequence')\n return markdownSpace(code)\n ? factorySpace(effects, atBreak, 'whitespace')(code)\n : atBreak(code)\n }\n}\n"],"names":["thematicBreak","name","tokenize","effects","ok","nok","marker","size","code","enter","atBreak","before","sequence","markdownLineEnding","exit","consume","markdownSpace","factorySpace"],"mappings":"gLAWY,MAACA,EAAgB,CAC3BC,KAAM,gBACNC,SAOF,SAA+BC,EAASC,EAAIC,GAC1C,IAEIC,EAFAC,EAAO,EAGX,OAYA,SAAeC,GAGb,OAFAL,EAAQM,MAAM,iBAehB,SAAgBD,GAEd,OADAF,EAASE,EACFE,EAAQF,EAChB,CAhBQG,CAAOH,EACf,EA2BD,SAASE,EAAQF,GACf,OAAIA,IAASF,GACXH,EAAQM,MAAM,yBACPG,EAASJ,IAEdD,GAAQ,IAAe,OAATC,GAAiBK,EAAmBL,KACpDL,EAAQW,KAAK,iBACNV,EAAGI,IAELH,EAAIG,EACZ,CAYD,SAASI,EAASJ,GAChB,OAAIA,IAASF,GACXH,EAAQY,QAAQP,GAChBD,IACOK,IAETT,EAAQW,KAAK,yBACNE,EAAcR,GACjBS,EAAad,EAASO,EAAS,aAA/BO,CAA6CT,GAC7CE,EAAQF,GACb,CACH"}
@@ -0,0 +1,2 @@
+ const n=a(/[A-Za-z]/),t=a(/[\dA-Za-z]/),r=a(/[#-'*+\--9=?A-Z^-~]/);function u(n){return null!==n&&(n<32||127===n)}const o=a(/\d/),e=a(/[\dA-Fa-f]/),c=a(/[!-/:-@[-`{-~]/);function f(n){return null!==n&&n<-2}function l(n){return null!==n&&(n<0||32===n)}function i(n){return-2===n||-1===n||32===n}function a(n){return function(t){return null!==t&&n.test(String.fromCharCode(t))}}export{n as asciiAlpha,t as asciiAlphanumeric,r as asciiAtext,u as asciiControl,o as asciiDigit,e as asciiHexDigit,c as asciiPunctuation,f as markdownLineEnding,l as markdownLineEndingOrSpace,i as markdownSpace};
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sources":["../../../../../node_modules/micromark-core-commonmark/node_modules/micromark-util-character/index.js"],"sourcesContent":["/**\n * @typedef {import('micromark-util-types').Code} Code\n */\n\nimport {unicodePunctuationRegex} from './lib/unicode-punctuation-regex.js'\n\n/**\n * Check whether the character code represents an ASCII alpha (`a` through `z`,\n * case insensitive).\n *\n * An **ASCII alpha** is an ASCII upper alpha or ASCII lower alpha.\n *\n * An **ASCII upper alpha** is a character in the inclusive range U+0041 (`A`)\n * to U+005A (`Z`).\n *\n * An **ASCII lower alpha** is a character in the inclusive range U+0061 (`a`)\n * to U+007A (`z`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAlpha = regexCheck(/[A-Za-z]/)\n\n/**\n * Check whether the character code represents an ASCII alphanumeric (`a`\n * through `z`, case insensitive, or `0` through `9`).\n *\n * An **ASCII alphanumeric** is an ASCII digit (see `asciiDigit`) or ASCII alpha\n * (see `asciiAlpha`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAlphanumeric = regexCheck(/[\\dA-Za-z]/)\n\n/**\n * Check whether the character code represents an ASCII atext.\n *\n * atext is an ASCII alphanumeric (see `asciiAlphanumeric`), or a character in\n * the inclusive ranges U+0023 NUMBER SIGN (`#`) to U+0027 APOSTROPHE (`'`),\n * U+002A ASTERISK (`*`), U+002B PLUS SIGN (`+`), U+002D DASH (`-`), U+002F\n * SLASH (`/`), U+003D EQUALS TO (`=`), U+003F QUESTION MARK (`?`), U+005E\n * CARET (`^`) to U+0060 GRAVE ACCENT (`` ` ``), or U+007B LEFT CURLY BRACE\n * (`{`) to U+007E TILDE (`~`).\n *\n * See:\n * **\\[RFC5322]**:\n * [Internet Message Format](https://tools.ietf.org/html/rfc5322).\n * P. 
Resnick.\n * IETF.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAtext = regexCheck(/[#-'*+\\--9=?A-Z^-~]/)\n\n/**\n * Check whether a character code is an ASCII control character.\n *\n * An **ASCII control** is a character in the inclusive range U+0000 NULL (NUL)\n * to U+001F (US), or U+007F (DEL).\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function asciiControl(code) {\n return (\n // Special whitespace codes (which have negative values), C0 and Control\n // character DEL\n code !== null && (code < 32 || code === 127)\n )\n}\n\n/**\n * Check whether the character code represents an ASCII digit (`0` through `9`).\n *\n * An **ASCII digit** is a character in the inclusive range U+0030 (`0`) to\n * U+0039 (`9`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiDigit = regexCheck(/\\d/)\n\n/**\n * Check whether the character code represents an ASCII hex digit (`a` through\n * `f`, case insensitive, or `0` through `9`).\n *\n * An **ASCII hex digit** is an ASCII digit (see `asciiDigit`), ASCII upper hex\n * digit, or an ASCII lower hex digit.\n *\n * An **ASCII upper hex digit** is a character in the inclusive range U+0041\n * (`A`) to U+0046 (`F`).\n *\n * An **ASCII lower hex digit** is a character in the inclusive range U+0061\n * (`a`) to U+0066 (`f`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiHexDigit = regexCheck(/[\\dA-Fa-f]/)\n\n/**\n * Check whether the character code represents ASCII punctuation.\n *\n * An **ASCII punctuation** is a character in the inclusive ranges U+0021\n * EXCLAMATION MARK (`!`) to U+002F SLASH (`/`), U+003A COLON (`:`) to U+0040 AT\n * SIGN (`@`), U+005B LEFT SQUARE BRACKET (`[`) to U+0060 GRAVE ACCENT\n * (`` ` ``), or U+007B LEFT CURLY BRACE (`{`) to U+007E TILDE (`~`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/)\n\n/**\n * Check whether a character code is a markdown line ending.\n *\n * A **markdown line ending** is the virtual characters M-0003 CARRIAGE RETURN\n * LINE FEED (CRLF), M-0004 LINE FEED (LF) and M-0005 CARRIAGE RETURN (CR).\n *\n * In micromark, the actual character U+000A LINE FEED (LF) and U+000D CARRIAGE\n * RETURN (CR) are replaced by these virtual characters depending on whether\n * they occurred together.\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function markdownLineEnding(code) {\n return code !== null && code < -2\n}\n\n/**\n * Check whether a character code is a markdown line ending (see\n * `markdownLineEnding`) or markdown space (see `markdownSpace`).\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function markdownLineEndingOrSpace(code) {\n return code !== null && (code < 0 || code === 32)\n}\n\n/**\n * Check whether a character code is a markdown space.\n *\n * A **markdown space** is the concrete character U+0020 SPACE (SP) and the\n * virtual characters M-0001 VIRTUAL SPACE (VS) and M-0002 HORIZONTAL TAB (HT).\n *\n * In micromark, the actual character U+0009 CHARACTER TABULATION (HT) is\n * replaced by one M-0002 HORIZONTAL TAB (HT) and between 0 and 3 M-0001 VIRTUAL\n * SPACE (VS) characters, depending on the column at which the tab occurred.\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function 
markdownSpace(code) {\n return code === -2 || code === -1 || code === 32\n}\n\n// Size note: removing ASCII from the regex and using `asciiPunctuation` here\n// In fact adds to the bundle size.\n/**\n * Check whether the character code represents Unicode punctuation.\n *\n * A **Unicode punctuation** is a character in the Unicode `Pc` (Punctuation,\n * Connector), `Pd` (Punctuation, Dash), `Pe` (Punctuation, Close), `Pf`\n * (Punctuation, Final quote), `Pi` (Punctuation, Initial quote), `Po`\n * (Punctuation, Other), or `Ps` (Punctuation, Open) categories, or an ASCII\n * punctuation (see `asciiPunctuation`).\n *\n * See:\n * **\\[UNICODE]**:\n * [The Unicode Standard](https://www.unicode.org/versions/).\n * Unicode Consortium.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const unicodePunctuation = regexCheck(unicodePunctuationRegex)\n\n/**\n * Check whether the character code represents Unicode whitespace.\n *\n * Note that this does handle micromark specific markdown whitespace characters.\n * See `markdownLineEndingOrSpace` to check that.\n *\n * A **Unicode whitespace** is a character in the Unicode `Zs` (Separator,\n * Space) category, or U+0009 CHARACTER TABULATION (HT), U+000A LINE FEED (LF),\n * U+000C (FF), or U+000D CARRIAGE RETURN (CR) (**\\[UNICODE]**).\n *\n * See:\n * **\\[UNICODE]**:\n * [The Unicode Standard](https://www.unicode.org/versions/).\n * Unicode Consortium.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const unicodeWhitespace = regexCheck(/\\s/)\n\n/**\n * Create a code check from a regex.\n *\n * @param {RegExp} regex\n * @returns {(code: Code) => boolean}\n */\nfunction regexCheck(regex) {\n return check\n\n /**\n * Check whether a code matches the bound regex.\n *\n * @param {Code} code\n * Character code.\n * @returns {boolean}\n * Whether the character code matches the bound regex.\n */\n function check(code) {\n return code !== null && regex.test(String.fromCharCode(code))\n }\n}\n"],"names":["asciiAlpha","regexCheck","asciiAlphanumeric","asciiAtext","asciiControl","code","asciiDigit","asciiHexDigit","asciiPunctuation","markdownLineEnding","markdownLineEndingOrSpace","markdownSpace","regex","test","String","fromCharCode"],"mappings":"AAuBY,MAACA,EAAaC,EAAW,YAcxBC,EAAoBD,EAAW,cAuB/BE,EAAaF,EAAW,uBAa9B,SAASG,EAAaC,GAC3B,OAGW,OAATA,IAAkBA,EAAO,IAAe,MAATA,EAEnC,CAaY,MAACC,EAAaL,EAAW,MAoBxBM,EAAgBN,EAAW,cAe3BO,EAAmBP,EAAW,kBAiBpC,SAASQ,EAAmBJ,GACjC,OAAgB,OAATA,GAAiBA,GAAQ,CAClC,CAWO,SAASK,EAA0BL,GACxC,OAAgB,OAATA,IAAkBA,EAAO,GAAc,KAATA,EACvC,CAiBO,SAASM,EAAcN,GAC5B,OAAiB,IAAVA,IAAyB,IAAVA,GAAwB,KAATA,CACvC,CAqDA,SAASJ,EAAWW,GAClB,OAUA,SAAeP,GACb,OAAgB,OAATA,GAAiBO,EAAMC,KAAKC,OAAOC,aAAaV,GACxD,CACH"}
@@ -1,2 +1,2 @@
- import{combineExtensions as i}from"../micromark-util-combine-extensions/index.js";import{gfmAutolinkLiteral as o}from"../micromark-extension-gfm-autolink-literal/lib/syntax.js";import{gfmFootnote as m}from"../micromark-extension-gfm-footnote/lib/syntax.js";import{gfmStrikethrough as t}from"../micromark-extension-gfm-strikethrough/lib/syntax.js";import{gfmTable as r}from"../micromark-extension-gfm-table/lib/syntax.js";import{gfmTaskListItem as n}from"../micromark-extension-gfm-task-list-item/lib/syntax.js";function e(e){return i([o(),m(),t(e),r(),n()])}export{e as gfm};
+ import{combineExtensions as i}from"../micromark-util-combine-extensions/index.js";import{gfmAutolinkLiteral as o}from"../micromark-extension-gfm-autolink-literal/lib/syntax.js";import{gfmFootnote as m}from"../micromark-extension-gfm-footnote/lib/syntax.js";import{gfmStrikethrough as t}from"../micromark-extension-gfm-strikethrough/lib/syntax.js";import{gfmTable as r}from"../micromark-extension-gfm-table/lib/syntax.js";import{gfmTaskListItem as n}from"../micromark-extension-gfm-task-list-item/lib/syntax.js";function e(e){return i([o,m(),t(e),r,n])}export{e as gfm};
  //# sourceMappingURL=index.js.map
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../../../node_modules/micromark-extension-gfm/index.js"],"sourcesContent":["/**\n * @typedef {import('micromark-extension-gfm-footnote').HtmlOptions} HtmlOptions\n * @typedef {import('micromark-extension-gfm-strikethrough').Options} Options\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').HtmlExtension} HtmlExtension\n */\n\nimport {\n combineExtensions,\n combineHtmlExtensions\n} from 'micromark-util-combine-extensions'\nimport {\n gfmAutolinkLiteral,\n gfmAutolinkLiteralHtml\n} from 'micromark-extension-gfm-autolink-literal'\nimport {gfmFootnote, gfmFootnoteHtml} from 'micromark-extension-gfm-footnote'\nimport {\n gfmStrikethrough,\n gfmStrikethroughHtml\n} from 'micromark-extension-gfm-strikethrough'\nimport {gfmTable, gfmTableHtml} from 'micromark-extension-gfm-table'\nimport {gfmTagfilterHtml} from 'micromark-extension-gfm-tagfilter'\nimport {\n gfmTaskListItem,\n gfmTaskListItemHtml\n} from 'micromark-extension-gfm-task-list-item'\n\n/**\n * Create an extension for `micromark` to enable GFM syntax.\n *\n * @param {Options | null | undefined} [options]\n * Configuration (optional).\n *\n * Passed to `micromark-extens-gfm-strikethrough`.\n * @returns {Extension}\n * Extension for `micromark` that can be passed in `extensions` to enable GFM\n * syntax.\n */\nexport function gfm(options) {\n return combineExtensions([\n gfmAutolinkLiteral(),\n gfmFootnote(),\n gfmStrikethrough(options),\n gfmTable(),\n gfmTaskListItem()\n ])\n}\n\n/**\n * Create an extension for `micromark` to support GFM when serializing to HTML.\n *\n * @param {HtmlOptions | null | undefined} [options]\n * Configuration (optional).\n *\n * Passed to `micromark-extens-gfm-footnote`.\n * @returns {HtmlExtension}\n * Extension for `micromark` that can be passed in `htmlExtensions` to\n * support GFM when serializing to HTML.\n */\nexport function gfmHtml(options) {\n return combineHtmlExtensions([\n gfmAutolinkLiteralHtml(),\n gfmFootnoteHtml(options),\n gfmStrikethroughHtml(),\n gfmTableHtml(),\n gfmTagfilterHtml(),\n gfmTaskListItemHtml()\n ])\n}\n"],"names":["gfm","options","combineExtensions","gfmAutolinkLiteral","gfmFootnote","gfmStrikethrough","gfmTable","gfmTaskListItem"],"mappings":"+fAsCO,SAASA,EAAIC,GAClB,OAAOC,EAAkB,CACvBC,IACAC,IACAC,EAAiBJ,GACjBK,IACAC,KAEJ"}
+ {"version":3,"file":"index.js","sources":["../../../node_modules/micromark-extension-gfm/index.js"],"sourcesContent":["/**\n * @typedef {import('micromark-extension-gfm-footnote').HtmlOptions} HtmlOptions\n * @typedef {import('micromark-extension-gfm-strikethrough').Options} Options\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').HtmlExtension} HtmlExtension\n */\n\nimport {\n combineExtensions,\n combineHtmlExtensions\n} from 'micromark-util-combine-extensions'\nimport {\n gfmAutolinkLiteral,\n gfmAutolinkLiteralHtml\n} from 'micromark-extension-gfm-autolink-literal'\nimport {gfmFootnote, gfmFootnoteHtml} from 'micromark-extension-gfm-footnote'\nimport {\n gfmStrikethrough,\n gfmStrikethroughHtml\n} from 'micromark-extension-gfm-strikethrough'\nimport {gfmTable, gfmTableHtml} from 'micromark-extension-gfm-table'\nimport {gfmTagfilterHtml} from 'micromark-extension-gfm-tagfilter'\nimport {\n gfmTaskListItem,\n gfmTaskListItemHtml\n} from 'micromark-extension-gfm-task-list-item'\n\n/**\n * Create an extension for `micromark` to enable GFM syntax.\n *\n * @param {Options | null | undefined} [options]\n * Configuration (optional).\n *\n * Passed to `micromark-extens-gfm-strikethrough`.\n * @returns {Extension}\n * Extension for `micromark` that can be passed in `extensions` to enable GFM\n * syntax.\n */\nexport function gfm(options) {\n return combineExtensions([\n gfmAutolinkLiteral,\n gfmFootnote(),\n gfmStrikethrough(options),\n gfmTable,\n gfmTaskListItem\n ])\n}\n\n/**\n * Create an extension for `micromark` to support GFM when serializing to HTML.\n *\n * @param {HtmlOptions | null | undefined} [options]\n * Configuration.\n *\n * Passed to `micromark-extens-gfm-footnote`.\n * @returns {HtmlExtension}\n * Extension for `micromark` that can be passed in `htmlExtensions` to\n * support GFM when serializing to HTML.\n */\nexport function gfmHtml(options) {\n return combineHtmlExtensions([\n gfmAutolinkLiteralHtml,\n gfmFootnoteHtml(options),\n gfmStrikethroughHtml,\n gfmTableHtml,\n gfmTagfilterHtml,\n gfmTaskListItemHtml\n ])\n}\n"],"names":["gfm","options","combineExtensions","gfmAutolinkLiteral","gfmFootnote","gfmStrikethrough","gfmTable","gfmTaskListItem"],"mappings":"+fAsCO,SAASA,EAAIC,GAClB,OAAOC,EAAkB,CACvBC,EACAC,IACAC,EAAiBJ,GACjBK,EACAC,GAEJ"}
@@ -1,2 +1,2 @@
- import{asciiAlphanumeric as n,asciiAlpha as t,markdownLineEndingOrSpace as e,asciiControl as r,unicodeWhitespace as u,unicodePunctuation as i}from"../../micromark-util-character/index.js";const o={tokenize:function(n,t,e){let r=0;return function t(i){if((87===i||119===i)&&r<3)return r++,n.consume(i),t;if(46===i&&3===r)return n.consume(i),u;return e(i)};function u(n){return null===n?e(n):t(n)}},partial:!0},l={tokenize:function(n,t,r){let o,l,c;return a;function a(t){return 46===t||95===t?n.check(f,m,s)(t):null===t||e(t)||u(t)||45!==t&&i(t)?m(t):(c=!0,n.consume(t),a)}function s(t){return 95===t?o=!0:(l=o,o=void 0),n.consume(t),a}function m(n){return l||o||!c?r(n):t(n)}},partial:!0},c={tokenize:function(n,t){let r=0,i=0;return o;function o(c){return 40===c?(r++,n.consume(c),o):41===c&&i<r?l(c):33===c||34===c||38===c||39===c||41===c||42===c||44===c||46===c||58===c||59===c||60===c||63===c||93===c||95===c||126===c?n.check(f,t,l)(c):null===c||e(c)||u(c)?t(c):(n.consume(c),o)}function l(t){return 41===t&&i++,n.consume(t),o}},partial:!0},f={tokenize:function(n,r,i){return o;function o(t){return 33===t||34===t||39===t||41===t||42===t||44===t||46===t||58===t||59===t||63===t||95===t||126===t?(n.consume(t),o):38===t?(n.consume(t),c):93===t?(n.consume(t),l):60===t||null===t||e(t)||u(t)?r(t):i(t)}function l(n){return null===n||40===n||91===n||e(n)||u(n)?r(n):o(n)}function c(n){return t(n)?f(n):i(n)}function f(e){return 59===e?(n.consume(e),o):t(e)?(n.consume(e),f):i(e)}},partial:!0},a={tokenize:function(t,e,r){return function(n){return t.consume(n),u};function u(t){return n(t)?r(t):e(t)}},partial:!0},s={name:"wwwAutolink",tokenize:function(n,t,e){const r=this;return function(t){if(87!==t&&119!==t||!v.call(r,r.previous)||z(r.events))return e(t);return n.enter("literalAutolink"),n.enter("literalAutolinkWww"),n.check(o,n.attempt(l,n.attempt(c,u),e),e)(t)};function u(e){return n.exit("literalAutolinkWww"),n.exit("literalAutolink"),t(e)}},previous:v},m={name:"protocolAutolink",tokenize:function(n,o,f){const a=this;let s="",m=!1;return function(t){if((72===t||104===t)&&g.call(a,a.previous)&&!z(a.events))return n.enter("literalAutolink"),n.enter("literalAutolinkHttp"),s+=String.fromCodePoint(t),n.consume(t),k;return f(t)};function k(e){if(t(e)&&s.length<5)return s+=String.fromCodePoint(e),n.consume(e),k;if(58===e){const t=s.toLowerCase();if("http"===t||"https"===t)return n.consume(e),p}return f(e)}function p(t){return 47===t?(n.consume(t),m?A:(m=!0,p)):f(t)}function A(t){return null===t||r(t)||e(t)||u(t)||i(t)?f(t):n.attempt(l,n.attempt(c,h),f)(t)}function h(t){return n.exit("literalAutolinkHttp"),n.exit("literalAutolink"),o(t)}},previous:g},k={name:"emailAutolink",tokenize:function(e,r,u){const i=this;let o,l;return function(n){if(!w(n)||!x.call(i,i.previous)||z(i.events))return u(n);return e.enter("literalAutolink"),e.enter("literalAutolinkEmail"),c(n)};function c(n){return w(n)?(e.consume(n),c):64===n?(e.consume(n),f):u(n)}function f(t){return 46===t?e.check(a,m,s)(t):45===t||95===t||n(t)?(l=!0,e.consume(t),f):m(t)}function s(n){return e.consume(n),o=!0,f}function m(n){return l&&o&&t(i.previous)?(e.exit("literalAutolinkEmail"),e.exit("literalAutolink"),r(n)):u(n)}},previous:x},p={};function A(){return{text:p}}let h=48;for(;h<123;)p[h]=k,h++,58===h?h=65:91===h&&(h=97);function v(n){return null===n||40===n||42===n||95===n||91===n||93===n||126===n||e(n)}function g(n){return!t(n)}function x(n){return!(47===n||w(n))}function w(t){return 43===t||45===t||46===t||95===t||n(t)}function z(n){let 
t=n.length,e=!1;for(;t--;){const r=n[t][1];if(("labelLink"===r.type||"labelImage"===r.type)&&!r._balanced){e=!0;break}if(r._gfmAutolinkLiteralWalkedInto){e=!1;break}}return n.length>0&&!e&&(n[n.length-1][1]._gfmAutolinkLiteralWalkedInto=!0),e}p[43]=k,p[45]=k,p[46]=k,p[95]=k,p[72]=[k,m],p[104]=[k,m],p[87]=[k,s],p[119]=[k,s];export{A as gfmAutolinkLiteral};
+ import{asciiAlphanumeric as n,asciiAlpha as t,markdownLineEndingOrSpace as e,asciiControl as r,unicodeWhitespace as u,unicodePunctuation as i}from"../node_modules/micromark-util-character/index.js";const o={tokenize:function(n,t,e){let r=0;return function t(i){if((87===i||119===i)&&r<3)return r++,n.consume(i),t;if(46===i&&3===r)return n.consume(i),u;return e(i)};function u(n){return null===n?e(n):t(n)}},partial:!0},l={tokenize:function(n,t,r){let o,l,c;return a;function a(t){return 46===t||95===t?n.check(f,m,s)(t):null===t||e(t)||u(t)||45!==t&&i(t)?m(t):(c=!0,n.consume(t),a)}function s(t){return 95===t?o=!0:(l=o,o=void 0),n.consume(t),a}function m(n){return l||o||!c?r(n):t(n)}},partial:!0},c={tokenize:function(n,t){let r=0,i=0;return o;function o(c){return 40===c?(r++,n.consume(c),o):41===c&&i<r?l(c):33===c||34===c||38===c||39===c||41===c||42===c||44===c||46===c||58===c||59===c||60===c||63===c||93===c||95===c||126===c?n.check(f,t,l)(c):null===c||e(c)||u(c)?t(c):(n.consume(c),o)}function l(t){return 41===t&&i++,n.consume(t),o}},partial:!0},f={tokenize:function(n,r,i){return o;function o(t){return 33===t||34===t||39===t||41===t||42===t||44===t||46===t||58===t||59===t||63===t||95===t||126===t?(n.consume(t),o):38===t?(n.consume(t),c):93===t?(n.consume(t),l):60===t||null===t||e(t)||u(t)?r(t):i(t)}function l(n){return null===n||40===n||91===n||e(n)||u(n)?r(n):o(n)}function c(n){return t(n)?f(n):i(n)}function f(e){return 59===e?(n.consume(e),o):t(e)?(n.consume(e),f):i(e)}},partial:!0},a={tokenize:function(t,e,r){return function(n){return t.consume(n),u};function u(t){return n(t)?r(t):e(t)}},partial:!0},s={tokenize:function(n,t,e){const r=this;return function(t){if(87!==t&&119!==t||!v.call(r,r.previous)||z(r.events))return e(t);return n.enter("literalAutolink"),n.enter("literalAutolinkWww"),n.check(o,n.attempt(l,n.attempt(c,u),e),e)(t)};function u(e){return n.exit("literalAutolinkWww"),n.exit("literalAutolink"),t(e)}},previous:v},m={tokenize:function(n,o,f){const a=this;let s="",m=!1;return function(t){if((72===t||104===t)&&d.call(a,a.previous)&&!z(a.events))return n.enter("literalAutolink"),n.enter("literalAutolinkHttp"),s+=String.fromCodePoint(t),n.consume(t),k;return f(t)};function k(e){if(t(e)&&s.length<5)return s+=String.fromCodePoint(e),n.consume(e),k;if(58===e){const t=s.toLowerCase();if("http"===t||"https"===t)return n.consume(e),p}return f(e)}function p(t){return 47===t?(n.consume(t),m?h:(m=!0,p)):f(t)}function h(t){return null===t||r(t)||e(t)||u(t)||i(t)?f(t):n.attempt(l,n.attempt(c,A),f)(t)}function A(t){return n.exit("literalAutolinkHttp"),n.exit("literalAutolink"),o(t)}},previous:d},k={tokenize:function(e,r,u){const i=this;let o,l;return function(n){if(!x(n)||!g.call(i,i.previous)||z(i.events))return u(n);return e.enter("literalAutolink"),e.enter("literalAutolinkEmail"),c(n)};function c(n){return x(n)?(e.consume(n),c):64===n?(e.consume(n),f):u(n)}function f(t){return 46===t?e.check(a,m,s)(t):45===t||95===t||n(t)?(l=!0,e.consume(t),f):m(t)}function s(n){return e.consume(n),o=!0,f}function m(n){return l&&o&&t(i.previous)?(e.exit("literalAutolinkEmail"),e.exit("literalAutolink"),r(n)):u(n)}},previous:g},p={},h={text:p};let A=48;for(;A<123;)p[A]=k,A++,58===A?A=65:91===A&&(A=97);function v(n){return null===n||40===n||42===n||95===n||91===n||93===n||126===n||e(n)}function d(n){return!t(n)}function g(n){return!(47===n||x(n))}function x(t){return 43===t||45===t||46===t||95===t||n(t)}function z(n){let t=n.length,e=!1;for(;t--;){const 
r=n[t][1];if(("labelLink"===r.type||"labelImage"===r.type)&&!r._balanced){e=!0;break}if(r._gfmAutolinkLiteralWalkedInto){e=!1;break}}return n.length>0&&!e&&(n[n.length-1][1]._gfmAutolinkLiteralWalkedInto=!0),e}p[43]=k,p[45]=k,p[46]=k,p[95]=k,p[72]=[k,m],p[104]=[k,m],p[87]=[k,s],p[119]=[k,s];export{h as gfmAutolinkLiteral};
  //# sourceMappingURL=syntax.js.map